diff --git a/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-b4fb6daf-58d3-49e8-97f2-6b7c628e60da1759594854455-2025_10_04-18.21.01.508/source.csv b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-b4fb6daf-58d3-49e8-97f2-6b7c628e60da1759594854455-2025_10_04-18.21.01.508/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..d2fa2994c37e1f807ec0d6b87a859f489abf7d34 --- /dev/null +++ b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-b4fb6daf-58d3-49e8-97f2-6b7c628e60da1759594854455-2025_10_04-18.21.01.508/source.csv @@ -0,0 +1,9667 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,3,"slurm/dev/franz/berlin/atari/tokenizer.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_atari_alien_dev\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\n# Require ENV_NAME to be provided via sbatch --export=ENV_NAME=...\nif [ -z ""${ENV_NAME:-}"" ]; then\n echo ""ENV_NAME must be provided (e.g., sbatch --export=ALL,ENV_NAME=alien $0)"" >&2\n exit 2\nfi\n\ntags=""atari ${ENV_NAME} tokenizer dev lr_3e-5""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_NAME}""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/${ENV_NAME}/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_tokenizer.py \\n --patch_size=16 \\n --image_height=84 \\n --image_width=84 \\n --num_steps=50_000 \\n --max_lr=3e-5 \\n --warmup_steps=5000 \\n --wsd_decay_steps=5000 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +2,307,"TERMINAL",0,0,"",,terminal_focus +3,1381,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"6:21:01 PM [info] Activating crowd-code\n6:21:01 PM [info] Recording started\n6:21:01 PM [info] Initializing git provider using file system watchers...\n",Log,tab +4,2147,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"6:21:02 PM [info] Git repository found\n6:21:02 PM [info] Git provider initialized successfully\n6:21:02 PM [info] Initial git state: [object Object]\n",Log,content +5,2380,"slurm/dev/franz/berlin/atari/tokenizer.sh",0,0,"",shellscript,tab +6,2522,"TERMINAL",0,0,"bash",,terminal_focus +7,2570,"TERMINAL",0,0,"source /home/franz.srambical/jafar/.venv/bin/activate",,terminal_command +8,4380,"TERMINAL",0,0,"",,terminal_command +9,7644,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n 
use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef 
build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored 
= checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = 
jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n 
inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = 
einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +10,11172,"TERMINAL",0,0,"",,terminal_command +11,20452,"jasmine/train_dynamics.py",0,0,"",python,selection_command +12,24834,"jasmine/train_tokenizer.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n 
print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 30_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 1000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(model: TokenizerVQVAE, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> 
grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.ModelAndOptimizer, grain.DataLoaderIterator, grain.DataLoaderIterator\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n 
nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(tokenizer, args)\n del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], 
metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % 
args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +13,28498,"jasmine/train_tokenizer.py",2033,0,"",python,selection_command +14,110131,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",0,0,"#!/usr/bin/env bash\n\nset -euo pipefail\n\nREPO_ROOT=""/home/franz.srambical/jafar""\n\nDATA_ROOT=""${DATA_ROOT:-/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari}""\n\nDYNAMICS_SCRIPT=""${DYNAMICS_SCRIPT:-$REPO_ROOT/slurm/dev/franz/berlin/atari/dynamics.sh}""\n\n# Optional override: TOKENIZER_CKPT_DIR points to a specific tokenizer run dir\nTOKENIZER_CKPT_DIR_OPT=""${TOKENIZER_CKPT_DIR:-}""\n\nif [ ""$#"" -gt 0 ]; then\n ENV_LIST=(""$@"")\nelse\n if [ ! 
-d ""$DATA_ROOT"" ]; then\n echo ""DATA_ROOT does not exist: $DATA_ROOT"" >&2\n exit 1\n fi\n mapfile -t ENV_LIST < <(find ""$DATA_ROOT"" -mindepth 1 -maxdepth 1 -type d -printf '%f\n' | sort)\nfi\n\necho ""Submitting Dynamics jobs for environments: ${ENV_LIST[*]}""\n\nfor env in ""${ENV_LIST[@]}""; do\n env_dir=""$DATA_ROOT/$env""\n if [ ! -d ""$env_dir/train"" ] || [ ! -d ""$env_dir/val"" ]; then\n echo ""Skipping $env (missing train/val under $env_dir)""\n continue\n fi\n\n job_name=""dynamics_atari_${env}_dev""\n\n # Determine tokenizer checkpoint directory per env\n tokenizer_ckpt_dir=""""\n if [ -n ""$TOKENIZER_CKPT_DIR_OPT"" ]; then\n tokenizer_ckpt_dir=""$TOKENIZER_CKPT_DIR_OPT""\n else\n # Heuristic: pick the most recent run under the expected tokenizer ckpt base\n base=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/${env}/tokenizer""\n if [ -d ""$base"" ]; then\n # get newest directory by mtime\n latest=$(find ""$base"" -mindepth 1 -maxdepth 1 -type d -printf '%T@ %p\n' | sort -nr | head -n1 | awk '{print $2}')\n if [ -n ""${latest:-}"" ] && [ -d ""$latest"" ]; then\n tokenizer_ckpt_dir=""$latest""\n fi\n fi\n fi\n\n if [ -z ""$tokenizer_ckpt_dir"" ]; then\n echo ""Skipping $env (no TOKENIZER_CKPT_DIR provided and none found under tokenizer ckpts)""\n continue\n fi\n\n job_id=$(sbatch --parsable \\n --job-name=""$job_name"" \\n --chdir=""$REPO_ROOT"" \\n --export=ALL,ENV_NAME=""$env"",TOKENIZER_CKPT_DIR=""$tokenizer_ckpt_dir"" \\n ""$DYNAMICS_SCRIPT"")\n\n echo ""Submitted $job_name (ENV_NAME=$env) using tokenizer ckpt $tokenizer_ckpt_dir as job $job_id""\ndone\n\n\n",shellscript,tab +15,118194,"jasmine/train_dynamics.py",0,0,"",python,tab +16,119370,"TERMINAL",0,0,"",,terminal_command +17,121370,"jasmine/train_dynamics.py",329,0,"",python,selection_command +18,121521,"jasmine/train_dynamics.py",1609,0,"",python,selection_command +19,123117,"jasmine/train_dynamics.py",1612,0,"",python,selection_command +20,124004,"jasmine/train_dynamics.py",1843,0,"",python,selection_command +21,124692,"jasmine/train_dynamics.py",2507,0,"",python,selection_command +22,125423,"jasmine/train_dynamics.py",2537,0,"",python,selection_command +23,125788,"jasmine/train_dynamics.py",2577,0,"",python,selection_command +24,126085,"jasmine/train_dynamics.py",3766,0,"",python,selection_command +25,126814,"jasmine/train_dynamics.py",2577,0,"",python,selection_command +26,126883,"jasmine/train_dynamics.py",2537,0,"",python,selection_command +27,127028,"jasmine/train_dynamics.py",2507,0,"",python,selection_command +28,127787,"jasmine/train_dynamics.py",8747,0,"",python,selection_command +29,128916,"jasmine/train_dynamics.py",8695,0,"",python,selection_command +30,133266,"jasmine/train_dynamics.py",6952,0,"",python,selection_command +31,134359,"jasmine/train_dynamics.py",15849,0,"",python,selection_command +32,136193,"jasmine/train_dynamics.py",15847,0,"",python,selection_command +33,136352,"jasmine/train_dynamics.py",15828,0,"",python,selection_command +34,136806,"jasmine/train_dynamics.py",16276,0,"",python,selection_command +35,138845,"jasmine/train_dynamics.py",30322,0,"",python,selection_command +36,140145,"jasmine/train_dynamics.py",31365,0,"",python,selection_command +37,141009,"jasmine/train_dynamics.py",30322,0,"",python,selection_command +38,141167,"jasmine/train_dynamics.py",16276,0,"",python,selection_command +39,141729,"jasmine/train_dynamics.py",30322,0,"",python,selection_command +40,141875,"jasmine/train_dynamics.py",31365,0,"",python,selection_command 
+41,142485,"jasmine/train_dynamics.py",30322,0,"",python,selection_command +42,143191,"jasmine/train_dynamics.py",31365,0,"",python,selection_command +43,143640,"jasmine/train_dynamics.py",30322,0,"",python,selection_command +44,143752,"jasmine/train_dynamics.py",16276,0,"",python,selection_command +45,144038,"jasmine/train_dynamics.py",15828,0,"",python,selection_command +46,144630,"jasmine/train_dynamics.py",16276,0,"",python,selection_command +47,145091,"jasmine/train_dynamics.py",16258,0,"",python,selection_command +48,145238,"jasmine/train_dynamics.py",16216,0,"",python,selection_command +49,145359,"jasmine/train_dynamics.py",16157,0,"",python,selection_command +50,145689,"jasmine/train_dynamics.py",16216,0,"",python,selection_command +51,145979,"jasmine/train_dynamics.py",8924,0,"",python,selection_command +52,146846,"jasmine/train_dynamics.py",8978,0,"",python,selection_command +53,147142,"jasmine/train_dynamics.py",9415,0,"",python,selection_command +54,148670,"jasmine/train_dynamics.py",9483,0,"",python,selection_command +55,155852,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",0,0,"",shellscript,tab +56,217162,"jasmine/train_dynamics.py",0,0,"",python,tab +57,218973,"jasmine/train_dynamics.py",9501,0,"",python,selection_command +58,219174,"jasmine/train_dynamics.py",9502,0,"",python,selection_command +59,219697,"jasmine/train_dynamics.py",9501,0,"",python,selection_command +60,219886,"jasmine/train_dynamics.py",9483,0,"",python,selection_command +61,220229,"jasmine/train_dynamics.py",9501,0,"",python,selection_command +62,222162,"jasmine/train_dynamics.py",9483,0,"",python,selection_command +63,222356,"jasmine/train_dynamics.py",9481,0,"",python,selection_command +64,222744,"jasmine/train_dynamics.py",9468,0,"",python,selection_command +65,223884,"jasmine/train_dynamics.py",9515,0,"\n ",python,content +66,224254,"jasmine/train_dynamics.py",9524,0,"r",python,content +67,224254,"jasmine/train_dynamics.py",9525,0,"",python,selection_keyboard +68,224318,"jasmine/train_dynamics.py",9525,0,"e",python,content +69,224319,"jasmine/train_dynamics.py",9526,0,"",python,selection_keyboard +70,224493,"jasmine/train_dynamics.py",9526,0,"s",python,content +71,224494,"jasmine/train_dynamics.py",9527,0,"",python,selection_keyboard +72,224498,"jasmine/train_dynamics.py",9527,0,"t",python,content +73,224498,"jasmine/train_dynamics.py",9528,0,"",python,selection_keyboard +74,224574,"jasmine/train_dynamics.py",9528,0,"o",python,content +75,224575,"jasmine/train_dynamics.py",9529,0,"",python,selection_keyboard +76,224650,"jasmine/train_dynamics.py",9529,0,"r",python,content +77,224650,"jasmine/train_dynamics.py",9530,0,"",python,selection_keyboard +78,243225,"jasmine/train_dynamics.py",9530,0,"e",python,content +79,243226,"jasmine/train_dynamics.py",9531,0,"",python,selection_keyboard +80,243469,"jasmine/train_dynamics.py",9531,0,"o",python,content +81,243470,"jasmine/train_dynamics.py",9532,0,"",python,selection_keyboard +82,244137,"jasmine/train_dynamics.py",9531,1,"",python,content +83,244441,"jasmine/train_dynamics.py",9531,0,"_",python,content +84,244441,"jasmine/train_dynamics.py",9532,0,"",python,selection_keyboard +85,244761,"jasmine/train_dynamics.py",9532,0,"s",python,content +86,244762,"jasmine/train_dynamics.py",9533,0,"",python,selection_keyboard +87,244767,"jasmine/train_dynamics.py",9533,0,"t",python,content +88,244768,"jasmine/train_dynamics.py",9534,0,"",python,selection_keyboard +89,244794,"jasmine/train_dynamics.py",9534,0,"e",python,content 
+90,244794,"jasmine/train_dynamics.py",9535,0,"",python,selection_keyboard +91,244858,"jasmine/train_dynamics.py",9535,0,"p",python,content +92,244858,"jasmine/train_dynamics.py",9536,0,"",python,selection_keyboard +93,245115,"jasmine/train_dynamics.py",9536,0," ",python,content +94,245115,"jasmine/train_dynamics.py",9537,0,"",python,selection_keyboard +95,245385,"jasmine/train_dynamics.py",9537,0,"=",python,content +96,245386,"jasmine/train_dynamics.py",9538,0,"",python,selection_keyboard +97,245478,"jasmine/train_dynamics.py",9538,0,"0",python,content +98,245478,"jasmine/train_dynamics.py",9539,0,"",python,selection_keyboard +99,246133,"jasmine/train_dynamics.py",9538,1,"",python,content +100,246303,"jasmine/train_dynamics.py",9538,0," ",python,content +101,246304,"jasmine/train_dynamics.py",9539,0,"",python,selection_keyboard +102,247269,"jasmine/train_dynamics.py",9539,0,"c",python,content +103,247269,"jasmine/train_dynamics.py",9540,0,"",python,selection_keyboard +104,247434,"jasmine/train_dynamics.py",9540,0,"h",python,content +105,247435,"jasmine/train_dynamics.py",9541,0,"",python,selection_keyboard +106,247495,"jasmine/train_dynamics.py",9541,0,"e",python,content +107,247495,"jasmine/train_dynamics.py",9542,0,"",python,selection_keyboard +108,248040,"jasmine/train_dynamics.py",9541,1,"",python,content +109,248163,"jasmine/train_dynamics.py",9540,1,"",python,content +110,248389,"jasmine/train_dynamics.py",9539,1,"",python,content +111,248766,"jasmine/train_dynamics.py",9539,0,"@",python,content +112,248766,"jasmine/train_dynamics.py",9540,0,"",python,selection_keyboard +113,248773,"jasmine/train_dynamics.py",9540,0,")",python,content +114,248773,"jasmine/train_dynamics.py",9541,0,"",python,selection_keyboard +115,249370,"jasmine/train_dynamics.py",9540,1,"",python,content +116,249534,"jasmine/train_dynamics.py",9539,1,"",python,content +117,249836,"jasmine/train_dynamics.py",9539,0,"3",python,content +118,249836,"jasmine/train_dynamics.py",9540,0,"",python,selection_keyboard +119,249844,"jasmine/train_dynamics.py",9540,0,"0",python,content +120,249844,"jasmine/train_dynamics.py",9541,0,"",python,selection_keyboard +121,250439,"jasmine/train_dynamics.py",9540,1,"",python,content +122,250563,"jasmine/train_dynamics.py",9539,1,"",python,content +123,250858,"jasmine/train_dynamics.py",9539,0,"2",python,content +124,250858,"jasmine/train_dynamics.py",9540,0,"",python,selection_keyboard +125,250873,"jasmine/train_dynamics.py",9540,0,"0",python,content +126,250873,"jasmine/train_dynamics.py",9541,0,"",python,selection_keyboard +127,252058,"jasmine/train_dynamics.py",9541,0,"_",python,content +128,252058,"jasmine/train_dynamics.py",9542,0,"",python,selection_keyboard +129,253135,"jasmine/train_dynamics.py",9542,0,"0",python,content +130,253135,"jasmine/train_dynamics.py",9543,0,"",python,selection_keyboard +131,253236,"jasmine/train_dynamics.py",9543,0,"0",python,content +132,253236,"jasmine/train_dynamics.py",9544,0,"",python,selection_keyboard +133,253417,"jasmine/train_dynamics.py",9544,0,"0",python,content +134,253418,"jasmine/train_dynamics.py",9545,0,"",python,selection_keyboard +135,253716,"jasmine/train_dynamics.py",9544,0,"",python,selection_command +136,434063,"TERMINAL",0,0,"watch squeue",,terminal_command +137,434122,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 18:28:15 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30027 xiao.liu interacti 1 64 PD 2025-10-04T05:54:07 2025-10-04T19:13:04 
0:00 23:59:00 (Resources)30116 emmanouil. interacti 1 104 PD 2025-10-04T18:24:08N/A 0:00\t 30:00 (Priority)30115 xiao.liu interacti 1 64 PD 2025-10-04T18:22:16N/A 0:00 23:59:00 (Priority)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05N/A 0:00 23:59:00 (Priority)30110 emmanouil. interacti 1 104 PD 2025-10-04T16:31:13N/A 0:00\t 30:00 (Priority)30028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 12:32:39 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 12:34:09 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 16:40:19 23:59:00 hai00429972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 30:48 1-00:00:00 hai00130103 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:52:27 35:48 1-00:00:00 hai00530102 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:48:27 39:48 1-00:00:00 hai00430101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 40:18 1-00:00:00 hai00430100 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:45:57 42:18 1-00:00:00 hai00630099 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:13:27 1:14:48 1-00:00:00 hai00530098 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:05:27 1:22:48 1-00:00:00 hai00130081 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:37:49 1-00:00:00 hai00530083 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:37:49 1-00:00:00 hai00530084 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:37:49 1-00:00:00 hai00530085 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:37:49 1-00:00:00 hai00530076 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:49:26 3:38:49 1-00:00:00 hai00130077 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:49:26 3:38:49 1-00:00:00 hai00130079 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:49:26 3:38:49 1-00:00:00 hai00130032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 6:34:19 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 6:37:19 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 16:39:44 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 19:29:19 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 19:29:19 1-00:00:00 hai00129999 alfred.ngu standard 1 16 R 2025-10-03T19:13:04 2025-10-03T19:13:04 23:15:11 1-00:00:00 hai00329998 alfred.ngu standard 1 16 R 2025-10-03T19:11:21 2025-10-03T19:11:21 23:16:54 1-00:00:00 hai00429994 alfred.ngu standard 1 16 R 2025-10-03T18:30:02 2025-10-03T18:30:02 23:58:13 1-00:00:00 hai005",,terminal_output
[watch squeue screen-refresh rows omitted: incremental ANSI terminal updates with no recoverable job information beyond the full table above]
+226,612928,"TERMINAL",0,0,"48887777777888888888388103",,terminal_output +227,614922,"TERMINAL",0,0,"640102099999995050505050505020205202025",,terminal_output +228,616932,"TERMINAL",0,0,"82225151512121515122222222272247",,terminal_output +229,617496,"jasmine/train_dynamics.py",0,0,"",python,tab +230,617527,"jasmine/train_dynamics.py",9516,0,"",python,selection_command +231,618947,"TERMINAL",0,0,"20444333333344444444494469",,terminal_output +232,619196,"jasmine/train_dynamics.py",0,0,"",python,tab +233,620960,"TERMINAL",0,0,"266655555556666666665166820:01",,terminal_output +234,622977,"TERMINAL",0,0,"48887777777888888888388203",,terminal_output +235,623620,"jasmine/train_dynamics.py",8919,0,"",python,selection_mouse +236,624977,"TERMINAL",0,0,"650203099999991:001:001:001:002:002:002:0030305303025",,terminal_output +237,626991,"TERMINAL",0,0,"82224:019:013:0131318:016:0122222222272247",,terminal_output +238,628999,"TERMINAL",0,0,"30444333333344444444494469",,terminal_output +239,631003,"TERMINAL",0,0,"266655555556666666663:0166811",,terminal_output +240,633018,"TERMINAL",0,0,"48887777777888888888388303",,terminal_output +241,635046,"TERMINAL",0,0,"66:00304099999991010101010101040405404025",,terminal_output +242,637034,"TERMINAL",0,0,"82221111114141111122222222272247",,terminal_output +243,639042,"TERMINAL",0,0,"40444333333344444444494469",,terminal_output +244,641080,"TERMINAL",0,0,"266655555556666666661166821",,terminal_output +245,643062,"TERMINAL",0,0,"48887777777888888888388403",,terminal_output +246,645071,"TERMINAL",0,0,"610405099999992020202020202050505505025",,terminal_output +247,647081,"TERMINAL",0,0,"82222121215151212122222222272247",,terminal_output +248,649094,"TERMINAL",0,0,"50444333333344444444494469",,terminal_output +249,651098,"TERMINAL",0,0,"266655555556666666662166831",,terminal_output +250,653108,"TERMINAL",0,0,"48887777777888888888388503",,terminal_output +251,655115,"TERMINAL",0,0,"620504:009999999303030303030308:001:0053:003:0025",,terminal_output +252,655569,"TERMINAL",0,0,"[?1049l\r[?1l>]0;franz.srambical@hai-login2:~/jafar",,terminal_output +253,661260,"TERMINAL",0,0,"deactivate",,terminal_command +254,694359,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",0,0,"",shellscript,tab +255,695807,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",0,0,"",shellscript,selection_command +256,696687,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",20,0,"",shellscript,selection_command +257,696931,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",21,0,"",shellscript,selection_command +258,696956,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",39,0,"",shellscript,selection_command +259,696981,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",40,0,"",shellscript,selection_command +260,697018,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",80,0,"",shellscript,selection_command +261,697049,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",81,0,"",shellscript,selection_command +262,697221,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",158,0,"",shellscript,selection_command +263,697394,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",159,0,"",shellscript,selection_command +264,697544,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",249,0,"",shellscript,selection_command +265,698471,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",250,0,"",shellscript,selection_command +266,698720,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",329,0,"",shellscript,selection_command +267,698760,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",378,0,"",shellscript,selection_command 
+268,698784,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",379,0,"",shellscript,selection_command +269,698812,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",403,0,"",shellscript,selection_command +270,698847,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",423,0,"",shellscript,selection_command +271,698882,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",428,0,"",shellscript,selection_command +272,698918,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",463,0,"",shellscript,selection_command +273,698948,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",519,0,"",shellscript,selection_command +274,698985,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",534,0,"",shellscript,selection_command +275,699015,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",541,0,"",shellscript,selection_command +276,699929,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",2161,0,"",shellscript,selection_keyboard +277,700452,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",423,0,"",shellscript,selection_keyboard +278,700614,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",0,0,"",shellscript,selection_keyboard +279,702931,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",20,0,"",shellscript,selection_command +280,703184,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",21,0,"",shellscript,selection_command +281,703211,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",39,0,"",shellscript,selection_command +282,703237,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",40,0,"",shellscript,selection_command +283,703559,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",80,0,"",shellscript,selection_command +284,703804,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",81,0,"",shellscript,selection_command +285,703838,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",158,0,"",shellscript,selection_command +286,703874,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",159,0,"",shellscript,selection_command +287,703897,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",249,0,"",shellscript,selection_command +288,703935,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",250,0,"",shellscript,selection_command +289,703972,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",329,0,"",shellscript,selection_command +290,703999,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",378,0,"",shellscript,selection_command +291,704038,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",379,0,"",shellscript,selection_command +292,704066,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",403,0,"",shellscript,selection_command +293,704104,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",423,0,"",shellscript,selection_command +294,704133,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",428,0,"",shellscript,selection_command +295,704163,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",463,0,"",shellscript,selection_command +296,704200,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",519,0,"",shellscript,selection_command +297,704233,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",534,0,"",shellscript,selection_command +298,704267,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",541,0,"",shellscript,selection_command +299,704300,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",642,0,"",shellscript,selection_command +300,704334,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",645,0,"",shellscript,selection_command +301,704366,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",646,0,"",shellscript,selection_command +302,704401,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",711,0,"",shellscript,selection_command 
+303,704437,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",712,0,"",shellscript,selection_command +304,704466,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",744,0,"",shellscript,selection_command +305,704501,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",774,0,"",shellscript,selection_command +306,704531,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",840,0,"",shellscript,selection_command +307,704571,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",904,0,"",shellscript,selection_command +308,704706,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",840,0,"",shellscript,selection_command +309,704913,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",774,0,"",shellscript,selection_command +310,705153,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",744,0,"",shellscript,selection_command +311,705326,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",712,0,"",shellscript,selection_command +312,705587,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",711,0,"",shellscript,selection_command +313,705610,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",646,0,"",shellscript,selection_command +314,705639,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",645,0,"",shellscript,selection_command +315,707120,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",0,0,"",shellscript,selection_keyboard +316,712800,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_atari_alien_dev\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\n# Require ENV_NAME to be provided via sbatch --export=ENV_NAME=...\nif [ -z ""${ENV_NAME:-}"" ]; then\n echo ""ENV_NAME must be provided (e.g., sbatch --export=ALL,ENV_NAME=alien $0)"" >&2\n exit 2\nfi\n\ntags=""atari ${ENV_NAME} dynamics dev""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_NAME}""\n\n# Require TOKENIZER_CKPT_DIR to be provided to avoid hardcoding\nif [ -z ""${TOKENIZER_CKPT_DIR:-}"" ]; then\n echo ""TOKENIZER_CKPT_DIR must be provided (e.g., sbatch --export=ALL,ENV_NAME=$ENV_NAME,TOKENIZER_CKPT_DIR=/path/to/tokenizer_ckpt $0)"" >&2\n exit 2\nfi\n\ntokenizer_ckpt_dir=""${TOKENIZER_CKPT_DIR}""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/${ENV_NAME}/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. 
Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --image_height=84 \\n --image_width=84 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +317,717808,"slurm/dev/franz/berlin/atari/dynamics.sh",1903,0,"",shellscript,selection_command +318,718054,"slurm/dev/franz/berlin/atari/dynamics.sh",1779,0,"",shellscript,selection_command +319,718087,"slurm/dev/franz/berlin/atari/dynamics.sh",1736,0,"",shellscript,selection_command +320,720379,"slurm/dev/franz/berlin/atari/dynamics.sh",1735,0,"",shellscript,selection_command +321,720627,"slurm/dev/franz/berlin/atari/dynamics.sh",1732,0,"",shellscript,selection_command +322,720664,"slurm/dev/franz/berlin/atari/dynamics.sh",1721,0,"",shellscript,selection_command +323,720686,"slurm/dev/franz/berlin/atari/dynamics.sh",1577,0,"",shellscript,selection_command +324,720716,"slurm/dev/franz/berlin/atari/dynamics.sh",1535,0,"",shellscript,selection_command +325,720752,"slurm/dev/franz/berlin/atari/dynamics.sh",1471,0,"",shellscript,selection_command +326,720782,"slurm/dev/franz/berlin/atari/dynamics.sh",1470,0,"",shellscript,selection_command +327,720819,"slurm/dev/franz/berlin/atari/dynamics.sh",1387,0,"",shellscript,selection_command +328,720848,"slurm/dev/franz/berlin/atari/dynamics.sh",1386,0,"",shellscript,selection_command +329,720882,"slurm/dev/franz/berlin/atari/dynamics.sh",1348,0,"",shellscript,selection_command +330,721385,"slurm/dev/franz/berlin/atari/dynamics.sh",1351,0,"",shellscript,selection_command +331,721511,"slurm/dev/franz/berlin/atari/dynamics.sh",1353,0,"",shellscript,selection_command +332,721682,"slurm/dev/franz/berlin/atari/dynamics.sh",1358,0,"",shellscript,selection_command +333,721850,"slurm/dev/franz/berlin/atari/dynamics.sh",1361,0,"",shellscript,selection_command +334,721999,"slurm/dev/franz/berlin/atari/dynamics.sh",1369,0,"",shellscript,selection_command +335,722166,"slurm/dev/franz/berlin/atari/dynamics.sh",1370,0,"",shellscript,selection_command +336,722344,"slurm/dev/franz/berlin/atari/dynamics.sh",1379,0,"",shellscript,selection_command +337,722475,"slurm/dev/franz/berlin/atari/dynamics.sh",1383,0,"",shellscript,selection_command +338,723007,"slurm/dev/franz/berlin/atari/dynamics.sh",1384,0,"",shellscript,selection_command +339,723030,"slurm/dev/franz/berlin/atari/dynamics.sh",1384,0," ",shellscript,content +340,723031,"slurm/dev/franz/berlin/atari/dynamics.sh",1385,0,"",shellscript,selection_keyboard +341,723462,"slurm/dev/franz/berlin/atari/dynamics.sh",1385,0,"2",shellscript,content +342,723462,"slurm/dev/franz/berlin/atari/dynamics.sh",1386,0,"",shellscript,selection_keyboard +343,723543,"slurm/dev/franz/berlin/atari/dynamics.sh",1386,0,"0",shellscript,content +344,723544,"slurm/dev/franz/berlin/atari/dynamics.sh",1387,0,"",shellscript,selection_keyboard +345,725213,"slurm/dev/franz/berlin/atari/dynamics.sh",1387,0,"k",shellscript,content +346,725213,"slurm/dev/franz/berlin/atari/dynamics.sh",1388,0,"",shellscript,selection_keyboard +347,725878,"slurm/dev/franz/berlin/atari/dynamics.sh",1388,0,"_",shellscript,content 
+348,725878,"slurm/dev/franz/berlin/atari/dynamics.sh",1389,0,"",shellscript,selection_keyboard +349,726572,"slurm/dev/franz/berlin/atari/dynamics.sh",1389,0,"s",shellscript,content +350,726573,"slurm/dev/franz/berlin/atari/dynamics.sh",1390,0,"",shellscript,selection_keyboard +351,726581,"slurm/dev/franz/berlin/atari/dynamics.sh",1390,0,"t",shellscript,content +352,726581,"slurm/dev/franz/berlin/atari/dynamics.sh",1391,0,"",shellscript,selection_keyboard +353,726621,"slurm/dev/franz/berlin/atari/dynamics.sh",1391,0,"e",shellscript,content +354,726621,"slurm/dev/franz/berlin/atari/dynamics.sh",1392,0,"",shellscript,selection_keyboard +355,726712,"slurm/dev/franz/berlin/atari/dynamics.sh",1392,0,"p",shellscript,content +356,726712,"slurm/dev/franz/berlin/atari/dynamics.sh",1393,0,"",shellscript,selection_keyboard +357,726957,"slurm/dev/franz/berlin/atari/dynamics.sh",1393,0,"s",shellscript,content +358,726957,"slurm/dev/franz/berlin/atari/dynamics.sh",1394,0,"",shellscript,selection_keyboard +359,727184,"slurm/dev/franz/berlin/atari/dynamics.sh",1394,0,"_",shellscript,content +360,727184,"slurm/dev/franz/berlin/atari/dynamics.sh",1395,0,"",shellscript,selection_keyboard +361,727344,"slurm/dev/franz/berlin/atari/dynamics.sh",1395,0,"t",shellscript,content +362,727344,"slurm/dev/franz/berlin/atari/dynamics.sh",1396,0,"",shellscript,selection_keyboard +363,727433,"slurm/dev/franz/berlin/atari/dynamics.sh",1396,0,"o",shellscript,content +364,727433,"slurm/dev/franz/berlin/atari/dynamics.sh",1397,0,"",shellscript,selection_keyboard +365,727556,"slurm/dev/franz/berlin/atari/dynamics.sh",1397,0,"k",shellscript,content +366,727556,"slurm/dev/franz/berlin/atari/dynamics.sh",1398,0,"",shellscript,selection_keyboard +367,727560,"slurm/dev/franz/berlin/atari/dynamics.sh",1398,0,"e",shellscript,content +368,727560,"slurm/dev/franz/berlin/atari/dynamics.sh",1399,0,"",shellscript,selection_keyboard +369,727644,"slurm/dev/franz/berlin/atari/dynamics.sh",1399,0,"n",shellscript,content +370,727645,"slurm/dev/franz/berlin/atari/dynamics.sh",1400,0,"",shellscript,selection_keyboard +371,727717,"slurm/dev/franz/berlin/atari/dynamics.sh",1400,0,"i",shellscript,content +372,727717,"slurm/dev/franz/berlin/atari/dynamics.sh",1401,0,"",shellscript,selection_keyboard +373,727854,"slurm/dev/franz/berlin/atari/dynamics.sh",1401,0,"z",shellscript,content +374,727854,"slurm/dev/franz/berlin/atari/dynamics.sh",1402,0,"",shellscript,selection_keyboard +375,727943,"slurm/dev/franz/berlin/atari/dynamics.sh",1402,0,"e",shellscript,content +376,727943,"slurm/dev/franz/berlin/atari/dynamics.sh",1403,0,"",shellscript,selection_keyboard +377,728001,"slurm/dev/franz/berlin/atari/dynamics.sh",1403,0,"r",shellscript,content +378,728001,"slurm/dev/franz/berlin/atari/dynamics.sh",1404,0,"",shellscript,selection_keyboard +379,728377,"slurm/dev/franz/berlin/atari/dynamics.sh",1403,0,"",shellscript,selection_command +380,853534,"TERMINAL",0,0,"bash /home/franz.srambical/jafar/slurm/dev/franz/berlin/atari/spawn_dynamics.sh alien amidar assault asterix bank_heist battle_zone boxing breakout chopper_command pong",,terminal_command +381,853694,"TERMINAL",0,0,"]633;CSubmitting Dynamics jobs for environments: alien amidar assault asterix bank_heist battle_zone boxing breakout chopper_command pong\r\nSubmitted dynamics_atari_alien_dev (ENV_NAME=alien) using tokenizer ckpt /fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/alien/tokenizer/tokenizer_atari_alien_dev_30092 as job 30117\r\nSubmitted dynamics_atari_amidar_dev (ENV_NAME=amidar) 
using tokenizer ckpt /fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/amidar/tokenizer/tokenizer_atari_amidar_dev_30093 as job 30118\r\nSubmitted dynamics_atari_assault_dev (ENV_NAME=assault) using tokenizer ckpt /fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/assault/tokenizer/tokenizer_atari_assault_dev_30094 as job 30119\r\nSubmitted dynamics_atari_asterix_dev (ENV_NAME=asterix) using tokenizer ckpt /fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/asterix/tokenizer/tokenizer_atari_asterix_dev_30095 as job 30120\r\nSubmitted dynamics_atari_bank_heist_dev (ENV_NAME=bank_heist) using tokenizer ckpt /fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/bank_heist/tokenizer/tokenizer_atari_bank_heist_dev_30096 as job 30121\r\nSubmitted dynamics_atari_battle_zone_dev (ENV_NAME=battle_zone) using tokenizer ckpt /fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/battle_zone/tokenizer/tokenizer_atari_battle_zone_dev_30097 as job 30122\r\nSubmitted dynamics_atari_boxing_dev (ENV_NAME=boxing) using tokenizer ckpt /fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/boxing/tokenizer/tokenizer_atari_boxing_dev_30098 as job 30123\r\nSubmitted dynamics_atari_breakout_dev (ENV_NAME=breakout) using tokenizer ckpt /fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/breakout/tokenizer/tokenizer_atari_breakout_dev_30099 as job 30124\r\nSubmitted dynamics_atari_chopper_command_dev (ENV_NAME=chopper_command) using tokenizer ckpt /fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/chopper_command/tokenizer/tokenizer_atari_chopper_command_dev_30100 as job 30125\r\nSubmitted dynamics_atari_pong_dev (ENV_NAME=pong) using tokenizer ckpt /fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/pong/tokenizer/tokenizer_atari_pong_dev_30103 as job 30126\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +382,864913,"jasmine/train_dynamics.py",0,0,"",python,tab +383,873168,"TERMINAL",0,0,"watch squeue",,terminal_command +384,873225,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 18:35:34 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30027 xiao.liu interacti 1 64 PD 2025-10-04T05:54:07 2025-10-04T19:13:04 0:00 23:59:00 (Resources)30116 emmanouil. interacti 1 104 PD 2025-10-04T18:24:08N/A 0:00\t 30:00 (Priority)30115 xiao.liu interacti 1 64 PD 2025-10-04T18:22:16N/A 0:00 23:59:00 (Priority)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05N/A 0:00 23:59:00 (Priority)30110 emmanouil. 
interacti 1 104 PD 2025-10-04T16:31:13N/A 0:00\t 30:00 (Priority)30028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 12:39:58 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 12:41:28 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 16:47:38 23:59:00 hai00430126 franz.sram standard 1 8 PD 2025-10-04T18:35:15N/A 0:00 1-00:00:00 (Priority)30125 franz.sram standard 1 8 PD 2025-10-04T18:35:15N/A 0:00 1-00:00:00 (Priority)30124 franz.sram standard 1 8 PD 2025-10-04T18:35:15N/A 0:00 1-00:00:00 (Priority)30123 franz.sram standard 1 8 PD 2025-10-04T18:35:15N/A 0:00 1-00:00:00 (Priority)30122 franz.sram standard 1 8 PD 2025-10-04T18:35:15N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 0:07 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 0:07 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 0:07 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 0:07 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 0:07 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 38:07 1-00:00:00 hai00130103 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:52:27 43:07 1-00:00:00 hai00530102 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:48:27 47:07 1-00:00:00 hai00430101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 47:37 1-00:00:00 hai00430100 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:45:57 49:37 1-00:00:00 hai00630099 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:13:27 1:22:07 1-00:00:00 hai00530098 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:05:27 1:30:07 1-00:00:00 hai00130081 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:45:08 1-00:00:00 hai00530083 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:45:08 1-00:00:00 hai00530084 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:45:08 1-00:00:00 hai00530085 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:45:08 1-00:00:00 hai00530076 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:49:26 3:46:08 1-00:00:00 hai00130077 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:49:26 3:46:08 1-00:00:00 hai00130079 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:49:26 3:46:08 1-00:00:00 hai00130032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 6:41:38 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 6:44:38 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 16:47:03 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 19:36:38 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 19:36:38 1-00:00:00 hai00129999 alfred.ngu standard 1 16 R 2025-10-03T19:13:04 2025-10-03T19:13:04 23:22:30 1-00:00:00 hai00329998 alfred.ngu standard 1 16 R 2025-10-03T19:11:21 2025-10-03T19:11:21 23:24:13 1-00:00:00 hai004",,terminal_output +385,875226,"TERMINAL",0,0,"640:0030409999999999991010101010101040405404025",,terminal_output 
+386,877237,"TERMINAL",0,0,"822211111111111111114141111122222222272247",,terminal_output +387,879248,"TERMINAL",0,0,"4044433333333333344444444494469",,terminal_output +388,881309,"TERMINAL",0,0,"26665555555555556666666661166821",,terminal_output +389,883265,"TERMINAL",0,0,"4888777777777777888888888388403",,terminal_output +390,885276,"TERMINAL",0,0,"61040509999999999992020202020202050505505025",,terminal_output +391,887285,"TERMINAL",0,0,"822221212121212121215151212122222222272247",,terminal_output +392,889293,"TERMINAL",0,0,"5044433333333333344444444494469",,terminal_output +393,891301,"TERMINAL",0,0,"26665555555555556666666662166831",,terminal_output +394,893308,"TERMINAL",0,0,"4888777777777777888888888388503",,terminal_output +395,895319,"TERMINAL",0,0,"620508:00999999999999303030303030302:005:0057:007:0025",,terminal_output +396,897327,"TERMINAL",0,0,"822231313131313131318:0150:01313122222222272247",,terminal_output +397,899334,"TERMINAL",0,0,"6:0044433333333333344444444494469",,terminal_output +398,901381,"TERMINAL",0,0,"26665555555555556666666663166841",,terminal_output +399,903358,"TERMINAL",0,0,"48887777777777778888888883883:003",,terminal_output +400,905365,"TERMINAL",0,0,"6302:00109999999999994040404040404010105101025",,terminal_output +401,907373,"TERMINAL",0,0,"822241414141414141411111414122222222272247",,terminal_output +402,909384,"TERMINAL",0,0,"1044433333333333344444444494469",,terminal_output +403,911415,"TERMINAL",0,0,"26665555555555556666666664166851",,terminal_output +404,913401,"TERMINAL",0,0,"4888777777777777888888888388103",,terminal_output +405,915512,"TERMINAL",0,0,"64010209999999999995050505050505020205202025",,terminal_output +406,917458,"TERMINAL",0,0,"822251515151515151512121515122222222272247",,terminal_output +407,919507,"TERMINAL",0,0,"2044433333333333344444444494469",,terminal_output +408,921434,"TERMINAL",0,0,"2666555555555555666666666516685:01",,terminal_output +409,923445,"TERMINAL",0,0,"4888777777777777888888888388203",,terminal_output +410,925456,"TERMINAL",0,0,"65020309999999999996:006:006:006:007:007:007:0030305303025",,terminal_output +411,927492,"TERMINAL",0,0,"82221:011:011:011:011:019:014:018:0131313:011:0122222222272247",,terminal_output +412,929474,"TERMINAL",0,0,"3044433333333333344444444494469",,terminal_output +413,931484,"TERMINAL",0,0,"26665555555555556666666668:0166811",,terminal_output +414,933495,"TERMINAL",0,0,"4888777777777777888888888388303",,terminal_output +415,935566,"TERMINAL",0,0,"61:0030409999999999991010101010101040405404025",,terminal_output +416,937513,"TERMINAL",0,0,"822211111111111111114141111122222222272247",,terminal_output +417,939576,"TERMINAL",0,0,"4044433333333333344444444494469",,terminal_output +418,941533,"TERMINAL",0,0,"26665555555555556666666661166821",,terminal_output +419,943539,"TERMINAL",0,0,"4888777777777777888888888388403",,terminal_output +420,945564,"TERMINAL",0,0,"61040509999999999992020202020202050505505025",,terminal_output +421,947555,"TERMINAL",0,0,"822221212121212121215151212122222222272247",,terminal_output +422,949601,"TERMINAL",0,0,"5044433333333333344444444494469",,terminal_output +423,951575,"TERMINAL",0,0,"26665555555555556666666662166831",,terminal_output +424,953620,"TERMINAL",0,0,"4999888888888888999999999499514",,terminal_output +425,955593,"TERMINAL",0,0,"721519:0130303030303030309:001:003030313131313131313:016:0168:018:0136",,terminal_output +426,957609,"TERMINAL",0,0,"933322222222222233333333383358",,terminal_output 
+427,959613,"TERMINAL",0,0,"7:015554444444444445555555553055740",,terminal_output +428,961638,"TERMINAL",0,0,"377766666666666677777777727792",,terminal_output +429,963729,"TERMINAL",0,0,"59998888888888889999999994994:014",,terminal_output +430,965644,"TERMINAL",0,0,"7313:01114040404040404040101040404141414141414111116111136",,terminal_output +431,967657,"TERMINAL",0,0,"933322222222222233333333383358",,terminal_output +432,969664,"TERMINAL",0,0,"115554444444444445555555554055750",,terminal_output +433,971673,"TERMINAL",0,0,"377766666666666677777777727792",,terminal_output +434,973778,"TERMINAL",0,0,"5999888888888888999999999499114",,terminal_output +435,975689,"TERMINAL",0,0,"74111215050505050505050202050505151515151515121216212136",,terminal_output +436,977700,"TERMINAL",0,0,"933322222222222233333333383358",,terminal_output +437,979717,"TERMINAL",0,0,"21555444444444444555555555505576:00",,terminal_output +438,981766,"TERMINAL",0,0,"377766666666666677777777727792",,terminal_output +439,983735,"TERMINAL",0,0,"5999888888888888999999999499214",,terminal_output +440,985737,"TERMINAL",0,0,"75121312:002:002:002:002:0040:005:009:0030304:002:007:017:017:017:018:018:018:0131316313136",,terminal_output +441,987810,"TERMINAL",0,0,"933322222222222233333333383358",,terminal_output +442,989754,"TERMINAL",0,0,"315554444444444445555555559:0055710",,terminal_output +443,991759,"TERMINAL",0,0,"377766666666666677777777727792",,terminal_output +444,993770,"TERMINAL",0,0,"5999888888888888999999999499314",,terminal_output +445,995781,"TERMINAL",0,0,"72:0131411010101010101010404010101111111111111141416414136",,terminal_output +446,997790,"TERMINAL",0,0,"933322222222222233333333383358",,terminal_output +447,999797,"TERMINAL",0,0,"415554444444444445555555551055720",,terminal_output +448,1001835,"TERMINAL",0,0,"377766666666666677777777727792",,terminal_output +449,1003818,"TERMINAL",0,0,"5999888888888888999999999499414",,terminal_output +450,1005828,"TERMINAL",0,0,"71141512020202020202020505020202121212121212151516515136",,terminal_output +451,1007877,"TERMINAL",0,0,"933322222222222233333333383358",,terminal_output +452,1009925,"TERMINAL",0,0,"515554444444444445555555552055730",,terminal_output +453,1011855,"TERMINAL",0,0,"377766666666666677777777727792",,terminal_output +454,1013872,"TERMINAL",0,0,"5999888888888888999999999499514",,terminal_output +455,1015871,"TERMINAL",0,0,"7215150:01303030303030303050:002:003030313131313131314:017:0169:019:0136",,terminal_output +456,1017877,"TERMINAL",0,0,"933322222222222233333333383358",,terminal_output +457,1019890,"TERMINAL",0,0,"8:015554444444444445555555553055740",,terminal_output +458,1022007,"TERMINAL",0,0,"377766666666666677777777727792",,terminal_output +459,1023954,"TERMINAL",0,0,"59998888888888889999999994995:014",,terminal_output +460,1025917,"TERMINAL",0,0,"7314:01114040404040404040101040404141414141414111116111136",,terminal_output +461,1027948,"TERMINAL",0,0,"933322222222222233333333383358",,terminal_output +462,1029935,"TERMINAL",0,0,"115554444444444445555555554055750",,terminal_output +463,1031958,"TERMINAL",0,0,"377766666666666677777777727792",,terminal_output +464,1033951,"TERMINAL",0,0,"5999888888888888999999999499114",,terminal_output +465,1035962,"TERMINAL",0,0,"74111215050505050505050202050505151515151515121216212136",,terminal_output +466,1037983,"TERMINAL",0,0,"933322222222222233333333383358",,terminal_output +467,1040031,"TERMINAL",0,0,"\r21555444444444444555559532 alfred.ngu1:53:391:53:56 6:44:2510:3007:25529993 nishant.k2 3843T18:29:1801:48:31 
16:49:50[002,007]30008 alfred.ng1 1622:58:503T22:58:56 19:39:250069451299919:13:019:13:04 23:25:17381:211:21 23:27:004",,terminal_output +468,1041989,"TERMINAL",0,0,"37776666666666667777777727792",,terminal_output +469,1043997,"TERMINAL",0,0,"599988888888888899999999499214",,terminal_output +470,1046073,"TERMINAL",0,0,"\r751213116 R2025-10-04T18:38:27hai001\r 30117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 3:00 1-00:00:00 hai00183:00493:005203:00613:0029972 alfred.ngu6:35:347:5741:00130103 franz.sram4:55:0226:00524850:00417:57 50:30052:30609913:27 1:25:00580533:001813:15:314:50:26 3:48:01538:0148:0158:0176499:01199:0132 alfred.ngu1:53:391:53:56 6:44:3110:3007:31529993 nishant.k2 3843T18:29:1801:48:31 16:49:56[002,007]30008 alfred.ng1 1622:58:503T22:58:56 19:39:3100694311299919:13:019:13:04 23:25:23381:211:21 23:27:064",,terminal_output +471,1048070,"TERMINAL",0,0,"933322222222222223333333383358",,terminal_output +472,1050027,"TERMINAL",0,0,"3155544444444444445555555550:0055710",,terminal_output +473,1052076,"TERMINAL",0,0,"377766666666666667777777727792",,terminal_output +474,1054061,"TERMINAL",0,0,"5999888888888888899999999499314",,terminal_output +475,1056052,"TERMINAL",0,0,"73:0131411010101010101010104040101011111111111141416414136",,terminal_output +476,1058060,"TERMINAL",0,0,"933322222222222223333333383358",,terminal_output +477,1060069,"TERMINAL",0,0,"415554444444444444555555551055720",,terminal_output +478,1062082,"TERMINAL",0,0,"377766666666666667777777727792",,terminal_output +479,1064089,"TERMINAL",0,0,"5999888888888888899999999499414",,terminal_output +480,1066098,"TERMINAL",0,0,"71141512020202020202020205050202021212121212151516515136",,terminal_output +481,1068105,"TERMINAL",0,0,"933322222222222223333333383358",,terminal_output +482,1070113,"TERMINAL",0,0,"515554444444444444555555552055730",,terminal_output +483,1072122,"TERMINAL",0,0,"377766666666666667777777727792",,terminal_output +484,1072246,"TERMINAL",0,0,"[?1049l\r[?1l>]0;franz.srambical@hai-login2:~/jafar",,terminal_output +485,1076767,"TERMINAL",0,0,"watch squeue",,terminal_command +486,1076824,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 18:38:58 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30027 xiao.liu interacti 1 64 PD 2025-10-04T05:54:07 2025-10-04T19:13:04 0:00 23:59:00 (Resources)30116 emmanouil. interacti 1 104 PD 2025-10-04T18:24:08N/A 0:00\t 30:00 (Priority)30115 xiao.liu interacti 1 64 PD 2025-10-04T18:22:16N/A 0:00 23:59:00 (Priority)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05N/A 0:00 23:59:00 (Priority)30110 emmanouil. 
interacti 1 104 PD 2025-10-04T16:31:13N/A 0:00\t 30:00 (Priority)30028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 12:43:22 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 12:44:52 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 16:51:02 23:59:00 hai00430126 franz.sram standard 1 8 PD 2025-10-04T18:35:15N/A 0:00 1-00:00:00 (Priority)30125 franz.sram standard 1 8 PD 2025-10-04T18:35:15N/A 0:00 1-00:00:00 (Priority)30124 franz.sram standard 1 8 PD 2025-10-04T18:35:15N/A 0:00 1-00:00:00 (Priority)30123 franz.sram standard 1 8 PD 2025-10-04T18:35:15N/A 0:00 1-00:00:00 (Priority)30122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 0:31 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 3:31 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 3:31 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 3:31 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 3:31 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 3:31 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 41:31 1-00:00:00 hai00130103 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:52:27 46:31 1-00:00:00 hai00530102 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:48:27 50:31 1-00:00:00 hai00430101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 51:01 1-00:00:00 hai00430100 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:45:57 53:01 1-00:00:00 hai00630099 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:13:27 1:25:31 1-00:00:00 hai00530098 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:05:27 1:33:31 1-00:00:00 hai00130081 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:48:32 1-00:00:00 hai00530083 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:48:32 1-00:00:00 hai00530084 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:48:32 1-00:00:00 hai00530085 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 3:48:32 1-00:00:00 hai00530076 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:49:26 3:49:32 1-00:00:00 hai00130079 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:49:26 3:49:32 1-00:00:00 hai00130032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 6:45:02 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 6:48:02 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 16:50:27 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 19:40:02 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 19:40:02 1-00:00:00 hai00129999 alfred.ngu standard 1 16 R 2025-10-03T19:13:04 2025-10-03T19:13:04 23:25:54 1-00:00:00 hai00329998 alfred.ngu standard 1 16 R 2025-10-03T19:11:21 2025-10-03T19:11:21 23:27:37 1-00:00:00 hai004",,terminal_output +487,1078832,"TERMINAL",0,0,"9:0044433333333333334444444494469",,terminal_output +488,1080844,"TERMINAL",0,0,"26665555555555555666666663166841",,terminal_output +489,1082852,"TERMINAL",0,0,"48887777777777777888888883886:003",,terminal_output 
+490,1084863,"TERMINAL",0,0,"6305:0010999999999999940404040404010105101025",,terminal_output +491,1086931,"TERMINAL",0,0,"8222414141414141414141111141412222222272247",,terminal_output +492,1088880,"TERMINAL",0,0,"10444Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)2280:4373183419350330121 franz.sram8:35:158:35 3:43629972 alfred.ngu6:35:3471:43135246:43528:2\t0:43171:13410045:57 53:13691325:435984:55:027:05:27 1:33:43114344485508:4456479 franz.sram3:15:314:49:26 3:49:4423:3935:14130031 alfred.ng1 164T11:50:3011:50:56 6:48:1400529993 nishant.k2 38418:29:184T01:48:31 16:50:39[002,007]8046300022:58:522:58:56 19:40:1193:043:04 23:26:06329998 alfred.ngu standard 1 16 R 2025-10-03T19:11:21 2025-10-03T19:11:21 23:27:49 1-00:00:00 hai004",,terminal_output +493,1090890,"TERMINAL",0,0,"26665555555555555666666664166851",,terminal_output +494,1092895,"TERMINAL",0,0,"4888777777777777788888888388103",,terminal_output +495,1094906,"TERMINAL",0,0,"6401020999999999999950505050505020205202025",,terminal_output +496,1096967,"TERMINAL",0,0,"8222515151515151515151212151512222222272247",,terminal_output +497,1098926,"TERMINAL",0,0,"2044433333333333334444444494469",,terminal_output +498,1100937,"TERMINAL",0,0,"2666555555555555566666666516688:01",,terminal_output +499,1102947,"TERMINAL",0,0,"4888777777777777788888888388203",,terminal_output +500,1105060,"TERMINAL",0,0,"650203099999999999999:009:009:009:0050:0050:0030305303025",,terminal_output +501,1107001,"TERMINAL",0,0,"82221:014:014:014:014:014:012:017:011:0131316:014:012222222272247",,terminal_output +502,1109028,"TERMINAL",0,0,"3044433333333333334444444494469",,terminal_output +503,1110992,"TERMINAL",0,0,"26665555555555555666666661:0166811",,terminal_output +504,1113044,"TERMINAL",0,0,"4888777777777777788888888388303",,terminal_output +505,1115019,"TERMINAL",0,0,"64:003040999999999999910101010101040405404025",,terminal_output +506,1117010,"TERMINAL",0,0,"8222111111111111111111414111112222222272247",,terminal_output +507,1119083,"TERMINAL",0,0,"4044433333333333334444444494469",,terminal_output +508,1121035,"TERMINAL",0,0,"26665555555555555666666661166821",,terminal_output +509,1123034,"TERMINAL",0,0,"4888777777777777788888888388403",,terminal_output +510,1125125,"TERMINAL",0,0,"6104050999999999999920202020202050505505025",,terminal_output +511,1127072,"TERMINAL",0,0,"8222212121212121212121515121212222222272247",,terminal_output +512,1129118,"TERMINAL",0,0,"5044433333333333334444444494469",,terminal_output +513,1131068,"TERMINAL",0,0,"26665555555555555666666662166831",,terminal_output +514,1133077,"TERMINAL",0,0,"4888777777777777788888888388503",,terminal_output +515,1135159,"TERMINAL",0,0,"620502:0099999999999993030303030306:009:0051:001:0025",,terminal_output +516,1137096,"TERMINAL",0,0,"82223131313131313131312:014:0131312222222272247",,terminal_output +517,1139102,"TERMINAL",0,0,"40:0044433333333333334444444494469",,terminal_output +518,1141203,"TERMINAL",0,0,"26665555555555555666666663166841",,terminal_output +519,1143120,"TERMINAL",0,0,"48887777777777777888888883887:003",,terminal_output +520,1145149,"TERMINAL",0,0,"6306:0010999999999999940404040404010105101025",,terminal_output +521,1147152,"TERMINAL",0,0,"8222414141414141414141111141412222222272247",,terminal_output +522,1149148,"TERMINAL",0,0,"1044433333333333334444444494469",,terminal_output +523,1151155,"TERMINAL",0,0,"26665555555555555666666664166851",,terminal_output +524,1153163,"TERMINAL",0,0,"4888777777777777788888888388103",,terminal_output 
+525,1155234,"TERMINAL",0,0,"6401020999999999999950505050505020205202025",,terminal_output +526,1157185,"TERMINAL",0,0,"8222515151515151515151212151512222222272247",,terminal_output +527,1159192,"TERMINAL",0,0,"2044433333333333334444444494469",,terminal_output +528,1161202,"TERMINAL",0,0,"2666555555555555566666666516689:01",,terminal_output +529,1163209,"TERMINAL",0,0,"4888777777777777788888888388203",,terminal_output +530,1165216,"TERMINAL",0,0,"6502030999999999999950:0050:0050:0050:001:001:0030305303025",,terminal_output +531,1167226,"TERMINAL",0,0,"82222:015:015:015:015:015:013:018:012:0131317:015:012222222272247",,terminal_output +532,1169244,"TERMINAL",0,0,"3044433333333333334444444494469",,terminal_output +533,1171245,"TERMINAL",0,0,"26665555555555555666666662:0166811",,terminal_output +534,1173253,"TERMINAL",0,0,"4888777777777777788888888388303",,terminal_output +535,1175265,"TERMINAL",0,0,"65:003040999999999999910101010101040405404025",,terminal_output +536,1177277,"TERMINAL",0,0,"8222111111111111111111414111112222222272247",,terminal_output +537,1179287,"TERMINAL",0,0,"4044433333333333334444444494469",,terminal_output +538,1181292,"TERMINAL",0,0,"26665555555555555666666661166821",,terminal_output +539,1183302,"TERMINAL",0,0,"4888777777777777788888888388403",,terminal_output +540,1185309,"TERMINAL",0,0,"6104050999999999999920202020202050505505025",,terminal_output +541,1187320,"TERMINAL",0,0,"8222212121212121212121515121212222222272247",,terminal_output +542,1189330,"TERMINAL",0,0,"5044433333333333334444444494469",,terminal_output +543,1191336,"TERMINAL",0,0,"26665555555555555666666662166831",,terminal_output +544,1193349,"TERMINAL",0,0,"4888777777777777788888888388503",,terminal_output +545,1195357,"TERMINAL",0,0,"620503:0099999999999993030303030307:0050:0052:002:0025",,terminal_output +546,1197364,"TERMINAL",0,0,"82223131313131313131313:015:0131312222222272247",,terminal_output +547,1199374,"TERMINAL",0,0,"1:0044433333333333334444444494469",,terminal_output +548,1201382,"TERMINAL",0,0,"26665555555555555666666663166841",,terminal_output +549,1203394,"TERMINAL",0,0,"48887777777777777888888883888:003",,terminal_output +550,1205402,"TERMINAL",0,0,"6307:0010999999999999940404040404010105101025",,terminal_output +551,1207457,"TERMINAL",0,0,"8222414141414141414141111141412222222272247",,terminal_output +552,1209421,"TERMINAL",0,0,"1044433333333333334444444494469",,terminal_output +553,1211430,"TERMINAL",0,0,"26665555555555555666666664166851",,terminal_output +554,1213438,"TERMINAL",0,0,"4888777777777777788888888388103",,terminal_output +555,1215447,"TERMINAL",0,0,"6401020999999999999950505050505020205202025",,terminal_output +556,1217459,"TERMINAL",0,0,"8222515151515151515151212151512222222272247",,terminal_output +557,1219465,"TERMINAL",0,0,"2044433333333333334444444494469",,terminal_output +558,1221480,"TERMINAL",0,0,"26665555555555555666666665166830:01",,terminal_output +559,1223491,"TERMINAL",0,0,"4888777777777777788888888388203",,terminal_output +560,1225503,"TERMINAL",0,0,"650203099999999999991:001:001:001:002:002:0030305303025",,terminal_output +561,1227507,"TERMINAL",0,0,"82223:016:016:016:016:016:014:019:013:0131318:016:012222222272247",,terminal_output +562,1229576,"TERMINAL",0,0,"3044433333333333334444444494469",,terminal_output +563,1231526,"TERMINAL",0,0,"26665555555555555666666663:0166811",,terminal_output +564,1233537,"TERMINAL",0,0,"4888777777777777788888888388303",,terminal_output 
+565,1235547,"TERMINAL",0,0,"66:003040999999999999910101010101040405404025",,terminal_output +566,1237551,"TERMINAL",0,0,"8222111111111111111111414111112222222272247",,terminal_output +567,1239561,"TERMINAL",0,0,"4044433333333333334444444494469",,terminal_output +568,1241571,"TERMINAL",0,0,"26665555555555555666666661166821",,terminal_output +569,1243584,"TERMINAL",0,0,"4888777777777777788888888388403",,terminal_output +570,1245588,"TERMINAL",0,0,"61141512020202020202020205050202021212121212151516515136",,terminal_output +571,1247699,"TERMINAL",0,0,"933322222222222223333333383358",,terminal_output +572,1249609,"TERMINAL",0,0,"515554444444444444555555552055730",,terminal_output +573,1251692,"TERMINAL",0,0,"377766666666666667777777727792",,terminal_output +574,1253631,"TERMINAL",0,0,"5999888888888888899999999499514",,terminal_output +575,1255637,"TERMINAL",0,0,"721514:013030303030303030304:006:0030303131313131318:011:0163:013:0136",,terminal_output +576,1257645,"TERMINAL",0,0,"933322222222222223333333383358",,terminal_output +577,1259656,"TERMINAL",0,0,"2:015554444444444444555555553055740",,terminal_output +578,1261731,"TERMINAL",0,0,"377766666666666667777777727792",,terminal_output +579,1263674,"TERMINAL",0,0,"59998888888888888999999994999:014",,terminal_output +580,1265689,"TERMINAL",0,0,"7318:01114040404040404040401010404041414141414111116111136",,terminal_output +581,1267699,"TERMINAL",0,0,"933322222222222223333333383358",,terminal_output +582,1269706,"TERMINAL",0,0,"115554444444444444555555554055750",,terminal_output +583,1271714,"TERMINAL",0,0,"377766666666666667777777727792",,terminal_output +584,1273724,"TERMINAL",0,0,"5999888888888888899999999499114",,terminal_output +585,1275735,"TERMINAL",0,0,"74111215050505050505050502020505051515151515121216212136",,terminal_output +586,1277743,"TERMINAL",0,0,"\r9333222222222222233339332 alfred.ngu1:53:391:53:56 6:48:2310:30051:23529993 nishant.k2 3843T18:29:1801:48:31 16:53:48[002,007]30008 alfred.ng1 1622:58:503T22:58:56 19:43:230069431299919:13:019:13:04 23:29:15381:211:21 23:30:584",,terminal_output +587,1279853,"TERMINAL",0,0,"2155544444444444445555555505571:00",,terminal_output +588,1281759,"TERMINAL",0,0,"37776666666666666777777727792",,terminal_output +589,1283768,"TERMINAL",0,0,"599988888888888889999999499214",,terminal_output +590,1285781,"TERMINAL",0,0,"\r751213116 R2025-10-04T18:42:27hai001\r 30122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 4:00 1-00:00:00 hai0011757:0087:00497:005207:00617:0029972 alfred.ngu6:35:347:5745:00130103 franz.sram4:55:02250:00524854:00417:5\t4:30056:30609913:27 1:29:00580537:001813:15:314:50:26 3:52:01532:0142:0152:0179493:01132 alfred.ngu1:53:391:53:56 6:48:3110:30051:31529993 nishant.k2 3843T18:29:1801:48:31 16:53:56[002,007]30008 alfred.ng1 1622:58:503T22:58:56 19:43:3100694311299919:13:019:13:04 23:29:23381:211:21 23:31:064",,terminal_output +591,1287787,"TERMINAL",0,0,"933322222222222222333333383358",,terminal_output +592,1289798,"TERMINAL",0,0,"315554444444444444455555554:0055710",,terminal_output +593,1291806,"TERMINAL",0,0,"377766666666666666777777727792",,terminal_output +594,1293815,"TERMINAL",0,0,"5999888888888888889999999499314",,terminal_output +595,1295826,"TERMINAL",0,0,"77:0131411010101010101010101040401010111111111141416414136",,terminal_output +596,1297836,"TERMINAL",0,0,"933322222222222222333333383358",,terminal_output +597,1299846,"TERMINAL",0,0,"415554444444444444455555551055720",,terminal_output 
+598,1301855,"TERMINAL",0,0,"377766666666666666777777727792",,terminal_output +599,1303862,"TERMINAL",0,0,"5999888888888888889999999499414",,terminal_output +600,1305873,"TERMINAL",0,0,"71141512020202020202020202050502020212121212151516515136",,terminal_output +601,1307881,"TERMINAL",0,0,"933322222222222222333333383358",,terminal_output +602,1309888,"TERMINAL",0,0,"515554444444444444455555552055730",,terminal_output +603,1311895,"TERMINAL",0,0,"377766666666666666777777727792",,terminal_output +604,1313904,"TERMINAL",0,0,"5999888888888888889999999499514",,terminal_output +605,1315916,"TERMINAL",0,0,"721515:01303030303030303030305:007:00303031313131319:012:0164:014:0136",,terminal_output +606,1317924,"TERMINAL",0,0,"933322222222222222333333383358",,terminal_output +607,1319997,"TERMINAL",0,0,"3:015554444444444444455555553055740",,terminal_output +608,1322039,"TERMINAL",0,0,"377766666666666666777777727792",,terminal_output +609,1323989,"TERMINAL",0,0,"599988888888888888999999949930:014",,terminal_output +610,1326009,"TERMINAL",0,0,"7319:01114040404040404040404010104040414141414111116111136",,terminal_output +611,1327984,"TERMINAL",0,0,"933322222222222222333333383358",,terminal_output +612,1329981,"TERMINAL",0,0,"11555Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)3420:442284:4474184419450430121 franz.sram8:35:158:35 7:44629972 alfred.ngu6:35:34745:4413520:44528:2\t4:44175:14410045:57 57:14691329:445984:55:027:05:27 1:37:44115354585502:45579 franz.sram3:15:314:49:26 3:53:4523:39349:15130031 alfred.ng1 164T11:50:3011:50:56 6:52:1500529993 nishant.k2 38418:29:184T01:48:31 16:54:40[002,007]8056300022:58:522:58:56 19:44:1193:043:04 23:30:07329998 alfred.ngu standard 1 16 R 2025-10-03T19:11:21 2025-10-03T19:11:21 23:31:50 1-00:00:00 hai004",,terminal_output +613,1331991,"TERMINAL",0,0,"377766666666666666777777727792",,terminal_output +614,1334023,"TERMINAL",0,0,"5999888888888888889999999499114",,terminal_output +615,1336008,"TERMINAL",0,0,"74111215050505050505050505020205050515151515121216212136",,terminal_output +616,1338015,"TERMINAL",0,0,"933322222222222222333333383358",,terminal_output +617,1340064,"TERMINAL",0,0,"21555444444444444445555555505572:00",,terminal_output +618,1342035,"TERMINAL",0,0,"\r37776666666666666813:15:314:50:26 3:52:57537475779493:57132 alfred.ngu1:53:391:53:56 6:49:2710:30052:27529993 nishant.k2 3843T18:29:1801:48:31 16:54:52[002,007]30008 alfred.ng1 1622:58:503T22:58:56 19:44:270069471299919:13:019:13:04 23:30:19381:211:21 23:32:024",,terminal_output +619,1344049,"TERMINAL",0,0,"599988888888888889999999499214",,terminal_output +620,1346109,"TERMINAL",0,0,"\r751213116 R2025-10-04T18:43:27hai001\r 30123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:00 1-00:00:00 hai0012385:001758:0088:00498:005208:00618:0029972 alfred.ngu6:35:347:5746:00130103 franz.sram4:55:02251:0052485:00417:5\t5:30057:30609913:27 1:30:005813:15:314:50:26 3:53:0133:0143:0153:0179494:01132 alfred.ngu1:53:391:53:56 6:49:3110:30052:31529993 nishant.k2 3843T18:29:1801:48:31 16:54:56[002,007]30008 alfred.ng1 1622:58:503T22:58:56 19:44:3100694311299919:13:019:13:04 23:30:23381:211:21 23:32:064",,terminal_output +621,1348069,"TERMINAL",0,0,"933322222222222222333333383358",,terminal_output +622,1350100,"TERMINAL",0,0,"315554444444444444455555555:0055710",,terminal_output +623,1352086,"TERMINAL",0,0,"377766666666666666777777727792",,terminal_output +624,1354094,"TERMINAL",0,0,"5999888888888888889999999499314",,terminal_output 
+625,1356103,"TERMINAL",0,0,"78:0131411010101010101010101010404010111111111141416414136",,terminal_output +626,1358116,"TERMINAL",0,0,"933322222222222222333333383358",,terminal_output +627,1360122,"TERMINAL",0,0,"415554444444444444455555551055720",,terminal_output +628,1362134,"TERMINAL",0,0,"377766666666666666777777727792",,terminal_output +629,1364229,"TERMINAL",0,0,"5999888888888888889999999499414",,terminal_output +630,1366148,"TERMINAL",0,0,"71141512020202020202020202020505020212121212151516515136",,terminal_output +631,1368158,"TERMINAL",0,0,"933322222222222222333333383358",,terminal_output +632,1370169,"TERMINAL",0,0,"515554444444444444455555552055730",,terminal_output +633,1372177,"TERMINAL",0,0,"377766666666666666777777727792",,terminal_output +634,1374185,"TERMINAL",0,0,"5999888888888888889999999499514",,terminal_output +635,1376211,"TERMINAL",0,0,"721516:0130303030303030303030306:008:0030313131313150:013:0165:015:0136",,terminal_output +636,1378205,"TERMINAL",0,0,"933322222222222222333333383358",,terminal_output +637,1380214,"TERMINAL",0,0,"4:015554444444444444455555553055740",,terminal_output +638,1382225,"TERMINAL",0,0,"3777Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)430:363421:362285:3676186419650630121 franz.sram8:35:158:35 8:36629972 alfred.ngu6:35:34746:3613521:36528:2\t5:36176:06410045:57 58:066994:55:027:13:27 1:30:3617374785503:3757727792",,terminal_output +639,1384232,"TERMINAL",0,0,"5999888888888888889999994991:014",,terminal_output +640,1386243,"TERMINAL",0,0,"73150:011140404040404040404040401010404141414111116111136",,terminal_output +641,1388253,"TERMINAL",0,0,"93332222222222222233333383358",,terminal_output +642,1390260,"TERMINAL",0,0,"11555444444444444445555554055750",,terminal_output +643,1392265,"TERMINAL",0,0,"37776666666666666677777727792",,terminal_output +644,1394276,"TERMINAL",0,0,"599988888888888888999999499114",,terminal_output +645,1396285,"TERMINAL",0,0,"741112150505050505050505050502020505151515121216212136",,terminal_output +646,1398294,"TERMINAL",0,0,"93332222222222222233333383358",,terminal_output +647,1400379,"TERMINAL",0,0,"2155544444444444444555555505573:00",,terminal_output +648,1402309,"TERMINAL",0,0,"37776666666666666677777727792",,terminal_output +649,1404321,"TERMINAL",0,0,"599988888888888888999999499214",,terminal_output +650,1406420,"TERMINAL",0,0,"\r751213116 R2025-10-04T18:44:27hai001\r 30124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:00 1-00:00:00 hai001322:002386:001759:0089:00499:005209:00619:0029972 alfred.ngu6:35:347:5747:00130103 franz.sram4:55:02252:0052486:00417:5\t6:30058:30609913:27 1:31:005813:15:314:50:26 3:54:0134:0144:0154:0132 alfred.ngu1:53:391:53:56 6:50:31110:3003:31529993 nishant.k2 3843T18:29:1801:48:31 16:55:56[002,007]30008 alfred.ng1 1622:58:503T22:58:56 19:45:3100694311299919:13:019:13:04 23:31:23381:211:21 23:33:064",,terminal_output +651,1408337,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +652,1410415,"TERMINAL",0,0,"315554444444444444445555556:0055710",,terminal_output +653,1412359,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +654,1414406,"TERMINAL",0,0,"5999888888888888888999999499314",,terminal_output +655,1416456,"TERMINAL",0,0,"79:0131411010101010101010101010104040101111111141416414136",,terminal_output +656,1418383,"TERMINAL",0,0,"\r933322222222222222233333383358",,terminal_output +657,1420434,"TERMINAL",0,0,"415554444444444444445555551055720",,terminal_output 
+658,1422401,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +659,1424441,"TERMINAL",0,0,"5999888888888888888999999499414",,terminal_output +660,1426420,"TERMINAL",0,0,"71141512020202020202020202020205050202121212151516515136",,terminal_output +661,1428428,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +662,1430484,"TERMINAL",0,0,"515554444444444444445555552055730",,terminal_output +663,1432446,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +664,1434454,"TERMINAL",0,0,"5999888888888888888999999499514",,terminal_output +665,1436467,"TERMINAL",0,0,"721517:013030303030303030303030307:009:0030313131311:014:0166:016:0136",,terminal_output +666,1438474,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +667,1440482,"TERMINAL",0,0,"5:015554444444444444445555553055740",,terminal_output +668,1442489,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +669,1444502,"TERMINAL",0,0,"59998888888888888889999994992:014",,terminal_output +670,1446511,"TERMINAL",0,0,"7311:01114040404040404040404040401010404141414111116111136",,terminal_output +671,1448519,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +672,1450609,"TERMINAL",0,0,"11555Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)540:44431:443422:442286:4474184419450430121 franz.sram8:35:158:35 9:44629972 alfred.ngu6:35:34747:4413522:44528:2\t6:44177:14410045:57 59:146994:55:027:13:27 1:31:4415354585 franz.sram3:15:314:50:26 3:54:45523:3931:15130031 alfred.ng1 164T11:50:3011:50:56 6:54:1500529993 nishant.k2 38418:29:184T01:48:31 16:56:40[002,007]8056300022:58:522:58:56 19:46:1193:043:04 23:32:07329998 alfred.ngu standard 1 16 R 2025-10-03T19:11:21 2025-10-03T19:11:21 23:33:50 1-00:00:00 hai004",,terminal_output +673,1452536,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +674,1454545,"TERMINAL",0,0,"5999888888888888888999999499114",,terminal_output +675,1456557,"TERMINAL",0,0,"74111215050505050505050505050502020505151515121216212136",,terminal_output +676,1457491,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"",shellscript,tab +677,1458565,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +678,1459321,"slurm/dev/franz/berlin/atari/dynamics.sh",2717,0,"",shellscript,selection_keyboard +679,1459664,"slurm/dev/franz/berlin/atari/dynamics.sh",1145,0,"",shellscript,selection_keyboard +680,1459845,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"",shellscript,selection_keyboard +681,1460575,"TERMINAL",0,0,"21555444444444444444555555505574:00",,terminal_output +682,1460824,"slurm/dev/franz/berlin/atari/dynamics.sh",1406,0,"",shellscript,selection_keyboard +683,1460996,"slurm/dev/franz/berlin/atari/dynamics.sh",2717,0,"",shellscript,selection_keyboard +684,1462583,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +685,1464597,"TERMINAL",0,0,"65020309999999999999995:005:005:005:00303053030225",,terminal_output +686,1466601,"TERMINAL",0,0,"82221:012:013:017:0110:0110:0110:0110:0110:018:013:017:0131312:0122222272247",,terminal_output +687,1468612,"TERMINAL",0,0,"3044433333333333333344444494469",,terminal_output +688,1470623,"TERMINAL",0,0,"26665555555555555556666667:0166811",,terminal_output +689,1472630,"TERMINAL",0,0,"4888777777777777777888888388303",,terminal_output +690,1474639,"TERMINAL",0,0,"650:0030409999999999999991010101040405404025",,terminal_output +691,1476655,"TERMINAL",0,0,"822211111111111111111111111141411122222272247",,terminal_output 
+692,1478663,"TERMINAL",0,0,"4044433333333333333344444494469",,terminal_output +693,1480671,"TERMINAL",0,0,"26665555555555555556666661166821",,terminal_output +694,1482705,"TERMINAL",0,0,"4888777777777777777888888388403",,terminal_output +695,1484754,"TERMINAL",0,0,"61040509999999999999992020202050505505025",,terminal_output +696,1486805,"TERMINAL",0,0,"822221212121212121212121212151512122222272247",,terminal_output +697,1488748,"TERMINAL",0,0,"5044433333333333333344444494469",,terminal_output +698,1490717,"TERMINAL",0,0,"26665555555555555556666662166831",,terminal_output +699,1492729,"TERMINAL",0,0,"4888777777777777777888888388503",,terminal_output +700,1494738,"TERMINAL",0,0,"620508:00999999999999999303030302:005:0057:007:0025",,terminal_output +701,1496747,"TERMINAL",0,0,"82223131313131313131313131318:011:00:013122222272247",,terminal_output +702,1498756,"TERMINAL",0,0,"6:0044433333333333333344444494469",,terminal_output +703,1500766,"TERMINAL",0,0,"26665555555555555556666663166841",,terminal_output +704,1502774,"TERMINAL",0,0,"48887777777777777778888883883:003",,terminal_output +705,1504824,"TERMINAL",0,0,"6302:00109999999999999994040404010105101025",,terminal_output +706,1506873,"TERMINAL",0,0,"822241414141414141414141414111114122222272247",,terminal_output +707,1508821,"TERMINAL",0,0,"1044433333333333333344444494469",,terminal_output +708,1510812,"TERMINAL",0,0,"26665555555555555556666664166851",,terminal_output +709,1512914,"TERMINAL",0,0,"4888777777777777777888888388103",,terminal_output +710,1514859,"TERMINAL",0,0,"64010209999999999999995050505020205202025",,terminal_output +711,1516908,"TERMINAL",0,0,"822251515151515151515151515121215122222272247",,terminal_output +712,1518957,"TERMINAL",0,0,"2044433333333333333344444494469",,terminal_output +713,1520859,"TERMINAL",0,0,"2666555555555555555666666516685:01",,terminal_output +714,1522954,"TERMINAL",0,0,"4888777777777777777888888388203",,terminal_output +715,1524884,"TERMINAL",0,0,"65020309999999999999996:006:006:006:0030305303025",,terminal_output +716,1526890,"TERMINAL",0,0,"82222:013:014:018:011:011:011:011:011:019:014:018:0131313:0122222272247",,terminal_output +717,1528994,"TERMINAL",0,0,"3044433333333333333344444494469",,terminal_output +718,1530912,"TERMINAL",0,0,"26665555555555555556666668:0166811",,terminal_output +719,1532985,"TERMINAL",0,0,"4888777777777777777888888388303",,terminal_output +720,1535035,"TERMINAL",0,0,"61:0030409999999999999991010101040405404025",,terminal_output +721,1536939,"TERMINAL",0,0,"822211111111111111111111111141411122222272247",,terminal_output +722,1538949,"TERMINAL",0,0,"4044433333333333333344444494469",,terminal_output +723,1540970,"TERMINAL",0,0,"26665555555555555556666661166821",,terminal_output +724,1542969,"TERMINAL",0,0,"4888777777777777777888888388403",,terminal_output +725,1544976,"TERMINAL",0,0,"61040509999999999999992020202050505505025",,terminal_output +726,1547007,"TERMINAL",0,0,"822221212121212121212121212151512122222272247",,terminal_output +727,1549060,"TERMINAL",0,0,"5044433333333333333344444494469",,terminal_output +728,1551110,"TERMINAL",0,0,"26665555555555555556666662166831",,terminal_output +729,1553012,"TERMINAL",0,0,"4888777777777777777888888388503",,terminal_output +730,1555023,"TERMINAL",0,0,"620509:00999999999999999303030303:006:0058:008:0025",,terminal_output +731,1557032,"TERMINAL",0,0,"82223131313131313131313131319:011:013122222272247",,terminal_output +732,1559039,"TERMINAL",0,0,"7:0044433333333333333344444494469",,terminal_output 
+733,1561047,"TERMINAL",0,0,"26665555555555555556666663166841",,terminal_output +734,1563058,"TERMINAL",0,0,"48887777777777777778888883884:003",,terminal_output +735,1565139,"TERMINAL",0,0,"6303:00109999999999999994040404010105101025",,terminal_output +736,1567083,"TERMINAL",0,0,"822241414141414141414141414111114122222272247",,terminal_output +737,1569134,"TERMINAL",0,0,"1044433333333333333344444494469",,terminal_output +738,1571101,"TERMINAL",0,0,"26665555555555555556666664166851",,terminal_output +739,1573113,"TERMINAL",0,0,"4888777777777777777888888388103",,terminal_output +740,1575120,"TERMINAL",0,0,"64010209999999999999995050505020205202025",,terminal_output +741,1577222,"TERMINAL",0,0,"822251515151515151515151515121215122222272247",,terminal_output +742,1579170,"TERMINAL",0,0,"2044433333333333333344444494469",,terminal_output +743,1581217,"TERMINAL",0,0,"2666555555555555555666666516686:01",,terminal_output +744,1583157,"TERMINAL",0,0,"4888777777777777777888888388203",,terminal_output +745,1585209,"TERMINAL",0,0,"65020309999999999999997:007:007:007:0030305303025",,terminal_output +746,1587178,"TERMINAL",0,0,"82223:014:015:019:012:012:012:012:012:0150:015:019:0131314:0122222272247",,terminal_output +747,1589202,"TERMINAL",0,0,"3044433333333333333344444494469",,terminal_output +748,1591194,"TERMINAL",0,0,"26665555555555555556666669:0166811",,terminal_output +749,1593206,"TERMINAL",0,0,"4888777777777777777888888388303",,terminal_output +750,1595212,"TERMINAL",0,0,"62:0030409999999999999991010101040405404025",,terminal_output +751,1597222,"TERMINAL",0,0,"822211111111111111111111111141411122222272247",,terminal_output +752,1599345,"TERMINAL",0,0,"4044433333333333333344444494469",,terminal_output +753,1601284,"TERMINAL",0,0,"26665555555555555556666661166821",,terminal_output +754,1603250,"TERMINAL",0,0,"4888777777777777777888888388403",,terminal_output +755,1605259,"TERMINAL",0,0,"61040509999999999999992020202050505505025",,terminal_output +756,1607273,"TERMINAL",0,0,"822221212121212121212121212151512122222272247",,terminal_output +757,1609376,"TERMINAL",0,0,"5044433333333333333344444494469",,terminal_output +758,1611289,"TERMINAL",0,0,"26665555555555555556666662166831",,terminal_output +759,1613297,"TERMINAL",0,0,"4888777777777777777888888388503",,terminal_output +760,1615315,"TERMINAL",0,0,"620507:00:00999999999999999303030304:007:0059:009:0025",,terminal_output +761,1617313,"TERMINAL",0,0,"82223131313131313131313131311:00:012:013122222272247",,terminal_output +762,1619375,"TERMINAL",0,0,"8:0044433333333333333344444494469",,terminal_output +763,1621334,"TERMINAL",0,0,"26665555555555555556666663166841",,terminal_output +764,1623345,"TERMINAL",0,0,"48887777777777777778888883885:003",,terminal_output +765,1625453,"TERMINAL",0,0,"6304:00109999999999999994040404010105101025",,terminal_output +766,1627398,"TERMINAL",0,0,"822241414141414141414141414111114122222272247",,terminal_output +767,1629370,"TERMINAL",0,0,"1044433333333333333344444494469",,terminal_output +768,1631384,"TERMINAL",0,0,"26665555555555555556666664166851",,terminal_output +769,1633391,"TERMINAL",0,0,"4888777777777777777888888388103",,terminal_output +770,1635398,"TERMINAL",0,0,"64010209999999999999995050505020205202025",,terminal_output +771,1637434,"TERMINAL",0,0,"822251515151515151515151515121215122222272247",,terminal_output +772,1639419,"TERMINAL",0,0,"2044433333333333333344444494469",,terminal_output +773,1641429,"TERMINAL",0,0,"2666555555555555555666666516687:01",,terminal_output 
+774,1643437,"TERMINAL",0,0,"4888777777777777777888888388203",,terminal_output +775,1645523,"TERMINAL",0,0,"65020309999999999999998:008:008:008:0030305303025",,terminal_output +776,1647456,"TERMINAL",0,0,"82224:015:016:0110:013:013:013:013:013:011:016:011:00:0131315:0122222272247",,terminal_output +777,1649466,"TERMINAL",0,0,"3044433333333333333344444494469",,terminal_output +778,1651477,"TERMINAL",0,0,"26665555555555555556666667:00:0166811",,terminal_output +779,1653488,"TERMINAL",0,0,"4888777777777777777888888388303",,terminal_output +780,1655559,"TERMINAL",0,0,"63:0030409999999999999991010101040405404025",,terminal_output +781,1657500,"TERMINAL",0,0,"822211111111111111111111111141411122222272247",,terminal_output +782,1659554,"TERMINAL",0,0,"4044433333333333333344444494469",,terminal_output +783,1661602,"TERMINAL",0,0,"26665555555555555556666661166821",,terminal_output +784,1663533,"TERMINAL",0,0,"4888777777777777777888888388403",,terminal_output +785,1665568,"TERMINAL",0,0,"61040509999999999999992020202050505505025",,terminal_output +786,1667645,"TERMINAL",0,0,"822221212121212121212121212151512122222272247",,terminal_output +787,1669591,"TERMINAL",0,0,"5044433333333333333344444494469",,terminal_output +788,1671640,"TERMINAL",0,0,"26665555555555555556666662166831",,terminal_output +789,1673575,"TERMINAL",0,0,"4888777777777777777888888388503",,terminal_output +790,1675585,"TERMINAL",0,0,"621511:013030303030303030303030301:003:0030313131315:018:01650:0150:0136",,terminal_output +791,1677591,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +792,1679602,"TERMINAL",0,0,"9:015554444444444444445555553055740",,terminal_output +793,1681612,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +794,1683618,"TERMINAL",0,0,"59998888888888888889999994996:014",,terminal_output +795,1685627,"TERMINAL",0,0,"7315:01114040404040404040404040401010404141414111116111136",,terminal_output +796,1687637,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +797,1689650,"TERMINAL",0,0,"115554444444444444445555554055750",,terminal_output +798,1691655,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +799,1693665,"TERMINAL",0,0,"5999888888888888888999999499114",,terminal_output +800,1695672,"TERMINAL",0,0,"74111215050505050505050505050502020505151515121216212136",,terminal_output +801,1697682,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +802,1699693,"TERMINAL",0,0,"21555444444444444444555555505578:00",,terminal_output +803,1701703,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +804,1703710,"TERMINAL",0,0,"5999888888888888888999999499214",,terminal_output +805,1705723,"TERMINAL",0,0,"75121315:006:007:001:004:004:004:004:004:002:007:001:0030306:009:019:019:019:0131316313136",,terminal_output +806,1707735,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +807,1709742,"TERMINAL",0,0,"315554444444444444445555551:0055710",,terminal_output +808,1711750,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +809,1713756,"TERMINAL",0,0,"5999888888888888888999999499314",,terminal_output +810,1715765,"TERMINAL",0,0,"74:0131411010101010101010101010104040101111111141416414136",,terminal_output +811,1717775,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +812,1719783,"TERMINAL",0,0,"415554444444444444445555551055720",,terminal_output +813,1721794,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +814,1723805,"TERMINAL",0,0,"5999888888888888888999999499414",,terminal_output 
+815,1725812,"TERMINAL",0,0,"71141512020202020202020202020205050202121212151516515136",,terminal_output +816,1727854,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +817,1729825,"TERMINAL",0,0,"515554444444444444445555552055730",,terminal_output +818,1731834,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +819,1733845,"TERMINAL",0,0,"5999888888888888888999999499514",,terminal_output +820,1735854,"TERMINAL",0,0,"721512:013030303030303030303030302:004:0030313131316:019:0161:011:0136",,terminal_output +821,1737857,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +822,1739937,"TERMINAL",0,0,"50:015554444444444444445555553055740",,terminal_output +823,1741985,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +824,1743886,"TERMINAL",0,0,"59998888888888888889999994997:014",,terminal_output +825,1745894,"TERMINAL",0,0,"7316:01114040404040404040404040401010404141414111116111136",,terminal_output +826,1747904,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +827,1749970,"TERMINAL",0,0,"115554444444444444445555554055750",,terminal_output +828,1751922,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +829,1753963,"TERMINAL",0,0,"5999888888888888888999999499114",,terminal_output +830,1755942,"TERMINAL",0,0,"74111215050505050505050505050502020505151515121216212136",,terminal_output +831,1757958,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +832,1759960,"TERMINAL",0,0,"21555444444444444444555555505579:00",,terminal_output +833,1761969,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +834,1763976,"TERMINAL",0,0,"5999888888888888888999999499214",,terminal_output +835,1765987,"TERMINAL",0,0,"75121316:007:008:002:005:005:005:005:005:003:008:002:0030307:004:00:014:00:014:00:014:00:0131316313136",,terminal_output +836,1767995,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +837,1770041,"TERMINAL",0,0,"315554444444444444445555552:0055710",,terminal_output +838,1772073,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +839,1774034,"TERMINAL",0,0,"5999888888888888888999999499314",,terminal_output +840,1776027,"TERMINAL",0,0,"75:0131411010101010101010101010104040101111111141416414136",,terminal_output +841,1778066,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +842,1780076,"TERMINAL",0,0,"415554444444444444445555551055720",,terminal_output +843,1782058,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +844,1784066,"TERMINAL",0,0,"5999888888888888888999999499414",,terminal_output +845,1786078,"TERMINAL",0,0,"71141512020202020202020202020205050202121212151516515136",,terminal_output +846,1788088,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +847,1790111,"TERMINAL",0,0,"515554444444444444445555552055730",,terminal_output +848,1792099,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +849,1794108,"TERMINAL",0,0,"5999888888888888888999999499514",,terminal_output +850,1796120,"TERMINAL",0,0,"721513:013030303030303030303030303:005:0030313131317:017:00:0162:012:0136",,terminal_output +851,1798132,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +852,1800136,"TERMINAL",0,0,"1:015554444444444444445555553055740",,terminal_output +853,1802144,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +854,1804241,"TERMINAL",0,0,"59998888888888888889999994998:014",,terminal_output 
+855,1806163,"TERMINAL",0,0,"7317:01114040404040404040404040401010404141414111116111136",,terminal_output +856,1808172,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +857,1810181,"TERMINAL",0,0,"115554444444444444445555554055750",,terminal_output +858,1812190,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +859,1814197,"TERMINAL",0,0,"5999888888888888888999999499114",,terminal_output +860,1816207,"TERMINAL",0,0,"74111215050505050505050505050502020505151515121216212136",,terminal_output +861,1818216,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +862,1820326,"TERMINAL",0,0,"215554444444444444445555555055740:00",,terminal_output +863,1822232,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +864,1824313,"TERMINAL",0,0,"5999888888888888888999999499214",,terminal_output +865,1826361,"TERMINAL",0,0,"75121317:008:009:003:006:006:006:006:006:004:009:003:0030308:001:011:011:011:0131316313136",,terminal_output +866,1828261,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +867,1830354,"TERMINAL",0,0,"315554444444444444445555553:0055710",,terminal_output +868,1832278,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +869,1834291,"TERMINAL",0,0,"5999888888888888888999999499314",,terminal_output +870,1836298,"TERMINAL",0,0,"76:0131411010101010101010101010104040101111111141416414136",,terminal_output +871,1838306,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +872,1840321,"TERMINAL",0,0,"415554444444444444445555551055720",,terminal_output +873,1842325,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +874,1844331,"TERMINAL",0,0,"5999888888888888888999999499414",,terminal_output +875,1846341,"TERMINAL",0,0,"71141512020202020202020202020205050202121212151516515136",,terminal_output +876,1848349,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +877,1850357,"TERMINAL",0,0,"515554444444444444445555552055730",,terminal_output +878,1852368,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +879,1854375,"TERMINAL",0,0,"5999888888888888888999999499514",,terminal_output +880,1856384,"TERMINAL",0,0,"721514:013030303030303030303030304:006:0030313131318:011:0163:013:0136",,terminal_output +881,1858392,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +882,1860446,"TERMINAL",0,0,"2:015554444444444444445555553055740",,terminal_output +883,1862412,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +884,1864421,"TERMINAL",0,0,"59998888888888888889999994999:014",,terminal_output +885,1866431,"TERMINAL",0,0,"7318:01114040404040404040404040401010404141414111116111136",,terminal_output +886,1868436,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +887,1870444,"TERMINAL",0,0,"115554444444444444445555554055750",,terminal_output +888,1872455,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +889,1874464,"TERMINAL",0,0,"5999888888888888888999999499114",,terminal_output +890,1876537,"TERMINAL",0,0,"74111215050505050505050505050502020505151515121216212136",,terminal_output +891,1878483,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +892,1880532,"TERMINAL",0,0,"21555444444444444444555555505571:00",,terminal_output +893,1882499,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +894,1884525,"TERMINAL",0,0,"5999888888888888888999999499214",,terminal_output 
+895,1886525,"TERMINAL",0,0,"75121318:009:0010:004:007:007:007:007:007:005:001:00:004:0030309:002:012:012:012:0131316313136",,terminal_output +896,1888527,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +897,1890536,"TERMINAL",0,0,"315554444444444444445555554:0055710",,terminal_output +898,1892614,"TERMINAL",0,0,"377766666666666666677777727792",,terminal_output +899,1894551,"TERMINAL",0,0,"5999888888888888888999999499314",,terminal_output +900,1896559,"TERMINAL",0,0,"77:0131411010101010101010101010104040101111111141416414136",,terminal_output +901,1898567,"TERMINAL",0,0,"933322222222222222233333383358",,terminal_output +902,1900578,"TERMINAL",0,0,"415554444444444444445555551055720",,terminal_output +903,1902587,"TERMINAL",0,0,"3888777777777777777888888388403",,terminal_output +904,1904599,"TERMINAL",0,0,"61040509999999999999992020202050505505025",,terminal_output +905,1906609,"TERMINAL",0,0,"822221212121212121212121212151512122222272247",,terminal_output +906,1908612,"TERMINAL",0,0,"5044433333333333333344444494469",,terminal_output +907,1910621,"TERMINAL",0,0,"26665555555555555556666662166831",,terminal_output +908,1912688,"TERMINAL",0,0,"4888777777777777777888888388503",,terminal_output +909,1914689,"TERMINAL",0,0,"620505:00999999999999999303030309:002:0054:004:0025",,terminal_output +910,1916682,"TERMINAL",0,0,"82223131313131313131313131315:017:013122222272247",,terminal_output +911,1918657,"TERMINAL",0,0,"3:0044433333333333333344444494469",,terminal_output +912,1920663,"TERMINAL",0,0,"26665555555555555556666663166841",,terminal_output +913,1922727,"TERMINAL",0,0,"488877777777777777788888838840:003",,terminal_output +914,1924681,"TERMINAL",0,0,"6309:00109999999999999994040404010105101025",,terminal_output +915,1926690,"TERMINAL",0,0,"822241414141414141414141414111114122222272247",,terminal_output +916,1928699,"TERMINAL",0,0,"1044433333333333333344444494469",,terminal_output +917,1930705,"TERMINAL",0,0,"26665555555555555556666664166851",,terminal_output +918,1932717,"TERMINAL",0,0,"4888777777777777777888888388103",,terminal_output +919,1934729,"TERMINAL",0,0,"64010209999999999999995050505020205202025",,terminal_output +920,1936732,"TERMINAL",0,0,"822251515151515151515151515121215122222272247",,terminal_output +921,1938744,"TERMINAL",0,0,"2044433333333333333344444494469",,terminal_output +922,1940751,"TERMINAL",0,0,"2666555555555555555666666516682:01",,terminal_output +923,1942759,"TERMINAL",0,0,"4888777777777777777888888388203",,terminal_output +924,1944770,"TERMINAL",0,0,"65020309999999999999993:003:003:003:0030305303025",,terminal_output +925,1946776,"TERMINAL",0,0,"82229:0110:011:015:018:018:018:018:018:016:011:015:01313140:0122222272247",,terminal_output +926,1948789,"TERMINAL",0,0,"3044433333333333333344444494469",,terminal_output +927,1950793,"TERMINAL",0,0,"26665555555555555556666665:0166811",,terminal_output +928,1952800,"TERMINAL",0,0,"4888777777777777777888888388303",,terminal_output +929,1954807,"TERMINAL",0,0,"68:0030409999999999999991010101040405404025",,terminal_output +930,1956824,"TERMINAL",0,0,"822211111111111111111111111141411122222272247",,terminal_output +931,1958509,"TERMINAL",0,0,"[?1049l\r[?1l>]0;franz.srambical@hai-login2:~/jafar",,terminal_output +932,1986605,"TERMINAL",0,0,"bash /home/franz.srambical/jafar/slurm/dev/franz/berlin/atari/spawn_dynamics.sh demon_attack",,terminal_command +933,1986627,"TERMINAL",0,0,"]633;CSubmitting Dynamics jobs for environments: demon_attack\r\nSubmitted dynamics_atari_demon_attack_dev (ENV_NAME=demon_attack) using 
tokenizer ckpt /fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/demon_attack/tokenizer/tokenizer_atari_demon_attack_dev_30102 as job 30127\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +934,2111771,"TERMINAL",0,0,"watch squeue",,terminal_command +935,2111829,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 18:56:13 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30027 xiao.liu interacti 1 64 PD 2025-10-04T05:54:07 2025-10-04T19:13:04 0:00 23:59:00 (Resources)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30115 xiao.liu interacti 1 64 PD 2025-10-04T18:22:16N/A 0:00 23:59:00 (Priority)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05N/A 0:00 23:59:00 (Priority)30110 emmanouil. interacti 1 104 PD 2025-10-04T16:31:13N/A 0:00\t 30:00 (Priority)30028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 13:00:37 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 13:02:07 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 17:08:17 23:59:00 hai00430127 franz.sram standard 1 8 PD 2025-10-04T18:54:08N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 0:16 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 11:46 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 12:46 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 13:46 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 17:46 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 20:46 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 20:46 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 20:46 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 20:46 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 20:46 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 58:46 1-00:00:00 hai00130103 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:52:27 1:03:46 1-00:00:00 hai00530102 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:48:27 1:07:46 1-00:00:00 hai00430101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 1:08:16 1-00:00:00 hai00430100 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:45:57 1:10:16 1-00:00:00 hai00630099 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:13:27 1:42:46 1-00:00:00 hai00530083 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 4:05:47 1-00:00:00 hai00530084 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 4:05:47 1-00:00:00 hai00530085 franz.sram standard 1 16 R 2025-10-04T13:15:31 2025-10-04T14:50:26 4:05:47 1-00:00:00 hai00530032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 7:02:17 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 7:05:17 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 17:07:42 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 
2025-10-03T22:58:56 19:57:17 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 19:57:17 1-00:00:00 hai00129999 alfred.ngu standard 1 16 R 2025-10-03T19:13:04 2025-10-03T19:13:04 23:43:09 1-00:00:00 hai00329998 alfred.ngu standard 1 16 R 2025-10-03T19:11:21 2025-10-03T19:11:21 23:44:52 1-00:00:00 hai004",,terminal_output
+[rows 936–971: garbled watch-refresh frames omitted]
+972,2186200,"TERMINAL",0,0,"7512131303:004:005:009:002:002:002:002:002:001:00:005:009:0030304:007:017:017:0131316313136",,terminal_output +973,2188150,"TERMINAL",0,0,"933322222222222222223333383358",,terminal_output +974,2190161,"TERMINAL",0,0,"315554444444444444444555559:0055710",,terminal_output +975,2192167,"TERMINAL",0,0,"377766666666666666667777727792",,terminal_output +976,2194176,"TERMINAL",0,0,"5999888888888888888899999499314",,terminal_output +977,2196183,"TERMINAL",0,0,"72:0131414010101010101010101010101040401011111141416414136",,terminal_output +978,2198194,"TERMINAL",0,0,"933322222222222222223333383358",,terminal_output +979,2200200,"TERMINAL",0,0,"415554444444444444444555551055720",,terminal_output +980,2202212,"TERMINAL",0,0,"377766666666666666667777727792",,terminal_output +981,2204321,"TERMINAL",0,0,"5999888888888888888899999499414",,terminal_output +982,2206268,"TERMINAL",0,0,"71141515020202020202020202020202050502021212151516515136",,terminal_output +983,2208240,"TERMINAL",0,0,"933322222222222222223333383358",,terminal_output +984,2210246,"TERMINAL",0,0,"515554444444444444444555552055730",,terminal_output +985,2212254,"TERMINAL",0,0,"377766666666666666667777727792",,terminal_output +986,2214259,"TERMINAL",0,0,"5999888888888888888899999499514",,terminal_output +987,2216300,"TERMINAL",0,0,"7215110:012:0030303030303030303030303010:002:00303131314:017:0169:019:0136",,terminal_output +988,2218285,"TERMINAL",0,0,"933322222222222222223333383358",,terminal_output +989,2220293,"TERMINAL",0,0,"8:015554444444444444444555553055740",,terminal_output +990,2222297,"TERMINAL",0,0,"377766666666666666667777727792",,terminal_output +991,2224309,"TERMINAL",0,0,"59998888888888888888999994995:014",,terminal_output +992,2226314,"TERMINAL",0,0,"7314:01111040404040404040404040404010104041414111116111136",,terminal_output +993,2228329,"TERMINAL",0,0,"933322222222222222223333383358",,terminal_output +994,2230332,"TERMINAL",0,0,"115554444444444444444555554055750",,terminal_output +995,2232340,"TERMINAL",0,0,"377766666666666666667777727792",,terminal_output +996,2234351,"TERMINAL",0,0,"5999888888888888888899999499114",,terminal_output +997,2236358,"TERMINAL",0,0,"74111212050505050505050505050505020205051515121216212136",,terminal_output +998,2238369,"TERMINAL",0,0,"933322222222222222223333383358",,terminal_output +999,2240375,"TERMINAL",0,0,"21555444444444444444455555505577:00",,terminal_output +1000,2242385,"TERMINAL",0,0,"377766666666666666667777727792",,terminal_output +1001,2244395,"TERMINAL",0,0,"5999888888888888888899999499214",,terminal_output +1002,2246402,"TERMINAL",0,0,"7512131304:005:006:0020:003:003:003:003:003:001:006:0010:0030305:008:018:018:0131316313136",,terminal_output +1003,2248407,"TERMINAL",0,0,"933322222222222222223333383358",,terminal_output +1004,2250418,"TERMINAL",0,0,"3155544444444444444445555510:0055710",,terminal_output +1005,2252428,"TERMINAL",0,0,"377766666666666666667777727792",,terminal_output +1006,2254438,"TERMINAL",0,0,"5999888888888888888899999499314",,terminal_output +1007,2256546,"TERMINAL",0,0,"73:0131414010101010101010101010101040401011111141416414136",,terminal_output +1008,2258493,"TERMINAL",0,0,"933322222222222222223333383358",,terminal_output +1009,2260463,"TERMINAL",0,0,"415554444444444444444555551055720",,terminal_output +1010,2262471,"TERMINAL",0,0,"377766666666666666667777727792",,terminal_output +1011,2264475,"TERMINAL",0,0,"5999888888888888888899999499414",,terminal_output 
+1012,2266582,"TERMINAL",0,0,"\r71141515020202020202020202020202050502042152132 alfred.ngu1:53:391:53:56 7:04:51110:3007:51529993 nishant.k2 3843T18:29:1801:48:31 17:10:16[002,007]30008 alfred.ng1 1622:58:503T22:58:56 19:59:5100694511299919:13:019:13:04 23:45:43381:211:21 23:47:264",,terminal_output +1013,2268497,"TERMINAL",0,0,"93332222222222222222333383358",,terminal_output +1014,2270563,"TERMINAL",0,0,"51555444444444444444455552055730",,terminal_output +1015,2272519,"TERMINAL",0,0,"37776666666666666666777727792",,terminal_output +1016,2274522,"TERMINAL",0,0,"599988888888888888889999499514",,terminal_output +1017,2276530,"TERMINAL",0,0,"\r721511:0116 R2025-10-04T18:58:57hai005\r 30126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 3:00 1-00:00:00 hai005544:27 14:301435:30326:3023820:301753:30830493052030613029972 alfred.ngu6:35:347:57:27 1:01:30130103 franz.sram4:55:0226:30524810:30417:5\t1:00053:00609913:27 1:45:305843:15:314:50:26 4:08:3153132 alfred.ngu1:53:391:53:56 7:05:01110:3008:01529993 nishant.k2 3843T18:29:1801:48:31 17:10:26[002,007]30008 alfred.ng1 1622:58:503T22:58:56 20:00:010069420:00:011299919:13:019:13:04 23:45:53381:211:21 23:47:364",,terminal_output +1018,2278537,"TERMINAL",0,0,"933322222222222222222333383358",,terminal_output +1019,2280548,"TERMINAL",0,0,"9:015554444444444444444455553055740",,terminal_output +1020,2282660,"TERMINAL",0,0,"377766666666666666666777727792",,terminal_output +1021,2284562,"TERMINAL",0,0,"59998888888888888888899994996:014",,terminal_output +1022,2286679,"TERMINAL",0,0,"7315:01111010404040404040404040404040101040414111116111136",,terminal_output +1023,2288579,"TERMINAL",0,0,"933322222222222222222333383358",,terminal_output +1024,2290586,"TERMINAL",0,0,"116665555555555555555566664166851",,terminal_output +1025,2292596,"TERMINAL",0,0,"4888777777777777777778888388103",,terminal_output +1026,2294607,"TERMINAL",0,0,"640102099999999999999999505020205202025",,terminal_output +1027,2296688,"TERMINAL",0,0,"82222121515151515151515151515151212151222272247",,terminal_output +1028,2298623,"TERMINAL",0,0,"2044433333333333333333444494469",,terminal_output +1029,2300633,"TERMINAL",0,0,"2666555555555555555556666516688:01",,terminal_output +1030,2302641,"TERMINAL",0,0,"4888777777777777777778888388203",,terminal_output +1031,2304650,"TERMINAL",0,0,"6502030999999999999999999:009:0030305303025",,terminal_output +1032,2306722,"TERMINAL",0,0,"822231315:016:017:011:014:014:014:014:014:012:017:011:0131316:01222272247",,terminal_output +1033,2308673,"TERMINAL",0,0,"3044433333333333333333444494469",,terminal_output +1034,2310678,"TERMINAL",0,0,"26665555555555555555566661:0166811",,terminal_output +1035,2312688,"TERMINAL",0,0,"4888777777777777777778888388303",,terminal_output +1036,2314699,"TERMINAL",0,0,"64:00304099999999999999999101040405404025",,terminal_output +1037,2316758,"TERMINAL",0,0,"82224141111111111111111111111111414111222272247",,terminal_output +1038,2318716,"TERMINAL",0,0,"\r4044433333333333333333432 alfred.ngu1:53:391:53:56 7:05:44110:3008:44529993 nishant.k2 3843T18:29:1801:48:31 17:11:09[002,007]30008 alfred.ng1 1622:58:503T22:58:56 20:00:440069441299919:13:019:13:04 23:46:36381:211:21 23:48:194",,terminal_output +1039,2320724,"TERMINAL",0,0,"2666555555555555555556661166821",,terminal_output +1040,2322738,"TERMINAL",0,0,"488877777777777777777888388403",,terminal_output +1041,2324846,"TERMINAL",0,0,"6104050999999999999999992050505505025",,terminal_output 
+1042,2326751,"TERMINAL",0,0,"8222515121212121212121212121212151512122272247",,terminal_output +1043,2328761,"TERMINAL",0,0,"504443333333333333333344494469",,terminal_output +1044,2330768,"TERMINAL",0,0,"2666555555555555555556662166831",,terminal_output +1045,2332836,"TERMINAL",0,0,"488877777777777777777888388503",,terminal_output +1046,2334789,"TERMINAL",0,0,"620502:0099999999999999999306:009:0051:001:0025",,terminal_output +1047,2336795,"TERMINAL",0,0,"82221:014:013131313131313131313131312:014:013122272247",,terminal_output +1048,2338804,"TERMINAL",0,0,"9:00:004443333333333333333344494469",,terminal_output +1049,2340814,"TERMINAL",0,0,"2666555555555555555556663166841",,terminal_output +1050,2342868,"TERMINAL",0,0,"4888777777777777777778883887:003",,terminal_output +1051,2344915,"TERMINAL",0,0,"6306:0010999999999999999994010105101025",,terminal_output +1052,2346842,"TERMINAL",0,0,"8222111141414141414141414141414111114122272247",,terminal_output +1053,2348909,"TERMINAL",0,0,"104443333333333333333344494469",,terminal_output +1054,2350859,"TERMINAL",0,0,"2666555555555555555556664166851",,terminal_output +1055,2352908,"TERMINAL",0,0,"488877777777777777777888388103",,terminal_output +1056,2354954,"TERMINAL",0,0,"6401020999999999999999995020205202025",,terminal_output +1057,2356882,"TERMINAL",0,0,"\r8222212151515151515151515151515121215132 alfred.ngu1:53:391:53:56 7:06:22110:3009:22529993 nishant.k2 3843T18:29:1801:48:31 17:11:47[002,007]30008 alfred.ng1 1622:58:503T22:58:56 20:01:220069421299919:13:019:13:04 23:47:14381:211:21 23:48:574",,terminal_output +1058,2358949,"TERMINAL",0,0,"20444333333333333333334494469",,terminal_output +1059,2360900,"TERMINAL",0,0,"26665555555555555555566516689:01",,terminal_output +1060,2362910,"TERMINAL",0,0,"48887777777777777777788388203",,terminal_output +1061,2364924,"TERMINAL",0,0,"65020309999999999999999930305303025",,terminal_output +1062,2367036,"TERMINAL",0,0,"822231316:017:018:012:015:015:015:015:015:013:018:012:0131317:012272247",,terminal_output +1063,2368936,"TERMINAL",0,0,"30444333333333333333334494469",,terminal_output +1064,2370946,"TERMINAL",0,0,"266655555555555555555662:0166811",,terminal_output +1065,2372974,"TERMINAL",0,0,"48887777777777777777788388303",,terminal_output +1066,2375022,"TERMINAL",0,0,"65:0030409999999999999999940405404025",,terminal_output +1067,2376970,"TERMINAL",0,0,"822241411111111111111111111111114141112272247",,terminal_output +1068,2378983,"TERMINAL",0,0,"40444333333333333333334494469",,terminal_output +1069,2380986,"TERMINAL",0,0,"266655555555555555555661166821",,terminal_output +1070,2382998,"TERMINAL",0,0,"48887777777777777777788388403",,terminal_output +1071,2385004,"TERMINAL",0,0,"61040509999999999999999950505505025",,terminal_output +1072,2387016,"TERMINAL",0,0,"822251512121212121212121212121215151212272247",,terminal_output +1073,2389023,"TERMINAL",0,0,"50444333333333333333334494469",,terminal_output +1074,2391034,"TERMINAL",0,0,"266655555555555555555662166831",,terminal_output +1075,2393042,"TERMINAL",0,0,"48887777777777777777788388503",,terminal_output +1076,2395053,"TERMINAL",0,0,"620503:00999999999999999997:0010:0052:002:0025",,terminal_output +1077,2397061,"TERMINAL",0,0,"82222:015:013131313131313131313131313:015:01312272247",,terminal_output +1078,2399066,"TERMINAL",0,0,"1:00444333333333333333334494469",,terminal_output +1079,2401075,"TERMINAL",0,0,"266655555555555555555663166841",,terminal_output +1080,2403084,"TERMINAL",0,0,"488877777777777777777883888:003",,terminal_output 
+1081,2405095,"TERMINAL",0,0,"6307:00109999999999999999910105101025",,terminal_output +1082,2407121,"TERMINAL",0,0,"822211114141414141414141414141411111412272247",,terminal_output +1083,2409111,"TERMINAL",0,0,"10444333333333333333334494469",,terminal_output +1084,2411121,"TERMINAL",0,0,"266655555555555555555664166851",,terminal_output +1085,2413026,"TERMINAL",0,0,"[?1049l\r[?1l>]0;franz.srambical@hai-login2:~/jafar",,terminal_output +1086,2436903,"TERMINAL",0,0,"",,terminal_focus +1087,2437380,"TERMINAL",0,0,"source /home/franz.srambical/jafar/.venv/bin/activate",,terminal_command +1088,2437390,"TERMINAL",0,0,"]633;C]0;franz.srambical@hai-login2:~/jafar",,terminal_output +1089,2440523,"slurm/dev/franz/berlin/atari/dynamics.sh",2716,0,"",shellscript,selection_command +1090,2440771,"slurm/dev/franz/berlin/atari/dynamics.sh",2700,0,"",shellscript,selection_command +1091,2440806,"slurm/dev/franz/berlin/atari/dynamics.sh",2699,0,"",shellscript,selection_command +1092,2440831,"slurm/dev/franz/berlin/atari/dynamics.sh",2686,0,"",shellscript,selection_command +1093,2440866,"slurm/dev/franz/berlin/atari/dynamics.sh",2685,0,"",shellscript,selection_command +1094,2440898,"slurm/dev/franz/berlin/atari/dynamics.sh",2639,0,"",shellscript,selection_command +1095,2440938,"slurm/dev/franz/berlin/atari/dynamics.sh",2591,0,"",shellscript,selection_command +1096,2440964,"slurm/dev/franz/berlin/atari/dynamics.sh",2538,0,"",shellscript,selection_command +1097,2441000,"slurm/dev/franz/berlin/atari/dynamics.sh",2516,0,"",shellscript,selection_command +1098,2441028,"slurm/dev/franz/berlin/atari/dynamics.sh",2490,0,"",shellscript,selection_command +1099,2441064,"slurm/dev/franz/berlin/atari/dynamics.sh",2469,0,"",shellscript,selection_command +1100,2441102,"slurm/dev/franz/berlin/atari/dynamics.sh",2426,0,"",shellscript,selection_command +1101,2441134,"slurm/dev/franz/berlin/atari/dynamics.sh",2393,0,"",shellscript,selection_command +1102,2441168,"slurm/dev/franz/berlin/atari/dynamics.sh",2362,0,"",shellscript,selection_command +1103,2441196,"slurm/dev/franz/berlin/atari/dynamics.sh",2337,0,"",shellscript,selection_command +1104,2441234,"slurm/dev/franz/berlin/atari/dynamics.sh",2319,0,"",shellscript,selection_command +1105,2441265,"slurm/dev/franz/berlin/atari/dynamics.sh",2296,0,"",shellscript,selection_command +1106,2441296,"slurm/dev/franz/berlin/atari/dynamics.sh",2272,0,"",shellscript,selection_command +1107,2441329,"slurm/dev/franz/berlin/atari/dynamics.sh",2250,0,"",shellscript,selection_command +1108,2441365,"slurm/dev/franz/berlin/atari/dynamics.sh",2210,0,"",shellscript,selection_command +1109,2441398,"slurm/dev/franz/berlin/atari/dynamics.sh",2209,0,"",shellscript,selection_command +1110,2441431,"slurm/dev/franz/berlin/atari/dynamics.sh",2206,0,"",shellscript,selection_command +1111,2441465,"slurm/dev/franz/berlin/atari/dynamics.sh",2195,0,"",shellscript,selection_command +1112,2441498,"slurm/dev/franz/berlin/atari/dynamics.sh",2076,0,"",shellscript,selection_command +1113,2441529,"slurm/dev/franz/berlin/atari/dynamics.sh",2017,0,"",shellscript,selection_command +1114,2441564,"slurm/dev/franz/berlin/atari/dynamics.sh",1967,0,"",shellscript,selection_command +1115,2441598,"slurm/dev/franz/berlin/atari/dynamics.sh",1966,0,"",shellscript,selection_command +1116,2441630,"slurm/dev/franz/berlin/atari/dynamics.sh",1949,0,"",shellscript,selection_command +1117,2441663,"slurm/dev/franz/berlin/atari/dynamics.sh",1948,0,"",shellscript,selection_command 
+1118,2441699,"slurm/dev/franz/berlin/atari/dynamics.sh",1923,0,"",shellscript,selection_command +1119,2441731,"slurm/dev/franz/berlin/atari/dynamics.sh",1799,0,"",shellscript,selection_command +1120,2441765,"slurm/dev/franz/berlin/atari/dynamics.sh",1756,0,"",shellscript,selection_command +1121,2441798,"slurm/dev/franz/berlin/atari/dynamics.sh",1755,0,"",shellscript,selection_command +1122,2441832,"slurm/dev/franz/berlin/atari/dynamics.sh",1752,0,"",shellscript,selection_command +1123,2441861,"slurm/dev/franz/berlin/atari/dynamics.sh",1741,0,"",shellscript,selection_command +1124,2441894,"slurm/dev/franz/berlin/atari/dynamics.sh",1597,0,"",shellscript,selection_command +1125,2441927,"slurm/dev/franz/berlin/atari/dynamics.sh",1555,0,"",shellscript,selection_command +1126,2441959,"slurm/dev/franz/berlin/atari/dynamics.sh",1491,0,"",shellscript,selection_command +1127,2441994,"slurm/dev/franz/berlin/atari/dynamics.sh",1490,0,"",shellscript,selection_command +1128,2442280,"slurm/dev/franz/berlin/atari/dynamics.sh",1407,0,"",shellscript,selection_command +1129,2442531,"slurm/dev/franz/berlin/atari/dynamics.sh",1406,0,"",shellscript,selection_command +1130,2442570,"slurm/dev/franz/berlin/atari/dynamics.sh",1348,0,"",shellscript,selection_command +1131,2442598,"slurm/dev/franz/berlin/atari/dynamics.sh",1347,0,"",shellscript,selection_command +1132,2442636,"slurm/dev/franz/berlin/atari/dynamics.sh",1344,0,"",shellscript,selection_command +1133,2442664,"slurm/dev/franz/berlin/atari/dynamics.sh",1333,0,"",shellscript,selection_command +1134,2442701,"slurm/dev/franz/berlin/atari/dynamics.sh",1246,0,"",shellscript,selection_command +1135,2442731,"slurm/dev/franz/berlin/atari/dynamics.sh",1214,0,"",shellscript,selection_command +1136,2442766,"slurm/dev/franz/berlin/atari/dynamics.sh",1147,0,"",shellscript,selection_command +1137,2442796,"slurm/dev/franz/berlin/atari/dynamics.sh",1146,0,"",shellscript,selection_command +1138,2442833,"slurm/dev/franz/berlin/atari/dynamics.sh",1119,0,"",shellscript,selection_command +1139,2442982,"slurm/dev/franz/berlin/atari/dynamics.sh",1146,0,"",shellscript,selection_command +1140,2443228,"slurm/dev/franz/berlin/atari/dynamics.sh",1147,0,"",shellscript,selection_command +1141,2443262,"slurm/dev/franz/berlin/atari/dynamics.sh",1214,0,"",shellscript,selection_command +1142,2443290,"slurm/dev/franz/berlin/atari/dynamics.sh",1246,0,"",shellscript,selection_command +1143,2443325,"slurm/dev/franz/berlin/atari/dynamics.sh",1333,0,"",shellscript,selection_command +1144,2443356,"slurm/dev/franz/berlin/atari/dynamics.sh",1344,0,"",shellscript,selection_command +1145,2443391,"slurm/dev/franz/berlin/atari/dynamics.sh",1347,0,"",shellscript,selection_command +1146,2443419,"slurm/dev/franz/berlin/atari/dynamics.sh",1348,0,"",shellscript,selection_command +1147,2443458,"slurm/dev/franz/berlin/atari/dynamics.sh",1406,0,"",shellscript,selection_command +1148,2443625,"slurm/dev/franz/berlin/atari/dynamics.sh",1407,0,"",shellscript,selection_command +1149,2444126,"slurm/dev/franz/berlin/atari/dynamics.sh",1424,0,"",shellscript,selection_command +1150,2444268,"slurm/dev/franz/berlin/atari/dynamics.sh",1427,0,"",shellscript,selection_command +1151,2445210,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,0,"",shellscript,selection_command +1152,2445382,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,1,"/",shellscript,selection_command +1153,2445484,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,5,"/fast",shellscript,selection_command 
+1154,2445742,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,6,"/fast/",shellscript,selection_command +1155,2445766,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,13,"/fast/project",shellscript,selection_command +1156,2445797,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,14,"/fast/project/",shellscript,selection_command +1157,2445830,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,30,"/fast/project/HFMI_SynergyUnit",shellscript,selection_command +1158,2445861,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,31,"/fast/project/HFMI_SynergyUnit/",shellscript,selection_command +1159,2446096,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,39,"/fast/project/HFMI_SynergyUnit/jafar_ws",shellscript,selection_command +1160,2446342,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,40,"/fast/project/HFMI_SynergyUnit/jafar_ws/",shellscript,selection_command +1161,2446376,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,44,"/fast/project/HFMI_SynergyUnit/jafar_ws/data",shellscript,selection_command +1162,2446416,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,45,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/",shellscript,selection_command +1163,2446723,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,50,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari",shellscript,selection_command +1164,2446908,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,53,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${",shellscript,selection_command +1165,2447122,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,61,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_NAME",shellscript,selection_command +1166,2447418,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,60,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_NAM",shellscript,selection_command +1167,2447565,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,59,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_NA",shellscript,selection_command +1168,2447828,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,58,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_N",shellscript,selection_command +1169,2447866,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,57,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_",shellscript,selection_command +1170,2447880,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,56,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV",shellscript,selection_command +1171,2447913,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,55,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${EN",shellscript,selection_command +1172,2447946,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,54,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${E",shellscript,selection_command +1173,2448075,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,53,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${",shellscript,selection_command +1174,2448270,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,52,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/$",shellscript,selection_command +1175,2448421,"slurm/dev/franz/berlin/atari/dynamics.sh",1426,51,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/",shellscript,selection_command +1176,2449234,"slurm/dev/franz/berlin/atari/dynamics.sh",1476,0,"",shellscript,selection_command +1177,2457098,"TERMINAL",0,0,"ls /fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/bank_heist/",,terminal_command +1178,2457105,"TERMINAL",0,0,"]633;Cmetadata.json test train val\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +1179,2459178,"TERMINAL",0,0,"ls 
/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/bank_heist/train/",,terminal_command +1180,2459201,"TERMINAL",0,0,"]633;Cdata_0000.array_record data_0096.array_record data_0192.array_record data_0288.array_record data_0384.array_record data_0480.array_record data_0576.array_record\r\ndata_0001.array_record data_0097.array_record data_0193.array_record data_0289.array_record data_0385.array_record data_0481.array_record data_0577.array_record\r\ndata_0002.array_record data_0098.array_record data_0194.array_record data_0290.array_record data_0386.array_record data_0482.array_record data_0578.array_record\r\ndata_0003.array_record data_0099.array_record data_0195.array_record data_0291.array_record data_0387.array_record data_0483.array_record data_0579.array_record\r\ndata_0004.array_record data_0100.array_record data_0196.array_record data_0292.array_record data_0388.array_record data_0484.array_record data_0580.array_record\r\ndata_0005.array_record data_0101.array_record data_0197.array_record data_0293.array_record data_0389.array_record data_0485.array_record data_0581.array_record\r\ndata_0006.array_record data_0102.array_record data_0198.array_record data_0294.array_record data_0390.array_record data_0486.array_record data_0582.array_record\r\ndata_0007.array_record data_0103.array_record data_0199.array_record data_0295.array_record data_0391.array_record data_0487.array_record data_0583.array_record\r\ndata_0008.array_record data_0104.array_record data_0200.array_record data_0296.array_record data_0392.array_record data_0488.array_record data_0584.array_record\r\ndata_0009.array_record data_0105.array_record data_0201.array_record data_0297.array_record data_0393.array_record data_0489.array_record data_0585.array_record\r\ndata_0010.array_record data_0106.array_record data_0202.array_record data_0298.array_record data_0394.array_record data_0490.array_record data_0586.array_record\r\ndata_0011.array_record data_0107.array_record data_0203.array_record data_0299.array_record data_0395.array_record data_0491.array_record data_0587.array_record\r\ndata_0012.array_record data_0108.array_record data_0204.array_record data_0300.array_record data_0396.array_record data_0492.array_record data_0588.array_record\r\ndata_0013.array_record data_0109.array_record data_0205.array_record data_0301.array_record data_0397.array_record data_0493.array_record data_0589.array_record\r\ndata_0014.array_record data_0110.array_record data_0206.array_record data_0302.array_record data_0398.array_record data_0494.array_record data_0590.array_record\r\ndata_0015.array_record data_0111.array_record data_0207.array_record data_0303.array_record data_0399.array_record data_0495.array_record data_0591.array_record\r\ndata_0016.array_record data_0112.array_record data_0208.array_record data_0304.array_record data_0400.array_record data_0496.array_record data_0592.array_record\r\ndata_0017.array_record data_0113.array_record data_0209.array_record data_0305.array_record data_0401.array_record data_0497.array_record data_0593.array_record\r\ndata_0018.array_record data_0114.array_record data_0210.array_record data_0306.array_record data_0402.array_record data_0498.array_record data_0594.array_record\r\ndata_0019.array_record data_0115.array_record data_0211.array_record data_0307.array_record data_0403.array_record data_0499.array_record data_0595.array_record\r\ndata_0020.array_record data_0116.array_record data_0212.array_record data_0308.array_record data_0404.array_record data_0500.array_record 
data_0596.array_record\r\ndata_0021.array_record data_0117.array_record data_0213.array_record data_0309.array_record data_0405.array_record data_0501.array_record data_0597.array_record\r\ndata_0022.array_record data_0118.array_record data_0214.array_record data_0310.array_record data_0406.array_record data_0502.array_record data_0598.array_record\r\ndata_0023.array_record data_0119.array_record data_0215.array_record data_0311.array_record data_0407.array_record data_0503.array_record data_0599.array_record\r\ndata_0024.array_record data_0120.array_record data_0216.array_record data_0312.array_record data_0408.array_record data_0504.array_record data_0600.array_record\r\ndata_0025.array_record data_0121.array_record data_0217.array_record data_0313.array_record data_0409.array_record data_0505.array_record data_0601.array_record\r\ndata_0026.array_record data_0122.array_record data_0218.array_record data_0314.array_record data_0410.array_record data_0506.array_record data_0602.array_record\r\ndata_0027.array_record data_0123.array_record data_0219.array_record data_0315.array_record data_0411.array_record data_0507.array_record data_0603.array_record\r\ndata_0028.array_record data_0124.array_record data_0220.array_record data_0316.array_record data_0412.array_record data_0508.array_record data_0604.array_record\r\ndata_0029.array_record data_0125.array_record data_0221.array_record data_0317.array_record data_0413.array_record data_0509.array_record data_0605.array_record\r\ndata_0030.array_record data_0126.array_record data_0222.array_record data_0318.array_record data_0414.array_record data_0510.array_record data_0606.array_record\r\ndata_0031.array_record data_0127.array_record data_0223.array_record data_0319.array_record data_0415.array_record data_0511.array_record data_0607.array_record\r\ndata_0032.array_record data_0128.array_record data_0224.array_record data_0320.array_record data_0416.array_record data_0512.array_record data_0608.array_record\r\ndata_0033.array_record data_0129.array_record data_0225.array_record data_0321.array_record data_0417.array_record data_0513.array_record data_0609.array_record\r\ndata_0034.array_record data_0130.array_record data_0226.array_record data_0322.array_record data_0418.array_record data_0514.array_record data_0610.array_record\r\ndata_0035.array_record data_0131.array_record data_0227.array_record data_0323.array_record data_0419.array_record data_0515.array_record data_0611.array_record\r\ndata_0036.array_record data_0132.array_record data_0228.array_record data_0324.array_record data_0420.array_record data_0516.array_record data_0612.array_record\r\ndata_0037.array_record data_0133.array_record data_0229.array_record data_0325.array_record data_0421.array_record data_0517.array_record data_0613.array_record\r\ndata_0038.array_record data_0134.array_record data_0230.array_record data_0326.array_record data_0422.array_record data_0518.array_record data_0614.array_record\r\ndata_0039.array_record data_0135.array_record data_0231.array_record data_0327.array_record data_0423.array_record data_0519.array_record data_0615.array_record\r\ndata_0040.array_record data_0136.array_record data_0232.array_record data_0328.array_record data_0424.array_record data_0520.array_record data_0616.array_record\r\ndata_0041.array_record data_0137.array_record data_0233.array_record data_0329.array_record data_0425.array_record data_0521.array_record data_0617.array_record\r\ndata_0042.array_record data_0138.array_record data_0234.array_record 
data_0330.array_record data_0426.array_record data_0522.array_record data_0618.array_record\r\ndata_0043.array_record data_0139.array_record data_0235.array_record data_0331.array_record data_0427.array_record data_0523.array_record data_0619.array_record\r\ndata_0044.array_record data_0140.array_record data_0236.array_record data_0332.array_record data_0428.array_record data_0524.array_record data_0620.array_record\r\ndata_0045.array_record data_0141.array_record data_0237.array_record data_0333.array_record data_0429.array_record data_0525.array_record data_0621.array_record\r\ndata_0046.array_record data_0142.array_record data_0238.array_record data_0334.array_record data_0430.array_record data_0526.array_record data_0622.array_record\r\ndata_0047.array_record data_0143.array_record data_0239.array_record data_0335.array_record data_0431.array_record data_0527.array_record data_0623.array_record\r\ndata_0048.array_record data_0144.array_record data_0240.array_record data_0336.array_record data_0432.array_record data_0528.array_record data_0624.array_record\r\ndata_0049.array_record data_0145.array_record data_0241.array_record data_0337.array_record data_0433.array_record data_0529.array_record data_0625.array_record\r\ndata_0050.array_record data_0146.array_record data_0242.array_record data_0338.array_record data_0434.array_record data_0530.array_record data_0626.array_record\r\ndata_0051.array_record data_0147.array_record data_0243.array_record data_0339.array_record data_0435.array_record data_0531.array_record data_0627.array_record\r\ndata_0052.array_record data_0148.array_record data_0244.array_record data_0340.array_record data_0436.array_record data_0532.array_record data_0628.array_record\r\ndata_0053.array_record data_0149.array_record data_0245.array_record data_0341.array_record data_0437.array_record data_0533.array_record data_0629.array_record\r\ndata_0054.array_record data_0150.array_record data_0246.array_record data_0342.array_record data_0438.array_record data_0534.array_record data_0630.array_record\r\ndata_0055.array_record data_0151.array_record data_0247.array_record data_0343.array_record data_0439.array_record data_0535.array_record data_0631.array_record\r\ndata_0056.array_record data_0152.array_record data_0248.array_record data_0344.array_record data_0440.array_record data_0536.array_record data_0632.array_record\r\ndata_0057.array_record data_0153.array_record data_0249.array_record data_0345.array_record data_0441.array_record data_0537.array_record data_0633.array_record\r\ndata_0058.array_record data_0154.array_record data_0250.array_record data_0346.array_record data_0442.array_record data_0538.array_record data_0634.array_record\r\ndata_0059.array_record data_0155.array_record data_0251.array_record data_0347.array_record data_0443.array_record data_0539.array_record data_0635.array_record\r\ndata_0060.array_record data_0156.array_record data_0252.array_record data_0348.array_record data_0444.array_record data_0540.array_record data_0636.array_record\r\ndata_0061.array_record data_0157.array_record data_0253.array_record data_0349.array_record data_0445.array_record data_0541.array_record data_0637.array_record\r\ndata_0062.array_record data_0158.array_record data_0254.array_record data_0350.array_record data_0446.array_record data_0542.array_record data_0638.array_record\r\ndata_0063.array_record data_0159.array_record data_0255.array_record data_0351.array_record data_0447.array_record data_0543.array_record 
data_0639.array_record\r\ndata_0064.array_record data_0160.array_record data_0256.array_record data_0352.array_record data_0448.array_record data_0544.array_record data_0640.array_record\r\ndata_0065.array_record data_0161.array_record data_0257.array_record data_0353.array_record data_0449.array_record data_0545.array_record data_0641.array_record\r\ndata_0066.array_record data_0162.array_record data_0258.array_record data_0354.array_record data_0450.array_record data_0546.array_record data_0642.array_record\r\ndata_0067.array_record data_0163.array_record data_0259.array_record data_0355.array_record data_0451.array_record data_0547.array_record data_0643.array_record\r\ndata_0068.array_record data_0164.array_record data_0260.array_record data_0356.array_record data_0452.array_record data_0548.array_record data_0644.array_record\r\ndata_0069.array_record data_0165.array_record data_0261.array_record data_0357.array_record data_0453.array_record data_0549.array_record data_0645.array_record\r\ndata_0070.array_record data_0166.array_record data_0262.array_record data_0358.array_record data_0454.array_record data_0550.array_record data_0646.array_record\r\ndata_0071.array_record data_0167.array_record data_0263.array_record data_0359.array_record data_0455.array_record data_0551.array_record data_0647.array_record\r\ndata_0072.array_record data_0168.array_record data_0264.array_record data_0360.array_record data_0456.array_record data_0552.array_record data_0648.array_record\r\ndata_0073.array_record data_0169.array_record data_0265.array_record data_0361.array_record data_0457.array_record data_0553.array_record data_0649.array_record\r\ndata_0074.array_record data_0170.array_record data_0266.array_record data_0362.array_record data_0458.array_record data_0554.array_record data_0650.array_record\r\ndata_0075.array_record data_0171.array_record data_0267.array_record data_0363.array_record data_0459.array_record data_0555.array_record data_0651.array_record\r\ndata_0076.array_record data_0172.array_record data_0268.array_record data_0364.array_record data_0460.array_record data_0556.array_record data_0652.array_record\r\ndata_0077.array_record data_0173.array_record data_0269.array_record data_0365.array_record data_0461.array_record data_0557.array_record data_0653.array_record\r\ndata_0078.array_record data_0174.array_record data_0270.array_record data_0366.array_record data_0462.array_record data_0558.array_record data_0654.array_record\r\ndata_0079.array_record data_0175.array_record data_0271.array_record data_0367.array_record data_0463.array_record data_0559.array_record data_0655.array_record\r\ndata_0080.array_record data_0176.array_record data_0272.array_record data_0368.array_record data_0464.array_record data_0560.array_record data_0656.array_record\r\ndata_0081.array_record data_0177.array_record data_0273.array_record data_0369.array_record data_0465.array_record data_0561.array_record data_0657.array_record\r\ndata_0082.array_record data_0178.array_record data_0274.array_record data_0370.array_record data_0466.array_record data_0562.array_record data_0658.array_record\r\ndata_0083.array_record data_0179.array_record data_0275.array_record data_0371.array_record data_0467.array_record data_0563.array_record data_0659.array_record\r\ndata_0084.array_record data_0180.array_record data_0276.array_record data_0372.array_record data_0468.array_record data_0564.array_record data_0660.array_record\r\ndata_0085.array_record data_0181.array_record data_0277.array_record 
data_0373.array_record data_0469.array_record data_0565.array_record data_0661.array_record\r\ndata_0086.array_record data_0182.array_record data_0278.array_record data_0374.array_record data_0470.array_record data_0566.array_record data_0662.array_record\r\ndata_0087.array_record data_0183.array_record data_0279.array_record data_0375.array_record data_0471.array_record data_0567.array_record data_0663.array_record\r\ndata_0088.array_record data_0184.array_record data_0280.array_record data_0376.array_record data_0472.array_record data_0568.array_record data_0664.array_record\r\ndata_0089.array_record data_0185.array_record data_0281.array_record data_0377.array_record data_0473.array_record data_0569.array_record data_0665.array_record\r\ndata_0090.array_record data_0186.array_record data_0282.array_record data_0378.array_record data_0474.array_record data_0570.array_record data_0666.array_record\r\ndata_0091.array_record data_0187.array_record data_0283.array_record data_0379.array_record data_0475.array_record data_0571.array_record data_0667.array_record\r\ndata_0092.array_record data_0188.array_record data_0284.array_record data_0380.array_record data_0476.array_record data_0572.array_record data_0668.array_record\r\ndata_0093.array_record data_0189.array_record data_0285.array_record data_0381.array_record data_0477.array_record data_0573.array_record data_0669.array_record\r\ndata_0094.array_record data_0190.array_record data_0286.array_record data_0382.array_record data_0478.array_record data_0574.array_record\r\ndata_0095.array_record data_0191.array_record data_0287.array_record data_0383.array_record data_0479.array_record data_0575.array_record\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +1181,2466532,"TERMINAL",0,0,"pwd",,terminal_command +1182,2474528,"TERMINAL",0,0,"watch",,terminal_focus +1183,2474955,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_gt_actions_concat_branch.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_50M_dataset_gt_actions_concat_branch\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 50m_dataset mila_submission ablation gt-actions patch_size_16 
concat_branch""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --use_gt_actions \\n --num_actions=15 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +1184,2476835,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"",shellscript,tab +1185,2477902,"slurm/dev/franz/berlin/coinrun/dynamics_debug/debug_gt_actions.sh",0,0,"# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --use_gt_actions \\n --no-log \\n --ckpt_dir test_ckpt_dir \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +1186,2479760,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"",shellscript,tab +1187,2481722,"TERMINAL",0,0,"bash",,terminal_focus +1188,2494639,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_tokenizer_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun tokenizer 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +1189,2497052,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"",shellscript,tab +1190,2500271,"TERMINAL",0,0,"bash",,terminal_focus +1191,2504826,"TERMINAL",0,0,"watch",,terminal_focus +1192,2505684,"TERMINAL",0,0,"bash",,terminal_focus +1193,2509276,"TERMINAL",0,0,"uv run jasmine_data/atari/visualize_array_record.py --input=/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/bank_heist/train/data_0358.array_record --output_dir=test_vis --height=84 --width=84",,terminal_command +1194,2509327,"TERMINAL",0,0,"]633;C",,terminal_output +1195,2509544,"TERMINAL",0,0,"error: Failed to spawn: `jasmine_data/atari/visualize_array_record.py`\r\n Caused by: No such file or directory (os error 2)\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +1196,2523694,"TERMINAL",0,0,"cd data",,terminal_command +1197,2525890,"TERMINAL",0,0,"uv run jasmine_data/atari/visualize_array_record.py --input=/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/bank_heist/train/data_0358.array_record --output_dir=test_vis --height=84 --width=84",,terminal_command 
+1198,2525935,"TERMINAL",0,0,"]633;C",,terminal_output +1199,2526261,"TERMINAL",0,0,"⠋ Preparing packages... (0/0) \r⠋ Preparing packages... (0/1) \r⠙ Preparing packages... (0/1) \r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠙ Preparing packages... (0/1) ",,terminal_output +1200,2526431,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠹ Preparing packages... (0/1) ",,terminal_output +1201,2526631,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠸ Preparing packages... (0/1) ",,terminal_output +1202,2526832,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠼ Preparing packages... (0/1) ",,terminal_output +1203,2527032,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠴ Preparing packages... (0/1) ",,terminal_output +1204,2527231,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠦ Preparing packages... (0/1) ",,terminal_output +1205,2527434,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠧ Preparing packages... (0/1) ",,terminal_output +1206,2527631,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠇ Preparing packages... (0/1) ",,terminal_output +1207,2527833,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠋ Preparing packages... (0/1) ",,terminal_output +1208,2528032,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠙ Preparing packages... (0/1) ",,terminal_output +1209,2528233,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠹ Preparing packages... (0/1) ",,terminal_output +1210,2528433,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠸ Preparing packages... (0/1) ",,terminal_output +1211,2528633,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠼ Preparing packages... (0/1) ",,terminal_output +1212,2528832,"TERMINAL",0,0,"\r\r Building jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠴ Preparing packages... (0/1) ",,terminal_output +1213,2528899,"TERMINAL",0,0,"\r\r Built jasmine-data @ file:///fast/home/franz.srambical/jafar/data\r\n⠴ Preparing packages... (0/1) ",,terminal_output +1214,2529037,"TERMINAL",0,0,"\r⠴  (1/1) \rUninstalled 1 package in 46ms\r\n░░░░░░░░░░░░░░░░░░░░ [0/0] Installing wheels... \r░░░░░░░░░░░░░░░░░░░░ [0/1] Installing wheels... 
",,terminal_output +1215,2529156,"TERMINAL",0,0,"\r░░░░░░░░░░░░░░░░░░░░ [0/1] jasmine-data==0.1.0 (from file:///fast/home/franz.srambical/jafar/data) \r████████████████████ [1/1] jasmine-data==0.1.0 (from file:///fast/home/franz.srambical/jafar/data) \rInstalled 1 package in 121ms\r\n",,terminal_output +1216,2537090,"TERMINAL",0,0,"Loaded record with sequence_length=160, actions_present=True\r\n/fast/home/franz.srambical/jafar/data/jasmine_data/atari/visualize_array_record.py:113: DeprecationWarning: 'mode' parameter is deprecated and will be removed in Pillow 13 (2026-10-15)\r\n img = Image.fromarray(img_np_rgb, mode=""RGB"")\r\n",,terminal_output +1217,2543672,"TERMINAL",0,0,"Saved GIF to test_vis/sequence.gif with 160 frames (H=84, W=84, C=3, fps=10).\r\n",,terminal_output +1218,2543825,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar/data",,terminal_output +1219,2603529,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"",shellscript,tab +1220,2609430,"TERMINAL",0,0,"bash",,terminal_focus +1221,2612947,"TERMINAL",0,0,"watch",,terminal_focus +1222,2614307,"TERMINAL",0,0,"bash",,terminal_focus +1223,2618418,"TERMINAL",0,0,"uv run jasmine_data/atari/visualize_array_record.py --input=/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/bank_heist/train/data_03575.array_record --output_dir=test_vis --height=84 --width=84",,terminal_command +1224,2618468,"TERMINAL",0,0,"]633;C",,terminal_output +1225,2618964,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/fast/home/franz.srambical/jafar/data/jasmine_data/atari/visualize_array_record.py"", line 210, in \r\n main()\r\n File ""/fast/home/franz.srambical/jafar/data/jasmine_data/atari/visualize_array_record.py"", line 187, in main\r\n record = load_one_record(args.input)\r\n File ""/fast/home/franz.srambical/jafar/data/jasmine_data/atari/visualize_array_record.py"", line 33, in load_one_record\r\n raw = reader.read()\r\nRuntimeError: open() failed: No such file or directory; opening /fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/bank_heist/train/data_03575.array_record\r\n]0;franz.srambical@hai-login2:~/jafar/data",,terminal_output +1226,2629426,"TERMINAL",0,0,"uv run jasmine_data/atari/visualize_array_record.py --input=/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/bank_heist/train/data_0575.array_record --output_dir=test_vis --height=84 --width=84",,terminal_command +1227,2629477,"TERMINAL",0,0,"]633;C",,terminal_output +1228,2630271,"TERMINAL",0,0,"Loaded record with sequence_length=160, actions_present=True\r\n/fast/home/franz.srambical/jafar/data/jasmine_data/atari/visualize_array_record.py:113: DeprecationWarning: 'mode' parameter is deprecated and will be removed in Pillow 13 (2026-10-15)\r\n img = Image.fromarray(img_np_rgb, mode=""RGB"")\r\n",,terminal_output +1229,2637700,"TERMINAL",0,0,"Saved GIF to test_vis/sequence.gif with 160 frames (H=84, W=84, C=3, fps=10).\r\n]0;franz.srambical@hai-login2:~/jafar/data",,terminal_output +1230,2662143,"TERMINAL",0,0,"uv run jasmine_data/atari/visualize_array_record.py --input=/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/bank_heist/train/data_0575.array_record --output_dir=test_vis --height=84 --width=84 --env_id=ALE/BankHeist-v5",,terminal_command +1231,2662194,"TERMINAL",0,0,"]633;C",,terminal_output +1232,2662842,"TERMINAL",0,0,"Loaded record with sequence_length=160, actions_present=True\r\n",,terminal_output +1233,2676940,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"",shellscript,tab +1234,2712844,"TERMINAL",0,0,"Gym has been unmaintained since 2022 and does not 
support NumPy 2.0 amongst other critical functionality.\r\nPlease upgrade to Gymnasium, the maintained drop-in replacement of Gym, or contact the authors of your software and request that they upgrade.\r\nSee the migration guide at https://gymnasium.farama.org/introduction/migration_guide/ for additional information.\r\n",,terminal_output +1235,2715082,"TERMINAL",0,0,"A.L.E: Arcade Learning Environment (version 0.8.1+53f58b7)\r\n[Powered by Stella]\r\n",,terminal_output +1236,2718057,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/data/jasmine_data/atari/visualize_array_record.py:113: DeprecationWarning: 'mode' parameter is deprecated and will be removed in Pillow 13 (2026-10-15)\r\n img = Image.fromarray(img_np_rgb, mode=""RGB"")\r\n",,terminal_output +1237,2725087,"TERMINAL",0,0,"Saved GIF to test_vis/sequence.gif with 160 frames (H=84, W=84, C=3, fps=10).\r\n",,terminal_output +1238,2726226,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar/data",,terminal_output +1239,2729249,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"",shellscript,tab +1240,2747688,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"",shellscript,tab +1241,2755617,"TERMINAL",0,0,"uv run jasmine_data/atari/visualize_array_record.py --input=/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/bank_heist/train/data_0001.array_record --output_dir=test_vis --height=84 --width=84 --env_id=ALE/BankHeist-v5",,terminal_command +1242,2755667,"TERMINAL",0,0,"]633;C",,terminal_output +1243,2756415,"TERMINAL",0,0,"Loaded record with sequence_length=21, actions_present=True\r\n",,terminal_output +1244,2766021,"TERMINAL",0,0,"Gym has been unmaintained since 2022 and does not support NumPy 2.0 amongst other critical functionality.\r\nPlease upgrade to Gymnasium, the maintained drop-in replacement of Gym, or contact the authors of your software and request that they upgrade.\r\nSee the migration guide at https://gymnasium.farama.org/introduction/migration_guide/ for additional information.\r\n",,terminal_output +1245,2767726,"TERMINAL",0,0,"A.L.E: Arcade Learning Environment (version 0.8.1+53f58b7)\r\n[Powered by Stella]\r\n",,terminal_output +1246,2768258,"TERMINAL",0,0,"/fast/home/franz.srambical/jafar/data/jasmine_data/atari/visualize_array_record.py:113: DeprecationWarning: 'mode' parameter is deprecated and will be removed in Pillow 13 (2026-10-15)\r\n img = Image.fromarray(img_np_rgb, mode=""RGB"")\r\n",,terminal_output +1247,2769111,"TERMINAL",0,0,"Saved GIF to test_vis/sequence.gif with 21 frames (H=84, W=84, C=3, fps=10).\r\n",,terminal_output +1248,2769314,"TERMINAL",0,0,"]0;franz.srambical@hai-login2:~/jafar/data",,terminal_output +1249,3099237,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",0,0,"#!/usr/bin/env bash\n\nset -euo pipefail\n\nREPO_ROOT=""/home/franz.srambical/jafar""\n\nDATA_ROOT=""${DATA_ROOT:-/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari}""\n\nTOKENIZER_SCRIPT=""${TOKENIZER_SCRIPT:-$REPO_ROOT/slurm/dev/franz/berlin/atari/tokenizer.sh}""\n\nif [ ""$#"" -gt 0 ]; then\n ENV_LIST=(""$@"")\nelse\n if [ ! -d ""$DATA_ROOT"" ]; then\n echo ""DATA_ROOT does not exist: $DATA_ROOT"" >&2\n exit 1\n fi\n # list immediate subdirectories (candidate env names)\n mapfile -t ENV_LIST < <(find ""$DATA_ROOT"" -mindepth 1 -maxdepth 1 -type d -printf '%f\n' | sort)\nfi\n\necho ""Submitting tokenizer jobs for environments: ${ENV_LIST[*]}""\n\nfor env in ""${ENV_LIST[@]}""; do\n env_dir=""$DATA_ROOT/$env""\n if [ ! -d ""$env_dir/train"" ] || [ ! 
-d ""$env_dir/val"" ]; then\n echo ""Skipping $env (missing train/val under $env_dir)""\n continue\n fi\n\n job_name=""tokenizer_atari_${env}_dev""\n\n job_id=$(sbatch --parsable \\n --job-name=""$job_name"" \\n --chdir=""$REPO_ROOT"" \\n --export=ALL,ENV_NAME=""$env"" \\n ""$TOKENIZER_SCRIPT"")\n\n echo ""Submitted $job_name (ENV_NAME=$env) as job $job_id""\ndone\n\n\n",shellscript,tab +1250,3107687,"slurm/dev/franz/berlin/atari/tokenizer.sh",0,0,"",shellscript,tab +1251,3227448,"TERMINAL",0,0,"watch",,terminal_focus +1252,3255430,"slurm/dev/franz/berlin/atari/tokenizer.sh",2428,0,"",shellscript,selection_command +1253,3255671,"slurm/dev/franz/berlin/atari/tokenizer.sh",2427,0,"",shellscript,selection_command +1254,3255703,"slurm/dev/franz/berlin/atari/tokenizer.sh",2414,0,"",shellscript,selection_command +1255,3255725,"slurm/dev/franz/berlin/atari/tokenizer.sh",2413,0,"",shellscript,selection_command +1256,3255765,"slurm/dev/franz/berlin/atari/tokenizer.sh",2365,0,"",shellscript,selection_command +1257,3255793,"slurm/dev/franz/berlin/atari/tokenizer.sh",2319,0,"",shellscript,selection_command +1258,3255829,"slurm/dev/franz/berlin/atari/tokenizer.sh",2297,0,"",shellscript,selection_command +1259,3255858,"slurm/dev/franz/berlin/atari/tokenizer.sh",2271,0,"",shellscript,selection_command +1260,3255896,"slurm/dev/franz/berlin/atari/tokenizer.sh",2252,0,"",shellscript,selection_command +1261,3255925,"slurm/dev/franz/berlin/atari/tokenizer.sh",2209,0,"",shellscript,selection_command +1262,3255959,"slurm/dev/franz/berlin/atari/tokenizer.sh",2176,0,"",shellscript,selection_command +1263,3255993,"slurm/dev/franz/berlin/atari/tokenizer.sh",2145,0,"",shellscript,selection_command +1264,3256028,"slurm/dev/franz/berlin/atari/tokenizer.sh",2120,0,"",shellscript,selection_command +1265,3256359,"slurm/dev/franz/berlin/atari/tokenizer.sh",2102,0,"",shellscript,selection_command +1266,3256524,"slurm/dev/franz/berlin/atari/tokenizer.sh",2073,0,"",shellscript,selection_command +1267,3256796,"slurm/dev/franz/berlin/atari/tokenizer.sh",2102,0,"",shellscript,selection_command +1268,3257037,"slurm/dev/franz/berlin/atari/tokenizer.sh",2120,0,"",shellscript,selection_command +1269,3257400,"slurm/dev/franz/berlin/atari/tokenizer.sh",2102,0,"",shellscript,selection_command +1270,3257644,"slurm/dev/franz/berlin/atari/tokenizer.sh",2073,0,"",shellscript,selection_command +1271,3257859,"slurm/dev/franz/berlin/atari/tokenizer.sh",2047,0,"",shellscript,selection_command +1272,3258009,"slurm/dev/franz/berlin/atari/tokenizer.sh",2027,0,"",shellscript,selection_command +1273,3258365,"slurm/dev/franz/berlin/atari/tokenizer.sh",2032,0,"",shellscript,selection_command +1274,3258544,"slurm/dev/franz/berlin/atari/tokenizer.sh",2038,0,"",shellscript,selection_command +1275,3258744,"slurm/dev/franz/berlin/atari/tokenizer.sh",2039,0,"",shellscript,selection_command +1276,3258934,"slurm/dev/franz/berlin/atari/tokenizer.sh",2041,0,"",shellscript,selection_command +1277,3259261,"slurm/dev/franz/berlin/atari/tokenizer.sh",2042,0,"",shellscript,selection_command +1278,3260104,"slurm/dev/franz/berlin/atari/tokenizer.sh",2043,0,"",shellscript,selection_command +1279,3262218,"slurm/dev/franz/berlin/atari/tokenizer.sh",2043,1,"7",shellscript,content +1280,3263294,"slurm/dev/franz/berlin/atari/tokenizer.sh",2043,1,"6",shellscript,content +1281,3297924,"jasmine/train_tokenizer.py",0,0,"",python,tab +1282,3302000,"jasmine/train_tokenizer.py",721,0,"",python,selection_keyboard 
+1283,3305570,"slurm/dev/franz/berlin/atari/tokenizer.sh",0,0,"",shellscript,tab +1284,3309792,"slurm/dev/franz/berlin/atari/tokenizer.sh",617,0,"",shellscript,selection_keyboard +1285,3309943,"slurm/dev/franz/berlin/atari/tokenizer.sh",0,0,"",shellscript,selection_keyboard +1286,3310392,"slurm/dev/franz/berlin/atari/tokenizer.sh",20,0,"",shellscript,selection_command +1287,3310660,"slurm/dev/franz/berlin/atari/tokenizer.sh",21,0,"",shellscript,selection_command +1288,3310683,"slurm/dev/franz/berlin/atari/tokenizer.sh",39,0,"",shellscript,selection_command +1289,3310692,"slurm/dev/franz/berlin/atari/tokenizer.sh",67,0,"",shellscript,selection_command +1290,3310724,"slurm/dev/franz/berlin/atari/tokenizer.sh",88,0,"",shellscript,selection_command +1291,3310761,"slurm/dev/franz/berlin/atari/tokenizer.sh",112,0,"",shellscript,selection_command +1292,3310791,"slurm/dev/franz/berlin/atari/tokenizer.sh",138,0,"",shellscript,selection_command +1293,3310826,"slurm/dev/franz/berlin/atari/tokenizer.sh",232,0,"",shellscript,selection_command +1294,3310858,"slurm/dev/franz/berlin/atari/tokenizer.sh",325,0,"",shellscript,selection_command +1295,3310892,"slurm/dev/franz/berlin/atari/tokenizer.sh",370,0,"",shellscript,selection_command +1296,3310946,"slurm/dev/franz/berlin/atari/tokenizer.sh",388,0,"",shellscript,selection_command +1297,3310960,"slurm/dev/franz/berlin/atari/tokenizer.sh",439,0,"",shellscript,selection_command +1298,3310997,"slurm/dev/franz/berlin/atari/tokenizer.sh",440,0,"",shellscript,selection_command +1299,3311027,"slurm/dev/franz/berlin/atari/tokenizer.sh",492,0,"",shellscript,selection_command +1300,3311062,"slurm/dev/franz/berlin/atari/tokenizer.sh",508,0,"",shellscript,selection_command +1301,3311095,"slurm/dev/franz/berlin/atari/tokenizer.sh",601,0,"",shellscript,selection_command +1302,3311126,"slurm/dev/franz/berlin/atari/tokenizer.sh",648,0,"",shellscript,selection_command +1303,3311163,"slurm/dev/franz/berlin/atari/tokenizer.sh",698,0,"",shellscript,selection_command +1304,3311195,"slurm/dev/franz/berlin/atari/tokenizer.sh",733,0,"",shellscript,selection_command +1305,3311226,"slurm/dev/franz/berlin/atari/tokenizer.sh",744,0,"",shellscript,selection_command +1306,3311260,"slurm/dev/franz/berlin/atari/tokenizer.sh",746,0,"",shellscript,selection_command +1307,3311292,"slurm/dev/franz/berlin/atari/tokenizer.sh",747,0,"",shellscript,selection_command +1308,3311330,"slurm/dev/franz/berlin/atari/tokenizer.sh",772,0,"",shellscript,selection_command +1309,3311360,"slurm/dev/franz/berlin/atari/tokenizer.sh",773,0,"",shellscript,selection_command +1310,3311397,"slurm/dev/franz/berlin/atari/tokenizer.sh",818,0,"",shellscript,selection_command +1311,3311426,"slurm/dev/franz/berlin/atari/tokenizer.sh",911,0,"",shellscript,selection_command +1312,3311464,"slurm/dev/franz/berlin/atari/tokenizer.sh",912,0,"",shellscript,selection_command +1313,3311493,"slurm/dev/franz/berlin/atari/tokenizer.sh",946,0,"",shellscript,selection_command +1314,3311530,"slurm/dev/franz/berlin/atari/tokenizer.sh",988,0,"",shellscript,selection_command +1315,3311559,"slurm/dev/franz/berlin/atari/tokenizer.sh",993,0,"",shellscript,selection_command +1316,3311596,"slurm/dev/franz/berlin/atari/tokenizer.sh",1032,0,"",shellscript,selection_command +1317,3311625,"slurm/dev/franz/berlin/atari/tokenizer.sh",1035,0,"",shellscript,selection_command +1318,3311760,"slurm/dev/franz/berlin/atari/tokenizer.sh",1032,0,"",shellscript,selection_command 
+1319,3312007,"slurm/dev/franz/berlin/atari/tokenizer.sh",993,0,"",shellscript,selection_command +1320,3312042,"slurm/dev/franz/berlin/atari/tokenizer.sh",988,0,"",shellscript,selection_command +1321,3312069,"slurm/dev/franz/berlin/atari/tokenizer.sh",946,0,"",shellscript,selection_command +1322,3312114,"slurm/dev/franz/berlin/atari/tokenizer.sh",988,0,"",shellscript,selection_command +1323,3312374,"slurm/dev/franz/berlin/atari/tokenizer.sh",993,0,"",shellscript,selection_command +1324,3312401,"slurm/dev/franz/berlin/atari/tokenizer.sh",1032,0,"",shellscript,selection_command +1325,3312436,"slurm/dev/franz/berlin/atari/tokenizer.sh",1035,0,"",shellscript,selection_command +1326,3312466,"slurm/dev/franz/berlin/atari/tokenizer.sh",1036,0,"",shellscript,selection_command +1327,3312503,"slurm/dev/franz/berlin/atari/tokenizer.sh",1060,0,"",shellscript,selection_command +1328,3312535,"slurm/dev/franz/berlin/atari/tokenizer.sh",1067,0,"",shellscript,selection_command +1329,3312569,"slurm/dev/franz/berlin/atari/tokenizer.sh",1068,0,"",shellscript,selection_command +1330,3312606,"slurm/dev/franz/berlin/atari/tokenizer.sh",1094,0,"",shellscript,selection_command +1331,3312634,"slurm/dev/franz/berlin/atari/tokenizer.sh",1095,0,"",shellscript,selection_command +1332,3312673,"slurm/dev/franz/berlin/atari/tokenizer.sh",1120,0,"",shellscript,selection_command +1333,3312708,"slurm/dev/franz/berlin/atari/tokenizer.sh",1147,0,"",shellscript,selection_command +1334,3312734,"slurm/dev/franz/berlin/atari/tokenizer.sh",1148,0,"",shellscript,selection_command +1335,3312772,"slurm/dev/franz/berlin/atari/tokenizer.sh",1215,0,"",shellscript,selection_command +1336,3312798,"slurm/dev/franz/berlin/atari/tokenizer.sh",1247,0,"",shellscript,selection_command +1337,3312836,"slurm/dev/franz/berlin/atari/tokenizer.sh",1334,0,"",shellscript,selection_command +1338,3312871,"slurm/dev/franz/berlin/atari/tokenizer.sh",1345,0,"",shellscript,selection_command +1339,3312900,"slurm/dev/franz/berlin/atari/tokenizer.sh",1348,0,"",shellscript,selection_command +1340,3312937,"slurm/dev/franz/berlin/atari/tokenizer.sh",1349,0,"",shellscript,selection_command +1341,3312968,"slurm/dev/franz/berlin/atari/tokenizer.sh",1396,0,"",shellscript,selection_command +1342,3312999,"slurm/dev/franz/berlin/atari/tokenizer.sh",1397,0,"",shellscript,selection_command +1343,3313033,"slurm/dev/franz/berlin/atari/tokenizer.sh",1480,0,"",shellscript,selection_command +1344,3313066,"slurm/dev/franz/berlin/atari/tokenizer.sh",1605,0,"",shellscript,selection_command +1345,3313102,"slurm/dev/franz/berlin/atari/tokenizer.sh",1630,0,"",shellscript,selection_command +1346,3313137,"slurm/dev/franz/berlin/atari/tokenizer.sh",1631,0,"",shellscript,selection_command +1347,3313167,"slurm/dev/franz/berlin/atari/tokenizer.sh",1648,0,"",shellscript,selection_command +1348,3313239,"slurm/dev/franz/berlin/atari/tokenizer.sh",1631,0,"",shellscript,selection_command +1349,3313491,"slurm/dev/franz/berlin/atari/tokenizer.sh",1630,0,"",shellscript,selection_command +1350,3313523,"slurm/dev/franz/berlin/atari/tokenizer.sh",1605,0,"",shellscript,selection_command +1351,3313556,"slurm/dev/franz/berlin/atari/tokenizer.sh",1480,0,"",shellscript,selection_command +1352,3313593,"slurm/dev/franz/berlin/atari/tokenizer.sh",1397,0,"",shellscript,selection_command +1353,3313625,"slurm/dev/franz/berlin/atari/tokenizer.sh",1396,0,"",shellscript,selection_command +1354,3313791,"slurm/dev/franz/berlin/atari/tokenizer.sh",1349,0,"",shellscript,selection_command 
+1355,3314062,"slurm/dev/franz/berlin/atari/tokenizer.sh",1348,0,"",shellscript,selection_command +1356,3314296,"slurm/dev/franz/berlin/atari/tokenizer.sh",1349,0,"",shellscript,selection_command +1357,3315222,"slurm/dev/franz/berlin/atari/tokenizer.sh",1394,0,"6",shellscript,content +1358,3315222,"slurm/dev/franz/berlin/atari/tokenizer.sh",1393,1,"",shellscript,content +1359,3317686,"slurm/dev/franz/berlin/atari/tokenizer.sh",0,0,"",shellscript,selection_keyboard +1360,3318100,"slurm/dev/franz/berlin/atari/tokenizer.sh",20,0,"",shellscript,selection_command +1361,3318334,"slurm/dev/franz/berlin/atari/tokenizer.sh",21,0,"",shellscript,selection_command +1362,3318369,"slurm/dev/franz/berlin/atari/tokenizer.sh",39,0,"",shellscript,selection_command +1363,3318414,"slurm/dev/franz/berlin/atari/tokenizer.sh",67,0,"",shellscript,selection_command +1364,3318434,"slurm/dev/franz/berlin/atari/tokenizer.sh",88,0,"",shellscript,selection_command +1365,3318469,"slurm/dev/franz/berlin/atari/tokenizer.sh",112,0,"",shellscript,selection_command +1366,3318504,"slurm/dev/franz/berlin/atari/tokenizer.sh",138,0,"",shellscript,selection_command +1367,3318625,"slurm/dev/franz/berlin/atari/tokenizer.sh",232,0,"",shellscript,selection_command +1368,3318794,"slurm/dev/franz/berlin/atari/tokenizer.sh",325,0,"",shellscript,selection_command +1369,3320797,"slurm/dev/franz/berlin/atari/tokenizer.sh",326,0,"",shellscript,selection_command +1370,3321047,"slurm/dev/franz/berlin/atari/tokenizer.sh",333,0,"",shellscript,selection_command +1371,3321069,"slurm/dev/franz/berlin/atari/tokenizer.sh",335,0,"",shellscript,selection_command +1372,3321123,"slurm/dev/franz/berlin/atari/tokenizer.sh",338,0,"",shellscript,selection_command +1373,3321142,"slurm/dev/franz/berlin/atari/tokenizer.sh",339,0,"",shellscript,selection_command +1374,3321167,"slurm/dev/franz/berlin/atari/tokenizer.sh",343,0,"",shellscript,selection_command +1375,3321360,"slurm/dev/franz/berlin/atari/tokenizer.sh",344,0,"",shellscript,selection_command +1376,3321578,"slurm/dev/franz/berlin/atari/tokenizer.sh",370,0,"",shellscript,selection_command +1377,3321766,"slurm/dev/franz/berlin/atari/tokenizer.sh",371,0,"",shellscript,selection_command +1378,3323236,"slurm/dev/franz/berlin/atari/tokenizer.sh",370,0,"",shellscript,selection_command +1379,3323443,"slurm/dev/franz/berlin/atari/tokenizer.sh",344,0,"",shellscript,selection_command +1380,3323680,"slurm/dev/franz/berlin/atari/tokenizer.sh",343,0,"",shellscript,selection_command +1381,3324219,"slurm/dev/franz/berlin/atari/tokenizer.sh",344,0,"",shellscript,selection_command +1382,3324420,"slurm/dev/franz/berlin/atari/tokenizer.sh",370,0,"",shellscript,selection_command +1383,3325318,"slurm/dev/franz/berlin/atari/tokenizer.sh",344,0,"",shellscript,selection_command +1384,3326266,"slurm/dev/franz/berlin/atari/tokenizer.sh",345,0,"",shellscript,selection_command +1385,3326516,"slurm/dev/franz/berlin/atari/tokenizer.sh",346,0,"",shellscript,selection_command +1386,3326541,"slurm/dev/franz/berlin/atari/tokenizer.sh",347,0,"",shellscript,selection_command +1387,3326573,"slurm/dev/franz/berlin/atari/tokenizer.sh",348,0,"",shellscript,selection_command +1388,3326603,"slurm/dev/franz/berlin/atari/tokenizer.sh",349,0,"",shellscript,selection_command +1389,3326637,"slurm/dev/franz/berlin/atari/tokenizer.sh",350,0,"",shellscript,selection_command +1390,3326666,"slurm/dev/franz/berlin/atari/tokenizer.sh",351,0,"",shellscript,selection_command 
+1391,3326700,"slurm/dev/franz/berlin/atari/tokenizer.sh",352,0,"",shellscript,selection_command +1392,3326734,"slurm/dev/franz/berlin/atari/tokenizer.sh",353,0,"",shellscript,selection_command +1393,3326768,"slurm/dev/franz/berlin/atari/tokenizer.sh",354,0,"",shellscript,selection_command +1394,3326802,"slurm/dev/franz/berlin/atari/tokenizer.sh",355,0,"",shellscript,selection_command +1395,3326843,"slurm/dev/franz/berlin/atari/tokenizer.sh",356,0,"",shellscript,selection_command +1396,3326867,"slurm/dev/franz/berlin/atari/tokenizer.sh",357,0,"",shellscript,selection_command +1397,3326901,"slurm/dev/franz/berlin/atari/tokenizer.sh",358,0,"",shellscript,selection_command +1398,3327031,"slurm/dev/franz/berlin/atari/tokenizer.sh",359,0,"",shellscript,selection_command +1399,3327233,"slurm/dev/franz/berlin/atari/tokenizer.sh",360,0,"",shellscript,selection_command +1400,3327805,"slurm/dev/franz/berlin/atari/tokenizer.sh",361,0,"",shellscript,selection_command +1401,3328117,"slurm/dev/franz/berlin/atari/tokenizer.sh",360,0,"",shellscript,selection_command +1402,3328260,"slurm/dev/franz/berlin/atari/tokenizer.sh",360,1,"a",shellscript,selection_command +1403,3328398,"slurm/dev/franz/berlin/atari/tokenizer.sh",360,2,"al",shellscript,selection_command +1404,3328558,"slurm/dev/franz/berlin/atari/tokenizer.sh",360,3,"ali",shellscript,selection_command +1405,3328733,"slurm/dev/franz/berlin/atari/tokenizer.sh",360,4,"alie",shellscript,selection_command +1406,3328996,"slurm/dev/franz/berlin/atari/tokenizer.sh",360,5,"alien",shellscript,selection_command +1407,3329324,"slurm/dev/franz/berlin/atari/tokenizer.sh",360,6,"alien_",shellscript,selection_command +1408,3329491,"slurm/dev/franz/berlin/atari/tokenizer.sh",360,6,"",shellscript,content +1409,3332546,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",0,0,"",shellscript,tab +1410,3336818,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",901,0,"",shellscript,selection_mouse +1411,3338792,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",902,0,"",shellscript,selection_command +1412,3339676,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",902,0,"_",shellscript,content +1413,3339676,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",903,0,"",shellscript,selection_keyboard +1414,3340873,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",903,0,"lr_3e-6",shellscript,content +1415,3341232,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",909,0,"",shellscript,selection_command +1416,3348987,"slurm/dev/franz/berlin/atari/tokenizer.sh",0,0,"",shellscript,tab +1417,3350970,"slurm/dev/franz/berlin/atari/tokenizer.sh",380,0,"",shellscript,selection_command +1418,3351217,"slurm/dev/franz/berlin/atari/tokenizer.sh",417,0,"",shellscript,selection_command +1419,3351241,"slurm/dev/franz/berlin/atari/tokenizer.sh",433,0,"",shellscript,selection_command +1420,3351303,"slurm/dev/franz/berlin/atari/tokenizer.sh",469,0,"",shellscript,selection_command +1421,3351304,"slurm/dev/franz/berlin/atari/tokenizer.sh",500,0,"",shellscript,selection_command +1422,3351334,"slurm/dev/franz/berlin/atari/tokenizer.sh",537,0,"",shellscript,selection_command +1423,3351373,"slurm/dev/franz/berlin/atari/tokenizer.sh",630,0,"",shellscript,selection_command +1424,3351403,"slurm/dev/franz/berlin/atari/tokenizer.sh",677,0,"",shellscript,selection_command +1425,3351435,"slurm/dev/franz/berlin/atari/tokenizer.sh",725,0,"",shellscript,selection_command +1426,3351468,"slurm/dev/franz/berlin/atari/tokenizer.sh",736,0,"",shellscript,selection_command 
+1427,3351502,"slurm/dev/franz/berlin/atari/tokenizer.sh",738,0,"",shellscript,selection_command +1428,3351542,"slurm/dev/franz/berlin/atari/tokenizer.sh",740,0,"",shellscript,selection_command +1429,3351569,"slurm/dev/franz/berlin/atari/tokenizer.sh",764,0,"",shellscript,selection_command +1430,3351601,"slurm/dev/franz/berlin/atari/tokenizer.sh",766,0,"",shellscript,selection_command +1431,3351633,"slurm/dev/franz/berlin/atari/tokenizer.sh",802,0,"",shellscript,selection_command +1432,3351673,"slurm/dev/franz/berlin/atari/tokenizer.sh",847,0,"",shellscript,selection_command +1433,3351699,"slurm/dev/franz/berlin/atari/tokenizer.sh",905,0,"",shellscript,selection_command +1434,3351733,"slurm/dev/franz/berlin/atari/tokenizer.sh",938,0,"",shellscript,selection_command +1435,3351766,"slurm/dev/franz/berlin/atari/tokenizer.sh",975,0,"",shellscript,selection_command +1436,3351801,"slurm/dev/franz/berlin/atari/tokenizer.sh",985,0,"",shellscript,selection_command +1437,3352278,"slurm/dev/franz/berlin/atari/tokenizer.sh",1022,0,"",shellscript,selection_command +1438,3352512,"slurm/dev/franz/berlin/atari/tokenizer.sh",1027,0,"",shellscript,selection_command +1439,3352541,"slurm/dev/franz/berlin/atari/tokenizer.sh",1029,0,"",shellscript,selection_command +1440,3352574,"slurm/dev/franz/berlin/atari/tokenizer.sh",1052,0,"",shellscript,selection_command +1441,3352608,"slurm/dev/franz/berlin/atari/tokenizer.sh",1059,0,"",shellscript,selection_command +1442,3352642,"slurm/dev/franz/berlin/atari/tokenizer.sh",1061,0,"",shellscript,selection_command +1443,3355398,"slurm/dev/franz/berlin/atari/tokenizer.sh",1086,0,"",shellscript,selection_command +1444,3355639,"slurm/dev/franz/berlin/atari/tokenizer.sh",1088,0,"",shellscript,selection_command +1445,3355661,"slurm/dev/franz/berlin/atari/tokenizer.sh",1112,0,"",shellscript,selection_command +1446,3355702,"slurm/dev/franz/berlin/atari/tokenizer.sh",1139,0,"",shellscript,selection_command +1447,3355732,"slurm/dev/franz/berlin/atari/tokenizer.sh",1141,0,"",shellscript,selection_command +1448,3355766,"slurm/dev/franz/berlin/atari/tokenizer.sh",1177,0,"",shellscript,selection_command +1449,3355804,"slurm/dev/franz/berlin/atari/tokenizer.sh",1239,0,"",shellscript,selection_command +1450,3355834,"slurm/dev/franz/berlin/atari/tokenizer.sh",1276,0,"",shellscript,selection_command +1451,3355868,"slurm/dev/franz/berlin/atari/tokenizer.sh",1337,0,"",shellscript,selection_command +1452,3355900,"slurm/dev/franz/berlin/atari/tokenizer.sh",1340,0,"",shellscript,selection_command +1453,3355936,"slurm/dev/franz/berlin/atari/tokenizer.sh",1342,0,"",shellscript,selection_command +1454,3355974,"slurm/dev/franz/berlin/atari/tokenizer.sh",1378,0,"",shellscript,selection_command +1455,3355999,"slurm/dev/franz/berlin/atari/tokenizer.sh",1390,0,"",shellscript,selection_command +1456,3356034,"slurm/dev/franz/berlin/atari/tokenizer.sh",1426,0,"",shellscript,selection_command +1457,3356066,"slurm/dev/franz/berlin/atari/tokenizer.sh",1509,0,"",shellscript,selection_command +1458,3356099,"slurm/dev/franz/berlin/atari/tokenizer.sh",1622,0,"",shellscript,selection_command +1459,3356133,"slurm/dev/franz/berlin/atari/tokenizer.sh",1624,0,"",shellscript,selection_command +1460,3356165,"slurm/dev/franz/berlin/atari/tokenizer.sh",1640,0,"",shellscript,selection_command +1461,3356198,"slurm/dev/franz/berlin/atari/tokenizer.sh",1642,0,"",shellscript,selection_command +1462,3356231,"slurm/dev/franz/berlin/atari/tokenizer.sh",1678,0,"",shellscript,selection_command 
+1463,3356266,"slurm/dev/franz/berlin/atari/tokenizer.sh",1728,0,"",shellscript,selection_command +1464,3364774,"slurm/dev/franz/berlin/atari/tokenizer.sh",1787,0,"",shellscript,selection_command +1465,3365027,"slurm/dev/franz/berlin/atari/tokenizer.sh",1880,0,"",shellscript,selection_command +1466,3365060,"slurm/dev/franz/berlin/atari/tokenizer.sh",1883,0,"",shellscript,selection_command +1467,3365092,"slurm/dev/franz/berlin/atari/tokenizer.sh",1885,0,"",shellscript,selection_command +1468,3365122,"slurm/dev/franz/berlin/atari/tokenizer.sh",1921,0,"",shellscript,selection_command +1469,3365156,"slurm/dev/franz/berlin/atari/tokenizer.sh",1947,0,"",shellscript,selection_command +1470,3365558,"slurm/dev/franz/berlin/atari/tokenizer.sh",1971,0,"",shellscript,selection_command +1471,3365801,"slurm/dev/franz/berlin/atari/tokenizer.sh",1994,0,"",shellscript,selection_command +1472,3365833,"slurm/dev/franz/berlin/atari/tokenizer.sh",2019,0,"",shellscript,selection_command +1473,3365865,"slurm/dev/franz/berlin/atari/tokenizer.sh",2039,0,"",shellscript,selection_command +1474,3365901,"slurm/dev/franz/berlin/atari/tokenizer.sh",2065,0,"",shellscript,selection_command +1475,3365936,"slurm/dev/franz/berlin/atari/tokenizer.sh",2094,0,"",shellscript,selection_command +1476,3365969,"slurm/dev/franz/berlin/atari/tokenizer.sh",2112,0,"",shellscript,selection_command +1477,3366001,"slurm/dev/franz/berlin/atari/tokenizer.sh",2137,0,"",shellscript,selection_command +1478,3366972,"slurm/dev/franz/berlin/atari/tokenizer.sh",2112,0,"",shellscript,selection_command +1479,3367211,"slurm/dev/franz/berlin/atari/tokenizer.sh",2094,0,"",shellscript,selection_command +1480,3367257,"slurm/dev/franz/berlin/atari/tokenizer.sh",2065,0,"",shellscript,selection_command +1481,3367279,"slurm/dev/franz/berlin/atari/tokenizer.sh",2039,0,"",shellscript,selection_command +1482,3367308,"slurm/dev/franz/berlin/atari/tokenizer.sh",2019,0,"",shellscript,selection_command +1483,3367347,"slurm/dev/franz/berlin/atari/tokenizer.sh",1994,0,"",shellscript,selection_command +1484,3367376,"slurm/dev/franz/berlin/atari/tokenizer.sh",1971,0,"",shellscript,selection_command +1485,3367414,"slurm/dev/franz/berlin/atari/tokenizer.sh",1947,0,"",shellscript,selection_command +1486,3367445,"slurm/dev/franz/berlin/atari/tokenizer.sh",1921,0,"",shellscript,selection_command +1487,3367474,"slurm/dev/franz/berlin/atari/tokenizer.sh",1885,0,"",shellscript,selection_command +1488,3367508,"slurm/dev/franz/berlin/atari/tokenizer.sh",1883,0,"",shellscript,selection_command +1489,3367542,"slurm/dev/franz/berlin/atari/tokenizer.sh",1880,0,"",shellscript,selection_command +1490,3367575,"slurm/dev/franz/berlin/atari/tokenizer.sh",1787,0,"",shellscript,selection_command +1491,3367610,"slurm/dev/franz/berlin/atari/tokenizer.sh",1728,0,"",shellscript,selection_command +1492,3367645,"slurm/dev/franz/berlin/atari/tokenizer.sh",1678,0,"",shellscript,selection_command +1493,3367677,"slurm/dev/franz/berlin/atari/tokenizer.sh",1642,0,"",shellscript,selection_command +1494,3367711,"slurm/dev/franz/berlin/atari/tokenizer.sh",1640,0,"",shellscript,selection_command +1495,3367744,"slurm/dev/franz/berlin/atari/tokenizer.sh",1624,0,"",shellscript,selection_command +1496,3367778,"slurm/dev/franz/berlin/atari/tokenizer.sh",1622,0,"",shellscript,selection_command +1497,3367811,"slurm/dev/franz/berlin/atari/tokenizer.sh",1509,0,"",shellscript,selection_command +1498,3367844,"slurm/dev/franz/berlin/atari/tokenizer.sh",1426,0,"",shellscript,selection_command 
+1499,3367875,"slurm/dev/franz/berlin/atari/tokenizer.sh",1390,0,"",shellscript,selection_command +1500,3367908,"slurm/dev/franz/berlin/atari/tokenizer.sh",1378,0,"",shellscript,selection_command +1501,3367941,"slurm/dev/franz/berlin/atari/tokenizer.sh",1342,0,"",shellscript,selection_command +1502,3367974,"slurm/dev/franz/berlin/atari/tokenizer.sh",1340,0,"",shellscript,selection_command +1503,3368009,"slurm/dev/franz/berlin/atari/tokenizer.sh",1337,0,"",shellscript,selection_command +1504,3368047,"slurm/dev/franz/berlin/atari/tokenizer.sh",1276,0,"",shellscript,selection_command +1505,3368083,"slurm/dev/franz/berlin/atari/tokenizer.sh",1239,0,"",shellscript,selection_command +1506,3368110,"slurm/dev/franz/berlin/atari/tokenizer.sh",1177,0,"",shellscript,selection_command +1507,3368143,"slurm/dev/franz/berlin/atari/tokenizer.sh",1141,0,"",shellscript,selection_command +1508,3368177,"slurm/dev/franz/berlin/atari/tokenizer.sh",1139,0,"",shellscript,selection_command +1509,3368210,"slurm/dev/franz/berlin/atari/tokenizer.sh",1112,0,"",shellscript,selection_command +1510,3368243,"slurm/dev/franz/berlin/atari/tokenizer.sh",1088,0,"",shellscript,selection_command +1511,3368278,"slurm/dev/franz/berlin/atari/tokenizer.sh",1086,0,"",shellscript,selection_command +1512,3368310,"slurm/dev/franz/berlin/atari/tokenizer.sh",1061,0,"",shellscript,selection_command +1513,3368341,"slurm/dev/franz/berlin/atari/tokenizer.sh",1059,0,"",shellscript,selection_command +1514,3368374,"slurm/dev/franz/berlin/atari/tokenizer.sh",1052,0,"",shellscript,selection_command +1515,3368411,"slurm/dev/franz/berlin/atari/tokenizer.sh",1029,0,"",shellscript,selection_command +1516,3368442,"slurm/dev/franz/berlin/atari/tokenizer.sh",1027,0,"",shellscript,selection_command +1517,3368473,"slurm/dev/franz/berlin/atari/tokenizer.sh",1022,0,"",shellscript,selection_command +1518,3368507,"slurm/dev/franz/berlin/atari/tokenizer.sh",985,0,"",shellscript,selection_command +1519,3368541,"slurm/dev/franz/berlin/atari/tokenizer.sh",975,0,"",shellscript,selection_command +1520,3368574,"slurm/dev/franz/berlin/atari/tokenizer.sh",938,0,"",shellscript,selection_command +1521,3368607,"slurm/dev/franz/berlin/atari/tokenizer.sh",905,0,"",shellscript,selection_command +1522,3368642,"slurm/dev/franz/berlin/atari/tokenizer.sh",847,0,"",shellscript,selection_command +1523,3368674,"slurm/dev/franz/berlin/atari/tokenizer.sh",802,0,"",shellscript,selection_command +1524,3368708,"slurm/dev/franz/berlin/atari/tokenizer.sh",766,0,"",shellscript,selection_command +1525,3368742,"slurm/dev/franz/berlin/atari/tokenizer.sh",764,0,"",shellscript,selection_command +1526,3368776,"slurm/dev/franz/berlin/atari/tokenizer.sh",740,0,"",shellscript,selection_command +1527,3368808,"slurm/dev/franz/berlin/atari/tokenizer.sh",738,0,"",shellscript,selection_command +1528,3368845,"slurm/dev/franz/berlin/atari/tokenizer.sh",736,0,"",shellscript,selection_command +1529,3368878,"slurm/dev/franz/berlin/atari/tokenizer.sh",725,0,"",shellscript,selection_command +1530,3368911,"slurm/dev/franz/berlin/atari/tokenizer.sh",677,0,"",shellscript,selection_command +1531,3368944,"slurm/dev/franz/berlin/atari/tokenizer.sh",630,0,"",shellscript,selection_command +1532,3368977,"slurm/dev/franz/berlin/atari/tokenizer.sh",537,0,"",shellscript,selection_command +1533,3369010,"slurm/dev/franz/berlin/atari/tokenizer.sh",500,0,"",shellscript,selection_command +1534,3369046,"slurm/dev/franz/berlin/atari/tokenizer.sh",469,0,"",shellscript,selection_command 
+1535,3369078,"slurm/dev/franz/berlin/atari/tokenizer.sh",433,0,"",shellscript,selection_command +1536,3369112,"slurm/dev/franz/berlin/atari/tokenizer.sh",417,0,"",shellscript,selection_command +1537,3369143,"slurm/dev/franz/berlin/atari/tokenizer.sh",380,0,"",shellscript,selection_command +1538,3369174,"slurm/dev/franz/berlin/atari/tokenizer.sh",360,0,"",shellscript,selection_command +1539,3369208,"slurm/dev/franz/berlin/atari/tokenizer.sh",267,0,"",shellscript,selection_command +1540,3369242,"slurm/dev/franz/berlin/atari/tokenizer.sh",173,0,"",shellscript,selection_command +1541,3369274,"slurm/dev/franz/berlin/atari/tokenizer.sh",136,0,"",shellscript,selection_command +1542,3369308,"slurm/dev/franz/berlin/atari/tokenizer.sh",110,0,"",shellscript,selection_command +1543,3369347,"slurm/dev/franz/berlin/atari/tokenizer.sh",86,0,"",shellscript,selection_command +1544,3371944,"slurm/dev/franz/berlin/atari/tokenizer.sh",1642,0,"",shellscript,selection_keyboard +1545,3372472,"slurm/dev/franz/berlin/atari/tokenizer.sh",1625,0,"",shellscript,selection_command +1546,3372728,"slurm/dev/franz/berlin/atari/tokenizer.sh",1624,0,"",shellscript,selection_command +1547,3372754,"slurm/dev/franz/berlin/atari/tokenizer.sh",1599,0,"",shellscript,selection_command +1548,3372782,"slurm/dev/franz/berlin/atari/tokenizer.sh",1474,0,"",shellscript,selection_command +1549,3372816,"slurm/dev/franz/berlin/atari/tokenizer.sh",1391,0,"",shellscript,selection_command +1550,3372849,"slurm/dev/franz/berlin/atari/tokenizer.sh",1390,0,"",shellscript,selection_command +1551,3373017,"slurm/dev/franz/berlin/atari/tokenizer.sh",1343,0,"",shellscript,selection_command +1552,3373679,"slurm/dev/franz/berlin/atari/tokenizer.sh",1346,0,"",shellscript,selection_command +1553,3373927,"slurm/dev/franz/berlin/atari/tokenizer.sh",1348,0,"",shellscript,selection_command +1554,3373970,"slurm/dev/franz/berlin/atari/tokenizer.sh",1353,0,"",shellscript,selection_command +1555,3373983,"slurm/dev/franz/berlin/atari/tokenizer.sh",1356,0,"",shellscript,selection_command +1556,3374016,"slurm/dev/franz/berlin/atari/tokenizer.sh",1364,0,"",shellscript,selection_command +1557,3374562,"slurm/dev/franz/berlin/atari/tokenizer.sh",1365,0,"",shellscript,selection_command +1558,3374786,"slurm/dev/franz/berlin/atari/tokenizer.sh",1375,0,"",shellscript,selection_command +1559,3374962,"slurm/dev/franz/berlin/atari/tokenizer.sh",1379,0,"",shellscript,selection_command +1560,3375570,"slurm/dev/franz/berlin/atari/tokenizer.sh",1390,0,"",shellscript,selection_command +1561,3378850,"TERMINAL",0,0,"ter than the requested sequence length 16.Filtering out episode with length 10, which is shorter than the requested sequence length 16.Filtering out episode with length 4, which is shorter than the requested sequence length 16.Filtering out episode with length 12, which is shorter than the requested sequence length 16.Filtering out episode with length 14, which is shorter than the requested sequence length 16.Filtering out episode with length 15, which is shorter than the requested sequence length 16.Filtering out episode with length 4, which is shorter than the requested sequence length 16.Filtering out episode with length 8, which is shorter than the requested sequence length 16.Filtering out episode with length 14, which is shorter than the requested sequence length 16.Filtering out episode with length 9, which is shorter than the requested sequence length 16.Filtering out episode with length 12, which is shorter than the requested sequence length 16.Filtering out 
episode with length 11, which is shorter than the requested sequence length 16.Filtering out episode with length 3, which is shorter than the requested sequence length 16.Filtering out episode with length 8, which is shorter than the requested sequence length 16.Filtering out episode with length 14, which is shorter than the requested sequence length 16.Filtering out episode with length 10, which is shorter than the requested sequence length 16.Filtering out episode with length 3, which is shorter than the requested sequence length 16.Filtering out episode with length 14, which is shorter than the requested sequence length 16.Filtering out episode with length 2, which is shorter than the requested sequence length 16.Filtering out episode with length 5, which is shorter than the requested sequence length 16.Filtering out episode with length 3, which is shorter than the requested sequence length 16.Filtering out episode with length 4, which is shorter than the requested sequence length 16.Filtering out episode with length 8, which is shorter than the requested sequence length 16.Filtering out episode with length 7, which is shorter than the requested sequence length 16.Filtering out episode with length 1, which is shorter than the requested sequence length 16.Filtering out episode with length 1, which is shorter than the requested sequence length 16.Filtering out episode with length 1, which is shorter than the requested sequence length 16.Filtering out episode with length 1, which is shorter than the requested sequence length 16.Filtering out episode with length 14, which is shorter than the requested sequence length 16.Filtering out episode with length 11, which is shorter than the requested sequence length 16.Filtering out episode with length 9, which is shorter than the requested sequence length 16.Filtering out episode with length 8, which is shorter than the requested sequence length 16.Filtering out episode with length 10, which is shorter than the requested sequence length 16.Filtering out episode with length 5, which is shorter than the requested sequence length 16.Filtering out episode with length 11, which is shorter than the requested sequence length 16.Filtering out episode with length 3, which is shorter than the requested sequence length 16.Filtering out episode with length 14, which is shorter than the requested sequence length 16.Filtering out episode with length 11, which is shorter than the requested sequence length 16.Filtering out episode with length 7, which is shorter than the requested sequence length 16.Filtering out episode with length 14, which is shorter than the requested sequence length 16.Filtering out episode with length 14, which is shorter than the requested sequence length 16.Filtering out episode with length 11, which is shorter than the requested sequence length 16.Filtering out episode with length 12, which is shorter than the requested sequence length 16.Filtering out episode with length 12, which is shorter than the requested sequence length 16.Filtering out episode with length 1, which is shorter than the requested sequence length 16.Filtering out episode with length 10, which is shorter than the requested sequence length 16.Filtering out episode with length 2, which is shorter than the requested sequence length 16.Filtering out episode with length 15, which is shorter than the requested sequence length 16.Filtering out episode with length 13, which is shorter than the requested sequence length 16.Filtering out episode with length 2, which is shorter 
than the requested sequence length 16.Filtering out episode with length 14, which is shorter than the requested sequence length 16.Filtering out episode with length 2, which is shorter than the requested sequence length 16.Filtering out episode with length 9, which is shorter than the requested sequence length 16.Filtering out episode with length 5, which is shorter than the requested sequence length 16.Filtering out episode with length 10, which is shorter than the requested sequence length 16.Filtering out episode with length 2, which is shorter than the requested sequence length 16.Filtering out episode with length 7, which is shorter than the requested sequence length 16.Filtering out episode with length 1, which is shorter than the requested sequence length 16.Filtering out episode with length 14, which is shorter than the requested sequence length 16.Filtering out episode with length 9, which is shorter than the requested sequence length 16.Filtering out episode with length 7, which is shorter than the requested sequence length 16.Filtering out episode with length 10, which is shorter than the requested sequence length 16.Filtering out episode with length 10, which is shorter than the requested sequence length 16.Filtering out episode with length 1, which is shorter than the requested sequence length 16.Filtering out episode with length 9, which is shorter than the requested sequence length 16.Filtering out episode with length 15, which is shorter than the requested sequence length 16.Filtering out episode with length 3, which is shorter than the requested sequence length 16.Filtering out episode with length 13, which is shorter than the requested sequence length 16.Filtering out episode with length 7, which is shorter than the requested sequence length 16.Filtering out episode with length 11, which is shorter than the requested sequence length 16.Filtering out episode with length 9, which is shorter than the requested sequence length 16.Filtering out episode with length 8, which is shorter than the requested sequence length 16.Filtering out episode with length 1, which is shorter than the requested sequence length 16.Filtering out episode with length 3, which is shorter than the requested sequence length 16.^C[franz.srambical@hai-login2.haicore.berlin:~/jafar] $ ^C[franz.srambical@hai-login2.haicore.berlin:~/jafar] $ watch squeue[franz.srambical@hai-login2.haicore.berlin:~/jafar] $ watch squeue --me[franz.srambical@hai-login2.haicore.berlin:~/jafar] $ bash /home/franz.srambical/jafar/slurm/dev/franz/berlin/atari/spawn_tokenizers.sh Submitting tokenizer jobs for environments: alien amidar assault asterix bank_heist battle_zone boxing breakout chopper_command crazy_climber demon_attack pongSubmitted tokenizer_atari_alien_dev (ENV_NAME=alien) as job 30092Submitted tokenizer_atari_amidar_dev (ENV_NAME=amidar) as job 30093Submitted tokenizer_atari_assault_dev (ENV_NAME=assault) as job 30094Submitted tokenizer_atari_asterix_dev (ENV_NAME=asterix) as job 30095Submitted tokenizer_atari_bank_heist_dev (ENV_NAME=bank_heist) as job 30096Submitted tokenizer_atari_battle_zone_dev (ENV_NAME=battle_zone) as job 30097Submitted tokenizer_atari_boxing_dev (ENV_NAME=boxing) as job 30098Submitted tokenizer_atari_breakout_dev (ENV_NAME=breakout) as job 30099Submitted tokenizer_atari_chopper_command_dev (ENV_NAME=chopper_command) as job 30100Submitted tokenizer_atari_crazy_climber_dev (ENV_NAME=crazy_climber) as job 30101Submitted tokenizer_atari_demon_attack_dev (ENV_NAME=demon_attack) as job 
+1562,3402593,"TERMINAL",0,0,"bash",,terminal_focus
+1563,3404650,"TERMINAL",0,0,"cd ..",,terminal_command
+1564,3439514,"slurm/dev/franz/berlin/atari/tokenizer.sh",2438,0,"",shellscript,selection_keyboard
+1565,3443366,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",0,0,"",shellscript,tab
+1566,3452309,"TERMINAL",0,0,"bash /home/franz.srambical/jafar/slurm/dev/franz/berlin/atari/spawn_tokenizers.sh alien amidar assault asterix bank_heist battle_zone boxing breakout chopper_command crazy_climber demon_attack pong",,terminal_command
+1567,3452386,"TERMINAL",0,0,"]633;CSubmitting tokenizer jobs for environments: alien amidar assault asterix bank_heist battle_zone boxing breakout chopper_command crazy_climber demon_attack pong\r\nSubmitted tokenizer_atari_alien_dev_lr_3e-6 (ENV_NAME=alien) as job 30130\r\nSubmitted tokenizer_atari_amidar_dev_lr_3e-6 (ENV_NAME=amidar) as job 30131\r\nSubmitted tokenizer_atari_assault_dev_lr_3e-6 (ENV_NAME=assault) as job 30132\r\nSubmitted tokenizer_atari_asterix_dev_lr_3e-6 (ENV_NAME=asterix) as job 30133\r\nSubmitted tokenizer_atari_bank_heist_dev_lr_3e-6 (ENV_NAME=bank_heist) as job 30134\r\nSubmitted tokenizer_atari_battle_zone_dev_lr_3e-6 (ENV_NAME=battle_zone) as job 30135\r\nSubmitted tokenizer_atari_boxing_dev_lr_3e-6 (ENV_NAME=boxing) as job 30136\r\nSubmitted tokenizer_atari_breakout_dev_lr_3e-6 (ENV_NAME=breakout) as job 30137\r\nSubmitted tokenizer_atari_chopper_command_dev_lr_3e-6 (ENV_NAME=chopper_command) as job 30138\r\nSubmitted tokenizer_atari_crazy_climber_dev_lr_3e-6 (ENV_NAME=crazy_climber) as job 30139\r\nSubmitted tokenizer_atari_demon_attack_dev_lr_3e-6 (ENV_NAME=demon_attack) as job 30140\r\nSubmitted tokenizer_atari_pong_dev_lr_3e-6 (ENV_NAME=pong) as job 30141\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output
+1568,3595250,"TERMINAL",0,0,"bash",,terminal_focus
+1569,3601739,"TERMINAL",0,0,"watch squeue",,terminal_command
+1570,3601793,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 2.0s: squeue    hai-login2.haicore.berlin: Sat Oct 4 19:21:03 2025\r\nJOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30 N/A 0:00 23:59:00 (Resources)\r\n30115 xiao.liu interacti 1 64 PD 2025-10-04T18:22:16 N/A 0:00 23:59:00 (Resources)\r\n30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 N/A 0:00 23:59:00 (Resources)\r\n30110 emmanouil. interacti 1 104 PD 2025-10-04T16:31:13 N/A 0:00 30:00 (Resources)\r\n30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 7:25 23:59:00 hai003\r\n30028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 13:25:27 23:59:00 hai006\r\n30026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 13:26:57 23:59:00 hai008\r\n30025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 17:33:07 23:59:00 hai004\r\n30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33 N/A 0:00 1-00:00:00 (Resources)\r\n30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33 N/A 0:00 1-00:00:00 (Resources)\r\n30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33 N/A 0:00 1-00:00:00 (Resources)\r\n30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33 N/A 0:00 1-00:00:00 (Resources)\r\n30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33 N/A 0:00 1-00:00:00 (Resources)\r\n30136 franz.sram standard 1 8 PD 2025-10-04T19:18:33 N/A 0:00 1-00:00:00 (Resources)\r\n30135 franz.sram standard 1 8 PD 2025-10-04T19:18:33 N/A 0:00 1-00:00:00 (Resources)\r\n30131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 2:29 1-00:00:00 hai005\r\n30132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 2:29 1-00:00:00 hai004\r\n30133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 2:29 1-00:00:00 hai005\r\n30134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 2:29 1-00:00:00 hai006\r\n30130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 2:30 1-00:00:00 hai005\r\n30127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 22:06 1-00:00:00 hai005\r\n30126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 25:06 1-00:00:00 hai005\r\n30125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 36:36 1-00:00:00 hai001\r\n30124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 37:36 1-00:00:00 hai001\r\n30123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 38:36 1-00:00:00 hai001\r\n30122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 42:36 1-00:00:00 hai001\r\n30117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 45:36 1-00:00:00 hai001\r\n30118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 45:36 1-00:00:00 hai004\r\n30119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 45:36 1-00:00:00 hai005\r\n30120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 45:36 1-00:00:00 hai006\r\n30121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 45:36 1-00:00:00 hai006\r\n29972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 1:23:36 1-00:00:00 hai001\r\n30103 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:52:27 1:28:36 1-00:00:00 hai005\r\n30102 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:48:27 1:32:36 1-00:00:00 hai004\r\n30101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 1:33:06 1-00:00:00 hai004\r\n30032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 7:27:07 1-00:00:00 hai001\r\n30031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 7:30:07 1-00:00:00 hai005\r\n29993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 17:32:32 1-00:00:00 hai[002,007]\r\n30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 20:22:07 1-00:00:00 hai006\r\n30009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 20:22:07 1-00:00:00 hai001",,terminal_output
[sequences 1571-1715, and the similar records interleaved through sequence 2090 below, elided: watch-squeue screen refreshes whose ANSI cursor-control escapes were stripped during capture, leaving only unrecoverable digit runs]
+1572,3605811,"TERMINAL",0,0,"79317:011133334101040404040404040404040404010111161111",,terminal_output +1573,3607822,"TERMINAL",0,0,"9313335555622222222222222233833",,terminal_output +1574,3609828,"TERMINAL",0,0,"11355577778444444444444444554055",,terminal_output +1575,3611839,"TERMINAL",0,0,"3577799994066666666666666677277",,terminal_output +1576,3613849,"TERMINAL",0,0,"5799941414141288888888888888899499",,terminal_output +1577,3615854,"TERMINAL",0,0,"7941112133334202050505050505050505050505020212162121",,terminal_output +1578,3617863,"TERMINAL",0,0,"9413335555622222222222222233833",,terminal_output +1579,3619927,"TERMINAL",0,0,"21355577778444444444444444555055",,terminal_output +1580,3621880,"TERMINAL",0,0,"3577799995066666666666666677277",,terminal_output +1581,3623895,"TERMINAL",0,0,"5799951515151288888888888888899499",,terminal_output +1582,3625900,"TERMINAL",0,0,"795121313333430307:008:009:003:006:006:006:006:006:004:009:003:0030313163131",,terminal_output +1583,3627907,"TERMINAL",0,0,"9513335555622222222222222233833",,terminal_output +1584,3629917,"TERMINAL",0,0,"31355577778444444444444444553:0055",,terminal_output +1585,3631986,"TERMINAL",0,0,"3577799993:0066666666666666677277",,terminal_output +1586,3633933,"TERMINAL",0,0,"579993:013:013:013:01288888888888888899499",,terminal_output +1587,3635938,"TERMINAL",0,0,"796:01314133334404010101010101010101010101040414164141",,terminal_output +1588,3637958,"TERMINAL",0,0,"98:013335555622222222222222233833",,terminal_output +1589,3639973,"TERMINAL",0,0,"41355577778444444444444444551055",,terminal_output +1590,3641976,"TERMINAL",0,0,"3577799991066666666666666677277",,terminal_output +1591,3644067,"TERMINAL",0,0,"5799911111111288888888888888899499",,terminal_output +1592,3645982,"TERMINAL",0,0,"7911415133334505020202020202020202020202050515165151",,terminal_output +1593,3648033,"TERMINAL",0,0,"9113335555622222222222222233833",,terminal_output +1594,3650008,"TERMINAL",0,0,"51355577778444444444444444552055",,terminal_output +1595,3652009,"TERMINAL",0,0,"3577799992066666666666666677277",,terminal_output +1596,3654016,"TERMINAL",0,0,"5799921212121288888888888888899499",,terminal_output +1597,3656050,"TERMINAL",0,0,"7921514:01333343:006:003030303030303030303030304:008:011:0163:013:01",,terminal_output +1598,3658035,"TERMINAL",0,0,"9213335555622222222222222233833",,terminal_output +1599,3660056,"TERMINAL",0,0,"2:01355577778444444444444444553055",,terminal_output +1600,3662049,"TERMINAL",0,0,"3577799993066666666666666677277",,terminal_output +1601,3664062,"TERMINAL",0,0,"5799931313131288888888888888899499",,terminal_output +1602,3666088,"TERMINAL",0,0,"79318:011133334101040404040404040404040404010111161111",,terminal_output +1603,3668076,"TERMINAL",0,0,"9313335555622222222222222233833",,terminal_output +1604,3670087,"TERMINAL",0,0,"11355577778444444444444444554055",,terminal_output +1605,3672093,"TERMINAL",0,0,"3577799994066666666666666677277",,terminal_output +1606,3674109,"TERMINAL",0,0,"5799941414141288888888888888899499",,terminal_output +1607,3676126,"TERMINAL",0,0,"7941112133334202050505050505050505050505020212162121",,terminal_output +1608,3678123,"TERMINAL",0,0,"9413335555622222222222222233833",,terminal_output +1609,3680133,"TERMINAL",0,0,"21355577778444444444444444555055",,terminal_output +1610,3682144,"TERMINAL",0,0,"3577799995066666666666666677277",,terminal_output +1611,3684152,"TERMINAL",0,0,"5799951515151288888888888888899499",,terminal_output 
+1612,3686161,"TERMINAL",0,0,"795121313333430308:009:0040:004:007:007:007:007:007:005:0030:004:0030313163131",,terminal_output +1613,3688169,"TERMINAL",0,0,"9513335555622222222222222233833",,terminal_output +1614,3690177,"TERMINAL",0,0,"31355577778444444444444444554:0055",,terminal_output +1615,3692186,"TERMINAL",0,0,"3577799994:0066666666666666677277",,terminal_output +1616,3694196,"TERMINAL",0,0,"579994:014:014:014:01288888888888888899499",,terminal_output +1617,3696205,"TERMINAL",0,0,"797:01314133334404010101010101010101010101040414164141",,terminal_output +1618,3698215,"TERMINAL",0,0,"99:013335555622222222222222233833",,terminal_output +1619,3700225,"TERMINAL",0,0,"41355577778444444444444444551055",,terminal_output +1620,3702232,"TERMINAL",0,0,"3577799991066666666666666677277",,terminal_output +1621,3704241,"TERMINAL",0,0,"5799911111111288888888888888899499",,terminal_output +1622,3706251,"TERMINAL",0,0,"7911415133334505020202020202020202020202050515165151",,terminal_output +1623,3708257,"TERMINAL",0,0,"9113335555622222222222222233833",,terminal_output +1624,3710267,"TERMINAL",0,0,"51355577778444444444444444552055",,terminal_output +1625,3712276,"TERMINAL",0,0,"3577799992066666666666666677277",,terminal_output +1626,3714284,"TERMINAL",0,0,"5799921212121288888888888888899499",,terminal_output +1627,3716301,"TERMINAL",0,0,"7921515:01333344:007:003030303030303030303030305:009:012:0164:014:01",,terminal_output +1628,3718303,"TERMINAL",0,0,"9213335555622222222222222233833",,terminal_output +1629,3720313,"TERMINAL",0,0,"3:01355577778444444444444444553055",,terminal_output +1630,3722320,"TERMINAL",0,0,"3577799993066666666666666677277",,terminal_output +1631,3724325,"TERMINAL",0,0,"5799931313131288888888888888899499",,terminal_output +1632,3726338,"TERMINAL",0,0,"79319:011133334101040404040404040404040404010111161111",,terminal_output +1633,3728345,"TERMINAL",0,0,"9313335555622222222222222233833",,terminal_output +1634,3730354,"TERMINAL",0,0,"11355577778444444444444444554055",,terminal_output +1635,3732364,"TERMINAL",0,0,"\r3577716 R2025-10-04T19:23:13hai0059999406666666666662484:46417:5\t5:16032 alfred.ngu1:53:391:53:56 7:29:17110:30032:17529993 nishant.k2 3843T18:29:1801:48:31 17:34:42[002,007]30008 alfred.ng1 1622:58:503T22:58:56 20:24:170069471",,terminal_output +1636,3734372,"TERMINAL",0,0,"5799924141414128888888888888899499",,terminal_output +1637,3736382,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output +1638,3738389,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +1639,3740408,"TERMINAL",0,0,"21355587777844444444444444555055",,terminal_output +1640,3742406,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +1641,3744486,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +1642,3746468,"TERMINAL",0,0,"7951213143333430309:0040:001:005:008:008:008:008:008:006:005:0030313163131",,terminal_output +1643,3748433,"TERMINAL",0,0,"9513336555562222222222222233833",,terminal_output +1644,3750441,"TERMINAL",0,0,"31355587777844444444444444555:0055",,terminal_output +1645,3752454,"TERMINAL",0,0,"357772099995:006666666666666677277",,terminal_output +1646,3754462,"TERMINAL",0,0,"5799925:015:015:015:0128888888888888899499",,terminal_output +1647,3756502,"TERMINAL",0,0,"798:0131414333344040101010101010101010101040414164141",,terminal_output +1648,3758476,"TERMINAL",0,0,"910:013336555562222222222222233833",,terminal_output +1649,3760486,"TERMINAL",0,0,"41355587777844444444444444551055",,terminal_output 
+1650,3762493,"TERMINAL",0,0,"35777309999106666666666666677277",,terminal_output +1651,3764514,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +1652,3766512,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +1653,3768523,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output +1654,3770530,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +1655,3772537,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +1656,3774554,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +1657,3776558,"TERMINAL",0,0,"7921516:014333345:008:0030303030303030303030306:0030:013:0165:015:01",,terminal_output +1658,3778564,"TERMINAL",0,0,"9213336555562222222222222233833",,terminal_output +1659,3780576,"TERMINAL",0,0,"4:01355587777844444444444444553055",,terminal_output +1660,3782590,"TERMINAL",0,0,"35777509999306666666666666677277",,terminal_output +1661,3784592,"TERMINAL",0,0,"583030:0010332323232399999999999999101051010",,terminal_output +1662,3786611,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +1663,3788608,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +1664,3790619,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output +1665,3792628,"TERMINAL",0,0,"468881:0140404040417777777777777788388",,terminal_output +1666,3794699,"TERMINAL",0,0,"6840102032222399999999999999202052020",,terminal_output +1667,3796642,"TERMINAL",0,0,"840222544445212151515151515151515151512122722",,terminal_output +1668,3798652,"TERMINAL",0,0,"2024447666673333333333333344944",,terminal_output +1669,3800660,"TERMINAL",0,0,"2466698888955555555555555665166",,terminal_output +1670,3802680,"TERMINAL",0,0,"468881150505050517777777777777788388",,terminal_output +1671,3804678,"TERMINAL",0,0,"6850203032222399999999999999303053030",,terminal_output +1672,3806784,"TERMINAL",0,0,"850222544445313140:011:012:016:019:019:019:019:019:017:016:013122722",,terminal_output +1673,3808697,"TERMINAL",0,0,"3024447666673333333333333344944",,terminal_output +1674,3810706,"TERMINAL",0,0,"2466698888955555555555555666:0166",,terminal_output +1675,3812758,"TERMINAL",0,0,"46888216:006:006:006:006:017777777777777788388",,terminal_output +1676,3814723,"TERMINAL",0,0,"689:00304032222399999999999999404054040",,terminal_output +1677,3816734,"TERMINAL",0,0,"81:00222544445414111111111111111111111114122722",,terminal_output +1678,3818744,"TERMINAL",0,0,"4024447666673333333333333344944",,terminal_output +1679,3820754,"TERMINAL",0,0,"2466698888955555555555555661166",,terminal_output +1680,3822770,"TERMINAL",0,0,"468883110101010117777777777777788388",,terminal_output +1681,3824787,"TERMINAL",0,0,"6810405032222399999999999999505055050",,terminal_output +1682,3826857,"TERMINAL",0,0,"810222544445515121212121212121212121215122722",,terminal_output +1683,3828788,"TERMINAL",0,0,"5024447666673333333333333344944",,terminal_output +1684,3830798,"TERMINAL",0,0,"2466698888955555555555555662166",,terminal_output +1685,3832820,"TERMINAL",0,0,"468884120202020217777777777777788388",,terminal_output +1686,3834837,"TERMINAL",0,0,"6820507:00322223999999999999991:004:0056:006:00",,terminal_output +1687,3836826,"TERMINAL",0,0,"8202225444456:019:0131313131313131313131317:0122722",,terminal_output +1688,3838834,"TERMINAL",0,0,"5:0024447666673333333333333344944",,terminal_output +1689,3840841,"TERMINAL",0,0,"2466698888955555555555555663166",,terminal_output 
+1690,3842858,"TERMINAL",0,0,"468885130303030317777777777777788388",,terminal_output +1691,3844874,"TERMINAL",0,0,"68301:001032222399999999999999101051010",,terminal_output +1692,3846930,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +1693,3848946,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +1694,3850889,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output +1695,3852896,"TERMINAL",0,0,"468882:0140404040417777777777777788388",,terminal_output +1696,3854944,"TERMINAL",0,0,"6840102032222399999999999999202052020",,terminal_output +1697,3856961,"TERMINAL",0,0,"840222544445212151515151515151515151512122722",,terminal_output +1698,3858922,"TERMINAL",0,0,"2024447666673333333333333344944",,terminal_output +1699,3860931,"TERMINAL",0,0,"2466698888955555555555555665166",,terminal_output +1700,3862950,"TERMINAL",0,0,"468881150505050517777777777777788388",,terminal_output +1701,3864968,"TERMINAL",0,0,"6850203032222399999999999999303053030",,terminal_output +1702,3866960,"TERMINAL",0,0,"85022254444531311:012:013:017:0150:0150:0150:0150:0150:018:017:013122722",,terminal_output +1703,3868967,"TERMINAL",0,0,"3024447666673333333333333344944",,terminal_output +1704,3870974,"TERMINAL",0,0,"2466698888955555555555555667:0166",,terminal_output +1705,3873001,"TERMINAL",0,0,"46888217:007:007:007:007:017777777777777788388",,terminal_output +1706,3875016,"TERMINAL",0,0,"6830:00304032222399999999999999404054040",,terminal_output +1707,3877031,"TERMINAL",0,0,"82:00222544445414111111111111111111111114122722",,terminal_output +1708,3879048,"TERMINAL",0,0,"4024447666673333333333333344944",,terminal_output +1709,3881038,"TERMINAL",0,0,"2466698888955555555555555661166",,terminal_output +1710,3883044,"TERMINAL",0,0,"468883110101010117777777777777788388",,terminal_output +1711,3885061,"TERMINAL",0,0,"6810405032222399999999999999505055050",,terminal_output +1712,3887061,"TERMINAL",0,0,"810222544445515121212121212121212121215122722",,terminal_output +1713,3889081,"TERMINAL",0,0,"5024447666673333333333333344944",,terminal_output +1714,3891085,"TERMINAL",0,0,"2466698888955555555555555662166",,terminal_output +1715,3893137,"TERMINAL",0,0,"468884120202020217777777777777788388",,terminal_output +1716,3893715,"slurm/dev/franz/berlin/atari/tokenizer.sh",0,0,"",shellscript,tab +1717,3894502,"slurm/dev/franz/berlin/atari/tokenizer.sh",2422,0,"",shellscript,selection_command +1718,3894745,"slurm/dev/franz/berlin/atari/tokenizer.sh",2421,0,"",shellscript,selection_command +1719,3894777,"slurm/dev/franz/berlin/atari/tokenizer.sh",2408,0,"",shellscript,selection_command +1720,3894808,"slurm/dev/franz/berlin/atari/tokenizer.sh",2407,0,"",shellscript,selection_command +1721,3894843,"slurm/dev/franz/berlin/atari/tokenizer.sh",2359,0,"",shellscript,selection_command +1722,3894873,"slurm/dev/franz/berlin/atari/tokenizer.sh",2313,0,"",shellscript,selection_command +1723,3894909,"slurm/dev/franz/berlin/atari/tokenizer.sh",2291,0,"",shellscript,selection_command +1724,3894941,"slurm/dev/franz/berlin/atari/tokenizer.sh",2265,0,"",shellscript,selection_command +1725,3894974,"slurm/dev/franz/berlin/atari/tokenizer.sh",2246,0,"",shellscript,selection_command +1726,3895007,"slurm/dev/franz/berlin/atari/tokenizer.sh",2203,0,"",shellscript,selection_command +1727,3895040,"slurm/dev/franz/berlin/atari/tokenizer.sh",2170,0,"",shellscript,selection_command +1728,3895073,"slurm/dev/franz/berlin/atari/tokenizer.sh",2139,0,"",shellscript,selection_command 
+1729,3895101,"TERMINAL",0,0,"6820508:00322223999999999999992:005:0057:007:00",,terminal_output +1730,3897108,"TERMINAL",0,0,"8202225444457:0130:0131313131313131313131318:0122722",,terminal_output +1731,3897436,"slurm/dev/franz/berlin/atari/tokenizer.sh",2114,0,"",shellscript,selection_command +1732,3897570,"slurm/dev/franz/berlin/atari/tokenizer.sh",2096,0,"",shellscript,selection_command +1733,3897968,"slurm/dev/franz/berlin/atari/tokenizer.sh",2113,0,"\n ",shellscript,content +1734,3898412,"slurm/dev/franz/berlin/atari/tokenizer.sh",2118,0,"-",shellscript,content +1735,3898412,"slurm/dev/franz/berlin/atari/tokenizer.sh",2119,0,"",shellscript,selection_keyboard +1736,3898524,"slurm/dev/franz/berlin/atari/tokenizer.sh",2119,0,"-",shellscript,content +1737,3898524,"slurm/dev/franz/berlin/atari/tokenizer.sh",2120,0,"",shellscript,selection_keyboard +1738,3899121,"TERMINAL",0,0,"6:0024447666673333333333333344944",,terminal_output +1739,3901124,"TERMINAL",0,0,"2466698888955555555555555663166",,terminal_output +1740,3901965,"jasmine/train_tokenizer.py",0,0,"",python,tab +1741,3902697,"jasmine/train_tokenizer.py",2033,0,"",python,selection_keyboard +1742,3903141,"TERMINAL",0,0,"468885130303030317777777777777788388",,terminal_output +1743,3905159,"TERMINAL",0,0,"68302:001032222399999999999999101051010",,terminal_output +1744,3906095,"jasmine/train_tokenizer.py",2029,0,"",python,selection_command +1745,3906704,"jasmine/train_tokenizer.py",2029,1,"l",python,selection_command +1746,3906876,"jasmine/train_tokenizer.py",2029,26,"log_checkpoint_keep_period",python,selection_command +1747,3907161,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +1748,3909174,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +1749,3911178,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output +1750,3913188,"TERMINAL",0,0,"468883:0140404040417777777777777788388",,terminal_output +1751,3915203,"TERMINAL",0,0,"6840102032222399999999999999202052020",,terminal_output +1752,3917273,"TERMINAL",0,0,"840222544445212151515151515151515151512122722",,terminal_output +1753,3919209,"TERMINAL",0,0,"2024447666673333333333333344944",,terminal_output +1754,3920865,"jasmine/train_tokenizer.py",2054,0,"",python,selection_command +1755,3921221,"TERMINAL",0,0,"2466698888955555555555555665166",,terminal_output +1756,3921671,"slurm/dev/franz/berlin/atari/tokenizer.sh",0,0,"",shellscript,tab +1757,3923225,"TERMINAL",0,0,"468881150505050517777777777777788388",,terminal_output +1758,3924008,"slurm/dev/franz/berlin/atari/tokenizer.sh",2119,0,"",shellscript,selection_command +1759,3924623,"slurm/dev/franz/berlin/atari/tokenizer.sh",2114,7,"",shellscript,content +1760,3924646,"slurm/dev/franz/berlin/atari/tokenizer.sh",2118,0,"",shellscript,selection_command +1761,3925235,"TERMINAL",0,0,"6850203032222399999999999999303053030",,terminal_output +1762,3927245,"TERMINAL",0,0,"85022254444531312:013:014:018:011:011:011:011:011:019:018:013122722",,terminal_output +1763,3929261,"TERMINAL",0,0,"3024447666673333333333333344944",,terminal_output +1764,3931262,"TERMINAL",0,0,"2466698888955555555555555668:0166",,terminal_output +1765,3933270,"TERMINAL",0,0,"46888218:008:008:008:008:017777777777777788388",,terminal_output +1766,3935297,"TERMINAL",0,0,"681:00304032222399999999999999404054040",,terminal_output +1767,3937350,"TERMINAL",0,0,"83:00222544445414111111111111111111111114122722",,terminal_output +1768,3939393,"TERMINAL",0,0,"4024447666673333333333333344944",,terminal_output 
+1769,3941316,"TERMINAL",0,0,"2466698888955555555555555661166",,terminal_output +1770,3943319,"TERMINAL",0,0,"468883110101010117777777777777788388",,terminal_output +1771,3945329,"TERMINAL",0,0,"6810405032222399999999999999505055050",,terminal_output +1772,3947377,"TERMINAL",0,0,"\r810222544445515121212121212121212121215122722",,terminal_output +1773,3949348,"TERMINAL",0,0,"5024447666673333333333333344944",,terminal_output +1774,3951360,"TERMINAL",0,0,"2466698888955555555555555662166",,terminal_output +1775,3953366,"TERMINAL",0,0,"468884120202020217777777777777788388",,terminal_output +1776,3955409,"TERMINAL",0,0,"6820509:00322223999999999999993:006:0058:008:00",,terminal_output +1777,3957383,"TERMINAL",0,0,"8202225444458:011:0131313131313131313131319:0122722",,terminal_output +1778,3959465,"TERMINAL",0,0,"7:0024447666673333333333333344944",,terminal_output +1779,3961401,"TERMINAL",0,0,"2466698888955555555555555663166",,terminal_output +1780,3963407,"TERMINAL",0,0,"468885130303030317777777777777788388",,terminal_output +1781,3965420,"TERMINAL",0,0,"68303:001032222399999999999999101051010",,terminal_output +1782,3967447,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +1783,3969438,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +1784,3971447,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output +1785,3973458,"TERMINAL",0,0,"468884:0140404040417777777777777788388",,terminal_output +1786,3975477,"TERMINAL",0,0,"6840102032222399999999999999202052020",,terminal_output +1787,3977472,"TERMINAL",0,0,"840222544445212151515151515151515151512122722",,terminal_output +1788,3979487,"TERMINAL",0,0,"2024447666673333333333333344944",,terminal_output +1789,3981494,"TERMINAL",0,0,"2466698888955555555555555665166",,terminal_output +1790,3983503,"TERMINAL",0,0,"468881150505050517777777777777788388",,terminal_output +1791,3985517,"TERMINAL",0,0,"6850203032222399999999999999303053030",,terminal_output +1792,3987520,"TERMINAL",0,0,"85022254444531313:014:015:019:012:012:012:012:012:0130:019:013122722",,terminal_output +1793,3989529,"TERMINAL",0,0,"3024447666673333333333333344944",,terminal_output +1794,3991540,"TERMINAL",0,0,"2466698888955555555555555669:0166",,terminal_output +1795,3993548,"TERMINAL",0,0,"46888219:009:009:009:009:017777777777777788388",,terminal_output +1796,3995555,"TERMINAL",0,0,"682:00304032222399999999999999404054040",,terminal_output +1797,3997569,"TERMINAL",0,0,"84:00222544445414111111111111111111111114122722",,terminal_output +1798,3999578,"TERMINAL",0,0,"4024447666673333333333333344944",,terminal_output +1799,4001589,"TERMINAL",0,0,"257773099991066666666666666771277",,terminal_output +1800,4003593,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +1801,4005602,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +1802,4007612,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output +1803,4009620,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +1804,4011628,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +1805,4013636,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +1806,4015642,"TERMINAL",0,0,"79215140:014333349:002:00303030303030303030303040:004:017:0169:019:01",,terminal_output +1807,4017654,"TERMINAL",0,0,"9213336555562222222222222233833",,terminal_output +1808,4019661,"TERMINAL",0,0,"8:01355587777844444444444444553055",,terminal_output 
+1809,4021674,"TERMINAL",0,0,"35777509999306666666666666677277",,terminal_output +1810,4023681,"TERMINAL",0,0,"5799923131313128888888888888899499",,terminal_output +1811,4025694,"TERMINAL",0,0,"79314:01114333341010404040404040404040404010111161111",,terminal_output +1812,4027696,"TERMINAL",0,0,"9313336555562222222222222233833",,terminal_output +1813,4029708,"TERMINAL",0,0,"11355587777844444444444444554055",,terminal_output +1814,4031720,"TERMINAL",0,0,"357775:009999406666666666666677277",,terminal_output +1815,4033726,"TERMINAL",0,0,"5799924141414128888888888888899499",,terminal_output +1816,4035733,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output +1817,4037742,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +1818,4039751,"TERMINAL",0,0,"21355587777844444444444444555055",,terminal_output +1819,4041760,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +1820,4043765,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +1821,4045778,"TERMINAL",0,0,"7951213143333430304:005:006:0050:003:003:003:003:003:001:0040:0030313163131",,terminal_output +1822,4047787,"TERMINAL",0,0,"9513336555562222222222222233833",,terminal_output +1823,4049795,"TERMINAL",0,0,"313555877778444444444444445540:0055",,terminal_output +1824,4051804,"TERMINAL",0,0,"3577720999910:006666666666666677277",,terminal_output +1825,4053817,"TERMINAL",0,0,"57999210:0110:0110:0110:0128888888888888899499",,terminal_output +1826,4055827,"TERMINAL",0,0,"793:0131414333344040101010101010101010101040414164141",,terminal_output +1827,4057835,"TERMINAL",0,0,"95:013336555562222222222222233833",,terminal_output +1828,4059850,"TERMINAL",0,0,"41355587777844444444444444551055",,terminal_output +1829,4061855,"TERMINAL",0,0,"35777309999106666666666666677277",,terminal_output +1830,4063871,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +1831,4065872,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +1832,4067904,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output +1833,4069890,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +1834,4071998,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +1835,4073907,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +1836,4075916,"TERMINAL",0,0,"7921511:0143333430:003:0030303030303030303030301:005:018:01630:0130:01",,terminal_output +1837,4077925,"TERMINAL",0,0,"9213336555562222222222222233833",,terminal_output +1838,4079935,"TERMINAL",0,0,"9:01355587777844444444444444553055",,terminal_output +1839,4081947,"TERMINAL",0,0,"35777509999306666666666666677277",,terminal_output +1840,4083956,"TERMINAL",0,0,"5799923131313128888888888888899499",,terminal_output +1841,4085962,"TERMINAL",0,0,"79315:01114333341010404040404040404040404010111161111",,terminal_output +1842,4087977,"TERMINAL",0,0,"9313336555562222222222222233833",,terminal_output +1843,4089992,"TERMINAL",0,0,"11355587777844444444444444554055",,terminal_output +1844,4091988,"TERMINAL",0,0,"357776:009999406666666666666677277",,terminal_output +1845,4093996,"TERMINAL",0,0,"5799924141414128888888888888899499",,terminal_output +1846,4096005,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output +1847,4098025,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +1848,4100027,"TERMINAL",0,0,"21355587777844444444444444555055",,terminal_output 
+1849,4102035,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +1850,4104041,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +1851,4106108,"TERMINAL",0,0,"7951213143333430305:006:007:001:004:004:004:004:004:002:001:0030313163131",,terminal_output +1852,4108064,"TERMINAL",0,0,"9513336555562222222222222233833",,terminal_output +1853,4110093,"TERMINAL",0,0,"31355587777844444444444444551:0055",,terminal_output +1854,4112078,"TERMINAL",0,0,"357772099991:006666666666666677277",,terminal_output +1855,4114096,"TERMINAL",0,0,"5799921:011:011:011:0128888888888888899499",,terminal_output +1856,4116136,"TERMINAL",0,0,"794:0131414333344040101010101010101010101040414164141",,terminal_output +1857,4118110,"TERMINAL",0,0,"96:013336555562222222222222233833",,terminal_output +1858,4120117,"TERMINAL",0,0,"41355587777844444444444444551055",,terminal_output +1859,4122125,"TERMINAL",0,0,"35777309999106666666666666677277",,terminal_output +1860,4124137,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +1861,4126171,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +1862,4128150,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output +1863,4130164,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +1864,4132173,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +1865,4134180,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +1866,4136305,"TERMINAL",0,0,"7921512:014333341:004:0030303030303030303030302:006:019:0161:011:01",,terminal_output +1867,4138198,"TERMINAL",0,0,"9213336555562222222222222233833",,terminal_output +1868,4140300,"TERMINAL",0,0,"30:01355587777844444444444444553055",,terminal_output +1869,4142217,"TERMINAL",0,0,"35777509999306666666666666677277",,terminal_output +1870,4144225,"TERMINAL",0,0,"5799923131313128888888888888899499",,terminal_output +1871,4146237,"TERMINAL",0,0,"79316:01114333341010404040404040404040404010111161111",,terminal_output +1872,4148241,"TERMINAL",0,0,"9313336555562222222222222233833",,terminal_output +1873,4150249,"TERMINAL",0,0,"11355587777844444444444444554055",,terminal_output +1874,4152260,"TERMINAL",0,0,"357777:009999406666666666666677277",,terminal_output +1875,4154271,"TERMINAL",0,0,"5799924141414128888888888888899499",,terminal_output +1876,4156377,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output +1877,4158286,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +1878,4160293,"TERMINAL",0,0,"21355587777844444444444444555055",,terminal_output +1879,4162305,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +1880,4164312,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +1881,4166317,"TERMINAL",0,0,"7951213143333430306:007:008:002:005:005:005:005:005:003:002:0030313163131",,terminal_output +1882,4168330,"TERMINAL",0,0,"9513336555562222222222222233833",,terminal_output +1883,4170413,"TERMINAL",0,0,"31355587777844444444444444552:0055",,terminal_output +1884,4172345,"TERMINAL",0,0,"357772099992:006666666666666677277",,terminal_output +1885,4174365,"TERMINAL",0,0,"5799922:012:012:012:0128888888888888899499",,terminal_output +1886,4176450,"TERMINAL",0,0,"795:0131414333344040101010101010101010101040414164141",,terminal_output +1887,4178368,"TERMINAL",0,0,"97:013336555562222222222222233833",,terminal_output +1888,4180378,"TERMINAL",0,0,"41355587777844444444444444551055",,terminal_output 
+1889,4182392,"TERMINAL",0,0,"35777309999106666666666666677277",,terminal_output +1890,4184396,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +1891,4186401,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +1892,4188410,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output +1893,4190428,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +1894,4192434,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +1895,4194434,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +1896,4196445,"TERMINAL",0,0,"7921513:014333342:005:0030303030303030303030303:007:0140:0162:012:01",,terminal_output +1897,4198452,"TERMINAL",0,0,"9213336555562222222222222233833",,terminal_output +1898,4200464,"TERMINAL",0,0,"1:01355587777844444444444444553055",,terminal_output +1899,4202470,"TERMINAL",0,0,"35777509999306666666666666677277",,terminal_output +1900,4204489,"TERMINAL",0,0,"5799923131313128888888888888899499",,terminal_output +1901,4206552,"TERMINAL",0,0,"79317:01114333341010404040404040404040404010111161111",,terminal_output +1902,4208574,"TERMINAL",0,0,"9313336555562222222222222233833",,terminal_output +1903,4210512,"TERMINAL",0,0,"11355587777844444444444444554055",,terminal_output +1904,4212518,"TERMINAL",0,0,"357778:009999406666666666666677277",,terminal_output +1905,4214529,"TERMINAL",0,0,"5799924141414128888888888888899499",,terminal_output +1906,4216588,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output +1907,4218545,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +1908,4220559,"TERMINAL",0,0,"21355587777844444444444444555055",,terminal_output +1909,4222564,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +1910,4224574,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +1911,4226583,"TERMINAL",0,0,"7951213143333430307:008:009:003:006:006:006:006:006:004:003:0030313163131",,terminal_output +1912,4228591,"TERMINAL",0,0,"9524447666673333333333333344944",,terminal_output +1913,4230601,"TERMINAL",0,0,"32466698888955555555555555663:0166",,terminal_output +1914,4232616,"TERMINAL",0,0,"46888213:003:003:003:003:017777777777777788388",,terminal_output +1915,4234621,"TERMINAL",0,0,"686:00304032222399999999999999404054040",,terminal_output +1916,4236661,"TERMINAL",0,0,"88:00222544445414111111111111111111111114122722",,terminal_output +1917,4238682,"TERMINAL",0,0,"4024447666673333333333333344944",,terminal_output +1918,4240642,"TERMINAL",0,0,"2466698888955555555555555661166",,terminal_output +1919,4242661,"TERMINAL",0,0,"468883110101010117777777777777788388",,terminal_output +1920,4244662,"TERMINAL",0,0,"6810405032222399999999999999505055050",,terminal_output +1921,4246699,"TERMINAL",0,0,"810222544445515121212121212121212121215122722",,terminal_output +1922,4248682,"TERMINAL",0,0,"5024447666673333333333333344944",,terminal_output +1923,4250690,"TERMINAL",0,0,"2466698888955555555555555662166",,terminal_output +1924,4252697,"TERMINAL",0,0,"468884120202020217777777777777788388",,terminal_output +1925,4254707,"TERMINAL",0,0,"6820504:00322223999999999999998:001:0053:003:00",,terminal_output +1926,4256739,"TERMINAL",0,0,"8202225444453:016:0131313131313131313131314:0122722",,terminal_output +1927,4258728,"TERMINAL",0,0,"2:0024447666673333333333333344944",,terminal_output +1928,4260735,"TERMINAL",0,0,"2466698888955555555555555663166",,terminal_output 
+1929,4262741,"TERMINAL",0,0,"468885130303030317777777777777788388",,terminal_output +1930,4264755,"TERMINAL",0,0,"68308:001032222399999999999999101051010",,terminal_output +1931,4266760,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +1932,4268772,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +1933,4270781,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output +1934,4272788,"TERMINAL",0,0,"468889:0140404040417777777777777788388",,terminal_output +1935,4274794,"TERMINAL",0,0,"6840102032222399999999999999202052020",,terminal_output +1936,4276805,"TERMINAL",0,0,"840222544445212151515151515151515151512122722",,terminal_output +1937,4278814,"TERMINAL",0,0,"2024447666673333333333333344944",,terminal_output +1938,4280829,"TERMINAL",0,0,"2466698888955555555555555665166",,terminal_output +1939,4282835,"TERMINAL",0,0,"468881150505050517777777777777788388",,terminal_output +1940,4284847,"TERMINAL",0,0,"6850203032222399999999999999303053030",,terminal_output +1941,4286939,"TERMINAL",0,0,"85022254444531318:019:0150:014:017:017:017:017:017:015:014:013122722",,terminal_output +1942,4288867,"TERMINAL",0,0,"3024447666673333333333333344944",,terminal_output +1943,4290873,"TERMINAL",0,0,"2466698888955555555555555664:0166",,terminal_output +1944,4292883,"TERMINAL",0,0,"46888214:004:004:004:004:017777777777777788388",,terminal_output +1945,4294894,"TERMINAL",0,0,"687:00304032222399999999999999404054040",,terminal_output +1946,4296900,"TERMINAL",0,0,"89:00222544445414111111111111111111111114122722",,terminal_output +1947,4298909,"TERMINAL",0,0,"4024447666673333333333333344944",,terminal_output +1948,4300920,"TERMINAL",0,0,"2466698888955555555555555661166",,terminal_output +1949,4302927,"TERMINAL",0,0,"468883110101010117777777777777788388",,terminal_output +1950,4304934,"TERMINAL",0,0,"6810405032222399999999999999505055050",,terminal_output +1951,4306946,"TERMINAL",0,0,"810222544445515121212121212121212121215122722",,terminal_output +1952,4308953,"TERMINAL",0,0,"5024447666673333333333333344944",,terminal_output +1953,4310967,"TERMINAL",0,0,"2466698888955555555555555662166",,terminal_output +1954,4312973,"TERMINAL",0,0,"468884120202020217777777777777788388",,terminal_output +1955,4314991,"TERMINAL",0,0,"6820505:00322223999999999999999:002:0054:004:00",,terminal_output +1956,4316987,"TERMINAL",0,0,"8202225444454:017:0131313131313131313131315:0122722",,terminal_output +1957,4318998,"TERMINAL",0,0,"3:0024447666673333333333333344944",,terminal_output +1958,4321007,"TERMINAL",0,0,"2466698888955555555555555663166",,terminal_output +1959,4323015,"TERMINAL",0,0,"468885130303030317777777777777788388",,terminal_output +1960,4325021,"TERMINAL",0,0,"68309:001032222399999999999999101051010",,terminal_output +1961,4327080,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +1962,4329097,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +1963,4331072,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output +1964,4333057,"TERMINAL",0,0,"4688810:0140404040417777777777777788388",,terminal_output +1965,4335066,"TERMINAL",0,0,"6840102032222399999999999999202052020",,terminal_output +1966,4337117,"TERMINAL",0,0,"840222544445212151515151515151515151512122722",,terminal_output +1967,4339163,"TERMINAL",0,0,"2024447666673333333333333344944",,terminal_output +1968,4341090,"TERMINAL",0,0,"2466698888955555555555555665166",,terminal_output +1969,4343103,"TERMINAL",0,0,"468881150505050517777777777777788388",,terminal_output 
+1970,4345219,"TERMINAL",0,0,"6850203032222399999999999999303053030",,terminal_output +1971,4347156,"TERMINAL",0,0,"85022254444531319:0150:011:015:018:018:018:018:018:016:015:013122722",,terminal_output +1972,4349169,"TERMINAL",0,0,"3024447666673333333333333344944",,terminal_output +1973,4351138,"TERMINAL",0,0,"2466698888955555555555555665:0166",,terminal_output +1974,4353148,"TERMINAL",0,0,"46888215:005:005:005:005:017777777777777788388",,terminal_output +1975,4355172,"TERMINAL",0,0,"688:00304032222399999999999999404054040",,terminal_output +1976,4357168,"TERMINAL",0,0,"820:00222544445414111111111111111111111114122722",,terminal_output +1977,4359177,"TERMINAL",0,0,"4024447666673333333333333344944",,terminal_output +1978,4361282,"TERMINAL",0,0,"2466698888955555555555555661166",,terminal_output +1979,4363196,"TERMINAL",0,0,"468883110101010117777777777777788388",,terminal_output +1980,4365201,"TERMINAL",0,0,"6810405032222399999999999999505055050",,terminal_output +1981,4367213,"TERMINAL",0,0,"810222544445515121212121212121212121215122722",,terminal_output +1982,4369222,"TERMINAL",0,0,"5024447666673333333333333344944",,terminal_output +1983,4371315,"TERMINAL",0,0,"2466698888955555555555555662166",,terminal_output +1984,4373240,"TERMINAL",0,0,"468884120202020217777777777777788388",,terminal_output +1985,4375245,"TERMINAL",0,0,"6820506:003222239999999999999940:003:0055:005:00",,terminal_output +1986,4377255,"TERMINAL",0,0,"8202225444455:018:0131313131313131313131316:0122722",,terminal_output +1987,4379304,"TERMINAL",0,0,"4:0024447666673333333333333344944",,terminal_output +1988,4381353,"TERMINAL",0,0,"2466698888955555555555555663166",,terminal_output +1989,4383276,"TERMINAL",0,0,"468885130303030317777777777777788388",,terminal_output +1990,4385291,"TERMINAL",0,0,"683040:001032222399999999999999101051010",,terminal_output +1991,4387299,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +1992,4389302,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +1993,4391386,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output +1994,4393325,"TERMINAL",0,0,"468881:0140404040417777777777777788388",,terminal_output +1995,4395334,"TERMINAL",0,0,"6840102032222399999999999999202052020",,terminal_output +1996,4397342,"TERMINAL",0,0,"840222544445212151515151515151515151512122722",,terminal_output +1997,4399356,"TERMINAL",0,0,"2024447666673333333333333344944",,terminal_output +1998,4401423,"TERMINAL",0,0,"2466698888955555555555555665166",,terminal_output +1999,4403371,"TERMINAL",0,0,"468881150505050517777777777777788388",,terminal_output +2000,4405376,"TERMINAL",0,0,"6850203032222399999999999999303053030",,terminal_output +2001,4407466,"TERMINAL",0,0,"850222544445313150:011:012:016:019:019:019:019:019:017:016:013122722",,terminal_output +2002,4409411,"TERMINAL",0,0,"3024447666673333333333333344944",,terminal_output +2003,4411402,"TERMINAL",0,0,"2466698888955555555555555666:0166",,terminal_output +2004,4413412,"TERMINAL",0,0,"46888216:006:006:006:006:017777777777777788388",,terminal_output +2005,4415428,"TERMINAL",0,0,"689:00304032222399999999999999404054040",,terminal_output +2006,4417427,"TERMINAL",0,0,"81:00222544445414111111111111111111111114122722",,terminal_output +2007,4419438,"TERMINAL",0,0,"4024447666673333333333333344944",,terminal_output +2008,4421491,"TERMINAL",0,0,"2466698888955555555555555661166",,terminal_output +2009,4423458,"TERMINAL",0,0,"468883110101010117777777777777788388",,terminal_output 
+2010,4424632,"jasmine/train_tokenizer.py",0,0,"",python,tab +2011,4425465,"TERMINAL",0,0,"6810405032222399999999999999505055050",,terminal_output +2012,4427475,"TERMINAL",0,0,"810222544445515121212121212121212121215122722",,terminal_output +2013,4429480,"TERMINAL",0,0,"5024447666673333333333333344944",,terminal_output +2014,4431490,"TERMINAL",0,0,"2466698888955555555555555662166",,terminal_output +2015,4433512,"TERMINAL",0,0,"468884120202020217777777777777788388",,terminal_output +2016,4435520,"TERMINAL",0,0,"6820507:00322223999999999999991:004:0056:006:00",,terminal_output +2017,4437517,"TERMINAL",0,0,"8202225444456:019:0131313131313131313131317:0122722",,terminal_output +2018,4439525,"TERMINAL",0,0,"5:0024447666673333333333333344944",,terminal_output +2019,4441531,"TERMINAL",0,0,"2466698888955555555555555663166",,terminal_output +2020,4443552,"TERMINAL",0,0,"468885130303030317777777777777788388",,terminal_output +2021,4445566,"TERMINAL",0,0,"68301:001032222399999999999999101051010",,terminal_output +2022,4447556,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +2023,4449609,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +2024,4451598,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output +2025,4453616,"TERMINAL",0,0,"479992:0241414141428888888888888899499",,terminal_output +2026,4455596,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output +2027,4457639,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +2028,4459611,"TERMINAL",0,0,"21355587777844444444444444555055",,terminal_output +2029,4461618,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +2030,4463628,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +2031,4465637,"TERMINAL",0,0,"7951213143333430301:002:003:007:001:00:001:00:001:00:001:00:001:00:008:007:0030313163131",,terminal_output +2032,4467646,"TERMINAL",0,0,"9513336555562222222222222233833",,terminal_output +2033,4469656,"TERMINAL",0,0,"31355587777844444444444444557:0055",,terminal_output +2034,4471771,"TERMINAL",0,0,"357772099997:006666666666666677277",,terminal_output +2035,4473717,"TERMINAL",0,0,"5799927:017:017:017:0128888888888888899499",,terminal_output +2036,4475683,"TERMINAL",0,0,"7940:0131414333344040101010101010101010101040414164141",,terminal_output +2037,4477696,"TERMINAL",0,0,"92:013336555562222222222222233833",,terminal_output +2038,4479698,"TERMINAL",0,0,"41355587777844444444444444551055",,terminal_output +2039,4481727,"TERMINAL",0,0,"35777309999106666666666666677277",,terminal_output +2040,4483716,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +2041,4485725,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +2042,4487730,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output +2043,4489749,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +2044,4491750,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +2045,4493759,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +2046,4495765,"TERMINAL",0,0,"7921518:014333347:0040:0030303030303030303030308:002:015:0167:017:01",,terminal_output +2047,4497780,"TERMINAL",0,0,"9213336555562222222222222233833",,terminal_output +2048,4499788,"TERMINAL",0,0,"6:01355587777844444444444444553055",,terminal_output +2049,4501799,"TERMINAL",0,0,"35777509999306666666666666677277",,terminal_output 
+2050,4503808,"TERMINAL",0,0,"5799923131313128888888888888899499",,terminal_output +2051,4505819,"TERMINAL",0,0,"79312:01114333341010404040404040404040404010111161111",,terminal_output +2052,4507830,"TERMINAL",0,0,"9313336555562222222222222233833",,terminal_output +2053,4509839,"TERMINAL",0,0,"11355587777844444444444444554055",,terminal_output +2054,4511850,"TERMINAL",0,0,"357773:009999406666666666666677277",,terminal_output +2055,4513856,"TERMINAL",0,0,"5799924141414128888888888888899499",,terminal_output +2056,4515867,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output +2057,4517877,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +2058,4518789,"jasmine/train_tokenizer.py",2099,0,"",python,selection_command +2059,4518845,"jasmine/train_tokenizer.py",2054,0,"",python,selection_command +2060,4519884,"TERMINAL",0,0,"21355587777844444444444444555055",,terminal_output +2061,4521894,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +2062,4523905,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +2063,4525920,"TERMINAL",0,0,"7951213143333430302:003:004:008:001:001:001:001:001:009:008:0030313163131",,terminal_output +2064,4527922,"TERMINAL",0,0,"9513336555562222222222222233833",,terminal_output +2065,4528985,"jasmine/train_tokenizer.py",2055,0,"",python,selection_command +2066,4529160,"jasmine/train_tokenizer.py",2054,0,"",python,selection_command +2067,4529929,"TERMINAL",0,0,"31355587777844444444444444558:0055",,terminal_output +2068,4531937,"TERMINAL",0,0,"357772099998:006666666666666677277",,terminal_output +2069,4533950,"TERMINAL",0,0,"5799928:018:018:018:0128888888888888899499",,terminal_output +2070,4535955,"TERMINAL",0,0,"791:0131414333344040101010101010101010101040414164141",,terminal_output +2071,4537967,"TERMINAL",0,0,"93:013336555562222222222222233833",,terminal_output +2072,4539976,"TERMINAL",0,0,"41355587777844444444444444551055",,terminal_output +2073,4542035,"TERMINAL",0,0,"35777309999106666666666666677277",,terminal_output +2074,4543991,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +2075,4546002,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +2076,4548060,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output +2077,4550021,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +2078,4552029,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +2079,4554034,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +2080,4556149,"TERMINAL",0,0,"7921519:014333348:001:0030303030303030303030309:003:016:0168:018:01",,terminal_output +2081,4558065,"TERMINAL",0,0,"9213336555562222222222222233833",,terminal_output +2082,4560062,"TERMINAL",0,0,"7:01355587777844444444444444553055",,terminal_output +2083,4562075,"TERMINAL",0,0,"35777509999306666666666666677277",,terminal_output +2084,4564081,"TERMINAL",0,0,"5799923131313128888888888888899499",,terminal_output +2085,4566087,"TERMINAL",0,0,"79313:01114333341010404040404040404040404010111161111",,terminal_output +2086,4568100,"TERMINAL",0,0,"9313336555562222222222222233833",,terminal_output +2087,4570107,"TERMINAL",0,0,"11355587777844444444444444554055",,terminal_output +2088,4572116,"TERMINAL",0,0,"357774:009999406666666666666677277",,terminal_output +2089,4574136,"TERMINAL",0,0,"5799924141414128888888888888899499",,terminal_output +2090,4576206,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output 
+2091,4578143,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +2092,4580153,"TERMINAL",0,0,"21355587777844444444444444555055",,terminal_output +2093,4582164,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +2094,4584175,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +2095,4586182,"TERMINAL",0,0,"7951213143333430303:004:005:009:002:002:002:002:002:0040:009:0030313163131",,terminal_output +2096,4588191,"TERMINAL",0,0,"9513336555562222222222222233833",,terminal_output +2097,4590204,"TERMINAL",0,0,"31355587777844444444444444559:0055",,terminal_output +2098,4592211,"TERMINAL",0,0,"357772099999:006666666666666677277",,terminal_output +2099,4594240,"TERMINAL",0,0,"5799929:019:019:019:0128888888888888899499",,terminal_output +2100,4596289,"TERMINAL",0,0,"792:0131414333344040101010101010101010101040414164141",,terminal_output +2101,4598238,"TERMINAL",0,0,"94:013336555562222222222222233833",,terminal_output +2102,4600248,"TERMINAL",0,0,"41355587777844444444444444551055",,terminal_output +2103,4602254,"TERMINAL",0,0,"35777309999106666666666666677277",,terminal_output +2104,4604271,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +2105,4606325,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +2106,4608282,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output +2107,4610299,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +2108,4612299,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +2109,4614308,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +2110,4616360,"TERMINAL",0,0,"79215150:014333349:002:00303030303030303030303050:004:017:0169:019:01",,terminal_output +2111,4618326,"TERMINAL",0,0,"9213336555562222222222222233833",,terminal_output +2112,4620354,"TERMINAL",0,0,"8:01355587777844444444444444553055",,terminal_output +2113,4622345,"TERMINAL",0,0,"35777509999306666666666666677277",,terminal_output +2114,4624351,"TERMINAL",0,0,"5799923131313128888888888888899499",,terminal_output +2115,4626362,"TERMINAL",0,0,"79314:01114333341010404040404040404040404010111161111",,terminal_output +2116,4628371,"TERMINAL",0,0,"9313336555562222222222222233833",,terminal_output +2117,4630379,"TERMINAL",0,0,"11355587777844444444444444554055",,terminal_output +2118,4632386,"TERMINAL",0,0,"357775:009999406666666666666677277",,terminal_output +2119,4634406,"TERMINAL",0,0,"5799924141414128888888888888899499",,terminal_output +2120,4636429,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output +2121,4638413,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +2122,4640429,"TERMINAL",0,0,"21355587777844444444444444555055",,terminal_output +2123,4642432,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +2124,4644440,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +2125,4646470,"TERMINAL",0,0,"7951213143333430304:005:006:001:00:003:003:003:003:003:001:0050:0030313163131",,terminal_output +2126,4648457,"TERMINAL",0,0,"9513336555562222222222222233833",,terminal_output +2127,4650467,"TERMINAL",0,0,"313555877778444444444444445550:0055",,terminal_output +2128,4652480,"TERMINAL",0,0,"3577720999920:006666666666666677277",,terminal_output +2129,4654485,"TERMINAL",0,0,"57999220:0120:0120:0120:0128888888888888899499",,terminal_output +2130,4656499,"TERMINAL",0,0,"793:0131414333344040101010101010101010101040414164141",,terminal_output 
+2131,4658508,"TERMINAL",0,0,"95:013336555562222222222222233833",,terminal_output +2132,4660517,"TERMINAL",0,0,"41355587777844444444444444551055",,terminal_output +2133,4662524,"TERMINAL",0,0,"35777309999106666666666666677277",,terminal_output +2134,4664591,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +2135,4666540,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +2136,4668553,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output +2137,4670558,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +2138,4672574,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +2139,4672783,"jasmine/train_tokenizer.py",2014,0,"",python,selection_command +2140,4673038,"jasmine/train_tokenizer.py",1983,0,"",python,selection_command +2141,4673060,"jasmine/train_tokenizer.py",1956,0,"",python,selection_command +2142,4673094,"jasmine/train_tokenizer.py",1925,0,"",python,selection_command +2143,4673120,"jasmine/train_tokenizer.py",1862,0,"",python,selection_command +2144,4673159,"jasmine/train_tokenizer.py",1828,0,"",python,selection_command +2145,4673189,"jasmine/train_tokenizer.py",1797,0,"",python,selection_command +2146,4673221,"jasmine/train_tokenizer.py",1775,0,"",python,selection_command +2147,4673260,"jasmine/train_tokenizer.py",1754,0,"",python,selection_command +2148,4673287,"jasmine/train_tokenizer.py",1733,0,"",python,selection_command +2149,4673321,"jasmine/train_tokenizer.py",1713,0,"",python,selection_command +2150,4673353,"jasmine/train_tokenizer.py",1682,0,"",python,selection_command +2151,4673391,"jasmine/train_tokenizer.py",1657,0,"",python,selection_command +2152,4673421,"jasmine/train_tokenizer.py",1623,0,"",python,selection_command +2153,4673455,"jasmine/train_tokenizer.py",1592,0,"",python,selection_command +2154,4673490,"jasmine/train_tokenizer.py",1567,0,"",python,selection_command +2155,4673519,"jasmine/train_tokenizer.py",1544,0,"",python,selection_command +2156,4673560,"jasmine/train_tokenizer.py",1520,0,"",python,selection_command +2157,4673588,"jasmine/train_tokenizer.py",1495,0,"",python,selection_command +2158,4673620,"jasmine/train_tokenizer.py",1467,0,"",python,selection_command +2159,4673655,"jasmine/train_tokenizer.py",1442,0,"",python,selection_command +2160,4673687,"jasmine/train_tokenizer.py",1418,0,"",python,selection_command +2161,4673721,"jasmine/train_tokenizer.py",1393,0,"",python,selection_command +2162,4673760,"jasmine/train_tokenizer.py",1377,0,"",python,selection_command +2163,4673793,"jasmine/train_tokenizer.py",1318,0,"",python,selection_command +2164,4673822,"jasmine/train_tokenizer.py",1287,0,"",python,selection_command +2165,4673853,"jasmine/train_tokenizer.py",1228,0,"",python,selection_command +2166,4673886,"jasmine/train_tokenizer.py",1197,0,"",python,selection_command +2167,4673921,"jasmine/train_tokenizer.py",1168,0,"",python,selection_command +2168,4673954,"jasmine/train_tokenizer.py",1141,0,"",python,selection_command +2169,4673990,"jasmine/train_tokenizer.py",1116,0,"",python,selection_command +2170,4674025,"jasmine/train_tokenizer.py",1091,0,"",python,selection_command +2171,4674054,"jasmine/train_tokenizer.py",1066,0,"",python,selection_command +2172,4674086,"jasmine/train_tokenizer.py",1040,0,"",python,selection_command +2173,4674119,"jasmine/train_tokenizer.py",1021,0,"",python,selection_command +2174,4674152,"jasmine/train_tokenizer.py",990,0,"",python,selection_command +2175,4674191,"jasmine/train_tokenizer.py",962,0,"",python,selection_command 
+2176,4674220,"jasmine/train_tokenizer.py",939,0,"",python,selection_command +2177,4674253,"jasmine/train_tokenizer.py",913,0,"",python,selection_command +2178,4674578,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +2179,4674888,"jasmine/train_tokenizer.py",939,0,"",python,selection_command +2180,4675136,"jasmine/train_tokenizer.py",962,0,"",python,selection_command +2181,4675171,"jasmine/train_tokenizer.py",990,0,"",python,selection_command +2182,4675200,"jasmine/train_tokenizer.py",1021,0,"",python,selection_command +2183,4675236,"jasmine/train_tokenizer.py",1040,0,"",python,selection_command +2184,4675272,"jasmine/train_tokenizer.py",1066,0,"",python,selection_command +2185,4675304,"jasmine/train_tokenizer.py",1091,0,"",python,selection_command +2186,4675336,"jasmine/train_tokenizer.py",1116,0,"",python,selection_command +2187,4675371,"jasmine/train_tokenizer.py",1141,0,"",python,selection_command +2188,4675404,"jasmine/train_tokenizer.py",1168,0,"",python,selection_command +2189,4675438,"jasmine/train_tokenizer.py",1197,0,"",python,selection_command +2190,4675469,"jasmine/train_tokenizer.py",1228,0,"",python,selection_command +2191,4675502,"jasmine/train_tokenizer.py",1287,0,"",python,selection_command +2192,4675534,"jasmine/train_tokenizer.py",1318,0,"",python,selection_command +2193,4675571,"jasmine/train_tokenizer.py",1377,0,"",python,selection_command +2194,4675604,"jasmine/train_tokenizer.py",1393,0,"",python,selection_command +2195,4675637,"jasmine/train_tokenizer.py",1418,0,"",python,selection_command +2196,4675672,"jasmine/train_tokenizer.py",1442,0,"",python,selection_command +2197,4675703,"jasmine/train_tokenizer.py",1467,0,"",python,selection_command +2198,4675738,"jasmine/train_tokenizer.py",1495,0,"",python,selection_command +2199,4675767,"jasmine/train_tokenizer.py",1520,0,"",python,selection_command +2200,4675805,"jasmine/train_tokenizer.py",1544,0,"",python,selection_command +2201,4675835,"jasmine/train_tokenizer.py",1567,0,"",python,selection_command +2202,4676043,"jasmine/train_tokenizer.py",1544,0,"",python,selection_command +2203,4676162,"jasmine/train_tokenizer.py",1520,0,"",python,selection_command +2204,4676347,"jasmine/train_tokenizer.py",1544,0,"",python,selection_command +2205,4676585,"TERMINAL",0,0,"72022521:0254444540:013:0131313131313131313131311:015:028:02740:0240:02",,terminal_output +2206,4676598,"jasmine/train_tokenizer.py",1567,0,"",python,selection_command +2207,4676631,"jasmine/train_tokenizer.py",1592,0,"",python,selection_command +2208,4676664,"jasmine/train_tokenizer.py",1623,0,"",python,selection_command +2209,4676696,"jasmine/train_tokenizer.py",1657,0,"",python,selection_command +2210,4676735,"jasmine/train_tokenizer.py",1682,0,"",python,selection_command +2211,4677077,"jasmine/train_tokenizer.py",1657,0,"",python,selection_command +2212,4678596,"TERMINAL",0,0,"9:0024447666673333333333333344944",,terminal_output +2213,4680602,"TERMINAL",0,0,"2466698888955555555555555663166",,terminal_output +2214,4681988,"jasmine/train_tokenizer.py",1682,0,"",python,selection_command +2215,4682613,"TERMINAL",0,0,"468885130303030317777777777777788388",,terminal_output +2216,4684624,"TERMINAL",0,0,"68305:001032222399999999999999101051010",,terminal_output +2217,4686710,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +2218,4688637,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +2219,4690692,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output 
+2220,4692750,"TERMINAL",0,0,"468886:0140404040417777777777777788388",,terminal_output +2221,4694014,"TERMINAL",0,0,"bash",,terminal_focus +2222,4694665,"TERMINAL",0,0,"6840102032222399999999999999202052020",,terminal_output +2223,4696675,"TERMINAL",0,0,"840222544445212151515151515151515151512122722",,terminal_output +2224,4698683,"TERMINAL",0,0,"2024447666673333333333333344944",,terminal_output +2225,4700691,"TERMINAL",0,0,"2466698888955555555555555665166",,terminal_output +2226,4702704,"TERMINAL",0,0,"468881150505050517777777777777788388",,terminal_output +2227,4704710,"TERMINAL",0,0,"6850203032222399999999999999303053030",,terminal_output +2228,4706721,"TERMINAL",0,0,"85022254444531315:016:017:011:014:014:014:014:014:012:011:013122722",,terminal_output +2229,4708728,"TERMINAL",0,0,"3024447666673333333333333344944",,terminal_output +2230,4710736,"TERMINAL",0,0,"2466698888955555555555555661:0166",,terminal_output +2231,4712746,"TERMINAL",0,0,"46888211:001:001:001:001:017777777777777788388",,terminal_output +2232,4714756,"TERMINAL",0,0,"684:00304032222399999999999999404054040",,terminal_output +2233,4716764,"TERMINAL",0,0,"86:00222544445414111111111111111111111114122722",,terminal_output +2234,4718773,"TERMINAL",0,0,"4024447666673333333333333344944",,terminal_output +2235,4719919,"jasmine/train_tokenizer copy.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 30_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 1000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> 
tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(model: TokenizerVQVAE, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n 
grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.ModelAndOptimizer, grain.DataLoaderIterator, grain.DataLoaderIterator\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(tokenizer, args)\n 
del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make 
val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n 
val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +2236,4720780,"TERMINAL",0,0,"2466698888955555555555555661166",,terminal_output +2237,4722790,"TERMINAL",0,0,"468883110101010117777777777777788388",,terminal_output +2238,4724806,"TERMINAL",0,0,"6810405032222399999999999999505055050",,terminal_output +2239,4726850,"TERMINAL",0,0,"810222544445515121212121212121212121215122722",,terminal_output +2240,4728815,"TERMINAL",0,0,"5024447666673333333333333344944",,terminal_output +2241,4730830,"TERMINAL",0,0,"2466698888955555555555555662166",,terminal_output +2242,4732838,"TERMINAL",0,0,"468884120202020217777777777777788388",,terminal_output +2243,4734853,"TERMINAL",0,0,"6820502:00322223999999999999996:009:0051:001:00",,terminal_output +2244,4736733,"jasmine/train_tokenizer_full_precision.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 30_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = 
True\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 1000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(model: TokenizerVQVAE, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n 
""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.ModelAndOptimizer, grain.DataLoaderIterator, grain.DataLoaderIterator\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = 
{\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(tokenizer, args)\n del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, 
inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n 
log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +2245,4736848,"TERMINAL",0,0,"8202225444451:014:0131313131313131313131312:0122722",,terminal_output +2246,4738859,"TERMINAL",0,0,"40:0024447666673333333333333344944",,terminal_output +2247,4740889,"jasmine/train_tokenizer_full_precision.py",2155,0,"",python,selection_mouse +2248,4740890,"TERMINAL",0,0,"2466698888955555555555555663166",,terminal_output +2249,4742896,"TERMINAL",0,0,"468885130303030317777777777777788388",,terminal_output +2250,4744887,"TERMINAL",0,0,"68306:001032222399999999999999101051010",,terminal_output +2251,4746688,"jasmine/train_tokenizer_full_precision.py",1682,0,"",python,selection_mouse +2252,4746898,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +2253,4748897,"jasmine/train_tokenizer_full_precision.py",1675,0,"",python,selection_command +2254,4748905,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +2255,4749944,"jasmine/train_tokenizer_full_precision.py",1675,8,"",python,content +2256,4750605,"jasmine/train_tokenizer_full_precision.py",1675,0,"f",python,content +2257,4750605,"jasmine/train_tokenizer_full_precision.py",1676,0,"",python,selection_keyboard +2258,4750681,"jasmine/train_tokenizer_full_precision.py",1676,0,"l",python,content +2259,4750681,"jasmine/train_tokenizer_full_precision.py",1677,0,"",python,selection_keyboard +2260,4750769,"jasmine/train_tokenizer_full_precision.py",1677,0,"o",python,content +2261,4750770,"jasmine/train_tokenizer_full_precision.py",1678,0,"",python,selection_keyboard +2262,4750915,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output +2263,4751001,"jasmine/train_tokenizer_full_precision.py",1678,0,"a",python,content +2264,4751001,"jasmine/train_tokenizer_full_precision.py",1679,0,"",python,selection_keyboard +2265,4751014,"jasmine/train_tokenizer_full_precision.py",1679,0,"t",python,content +2266,4751014,"jasmine/train_tokenizer_full_precision.py",1680,0,"",python,selection_keyboard 
+2267,4751735,"jasmine/train_tokenizer_full_precision.py",1680,0,"3",python,content +2268,4751735,"jasmine/train_tokenizer_full_precision.py",1681,0,"",python,selection_keyboard +2269,4751786,"jasmine/train_tokenizer_full_precision.py",1681,0,"2",python,content +2270,4751786,"jasmine/train_tokenizer_full_precision.py",1682,0,"",python,selection_keyboard +2271,4752152,"jasmine/train_tokenizer_full_precision.py",1681,0,"",python,selection_command +2272,4752925,"TERMINAL",0,0,"468887:0140404040417777777777777788388",,terminal_output +2273,4753776,"jasmine/train_tokenizer_full_precision.py",1705,0,"",python,selection_command +2274,4753886,"jasmine/train_tokenizer_full_precision.py",1681,0,"",python,selection_command +2275,4754936,"TERMINAL",0,0,"6840102032222399999999999999202052020",,terminal_output +2276,4756947,"TERMINAL",0,0,"840222544445212151515151515151515151512122722",,terminal_output +2277,4758956,"TERMINAL",0,0,"2024447666673333333333333344944",,terminal_output +2278,4760989,"TERMINAL",0,0,"2466698888955555555555555665166",,terminal_output +2279,4762969,"TERMINAL",0,0,"468881150505050517777777777777788388",,terminal_output +2280,4764980,"TERMINAL",0,0,"6850203032222399999999999999303053030",,terminal_output +2281,4766986,"TERMINAL",0,0,"85022254444531316:017:018:012:015:015:015:015:015:013:012:013122722",,terminal_output +2282,4768996,"TERMINAL",0,0,"3024447666673333333333333344944",,terminal_output +2283,4771006,"TERMINAL",0,0,"2466698888955555555555555662:0166",,terminal_output +2284,4773018,"TERMINAL",0,0,"46888212:002:002:002:002:017777777777777788388",,terminal_output +2285,4775024,"TERMINAL",0,0,"685:00304032222399999999999999404054040",,terminal_output +2286,4777037,"TERMINAL",0,0,"87:00222544445414111111111111111111111114122722",,terminal_output +2287,4779045,"TERMINAL",0,0,"4024447666673333333333333344944",,terminal_output +2288,4781054,"TERMINAL",0,0,"2466698888955555555555555661166",,terminal_output +2289,4783061,"TERMINAL",0,0,"468883110101010117777777777777788388",,terminal_output +2290,4785068,"TERMINAL",0,0,"6810405032222399999999999999505055050",,terminal_output +2291,4787080,"TERMINAL",0,0,"810222544445515121212121212121212121215122722",,terminal_output +2292,4789085,"TERMINAL",0,0,"5024447666673333333333333344944",,terminal_output +2293,4791096,"TERMINAL",0,0,"2466698888955555555555555662166",,terminal_output +2294,4793102,"TERMINAL",0,0,"468884120202020217777777777777788388",,terminal_output +2295,4795117,"TERMINAL",0,0,"6820503:00322223999999999999997:0050:0052:002:00",,terminal_output +2296,4797121,"TERMINAL",0,0,"8202225444452:015:0131313131313131313131313:0122722",,terminal_output +2297,4799133,"TERMINAL",0,0,"1:0024447666673333333333333344944",,terminal_output +2298,4801191,"TERMINAL",0,0,"2466698888955555555555555663166",,terminal_output +2299,4803151,"TERMINAL",0,0,"468885130303030317777777777777788388",,terminal_output +2300,4805165,"TERMINAL",0,0,"68307:001032222399999999999999101051010",,terminal_output +2301,4807196,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +2302,4809180,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +2303,4811187,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output +2304,4813194,"TERMINAL",0,0,"468888:0140404040417777777777777788388",,terminal_output +2305,4815224,"TERMINAL",0,0,"6840102032222399999999999999202052020",,terminal_output +2306,4817269,"TERMINAL",0,0,"840222544445212151515151515151515151512122722",,terminal_output 
+2307,4819225,"TERMINAL",0,0,"2024447666673333333333333344944",,terminal_output +2308,4821263,"TERMINAL",0,0,"2466698888955555555555555665166",,terminal_output +2309,4823241,"TERMINAL",0,0,"468881150505050517777777777777788388",,terminal_output +2310,4823857,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"",shellscript,tab +2311,4825252,"TERMINAL",0,0,"6850203032222399999999999999303053030",,terminal_output +2312,4826336,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",0,0,"",shellscript,tab +2313,4827261,"TERMINAL",0,0,"85022254444531317:018:019:013:016:016:016:016:016:014:013:013122722",,terminal_output +2314,4827312,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",945,0,"",shellscript,selection_mouse +2315,4827313,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",944,0,"",shellscript,selection_command +2316,4829266,"TERMINAL",0,0,"3024447666673333333333333344944",,terminal_output +2317,4831277,"TERMINAL",0,0,"2466698888955555555555555663:0166",,terminal_output +2318,4833286,"TERMINAL",0,0,"46888213:003:003:003:003:017777777777777788388",,terminal_output +2319,4835294,"TERMINAL",0,0,"686:00304032222399999999999999404054040",,terminal_output +2320,4837304,"TERMINAL",0,0,"88:00222544445414111111111111111111111114122722",,terminal_output +2321,4839311,"TERMINAL",0,0,"4024447666673333333333333344944",,terminal_output +2322,4841334,"TERMINAL",0,0,"2466698888955555555555555661166",,terminal_output +2323,4843329,"TERMINAL",0,0,"468883110101010117777777777777788388",,terminal_output +2324,4845337,"TERMINAL",0,0,"6810405032222399999999999999505055050",,terminal_output +2325,4847349,"TERMINAL",0,0,"810222544445515121212121212121212121215122722",,terminal_output +2326,4849363,"TERMINAL",0,0,"5024447666673333333333333344944",,terminal_output +2327,4851375,"TERMINAL",0,0,"2466698888955555555555555662166",,terminal_output +2328,4853378,"TERMINAL",0,0,"468884120202020217777777777777788388",,terminal_output +2329,4855390,"TERMINAL",0,0,"6820504:00322223999999999999998:001:0053:003:00",,terminal_output +2330,4857401,"TERMINAL",0,0,"8202225444453:016:0131313131313131313131314:0122722",,terminal_output +2331,4859408,"TERMINAL",0,0,"2:0024447666673333333333333344944",,terminal_output +2332,4861418,"TERMINAL",0,0,"2466698888955555555555555663166",,terminal_output +2333,4863432,"TERMINAL",0,0,"468885130303030317777777777777788388",,terminal_output +2334,4865436,"TERMINAL",0,0,"68308:001032222399999999999999101051010",,terminal_output +2335,4867443,"TERMINAL",0,0,"830222544445111141414141414141414141411122722",,terminal_output +2336,4869456,"TERMINAL",0,0,"1024447666673333333333333344944",,terminal_output +2337,4871461,"TERMINAL",0,0,"2466698888955555555555555664166",,terminal_output +2338,4873473,"TERMINAL",0,0,"468889:0140404040417777777777777788388",,terminal_output +2339,4875492,"TERMINAL",0,0,"6840102032222399999999999999202052020",,terminal_output +2340,4877584,"TERMINAL",0,0,"840222544445212151515151515151515151512122722",,terminal_output +2341,4879501,"TERMINAL",0,0,"2024447666673333333333333344944",,terminal_output +2342,4881522,"TERMINAL",0,0,"2466698888955555555555555665166",,terminal_output +2343,4882803,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",0,0,"",shellscript,selection_keyboard +2344,4883525,"TERMINAL",0,0,"468881150505050517777777777777788388",,terminal_output +2345,4883582,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",20,0,"",shellscript,selection_command +2346,4883740,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",21,0,"",shellscript,selection_command 
+2347,4883880,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",39,0,"",shellscript,selection_command +2348,4884195,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",1148,0,"",shellscript,selection_keyboard +2349,4884543,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",0,0,"",shellscript,selection_keyboard +2350,4885247,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",20,0,"",shellscript,selection_command +2351,4885495,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",21,0,"",shellscript,selection_command +2352,4885522,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",39,0,"",shellscript,selection_command +2353,4885529,"TERMINAL",0,0,"6850203032222399999999999999303053030",,terminal_output +2354,4885552,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",40,0,"",shellscript,selection_command +2355,4885579,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",80,0,"",shellscript,selection_command +2356,4885616,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",81,0,"",shellscript,selection_command +2357,4885668,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",158,0,"",shellscript,selection_command +2358,4885683,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",159,0,"",shellscript,selection_command +2359,4885717,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",252,0,"",shellscript,selection_command +2360,4885770,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",253,0,"",shellscript,selection_command +2361,4885787,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",277,0,"",shellscript,selection_command +2362,4885818,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",297,0,"",shellscript,selection_command +2363,4885853,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",302,0,"",shellscript,selection_command +2364,4885892,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",337,0,"",shellscript,selection_command +2365,4885923,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",393,0,"",shellscript,selection_command +2366,4885951,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",408,0,"",shellscript,selection_command +2367,4885985,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",415,0,"",shellscript,selection_command +2368,4886016,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",473,0,"",shellscript,selection_command +2369,4886049,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",574,0,"",shellscript,selection_command +2370,4886089,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",577,0,"",shellscript,selection_command +2371,4886112,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",578,0,"",shellscript,selection_command +2372,4886148,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",644,0,"",shellscript,selection_command +2373,4886178,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",645,0,"",shellscript,selection_command +2374,4887078,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",677,0,"",shellscript,selection_command +2375,4887323,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",707,0,"",shellscript,selection_command +2376,4887348,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",773,0,"",shellscript,selection_command +2377,4887376,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",837,0,"",shellscript,selection_command +2378,4887409,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",854,0,"",shellscript,selection_command +2379,4887447,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",861,0,"",shellscript,selection_command +2380,4887479,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",862,0,"",shellscript,selection_command 
+2381,4887537,"TERMINAL",0,0,"85022254444531318:019:011:00:014:017:017:017:017:017:015:014:013122722",,terminal_output +2382,4889004,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",911,0,"",shellscript,selection_command +2383,4889547,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",910,0,"",shellscript,selection_command +2384,4889547,"TERMINAL",0,0,"3024447666673333333333333344944",,terminal_output +2385,4890421,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",909,1,"",shellscript,content +2386,4890726,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",909,0,"4",shellscript,content +2387,4890726,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",910,0,"",shellscript,selection_keyboard +2388,4890956,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",909,0,"",shellscript,selection_command +2389,4891561,"TERMINAL",0,0,"2466698888955555555555555664:0166",,terminal_output +2390,4893570,"TERMINAL",0,0,"46888214:004:004:004:004:017777777777777788388",,terminal_output +2391,4894592,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",908,0,"",shellscript,selection_command +2392,4894804,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",909,0,"",shellscript,selection_command +2393,4895119,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",910,0,"",shellscript,selection_command +2394,4895346,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",909,0,"",shellscript,selection_command +2395,4895580,"TERMINAL",0,0,"687:00304032222399999999999999404054040",,terminal_output +2396,4895819,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",910,0,"",shellscript,selection_command +2397,4896148,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",910,0,"-",shellscript,content +2398,4896148,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",911,0,"",shellscript,selection_keyboard +2399,4896802,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",910,1,"",shellscript,content +2400,4897162,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",910,0,"_",shellscript,content +2401,4897162,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",911,0,"",shellscript,selection_keyboard +2402,4897598,"TERMINAL",0,0,"89:00333655556424212121212121212121212124233833",,terminal_output +2403,4898433,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",911,0,"full_precision",shellscript,content +2404,4898643,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",924,0,"",shellscript,selection_command +2405,4899593,"TERMINAL",0,0,"41355587777844444444444444551055",,terminal_output +2406,4901600,"TERMINAL",0,0,"35777309999106666666666666677277",,terminal_output +2407,4903609,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +2408,4905303,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",0,0,"",shellscript,selection_keyboard +2409,4905625,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +2410,4906663,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",1163,0,"",shellscript,selection_keyboard +2411,4907177,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",1162,0,"",shellscript,selection_command +2412,4907425,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",1161,0,"",shellscript,selection_command +2413,4907449,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",1156,0,"",shellscript,selection_command +2414,4907474,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",1094,0,"",shellscript,selection_command +2415,4907512,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",1093,0,"",shellscript,selection_command 
+2416,4907550,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",1064,0,"",shellscript,selection_command +2417,4907587,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",1025,0,"",shellscript,selection_command +2418,4907613,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",994,0,"",shellscript,selection_command +2419,4907633,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output +2420,4907643,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",961,0,"",shellscript,selection_command +2421,4907778,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",928,0,"",shellscript,selection_command +2422,4908070,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",927,0,"",shellscript,selection_command +2423,4908585,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",0,0,"",shellscript,selection_keyboard +2424,4908834,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",20,0,"",shellscript,selection_command +2425,4909087,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",21,0,"",shellscript,selection_command +2426,4909126,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",39,0,"",shellscript,selection_command +2427,4909149,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",40,0,"",shellscript,selection_command +2428,4909176,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",80,0,"",shellscript,selection_command +2429,4909210,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",81,0,"",shellscript,selection_command +2430,4909318,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",158,0,"",shellscript,selection_command +2431,4909510,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",159,0,"",shellscript,selection_command +2432,4909637,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +2433,4909818,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",252,0,"",shellscript,selection_command +2434,4910063,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",253,0,"",shellscript,selection_command +2435,4910086,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",277,0,"",shellscript,selection_command +2436,4910119,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",297,0,"",shellscript,selection_command +2437,4910158,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",302,0,"",shellscript,selection_command +2438,4910185,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",337,0,"",shellscript,selection_command +2439,4910216,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",393,0,"",shellscript,selection_command +2440,4910249,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",408,0,"",shellscript,selection_command +2441,4910282,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",415,0,"",shellscript,selection_command +2442,4910317,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",473,0,"",shellscript,selection_command +2443,4910349,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",574,0,"",shellscript,selection_command +2444,4910384,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",577,0,"",shellscript,selection_command +2445,4910418,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",578,0,"",shellscript,selection_command +2446,4910451,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",644,0,"",shellscript,selection_command +2447,4910486,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",645,0,"",shellscript,selection_command +2448,4910518,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",677,0,"",shellscript,selection_command +2449,4910551,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",707,0,"",shellscript,selection_command 
+2450,4910585,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",773,0,"",shellscript,selection_command +2451,4910618,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",837,0,"",shellscript,selection_command +2452,4910653,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",854,0,"",shellscript,selection_command +2453,4910685,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",861,0,"",shellscript,selection_command +2454,4910718,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",862,0,"",shellscript,selection_command +2455,4910885,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",927,0,"",shellscript,selection_command +2456,4911097,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",862,0,"",shellscript,selection_command +2457,4911236,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",861,0,"",shellscript,selection_command +2458,4911645,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +2459,4913665,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +2460,4915666,"TERMINAL",0,0,"7921515:014333344:007:0030303030303030303030305:009:012:0164:014:01",,terminal_output +2461,4916077,"slurm/dev/franz/berlin/atari/tokenizer.sh",0,0,"",shellscript,tab +2462,4917275,"slurm/dev/franz/berlin/atari/tokenizer.sh",1884,0,"",shellscript,selection_mouse +2463,4917275,"slurm/dev/franz/berlin/atari/tokenizer.sh",1883,0,"",shellscript,selection_command +2464,4917675,"TERMINAL",0,0,"9213336555562222222222222233833",,terminal_output +2465,4917828,"slurm/dev/franz/berlin/atari/tokenizer.sh",1885,0,"",shellscript,selection_mouse +2466,4919683,"TERMINAL",0,0,"3:01355587777844444444444444553055",,terminal_output +2467,4921696,"TERMINAL",0,0,"35777509999306666666666666677277",,terminal_output +2468,4923717,"TERMINAL",0,0,"5799923131313128888888888888899499",,terminal_output +2469,4925710,"TERMINAL",0,0,"79319:01114333341010404040404040404040404010111161111",,terminal_output +2470,4927725,"TERMINAL",0,0,"9313336555562222222222222233833",,terminal_output +2471,4929742,"TERMINAL",0,0,"11355587777844444444444444554055",,terminal_output +2472,4931741,"TERMINAL",0,0,"3577720:009999406666666666666677277",,terminal_output +2473,4933747,"TERMINAL",0,0,"5799924141414128888888888888899499",,terminal_output +2474,4935758,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output +2475,4937775,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +2476,4939772,"TERMINAL",0,0,"21355587777844444444444444555055",,terminal_output +2477,4941889,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +2478,4943791,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +2479,4945803,"TERMINAL",0,0,"7951213143333430309:001:00:001:005:008:008:008:008:008:006:005:0030313163131",,terminal_output +2480,4947821,"TERMINAL",0,0,"9513336555562222222222222233833",,terminal_output +2481,4949821,"TERMINAL",0,0,"31355587777844444444444444555:0055",,terminal_output +2482,4951926,"TERMINAL",0,0,"357772099995:006666666666666677277",,terminal_output +2483,4953835,"TERMINAL",0,0,"5799925:015:015:015:0128888888888888899499",,terminal_output +2484,4955845,"TERMINAL",0,0,"798:0131414333344040101010101010101010101040414164141",,terminal_output +2485,4957860,"TERMINAL",0,0,"930:013336555562222222222222233833",,terminal_output +2486,4959915,"TERMINAL",0,0,"41355587777844444444444444551055",,terminal_output +2487,4961961,"TERMINAL",0,0,"35777309999106666666666666677277",,terminal_output 
+2488,4963887,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +2489,4965891,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +2490,4967901,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output +2491,4969911,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +2492,4971919,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +2493,4973929,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +2494,4975934,"TERMINAL",0,0,"7921516:014333345:008:0030303030303030303030306:0050:013:0165:015:01",,terminal_output +2495,4977945,"TERMINAL",0,0,"9213336555562222222222222233833",,terminal_output +2496,4979623,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",0,0,"",shellscript,tab +2497,4979951,"TERMINAL",0,0,"4:01355587777844444444444444553055",,terminal_output +2498,4980127,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",927,0,"",shellscript,selection_mouse +2499,4981967,"TERMINAL",0,0,"35777509999306666666666666677277",,terminal_output +2500,4983049,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",910,15,"",shellscript,content +2501,4983061,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",910,0,"",shellscript,selection_command +2502,4983972,"TERMINAL",0,0,"5799923131313128888888888888899499",,terminal_output +2503,4984032,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",909,1,"6",shellscript,content +2504,4984603,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",909,1,"4",shellscript,content +2505,4985714,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",909,1,"6",shellscript,content +2506,4985977,"TERMINAL",0,0,"793150:01114333341010404040404040404040404010111161111",,terminal_output +2507,4987989,"TERMINAL",0,0,"9313336555562222222222222233833",,terminal_output +2508,4989996,"TERMINAL",0,0,"11355587777844444444444444554055",,terminal_output +2509,4992069,"TERMINAL",0,0,"357771:009999406666666666666677277",,terminal_output +2510,4994017,"TERMINAL",0,0,"5799924141414128888888888888899499",,terminal_output +2511,4996026,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output +2512,4998036,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +2513,5000046,"TERMINAL",0,0,"21355587777844444444444444555055",,terminal_output +2514,5002052,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +2515,5004063,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +2516,5006156,"TERMINAL",0,0,"7951213143333430301:00:001:002:006:009:009:009:009:009:007:006:0030313163131",,terminal_output +2517,5008078,"TERMINAL",0,0,"9513336555562222222222222233833",,terminal_output +2518,5010089,"TERMINAL",0,0,"31355587777844444444444444556:0055",,terminal_output +2519,5012097,"TERMINAL",0,0,"357772099996:006666666666666677277",,terminal_output +2520,5014106,"TERMINAL",0,0,"5799926:016:016:016:0128888888888888899499",,terminal_output +2521,5016113,"TERMINAL",0,0,"799:0131414333344040101010101010101010101040414164141",,terminal_output +2522,5018125,"TERMINAL",0,0,"91:013336555562222222222222233833",,terminal_output +2523,5020145,"TERMINAL",0,0,"41355587777844444444444444551055",,terminal_output +2524,5022141,"TERMINAL",0,0,"35777309999106666666666666677277",,terminal_output +2525,5024150,"TERMINAL",0,0,"5799921111111128888888888888899499",,terminal_output +2526,5026162,"TERMINAL",0,0,"791141514333345050202020202020202020202050515165151",,terminal_output +2527,5028166,"TERMINAL",0,0,"9113336555562222222222222233833",,terminal_output 
+2528,5030176,"TERMINAL",0,0,"51355587777844444444444444552055",,terminal_output +2529,5032184,"TERMINAL",0,0,"35777409999206666666666666677277",,terminal_output +2530,5034194,"TERMINAL",0,0,"5799922121212128888888888888899499",,terminal_output +2531,5036207,"TERMINAL",0,0,"7921517:014333346:009:0030303030303030303030307:001:014:0166:016:01",,terminal_output +2532,5038225,"TERMINAL",0,0,"9213336555562222222222222233833",,terminal_output +2533,5040224,"TERMINAL",0,0,"5:01355587777844444444444444553055",,terminal_output +2534,5042233,"TERMINAL",0,0,"35777509999306666666666666677277",,terminal_output +2535,5044240,"TERMINAL",0,0,"5799923131313128888888888888899499",,terminal_output +2536,5046247,"TERMINAL",0,0,"79311:01114333341010404040404040404040404010111161111",,terminal_output +2537,5046858,"slurm/dev/franz/berlin/atari/dynamics.sh",0,0,"",shellscript,tab +2538,5048256,"TERMINAL",0,0,"9313336555562222222222222233833",,terminal_output +2539,5050266,"TERMINAL",0,0,"11355587777844444444444444554055",,terminal_output +2540,5052278,"TERMINAL",0,0,"\r357772:009999406666666666666677277",,terminal_output +2541,5054281,"TERMINAL",0,0,"5799924141414128888888888888899499",,terminal_output +2542,5055195,"slurm/dev/franz/berlin/atari/spawn_dynamics.sh",0,0,"",shellscript,tab +2543,5056108,"slurm/dev/franz/berlin/atari/spawn_lams.sh",0,0,"#!/usr/bin/env bash\n\nset -euo pipefail\n\nREPO_ROOT=""/home/franz.srambical/jafar""\n\nDATA_ROOT=""${DATA_ROOT:-/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari}""\n\nLAM_SCRIPT=""${LAM_SCRIPT:-$REPO_ROOT/slurm/dev/franz/berlin/atari/lam.sh}""\n\nif [ ""$#"" -gt 0 ]; then\n ENV_LIST=(""$@"")\nelse\n if [ ! -d ""$DATA_ROOT"" ]; then\n echo ""DATA_ROOT does not exist: $DATA_ROOT"" >&2\n exit 1\n fi\n mapfile -t ENV_LIST < <(find ""$DATA_ROOT"" -mindepth 1 -maxdepth 1 -type d -printf '%f\n' | sort)\nfi\n\necho ""Submitting LAM jobs for environments: ${ENV_LIST[*]}""\n\nfor env in ""${ENV_LIST[@]}""; do\n env_dir=""$DATA_ROOT/$env""\n if [ ! -d ""$env_dir/train"" ] || [ ! 
-d ""$env_dir/val"" ]; then\n echo ""Skipping $env (missing train/val under $env_dir)""\n continue\n fi\n\n job_name=""lam_atari_${env}_dev""\n\n job_id=$(sbatch --parsable \\n --job-name=""$job_name"" \\n --chdir=""$REPO_ROOT"" \\n --export=ALL,ENV_NAME=""$env"" \\n ""$LAM_SCRIPT"")\n\n echo ""Submitted $job_name (ENV_NAME=$env) as job $job_id""\ndone\n\n\n",shellscript,tab +2544,5056290,"TERMINAL",0,0,"794111214333342020505050505050505050505020212162121",,terminal_output +2545,5058302,"TERMINAL",0,0,"9413336555562222222222222233833",,terminal_output +2546,5060324,"TERMINAL",0,0,"\r2130142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Resources)355587777844444444444444555055",,terminal_output +2547,5062322,"TERMINAL",0,0,"35777109999506666666666666677277",,terminal_output +2548,5064330,"TERMINAL",0,0,"5799925151515128888888888888899499",,terminal_output +2549,5066333,"TERMINAL",0,0,"7951213143333430301:002:003:007:0010:0010:0010:0010:0010:008:007:0030313163131",,terminal_output +2550,5068352,"TERMINAL",0,0,"9513336555562222222222222233833",,terminal_output +2551,5070356,"TERMINAL",0,0,"31355587777844444444444444557:0055",,terminal_output +2552,5072364,"TERMINAL",0,0,"\r357772099997:0066666666666617:536032 alfred.ngu1:53:391:53:56 7:51:37110:3004:37529993 nishant.k2 3843T18:29:1801:48:31 17:57:02[002,007]30008 alfred.ng1 1622:58:503T22:58:56 20:46:370069471",,terminal_output +2553,5074373,"TERMINAL",0,0,"5799916 R2025-10-04T19:45:35hai00427:017:017:017:012888888888888899499",,terminal_output +2554,5076388,"TERMINAL",0,0,"7950:013141243333440401010101010101010101040414164141",,terminal_output +2555,5078414,"TERMINAL",0,0,"92:013334655556222222222222233833",,terminal_output +2556,5080400,"TERMINAL",0,0,"41355568777784444444444444551055",,terminal_output +2557,5082414,"TERMINAL",0,0,"35777830999910666666666666677277",,terminal_output +2558,5084434,"TERMINAL",0,0,"57999102111111112888888888888899499",,terminal_output +2559,5086432,"TERMINAL",0,0,"79114151243333450502020202020202020202050515165151",,terminal_output +2560,5086575,"slurm/dev/franz/berlin/atari/spawn_tokenizers.sh",0,0,"",shellscript,tab +2561,5088437,"TERMINAL",0,0,"9113334655556222222222222233833",,terminal_output +2562,5090455,"TERMINAL",0,0,"51355568777784444444444444552055",,terminal_output +2563,5092456,"TERMINAL",0,0,"35777840999920666666666666677277",,terminal_output +2564,5093747,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",0,0,"#!/usr/bin/env bash\n\nset -euo pipefail\n\nREPO_ROOT=""/home/franz.srambical/jafar""\n\nDATA_ROOT=""${DATA_ROOT:-/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari}""\n\nTOKENIZER_SCRIPT=""${TOKENIZER_SCRIPT:-$REPO_ROOT/slurm/dev/franz/berlin/atari/tokenizer.sh}""\n\nif [ ""$#"" -gt 0 ]; then\n ENV_LIST=(""$@"")\nelse\n if [ ! -d ""$DATA_ROOT"" ]; then\n echo ""DATA_ROOT does not exist: $DATA_ROOT"" >&2\n exit 1\n fi\n # list immediate subdirectories (candidate env names)\n mapfile -t ENV_LIST < <(find ""$DATA_ROOT"" -mindepth 1 -maxdepth 1 -type d -printf '%f\n' | sort)\nfi\n\necho ""Submitting tokenizer jobs for environments: ${ENV_LIST[*]}""\n\nfor env in ""${ENV_LIST[@]}""; do\n env_dir=""$DATA_ROOT/$env""\n if [ ! -d ""$env_dir/train"" ] || [ ! 
-d ""$env_dir/val"" ]; then\n echo ""Skipping $env (missing train/val under $env_dir)""\n continue\n fi\n\n job_name=""tokenizer_atari_${env}_dev_lr_3e-6""\n\n job_id=$(sbatch --parsable \\n --job-name=""$job_name"" \\n --chdir=""$REPO_ROOT"" \\n --export=ALL,ENV_NAME=""$env"" \\n ""$TOKENIZER_SCRIPT"")\n\n echo ""Submitted $job_name (ENV_NAME=$env) as job $job_id""\ndone\n\n\n",shellscript,tab +2565,5094463,"TERMINAL",0,0,"57999202212121212888888888888899499",,terminal_output +2566,5096090,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_atari_dev\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\n# Require ENV_NAME to be provided via sbatch --export=ENV_NAME=...\nif [ -z ""${ENV_NAME:-}"" ]; then\n echo ""ENV_NAME must be provided (e.g., sbatch --export=ALL,ENV_NAME=alien $0)"" >&2\n exit 2\nfi\n\ntags=""atari ${ENV_NAME} tokenizer dev lr_3e-6""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_NAME}""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/${ENV_NAME}/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_tokenizer.py \\n --patch_size=16 \\n --image_height=84 \\n --image_width=84 \\n --num_steps=50_000 \\n --max_lr=3e-6 \\n --warmup_steps=5000 \\n --wsd_decay_steps=5000 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +2567,5096471,"TERMINAL",0,0,"7921518:0124333347:0050:00303030303030303030308:002:015:0167:017:01",,terminal_output +2568,5097001,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",0,0,"",shellscript,tab +2569,5098482,"TERMINAL",0,0,"9213334655556222222222222233833",,terminal_output +2570,5098813,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",899,0,"",shellscript,selection_mouse +2571,5100493,"TERMINAL",0,0,"6:01355568777784444444444444553055",,terminal_output +2572,5100924,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",238,0,"",shellscript,selection_mouse +2573,5101980,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",237,0,"",shellscript,selection_mouse +2574,5102508,"TERMINAL",0,0,"35777850999930666666666666677277",,terminal_output +2575,5103652,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",237,0,"/",shellscript,content +2576,5103653,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",238,0,"",shellscript,selection_keyboard +2577,5103773,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",237,0,"",shellscript,selection_command +2578,5104510,"TERMINAL",0,0,"57999302313131312888888888888899499",,terminal_output +2579,5105053,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",237,0,"tokenizer_lr_3e-4",shellscript,content +2580,5105469,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",253,0,"",shellscript,selection_command +2581,5106517,"TERMINAL",0,0,"79312:0111243333410104040404040404040404010111161111",,terminal_output +2582,5107124,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",270,0,"",shellscript,selection_command +2583,5107372,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",293,0,"",shellscript,selection_command +2584,5107395,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",313,0,"",shellscript,selection_command +2585,5107420,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",318,0,"",shellscript,selection_command +2586,5107458,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",353,0,"",shellscript,selection_command +2587,5107491,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",409,0,"",shellscript,selection_command +2588,5107524,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",424,0,"",shellscript,selection_command +2589,5107557,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",431,0,"",shellscript,selection_command +2590,5107591,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",489,0,"",shellscript,selection_command +2591,5107624,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",585,0,"",shellscript,selection_command +2592,5107656,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",593,0,"",shellscript,selection_command 
+2593,5107687,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",595,0,"",shellscript,selection_command +2594,5107723,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",660,0,"",shellscript,selection_command +2595,5107758,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",662,0,"",shellscript,selection_command +2596,5107791,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",693,0,"",shellscript,selection_command +2597,5107825,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",723,0,"",shellscript,selection_command +2598,5107857,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",789,0,"",shellscript,selection_command +2599,5107890,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",853,0,"",shellscript,selection_command +2600,5107950,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",870,0,"",shellscript,selection_command +2601,5108084,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",877,0,"",shellscript,selection_command +2602,5108261,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",879,0,"",shellscript,selection_command +2603,5108402,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",928,0,"",shellscript,selection_command +2604,5108539,"TERMINAL",0,0,"9313334655556222222222222233833",,terminal_output +2605,5109882,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",928,0,"4",shellscript,content +2606,5109882,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",927,1,"",shellscript,content +2607,5110543,"TERMINAL",0,0,"11355568777784444444444444554055",,terminal_output +2608,5112551,"TERMINAL",0,0,"3577783:00999940666666666666677277",,terminal_output +2609,5114560,"TERMINAL",0,0,"57999402414141412888888888888899499",,terminal_output +2610,5114633,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",0,0,"",shellscript,tab +2611,5116586,"TERMINAL",0,0,"79411121243333420205050505050505050505020212162121",,terminal_output +2612,5117237,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",363,0,"",shellscript,selection_mouse +2613,5117240,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",362,0,"",shellscript,selection_command +2614,5118322,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",363,0,"_lr_3e-4",shellscript,content +2615,5118323,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",371,0,"",shellscript,selection_command +2616,5118577,"TERMINAL",0,0,"9413334655556222222222222233833",,terminal_output +2617,5119294,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",370,0,"",shellscript,selection_command +2618,5120589,"TERMINAL",0,0,"21466679888895555555555555665166",,terminal_output +2619,5122606,"TERMINAL",0,0,"468889115050505051777777777777788388",,terminal_output +2620,5123479,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1892,0,"",shellscript,selection_keyboard +2621,5123627,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2446,0,"",shellscript,selection_keyboard +2622,5123933,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2430,0,"",shellscript,selection_command +2623,5124192,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2429,0,"",shellscript,selection_command +2624,5124215,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2416,0,"",shellscript,selection_command 
+2625,5124261,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2415,0,"",shellscript,selection_command +2626,5124282,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2367,0,"",shellscript,selection_command +2627,5124331,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2321,0,"",shellscript,selection_command +2628,5124347,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2299,0,"",shellscript,selection_command +2629,5124382,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2273,0,"",shellscript,selection_command +2630,5124412,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2254,0,"",shellscript,selection_command +2631,5124446,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2211,0,"",shellscript,selection_command +2632,5124479,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2178,0,"",shellscript,selection_command +2633,5124511,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2147,0,"",shellscript,selection_command +2634,5124574,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2122,0,"",shellscript,selection_command +2635,5124577,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2104,0,"",shellscript,selection_command +2636,5124602,"TERMINAL",0,0,"68502030513222239999999999999303053030",,terminal_output +2637,5124612,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2075,0,"",shellscript,selection_command +2638,5124644,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2049,0,"",shellscript,selection_command +2639,5124683,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2029,0,"",shellscript,selection_command +2640,5124711,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2004,0,"",shellscript,selection_command +2641,5124748,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1981,0,"",shellscript,selection_command +2642,5124778,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1957,0,"",shellscript,selection_command +2643,5124815,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1935,0,"",shellscript,selection_command +2644,5124848,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1894,0,"",shellscript,selection_command +2645,5124878,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1893,0,"",shellscript,selection_command +2646,5124913,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1890,0,"",shellscript,selection_command +2647,5124948,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1879,0,"",shellscript,selection_command +2648,5124980,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1760,0,"",shellscript,selection_command +2649,5125011,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1701,0,"",shellscript,selection_command +2650,5125043,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1651,0,"",shellscript,selection_command +2651,5125077,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1650,0,"",shellscript,selection_command +2652,5125114,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1633,0,"",shellscript,selection_command +2653,5125145,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1632,0,"",shellscript,selection_command +2654,5125190,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1607,0,"",shellscript,selection_command 
+2655,5125212,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1482,0,"",shellscript,selection_command +2656,5125250,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1399,0,"",shellscript,selection_command +2657,5125283,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1398,0,"",shellscript,selection_command +2658,5125408,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1351,0,"",shellscript,selection_command +2659,5125570,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1350,0,"",shellscript,selection_command +2660,5125876,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1351,0,"",shellscript,selection_command +2661,5126689,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1396,0,"4",shellscript,content +2662,5126689,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1395,1,"",shellscript,content +2663,5126751,"TERMINAL",0,0,"850222354444531312:013:014:018:011:011:011:011:011:019:013122722",,terminal_output +2664,5128533,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2046,0,"4",shellscript,content +2665,5128533,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2045,1,"",shellscript,content +2666,5128622,"TERMINAL",0,0,"3024445766667333333333333344944",,terminal_output +2667,5130636,"TERMINAL",0,0,"2466679888895555555555555668:0166",,terminal_output +2668,5132642,"TERMINAL",0,0,"468889218:008:008:008:008:01777777777777788388",,terminal_output +2669,5134656,"TERMINAL",0,0,"681:0030401:013222239999999999999404054040",,terminal_output +2670,5136661,"TERMINAL",0,0,"83:0022235444454141111111111111111111114122722",,terminal_output +2671,5138686,"TERMINAL",0,0,"4024445766667333333333333344944",,terminal_output +2672,5140678,"TERMINAL",0,0,"2466679888895555555555555661166",,terminal_output +2673,5142698,"TERMINAL",0,0,"468889311010101011777777777777788388",,terminal_output +2674,5143587,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",0,0,"",shellscript,tab +2675,5144701,"TERMINAL",0,0,"68104050113222239999999999999505055050",,terminal_output +2676,5146792,"TERMINAL",0,0,"81022235444455151212121212121212121215122722",,terminal_output +2677,5148741,"TERMINAL",0,0,"5024445766667333333333333344944",,terminal_output +2678,5149690,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_atari_dev_lr_3e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\n# 
Require ENV_NAME to be provided via sbatch --export=ENV_NAME=...\nif [ -z ""${ENV_NAME:-}"" ]; then\n echo ""ENV_NAME must be provided (e.g., sbatch --export=ALL,ENV_NAME=alien $0)"" >&2\n exit 2\nfi\n\ntags=""atari ${ENV_NAME} tokenizer dev lr_3e-4""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_NAME}""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/${ENV_NAME}/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_tokenizer.py \\n --patch_size=16 \\n --image_height=84 \\n --image_width=84 \\n --num_steps=50_000 \\n --max_lr=3e-4 \\n --warmup_steps=5000 \\n --wsd_decay_steps=5000 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +2679,5150323,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",0,0,"#!/usr/bin/env bash\n\nset -euo pipefail\n\nREPO_ROOT=""/home/franz.srambical/jafar""\n\nDATA_ROOT=""${DATA_ROOT:-/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari}""\n\nTOKENIZER_SCRIPT=""${TOKENIZER_SCRIPT:-$REPO_ROOT/slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh}""\n\nif [ ""$#"" -gt 0 ]; then\n ENV_LIST=(""$@"")\nelse\n if [ ! -d ""$DATA_ROOT"" ]; then\n echo ""DATA_ROOT does not exist: $DATA_ROOT"" >&2\n exit 1\n fi\n # list immediate subdirectories (candidate env names)\n mapfile -t ENV_LIST < <(find ""$DATA_ROOT"" -mindepth 1 -maxdepth 1 -type d -printf '%f\n' | sort)\nfi\n\necho ""Submitting tokenizer jobs for environments: ${ENV_LIST[*]}""\n\nfor env in ""${ENV_LIST[@]}""; do\n env_dir=""$DATA_ROOT/$env""\n if [ ! -d ""$env_dir/train"" ] || [ ! 
-d ""$env_dir/val"" ]; then\n echo ""Skipping $env (missing train/val under $env_dir)""\n continue\n fi\n\n job_name=""tokenizer_atari_${env}_dev_lr_3e-4""\n\n job_id=$(sbatch --parsable \\n --job-name=""$job_name"" \\n --chdir=""$REPO_ROOT"" \\n --export=ALL,ENV_NAME=""$env"" \\n ""$TOKENIZER_SCRIPT"")\n\n echo ""Submitted $job_name (ENV_NAME=$env) as job $job_id""\ndone\n\n\n",shellscript,tab +2680,5150722,"TERMINAL",0,0,"2466679888895555555555555662166",,terminal_output +2681,5152736,"TERMINAL",0,0,"468889412020202021777777777777788388",,terminal_output +2682,5153282,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",254,0,"5",shellscript,content +2683,5153282,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",253,1,"",shellscript,content +2684,5154530,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",928,0,"5",shellscript,content +2685,5154530,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",927,1,"",shellscript,content +2686,5154738,"TERMINAL",0,0,"6820509:002132222399999999999993:006:0058:008:00",,terminal_output +2687,5156752,"TERMINAL",0,0,"82022235444458:011:01313131313131313131319:0122722",,terminal_output +2688,5158780,"TERMINAL",0,0,"7:0024445766667333333333333344944",,terminal_output +2689,5160768,"TERMINAL",0,0,"2466679888895555555555555663166",,terminal_output +2690,5162775,"TERMINAL",0,0,"468889513030303031777777777777788388",,terminal_output +2691,5164790,"TERMINAL",0,0,"68303:0010313222239999999999999101051010",,terminal_output +2692,5166793,"TERMINAL",0,0,"83022235444451111414141414141414141411122722",,terminal_output +2693,5168803,"TERMINAL",0,0,"1024445766667333333333333344944",,terminal_output +2694,5170818,"TERMINAL",0,0,"2466679888895555555555555664166",,terminal_output +2695,5172832,"TERMINAL",0,0,"4688894:014040404041777777777777788388",,terminal_output +2696,5174849,"TERMINAL",0,0,"68401020413222239999999999999202052020",,terminal_output +2697,5176898,"TERMINAL",0,0,"84022235444452121515151515151515151512122722",,terminal_output +2698,5178848,"TERMINAL",0,0,"2024445766667333333333333344944",,terminal_output +2699,5180857,"TERMINAL",0,0,"2466679888895555555555555665166",,terminal_output +2700,5182942,"TERMINAL",0,0,"468889115050505051777777777777788388",,terminal_output +2701,5184989,"TERMINAL",0,0,"68502030513222239999999999999303053030",,terminal_output +2702,5186934,"TERMINAL",0,0,"850222354444531313:014:015:019:012:012:012:012:012:0150:013122722",,terminal_output +2703,5188897,"TERMINAL",0,0,"3024445766667333333333333344944",,terminal_output +2704,5190904,"TERMINAL",0,0,"2466679888895555555555555669:0166",,terminal_output +2705,5192928,"TERMINAL",0,0,"468889219:009:009:009:009:01777777777777788388",,terminal_output +2706,5194930,"TERMINAL",0,0,"682:0030402:013222239999999999999404054040",,terminal_output +2707,5196970,"TERMINAL",0,0,"84:0022235444454141111111111111111111114122722",,terminal_output +2708,5198945,"TERMINAL",0,0,"4024445766667333333333333344944",,terminal_output +2709,5200953,"TERMINAL",0,0,"2466679888895555555555555661166",,terminal_output +2710,5202970,"TERMINAL",0,0,"468889311010101011777777777777788388",,terminal_output +2711,5204968,"TERMINAL",0,0,"68104050113222239999999999999505055050",,terminal_output +2712,5207006,"TERMINAL",0,0,"81022235444455151212121212121212121215122722",,terminal_output +2713,5208986,"TERMINAL",0,0,"5024445766667333333333333344944",,terminal_output +2714,5210996,"TERMINAL",0,0,"2466679888895555555555555662166",,terminal_output 
+2715,5213014,"TERMINAL",0,0,"468889412020202021777777777777788388",,terminal_output +2716,5215013,"TERMINAL",0,0,"6820508:00:002132222399999999999994:007:0059:009:00",,terminal_output +2717,5217025,"TERMINAL",0,0,"82022235444459:012:01313131313131313131312:00:0122722",,terminal_output +2718,5219031,"TERMINAL",0,0,"8:0024445766667333333333333344944",,terminal_output +2719,5221048,"TERMINAL",0,0,"2466679888895555555555555663166",,terminal_output +2720,5223061,"TERMINAL",0,0,"468889513030303031777777777777788388",,terminal_output +2721,5225058,"TERMINAL",0,0,"68304:0010313222239999999999999101051010",,terminal_output +2722,5227173,"TERMINAL",0,0,"83022235444451111414141414141414141411122722",,terminal_output +2723,5229076,"TERMINAL",0,0,"1024445766667333333333333344944",,terminal_output +2724,5231173,"TERMINAL",0,0,"2466679888895555555555555664166",,terminal_output +2725,5233095,"TERMINAL",0,0,"4688895:014040404041777777777777788388",,terminal_output +2726,5235113,"TERMINAL",0,0,"68401020413222239999999999999202052020",,terminal_output +2727,5237116,"TERMINAL",0,0,"84022235444452121515151515151515151512122722",,terminal_output +2728,5239141,"TERMINAL",0,0,"2024445766667333333333333344944",,terminal_output +2729,5241133,"TERMINAL",0,0,"2466679888895555555555555665166",,terminal_output +2730,5243140,"TERMINAL",0,0,"468889115050505051777777777777788388",,terminal_output +2731,5245147,"TERMINAL",0,0,"68502030513222239999999999999303053030",,terminal_output +2732,5247159,"TERMINAL",0,0,"850222354444531314:015:016:0110:013:013:013:013:013:011:013122722",,terminal_output +2733,5249165,"TERMINAL",0,0,"3024445766667333333333333344944",,terminal_output +2734,5251176,"TERMINAL",0,0,"2466679888895555555555555668:00:0166",,terminal_output +2735,5253188,"TERMINAL",0,0,"4688892130:0030:0030:0030:0030:01777777777777788388",,terminal_output +2736,5255191,"TERMINAL",0,0,"683:0030403:013222239999999999999404054040",,terminal_output +2737,5257202,"TERMINAL",0,0,"85:0022235444454141111111111111111111114122722",,terminal_output +2738,5259225,"TERMINAL",0,0,"4024445766667333333333333344944",,terminal_output +2739,5261225,"TERMINAL",0,0,"2466679888895555555555555661166",,terminal_output +2740,5263232,"TERMINAL",0,0,"468889311010101011777777777777788388",,terminal_output +2741,5265252,"TERMINAL",0,0,"68104050113222239999999999999505055050",,terminal_output +2742,5267250,"TERMINAL",0,0,"81022235444455151212121212121212121215122722",,terminal_output +2743,5269257,"TERMINAL",0,0,"5024445766667333333333333344944",,terminal_output +2744,5271264,"TERMINAL",0,0,"2466679888895555555555555662166",,terminal_output +2745,5273274,"TERMINAL",0,0,"468889412020202021777777777777788388",,terminal_output +2746,5275283,"TERMINAL",0,0,"6820501:002132222399999999999995:008:00550:0050:00",,terminal_output +2747,5277289,"TERMINAL",0,0,"820222354444550:013:01313131313131313131311:0122722",,terminal_output +2748,5279299,"TERMINAL",0,0,"9:0024445766667333333333333344944",,terminal_output +2749,5281309,"TERMINAL",0,0,"2466679888895555555555555663166",,terminal_output +2750,5283318,"TERMINAL",0,0,"468889513030303031777777777777788388",,terminal_output +2751,5285324,"TERMINAL",0,0,"68305:0010313222239999999999999101051010",,terminal_output +2752,5287336,"TERMINAL",0,0,"83022235444451111414141414141414141411122722",,terminal_output +2753,5289345,"TERMINAL",0,0,"1024445766667333333333333344944",,terminal_output +2754,5291354,"TERMINAL",0,0,"2466679888895555555555555664166",,terminal_output 
+2755,5293360,"TERMINAL",0,0,"4688896:014040404041777777777777788388",,terminal_output +2756,5295376,"TERMINAL",0,0,"68401020413222239999999999999202052020",,terminal_output +2757,5297378,"TERMINAL",0,0,"84022235444452121515151515151515151512122722",,terminal_output +2758,5299401,"TERMINAL",0,0,"2024445766667333333333333344944",,terminal_output +2759,5301404,"TERMINAL",0,0,"2466679888895555555555555665166",,terminal_output +2760,5303423,"TERMINAL",0,0,"468889115050505051777777777777788388",,terminal_output +2761,5305423,"TERMINAL",0,0,"68502030513222239999999999999303053030",,terminal_output +2762,5307431,"TERMINAL",0,0,"850222354444531315:016:017:011:014:014:014:014:014:012:013122722",,terminal_output +2763,5309454,"TERMINAL",0,0,"3024445766667333333333333344944",,terminal_output +2764,5311557,"TERMINAL",0,0,"2466679888895555555555555661:0166",,terminal_output +2765,5313460,"TERMINAL",0,0,"468889211:001:001:001:001:01777777777777788388",,terminal_output +2766,5313501,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",0,0,"",shellscript,selection_keyboard +2767,5315467,"TERMINAL",0,0,"684:0030404:013222239999999999999404054040",,terminal_output +2768,5315793,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",1166,0,"",shellscript,selection_keyboard +2769,5316309,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",1165,0,"",shellscript,selection_command +2770,5316559,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",1164,0,"",shellscript,selection_command +2771,5316582,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",1159,0,"",shellscript,selection_command +2772,5316619,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",1097,0,"",shellscript,selection_command +2773,5316661,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",1096,0,"",shellscript,selection_command +2774,5316682,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",1067,0,"",shellscript,selection_command +2775,5316721,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",1028,0,"",shellscript,selection_command +2776,5316753,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",997,0,"",shellscript,selection_command +2777,5316792,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",964,0,"",shellscript,selection_command +2778,5316815,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",931,0,"",shellscript,selection_command +2779,5316850,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",930,0,"",shellscript,selection_command +2780,5317016,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",880,0,"",shellscript,selection_command +2781,5317484,"TERMINAL",0,0,"86:0022235444454141111111111111111111114122722",,terminal_output +2782,5319501,"TERMINAL",0,0,"4024445766667333333333333344944",,terminal_output +2783,5319785,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",0,0,"",shellscript,tab +2784,5320624,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",748,0,"",shellscript,selection_mouse +2785,5321497,"TERMINAL",0,0,"2466679888895555555555555661166",,terminal_output +2786,5323097,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2147,0,"",shellscript,selection_keyboard +2787,5323242,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2446,0,"",shellscript,selection_keyboard 
+2788,5323505,"TERMINAL",0,0,"468889311010101011777777777777788388",,terminal_output +2789,5325513,"TERMINAL",0,0,"68104050113222239999999999999505055050",,terminal_output +2790,5325683,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2430,0,"",shellscript,selection_command +2791,5325916,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2429,0,"",shellscript,selection_command +2792,5325953,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2416,0,"",shellscript,selection_command +2793,5325981,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2415,0,"",shellscript,selection_command +2794,5326012,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2367,0,"",shellscript,selection_command +2795,5326073,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2321,0,"",shellscript,selection_command +2796,5326075,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2299,0,"",shellscript,selection_command +2797,5326138,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2273,0,"",shellscript,selection_command +2798,5326146,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2254,0,"",shellscript,selection_command +2799,5326184,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2211,0,"",shellscript,selection_command +2800,5326209,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2178,0,"",shellscript,selection_command +2801,5326241,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2147,0,"",shellscript,selection_command +2802,5326385,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2122,0,"",shellscript,selection_command +2803,5326385,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2104,0,"",shellscript,selection_command +2804,5326385,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2075,0,"",shellscript,selection_command +2805,5326385,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2049,0,"",shellscript,selection_command +2806,5326410,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2029,0,"",shellscript,selection_command +2807,5326443,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2004,0,"",shellscript,selection_command +2808,5326475,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1981,0,"",shellscript,selection_command +2809,5326508,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1957,0,"",shellscript,selection_command +2810,5326542,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1935,0,"",shellscript,selection_command +2811,5326597,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1894,0,"",shellscript,selection_command +2812,5327189,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",0,0,"",shellscript,selection_command +2813,5327524,"TERMINAL",0,0,"81022235444455151212121212121212121215122722",,terminal_output +2814,5329542,"TERMINAL",0,0,"5024445766667333333333333344944",,terminal_output +2815,5329861,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",20,0,"",shellscript,selection_command +2816,5330110,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",21,0,"",shellscript,selection_command +2817,5330136,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",39,0,"",shellscript,selection_command +2818,5330165,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",67,0,"",shellscript,selection_command 
+2819,5330197,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",88,0,"",shellscript,selection_command +2820,5330233,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",112,0,"",shellscript,selection_command +2821,5330257,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",138,0,"",shellscript,selection_command +2822,5330295,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",232,0,"",shellscript,selection_command +2823,5330472,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",325,0,"",shellscript,selection_command +2824,5331545,"TERMINAL",0,0,"2466679888895555555555555662166",,terminal_output +2825,5331726,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",371,0,"",shellscript,selection_command +2826,5332376,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",370,1,"",shellscript,content +2827,5332678,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",370,0,"5",shellscript,content +2828,5332678,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",371,0,"",shellscript,selection_keyboard +2829,5333546,"TERMINAL",0,0,"468889412020202021777777777777788388",,terminal_output +2830,5335560,"TERMINAL",0,0,"6820502:002132222399999999999996:009:0051:001:00",,terminal_output +2831,5335681,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",370,0,"",shellscript,selection_command +2832,5337579,"TERMINAL",0,0,"82022235444451:014:01313131313131313131312:0122722",,terminal_output +2833,5339612,"TERMINAL",0,0,"50:0024445766667333333333333344944",,terminal_output +2834,5341664,"TERMINAL",0,0,"257778509999306666666666666773277",,terminal_output +2835,5343597,"TERMINAL",0,0,"57999302313131312888888888888899499",,terminal_output +2836,5345610,"TERMINAL",0,0,"79316:0111243333410104040404040404040404010111161111",,terminal_output +2837,5347613,"TERMINAL",0,0,"9313334655556222222222222233833",,terminal_output +2838,5348666,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",913,0,"",shellscript,selection_keyboard +2839,5349276,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",914,0,"",shellscript,selection_command +2840,5349526,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",948,0,"",shellscript,selection_command +2841,5349552,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",990,0,"",shellscript,selection_command +2842,5349584,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",995,0,"",shellscript,selection_command +2843,5349608,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1034,0,"",shellscript,selection_command +2844,5349619,"TERMINAL",0,0,"11355568777784444444444444554055",,terminal_output +2845,5349644,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1037,0,"",shellscript,selection_command +2846,5349677,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1038,0,"",shellscript,selection_command +2847,5349710,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1062,0,"",shellscript,selection_command +2848,5349745,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1069,0,"",shellscript,selection_command +2849,5349775,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1070,0,"",shellscript,selection_command +2850,5349815,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1096,0,"",shellscript,selection_command +2851,5349918,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1097,0,"",shellscript,selection_command 
+2852,5350163,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1122,0,"",shellscript,selection_command +2853,5350198,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1149,0,"",shellscript,selection_command +2854,5350233,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1150,0,"",shellscript,selection_command +2855,5350266,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1217,0,"",shellscript,selection_command +2856,5350300,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1249,0,"",shellscript,selection_command +2857,5350334,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1336,0,"",shellscript,selection_command +2858,5350366,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1347,0,"",shellscript,selection_command +2859,5350399,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1350,0,"",shellscript,selection_command +2860,5350435,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1351,0,"",shellscript,selection_command +2861,5350629,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1398,0,"",shellscript,selection_command +2862,5350815,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1351,0,"",shellscript,selection_command +2863,5351440,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1397,0,"",shellscript,selection_command +2864,5351636,"TERMINAL",0,0,"3577787:00999940666666666666677277",,terminal_output +2865,5352111,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1396,0,"5",shellscript,content +2866,5352111,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1395,1,"",shellscript,content +2867,5352530,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1396,0,"",shellscript,selection_command +2868,5353478,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",1398,0,"",shellscript,selection_command +2869,5353642,"TERMINAL",0,0,"57999402414141412888888888888899499",,terminal_output +2870,5355655,"TERMINAL",0,0,"79411121243333420205050505050505050505020212162121",,terminal_output +2871,5355729,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2046,0,"5",shellscript,content +2872,5355729,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",2045,1,"",shellscript,content +2873,5357662,"TERMINAL",0,0,"9413334655556222222222222233833",,terminal_output +2874,5359687,"TERMINAL",0,0,"21355568777784444444444444555055",,terminal_output +2875,5361675,"TERMINAL",0,0,"35777810999950666666666666677277",,terminal_output +2876,5363689,"TERMINAL",0,0,"57999502515151512888888888888899499",,terminal_output +2877,5365695,"TERMINAL",0,0,"79512131243333430306:007:008:002:005:005:005:005:005:003:0030313163131",,terminal_output +2878,5367708,"TERMINAL",0,0,"9513334655556222222222222233833",,terminal_output +2879,5369730,"TERMINAL",0,0,"31355568777784444444444444552:0055",,terminal_output +2880,5369805,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/spawn_tokenizers.sh",0,0,"#!/usr/bin/env bash\n\nset -euo pipefail\n\nREPO_ROOT=""/home/franz.srambical/jafar""\n\nDATA_ROOT=""${DATA_ROOT:-/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari}""\n\nTOKENIZER_SCRIPT=""${TOKENIZER_SCRIPT:-$REPO_ROOT/slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh}""\n\nif [ ""$#"" -gt 0 ]; then\n ENV_LIST=(""$@"")\nelse\n if [ ! 
-d ""$DATA_ROOT"" ]; then\n echo ""DATA_ROOT does not exist: $DATA_ROOT"" >&2\n exit 1\n fi\n # list immediate subdirectories (candidate env names)\n mapfile -t ENV_LIST < <(find ""$DATA_ROOT"" -mindepth 1 -maxdepth 1 -type d -printf '%f\n' | sort)\nfi\n\necho ""Submitting tokenizer jobs for environments: ${ENV_LIST[*]}""\n\nfor env in ""${ENV_LIST[@]}""; do\n env_dir=""$DATA_ROOT/$env""\n if [ ! -d ""$env_dir/train"" ] || [ ! -d ""$env_dir/val"" ]; then\n echo ""Skipping $env (missing train/val under $env_dir)""\n continue\n fi\n\n job_name=""tokenizer_atari_${env}_dev_lr_3e-5""\n\n job_id=$(sbatch --parsable \\n --job-name=""$job_name"" \\n --chdir=""$REPO_ROOT"" \\n --export=ALL,ENV_NAME=""$env"" \\n ""$TOKENIZER_SCRIPT"")\n\n echo ""Submitted $job_name (ENV_NAME=$env) as job $job_id""\ndone\n\n\n",shellscript,tab +2881,5370719,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/spawn_tokenizers.sh",930,0,"",shellscript,selection_mouse +2882,5371726,"TERMINAL",0,0,"3577782099992:00666666666666677277",,terminal_output +2883,5371945,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/spawn_tokenizers.sh",928,0,"6",shellscript,content +2884,5371945,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/spawn_tokenizers.sh",927,1,"",shellscript,content +2885,5373226,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/spawn_tokenizers.sh",254,0,"6",shellscript,content +2886,5373226,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/spawn_tokenizers.sh",253,1,"",shellscript,content +2887,5373735,"TERMINAL",0,0,"579995:0022:012:012:012:012888888888888899499",,terminal_output +2888,5375743,"TERMINAL",0,0,"795:013141243333440401010101010101010101040414164141",,terminal_output +2889,5377810,"TERMINAL",0,0,"97:013334655556222222222222233833",,terminal_output +2890,5379834,"TERMINAL",0,0,"41355568777784444444444444551055",,terminal_output +2891,5381335,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_atari_dev_lr_3e-5\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\n# Require ENV_NAME to be provided via sbatch --export=ENV_NAME=...\nif [ -z ""${ENV_NAME:-}"" ]; then\n echo ""ENV_NAME must be provided (e.g., sbatch --export=ALL,ENV_NAME=alien $0)"" >&2\n exit 2\nfi\n\ntags=""atari ${ENV_NAME} tokenizer dev 
lr_3e-5""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_NAME}""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/${ENV_NAME}/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_tokenizer.py \\n --patch_size=16 \\n --image_height=84 \\n --image_width=84 \\n --num_steps=50_000 \\n --max_lr=3e-5 \\n --warmup_steps=5000 \\n --wsd_decay_steps=5000 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +2892,5381771,"TERMINAL",0,0,"35777830999910666666666666677277",,terminal_output +2893,5383130,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",371,0,"",shellscript,selection_mouse +2894,5383131,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",370,0,"",shellscript,selection_command +2895,5383780,"TERMINAL",0,0,"57999102111111112888888888888899499",,terminal_output +2896,5384128,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",371,0,"6",shellscript,content +2897,5384128,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",370,1,"",shellscript,content +2898,5385649,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",1396,0,"6",shellscript,content +2899,5385650,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",1395,1,"",shellscript,content +2900,5385788,"TERMINAL",0,0,"79114151243333450502020202020202020202050515165151",,terminal_output +2901,5387437,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",2046,0,"6",shellscript,content +2902,5387437,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",2045,1,"",shellscript,content +2903,5387796,"TERMINAL",0,0,"9113334655556222222222222233833",,terminal_output +2904,5389805,"TERMINAL",0,0,"51355568777784444444444444552055",,terminal_output +2905,5391814,"TERMINAL",0,0,"35777840999920666666666666677277",,terminal_output +2906,5393823,"TERMINAL",0,0,"57999202212121212888888888888899499",,terminal_output +2907,5395834,"TERMINAL",0,0,"7921513:0124333342:005:00303030303030303030303:007:018:00:0162:012:01",,terminal_output +2908,5397847,"TERMINAL",0,0,"9213334655556222222222222233833",,terminal_output +2909,5399928,"TERMINAL",0,0,"1:01355568777784444444444444553055",,terminal_output +2910,5401872,"TERMINAL",0,0,"35777850999930666666666666677277",,terminal_output +2911,5403871,"TERMINAL",0,0,"57999302313131312888888888888899499",,terminal_output +2912,5405878,"TERMINAL",0,0,"79317:0111243333410104040404040404040404010111161111",,terminal_output +2913,5407889,"TERMINAL",0,0,"9313334655556222222222222233833",,terminal_output +2914,5409906,"TERMINAL",0,0,"11355568777784444444444444554055",,terminal_output +2915,5412012,"TERMINAL",0,0,"3577788:00999940666666666666677277",,terminal_output +2916,5413921,"TERMINAL",0,0,"57999402414141412888888888888899499",,terminal_output +2917,5415929,"TERMINAL",0,0,"79411121243333420205050505050505050505020212162121",,terminal_output 
+2918,5417942,"TERMINAL",0,0,"9413334655556222222222222233833",,terminal_output +2919,5419949,"TERMINAL",0,0,"21355568777784444444444444555055",,terminal_output +2920,5421960,"TERMINAL",0,0,"35777810999950666666666666677277",,terminal_output +2921,5423976,"TERMINAL",0,0,"57999502515151512888888888888899499",,terminal_output +2922,5425993,"TERMINAL",0,0,"79512131243333430307:008:009:003:006:006:006:006:006:004:0030313163131",,terminal_output +2923,5428063,"TERMINAL",0,0,"9513334655556222222222222233833",,terminal_output +2924,5430041,"TERMINAL",0,0,"31355568777784444444444444553:0055",,terminal_output +2925,5432004,"TERMINAL",0,0,"3577782099993:00666666666666677277",,terminal_output +2926,5434014,"TERMINAL",0,0,"579996:0023:013:013:013:012888888888888899499",,terminal_output +2927,5436025,"TERMINAL",0,0,"796:013141243333440401010101010101010101040414164141",,terminal_output +2928,5438032,"TERMINAL",0,0,"98:013334655556222222222222233833",,terminal_output +2929,5440041,"TERMINAL",0,0,"41355568777784444444444444551055",,terminal_output +2930,5442058,"TERMINAL",0,0,"35777830999910666666666666677277",,terminal_output +2931,5444072,"TERMINAL",0,0,"57999102111111112888888888888899499",,terminal_output +2932,5446067,"TERMINAL",0,0,"79114151243333450502020202020202020202050515165151",,terminal_output +2933,5448077,"TERMINAL",0,0,"9113334655556222222222222233833",,terminal_output +2934,5450084,"TERMINAL",0,0,"51355568777784444444444444552055",,terminal_output +2935,5452097,"TERMINAL",0,0,"35777840999920666666666666677277",,terminal_output +2936,5454104,"TERMINAL",0,0,"57999202212121212888888888888899499",,terminal_output +2937,5456127,"TERMINAL",0,0,"7921514:0124333343:006:00303030303030303030304:008:011:0163:013:01",,terminal_output +2938,5456241,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_atari_dev_lr_3e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\n# Require ENV_NAME to be provided via sbatch --export=ENV_NAME=...\nif [ -z ""${ENV_NAME:-}"" ]; then\n echo ""ENV_NAME must be provided (e.g., sbatch --export=ALL,ENV_NAME=alien $0)"" >&2\n exit 2\nfi\n\ntags=""atari ${ENV_NAME} tokenizer dev lr_3e-4""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_NAME}""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/${ENV_NAME}/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep 
SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_tokenizer.py \\n --patch_size=16 \\n --image_height=84 \\n --image_width=84 \\n --num_steps=50_000 \\n --max_lr=3e-4 \\n --warmup_steps=5000 \\n --wsd_decay_steps=5000 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +2939,5458124,"TERMINAL",0,0,"9213334655556222222222222233833",,terminal_output +2940,5458225,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",2028,0,"",shellscript,selection_mouse +2941,5458226,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",2027,0,"",shellscript,selection_command +2942,5460135,"TERMINAL",0,0,"2:01355568777784444444444444553055",,terminal_output +2943,5462170,"TERMINAL",0,0,"35777850999930666666666666677277",,terminal_output +2944,5464178,"TERMINAL",0,0,"57999302313131312888888888888899499",,terminal_output +2945,5465540,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",2002,0,"",shellscript,selection_command +2946,5465758,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1979,0,"",shellscript,selection_command +2947,5465902,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1955,0,"",shellscript,selection_command +2948,5466161,"TERMINAL",0,0,"79318:0111243333410104040404040404040404010111161111",,terminal_output +2949,5466451,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1917,0,"",shellscript,selection_command +2950,5466621,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1928,0,"",shellscript,selection_command +2951,5466918,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1929,0,"",shellscript,selection_command +2952,5467573,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1929,0,"_",shellscript,content +2953,5467574,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1930,0,"",shellscript,selection_keyboard +2954,5467881,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1930,0,"f",shellscript,content +2955,5467881,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1931,0,"",shellscript,selection_keyboard +2956,5467892,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1931,0,"u",shellscript,content +2957,5467892,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1932,0,"",shellscript,selection_keyboard +2958,5467991,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1932,0,"l",shellscript,content +2959,5467991,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1933,0,"",shellscript,selection_keyboard +2960,5468147,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1933,0,"l",shellscript,content +2961,5468147,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1934,0,"",shellscript,selection_keyboard 
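Record 2938 opens the lr_3e-4_full_precision variant; across the sweep only the learning rate, job name, and tags differ, while the restart logic is identical: the script reads the Restarts= counter from scontrol so a requeued job resumes from its checkpoint and a fresh submission starts clean. That decision, as it appears in the recorded scripts:

    restart_count=$(scontrol show job "$SLURM_JOB_ID" | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)
    if [ "$restart_count" -eq 0 ]; then
        restore_ckpt_flag="--no-restore-ckpt"   # first run: train from scratch
    else
        restore_ckpt_flag="--restore-ckpt"      # requeued run: resume from checkpoint
    fi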
+2962,5468163,"TERMINAL",0,0,"9313334655556222222222222233833",,terminal_output +2963,5468412,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1934,0,"_",shellscript,content +2964,5468412,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1935,0,"",shellscript,selection_keyboard +2965,5468519,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1935,0,"p",shellscript,content +2966,5468520,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1936,0,"",shellscript,selection_keyboard +2967,5468623,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1936,0,"r",shellscript,content +2968,5468623,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1937,0,"",shellscript,selection_keyboard +2969,5468684,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1937,0,"e",shellscript,content +2970,5468684,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1938,0,"",shellscript,selection_keyboard +2971,5468890,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1938,0,"c",shellscript,content +2972,5468890,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1939,0,"",shellscript,selection_keyboard +2973,5468990,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1939,0,"i",shellscript,content +2974,5468990,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1940,0,"",shellscript,selection_keyboard +2975,5469227,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1940,0,"s",shellscript,content +2976,5469227,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1941,0,"",shellscript,selection_keyboard +2977,5469240,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1941,0,"i",shellscript,content +2978,5469240,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1942,0,"",shellscript,selection_keyboard +2979,5469250,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1942,0,"o",shellscript,content +2980,5469250,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1943,0,"",shellscript,selection_keyboard +2981,5469280,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1943,0,"n",shellscript,content +2982,5469281,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1944,0,"",shellscript,selection_keyboard +2983,5469503,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1943,0,"",shellscript,selection_command +2984,5470187,"TERMINAL",0,0,"11355568777784444444444444554055",,terminal_output +2985,5472182,"TERMINAL",0,0,"3577789:00999940666666666666677277",,terminal_output +2986,5474194,"TERMINAL",0,0,"57999402414141412888888888888899499",,terminal_output +2987,5476213,"TERMINAL",0,0,"79411121243333420205050505050505050505020212162121",,terminal_output +2988,5478216,"TERMINAL",0,0,"9413334655556222222222222233833",,terminal_output +2989,5479350,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH 
--error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_atari_dev_lr_3e-5\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\n# Require ENV_NAME to be provided via sbatch --export=ENV_NAME=...\nif [ -z ""${ENV_NAME:-}"" ]; then\n echo ""ENV_NAME must be provided (e.g., sbatch --export=ALL,ENV_NAME=alien $0)"" >&2\n exit 2\nfi\n\ntags=""atari ${ENV_NAME} tokenizer dev lr_3e-5""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_NAME}""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/${ENV_NAME}/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_tokenizer.py \\n --patch_size=16 \\n --image_height=84 \\n --image_width=84 \\n --num_steps=50_000 \\n --max_lr=3e-5 \\n --warmup_steps=5000 \\n --wsd_decay_steps=5000 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +2990,5480222,"TERMINAL",0,0,"21355568777784444444444444555055",,terminal_output +2991,5481738,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",1926,0,"",shellscript,selection_mouse +2992,5482234,"TERMINAL",0,0,"35777810999950666666666666677277",,terminal_output +2993,5482883,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",1928,0,"",shellscript,selection_command +2994,5483785,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",1929,0,"",shellscript,selection_command +2995,5484240,"TERMINAL",0,0,"57999502515151512888888888888899499",,terminal_output +2996,5484572,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",1929,0,"_",shellscript,content +2997,5484572,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",1930,0,"",shellscript,selection_keyboard +2998,5485612,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",1930,0,"full_precision",shellscript,content +2999,5486254,"TERMINAL",0,0,"79512131243333430308:009:0010:004:007:007:007:007:007:005:0030313163131",,terminal_output +3000,5486340,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",1943,0,"",shellscript,selection_command 
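Records 2996-2998 rename this job to its _full_precision variant by hand. Each tokenizer.sh also refuses to run from the wrong git branch before launching training; the guard, with the branch name taken verbatim from the recorded scripts:

    current_branch=$(git rev-parse --abbrev-ref HEAD)
    if [ "$current_branch" != "prepend-action-maskgit" ]; then
        echo "Run from prepend-action-maskgit; current branch is $current_branch." >&2
        exit 1
    fi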
+3001,5488264,"TERMINAL",0,0,"9513334655556222222222222233833",,terminal_output +3002,5490270,"TERMINAL",0,0,"31355568777784444444444444554:0055",,terminal_output +3003,5492262,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/atari/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_atari_dev_lr_3e-6\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\n# Require ENV_NAME to be provided via sbatch --export=ENV_NAME=...\nif [ -z ""${ENV_NAME:-}"" ]; then\n echo ""ENV_NAME must be provided (e.g., sbatch --export=ALL,ENV_NAME=alien $0)"" >&2\n exit 2\nfi\n\ntags=""atari ${ENV_NAME} tokenizer dev lr_3e-6""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari/${ENV_NAME}""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/atari/${ENV_NAME}/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_tokenizer.py \\n --patch_size=16 \\n --image_height=84 \\n --image_width=84 \\n --num_steps=50_000 \\n --max_lr=3e-6 \\n --warmup_steps=5000 \\n --wsd_decay_steps=5000 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +3004,5492332,"TERMINAL",0,0,"3577782099994:00666666666666677277",,terminal_output +3005,5494290,"TERMINAL",0,0,"579997:0024:014:014:014:012888888888888899499",,terminal_output +3006,5494642,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",1929,0,"_full_precision",shellscript,content +3007,5494643,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",1944,0,"",shellscript,selection_command +3008,5496388,"TERMINAL",0,0,"797:013141243333440401010101010101010101040414164141",,terminal_output +3009,5498315,"TERMINAL",0,0,"99:013334655556222222222222233833",,terminal_output +3010,5500129,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",0,0,"#!/usr/bin/env bash\n\nset -euo pipefail\n\nREPO_ROOT=""/home/franz.srambical/jafar""\n\nDATA_ROOT=""${DATA_ROOT:-/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari}""\n\nTOKENIZER_SCRIPT=""${TOKENIZER_SCRIPT:-$REPO_ROOT/slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh}""\n\nif [ ""$#"" -gt 0 ]; then\n ENV_LIST=(""$@"")\nelse\n if [ ! -d ""$DATA_ROOT"" ]; then\n echo ""DATA_ROOT does not exist: $DATA_ROOT"" >&2\n exit 1\n fi\n # list immediate subdirectories (candidate env names)\n mapfile -t ENV_LIST < <(find ""$DATA_ROOT"" -mindepth 1 -maxdepth 1 -type d -printf '%f\n' | sort)\nfi\n\necho ""Submitting tokenizer jobs for environments: ${ENV_LIST[*]}""\n\nfor env in ""${ENV_LIST[@]}""; do\n env_dir=""$DATA_ROOT/$env""\n if [ ! -d ""$env_dir/train"" ] || [ ! 
-d ""$env_dir/val"" ]; then\n echo ""Skipping $env (missing train/val under $env_dir)""\n continue\n fi\n\n job_name=""tokenizer_atari_${env}_dev_lr_3e-4""\n\n job_id=$(sbatch --parsable \\n --job-name=""$job_name"" \\n --chdir=""$REPO_ROOT"" \\n --export=ALL,ENV_NAME=""$env"" \\n ""$TOKENIZER_SCRIPT"")\n\n echo ""Submitted $job_name (ENV_NAME=$env) as job $job_id""\ndone\n\n\n",shellscript,tab +3011,5500319,"TERMINAL",0,0,"41355568777784444444444444551055",,terminal_output +3012,5502337,"TERMINAL",0,0,"35777830999910666666666666677277",,terminal_output +3013,5504352,"TERMINAL",0,0,"57999102111111112888888888888899499",,terminal_output +3014,5506426,"TERMINAL",0,0,"79114151243333450502020202020202020202050515165151",,terminal_output +3015,5507602,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",254,0,"_full_precision",shellscript,content +3016,5507603,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",269,0,"",shellscript,selection_command +3017,5508362,"TERMINAL",0,0,"9113334655556222222222222233833",,terminal_output +3018,5508929,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",943,0,"_full_precision",shellscript,content +3019,5508929,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",958,0,"",shellscript,selection_command +3020,5510412,"TERMINAL",0,0,"51355568777784444444444444552055",,terminal_output +3021,5512373,"TERMINAL",0,0,"35777840999920666666666666677277",,terminal_output +3022,5514381,"TERMINAL",0,0,"57999202212121212888888888888899499",,terminal_output +3023,5516473,"TERMINAL",0,0,"7921515:0124333344:007:00303030303030303030305:009:012:0164:014:01",,terminal_output +3024,5518401,"TERMINAL",0,0,"9213334655556222222222222233833",,terminal_output +3025,5520442,"TERMINAL",0,0,"3:01355568777784444444444444553055",,terminal_output +3026,5522418,"TERMINAL",0,0,"35777850999930666666666666677277",,terminal_output +3027,5523632,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3028,5524424,"TERMINAL",0,0,"57999302313131312888888888888899499",,terminal_output +3029,5526436,"TERMINAL",0,0,"79319:0111243333410104040404040404040404010111161111",,terminal_output +3030,5526557,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",371,0,"_full_precision",shellscript,content +3031,5526557,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",386,0,"",shellscript,selection_command +3032,5527608,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1411,0," full_precision",shellscript,content +3033,5527608,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",1426,0,"",shellscript,selection_command +3034,5528448,"TERMINAL",0,0,"9313334655556222222222222233833",,terminal_output +3035,5530455,"TERMINAL",0,0,"11355568777784444444444444554055",,terminal_output +3036,5532466,"TERMINAL",0,0,"35777830:00999940666666666666677277",,terminal_output +3037,5534471,"TERMINAL",0,0,"57999402414141412888888888888899499",,terminal_output +3038,5534583,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/spawn_tokenizers.sh",0,0,"#!/usr/bin/env bash\n\nset -euo pipefail\n\nREPO_ROOT=""/home/franz.srambical/jafar""\n\nDATA_ROOT=""${DATA_ROOT:-/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari}""\n\nTOKENIZER_SCRIPT=""${TOKENIZER_SCRIPT:-$REPO_ROOT/slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh}""\n\nif [ ""$#"" -gt 0 
]; then\n ENV_LIST=(""$@"")\nelse\n if [ ! -d ""$DATA_ROOT"" ]; then\n echo ""DATA_ROOT does not exist: $DATA_ROOT"" >&2\n exit 1\n fi\n # list immediate subdirectories (candidate env names)\n mapfile -t ENV_LIST < <(find ""$DATA_ROOT"" -mindepth 1 -maxdepth 1 -type d -printf '%f\n' | sort)\nfi\n\necho ""Submitting tokenizer jobs for environments: ${ENV_LIST[*]}""\n\nfor env in ""${ENV_LIST[@]}""; do\n env_dir=""$DATA_ROOT/$env""\n if [ ! -d ""$env_dir/train"" ] || [ ! -d ""$env_dir/val"" ]; then\n echo ""Skipping $env (missing train/val under $env_dir)""\n continue\n fi\n\n job_name=""tokenizer_atari_${env}_dev_lr_3e-5""\n\n job_id=$(sbatch --parsable \\n --job-name=""$job_name"" \\n --chdir=""$REPO_ROOT"" \\n --export=ALL,ENV_NAME=""$env"" \\n ""$TOKENIZER_SCRIPT"")\n\n echo ""Submitted $job_name (ENV_NAME=$env) as job $job_id""\ndone\n\n\n",shellscript,tab +3039,5536395,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/spawn_tokenizers.sh",254,0,"_full_precision",shellscript,content +3040,5536396,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/spawn_tokenizers.sh",269,0,"",shellscript,selection_command +3041,5536479,"TERMINAL",0,0,"79411121243333420205050505050505050505020212162121",,terminal_output +3042,5537434,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/spawn_tokenizers.sh",943,0,"_full_precision",shellscript,content +3043,5537434,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/spawn_tokenizers.sh",958,0,"",shellscript,selection_command +3044,5538491,"TERMINAL",0,0,"9413334655556222222222222233833",,terminal_output +3045,5540498,"TERMINAL",0,0,"21355568777784444444444444555055",,terminal_output +3046,5542507,"TERMINAL",0,0,"35777810999950666666666666677277",,terminal_output +3047,5543064,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",0,0,"",shellscript,tab +3048,5544517,"TERMINAL",0,0,"57999502515151512888888888888899499",,terminal_output +3049,5545495,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",371,0,"_full_precision",shellscript,content +3050,5545495,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",386,0,"",shellscript,selection_command +3051,5546529,"TERMINAL",0,0,"79512131243333430309:0010:001:005:008:008:008:008:008:006:0030313163131",,terminal_output +3052,5548461,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3053,5548534,"TERMINAL",0,0,"9513334655556222222222222233833",,terminal_output +3054,5551805,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",1397,0,"",shellscript,selection_command +3055,5551844,"TERMINAL",0,0,"31355568777784444444444444555:0055",,terminal_output +3056,5552182,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",1396,0,"",shellscript,selection_command +3057,5552553,"TERMINAL",0,0,"3577782099995:00666666666666677277",,terminal_output +3058,5552953,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",1396,0," full_precision",shellscript,content +3059,5554562,"TERMINAL",0,0,"579998:0025:015:015:015:012888888888888899499",,terminal_output +3060,5555675,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",371,0,"_full_precision",shellscript,content +3061,5555675,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",386,0,"",shellscript,selection_command +3062,5556572,"TERMINAL",0,0,"798:013141243333440401010101010101010101040414164141",,terminal_output 
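A note on why every recorded tokenizer.sh ends with a backgrounded srun rather than a foreground one: bash defers a trap taken during a foreground command until that command exits, but a signal arriving while bash sits in the wait builtin fires the trap immediately, which is what lets the SIGUSR1 requeue handler run before the time limit. The recorded tail, annotated (training flags elided):

    srun python jasmine/train_tokenizer.py ... &   # background, so the shell stays free to handle signals
    child_pid=$!
    wait $child_pid   # SIGUSR1 interrupts wait and runs the trap at once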
+3063,5558528,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",385,0,"",shellscript,selection_command +3064,5558583,"TERMINAL",0,0,"940:013334655556222222222222233833",,terminal_output +3065,5560592,"TERMINAL",0,0,"41466679888895555555555555661166",,terminal_output +3066,5561034,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",1922,0,"",shellscript,selection_keyboard +3067,5561168,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",2491,0,"",shellscript,selection_keyboard +3068,5562598,"TERMINAL",0,0,"468889311010101011777777777777788388",,terminal_output +3069,5564608,"TERMINAL",0,0,"68104050113222239999999999999505055050",,terminal_output +3070,5565052,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/spawn_tokenizers.sh",0,0,"#!/usr/bin/env bash\n\nset -euo pipefail\n\nREPO_ROOT=""/home/franz.srambical/jafar""\n\nDATA_ROOT=""${DATA_ROOT:-/fast/project/HFMI_SynergyUnit/jafar_ws/data/atari}""\n\nTOKENIZER_SCRIPT=""${TOKENIZER_SCRIPT:-$REPO_ROOT/slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh}""\n\nif [ ""$#"" -gt 0 ]; then\n ENV_LIST=(""$@"")\nelse\n if [ ! -d ""$DATA_ROOT"" ]; then\n echo ""DATA_ROOT does not exist: $DATA_ROOT"" >&2\n exit 1\n fi\n # list immediate subdirectories (candidate env names)\n mapfile -t ENV_LIST < <(find ""$DATA_ROOT"" -mindepth 1 -maxdepth 1 -type d -printf '%f\n' | sort)\nfi\n\necho ""Submitting tokenizer jobs for environments: ${ENV_LIST[*]}""\n\nfor env in ""${ENV_LIST[@]}""; do\n env_dir=""$DATA_ROOT/$env""\n if [ ! -d ""$env_dir/train"" ] || [ ! -d ""$env_dir/val"" ]; then\n echo ""Skipping $env (missing train/val under $env_dir)""\n continue\n fi\n\n job_name=""tokenizer_atari_${env}_dev_lr_3e-6""\n\n job_id=$(sbatch --parsable \\n --job-name=""$job_name"" \\n --chdir=""$REPO_ROOT"" \\n --export=ALL,ENV_NAME=""$env"" \\n ""$TOKENIZER_SCRIPT"")\n\n echo ""Submitted $job_name (ENV_NAME=$env) as job $job_id""\ndone\n\n\n",shellscript,tab +3071,5566633,"TERMINAL",0,0,"81022235444455151212121212121212121215122722",,terminal_output +3072,5567512,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/spawn_tokenizers.sh",254,0,"_full_precision",shellscript,content +3073,5567513,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/spawn_tokenizers.sh",269,0,"",shellscript,selection_command +3074,5568628,"TERMINAL",0,0,"5024445766667333333333333344944",,terminal_output +3075,5568716,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/spawn_tokenizers.sh",943,0,"_full_precision",shellscript,content +3076,5568716,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/spawn_tokenizers.sh",958,0,"",shellscript,selection_command +3077,5570391,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",0,0,"",shellscript,tab +3078,5570633,"TERMINAL",0,0,"2466679888895555555555555662166",,terminal_output +3079,5571167,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",371,0,"_full_precision",shellscript,content +3080,5571168,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",386,0,"",shellscript,selection_command +3081,5572646,"TERMINAL",0,0,"468889412020202021777777777777788388",,terminal_output +3082,5573666,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3083,5574652,"TERMINAL",0,0,"6820506:002132222399999999999998:00:003:0055:005:00",,terminal_output +3084,5576664,"TERMINAL",0,0,"82022235444455:018:01313131313131313131316:0122722",,terminal_output 
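Record 3070 shows the complete lr_3e-6_full_precision spawn script, including the fallback used when no environment names are passed on the command line: it enumerates the immediate subdirectories of DATA_ROOT as candidates. That line in isolation (GNU find):

    # Candidate env names = immediate subdirectories of DATA_ROOT, sorted.
    mapfile -t ENV_LIST < <(find "$DATA_ROOT" -mindepth 1 -maxdepth 1 -type d -printf '%f\n' | sort)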
+3085,5578672,"TERMINAL",0,0,"4:0024445766667333333333333344944",,terminal_output +3086,5579581,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",1397,0,"",shellscript,selection_command +3087,5579852,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",1396,0,"",shellscript,selection_command +3088,5580536,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",1396,0," full_precision",shellscript,content +3089,5580679,"TERMINAL",0,0,"2466679888895555555555555663166",,terminal_output +3090,5581751,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",371,0,"_full_precision",shellscript,content +3091,5581751,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",386,0,"",shellscript,selection_command +3092,5582691,"TERMINAL",0,0,"468889513030303031777777777777788388",,terminal_output +3093,5584700,"TERMINAL",0,0,"68304:00:0010313222239999999999999101051010",,terminal_output +3094,5585466,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",385,0,"",shellscript,selection_command +3095,5586817,"TERMINAL",0,0,"83022235444451111414141414141414141411122722",,terminal_output +3096,5587098,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/spawn_tokenizers.sh",0,0,"",shellscript,tab +3097,5587570,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/spawn_tokenizers.sh",960,0,"",shellscript,selection_mouse +3098,5588717,"TERMINAL",0,0,"1024445766667333333333333344944",,terminal_output +3099,5590730,"TERMINAL",0,0,"2466679888895555555555555664166",,terminal_output +3100,5592731,"TERMINAL",0,0,"4688891:014040404041777777777777788388",,terminal_output +3101,5594793,"TERMINAL",0,0,"68401020413222239999999999999202052020",,terminal_output +3102,5595648,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3103,5596758,"TERMINAL",0,0,"84022235444452121515151515151515151512122722",,terminal_output +3104,5598764,"TERMINAL",0,0,"2024445766667333333333333344944",,terminal_output +3105,5599739,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/spawn_tokenizers.sh",0,0,"",shellscript,tab +3106,5600769,"TERMINAL",0,0,"2466679888895555555555555665166",,terminal_output +3107,5602782,"TERMINAL",0,0,"468889115050505051777777777777788388",,terminal_output +3108,5604149,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",0,0,"",shellscript,tab +3109,5604793,"TERMINAL",0,0,"68502030513222239999999999999303053030",,terminal_output +3110,5605311,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/tokenizer.sh",385,0,"",shellscript,selection_command +3111,5606695,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",0,0,"",shellscript,tab +3112,5606799,"TERMINAL",0,0,"8502223544445313110:011:012:016:019:019:019:019:019:017:013122722",,terminal_output +3113,5608098,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",879,0,"",shellscript,selection_command +3114,5608347,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",877,0,"",shellscript,selection_command +3115,5608371,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",876,0,"",shellscript,selection_command +3116,5608402,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",875,0,"",shellscript,selection_command +3117,5608432,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",874,0,"",shellscript,selection_command 
+3118,5608468,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",873,0,"",shellscript,selection_command +3119,5608499,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",872,0,"",shellscript,selection_command +3120,5608533,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",870,0,"",shellscript,selection_command +3121,5608578,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",869,0,"",shellscript,selection_command +3122,5608613,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",868,0,"",shellscript,selection_command +3123,5608632,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",867,0,"",shellscript,selection_command +3124,5608665,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",866,0,"",shellscript,selection_command +3125,5608699,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",865,0,"",shellscript,selection_command +3126,5608732,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",864,0,"",shellscript,selection_command +3127,5608766,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",863,0,"",shellscript,selection_command +3128,5608799,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5/spawn_tokenizers.sh",862,0,"",shellscript,selection_command +3129,5608806,"TERMINAL",0,0,"3024445766667333333333333344944",,terminal_output +3130,5610822,"TERMINAL",0,0,"2466679888895555555555555666:0166",,terminal_output +3131,5612831,"TERMINAL",0,0,"468889216:006:006:006:006:01777777777777788388",,terminal_output +3132,5614844,"TERMINAL",0,0,"689:0030409:013222239999999999999404054040",,terminal_output +3133,5616157,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3134,5616850,"TERMINAL",0,0,"81:0022235444454141111111111111111111114122722",,terminal_output +3135,5618864,"TERMINAL",0,0,"4024445766667333333333333344944",,terminal_output +3136,5620867,"TERMINAL",0,0,"2466679888895555555555555661166",,terminal_output +3137,5622880,"TERMINAL",0,0,"468889311010101011777777777777788388",,terminal_output +3138,5624897,"TERMINAL",0,0,"68104050113222239999999999999505055050",,terminal_output +3139,5626896,"TERMINAL",0,0,"81022235444455151212121212121212121215122722",,terminal_output +3140,5628909,"TERMINAL",0,0,"5024445766667333333333333344944",,terminal_output +3141,5630920,"TERMINAL",0,0,"2466679888895555555555555662166",,terminal_output +3142,5632931,"TERMINAL",0,0,"468889412020202021777777777777788388",,terminal_output +3143,5634951,"TERMINAL",0,0,"6820507:002132222399999999999991:004:0056:006:00",,terminal_output +3144,5636950,"TERMINAL",0,0,"82022235444456:019:01313131313131313131317:0122722",,terminal_output +3145,5638959,"TERMINAL",0,0,"5:0024445766667333333333333344944",,terminal_output +3146,5640978,"TERMINAL",0,0,"2466679888895555555555555663166",,terminal_output +3147,5642027,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",0,0,"",shellscript,tab +3148,5642974,"TERMINAL",0,0,"468889513030303031777777777777788388",,terminal_output +3149,5645072,"TERMINAL",0,0,"68301:0010313222239999999999999101051010",,terminal_output +3150,5646989,"TERMINAL",0,0,"83022235444451111414141414141414141411122722",,terminal_output +3151,5649000,"TERMINAL",0,0,"1024445766667333333333333344944",,terminal_output +3152,5649794,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",0,0,"",shellscript,tab 
+3153,5650952,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6/tokenizer.sh",456,0,"",shellscript,selection_mouse +3154,5651012,"TERMINAL",0,0,"2466679888895555555555555664166",,terminal_output +3155,5653020,"TERMINAL",0,0,"4688892:014040404041777777777777788388",,terminal_output +3156,5655029,"TERMINAL",0,0,"68401020413222239999999999999202052020",,terminal_output +3157,5657056,"TERMINAL",0,0,"84022235444452121515151515151515151512122722",,terminal_output +3158,5659067,"TERMINAL",0,0,"2024445766667333333333333344944",,terminal_output +3159,5661054,"TERMINAL",0,0,"2466679888895555555555555665166",,terminal_output +3160,5663064,"TERMINAL",0,0,"468889115050505051777777777777788388",,terminal_output +3161,5665071,"TERMINAL",0,0,"68502030513222239999999999999303053030",,terminal_output +3162,5667079,"TERMINAL",0,0,"850222354444531311:012:013:017:0120:0120:0120:0120:0120:018:013122722",,terminal_output +3163,5669091,"TERMINAL",0,0,"3024445766667333333333333344944",,terminal_output +3164,5671103,"TERMINAL",0,0,"2466679888895555555555555667:0166",,terminal_output +3165,5673109,"TERMINAL",0,0,"468889217:007:007:007:007:01777777777777788388",,terminal_output +3166,5675120,"TERMINAL",0,0,"684:00:00304010:013222239999999999999404054040",,terminal_output +3167,5677230,"TERMINAL",0,0,"82:0022235444454141111111111111111111114122722",,terminal_output +3168,5679133,"TERMINAL",0,0,"4024445766667333333333333344944",,terminal_output +3169,5681170,"TERMINAL",0,0,"2466679888895555555555555661166",,terminal_output +3170,5683177,"TERMINAL",0,0,"468889311010101011777777777777788388",,terminal_output +3171,5685192,"TERMINAL",0,0,"68104050113222239999999999999505055050",,terminal_output +3172,5687190,"TERMINAL",0,0,"81022235444455151212121212121212121215122722",,terminal_output +3173,5689204,"TERMINAL",0,0,"5024445766667333333333333344944",,terminal_output +3174,5691254,"TERMINAL",0,0,"2466679888895555555555555662166",,terminal_output +3175,5693218,"TERMINAL",0,0,"468889412020202021777777777777788388",,terminal_output +3176,5695255,"TERMINAL",0,0,"6820508:002132222399999999999992:005:0057:007:00",,terminal_output +3177,5697233,"TERMINAL",0,0,"82022235444457:011:00:01313131313131313131318:0122722",,terminal_output +3178,5699342,"TERMINAL",0,0,"6:0024445766667333333333333344944",,terminal_output +3179,5701254,"TERMINAL",0,0,"2466679888895555555555555663166",,terminal_output +3180,5703261,"TERMINAL",0,0,"468889513030303031777777777777788388",,terminal_output +3181,5705271,"TERMINAL",0,0,"68302:0010313222239999999999999101051010",,terminal_output +3182,5707277,"TERMINAL",0,0,"83022235444451111414141414141414141411122722",,terminal_output +3183,5709295,"TERMINAL",0,0,"1024445766667333333333333344944",,terminal_output +3184,5711326,"TERMINAL",0,0,"2466679888895555555555555664166",,terminal_output +3185,5713316,"TERMINAL",0,0,"4688893:014040404041777777777777788388",,terminal_output +3186,5715333,"TERMINAL",0,0,"68401020413222239999999999999202052020",,terminal_output +3187,5717336,"TERMINAL",0,0,"84022235444452121515151515151515151512122722",,terminal_output +3188,5719337,"TERMINAL",0,0,"2024445766667333333333333344944",,terminal_output +3189,5721348,"TERMINAL",0,0,"2466679888895555555555555665166",,terminal_output +3190,5723358,"TERMINAL",0,0,"468889115050505051777777777777788388",,terminal_output +3191,5725371,"TERMINAL",0,0,"68502030513222239999999999999303053030",,terminal_output +3192,5726347,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",0,0,"",shellscript,tab 
+3193,5727416,"TERMINAL",0,0,"850222354444531312:013:014:018:011:011:011:011:011:019:013122722",,terminal_output +3194,5727590,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",893,0,"",shellscript,selection_mouse +3195,5727595,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",892,0,"",shellscript,selection_command +3196,5729434,"TERMINAL",0,0,"3024445766667333333333333344944",,terminal_output +3197,5731393,"TERMINAL",0,0,"2466679888895555555555555668:0166",,terminal_output +3198,5733398,"TERMINAL",0,0,"468889218:008:008:008:008:01777777777777788388",,terminal_output +3199,5735410,"TERMINAL",0,0,"681:0030401:013222239999999999999404054040",,terminal_output +3200,5737421,"TERMINAL",0,0,"83:0022235444454141111111111111111111114122722",,terminal_output +3201,5739430,"TERMINAL",0,0,"4024445766667333333333333344944",,terminal_output +3202,5741440,"TERMINAL",0,0,"2466679888895555555555555661166",,terminal_output +3203,5741620,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/spawn_tokenizers.sh",0,0,"",shellscript,tab +3204,5741652,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/spawn_tokenizers.sh",159,0,"",shellscript,selection_command +3205,5743448,"TERMINAL",0,0,"468889311010101011777777777777788388",,terminal_output +3206,5745478,"TERMINAL",0,0,"68104050113222239999999999999505055050",,terminal_output +3207,5747577,"TERMINAL",0,0,"81022235444455151212121212121212121215122722",,terminal_output +3208,5749525,"TERMINAL",0,0,"5024445766667333333333333344944",,terminal_output +3209,5751541,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",0,0,"",shellscript,tab +3210,5751564,"TERMINAL",0,0,"2466679888895555555555555662166",,terminal_output +3211,5753493,"TERMINAL",0,0,"468889412020202021777777777777788388",,terminal_output +3212,5755513,"TERMINAL",0,0,"6820509:002132222399999999999993:006:0058:008:00",,terminal_output +3213,5757566,"TERMINAL",0,0,"82022235444458:011:01313131313131313131319:0122722",,terminal_output +3214,5759527,"TERMINAL",0,0,"7:0024445766667333333333333344944",,terminal_output +3215,5761606,"TERMINAL",0,0,"2466679888895555555555555663166",,terminal_output +3216,5763551,"TERMINAL",0,0,"468889513030303031777777777777788388",,terminal_output +3217,5765553,"TERMINAL",0,0,"68303:0010313222239999999999999101051010",,terminal_output +3218,5767562,"TERMINAL",0,0,"83022235444451111414141414141414141411122722",,terminal_output +3219,5769593,"TERMINAL",0,0,"1011326:57:05 2025-10-05T01:48:31429:45:19Priority)29648:55:30Priority)2444Priority)Priority)Priority)Priority)Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)645:35 11:34523:13 33:571652643654466309:18:339:18:33 38:37754:088:57 58:13655:57 1:01:135542:43433:433424:4322818:4373183419350330121 franz.sram8:35:158:3521:43629972 alfred.ngu6:35:3457:27 1:59:431101 franz.sram4:55:027:47:57 2:09:13423:3933:14130031 alfred.ng1 164T11:50:3011:50:56 8:06:1400529993 nishant.k2 38418:29:184T01:48:31 18:08:39[002,007]804630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 20:58:14 1-00:00:00 hai001",,terminal_output +3220,5771584,"TERMINAL",0,0,"2466679888895555555555555664166",,terminal_output +3221,5773594,"TERMINAL",0,0,"47999404:024141414142888888888888899499",,terminal_output +3222,5775600,"TERMINAL",0,0,"79411121243333420205050505050505050505020212162121",,terminal_output +3223,5777611,"TERMINAL",0,0,"9413334655556222222222222233833",,terminal_output 
+3224,5779632,"TERMINAL",0,0,"21355568777784444444444444555055",,terminal_output +3225,5780571,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/spawn_tokenizers.sh",0,0,"",shellscript,tab +3226,5780572,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/spawn_tokenizers.sh",159,0,"",shellscript,selection_command +3227,5781631,"TERMINAL",0,0,"35777810999950666666666666677277",,terminal_output +3228,5783637,"TERMINAL",0,0,"57999502515151512888888888888899499",,terminal_output +3229,5785645,"TERMINAL",0,0,"79512131243333430303:004:005:009:002:002:002:002:002:002:00:0030313163131",,terminal_output +3230,5787657,"TERMINAL",0,0,"9513334655556222222222222233833",,terminal_output +3231,5787978,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",0,0,"",shellscript,tab +3232,5789667,"TERMINAL",0,0,"31355568777784444444444444559:0055",,terminal_output +3233,5790069,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3234,5791705,"TERMINAL",0,0,"3577782099999:00666666666666677277",,terminal_output +3235,5793686,"TERMINAL",0,0,"579992:0029:019:019:019:012888888888888899499",,terminal_output +3236,5795695,"TERMINAL",0,0,"792:013141243333440401010101010101010101040414164141",,terminal_output +3237,5797704,"TERMINAL",0,0,"94:013334655556222222222222233833",,terminal_output +3238,5799713,"TERMINAL",0,0,"41355568777784444444444444551055",,terminal_output +3239,5801723,"TERMINAL",0,0,"35777830999910666666666666677277",,terminal_output +3240,5802754,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3241,5802754,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/tokenizer.sh",325,0,"",shellscript,selection_command +3242,5803732,"TERMINAL",0,0,"57999102111111112888888888888899499",,terminal_output +3243,5805738,"TERMINAL",0,0,"79114151243333450502020202020202020202050515165151",,terminal_output +3244,5807750,"TERMINAL",0,0,"9113334655556222222222222233833",,terminal_output +3245,5808649,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3246,5809756,"TERMINAL",0,0,"51355568777784444444444444552055",,terminal_output +3247,5811768,"TERMINAL",0,0,"35777840999920666666666666677277",,terminal_output +3248,5813777,"TERMINAL",0,0,"57999202212121212888888888888899499",,terminal_output +3249,5815796,"TERMINAL",0,0,"79215110:0124333349:002:003030303030303030303010:004:017:0169:019:01",,terminal_output +3250,5817792,"TERMINAL",0,0,"9213334655556222222222222233833",,terminal_output +3251,5818773,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3252,5818774,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/tokenizer.sh",325,0,"",shellscript,selection_command +3253,5819803,"TERMINAL",0,0,"8:01355568777784444444444444553055",,terminal_output +3254,5821916,"TERMINAL",0,0,"35777850999930666666666666677277",,terminal_output +3255,5823336,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3256,5823819,"TERMINAL",0,0,"57999302313131312888888888888899499",,terminal_output +3257,5825826,"TERMINAL",0,0,"79314:0111243333410104040404040404040404010111161111",,terminal_output +3258,5827833,"TERMINAL",0,0,"9313334655556222222222222233833",,terminal_output +3259,5829905,"TERMINAL",0,0,"11355568777784444444444444554055",,terminal_output 
+3260,5830625,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",0,0,"",shellscript,tab +3261,5830625,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",325,0,"",shellscript,selection_command +3262,5831860,"TERMINAL",0,0,"3577785:00999940666666666666677277",,terminal_output +3263,5833866,"TERMINAL",0,0,"57999402414141412888888888888899499",,terminal_output +3264,5835871,"TERMINAL",0,0,"79411121243333420205050505050505050505020212162121",,terminal_output +3265,5836485,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3266,5837884,"TERMINAL",0,0,"9413334655556222222222222233833",,terminal_output +3267,5838215,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",0,0,"",shellscript,tab +3268,5839890,"TERMINAL",0,0,"21355568777784444444444444555055",,terminal_output +3269,5842017,"TERMINAL",0,0,"35777810999950666666666666677277",,terminal_output +3270,5843904,"TERMINAL",0,0,"57999502515151512888888888888899499",,terminal_output +3271,5845913,"TERMINAL",0,0,"79512131243333430304:005:006:0020:003:003:003:003:003:001:0030313163131",,terminal_output +3272,5847935,"TERMINAL",0,0,"9513334655556222222222222233833",,terminal_output +3273,5849976,"TERMINAL",0,0,"313555687777844444444444445510:0055",,terminal_output +3274,5852022,"TERMINAL",0,0,"35777820999940:00666666666666677277",,terminal_output +3275,5853950,"TERMINAL",0,0,"579993:00240:0140:0140:0140:012888888888888899499",,terminal_output +3276,5854742,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",0,0,"",shellscript,tab +3277,5854742,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/spawn_tokenizers.sh",159,0,"",shellscript,selection_command +3278,5855962,"TERMINAL",0,0,"793:013141243333440401010101010101010101040414164141",,terminal_output +3279,5857967,"TERMINAL",0,0,"95:013334655556222222222222233833",,terminal_output +3280,5859984,"TERMINAL",0,0,"41355568777784444444444444551055",,terminal_output +3281,5862062,"TERMINAL",0,0,"35777830999910666666666666677277",,terminal_output +3282,5864001,"TERMINAL",0,0,"57999102111111112888888888888899499",,terminal_output +3283,5866006,"TERMINAL",0,0,"79114151243333450502020202020202020202050515165151",,terminal_output +3284,5866582,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh",0,0,"",shellscript,tab +3285,5868065,"TERMINAL",0,0,"9113334655556222222222222233833",,terminal_output +3286,5868698,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",0,0,"",shellscript,tab +3287,5870061,"TERMINAL",0,0,"51355568777784444444444444552055",,terminal_output +3288,5872034,"TERMINAL",0,0,"35777840999920666666666666677277",,terminal_output +3289,5874040,"TERMINAL",0,0,"57999202212121212888888888888899499",,terminal_output +3290,5876059,"TERMINAL",0,0,"7921511:0124333341:00:003:00303030303030303030301:005:018:0161:00:011:00:01",,terminal_output +3291,5878063,"TERMINAL",0,0,"9213334655556222222222222233833",,terminal_output +3292,5880072,"TERMINAL",0,0,"9:01355568777784444444444444553055",,terminal_output +3293,5882085,"TERMINAL",0,0,"35777850999930666666666666677277",,terminal_output +3294,5883570,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",0,0,"",shellscript,tab +3295,5883570,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/tokenizer.sh",325,0,"",shellscript,selection_command +3296,5884090,"TERMINAL",0,0,"57999302313131312888888888888899499",,terminal_output 
+3297,5886096,"TERMINAL",0,0,"79315:0111243333410104040404040404040404010111161111",,terminal_output +3298,5888105,"TERMINAL",0,0,"9313334655556222222222222233833",,terminal_output +3299,5888762,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",0,0,"",shellscript,tab +3300,5890170,"TERMINAL",0,0,"11355568777784444444444444554055",,terminal_output +3301,5892123,"TERMINAL",0,0,"3577786:00999940666666666666677277",,terminal_output +3302,5894134,"TERMINAL",0,0,"57999402414141412888888888888899499",,terminal_output +3303,5896158,"TERMINAL",0,0,"79411121243333420205050505050505050505020212162121",,terminal_output +3304,5898149,"TERMINAL",0,0,"9413334655556222222222222233833",,terminal_output +3305,5900203,"TERMINAL",0,0,"21355568777784444444444444555055",,terminal_output +3306,5902173,"TERMINAL",0,0,"35777810999950666666666666677277",,terminal_output +3307,5904181,"TERMINAL",0,0,"57999502515151512888888888888899499",,terminal_output +3308,5906297,"TERMINAL",0,0,"79512131243333430305:006:007:001:004:004:004:004:004:002:0030313163131",,terminal_output +3309,5908198,"TERMINAL",0,0,"9513334655556222222222222233833",,terminal_output +3310,5910292,"TERMINAL",0,0,"31355568777784444444444444551:0055",,terminal_output +3311,5912215,"TERMINAL",0,0,"3577782099991:00666666666666677277",,terminal_output +3312,5914283,"TERMINAL",0,0,"579994:0021:011:011:011:012888888888888899499",,terminal_output +3313,5916233,"TERMINAL",0,0,"794:013141243333440401010101010101010101040414164141",,terminal_output +3314,5918244,"TERMINAL",0,0,"96:013334655556222222222222233833",,terminal_output +3315,5920254,"TERMINAL",0,0,"41355568777784444444444444551055",,terminal_output +3316,5922258,"TERMINAL",0,0,"35777830999910666666666666677277",,terminal_output +3317,5924267,"TERMINAL",0,0,"57999102111111112888888888888899499",,terminal_output +3318,5926369,"TERMINAL",0,0,"79114151243333450502020202020202020202050515165151",,terminal_output +3319,5927965,"TERMINAL",0,0,"bash /home/franz.srambical/jafar/slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4_full_precision/spawn_tokenizers.sh alien amidar assault asterix bank_heist battle_zone boxing breakout chopper_command crazy_climber demon_attack pong",,terminal_command +3320,5928044,"TERMINAL",0,0,"]633;CSubmitting tokenizer jobs for environments: alien amidar assault asterix bank_heist battle_zone boxing breakout chopper_command crazy_climber demon_attack pong\r\nSubmitted tokenizer_atari_alien_dev_lr_3e-4_full_precision (ENV_NAME=alien) as job 30143\r\nSubmitted tokenizer_atari_amidar_dev_lr_3e-4_full_precision (ENV_NAME=amidar) as job 30144\r\nSubmitted tokenizer_atari_assault_dev_lr_3e-4_full_precision (ENV_NAME=assault) as job 30145\r\nSubmitted tokenizer_atari_asterix_dev_lr_3e-4_full_precision (ENV_NAME=asterix) as job 30146\r\nSubmitted tokenizer_atari_bank_heist_dev_lr_3e-4_full_precision (ENV_NAME=bank_heist) as job 30147\r\nSubmitted tokenizer_atari_battle_zone_dev_lr_3e-4_full_precision (ENV_NAME=battle_zone) as job 30148\r\nSubmitted tokenizer_atari_boxing_dev_lr_3e-4_full_precision (ENV_NAME=boxing) as job 30149\r\nSubmitted tokenizer_atari_breakout_dev_lr_3e-4_full_precision (ENV_NAME=breakout) as job 30150\r\nSubmitted tokenizer_atari_chopper_command_dev_lr_3e-4_full_precision (ENV_NAME=chopper_command) as job 30151\r\nSubmitted tokenizer_atari_crazy_climber_dev_lr_3e-4_full_precision (ENV_NAME=crazy_climber) as job 30152\r\nSubmitted tokenizer_atari_demon_attack_dev_lr_3e-4_full_precision (ENV_NAME=demon_attack) as job 30153\r\nSubmitted 
tokenizer_atari_pong_dev_lr_3e-4_full_precision (ENV_NAME=pong) as job 30154\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +3321,5928287,"TERMINAL",0,0,"9113335459:495359:495259:495159:495059:49Priority)\r 30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)48 8 PD59:49N/A 0:00(Priority)47 8 PD59:49N/A 0:00(Priority)46 8 PD59:49N/A 0:00(Priority)45 8 PD59:49N/A 0:00(Priority)44 8 PD59:49N/A 0:00(Priority)43 8 PD59:49N/A 0:00(Priority)41 8 PDN/A 0:00(Priority)40 8 PD9:18:33N/A 0:0(Priority)39 8 PD9:18:33N/A 0:0(Priority)38 8 PD9:18:33N/A 0:0(Priority)37 8 PD9:18:33N/A 0:0(Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)369:18:339:45:35 14:144359:18:339:23:13 36:365319:18:339:18:34 41:155329:18:339:18:34 41:154339:18:339:18:34 41:155349:18:339:18:34 41:1530130 franz.sram9:18:339:18:33 41:165278:54:088:58:57 1:00:525126 franz.sram8:35:158:55:57 1:03:525125 franz.sram8:35:158:44:27 1:15:22130124 franz.sram1 164T18:35:1518:43:27 1:16:22001123 franz.sram4T18:35:154T18:42:27 1:17:221122 franz.sram4T18:35:154T18:38:27 1:21:2230117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:24:22 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:24:22 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:24:22 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:24:22 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:24:22 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 2:02:22 1-00:00:00 hai00130101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 2:11:52 1-00:00:00 hai00430032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 8:05:53 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 8:08:53 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 18:11:18 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 21:00:53 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 21:00:53 1-00:00:00 hai001",,terminal_output +3322,5930295,"TERMINAL",0,0,"51355568777784444444444444552055",,terminal_output +3323,5932301,"TERMINAL",0,0,"35777840999920666666666666677277",,terminal_output +3324,5934311,"TERMINAL",0,0,"57999202212121212888888888888899499",,terminal_output +3325,5936318,"TERMINAL",0,0,"7921512:0124333341:004:00303030303030303030302:006:019:0161:011:01",,terminal_output +3326,5938338,"TERMINAL",0,0,"9213334655556222222222222233833",,terminal_output +3327,5940405,"TERMINAL",0,0,"20:00:01355568777784444444444444553055",,terminal_output +3328,5942348,"TERMINAL",0,0,"35777850999930666666666666677277",,terminal_output +3329,5944357,"TERMINAL",0,0,"57999302313131312888888888888899499",,terminal_output +3330,5946369,"TERMINAL",0,0,"79316:0111243333410104040404040404040404010111161111",,terminal_output +3331,5948378,"TERMINAL",0,0,"9313334655556222222222222233833",,terminal_output +3332,5950432,"TERMINAL",0,0,"11355568777784444444444444554055",,terminal_output +3333,5950889,"TERMINAL",0,0,"bash /home/franz.srambical/jafar/slurm/dev/franz/berlin/atari/tokenizer_lr_3e-5_full_precision/spawn_tokenizers.sh alien amidar assault asterix bank_heist battle_zone boxing breakout chopper_command crazy_climber demon_attack 
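The "Submitted ... as job NNNNN" lines above are produced by spawn_tokenizers.sh, whose body is not captured in this recording. A hedged sketch of a wrapper consistent with that output, issuing one sbatch submission per environment name passed on the command line (sbatch's --parsable, --job-name, and --export flags are standard; the suffix variable is an assumption):

#!/usr/bin/env bash
# Hypothetical reconstruction -- the real spawn_tokenizers.sh is not shown in this trace.
set -euo pipefail
script_dir="$(cd "$(dirname "$0")" && pwd)"
suffix="lr_3e-4_full_precision"  # assumed to vary per directory copy
echo "Submitting tokenizer jobs for environments: $*"
for env_name in "$@"; do
  # --parsable makes sbatch print just the job id, matching the logged format.
  job_id=$(sbatch --parsable \
    --job-name="tokenizer_atari_${env_name}_dev_${suffix}" \
    --export=ALL,ENV_NAME="${env_name}" \
    "${script_dir}/tokenizer.sh")
  echo "Submitted tokenizer_atari_${env_name}_dev_${suffix} (ENV_NAME=${env_name}) as job ${job_id}"
done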
pong",,terminal_command +3334,5950951,"TERMINAL",0,0,"]633;CSubmitting tokenizer jobs for environments: alien amidar assault asterix bank_heist battle_zone boxing breakout chopper_command crazy_climber demon_attack pong\r\nSubmitted tokenizer_atari_alien_dev_lr_3e-5_full_precision (ENV_NAME=alien) as job 30155\r\nSubmitted tokenizer_atari_amidar_dev_lr_3e-5_full_precision (ENV_NAME=amidar) as job 30156\r\nSubmitted tokenizer_atari_assault_dev_lr_3e-5_full_precision (ENV_NAME=assault) as job 30157\r\nSubmitted tokenizer_atari_asterix_dev_lr_3e-5_full_precision (ENV_NAME=asterix) as job 30158\r\nSubmitted tokenizer_atari_bank_heist_dev_lr_3e-5_full_precision (ENV_NAME=bank_heist) as job 30159\r\nSubmitted tokenizer_atari_battle_zone_dev_lr_3e-5_full_precision (ENV_NAME=battle_zone) as job 30160\r\nSubmitted tokenizer_atari_boxing_dev_lr_3e-5_full_precision (ENV_NAME=boxing) as job 30161\r\nSubmitted tokenizer_atari_breakout_dev_lr_3e-5_full_precision (ENV_NAME=breakout) as job 30162\r\nSubmitted tokenizer_atari_chopper_command_dev_lr_3e-5_full_precision (ENV_NAME=chopper_command) as job 30163\r\nSubmitted tokenizer_atari_crazy_climber_dev_lr_3e-5_full_precision (ENV_NAME=crazy_climber) as job 30164\r\nSubmitted tokenizer_atari_demon_attack_dev_lr_3e-5_full_precision (ENV_NAME=demon_attack) as job 30165\r\nSubmitted tokenizer_atari_pong_dev_lr_3e-5_full_precision (ENV_NAME=pong) as job 30166\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +3335,5952397,"TERMINAL",0,0,"\r3577730166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)87:00999940666666",,terminal_output +3336,5954405,"TERMINAL",0,0,"57999402414141412888888",,terminal_output +3337,5956470,"TERMINAL",0,0,"794111212433334202050505050",,terminal_output +3338,5958425,"TERMINAL",0,0,"9413334655556222222",,terminal_output +3339,5959600,"TERMINAL",0,0,"bash /home/franz.srambical/jafar/slurm/dev/franz/berlin/atari/tokenizer_lr_3e-6_full_precision/spawn_tokenizers.sh alien amidar assault asterix bank_heist battle_zone boxing breakout chopper_command crazy_climber demon_attack pong",,terminal_command +3340,5959669,"TERMINAL",0,0,"]633;CSubmitting tokenizer jobs for environments: alien amidar assault asterix bank_heist battle_zone boxing breakout chopper_command crazy_climber demon_attack pong\r\nSubmitted tokenizer_atari_alien_dev_lr_3e-6_full_precision (ENV_NAME=alien) as job 30167\r\nSubmitted tokenizer_atari_amidar_dev_lr_3e-6_full_precision (ENV_NAME=amidar) as job 30168\r\nSubmitted tokenizer_atari_assault_dev_lr_3e-6_full_precision (ENV_NAME=assault) as job 30169\r\nSubmitted 
tokenizer_atari_asterix_dev_lr_3e-6_full_precision (ENV_NAME=asterix) as job 30170\r\nSubmitted tokenizer_atari_bank_heist_dev_lr_3e-6_full_precision (ENV_NAME=bank_heist) as job 30171\r\nSubmitted tokenizer_atari_battle_zone_dev_lr_3e-6_full_precision (ENV_NAME=battle_zone) as job 30172\r\nSubmitted tokenizer_atari_boxing_dev_lr_3e-6_full_precision (ENV_NAME=boxing) as job 30173\r\nSubmitted tokenizer_atari_breakout_dev_lr_3e-6_full_precision (ENV_NAME=breakout) as job 30174\r\nSubmitted tokenizer_atari_chopper_command_dev_lr_3e-6_full_precision (ENV_NAME=chopper_command) as job 30175\r\nSubmitted tokenizer_atari_crazy_climber_dev_lr_3e-6_full_precision (ENV_NAME=crazy_climber) as job 30176\r\nSubmitted tokenizer_atari_demon_attack_dev_lr_3e-6_full_precision (ENV_NAME=demon_attack) as job 30177\r\nSubmitted tokenizer_atari_pong_dev_lr_3e-6_full_precision (ENV_NAME=pong) as job 30178\r\n]0;franz.srambical@hai-login2:~/jafar",,terminal_output +3341,5960432,"TERMINAL",0,0,"\r21355530178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)6",,terminal_output +3342,5962439,"TERMINAL",0,0,"357778",,terminal_output +3343,5964448,"TERMINAL",0,0,"5799950",,terminal_output +3344,5964968,"TERMINAL",0,0,"watch",,terminal_focus +3345,5966459,"TERMINAL",0,0,"795121312",,terminal_output +3346,5968466,"TERMINAL",0,0,"9513334",,terminal_output +3347,5970477,"TERMINAL",0,0,"3135556",,terminal_output +3348,5972488,"TERMINAL",0,0,"357778",,terminal_output +3349,5974505,"TERMINAL",0,0,"579995:00",,terminal_output +3350,5976544,"TERMINAL",0,0,"795:0131412",,terminal_output +3351,5977150,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:00:38 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:00 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:02 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:06:32 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:12:42 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 
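After the third sweep is submitted, focus switches to a watch pane running squeue. The exact invocation is not recorded; a sketch that reproduces the 2-second refresh and the column set visible in the snapshots (JOBID, USER, PARTITION, NODES, CPUS, ST, SUBMIT_TIME, START_TIME, TIME, TIME_LIMIT, NODELIST(REASON)), using squeue's documented --format specifiers:

# Refresh every 2 seconds, matching the "Every 2.0s: squeue" header in the snapshots.
watch -n 2 'squeue --format="%.8i %.10u %.10P %.6D %.5C %.3t %.20V %.20S %.10M %.11l %R"'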
["Every 2.0s: squeue" watch snapshots (Sat Oct 4 20:00:38-20:00:43 2025) omitted as repeated terminal redraws. Recoverable state: the 36 newly submitted tokenizer jobs (30143-30178, standard partition, 8 CPUs each) pending with reason (Priority); the earlier batch 30137-30141 also pending; tokenizer runs 30117-30136 running on hai001 and hai004-hai006; jobs from xiao.liu, alfred.ngu, and nishant.ku occupying the remaining nodes.]
hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:09 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:09 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 42:10 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:01:46 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:04:46 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:16:16 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:17:16 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:18:16 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:22:16 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:16 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:16 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:16 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:16 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:16 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 2:03:16 1-00:00:00 hai00130101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 2:12:46 1-00:00:00 hai00430032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 8:06:47 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 8:09:47 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 18:12:12 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 21:01:47 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 21:01:47 1-00:00:00 hai001",,terminal_output +3356,5982365,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:00:43 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:05 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:07 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:06:37 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:12:47 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 
1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 15:08 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 37:30 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:09 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:09 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:09 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:09 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 42:10 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 
1:01:46 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:04:46 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:16:16 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:17:16 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:18:16 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:22:16 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:16 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:16 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:16 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:16 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:16 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 2:03:16 1-00:00:00 hai00130101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 2:12:46 1-00:00:00 hai00430032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 8:06:47 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 8:09:47 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 18:12:12 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 21:01:47 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 21:01:47 1-00:00:00 hai001",,terminal_output +3357,5984431,"TERMINAL",0,0,"57999102111111112888888888888899499",,terminal_output +3358,5986368,"TERMINAL",0,0,"79114151243333450502020202020202020202050515165151",,terminal_output +3359,5988377,"TERMINAL",0,0,"9113334655556222222222222233833",,terminal_output +3360,5990385,"TERMINAL",0,0,"51355568777784444444444444552055",,terminal_output +3361,5992397,"TERMINAL",0,0,"35777840999920666666666666677277",,terminal_output +3362,5994404,"TERMINAL",0,0,"57999202212121212888888888888899499",,terminal_output +3363,5996418,"TERMINAL",0,0,"7921513:0124333342:005:00303030303030303030303:007:0110:0162:012:01",,terminal_output +3364,5998422,"TERMINAL",0,0,"9213334655556222222222222233833",,terminal_output +3365,6000447,"TERMINAL",0,0,"1:01355568777784444444444444553055",,terminal_output +3366,6002446,"TERMINAL",0,0,"35777850999930666666666666677277",,terminal_output +3367,6004458,"TERMINAL",0,0,"57999302313131312888888888888899499",,terminal_output +3368,6006462,"TERMINAL",0,0,"79317:0111243333410104040404040404040404010111161111",,terminal_output +3369,6007272,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:08 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:30 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:32 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:02 23:59:00 hai00830025 xiao.liu interacti 1 64 R 
2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:12 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 
2025-10-04T19:18:33 2025-10-04T19:45:35 15:33 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 37:55 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:34 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:34 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:34 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:34 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 42:35 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:02:11 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:05:11 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:16:41 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:17:41 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:18:41 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:22:41 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:41 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:41 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:41 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:41 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:41 1-00:00:00 hai006Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:08 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:30 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:32 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:02 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:12 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 
0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 15:33 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 37:55 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:34 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:34 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:34 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:34 1-00:00:00 hai006",,terminal_output +3370,6007453,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:08 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:30 23:59:00 hai00330028 xiao.liu 
interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:32 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:02 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:12 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)",,terminal_output +3371,6009463,"TERMINAL",0,0,"102444",,terminal_output 
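The `Every 2.0s: squeue` banner indicates these snapshots come from `watch` polling Slurm at its default 2-second interval. The recorded view includes non-default columns (SUBMIT_TIME, CPUS, START_TIME, TIME_LIMIT), so the session presumably used a custom format (e.g. via the `SQUEUE_FORMAT` environment variable). A minimal sketch of a command that reproduces this view, assuming standard `squeue` format specifiers; the exact command is not part of the recording:

```sh
# Hypothetical reconstruction of the monitoring command behind the snapshots
# above -- not captured in the recording itself. watch re-runs squeue every
# 2 s; the format string mirrors the recorded column order: %i jobid, %u user,
# %P partition, %D nodes, %C cpus, %t state, %V submit time, %S start time,
# %M elapsed, %l time limit, %R nodelist/reason.
watch -n 2 'squeue --format="%.8i %.10u %.9P %.5D %.5C %.2t %V %S %.10M %.11l %R"'
```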
+3372,6010252,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:11 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:33 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:35 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:05 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:15 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram 
standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)",,terminal_output +3373,6012267,"TERMINAL",0,0,"35777",,terminal_output +3374,6014281,"TERMINAL",0,0,"57999",,terminal_output +3375,6016172,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:17 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:39 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:41 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:11 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:21 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 
(Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)",,terminal_output +3376,6016780,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:18 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:40 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:42 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:12 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:22 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 
(Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)",,terminal_output +3377,6018854,"TERMINAL",0,0,"202444",,terminal_output +3378,6018950,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:20 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:42 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:44 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:14 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:24 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram 
standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)",,terminal_output +3379,6020960,"TERMINAL",0,0,"24666",,terminal_output +3380,6021603,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:22 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:44 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:46 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:16 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:26 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 
2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:22 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:44 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:46 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:16 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:26 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 
(Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:22 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:44 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:46 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:16 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:26 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 
2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 15:47 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 38:09 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 
hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 42:49 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:02:25 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:05:25 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:16:55 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:17:55 1-00:00:00 hai001Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:22 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:44 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:46 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:16 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:26 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram 
standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 15:47 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 38:09 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 42:49 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:02:25 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:05:25 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:16:55 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:17:55 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:18:55 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:22:55 1-00:00:00 hai001Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:22 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:44 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:46 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:16 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:26 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 
2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 15:47 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 38:09 1-00:00:00 hai00530131 franz.sram standard 1 16 R 
2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:48 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 42:49 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:02:25 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:05:25 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:16:55 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:17:55 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:18:55 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:22:55 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:55 1-00:00:00 hai001Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:01:23 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 47:45 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:05:47 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:07:17 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:13:27 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 
1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 15:48 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 38:10 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:49 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:49 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:49 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 42:49 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 42:50 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:02:26 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:05:26 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:16:56 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:17:56 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:18:56 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:22:56 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:56 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:25:56 1-00:00:00 hai004",,terminal_output +3381,6023617,"TERMINAL",0,0,"5799950251515151288888888",,terminal_output +3382,6025625,"TERMINAL",0,0,"79512131243333430307:008:009:003:006:006:00",,terminal_output +3383,6027642,"TERMINAL",0,0,"951333465555622222222",,terminal_output 
+3384,6029661,"TERMINAL",0,0,"313555687777844444444",,terminal_output +3385,6031654,"TERMINAL",0,0,"3577782099993:0066666666",,terminal_output +3386,6033665,"TERMINAL",0,0,"579996:0023:013:013:013:01288888888",,terminal_output +3387,6035670,"TERMINAL",0,0,"796:01314124333344040101010101010",,terminal_output +3388,6037683,"TERMINAL",0,0,"98:01333465555622222222",,terminal_output +3389,6039689,"TERMINAL",0,0,"413555687777844444444",,terminal_output +3390,6041699,"TERMINAL",0,0,"3577783099991066666666",,terminal_output +3391,6043717,"TERMINAL",0,0,"5799910211111111288888888",,terminal_output +3392,6045717,"TERMINAL",0,0,"7911415124333345050202020202020",,terminal_output +3393,6047726,"TERMINAL",0,0,"911333465555622222222",,terminal_output +3394,6049745,"TERMINAL",0,0,"513555687777844444444",,terminal_output +3395,6051746,"TERMINAL",0,0,"3577784099992066666666",,terminal_output +3396,6053757,"TERMINAL",0,0,"5799920221212121288888888",,terminal_output +3397,6055804,"TERMINAL",0,0,"7921514:0124333343:006:00303030303030",,terminal_output +3398,6057776,"TERMINAL",0,0,"921333465555622222222",,terminal_output +3399,6059783,"TERMINAL",0,0,"2:013555687777844444444",,terminal_output +3400,6061793,"TERMINAL",0,0,"3577785099993066666666",,terminal_output +3401,6063801,"TERMINAL",0,0,"5799930231313131288888888",,terminal_output +3402,6065816,"TERMINAL",0,0,"79318:011124333341010404040404040",,terminal_output +3403,6067842,"TERMINAL",0,0,"931333465555622222222",,terminal_output +3404,6069830,"TERMINAL",0,0,"113555687777844444444",,terminal_output +3405,6071842,"TERMINAL",0,0,"3577789:0099994066666666",,terminal_output +3406,6073849,"TERMINAL",0,0,"5799940241414141288888888",,terminal_output +3407,6075861,"TERMINAL",0,0,"7941112124333342020505050505050",,terminal_output +3408,6077871,"TERMINAL",0,0,"941333465555622222222",,terminal_output +3409,6079880,"TERMINAL",0,0,"213555687777844444444",,terminal_output +3410,6081889,"TERMINAL",0,0,"3577781099995066666666",,terminal_output +3411,6083898,"TERMINAL",0,0,"5799950251515151288888888",,terminal_output +3412,6085908,"TERMINAL",0,0,"79512131243333430308:009:0020:004:007:007:00",,terminal_output +3413,6087918,"TERMINAL",0,0,"951333465555622222222",,terminal_output +3414,6089999,"TERMINAL",0,0,"313555687777844444444",,terminal_output +3415,6091941,"TERMINAL",0,0,"3577782099994:0066666666",,terminal_output +3416,6093950,"TERMINAL",0,0,"579997:0024:014:014:014:01288888888",,terminal_output +3417,6096050,"TERMINAL",0,0,"797:01314124333344040101010101010",,terminal_output +3418,6097968,"TERMINAL",0,0,"99:01333465555622222222",,terminal_output +3419,6099978,"TERMINAL",0,0,"413555687777844444444",,terminal_output +3420,6102074,"TERMINAL",0,0,"3577783099991066666666",,terminal_output +3421,6103997,"TERMINAL",0,0,"5799910211111111288888888",,terminal_output +3422,6106006,"TERMINAL",0,0,"7911415124333345050202020202020",,terminal_output +3423,6108023,"TERMINAL",0,0,"911333465555622222222",,terminal_output +3424,6110041,"TERMINAL",0,0,"513555687777844444444",,terminal_output +3425,6112071,"TERMINAL",0,0,"3577784099992066666666",,terminal_output +3426,6114041,"TERMINAL",0,0,"5799920221212121288888888",,terminal_output +3427,6116051,"TERMINAL",0,0,"7921515:0124333344:007:00303030303030",,terminal_output +3428,6118062,"TERMINAL",0,0,"921333465555622222222",,terminal_output +3429,6120107,"TERMINAL",0,0,"3:013555687777844444444",,terminal_output +3430,6122084,"TERMINAL",0,0,"3577785099993066666666",,terminal_output 
+3431,6124096,"TERMINAL",0,0,"5799930231313131288888888",,terminal_output +3432,6126103,"TERMINAL",0,0,"79319:011124333341010404040404040",,terminal_output +3433,6128108,"TERMINAL",0,0,"931333465555622222222",,terminal_output +3434,6130129,"TERMINAL",0,0,"113555687777844444444",,terminal_output +3435,6132127,"TERMINAL",0,0,"35777840:0099994066666666",,terminal_output +3436,6134138,"TERMINAL",0,0,"5799940241414141288888888",,terminal_output +3437,6136147,"TERMINAL",0,0,"7941112124333342020505050505050",,terminal_output +3438,6138155,"TERMINAL",0,0,"941333465555622222222",,terminal_output +3439,6140172,"TERMINAL",0,0,"213555687777844444444",,terminal_output +3440,6142172,"TERMINAL",0,0,"3577781099995066666666",,terminal_output +3441,6144185,"TERMINAL",0,0,"5799950251515151288888888",,terminal_output +3442,6146195,"TERMINAL",0,0,"79512131243333430309:0020:001:005:008:008:00",,terminal_output +3443,6148203,"TERMINAL",0,0,"951333465555622222222",,terminal_output +3444,6150218,"TERMINAL",0,0,"313555687777844444444",,terminal_output +3445,6152224,"TERMINAL",0,0,"3577782099995:0066666666",,terminal_output +3446,6154240,"TERMINAL",0,0,"579998:0025:015:015:015:01288888888",,terminal_output +3447,6156256,"TERMINAL",0,0,"798:01314124333344040101010101010",,terminal_output +3448,6158253,"TERMINAL",0,0,"950:01333465555622222222",,terminal_output +3449,6160263,"TERMINAL",0,0,"413555687777844444444",,terminal_output +3450,6162271,"TERMINAL",0,0,"3577783099991066666666",,terminal_output +3451,6164346,"TERMINAL",0,0,"5799910211111111288888888",,terminal_output +3452,6166395,"TERMINAL",0,0,"7911415124333345050202020202020",,terminal_output +3453,6168299,"TERMINAL",0,0,"911333465555622222222",,terminal_output +3454,6170387,"TERMINAL",0,0,"513555687777844444444",,terminal_output +3455,6172318,"TERMINAL",0,0,"3577784099992066666666",,terminal_output +3456,6174329,"TERMINAL",0,0,"5799920221212121288888888",,terminal_output +3457,6176337,"TERMINAL",0,0,"7921516:0124333345:008:00303030303030",,terminal_output +3458,6178359,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:03:58 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 50:20 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:08:22 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:09:52 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:16:02 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 
+3458,6178359,"TERMINAL",0,0,"Every 2.0s: squeue    hai-login2.haicore.berlin: Sat Oct 4 20:03:58 2025\n\nJOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\n30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)\n30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19 N/A 0:00 23:59:00 (Priority)\n30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30 N/A 0:00 23:59:00 (Priority)\n30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 50:20 23:59:00 hai003\n30028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:08:22 23:59:00 hai006\n30026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:09:52 23:59:00 hai008\n30025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:16:02 23:59:00 hai004\n30167-30178 franz.sram standard 1 8 PD 2025-10-04T20:00:21 N/A 0:00 1-00:00:00 (Priority) [12 jobs]\n30155-30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12 N/A 0:00 1-00:00:00 (Priority) [12 jobs]\n30143-30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49 N/A 0:00 1-00:00:00 (Priority) [12 jobs]\n30138-30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33 N/A 0:00 1-00:00:00 (Priority) [4 jobs]\n30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33 N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)\n30130-30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33-19:45:35 18:23-45:25 1-00:00:00 hai004,hai005,hai006 [7 jobs]\n30123-30127 franz.sram standard 1 16 R 2025-10-04T18:35:15-18:54:08 2025-10-04T18:42:27-18:58:57 1:05:01-1:21:31 1-00:00:00 hai001,hai005 [5 jobs]\n[table condensed: identical per-job rows grouped into ranges; remaining rows truncated by terminal height]",,terminal_output
+3459,6180008,"TERMINAL",0,0,"[records 3459-3465, t=6180008-6189991: further watch refreshes at 20:04:07-20:04:11 showing the same queue; only the elapsed TIME fields advance]",,terminal_output
+3466,6190051,"TERMINAL",0,0,"Every 2.0s: squeue    hai-login2.haicore.berlin: Sat Oct 4 20:04:11 2025 [same queue as record 3458 with elapsed times advanced; duplicate pending rows 30178-30137 omitted] …30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 18:36 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 40:58 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530132 franz.sram standard 1 16 R
2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 45:38 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:05:14 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:08:14 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:19:44 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:20:44 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:21:44 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:25:44 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 2:06:44 1-00:00:00 hai00130101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 2:16:14 1-00:00:00 hai00430032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 8:10:15 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 8:13:15 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 18:15:40 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai001",,terminal_output +3467,6190146,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:04:11 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 50:33 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:08:35 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:10:05 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:16:15 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 
franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 18:36 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 40:58 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 45:38 1-00:00:00 hai00530127 franz.sram 
standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:05:14 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:08:14 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:19:44 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:20:44 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:21:44 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:25:44 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 2:06:44 1-00:00:00 hai00130101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 2:16:14 1-00:00:00 hai00430032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 8:10:15 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 8:13:15 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 18:15:40 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai001Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:04:11 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 50:33 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:08:35 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:10:05 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:16:15 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram 
standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 18:36 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 40:58 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 45:38 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:05:14 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:08:14 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:19:44 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:20:44 1-00:00:00 hai00130123 franz.sram standard 1 16 R 
2025-10-04T18:35:15 2025-10-04T18:42:27 1:21:44 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:25:44 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 2:06:44 1-00:00:00 hai00130101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 2:16:14 1-00:00:00 hai00430032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 8:10:15 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 8:13:15 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 18:15:40 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai001",,terminal_output +3468,6190310,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:04:11 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 50:33 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:08:35 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:10:05 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:16:15 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 
2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 18:36 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 40:58 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 45:38 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:05:14 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:08:14 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:19:44 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:20:44 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:21:44 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:25:44 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 
hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 2:06:44 1-00:00:00 hai00130101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 2:16:14 1-00:00:00 hai00430032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 8:10:15 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 8:13:15 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 18:15:40 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai001Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:04:11 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 50:33 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:08:35 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:10:05 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:16:15 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 
2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 18:36 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 40:58 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 45:38 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:05:14 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:08:14 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:19:44 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:20:44 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:21:44 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:25:44 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 2:06:44 1-00:00:00 hai00130101 franz.sram 
standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 2:16:14 1-00:00:00 hai00430032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 8:10:15 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 8:13:15 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 18:15:40 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai001",,terminal_output +3469,6190509,"TERMINAL",0,0,"Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:04:11 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 50:33 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:08:35 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:10:05 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:16:15 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 
(Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 18:36 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 40:58 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 45:38 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:05:14 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:08:14 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:19:44 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:20:44 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:21:44 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:25:44 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 2:06:44 1-00:00:00 hai00130101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 2:16:14 1-00:00:00 hai00430032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 8:10:15 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 8:13:15 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 18:15:40 
1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai001Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:04:11 2025JOBIDUSER PARTITION NODES CPUS STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 50:33 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:08:35 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:10:05 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:16:15 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 
(Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 18:36 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 40:58 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:37 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 45:38 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:05:14 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:08:14 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:19:44 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:20:44 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:21:44 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:25:44 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:44 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 2:06:44 1-00:00:00 hai00130101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 2:16:14 1-00:00:00 hai00430032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 8:10:15 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 8:13:15 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 18:15:40 1-00:00:00 hai[002,007]30008 alfred.ngu standard 1 16 R 2025-10-03T22:58:50 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai00630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 21:05:15 1-00:00:00 hai001",,terminal_output +3470,6192618,"TERMINAL",0,0,"3577781:00999940666666666666677277Every 2.0s: squeuehai-login2.haicore.berlin: Sat Oct 4 20:04:14 2025JOBIDUSER PARTITION NODES CPUS 
STSUBMIT_TIMESTART_TIME TIME TIME_LIMIT NODELIST(REASON)30111 xiao.liu interacti 1 32 PD 2025-10-04T16:57:05 2025-10-05T01:48:31 0:00 23:59:00 (Resources)30142 xiao.liu interacti 1 64 PD 2025-10-04T19:45:19N/A 0:00 23:59:00 (Priority)30129 xiao.liu interacti 1 64 PD 2025-10-04T18:55:30N/A 0:00 23:59:00 (Priority)30027 xiao.liu interacti 1 128 R 2025-10-04T05:54:07 2025-10-04T19:13:38 50:36 23:59:00 hai00330028 xiao.liu interacti 1 128 R 2025-10-04T05:55:36 2025-10-04T05:55:36 14:08:38 23:59:00 hai00630026 xiao.liu interacti 1 128 R 2025-10-04T01:48:32 2025-10-04T05:54:06 14:10:08 23:59:00 hai00830025 xiao.liu interacti 1 64 R 2025-10-04T01:47:27 2025-10-04T01:47:56 18:16:18 23:59:00 hai00430178 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30177 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30176 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30175 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30174 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30173 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30172 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30171 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30170 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30169 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30168 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30167 franz.sram standard 1 8 PD 2025-10-04T20:00:21N/A 0:00 1-00:00:00 (Priority)30166 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30165 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30164 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30163 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30162 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30161 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30160 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30159 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30158 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30157 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30156 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30155 franz.sram standard 1 8 PD 2025-10-04T20:00:12N/A 0:00 1-00:00:00 (Priority)30154 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30153 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30152 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30151 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30150 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30149 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30148 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30147 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30146 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30145 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30144 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 
1-00:00:00 (Priority)30143 franz.sram standard 1 8 PD 2025-10-04T19:59:49N/A 0:00 1-00:00:00 (Priority)30141 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30140 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30139 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30138 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Priority)30137 franz.sram standard 1 8 PD 2025-10-04T19:18:33N/A 0:00 1-00:00:00 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)30136 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:45:35 18:39 1-00:00:00 hai00430135 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:23:13 41:01 1-00:00:00 hai00530131 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:40 1-00:00:00 hai00530132 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:40 1-00:00:00 hai00430133 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:40 1-00:00:00 hai00530134 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:34 45:40 1-00:00:00 hai00630130 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T19:18:33 45:41 1-00:00:00 hai00530127 franz.sram standard 1 16 R 2025-10-04T18:54:08 2025-10-04T18:58:57 1:05:17 1-00:00:00 hai00530126 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:55:57 1:08:17 1-00:00:00 hai00530125 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:44:27 1:19:47 1-00:00:00 hai00130124 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:43:27 1:20:47 1-00:00:00 hai00130123 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:42:27 1:21:47 1-00:00:00 hai00130122 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:38:27 1:25:47 1-00:00:00 hai00130117 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:47 1-00:00:00 hai00130118 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:47 1-00:00:00 hai00430119 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:47 1-00:00:00 hai00530120 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:47 1-00:00:00 hai00630121 franz.sram standard 1 16 R 2025-10-04T18:35:15 2025-10-04T18:35:27 1:28:47 1-00:00:00 hai00629972 alfred.ngu standard 1 16 R 2025-10-04T16:35:34 2025-10-04T17:57:27 2:06:47 1-00:00:00 hai00130101 franz.sram standard 1 16 R 2025-10-04T14:55:02 2025-10-04T17:47:57 2:16:17 1-00:00:00 hai00430032 alfred.ngu standard 1 16 R 2025-10-04T11:53:39 2025-10-04T11:53:56 8:10:18 1-00:00:00 hai00130031 alfred.ngu standard 1 16 R 2025-10-04T11:50:30 2025-10-04T11:50:56 8:13:18 1-00:00:00 hai00529993 nishant.ku standard 2 384 R 2025-10-03T18:29:18 2025-10-04T01:48:31 18:15:43 1-00:00:00 hai[002,007]",,terminal_output +3471,6194657,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +3472,6196637,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +3473,6198659,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +3474,6200654,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +3475,6202662,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +3476,6204675,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +3477,6206684,"TERMINAL",0,0,"8502223544445313120:011:012:016:019:019:019:019:019:017:0131227",,terminal_output +3478,6208693,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output 
+3604,6461923,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +3605,6463914,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +3606,6465927,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +3607,6467963,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +3608,6470010,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +3609,6472059,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +3610,6473962,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +3611,6476055,"TERMINAL",0,0,"7921511:01243333410:003:00303030303030303030301:005:018:016",,terminal_output +3612,6477979,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +3613,6479990,"TERMINAL",0,0,"9:013555687777844444444444445530",,terminal_output +3614,6482072,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +3615,6484010,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +3616,6486020,"TERMINAL",0,0,"79315:011124333341010404040404040404040401011116",,terminal_output +3617,6488028,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +3618,6490040,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +3619,6492074,"TERMINAL",0,0,"3577786:009999406666666666666772",,terminal_output +3620,6494057,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +3621,6496070,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +3622,6498080,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +3623,6500099,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +3624,6502094,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +3625,6504103,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +3626,6506116,"TERMINAL",0,0,"79512131243333430305:006:007:001:004:004:004:004:004:002:003031316",,terminal_output +3627,6508126,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +3628,6510149,"TERMINAL",0,0,"31355568777784444444444444551:00",,terminal_output +3629,6512143,"TERMINAL",0,0,"3577782099991:006666666666666772",,terminal_output +3630,6514246,"TERMINAL",0,0,"579994:0021:011:011:011:0128888888888888994",,terminal_output +3631,6516163,"TERMINAL",0,0,"794:01314124333344040101010101010101010104041416",,terminal_output +3632,6518174,"TERMINAL",0,0,"96:0133346555562222222222222338",,terminal_output +3633,6520291,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +3634,6522194,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +3635,6524213,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +3636,6526231,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +3637,6528228,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +3638,6530235,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +3639,6532247,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +3640,6534258,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +3641,6536268,"TERMINAL",0,0,"7921512:0124333341:004:00303030303030303030302:006:019:016",,terminal_output +3642,6538275,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +3643,6540281,"TERMINAL",0,0,"10:013555687777844444444444445530",,terminal_output +3644,6542290,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +3645,6544302,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output 
+3646,6546307,"TERMINAL",0,0,"79316:011124333341010404040404040404040401011116",,terminal_output +3647,6548319,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +3648,6550331,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +3649,6552339,"TERMINAL",0,0,"3577787:009999406666666666666772",,terminal_output +3650,6554347,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +3651,6556355,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +3652,6558364,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +3653,6560374,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +3654,6562382,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +3655,6564392,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +3656,6566472,"TERMINAL",0,0,"79512131243333430306:007:008:002:005:005:005:005:005:003:003031316",,terminal_output +3657,6568416,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +3658,6570423,"TERMINAL",0,0,"31355568777784444444444444552:00",,terminal_output +3659,6572432,"TERMINAL",0,0,"3577782099992:006666666666666772",,terminal_output +3660,6574440,"TERMINAL",0,0,"579995:0022:012:012:012:0128888888888888994",,terminal_output +3661,6576450,"TERMINAL",0,0,"795:01314124333344040101010101010101010104041416",,terminal_output +3662,6578458,"TERMINAL",0,0,"97:0133346555562222222222222338",,terminal_output +3663,6580474,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +3664,6582478,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +3665,6584506,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +3666,6586541,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +3667,6588562,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +3668,6590530,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +3669,6592541,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +3670,6594633,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +3671,6596561,"TERMINAL",0,0,"7921513:0124333342:005:00303030303030303030303:007:0120:016",,terminal_output +3672,6598568,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +3673,6600581,"TERMINAL",0,0,"1:013555687777844444444444445530",,terminal_output +3674,6602599,"TERMINAL",0,0,"3688895130303030317777777777777883",,terminal_output +3675,6604600,"TERMINAL",0,0,"68307:001031322223999999999999910105",,terminal_output +3676,6606713,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +3677,6608662,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +3678,6610625,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +3679,6612641,"TERMINAL",0,0,"4688898:0140404040417777777777777883",,terminal_output +3680,6614658,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +3681,6616751,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +3682,6618695,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +3683,6620677,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +3684,6622696,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +3685,6624737,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +3686,6626707,"TERMINAL",0,0,"850222354444531317:018:019:013:016:016:016:016:016:014:0131227",,terminal_output 
+3687,6628729,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +3688,6630726,"TERMINAL",0,0,"2466679888895555555555555663:01",,terminal_output +3689,6632743,"TERMINAL",0,0,"468889213:003:003:003:003:017777777777777883",,terminal_output +3690,6634765,"TERMINAL",0,0,"686:0030406:01322223999999999999940405",,terminal_output +3691,6636824,"TERMINAL",0,0,"88:00222354444541411111111111111111111141227",,terminal_output +3692,6638767,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +3693,6640771,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +3694,6642791,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +3695,6644791,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +3696,6646803,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +3697,6648826,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +3698,6650818,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +3699,6652896,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +3700,6654834,"TERMINAL",0,0,"6820504:002132222399999999999998:001:005",,terminal_output +3701,6656849,"TERMINAL",0,0,"82022235444453:016:01313131313131313131314:01227",,terminal_output +3702,6658856,"TERMINAL",0,0,"2:00244457666673333333333333449",,terminal_output +3703,6660864,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +3704,6662873,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +3705,6664891,"TERMINAL",0,0,"68308:001031322223999999999999910105",,terminal_output +3706,6666926,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +3707,6668944,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +3708,6670908,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +3709,6672928,"TERMINAL",0,0,"4688899:0140404040417777777777777883",,terminal_output +3710,6674946,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +3711,6676938,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +3712,6678948,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +3713,6680958,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +3714,6682972,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +3715,6684993,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +3716,6687033,"TERMINAL",0,0,"850222354444531318:019:0130:014:017:017:017:017:017:015:0131227",,terminal_output +3717,6688996,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +3718,6691002,"TERMINAL",0,0,"2466679888895555555555555664:01",,terminal_output +3719,6693015,"TERMINAL",0,0,"468889214:004:004:004:004:017777777777777883",,terminal_output +3720,6695086,"TERMINAL",0,0,"687:0030407:01322223999999999999940405",,terminal_output +3721,6697131,"TERMINAL",0,0,"89:00222354444541411111111111111111111141227",,terminal_output +3722,6699045,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +3723,6701049,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +3724,6703063,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +3725,6705081,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +3726,6707169,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +3727,6709090,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +3728,6711100,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output 
+3729,6713110,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +3730,6715117,"TERMINAL",0,0,"6820505:002132222399999999999999:002:005",,terminal_output +3731,6717155,"TERMINAL",0,0,"82022235444454:017:01313131313131313131315:01227",,terminal_output +3732,6719138,"TERMINAL",0,0,"3:00244457666673333333333333449",,terminal_output +3733,6721148,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +3734,6723156,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +3735,6725191,"TERMINAL",0,0,"68309:001031322223999999999999910105",,terminal_output +3736,6727240,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +3737,6729256,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +3738,6731194,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +3739,6733203,"TERMINAL",0,0,"46888950:0140404040417777777777777883",,terminal_output +3740,6735212,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +3741,6737225,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +3742,6739239,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +3743,6741271,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +3744,6743251,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +3745,6745365,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +3746,6747307,"TERMINAL",0,0,"850222354444531319:0130:011:015:018:018:018:018:018:016:0131227",,terminal_output +3747,6749325,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +3748,6751285,"TERMINAL",0,0,"2466679888895555555555555665:01",,terminal_output +3749,6753301,"TERMINAL",0,0,"468889215:005:005:005:005:017777777777777883",,terminal_output +3750,6755319,"TERMINAL",0,0,"688:0030408:01322223999999999999940405",,terminal_output +3751,6757333,"TERMINAL",0,0,"81:00:00222354444541411111111111111111111141227",,terminal_output +3752,6759337,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +3753,6761332,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +3754,6763344,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +3755,6765361,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +3756,6767380,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +3757,6769374,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +3758,6771481,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +3759,6773422,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +3760,6775442,"TERMINAL",0,0,"6820506:0021322223999999999999920:003:005",,terminal_output +3761,6777409,"TERMINAL",0,0,"82022235444455:018:01313131313131313131316:01227",,terminal_output +3762,6779465,"TERMINAL",0,0,"4:00244457666673333333333333449",,terminal_output +3763,6781428,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +3764,6783448,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +3765,6785447,"TERMINAL",0,0,"683020:001031322223999999999999910105",,terminal_output +3766,6787460,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +3767,6789466,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +3768,6791476,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +3769,6793651,"TERMINAL",0,0,"4688891:0140404040417777777777777883",,terminal_output +3770,6795650,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output 
+3771,6797653,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +3772,6799652,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +3773,6801656,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +3774,6803665,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +3775,6805653,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +3776,6807653,"TERMINAL",0,0,"8502223544445313130:011:012:016:019:019:019:019:019:017:0131227",,terminal_output +3777,6809653,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +3778,6811649,"TERMINAL",0,0,"2466679888895555555555555666:01",,terminal_output +3779,6813657,"TERMINAL",0,0,"468889216:006:006:006:006:017777777777777883",,terminal_output +3780,6815653,"TERMINAL",0,0,"699:0131419:024333344040101010101010101010104041416",,terminal_output +3781,6818656,"TERMINAL",0,0,"91:0133346555562222222222222338",,terminal_output +3782,6819650,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +3783,6821653,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +3784,6823656,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +3785,6826653,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +3786,6827650,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +3787,6830653,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +3788,6832654,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +3789,6834661,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +3790,6836652,"TERMINAL",0,0,"7921517:0124333346:009:00303030303030303030307:001:014:016",,terminal_output +3791,6838659,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +3792,6840651,"TERMINAL",0,0,"5:013555687777844444444444445530",,terminal_output +3793,6842655,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +3794,6844651,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +3795,6846656,"TERMINAL",0,0,"79311:011124333341010404040404040404040401011116",,terminal_output +3796,6848651,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +3797,6850658,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +3798,6852655,"TERMINAL",0,0,"3577782:009999406666666666666772",,terminal_output +3799,6854657,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +3800,6856650,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +3801,6858653,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +3802,6860655,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +3803,6862651,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +3804,6864664,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +3805,6866653,"TERMINAL",0,0,"79512131243333430301:002:003:007:0040:0040:0040:0040:0040:008:003031316",,terminal_output +3806,6868650,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +3807,6870650,"TERMINAL",0,0,"31355568777784444444444444557:00",,terminal_output +3808,6872654,"TERMINAL",0,0,"3577782099997:006666666666666772",,terminal_output +3809,6874652,"TERMINAL",0,0,"5799930:0027:017:017:017:0128888888888888994",,terminal_output +3810,6876655,"TERMINAL",0,0,"7920:01314124333344040101010101010101010104041416",,terminal_output +3811,6878656,"TERMINAL",0,0,"92:0133346555562222222222222338",,terminal_output 
+3812,6880654,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +3813,6882653,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +3814,6884655,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +3815,6886654,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +3816,6888653,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +3817,6890653,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +3818,6892655,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +3819,6894650,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +3820,6896652,"TERMINAL",0,0,"7921518:0124333347:0020:00303030303030303030308:002:015:016",,terminal_output +3821,6898657,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +3822,6900650,"TERMINAL",0,0,"6:013555687777844444444444445530",,terminal_output +3823,6902658,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +3824,6904651,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +3825,6906654,"TERMINAL",0,0,"79312:011124333341010404040404040404040401011116",,terminal_output +3826,6908653,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +3827,6910654,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +3828,6912653,"TERMINAL",0,0,"3577783:009999406666666666666772",,terminal_output +3829,6974668,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +3830,6974668,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +3831,6974668,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +3832,6974668,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +3833,6974668,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +3834,6974668,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +3835,6974668,"TERMINAL",0,0,"79512131243333430302:003:004:008:001:001:001:001:001:009:003031316",,terminal_output +3836,6974668,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +3837,6974668,"TERMINAL",0,0,"31355568777784444444444444558:00",,terminal_output +3838,6974668,"TERMINAL",0,0,"3577782099998:006666666666666772",,terminal_output +3839,6974668,"TERMINAL",0,0,"579991:0028:018:018:018:0128888888888888994",,terminal_output +3840,6974668,"TERMINAL",0,0,"791:01314124333344040101010101010101010104041416",,terminal_output +3841,6974668,"TERMINAL",0,0,"93:0133346555562222222222222338",,terminal_output +3842,6974668,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +3843,6974668,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +3844,6974668,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +3845,6974668,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +3846,6974668,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +3847,6974668,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +3848,6974668,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +3849,6974668,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +3850,6974668,"TERMINAL",0,0,"7921519:0124333348:001:00303030303030303030309:003:016:016",,terminal_output +3851,6974668,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +3852,6974668,"TERMINAL",0,0,"7:013555687777844444444444445530",,terminal_output +3853,6974668,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output 
+3854,6974668,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +3855,6974668,"TERMINAL",0,0,"79313:011124333341010404040404040404040401011116",,terminal_output +3856,6974668,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +3857,6974668,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +3858,6974668,"TERMINAL",0,0,"3577784:009999406666666666666772",,terminal_output +3859,7034664,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +3860,7034664,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +3861,7034664,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +3862,7034664,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +3863,7034664,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +3864,7034664,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +3865,7034664,"TERMINAL",0,0,"79512131243333430303:004:005:009:002:002:002:002:002:0020:003031316",,terminal_output +3866,7034664,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +3867,7034664,"TERMINAL",0,0,"31355568777784444444444444559:00",,terminal_output +3868,7034664,"TERMINAL",0,0,"3577782099999:006666666666666772",,terminal_output +3869,7034664,"TERMINAL",0,0,"579992:0029:019:019:019:0128888888888888994",,terminal_output +3870,7034664,"TERMINAL",0,0,"792:01314124333344040101010101010101010104041416",,terminal_output +3871,7034664,"TERMINAL",0,0,"94:0133346555562222222222222338",,terminal_output +3872,7034664,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +3873,7034664,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +3874,7034664,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +3875,7034664,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +3876,7034664,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +3877,7034664,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +3878,7034664,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +3879,7034664,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +3880,7034664,"TERMINAL",0,0,"79215130:0124333349:002:003030303030303030303030:004:017:016",,terminal_output +3881,7034664,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +3882,7034664,"TERMINAL",0,0,"8:013555687777844444444444445530",,terminal_output +3883,7034664,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +3884,7034664,"TERMINAL",0,0,"57999313323232323999999999999910105",,terminal_output +3885,7034664,"TERMINAL",0,0,"830324:0212354444511114141414141414141414111227",,terminal_output +3886,7034664,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +3887,7034664,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +3888,7034664,"TERMINAL",0,0,"4688895:0140404040417777777777777883",,terminal_output +3889,7094654,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +3890,7094654,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +3891,7094654,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +3892,7094654,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +3893,7094654,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +3894,7094654,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output 
+3895,7094654,"TERMINAL",0,0,"850222354444531314:015:016:0140:013:013:013:013:013:011:0131227",,terminal_output +3896,7094654,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +3897,7094654,"TERMINAL",0,0,"24666798888955555555555556630:01",,terminal_output +3898,7094654,"TERMINAL",0,0,"468889211:00:001:00:001:00:001:00:001:00:017777777777777883",,terminal_output +3899,7094654,"TERMINAL",0,0,"683:0030403:01322223999999999999940405",,terminal_output +3900,7094654,"TERMINAL",0,0,"85:00222354444541411111111111111111111141227",,terminal_output +3901,7094654,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +3902,7094654,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +3903,7094654,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +3904,7094654,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +3905,7094654,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +3906,7094654,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +3907,7094654,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +3908,7094654,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +3909,7094654,"TERMINAL",0,0,"6820501:002132222399999999999995:008:005",,terminal_output +3910,7094654,"TERMINAL",0,0,"820222354444520:013:01313131313131313131311:01227",,terminal_output +3911,7094654,"TERMINAL",0,0,"9:00244457666673333333333333449",,terminal_output +3912,7094654,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +3913,7094654,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +3914,7094654,"TERMINAL",0,0,"68305:001031322223999999999999910105",,terminal_output +3915,7094654,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +3916,7094654,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +3917,7094654,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +3918,7094654,"TERMINAL",0,0,"4688896:0140404040417777777777777883",,terminal_output +3919,7154655,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +3920,7154655,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +3921,7154655,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +3922,7154655,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +3923,7154655,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +3924,7154655,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +3925,7154655,"TERMINAL",0,0,"850222354444531315:016:017:011:014:014:014:014:014:012:0131227",,terminal_output +3926,7154655,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +3927,7154655,"TERMINAL",0,0,"2466679888895555555555555661:01",,terminal_output +3928,7154655,"TERMINAL",0,0,"468889211:001:001:001:001:017777777777777883",,terminal_output +3929,7154655,"TERMINAL",0,0,"684:0030404:01322223999999999999940405",,terminal_output +3930,7154655,"TERMINAL",0,0,"86:00222354444541411111111111111111111141227",,terminal_output +3931,7154655,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +3932,7154655,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +3933,7154655,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +3934,7154655,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +3935,7154655,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output 
+3936,7154655,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +3937,7154655,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +3938,7154655,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +3939,7154655,"TERMINAL",0,0,"6820502:002132222399999999999996:009:005",,terminal_output +3940,7154655,"TERMINAL",0,0,"82022235444451:014:01313131313131313131312:01227",,terminal_output +3941,7154655,"TERMINAL",0,0,"20:00244457666673333333333333449",,terminal_output +3942,7154655,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +3943,7154655,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +3944,7154655,"TERMINAL",0,0,"68306:001031322223999999999999910105",,terminal_output +3945,7154655,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +3946,7154655,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +3947,7154655,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +3948,7154655,"TERMINAL",0,0,"4688897:0140404040417777777777777883",,terminal_output +3949,7214686,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +3950,7214686,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +3951,7214687,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +3952,7214687,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +3953,7214687,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +3954,7214687,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +3955,7214687,"TERMINAL",0,0,"850222354444531316:017:018:012:015:015:015:015:015:013:0131227",,terminal_output +3956,7214687,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +3957,7214687,"TERMINAL",0,0,"2466679888895555555555555662:01",,terminal_output +3958,7214687,"TERMINAL",0,0,"468889212:002:002:002:002:017777777777777883",,terminal_output +3959,7214687,"TERMINAL",0,0,"685:0030405:01322223999999999999940405",,terminal_output +3960,7214687,"TERMINAL",0,0,"87:00222354444541411111111111111111111141227",,terminal_output +3961,7214687,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +3962,7214687,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +3963,7214687,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +3964,7214687,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +3965,7214687,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +3966,7214687,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +3967,7214687,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +3968,7214687,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +3969,7214687,"TERMINAL",0,0,"6820503:002132222399999999999997:0030:005",,terminal_output +3970,7214687,"TERMINAL",0,0,"82022235444452:015:01313131313131313131313:01227",,terminal_output +3971,7214687,"TERMINAL",0,0,"1:00244457666673333333333333449",,terminal_output +3972,7214687,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +3973,7214687,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +3974,7214687,"TERMINAL",0,0,"68307:001031322223999999999999910105",,terminal_output +3975,7214687,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +3976,7214687,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +3977,7214687,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output 
+3978,7214687,"TERMINAL",0,0,"4688898:0140404040417777777777777883",,terminal_output +3979,7274675,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +3980,7274675,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +3981,7274675,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +3982,7274675,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +3983,7274675,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +3984,7274675,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +3985,7274675,"TERMINAL",0,0,"850222354444531317:018:019:013:016:016:016:016:016:014:0131227",,terminal_output +3986,7274675,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +3987,7274675,"TERMINAL",0,0,"2466679888895555555555555663:01",,terminal_output +3988,7274675,"TERMINAL",0,0,"468889213:003:003:003:003:017777777777777883",,terminal_output +3989,7274675,"TERMINAL",0,0,"686:0030406:01322223999999999999940405",,terminal_output +3990,7274675,"TERMINAL",0,0,"88:01333465555642421212121212121212121242338",,terminal_output +3991,7274675,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +3992,7274675,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +3993,7274675,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +3994,7274675,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +3995,7274675,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +3996,7274675,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +3997,7274675,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +3998,7274675,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +3999,7274675,"TERMINAL",0,0,"7921514:0124333343:006:00303030303030303030304:008:011:016",,terminal_output +4000,7274675,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4001,7274675,"TERMINAL",0,0,"2:013555687777844444444444445530",,terminal_output +4002,7274675,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4003,7274675,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4004,7274675,"TERMINAL",0,0,"79318:011124333341010404040404040404040401011116",,terminal_output +4005,7274675,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4006,7274675,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4007,7274675,"TERMINAL",0,0,"3577789:009999406666666666666772",,terminal_output +4008,7334656,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4009,7334656,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4010,7334656,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4011,7334656,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4012,7334656,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4013,7334656,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4014,7334656,"TERMINAL",0,0,"79512131243333430308:009:0040:004:007:007:007:007:007:005:003031316",,terminal_output +4015,7334656,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4016,7334656,"TERMINAL",0,0,"31355568777784444444444444554:00",,terminal_output +4017,7334656,"TERMINAL",0,0,"3577782099994:006666666666666772",,terminal_output +4018,7334656,"TERMINAL",0,0,"579997:0024:014:014:014:0128888888888888994",,terminal_output 
+4019,7334656,"TERMINAL",0,0,"797:01314124333344040101010101010101010104041416",,terminal_output +4020,7334656,"TERMINAL",0,0,"99:0133346555562222222222222338",,terminal_output +4021,7334656,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4022,7334656,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4023,7334656,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4024,7334656,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4025,7334656,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4026,7334656,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4027,7334656,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4028,7334656,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4029,7334656,"TERMINAL",0,0,"7921515:0124333344:007:00303030303030303030305:009:012:016",,terminal_output +4030,7334656,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4031,7334656,"TERMINAL",0,0,"3:013555687777844444444444445530",,terminal_output +4032,7334656,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4033,7334656,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4034,7334656,"TERMINAL",0,0,"79319:011124333341010404040404040404040401011116",,terminal_output +4035,7334656,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4036,7334656,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4037,7334656,"TERMINAL",0,0,"3577781:00:009999406666666666666772",,terminal_output +4038,7394663,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4039,7394663,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4040,7394663,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4041,7394663,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4042,7394663,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4043,7394663,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4044,7394663,"TERMINAL",0,0,"79512131243333430309:0040:001:005:008:008:008:008:008:006:003031316",,terminal_output +4045,7394663,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4046,7394663,"TERMINAL",0,0,"31355568777784444444444444555:00",,terminal_output +4047,7394663,"TERMINAL",0,0,"3577782099995:006666666666666772",,terminal_output +4048,7394663,"TERMINAL",0,0,"579998:0025:015:015:015:0128888888888888994",,terminal_output +4049,7394663,"TERMINAL",0,0,"798:01314124333344040101010101010101010104041416",,terminal_output +4050,7394664,"TERMINAL",0,0,"910:0133346555562222222222222338",,terminal_output +4051,7394664,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4052,7394664,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4053,7394664,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4054,7394664,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4055,7394664,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4056,7394664,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4057,7394664,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4058,7394664,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4059,7394664,"TERMINAL",0,0,"7921516:0124333345:008:00303030303030303030306:0030:013:016",,terminal_output +4060,7394664,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output 
+4061,7394664,"TERMINAL",0,0,"4:013555687777844444444444445530",,terminal_output +4062,7394664,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4063,7394664,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4064,7394664,"TERMINAL",0,0,"793130:011124333341010404040404040404040401011116",,terminal_output +4065,7394664,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4066,7394664,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4067,7394664,"TERMINAL",0,0,"3577781:009999406666666666666772",,terminal_output +4068,7454661,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4069,7454661,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4070,7454661,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4071,7454661,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4072,7454661,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4073,7454661,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4074,7454661,"TERMINAL",0,0,"795121312433334303040:001:002:006:009:009:009:009:009:007:003031316",,terminal_output +4075,7454661,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4076,7454661,"TERMINAL",0,0,"31355568777784444444444444556:00",,terminal_output +4077,7454661,"TERMINAL",0,0,"3577782099996:006666666666666772",,terminal_output +4078,7454661,"TERMINAL",0,0,"579999:0026:016:016:016:0128888888888888994",,terminal_output +4079,7454661,"TERMINAL",0,0,"799:01314124333344040101010101010101010104041416",,terminal_output +4080,7454661,"TERMINAL",0,0,"91:0133346555562222222222222338",,terminal_output +4081,7454661,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4082,7454661,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4083,7454661,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4084,7454661,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4085,7454661,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4086,7454661,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4087,7454661,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4088,7454661,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4089,7454661,"TERMINAL",0,0,"7921517:0124333346:009:00303030303030303030307:001:014:016",,terminal_output +4090,7454661,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4091,7454661,"TERMINAL",0,0,"5:013555687777844444444444445530",,terminal_output +4092,7454661,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4093,7454661,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4094,7454661,"TERMINAL",0,0,"79311:011124333341010404040404040404040401011116",,terminal_output +4095,7454661,"TERMINAL",0,0,"93244457666673333333333333449",,terminal_output +4096,7454661,"TERMINAL",0,0,"124666798888955555555555556641",,terminal_output +4097,7454661,"TERMINAL",0,0,"4688892:0140404040417777777777777883",,terminal_output +4098,7514657,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4099,7514657,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4100,7514657,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4101,7514657,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4102,7514657,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output 
+4103,7514657,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4104,7514657,"TERMINAL",0,0,"850222354444531311:012:013:017:0150:0150:0150:0150:0150:018:0131227",,terminal_output +4105,7514657,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4106,7514657,"TERMINAL",0,0,"2466679888895555555555555667:01",,terminal_output +4107,7514657,"TERMINAL",0,0,"468889217:007:007:007:007:017777777777777883",,terminal_output +4108,7514657,"TERMINAL",0,0,"6830:00304040:01322223999999999999940405",,terminal_output +4109,7514657,"TERMINAL",0,0,"82:00222354444541411111111111111111111141227",,terminal_output +4110,7514657,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4111,7514657,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +4112,7514657,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4113,7514657,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +4114,7514657,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4115,7514657,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4116,7514657,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4117,7514657,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4118,7514657,"TERMINAL",0,0,"6820508:002132222399999999999992:005:005",,terminal_output +4119,7514657,"TERMINAL",0,0,"82022235444457:0130:01313131313131313131318:01227",,terminal_output +4120,7514657,"TERMINAL",0,0,"6:00244457666673333333333333449",,terminal_output +4121,7514657,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +4122,7514657,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4123,7514657,"TERMINAL",0,0,"68302:001031322223999999999999910105",,terminal_output +4124,7514657,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4125,7514657,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +4126,7514657,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4127,7514657,"TERMINAL",0,0,"4688893:0140404040417777777777777883",,terminal_output +4128,7574664,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4129,7574664,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4130,7574664,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4131,7574664,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4132,7574664,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4133,7574664,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4134,7574664,"TERMINAL",0,0,"850222354444531312:013:014:018:011:011:011:011:011:019:0131227",,terminal_output +4135,7574665,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4136,7574665,"TERMINAL",0,0,"2466679888895555555555555668:01",,terminal_output +4137,7574665,"TERMINAL",0,0,"468889218:008:008:008:008:017777777777777883",,terminal_output +4138,7574665,"TERMINAL",0,0,"681:0030401:01322223999999999999940405",,terminal_output +4139,7574665,"TERMINAL",0,0,"83:00222354444541411111111111111111111141227",,terminal_output +4140,7574665,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4141,7574665,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +4142,7574665,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4143,7574665,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output 
+4144,7574665,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4145,7574665,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4146,7574665,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4147,7574665,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4148,7574665,"TERMINAL",0,0,"6820509:002132222399999999999993:006:005",,terminal_output +4149,7574665,"TERMINAL",0,0,"82022235444458:011:01313131313131313131319:01227",,terminal_output +4150,7574665,"TERMINAL",0,0,"7:00244457666673333333333333449",,terminal_output +4151,7574665,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +4152,7574665,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4153,7574665,"TERMINAL",0,0,"68303:001031322223999999999999910105",,terminal_output +4154,7574665,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4155,7574665,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +4156,7574665,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4157,7574665,"TERMINAL",0,0,"4688894:0140404040417777777777777883",,terminal_output +4158,7634657,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4159,7634657,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4160,7634657,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4161,7634657,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4162,7634657,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4163,7634657,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4164,7634657,"TERMINAL",0,0,"850222354444531313:014:015:019:012:012:012:012:012:0130:0131227",,terminal_output +4165,7634657,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4166,7634657,"TERMINAL",0,0,"2466679888895555555555555669:01",,terminal_output +4167,7634657,"TERMINAL",0,0,"468889219:009:009:009:009:017777777777777883",,terminal_output +4168,7634657,"TERMINAL",0,0,"682:0030402:01322223999999999999940405",,terminal_output +4169,7634657,"TERMINAL",0,0,"84:00222354444541411111111111111111111141227",,terminal_output +4170,7634657,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4171,7634657,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +4172,7634657,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4173,7634657,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +4174,7634657,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4175,7634657,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4176,7634657,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4177,7634657,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4178,7634657,"TERMINAL",0,0,"68205040:002132222399999999999994:007:005",,terminal_output +4179,7634657,"TERMINAL",0,0,"82022235444459:012:013131313131313131313140:01227",,terminal_output +4180,7634657,"TERMINAL",0,0,"8:00244457666673333333333333449",,terminal_output +4181,7634657,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +4182,7634657,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4183,7634657,"TERMINAL",0,0,"68304:001031322223999999999999910105",,terminal_output +4184,7634657,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4185,7634657,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output 
+4186,7634657,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4187,7634657,"TERMINAL",0,0,"4688895:0140404040417777777777777883",,terminal_output +4188,7694659,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4189,7694659,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4190,7694659,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4191,7694659,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4192,7694659,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4193,7694659,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4194,7694659,"TERMINAL",0,0,"850222354444531314:015:016:0150:013:013:013:013:013:011:0131227",,terminal_output +4195,7694659,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4196,7694659,"TERMINAL",0,0,"24666798888955555555555556640:01",,terminal_output +4197,7694659,"TERMINAL",0,0,"4688892110:0010:0010:0010:0010:017777777777777883",,terminal_output +4198,7694659,"TERMINAL",0,0,"683:0030403:01322223999999999999940405",,terminal_output +4199,7694659,"TERMINAL",0,0,"85:00222354444541411111111111111111111141227",,terminal_output +4200,7694659,"TERMINAL",0,0,"403555687777844444444444445510",,terminal_output +4201,7694659,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4202,7694659,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4203,7694659,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4204,7694659,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4205,7694659,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4206,7694659,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4207,7694659,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4208,7694659,"TERMINAL",0,0,"7921511:01243333430:003:00303030303030303030301:005:018:016",,terminal_output +4209,7694659,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4210,7694659,"TERMINAL",0,0,"9:013555687777844444444444445530",,terminal_output +4211,7694659,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4212,7694659,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4213,7694659,"TERMINAL",0,0,"79315:011124333341010404040404040404040401011116",,terminal_output +4214,7694659,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4215,7694659,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4216,7694659,"TERMINAL",0,0,"3577786:009999406666666666666772",,terminal_output +4217,7754659,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4218,7754659,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4219,7754659,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4220,7754659,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4221,7754659,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4222,7754659,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4223,7754659,"TERMINAL",0,0,"79512131243333430305:006:007:001:004:004:004:004:004:002:003031316",,terminal_output +4224,7754659,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4225,7754659,"TERMINAL",0,0,"31355568777784444444444444551:00",,terminal_output +4226,7754659,"TERMINAL",0,0,"3577782099991:006666666666666772",,terminal_output 
+4227,7754659,"TERMINAL",0,0,"579994:0021:011:011:011:0128888888888888994",,terminal_output +4228,7754659,"TERMINAL",0,0,"794:01314124333344040101010101010101010104041416",,terminal_output +4229,7754659,"TERMINAL",0,0,"96:0133346555562222222222222338",,terminal_output +4230,7754659,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4231,7754659,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4232,7754659,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4233,7754659,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4234,7754659,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4235,7754659,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4236,7754659,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4237,7754659,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4238,7754659,"TERMINAL",0,0,"7921512:0124333341:004:00303030303030303030302:006:019:016",,terminal_output +4239,7754659,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4240,7754659,"TERMINAL",0,0,"30:013555687777844444444444445530",,terminal_output +4241,7754659,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4242,7754659,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4243,7754659,"TERMINAL",0,0,"79316:011124333341010404040404040404040401011116",,terminal_output +4244,7754659,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4245,7754659,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4246,7754659,"TERMINAL",0,0,"3577787:009999406666666666666772",,terminal_output +4247,7814709,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4248,7814709,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4249,7814710,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4250,7814710,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4251,7814710,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4252,7814710,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4253,7814710,"TERMINAL",0,0,"79512131243333430306:007:008:002:005:005:005:005:005:003:003031316",,terminal_output +4254,7814710,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4255,7814710,"TERMINAL",0,0,"31355568777784444444444444552:00",,terminal_output +4256,7814710,"TERMINAL",0,0,"3577782099992:006666666666666772",,terminal_output +4257,7814710,"TERMINAL",0,0,"579995:0022:012:012:012:0128888888888888994",,terminal_output +4258,7814710,"TERMINAL",0,0,"795:01314124333344040101010101010101010104041416",,terminal_output +4259,7814710,"TERMINAL",0,0,"97:0133346555562222222222222338",,terminal_output +4260,7814710,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4261,7814710,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4262,7814710,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4263,7814710,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4264,7814710,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4265,7814710,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4266,7814710,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4267,7814710,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output 
+4268,7814710,"TERMINAL",0,0,"7921513:0124333342:005:00303030303030303030303:007:0140:016",,terminal_output +4269,7814710,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4270,7814710,"TERMINAL",0,0,"1:013555687777844444444444445530",,terminal_output +4271,7814710,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4272,7814710,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4273,7814710,"TERMINAL",0,0,"79317:011124333341010404040404040404040401011116",,terminal_output +4274,7814710,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4275,7814710,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4276,7814710,"TERMINAL",0,0,"3577788:009999406666666666666772",,terminal_output +4277,7874662,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4278,7874662,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4279,7874662,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4280,7874662,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4281,7874663,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4282,7874663,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4283,7874663,"TERMINAL",0,0,"79512131243333430307:008:009:003:006:006:006:006:006:004:003031316",,terminal_output +4284,7874663,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4285,7874663,"TERMINAL",0,0,"31355568777784444444444444553:00",,terminal_output +4286,7874663,"TERMINAL",0,0,"3577782099993:006666666666666772",,terminal_output +4287,7874663,"TERMINAL",0,0,"579996:0023:013:013:013:0128888888888888994",,terminal_output +4288,7874663,"TERMINAL",0,0,"796:01314124333344040101010101010101010104041416",,terminal_output +4289,7874663,"TERMINAL",0,0,"98:0133346555562222222222222338",,terminal_output +4290,7874663,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4291,7874663,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4292,7874663,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4293,7874663,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4294,7874663,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4295,7874663,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4296,7874663,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4297,7874663,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4298,7874663,"TERMINAL",0,0,"7921514:0124333343:006:00303030303030303030304:008:011:016",,terminal_output +4299,7874663,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4300,7874663,"TERMINAL",0,0,"2:013555687777844444444444445530",,terminal_output +4301,7874663,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4302,7874663,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4303,7874663,"TERMINAL",0,0,"79318:011124333341010404040404040404040401011116",,terminal_output +4304,7874663,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4305,7874663,"TERMINAL",0,0,"114666798888955555555555556641",,terminal_output +4306,7874663,"TERMINAL",0,0,"4688899:0140404040417777777777777883",,terminal_output +4307,7934662,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4308,7934662,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4309,7934662,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output 
+4310,7934662,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4311,7934662,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4312,7934662,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4313,7934662,"TERMINAL",0,0,"850222354444531318:019:0150:014:017:017:017:017:017:015:0131227",,terminal_output +4314,7934662,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4315,7934662,"TERMINAL",0,0,"2466679888895555555555555664:01",,terminal_output +4316,7934662,"TERMINAL",0,0,"468889214:004:004:004:004:017777777777777883",,terminal_output +4317,7934662,"TERMINAL",0,0,"687:0030407:01322223999999999999940405",,terminal_output +4318,7934662,"TERMINAL",0,0,"89:00222354444541411111111111111111111141227",,terminal_output +4319,7934662,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4320,7934662,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +4321,7934662,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4322,7934662,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +4323,7934662,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4324,7934662,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4325,7934662,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4326,7934662,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4327,7934662,"TERMINAL",0,0,"6820505:002132222399999999999999:002:005",,terminal_output +4328,7934662,"TERMINAL",0,0,"82022235444454:017:01313131313131313131315:01227",,terminal_output +4329,7934662,"TERMINAL",0,0,"3:00244457666673333333333333449",,terminal_output +4330,7934662,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +4331,7934662,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4332,7934662,"TERMINAL",0,0,"68309:001031322223999999999999910105",,terminal_output +4333,7934662,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4334,7934662,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +4335,7934662,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4336,7934662,"TERMINAL",0,0,"46888910:0140404040417777777777777883",,terminal_output +4337,7994654,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4338,7994654,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4339,7994654,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4340,7994654,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4341,7994654,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4342,7994654,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4343,7994654,"TERMINAL",0,0,"850222354444531319:0150:011:015:018:018:018:018:018:016:0131227",,terminal_output +4344,7994654,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4345,7994654,"TERMINAL",0,0,"2466679888895555555555555665:01",,terminal_output +4346,7994654,"TERMINAL",0,0,"468889215:005:005:005:005:017777777777777883",,terminal_output +4347,7994654,"TERMINAL",0,0,"688:0030408:01322223999999999999940405",,terminal_output +4348,7994654,"TERMINAL",0,0,"820:00222354444541411111111111111111111141227",,terminal_output +4349,7994654,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4350,7994654,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output 
+4351,7994654,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4352,7994654,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +4353,7994654,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4354,7994654,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4355,7994654,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4356,7994654,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4357,7994654,"TERMINAL",0,0,"6820506:0021322223999999999999940:003:005",,terminal_output +4358,7994654,"TERMINAL",0,0,"82022235444455:018:01313131313131313131316:01227",,terminal_output +4359,7994654,"TERMINAL",0,0,"4:00244457666673333333333333449",,terminal_output +4360,7994654,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +4361,7994654,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4362,7994654,"TERMINAL",0,0,"683040:001031322223999999999999910105",,terminal_output +4363,7994654,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4364,7994654,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +4365,7994654,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4366,7994654,"TERMINAL",0,0,"4688891:0140404040417777777777777883",,terminal_output +4367,8054657,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4368,8054657,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4369,8054657,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4370,8054657,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4371,8054657,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4372,8054657,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4373,8054657,"TERMINAL",0,0,"8502223544445313150:011:012:016:019:019:019:019:019:017:0131227",,terminal_output +4374,8054657,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4375,8054657,"TERMINAL",0,0,"2466679888895555555555555666:01",,terminal_output +4376,8054657,"TERMINAL",0,0,"468889216:006:006:006:006:017777777777777883",,terminal_output +4377,8054657,"TERMINAL",0,0,"689:0030409:01322223999999999999940405",,terminal_output +4378,8054657,"TERMINAL",0,0,"81:00222354444541411111111111111111111141227",,terminal_output +4379,8054657,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4380,8054657,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +4381,8054657,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4382,8054657,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +4383,8054657,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4384,8054657,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4385,8054657,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4386,8054657,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4387,8054657,"TERMINAL",0,0,"6820507:002132222399999999999991:004:005",,terminal_output +4388,8054657,"TERMINAL",0,0,"82022235444456:019:01313131313131313131317:01227",,terminal_output +4389,8054657,"TERMINAL",0,0,"5:00244457666673333333333333449",,terminal_output +4390,8054657,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +4391,8054657,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4392,8054657,"TERMINAL",0,0,"68301:001031322223999999999999910105",,terminal_output 
+4393,8054657,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4394,8054657,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +4395,8054657,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4396,8054657,"TERMINAL",0,0,"4688892:0140404040417777777777777883",,terminal_output +4397,8114661,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4398,8114661,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4399,8114661,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4400,8114661,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4401,8114661,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4402,8114661,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4403,8114661,"TERMINAL",0,0,"850222354444531311:012:013:017:012:00:012:00:012:00:012:00:012:00:018:0131227",,terminal_output +4404,8114661,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4405,8114661,"TERMINAL",0,0,"2466679888895555555555555667:01",,terminal_output +4406,8114661,"TERMINAL",0,0,"468889217:007:007:007:007:017777777777777883",,terminal_output +4407,8114661,"TERMINAL",0,0,"6840:00304050:01322223999999999999940405",,terminal_output +4408,8114661,"TERMINAL",0,0,"82:00222354444541411111111111111111111141227",,terminal_output +4409,8114661,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4410,8114661,"TERMINAL",0,0,"2466683099991066666666666667712",,terminal_output +4411,8114661,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4412,8114661,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4413,8114661,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4414,8114661,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4415,8114661,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4416,8114661,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4417,8114661,"TERMINAL",0,0,"7921518:0124333347:0040:00303030303030303030308:002:015:016",,terminal_output +4418,8114661,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4419,8114661,"TERMINAL",0,0,"6:013555687777844444444444445530",,terminal_output +4420,8114661,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4421,8114661,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4422,8114661,"TERMINAL",0,0,"79312:011124333341010404040404040404040401011116",,terminal_output +4423,8114661,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4424,8114661,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4425,8114661,"TERMINAL",0,0,"3577783:009999406666666666666772",,terminal_output +4426,8174665,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4427,8174665,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4428,8174665,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4429,8174665,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4430,8174665,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4431,8174665,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4432,8174665,"TERMINAL",0,0,"79512131243333430302:003:004:008:001:001:001:001:001:009:003031316",,terminal_output +4433,8174665,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output 
+4434,8174665,"TERMINAL",0,0,"31355568777784444444444444558:00",,terminal_output +4435,8174665,"TERMINAL",0,0,"3577782099998:006666666666666772",,terminal_output +4436,8174665,"TERMINAL",0,0,"579991:0028:018:018:018:0128888888888888994",,terminal_output +4437,8174665,"TERMINAL",0,0,"791:01314124333344040101010101010101010104041416",,terminal_output +4438,8174665,"TERMINAL",0,0,"93:0133346555562222222222222338",,terminal_output +4439,8174665,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4440,8174665,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4441,8174665,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4442,8174665,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4443,8174665,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4444,8174665,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4445,8174665,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4446,8174665,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4447,8174665,"TERMINAL",0,0,"7921519:0124333348:001:00303030303030303030309:003:016:016",,terminal_output +4448,8174665,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4449,8174665,"TERMINAL",0,0,"7:013555687777844444444444445530",,terminal_output +4450,8174665,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4451,8174665,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4452,8174665,"TERMINAL",0,0,"79313:011124333341010404040404040404040401011116",,terminal_output +4453,8174665,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4454,8174665,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4455,8174665,"TERMINAL",0,0,"3577784:009999406666666666666772",,terminal_output +4456,8234658,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4457,8234658,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4458,8234658,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4459,8234658,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4460,8234658,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4461,8234658,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4462,8234658,"TERMINAL",0,0,"79512131243333430303:004:005:009:002:002:002:002:002:0040:003031316",,terminal_output +4463,8234658,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4464,8234658,"TERMINAL",0,0,"31355568777784444444444444559:00",,terminal_output +4465,8234658,"TERMINAL",0,0,"3577782099999:006666666666666772",,terminal_output +4466,8234658,"TERMINAL",0,0,"579992:0029:019:019:019:0128888888888888994",,terminal_output +4467,8234658,"TERMINAL",0,0,"792:01314124333344040101010101010101010104041416",,terminal_output +4468,8234658,"TERMINAL",0,0,"94:0133346555562222222222222338",,terminal_output +4469,8234658,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4470,8234658,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4471,8234658,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4472,8234658,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4473,8234658,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4474,8234658,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4475,8234658,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output 
+4476,8234658,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4477,8234658,"TERMINAL",0,0,"79215150:0124333349:002:003030303030303030303050:004:017:016",,terminal_output +4478,8234658,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4479,8234658,"TERMINAL",0,0,"8:013555687777844444444444445530",,terminal_output +4480,8234658,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4481,8234658,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4482,8234658,"TERMINAL",0,0,"79314:011124333341010404040404040404040401011116",,terminal_output +4483,8234658,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4484,8234658,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4485,8234658,"TERMINAL",0,0,"3577785:009999406666666666666772",,terminal_output +4486,8294704,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4487,8294705,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4488,8294705,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4489,8294705,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4490,8294705,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4491,8294705,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4492,8294705,"TERMINAL",0,0,"79512131243333430304:005:006:002:00:003:003:003:003:003:001:003031316",,terminal_output +4493,8294705,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4494,8294705,"TERMINAL",0,0,"313555687777844444444444445550:00",,terminal_output +4495,8294705,"TERMINAL",0,0,"35777820999920:006666666666666772",,terminal_output +4496,8294705,"TERMINAL",0,0,"579993:00220:0120:0120:0120:0128888888888888994",,terminal_output +4497,8294705,"TERMINAL",0,0,"793:01314124333344040101010101010101010104041416",,terminal_output +4498,8294705,"TERMINAL",0,0,"95:0133346555562222222222222338",,terminal_output +4499,8294705,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4500,8294705,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4501,8294705,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4502,8294705,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4503,8294705,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4504,8294705,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4505,8294705,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4506,8294705,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4507,8294705,"TERMINAL",0,0,"7921511:01243333440:003:00303030303030303030301:005:018:016",,terminal_output +4508,8294705,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4509,8294705,"TERMINAL",0,0,"9:013555687777844444444444445530",,terminal_output +4510,8294705,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4511,8294705,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4512,8294705,"TERMINAL",0,0,"79315:011124333341010404040404040404040401011116",,terminal_output +4513,8294705,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4514,8294705,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4515,8294705,"TERMINAL",0,0,"4688896:0140404040417777777777777883",,terminal_output +4516,8354658,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output 
+4517,8354659,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4518,8354659,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4519,8354659,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4520,8354659,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4521,8354659,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4522,8354659,"TERMINAL",0,0,"850222354444531315:016:017:011:014:014:014:014:014:012:0131227",,terminal_output +4523,8354659,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4524,8354659,"TERMINAL",0,0,"2466679888895555555555555661:01",,terminal_output +4525,8354659,"TERMINAL",0,0,"468889211:001:001:001:001:017777777777777883",,terminal_output +4526,8354659,"TERMINAL",0,0,"684:0030404:01322223999999999999940405",,terminal_output +4527,8354659,"TERMINAL",0,0,"86:00222354444541411111111111111111111141227",,terminal_output +4528,8354659,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4529,8354659,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +4530,8354659,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4531,8354659,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +4532,8354659,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4533,8354659,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4534,8354659,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4535,8354659,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4536,8354659,"TERMINAL",0,0,"6820502:002132222399999999999996:009:005",,terminal_output +4537,8354659,"TERMINAL",0,0,"82022235444451:014:01313131313131313131312:01227",,terminal_output +4538,8354659,"TERMINAL",0,0,"40:00244457666673333333333333449",,terminal_output +4539,8354659,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +4540,8354659,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4541,8354659,"TERMINAL",0,0,"68306:001031322223999999999999910105",,terminal_output +4542,8354659,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4543,8354659,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +4544,8354659,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4545,8354659,"TERMINAL",0,0,"4688897:0140404040417777777777777883",,terminal_output +4546,8414720,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4547,8414720,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4548,8414721,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4549,8414721,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4550,8414721,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4551,8414721,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4552,8414721,"TERMINAL",0,0,"850222354444531316:017:018:012:015:015:015:015:015:013:0131227",,terminal_output +4553,8414721,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4554,8414721,"TERMINAL",0,0,"2466679888895555555555555662:01",,terminal_output +4555,8414721,"TERMINAL",0,0,"468889212:002:002:002:002:017777777777777883",,terminal_output +4556,8414721,"TERMINAL",0,0,"685:0030405:01322223999999999999940405",,terminal_output +4557,8414721,"TERMINAL",0,0,"87:00222354444541411111111111111111111141227",,terminal_output 
+4558,8414721,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4559,8414721,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +4560,8414721,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4561,8414721,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +4562,8414721,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4563,8414721,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4564,8414721,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4565,8414721,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4566,8414721,"TERMINAL",0,0,"6820503:002132222399999999999997:0050:005",,terminal_output +4567,8414721,"TERMINAL",0,0,"82022235444452:015:01313131313131313131313:01227",,terminal_output +4568,8414721,"TERMINAL",0,0,"1:00244457666673333333333333449",,terminal_output +4569,8414721,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +4570,8414721,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4571,8414721,"TERMINAL",0,0,"68307:001031322223999999999999910105",,terminal_output +4572,8414721,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4573,8414721,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +4574,8414721,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4575,8414721,"TERMINAL",0,0,"4688898:0140404040417777777777777883",,terminal_output +4576,8474669,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4577,8474669,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4578,8474669,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4579,8474669,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4580,8474669,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4581,8474669,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4582,8474669,"TERMINAL",0,0,"850222354444531317:018:019:013:016:016:016:016:016:014:0131227",,terminal_output +4583,8474669,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4584,8474669,"TERMINAL",0,0,"2466679888895555555555555663:01",,terminal_output +4585,8474669,"TERMINAL",0,0,"468889213:003:003:003:003:017777777777777883",,terminal_output +4586,8474669,"TERMINAL",0,0,"686:0030406:01322223999999999999940405",,terminal_output +4587,8474669,"TERMINAL",0,0,"88:00222354444541411111111111111111111141227",,terminal_output +4588,8474669,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4589,8474669,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +4590,8474669,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4591,8474669,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +4592,8474669,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4593,8474669,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4594,8474669,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4595,8474669,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4596,8474669,"TERMINAL",0,0,"6820504:002132222399999999999998:001:005",,terminal_output +4597,8474669,"TERMINAL",0,0,"82022235444453:016:01313131313131313131314:01227",,terminal_output +4598,8474669,"TERMINAL",0,0,"2:00244457666673333333333333449",,terminal_output +4599,8474669,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output 
+4600,8474669,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4601,8474669,"TERMINAL",0,0,"68308:001031322223999999999999910105",,terminal_output +4602,8474669,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4603,8474669,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +4604,8474669,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4605,8474669,"TERMINAL",0,0,"4688899:0140404040417777777777777883",,terminal_output +4606,8534656,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4607,8534656,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4608,8534656,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4609,8534656,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4610,8534656,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4611,8534656,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4612,8534656,"TERMINAL",0,0,"850222354444531318:019:012:00:014:017:017:017:017:017:015:0131227",,terminal_output +4613,8534656,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4614,8534656,"TERMINAL",0,0,"2466679888895555555555555664:01",,terminal_output +4615,8534656,"TERMINAL",0,0,"468889214:004:004:004:004:017777777777777883",,terminal_output +4616,8534656,"TERMINAL",0,0,"687:0030407:01322223999999999999940405",,terminal_output +4617,8534656,"TERMINAL",0,0,"89:00222354444541411111111111111111111141227",,terminal_output +4618,8534656,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4619,8534656,"TERMINAL",0,0,"2577783099991066666666666667712",,terminal_output +4620,8534656,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4621,8534656,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4622,8534656,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4623,8534656,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4624,8534656,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4625,8534656,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4626,8534656,"TERMINAL",0,0,"7921515:0124333344:007:00303030303030303030305:009:012:016",,terminal_output +4627,8534656,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4628,8534656,"TERMINAL",0,0,"3:013555687777844444444444445530",,terminal_output +4629,8534656,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4630,8534656,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4631,8534656,"TERMINAL",0,0,"79319:011124333341010404040404040404040401011116",,terminal_output +4632,8534656,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4633,8534656,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4634,8534656,"TERMINAL",0,0,"35777820:009999406666666666666772",,terminal_output +4635,8594681,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4636,8594681,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4637,8594681,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4638,8594681,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4639,8594681,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4640,8594681,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output 
+4641,8594681,"TERMINAL",0,0,"79512131243333430309:002:00:001:005:008:008:008:008:008:006:003031316",,terminal_output +4642,8594681,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4643,8594681,"TERMINAL",0,0,"31355568777784444444444444555:00",,terminal_output +4644,8594681,"TERMINAL",0,0,"3577782099995:006666666666666772",,terminal_output +4645,8594681,"TERMINAL",0,0,"579998:0025:015:015:015:0128888888888888994",,terminal_output +4646,8594681,"TERMINAL",0,0,"798:01314124333344040101010101010101010104041416",,terminal_output +4647,8594681,"TERMINAL",0,0,"930:0133346555562222222222222338",,terminal_output +4648,8594681,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4649,8594681,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4650,8594681,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4651,8594681,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4652,8594681,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4653,8594681,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4654,8594681,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4655,8594681,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4656,8594681,"TERMINAL",0,0,"7921516:0124333345:008:00303030303030303030306:0050:013:016",,terminal_output +4657,8594681,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4658,8594681,"TERMINAL",0,0,"4:013555687777844444444444445530",,terminal_output +4659,8594681,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4660,8594681,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4661,8594681,"TERMINAL",0,0,"793150:011124333341010404040404040404040401011116",,terminal_output +4662,8594681,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4663,8594681,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4664,8594681,"TERMINAL",0,0,"3577781:009999406666666666666772",,terminal_output +4665,8654659,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4666,8654659,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4667,8654659,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4668,8654659,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4669,8654659,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4670,8654659,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4671,8654659,"TERMINAL",0,0,"79512131243333430302:00:001:002:006:009:009:009:009:009:007:003031316",,terminal_output +4672,8654659,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4673,8654659,"TERMINAL",0,0,"31355568777784444444444444556:00",,terminal_output +4674,8654659,"TERMINAL",0,0,"3577782099996:006666666666666772",,terminal_output +4675,8654659,"TERMINAL",0,0,"579999:0026:016:016:016:0128888888888888994",,terminal_output +4676,8654659,"TERMINAL",0,0,"799:01314124333344040101010101010101010104041416",,terminal_output +4677,8654659,"TERMINAL",0,0,"91:0133346555562222222222222338",,terminal_output +4678,8654659,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4679,8654659,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4680,8654659,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4681,8654659,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output 
+4682,8654659,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4683,8654659,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4684,8654659,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4685,8654659,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4686,8654659,"TERMINAL",0,0,"7921517:0124333346:009:00303030303030303030307:001:014:016",,terminal_output +4687,8654659,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4688,8654659,"TERMINAL",0,0,"5:013555687777844444444444445530",,terminal_output +4689,8654659,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4690,8654659,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4691,8654659,"TERMINAL",0,0,"79311:011124333341010404040404040404040401011116",,terminal_output +4692,8654659,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4693,8654659,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4694,8654659,"TERMINAL",0,0,"3577782:009999406666666666666772",,terminal_output +4695,8714694,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4696,8714694,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4697,8714694,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4698,8714694,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4699,8714694,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4700,8714694,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4701,8714694,"TERMINAL",0,0,"79512131243333430301:002:003:007:0010:0010:0010:0010:0010:008:003031316",,terminal_output +4702,8714694,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4703,8714694,"TERMINAL",0,0,"31355568777784444444444444557:00",,terminal_output +4704,8714694,"TERMINAL",0,0,"3577782099997:006666666666666772",,terminal_output +4705,8714694,"TERMINAL",0,0,"579991:00:0027:017:017:017:0128888888888888994",,terminal_output +4706,8714694,"TERMINAL",0,0,"7950:01314124333344040101010101010101010104041416",,terminal_output +4707,8714694,"TERMINAL",0,0,"92:0133346555562222222222222338",,terminal_output +4708,8714694,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4709,8714694,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4710,8714694,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4711,8714694,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4712,8714694,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4713,8714694,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4714,8714694,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4715,8714694,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4716,8714694,"TERMINAL",0,0,"7921518:0124333347:0050:00303030303030303030308:002:015:016",,terminal_output +4717,8714694,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4718,8714694,"TERMINAL",0,0,"6:013555687777844444444444445530",,terminal_output +4719,8714694,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4720,8714694,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4721,8714694,"TERMINAL",0,0,"79312:011124333341010404040404040404040401011116",,terminal_output +4722,8714694,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4723,8714694,"TERMINAL",0,0,"114666798888955555555555556641",,terminal_output 
+4724,8714694,"TERMINAL",0,0,"4688893:0140404040417777777777777883",,terminal_output +4725,8774661,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4726,8774661,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4727,8774661,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4728,8774661,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4729,8774661,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4730,8774661,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4731,8774661,"TERMINAL",0,0,"850222354444531312:013:014:018:011:011:011:011:011:019:0131227",,terminal_output +4732,8774661,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4733,8774661,"TERMINAL",0,0,"2466679888895555555555555668:01",,terminal_output +4734,8774661,"TERMINAL",0,0,"468889218:008:008:008:008:017777777777777883",,terminal_output +4735,8774661,"TERMINAL",0,0,"681:0030401:01322223999999999999940405",,terminal_output +4736,8774661,"TERMINAL",0,0,"83:00222354444541411111111111111111111141227",,terminal_output +4737,8774661,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4738,8774661,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +4739,8774661,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4740,8774661,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +4741,8774661,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4742,8774661,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4743,8774661,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4744,8774661,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4745,8774661,"TERMINAL",0,0,"6820509:002132222399999999999993:006:005",,terminal_output +4746,8774661,"TERMINAL",0,0,"82022235444458:011:01313131313131313131319:01227",,terminal_output +4747,8774661,"TERMINAL",0,0,"7:00244457666673333333333333449",,terminal_output +4748,8774661,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +4749,8774661,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4750,8774661,"TERMINAL",0,0,"68303:001031322223999999999999910105",,terminal_output +4751,8774661,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4752,8774661,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +4753,8774661,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4754,8774661,"TERMINAL",0,0,"4688894:0140404040417777777777777883",,terminal_output +4755,8834656,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4756,8834656,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4757,8834656,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4758,8834656,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4759,8834656,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4760,8834656,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4761,8834656,"TERMINAL",0,0,"850222354444531313:014:015:019:012:012:012:012:012:0150:0131227",,terminal_output +4762,8834656,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4763,8834656,"TERMINAL",0,0,"2466679888895555555555555669:01",,terminal_output +4764,8834656,"TERMINAL",0,0,"468889219:009:009:009:009:017777777777777883",,terminal_output 
+4765,8834656,"TERMINAL",0,0,"682:0030402:01322223999999999999940405",,terminal_output +4766,8834656,"TERMINAL",0,0,"84:00222354444541411111111111111111111141227",,terminal_output +4767,8834656,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4768,8834656,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +4769,8834656,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4770,8834656,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +4771,8834656,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4772,8834656,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4773,8834657,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4774,8834657,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4775,8834657,"TERMINAL",0,0,"6820509:00:002132222399999999999994:007:005",,terminal_output +4776,8834657,"TERMINAL",0,0,"82022235444459:012:01313131313131313131313:00:01227",,terminal_output +4777,8834657,"TERMINAL",0,0,"8:00244457666673333333333333449",,terminal_output +4778,8834657,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +4779,8834657,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4780,8834657,"TERMINAL",0,0,"68304:001031322223999999999999910105",,terminal_output +4781,8834657,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4782,8834657,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +4783,8834657,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4784,8834657,"TERMINAL",0,0,"4688895:0140404040417777777777777883",,terminal_output +4785,8894675,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4786,8894675,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4787,8894675,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4788,8894675,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4789,8894675,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4790,8894675,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4791,8894675,"TERMINAL",0,0,"850222354444531314:015:016:0110:013:013:013:013:013:011:0131227",,terminal_output +4792,8894675,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4793,8894675,"TERMINAL",0,0,"2466679888895555555555555669:00:01",,terminal_output +4794,8894675,"TERMINAL",0,0,"4688892130:0030:0030:0030:0030:017777777777777883",,terminal_output +4795,8894675,"TERMINAL",0,0,"683:0030403:01322223999999999999940405",,terminal_output +4796,8894675,"TERMINAL",0,0,"85:00222354444541411111111111111111111141227",,terminal_output +4797,8894675,"TERMINAL",0,0,"40244457666673333333333333449",,terminal_output +4798,8894675,"TERMINAL",0,0,"24666798888955555555555556611",,terminal_output +4799,8894675,"TERMINAL",0,0,"4688893110101010117777777777777883",,terminal_output +4800,8894675,"TERMINAL",0,0,"6810405011322223999999999999950505",,terminal_output +4801,8894675,"TERMINAL",0,0,"810222354444551512121212121212121212151227",,terminal_output +4802,8894675,"TERMINAL",0,0,"50244457666673333333333333449",,terminal_output +4803,8894675,"TERMINAL",0,0,"24666798888955555555555556621",,terminal_output +4804,8894675,"TERMINAL",0,0,"4688894120202020217777777777777883",,terminal_output +4805,8894675,"TERMINAL",0,0,"6820501:002132222399999999999995:008:005",,terminal_output 
+4806,8894675,"TERMINAL",0,0,"820222354444550:013:01313131313131313131311:01227",,terminal_output +4807,8894675,"TERMINAL",0,0,"9:00244457666673333333333333449",,terminal_output +4808,8894675,"TERMINAL",0,0,"24666798888955555555555556631",,terminal_output +4809,8894675,"TERMINAL",0,0,"4688895130303030317777777777777883",,terminal_output +4810,8894675,"TERMINAL",0,0,"68305:001031322223999999999999910105",,terminal_output +4811,8894675,"TERMINAL",0,0,"830222354444511114141414141414141414111227",,terminal_output +4812,8894675,"TERMINAL",0,0,"10244457666673333333333333449",,terminal_output +4813,8894675,"TERMINAL",0,0,"24666798888955555555555556641",,terminal_output +4814,8894675,"TERMINAL",0,0,"4688896:0140404040417777777777777883",,terminal_output +4815,8954655,"TERMINAL",0,0,"6840102041322223999999999999920205",,terminal_output +4816,8954655,"TERMINAL",0,0,"840222354444521215151515151515151515121227",,terminal_output +4817,8954655,"TERMINAL",0,0,"20244457666673333333333333449",,terminal_output +4818,8954655,"TERMINAL",0,0,"24666798888955555555555556651",,terminal_output +4819,8954655,"TERMINAL",0,0,"4688891150505050517777777777777883",,terminal_output +4820,8954655,"TERMINAL",0,0,"6850203051322223999999999999930305",,terminal_output +4821,8954655,"TERMINAL",0,0,"850222354444531315:016:017:011:014:014:014:014:014:012:0131227",,terminal_output +4822,8954655,"TERMINAL",0,0,"30244457666673333333333333449",,terminal_output +4823,8954655,"TERMINAL",0,0,"2466679888895555555555555661:01",,terminal_output +4824,8954655,"TERMINAL",0,0,"468889211:001:001:001:001:017777777777777883",,terminal_output +4825,8954655,"TERMINAL",0,0,"684:0030404:01322223999999999999940405",,terminal_output +4826,8954655,"TERMINAL",0,0,"86:00222354444541411111111111111111111141227",,terminal_output +4827,8954655,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4828,8954655,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4829,8954655,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4830,8954655,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4831,8954655,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4832,8954655,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4833,8954655,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4834,8954655,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4835,8954655,"TERMINAL",0,0,"7921512:0124333341:004:00303030303030303030302:006:019:016",,terminal_output +4836,8954655,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4837,8954655,"TERMINAL",0,0,"50:013555687777844444444444445530",,terminal_output +4838,8954655,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4839,8954655,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4840,8954655,"TERMINAL",0,0,"79316:011124333341010404040404040404040401011116",,terminal_output +4841,8954655,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4842,8954655,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4843,8954655,"TERMINAL",0,0,"3577787:009999406666666666666772",,terminal_output +4844,9014686,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4845,9014686,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4846,9014687,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4847,9014687,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output 
+4848,9014687,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4849,9014687,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4850,9014687,"TERMINAL",0,0,"79512131243333430306:007:008:002:005:005:005:005:005:003:003031316",,terminal_output +4851,9014687,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4852,9014687,"TERMINAL",0,0,"31355568777784444444444444552:00",,terminal_output +4853,9014687,"TERMINAL",0,0,"3577782099992:006666666666666772",,terminal_output +4854,9014687,"TERMINAL",0,0,"579995:0022:012:012:012:0128888888888888994",,terminal_output +4855,9014687,"TERMINAL",0,0,"795:01314124333344040101010101010101010104041416",,terminal_output +4856,9014687,"TERMINAL",0,0,"97:0133346555562222222222222338",,terminal_output +4857,9014687,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4858,9014687,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output +4859,9014687,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4860,9014687,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4861,9014687,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4862,9014687,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4863,9014687,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4864,9014687,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4865,9014687,"TERMINAL",0,0,"7921513:0124333342:005:00303030303030303030303:007:019:00:016",,terminal_output +4866,9014687,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4867,9014687,"TERMINAL",0,0,"1:013555687777844444444444445530",,terminal_output +4868,9014687,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4869,9014687,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4870,9014687,"TERMINAL",0,0,"79317:011124333341010404040404040404040401011116",,terminal_output +4871,9014687,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4872,9014687,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4873,9014687,"TERMINAL",0,0,"3577788:009999406666666666666772",,terminal_output +4874,9074663,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4875,9074664,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4876,9074664,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4877,9074664,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4878,9074664,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4879,9074664,"TERMINAL",0,0,"579995025151515128888888888888994",,terminal_output +4880,9074664,"TERMINAL",0,0,"79512131243333430307:008:009:003:006:006:006:006:006:004:003031316",,terminal_output +4881,9074664,"TERMINAL",0,0,"95133346555562222222222222338",,terminal_output +4882,9074664,"TERMINAL",0,0,"31355568777784444444444444553:00",,terminal_output +4883,9074664,"TERMINAL",0,0,"3577782099993:006666666666666772",,terminal_output +4884,9074664,"TERMINAL",0,0,"579996:0023:013:013:013:0128888888888888994",,terminal_output +4885,9074664,"TERMINAL",0,0,"796:01314124333344040101010101010101010104041416",,terminal_output +4886,9074664,"TERMINAL",0,0,"98:0133346555562222222222222338",,terminal_output +4887,9074664,"TERMINAL",0,0,"413555687777844444444444445510",,terminal_output +4888,9074664,"TERMINAL",0,0,"357778309999106666666666666772",,terminal_output 
+4889,9074664,"TERMINAL",0,0,"579991021111111128888888888888994",,terminal_output +4890,9074664,"TERMINAL",0,0,"7911415124333345050202020202020202020205051516",,terminal_output +4891,9074664,"TERMINAL",0,0,"91133346555562222222222222338",,terminal_output +4892,9074664,"TERMINAL",0,0,"513555687777844444444444445520",,terminal_output +4893,9074664,"TERMINAL",0,0,"357778409999206666666666666772",,terminal_output +4894,9074664,"TERMINAL",0,0,"579992022121212128888888888888994",,terminal_output +4895,9074664,"TERMINAL",0,0,"7921514:0124333343:006:00303030303030303030304:008:011:016",,terminal_output +4896,9074664,"TERMINAL",0,0,"92133346555562222222222222338",,terminal_output +4897,9074664,"TERMINAL",0,0,"2:013555687777844444444444445530",,terminal_output +4898,9074664,"TERMINAL",0,0,"357778509999306666666666666772",,terminal_output +4899,9074664,"TERMINAL",0,0,"579993023131313128888888888888994",,terminal_output +4900,9074664,"TERMINAL",0,0,"79318:011124333341010404040404040404040401011116",,terminal_output +4901,9074664,"TERMINAL",0,0,"93133346555562222222222222338",,terminal_output +4902,9074664,"TERMINAL",0,0,"113555687777844444444444445540",,terminal_output +4903,9074664,"TERMINAL",0,0,"3577789:009999406666666666666772",,terminal_output +4904,9134661,"TERMINAL",0,0,"579994024141414128888888888888994",,terminal_output +4905,9134661,"TERMINAL",0,0,"7941112124333342020505050505050505050502021216",,terminal_output +4906,9134661,"TERMINAL",0,0,"94133346555562222222222222338",,terminal_output +4907,9134661,"TERMINAL",0,0,"213555687777844444444444445550",,terminal_output +4908,9134661,"TERMINAL",0,0,"357778109999506666666666666772",,terminal_output +4909,9134661,"TERMINAL",0,0,"5799950251515103\t3:525278:54:088:58:57 1:53:28635:1556:28544:27 2:07:581438:58329:5823813:581756:5888498520861829972 alfred.ngu6:35:347:5754:58130101 franz.sram4:55:0247:57 3:04:284032 alfred.ngu1:53:391:53:56 8:58:29110:300:56 9:01:29529993 nishant.k2 3843T18:29:1801:48:31 19:03:54[002,007]30008 alfred.ng1 1622:58:503T22:58:56 21:53:29006",,terminal_output +4910,9134661,"TERMINAL",0,0,"7951213124333430308:009:0010:004:007:007:007:007:007:005:00303131631",,terminal_output +4911,9134661,"TERMINAL",0,0,"95133346555622222222222223383",,terminal_output +4912,9134661,"TERMINAL",0,0,"3135556877784444444444444554:005",,terminal_output +4913,9134661,"TERMINAL",0,0,"357778209994:0066666666666667727",,terminal_output +4914,9134661,"TERMINAL",0,0,"579997:0024:014:014:01288888888888889949",,terminal_output +4915,9134661,"TERMINAL",0,0,"797:013141243334404010101010101010101010404141641",,terminal_output +4916,9134661,"TERMINAL",0,0,"99:0133346555622222222222223383",,terminal_output +4917,9134661,"TERMINAL",0,0,"41355516 R2025-10-04T20:52:401hai006687778444444444444455105",,terminal_output +4918,9134661,"TERMINAL",0,0,"3577738309991066666666666667727",,terminal_output +4919,9134661,"TERMINAL",0,0,"579995102111111288888888888889949",,terminal_output +4920,9134661,"TERMINAL",0,0,"791141517243334505020202020202020202020505151651",,terminal_output +4921,9134661,"TERMINAL",0,0,"911333946555622222222222223383",,terminal_output +4922,9134661,"TERMINAL",0,0,"51355511687778444444444444455205",,terminal_output +4923,9134661,"TERMINAL",0,0,"3577738409992066666666666667727",,terminal_output +4924,9134661,"TERMINAL",0,0,"579995202212121288888888888889949",,terminal_output +4925,9134661,"TERMINAL",0,0,"7921515:0172433344:007:00303030303030303030305:009:012:0164:01",,terminal_output 
+4926,9134661,"TERMINAL",0,0,"921333946555622222222222223383",,terminal_output +4927,9134661,"TERMINAL",0,0,"3:01355521687778444444444444455305",,terminal_output +4928,9134661,"TERMINAL",0,0,"3577738509993066666666666667727",,terminal_output +4929,9134661,"TERMINAL",0,0,"579995302313131288888888888889949",,terminal_output +4930,9134661,"TERMINAL",0,0,"730329:02128354445111141414141414141414141111212712",,terminal_output +4931,9134661,"TERMINAL",0,0,"102444Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)3057666733333333333334494",,terminal_output +4932,9134661,"TERMINAL",0,0,"246662798889555555555555566416",,terminal_output +4933,9134661,"TERMINAL",0,0,"468884930:014040404177777777777778838",,terminal_output +4934,9194654,"TERMINAL",0,0,"684010206413222399999999999992020520",,terminal_output +4935,9194654,"TERMINAL",0,0,"8402228354445212151515151515151515151212272",,terminal_output +4936,9194654,"TERMINAL",0,0,"2024444057666733333333333334494",,terminal_output +4937,9194654,"TERMINAL",0,0,"246662798889555555555555566516",,terminal_output +4938,9194654,"TERMINAL",0,0,"4688849115050505177777777777778838",,terminal_output +4939,9194654,"TERMINAL",0,0,"685020306513222399999999999993030530",,terminal_output +4940,9194654,"TERMINAL",0,0,"850222835444531319:0110:011:015:018:018:018:018:018:016:01312272",,terminal_output +4941,9194654,"TERMINAL",0,0,"3024445057666733333333333334494",,terminal_output +4942,9194654,"TERMINAL",0,0,"2466627988895555555555555665:016",,terminal_output +4943,9194654,"TERMINAL",0,0,"4688849215:005:005:005:0177777777777778838",,terminal_output +4944,9194654,"TERMINAL",0,0,"688:00304068:013222399999999999994040540",,terminal_output +4945,9194655,"TERMINAL",0,0,"840:002228354445414111111111111111111111412272",,terminal_output +4946,9194655,"TERMINAL",0,0,"4024441:0057666733333333333334494",,terminal_output +4947,9194655,"TERMINAL",0,0,"246662798889555555555555566116",,terminal_output +4948,9194655,"TERMINAL",0,0,"4688849311010101177777777777778838",,terminal_output +4949,9194655,"TERMINAL",0,0,"681040506113222399999999999995050550",,terminal_output +4950,9194655,"TERMINAL",0,0,"8102228354445515121212121212121212121512272",,terminal_output +4951,9194655,"TERMINAL",0,0,"5024441057666733333333333334494",,terminal_output +4952,9194655,"TERMINAL",0,0,"246662798889555555555555566216",,terminal_output +4953,9194655,"TERMINAL",0,0,"4688849412020202177777777777778838",,terminal_output +4954,9194655,"TERMINAL",0,0,"6820506:006213222399999999999999:00:003:0055:00",,terminal_output +4955,9194655,"TERMINAL",0,0,"82022283544455:018:01313131313131313131316:012272",,terminal_output +4956,9194655,"TERMINAL",0,0,"4:0024442057666733333333333334494",,terminal_output +4957,9194655,"TERMINAL",0,0,"246662798889555555555555566316",,terminal_output +4958,9194655,"TERMINAL",0,0,"4688849513030303177777777777778838",,terminal_output +4959,9194655,"TERMINAL",0,0,"68305:00:00106313222399999999999991010510",,terminal_output +4960,9194655,"TERMINAL",0,0,"8302228354445111141414141414141414141112272",,terminal_output +4961,9194655,"TERMINAL",0,0,"1024443057666733333333333334494",,terminal_output +4962,9194655,"TERMINAL",0,0,"246662798889555555555555566416",,terminal_output +4963,9194655,"TERMINAL",0,0,"46888491:014040404177777777777778838",,terminal_output +4964,9254657,"TERMINAL",0,0,"684010206413222399999999999992020520",,terminal_output +4965,9254657,"TERMINAL",0,0,"8402228354445212151515151515151515151212272",,terminal_output 
+4966,9254657,"TERMINAL",0,0,"2024444057666733333333333334494",,terminal_output +4967,9254657,"TERMINAL",0,0,"246662798889555555555555566516",,terminal_output +4968,9254657,"TERMINAL",0,0,"4688849115050505177777777777778838",,terminal_output +4969,9254657,"TERMINAL",0,0,"685020306513222399999999999993030530",,terminal_output +4970,9254657,"TERMINAL",0,0,"8502228354445313110:011:012:016:019:019:019:019:019:017:01312272",,terminal_output +4971,9254657,"TERMINAL",0,0,"3024445057666733333333333334494",,terminal_output +4972,9254657,"TERMINAL",0,0,"2466627988895555555555555666:016",,terminal_output +4973,9254657,"TERMINAL",0,0,"4688849216:006:006:006:0177777777777778838",,terminal_output +4974,9254657,"TERMINAL",0,0,"689:00304069:013222399999999999994040540",,terminal_output +4975,9254657,"TERMINAL",0,0,"81:002228354445414111111111111111111111412272",,terminal_output +4976,9254657,"TERMINAL",0,0,"4024442:0057666733333333333334494",,terminal_output +4977,9254657,"TERMINAL",0,0,"246662798889555555555555566116",,terminal_output +4978,9254657,"TERMINAL",0,0,"4688849311010101177777777777778838",,terminal_output +4979,9254657,"TERMINAL",0,0,"681040506113222399999999999995050550",,terminal_output +4980,9254657,"TERMINAL",0,0,"8102228354445515121212121212121212121512272",,terminal_output +4981,9254657,"TERMINAL",0,0,"5024441057666733333333333334494",,terminal_output +4982,9254657,"TERMINAL",0,0,"246662798889555555555555566216",,terminal_output +4983,9254657,"TERMINAL",0,0,"4688849412020202177777777777778838",,terminal_output +4984,9254657,"TERMINAL",0,0,"6820507:006213222399999999999991:004:0056:00",,terminal_output +4985,9254657,"TERMINAL",0,0,"82022283544456:019:01313131313131313131317:012272",,terminal_output +4986,9254657,"TERMINAL",0,0,"5:0024442057666733333333333334494",,terminal_output +4987,9254657,"TERMINAL",0,0,"246662798889555555555555566316",,terminal_output +4988,9254657,"TERMINAL",0,0,"4688849513030303177777777777778838",,terminal_output +4989,9254657,"TERMINAL",0,0,"68301:00106313222399999999999991010510",,terminal_output +4990,9254657,"TERMINAL",0,0,"8302228354445111141414141414141414141112272",,terminal_output +4991,9254657,"TERMINAL",0,0,"1024443057666733333333333334494",,terminal_output +4992,9254657,"TERMINAL",0,0,"246662798889555555555555566416",,terminal_output +4993,9254657,"TERMINAL",0,0,"46888492:014040404177777777777778838",,terminal_output +4994,9314654,"TERMINAL",0,0,"684010206413222399999999999992020520",,terminal_output +4995,9314654,"TERMINAL",0,0,"8402228354445212151515151515151515151212272",,terminal_output +4996,9314654,"TERMINAL",0,0,"2024444057666733333333333334494",,terminal_output +4997,9314654,"TERMINAL",0,0,"246662798889555555555555566516",,terminal_output +4998,9314654,"TERMINAL",0,0,"4688849115050505177777777777778838",,terminal_output +4999,9314654,"TERMINAL",0,0,"685020306513222399999999999993030530",,terminal_output +5000,9314654,"TERMINAL",0,0,"850222835444531311:012:013:017:0120:0120:0120:0120:0120:018:01312272",,terminal_output +5001,9314654,"TERMINAL",0,0,"3024445057666733333333333334494",,terminal_output +5002,9314654,"TERMINAL",0,0,"2466627988895555555555555667:016",,terminal_output +5003,9314654,"TERMINAL",0,0,"4688849217:007:007:007:0177777777777778838",,terminal_output +5004,9314654,"TERMINAL",0,0,"685:00:003040610:013222399999999999994040540",,terminal_output +5005,9314654,"TERMINAL",0,0,"82:002228354445414111111111111111111111412272",,terminal_output +5006,9314654,"TERMINAL",0,0,"4024443:0057666733333333333334494",,terminal_output 
+5007,9314654,"TERMINAL",0,0,"246662798889555555555555566116",,terminal_output +5008,9314654,"TERMINAL",0,0,"4688849311010101177777777777778838",,terminal_output +5009,9314654,"TERMINAL",0,0,"681040506113222399999999999995050550",,terminal_output +5010,9314654,"TERMINAL",0,0,"8102228354445515121212121212121212121512272",,terminal_output +5011,9314654,"TERMINAL",0,0,"5024441057666733333333333334494",,terminal_output +5012,9314654,"TERMINAL",0,0,"246662798889555555555555566216",,terminal_output +5013,9314654,"TERMINAL",0,0,"4688849412020202177777777777778838",,terminal_output +5014,9314654,"TERMINAL",0,0,"6820508:006213222399999999999992:005:0057:00",,terminal_output +5015,9314654,"TERMINAL",0,0,"82022283544457:012:00:01313131313131313131318:012272",,terminal_output +5016,9314654,"TERMINAL",0,0,"6:0024442057666733333333333334494",,terminal_output +5017,9314654,"TERMINAL",0,0,"246662798889555555555555566316",,terminal_output +5018,9314654,"TERMINAL",0,0,"4688849513030303177777777777778838",,terminal_output +5019,9314654,"TERMINAL",0,0,"68302:00106313222399999999999991010510",,terminal_output +5020,9314654,"TERMINAL",0,0,"8302228354445111141414141414141414141112272",,terminal_output +5021,9314654,"TERMINAL",0,0,"1024443057666733333333333334494",,terminal_output +5022,9314654,"TERMINAL",0,0,"246662798889555555555555566416",,terminal_output +5023,9314654,"TERMINAL",0,0,"46888493:014040404177777777777778838",,terminal_output +5024,9374665,"TERMINAL",0,0,"684010206413222399999999999992020520",,terminal_output +5025,9374665,"TERMINAL",0,0,"8402228354445212151515151515151515151212272",,terminal_output +5026,9374665,"TERMINAL",0,0,"2024444057666733333333333334494",,terminal_output +5027,9374665,"TERMINAL",0,0,"246662798889555555555555566516",,terminal_output +5028,9374665,"TERMINAL",0,0,"4688849115050505177777777777778838",,terminal_output +5029,9374665,"TERMINAL",0,0,"685020306513222399999999999993030530",,terminal_output +5030,9374665,"TERMINAL",0,0,"850222835444531312:013:014:018:011:011:011:011:011:019:01312272",,terminal_output +5031,9374665,"TERMINAL",0,0,"3024445057666733333333333334494",,terminal_output +5032,9374665,"TERMINAL",0,0,"2466627988895555555555555668:016",,terminal_output +5033,9374665,"TERMINAL",0,0,"4799951:00228:018:018:018:0288888888888889949",,terminal_output +5034,9374665,"TERMINAL",0,0,"791:0131417243334404010101010101010101010404141641",,terminal_output +5035,9374665,"TERMINAL",0,0,"93:01333946555622222222222223383",,terminal_output +5036,9374665,"TERMINAL",0,0,"4135554:01687778444444444444455105",,terminal_output +5037,9374665,"TERMINAL",0,0,"3577738309991066666666666667727",,terminal_output +5038,9374665,"TERMINAL",0,0,"579995102111111288888888888889949",,terminal_output +5039,9374665,"TERMINAL",0,0,"791141517243334505020202020202020202020505151651",,terminal_output +5040,9374665,"TERMINAL",0,0,"911333946555622222222222223383",,terminal_output +5041,9374665,"TERMINAL",0,0,"51355511687778444444444444455205",,terminal_output +5042,9374665,"TERMINAL",0,0,"3577738409992066666666666667727",,terminal_output +5043,9374665,"TERMINAL",0,0,"579995202212121288888888888889949",,terminal_output +5044,9374665,"TERMINAL",0,0,"7921519:0172433348:001:00303030303030303030309:003:016:0168:01",,terminal_output +5045,9374665,"TERMINAL",0,0,"921333946555622222222222223383",,terminal_output +5046,9374665,"TERMINAL",0,0,"7:01355521687778444444444444455305",,terminal_output +5047,9374665,"TERMINAL",0,0,"3577738509993066666666666667727",,terminal_output 
+5048,9374665,"TERMINAL",0,0,"[rows 5048-5061 collapsed: further garbled screen-refresh deltas; no recoverable content]",,terminal_output
+5062,9511727,"TERMINAL",0,0,"[rows 5062 ff. cleaned: full watch refresh — 'Every 2.0s: squeue' on hai-login2.haicore.berlin, Sat Oct 4 20:59:14 2025; repeated near-identically at 20:59:16, duplicates dropped. Columns: JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON). Pending: xiao.liu interacti 30111 (Resources), 30142 and 30129 (Priority); franz.sram standard 30143-30178 and 30139-30141, 1 node / 8 CPUs each (Priority); 30138 (Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions). Running: xiao.liu 30025-30028 on hai003/hai004/hai006/hai008; franz.sram 30117-30137 and 30101, 16 CPUs each, on hai001/hai004/hai005/hai006; alfred.ngu 29972, 30031, 30032, 30008, 30009; nishant.ku 29993 (2 nodes, 384 CPUs) on hai[002,007]]",,terminal_output
+5075,9531366,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2053,0,"",shellscript,selection_command
+5076,9531640,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2033,0,"",shellscript,selection_command
+5077,9531765,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",2008,0,"",shellscript,selection_command
+5078,9531910,"slurm/dev/franz/berlin/atari/tokenizer_lr_3e-4/tokenizer.sh",1985,0,"",shellscript,selection_command
+5079,9532986,"TERMINAL",0,0,"[rows 5079-5107 collapsed: garbled screen-refresh deltas; no recoverable content]",,terminal_output
+5108,9591249,"TERMINAL",0,0,"[rows 5108-5553 collapsed: continued garbled screen-refresh deltas from the squeue monitor; the only legible fragments show two 16-CPU jobs starting at 2025-10-04T21:07:40 on hai004 and hai005, and job 30137 (franz.sram, standard, 1 node, 16 CPUs, running since 2025-10-04T20:52:40) reaching 15:00 elapsed of its 1-00:00:00 limit on hai006]",,terminal_output
+5554,10487352,"TERMINAL",0,0,"8102228883444551512121",,terminal_output +5555,10489363,"TERMINAL",0,0,"502444101010566673333",,terminal_output +5556,10491381,"TERMINAL",0,0,"24666222788895555",,terminal_output +5557,10493420,"TERMINAL",0,0,"468884449202020217777",,terminal_output +5558,10495468,"TERMINAL",0,0,"6820508:006662122239999",,terminal_output +5559,10497413,"TERMINAL",0,0,"820222888344457:0120:013131",,terminal_output +5560,10499408,"TERMINAL",0,0,"6:002444202020566673333",,terminal_output +5561,10501413,"TERMINAL",0,0,"24666222788895555",,terminal_output +5562,10503457,"TERMINAL",0,0,"468884449303030317777",,terminal_output +5563,10505480,"TERMINAL",0,0,"68302:00106663122239999",,terminal_output +5564,10507440,"TERMINAL",0,0,"8302228883444511114141",,terminal_output +5565,10509451,"TERMINAL",0,0,"102444303030566673333",,terminal_output +5566,10511460,"TERMINAL",0,0,"24666222788895555",,terminal_output +5567,10513493,"TERMINAL",0,0,"468884449404040417777",,terminal_output +5568,10515474,"TERMINAL",0,0,"684010206664122239999",,terminal_output +5569,10517486,"TERMINAL",0,0,"8402228883444521215151",,terminal_output +5570,10519495,"TERMINAL",0,0,"202444404040566673333",,terminal_output +5571,10521501,"TERMINAL",0,0,"24666222788895555",,terminal_output +5572,10523512,"TERMINAL",0,0,"468884449505050517777",,terminal_output +5573,10525520,"TERMINAL",0,0,"685020306665122239999",,terminal_output +5574,10527527,"TERMINAL",0,0,"8502228883444531312:013:01",,terminal_output +5575,10529538,"TERMINAL",0,0,"302444505050566673333",,terminal_output +5576,10531546,"TERMINAL",0,0,"24666222788895555",,terminal_output +5577,10533564,"TERMINAL",0,0,"4688844498:008:008:008:017777",,terminal_output +5578,10535569,"TERMINAL",0,0,"681:0030406661:0122239999",,terminal_output +5579,10537579,"TERMINAL",0,0,"83:002228883444541411111",,terminal_output +5580,10539588,"TERMINAL",0,0,"4035559:019:014:01677784444",,terminal_output +5581,10541595,"TERMINAL",0,0,"357773338999106666",,terminal_output +5582,10543604,"TERMINAL",0,0,"579995551011111128888",,terminal_output +5583,10545613,"TERMINAL",0,0,"791141517772333450502020",,terminal_output +5584,10547621,"TERMINAL",0,0,"911333999455562222",,terminal_output +5585,10549631,"TERMINAL",0,0,"513555111111677784444",,terminal_output +5586,10551638,"TERMINAL",0,0,"357773338999206666",,terminal_output +5587,10553647,"TERMINAL",0,0,"579995552021212128888",,terminal_output +5588,10555660,"TERMINAL",0,0,"7921519:01777233348:001:003030",,terminal_output +5589,10557667,"TERMINAL",0,0,"921333999455562222",,terminal_output +5590,10559672,"TERMINAL",0,0,"7:013555212121677784444",,terminal_output +5591,10561682,"TERMINAL",0,0,"357773338999306666",,terminal_output +5592,10563689,"TERMINAL",0,0,"579995553031313128888",,terminal_output +5593,10565700,"TERMINAL",0,0,"79313:01117772333410104040",,terminal_output +5594,10567708,"TERMINAL",0,0,"931333999455562222",,terminal_output +5595,10569716,"TERMINAL",0,0,"113555313131677784444",,terminal_output +5596,10571726,"TERMINAL",0,0,"357773338999406666",,terminal_output +5597,10573735,"TERMINAL",0,0,"579995554041414128888",,terminal_output +5598,10575747,"TERMINAL",0,0,"794111217772333420205050",,terminal_output +5599,10577758,"TERMINAL",0,0,"941333999455562222",,terminal_output +5600,10579769,"TERMINAL",0,0,"213555414141677784444",,terminal_output +5601,10581774,"TERMINAL",0,0,"357773338999506666",,terminal_output +5602,10583780,"TERMINAL",0,0,"579995555051515128888",,terminal_output 
+5603,10585789,"TERMINAL",0,0,"795121317772333430303:004:00",,terminal_output +5604,10587812,"TERMINAL",0,0,"951333999455562222",,terminal_output +5605,10589816,"TERMINAL",0,0,"313555515151677784444",,terminal_output +5606,10591833,"TERMINAL",0,0,"3577733389999:006666",,terminal_output +5607,10593841,"TERMINAL",0,0,"579995552:009:019:019:0128888",,terminal_output +5608,10595837,"TERMINAL",0,0,"792:0131417772333440401010",,terminal_output +5609,10597867,"TERMINAL",0,0,"94:01333999455562222",,terminal_output +5610,10599853,"TERMINAL",0,0,"41355510:0110:015:01677784444",,terminal_output +5611,10601863,"TERMINAL",0,0,"357773338999106666",,terminal_output +5612,10603874,"TERMINAL",0,0,"579995551011111128888",,terminal_output +5613,10605891,"TERMINAL",0,0,"791141517772333450502020",,terminal_output +5614,10607892,"TERMINAL",0,0,"911333999455562222",,terminal_output +5615,10609908,"TERMINAL",0,0,"513555111111677784444",,terminal_output +5616,10611914,"TERMINAL",0,0,"357773338999206666",,terminal_output +5617,10613922,"TERMINAL",0,0,"579995552021212128888",,terminal_output +5618,10615930,"TERMINAL",0,0,"79215130:01777233349:002:003030",,terminal_output +5619,10617940,"TERMINAL",0,0,"921333999455562222",,terminal_output +5620,10619949,"TERMINAL",0,0,"8:013555212121677784444",,terminal_output +5621,10621957,"TERMINAL",0,0,"357773338999306666",,terminal_output +5622,10623984,"TERMINAL",0,0,"579995553031313128888",,terminal_output +5623,10625980,"TERMINAL",0,0,"79314:01117772333410104040",,terminal_output +5624,10628006,"TERMINAL",0,0,"931333999455562222",,terminal_output +5625,10630024,"TERMINAL",0,0,"113555313131677784444",,terminal_output +5626,10632006,"TERMINAL",0,0,"357773338999406666",,terminal_output +5627,10634021,"TERMINAL",0,0,"579995554041414128888",,terminal_output +5628,10636038,"TERMINAL",0,0,"794111217772333420205050",,terminal_output +5629,10638030,"TERMINAL",0,0,"941333999455562222",,terminal_output +5630,10640060,"TERMINAL",0,0,"213555414141677784444",,terminal_output +5631,10642052,"TERMINAL",0,0,"357773338999506666",,terminal_output +5632,10644096,"TERMINAL",0,0,"579995555051515128888",,terminal_output +5633,10646069,"TERMINAL",0,0,"795121317772333430304:005:00",,terminal_output +5634,10648078,"TERMINAL",0,0,"951333999455562222",,terminal_output +5635,10650092,"TERMINAL",0,0,"313555515151677784444",,terminal_output +5636,10652095,"TERMINAL",0,0,"3577733389992:00:006666",,terminal_output +5637,10654104,"TERMINAL",0,0,"579995553:002:00:012:00:012:00:0128888",,terminal_output +5638,10656123,"TERMINAL",0,0,"793:0131417772333440401010",,terminal_output +5639,10658184,"TERMINAL",0,0,"95:01333999455562222",,terminal_output +5640,10660204,"TERMINAL",0,0,"4135551:011:016:01677784444",,terminal_output +5641,10662144,"TERMINAL",0,0,"357773338999106666",,terminal_output +5642,10664151,"TERMINAL",0,0,"579995551011111128888",,terminal_output +5643,10666165,"TERMINAL",0,0,"791141517772333450502020",,terminal_output +5644,10668171,"TERMINAL",0,0,"911333999455562222",,terminal_output +5645,10670180,"TERMINAL",0,0,"513555111111677784444",,terminal_output +5646,10672211,"TERMINAL",0,0,"357773338999206666",,terminal_output +5647,10674197,"TERMINAL",0,0,"579995552021212128888",,terminal_output +5648,10676251,"TERMINAL",0,0,"7921511:017772333420:003:003030",,terminal_output +5649,10678253,"TERMINAL",0,0,"921333999455562222",,terminal_output +5650,10680223,"TERMINAL",0,0,"9:013555212121677784444",,terminal_output +5651,10682237,"TERMINAL",0,0,"357773338999306666",,terminal_output 
+5652,10684246,"TERMINAL",0,0,"579995553031313128888",,terminal_output +5653,10686263,"TERMINAL",0,0,"79315:01117772333410104040",,terminal_output +5654,10688263,"TERMINAL",0,0,"931333999455562222",,terminal_output +5655,10690280,"TERMINAL",0,0,"113555313131677784444",,terminal_output +5656,10692332,"TERMINAL",0,0,"357773338999406666",,terminal_output +5657,10694290,"TERMINAL",0,0,"579995554041414128888",,terminal_output +5658,10696302,"TERMINAL",0,0,"794111217772333420205050",,terminal_output +5659,10698307,"TERMINAL",0,0,"941333999455562222",,terminal_output +5660,10700373,"TERMINAL",0,0,"213555414141677784444",,terminal_output +5661,10702418,"TERMINAL",0,0,"357773338999506666",,terminal_output +5662,10704337,"TERMINAL",0,0,"579995555051515128888",,terminal_output +5663,10706347,"TERMINAL",0,0,"795121317772333430305:006:00",,terminal_output +5664,10708355,"TERMINAL",0,0,"951333999455562222",,terminal_output +5665,10710405,"TERMINAL",0,0,"313555515151677784444",,terminal_output +5666,10712373,"TERMINAL",0,0,"3577733389991:006666",,terminal_output +5667,10714384,"TERMINAL",0,0,"579995554:001:011:011:0128888",,terminal_output +5668,10716449,"TERMINAL",0,0,"794:0131417772333440401010",,terminal_output +5669,10718407,"TERMINAL",0,0,"96:01333999455562222",,terminal_output +5670,10720441,"TERMINAL",0,0,"4135552:012:017:01677784444",,terminal_output +5671,10722417,"TERMINAL",0,0,"357773338999106666",,terminal_output +5672,10724426,"TERMINAL",0,0,"579995551011111128888",,terminal_output +5673,10726434,"TERMINAL",0,0,"791141517772333450502020",,terminal_output +5674,10728445,"TERMINAL",0,0,"911333999455562222",,terminal_output +5675,10730463,"TERMINAL",0,0,"513555111111677784444",,terminal_output +5676,10732523,"TERMINAL",0,0,"357773338999206666",,terminal_output +5677,10734474,"TERMINAL",0,0,"579995552021212128888",,terminal_output +5678,10736480,"TERMINAL",0,0,"7921512:01777233341:004:003030",,terminal_output +5679,10738489,"TERMINAL",0,0,"921333999455562222",,terminal_output +5680,10740504,"TERMINAL",0,0,"20:013555212121677784444",,terminal_output +5681,10742506,"TERMINAL",0,0,"357773338999306666",,terminal_output +5682,10744519,"TERMINAL",0,0,"579995553031313128888",,terminal_output +5683,10746527,"TERMINAL",0,0,"79316:01117772333410104040",,terminal_output +5684,10748533,"TERMINAL",0,0,"931333999455562222",,terminal_output +5685,10750653,"TERMINAL",0,0,"113555313131677784444",,terminal_output +5686,10752554,"TERMINAL",0,0,"357773338999406666",,terminal_output +5687,10754559,"TERMINAL",0,0,"579995554041414128888",,terminal_output +5688,10756570,"TERMINAL",0,0,"794111217772333420205050",,terminal_output +5689,10758583,"TERMINAL",0,0,"941333999455562222",,terminal_output +5690,10760686,"TERMINAL",0,0,"214666424242788895555",,terminal_output +5691,10762631,"TERMINAL",0,0,"468884449505050517777",,terminal_output +5692,10764610,"TERMINAL",0,0,"685020306665122239999",,terminal_output +5693,10766622,"TERMINAL",0,0,"8502228883444531316:017:01",,terminal_output +5694,10768624,"TERMINAL",0,0,"302444505050566673333",,terminal_output +5695,10770634,"TERMINAL",0,0,"24666222788895555",,terminal_output +5696,10772645,"TERMINAL",0,0,"4688844492:002:002:002:017777",,terminal_output +5697,10774654,"TERMINAL",0,0,"685:0030406665:0122239999",,terminal_output +5698,10776661,"TERMINAL",0,0,"87:002228883444541411111",,terminal_output +5699,10778690,"TERMINAL",0,0,"4024443:003:008:00566673333",,terminal_output +5700,10780682,"TERMINAL",0,0,"24666222788895555",,terminal_output 
+5701,10782690,"TERMINAL",0,0,"468884449101010117777",,terminal_output +5702,10784697,"TERMINAL",0,0,"681040506661122239999",,terminal_output +5703,10786707,"TERMINAL",0,0,"8102228883444551512121",,terminal_output +5704,10788718,"TERMINAL",0,0,"502444101010566673333",,terminal_output +5705,10790726,"TERMINAL",0,0,"24666222788895555",,terminal_output +5706,10792737,"TERMINAL",0,0,"468884449202020217777",,terminal_output +5707,10794744,"TERMINAL",0,0,"6820503:006662122239999",,terminal_output +5708,10796756,"TERMINAL",0,0,"820222888344452:015:013131",,terminal_output +5709,10798768,"TERMINAL",0,0,"1:002444202020566673333",,terminal_output +5710,10800777,"TERMINAL",0,0,"24666222788895555",,terminal_output +5711,10802787,"TERMINAL",0,0,"468884449303030317777",,terminal_output +5712,10804795,"TERMINAL",0,0,"68307:00106663122239999",,terminal_output +5713,10806825,"TERMINAL",0,0,"8302228883444511114141",,terminal_output +5714,10808822,"TERMINAL",0,0,"102444303030566673333",,terminal_output +5715,10810825,"TERMINAL",0,0,"24666222788895555",,terminal_output +5716,10812831,"TERMINAL",0,0,"468884449404040417777",,terminal_output +5717,10814841,"TERMINAL",0,0,"684010206664122239999",,terminal_output +5718,10816851,"TERMINAL",0,0,"8402228883444521215151",,terminal_output +5719,10818861,"TERMINAL",0,0,"202444404040566673333",,terminal_output +5720,10820866,"TERMINAL",0,0,"24666222788895555",,terminal_output +5721,10822878,"TERMINAL",0,0,"468884449505050517777",,terminal_output +5722,10824886,"TERMINAL",0,0,"685020306665122239999",,terminal_output +5723,10826897,"TERMINAL",0,0,"8502228883444531317:018:01",,terminal_output +5724,10828907,"TERMINAL",0,0,"302444505050566673333",,terminal_output +5725,10830924,"TERMINAL",0,0,"24666222788895555",,terminal_output +5726,10832981,"TERMINAL",0,0,"4688844493:003:003:003:017777",,terminal_output +5727,10834994,"TERMINAL",0,0,"686:0030406666:0122239999",,terminal_output +5728,10836946,"TERMINAL",0,0,"88:002228883444541411111",,terminal_output +5729,10838965,"TERMINAL",0,0,"4024444:004:009:00566673333",,terminal_output +5730,10840962,"TERMINAL",0,0,"24666222788895555",,terminal_output +5731,10843014,"TERMINAL",0,0,"468884449101010117777",,terminal_output +5732,10844979,"TERMINAL",0,0,"681040506661122239999",,terminal_output +5733,10846990,"TERMINAL",0,0,"8102228883444551512121",,terminal_output +5734,10849007,"TERMINAL",0,0,"502444101010566673333",,terminal_output +5735,10851107,"TERMINAL",0,0,"24666222788895555",,terminal_output +5736,10853015,"TERMINAL",0,0,"468884449202020217777",,terminal_output +5737,10855095,"TERMINAL",0,0,"6820504:006662122239999",,terminal_output +5738,10857043,"TERMINAL",0,0,"820222888344453:016:013131",,terminal_output +5739,10859093,"TERMINAL",0,0,"2:002444202020566673333",,terminal_output +5740,10861053,"TERMINAL",0,0,"24666222788895555",,terminal_output +5741,10863062,"TERMINAL",0,0,"468884449303030317777",,terminal_output +5742,10865071,"TERMINAL",0,0,"68308:00106663122239999",,terminal_output +5743,10867078,"TERMINAL",0,0,"8302228883444511114141",,terminal_output +5744,10869097,"TERMINAL",0,0,"102444303030566673333",,terminal_output +5745,10871095,"TERMINAL",0,0,"24666222788895555",,terminal_output +5746,10873122,"TERMINAL",0,0,"468884449404040417777",,terminal_output +5747,10875112,"TERMINAL",0,0,"684010206664122239999",,terminal_output +5748,10877126,"TERMINAL",0,0,"8402228883444521215151",,terminal_output +5749,10879136,"TERMINAL",0,0,"202444404040566673333",,terminal_output 
+5750,10881142,"TERMINAL",0,0,"24666222788895555",,terminal_output +5751,10883259,"TERMINAL",0,0,"468884449505050517777",,terminal_output +5752,10885161,"TERMINAL",0,0,"685020306665122239999",,terminal_output +5753,10887253,"TERMINAL",0,0,"8502228883444531318:019:01",,terminal_output +5754,10889177,"TERMINAL",0,0,"302444505050566673333",,terminal_output +5755,10891194,"TERMINAL",0,0,"24666222788895555",,terminal_output +5756,10893294,"TERMINAL",0,0,"4688844494:004:004:004:017777",,terminal_output +5757,10895206,"TERMINAL",0,0,"687:0030406667:0122239999",,terminal_output +5758,10897291,"TERMINAL",0,0,"89:002228883444541411111",,terminal_output +5759,10899220,"TERMINAL",0,0,"4024445:005:0030:00566673333",,terminal_output +5760,10901233,"TERMINAL",0,0,"24666222788895555",,terminal_output +5761,10903242,"TERMINAL",0,0,"468884449101010117777",,terminal_output +5762,10905258,"TERMINAL",0,0,"681040506661122239999",,terminal_output +5763,10907324,"TERMINAL",0,0,"8102228883444551512121",,terminal_output +5764,10909288,"TERMINAL",0,0,"502444101010566673333",,terminal_output +5765,10911306,"TERMINAL",0,0,"24666222788895555",,terminal_output +5766,10913361,"TERMINAL",0,0,"468884449202020217777",,terminal_output +5767,10915318,"TERMINAL",0,0,"6820505:006662122239999",,terminal_output +5768,10917359,"TERMINAL",0,0,"820222888344454:017:013131",,terminal_output +5769,10919333,"TERMINAL",0,0,"3:002444202020566673333",,terminal_output +5770,10921352,"TERMINAL",0,0,"24666222788895555",,terminal_output +5771,10923405,"TERMINAL",0,0,"468884449303030317777",,terminal_output +5772,10925447,"TERMINAL",0,0,"68309:00106663122239999",,terminal_output +5773,10927394,"TERMINAL",0,0,"8302228883444511114141",,terminal_output +5774,10929379,"TERMINAL",0,0,"102444303030566673333",,terminal_output +5775,10931392,"TERMINAL",0,0,"24666222788895555",,terminal_output +5776,10933397,"TERMINAL",0,0,"468884449404040417777",,terminal_output +5777,10935406,"TERMINAL",0,0,"684010206664122239999",,terminal_output +5778,10937417,"TERMINAL",0,0,"8402228883444521215151",,terminal_output +5779,10939424,"TERMINAL",0,0,"202444404040566673333",,terminal_output +5780,10941526,"TERMINAL",0,0,"24666222788895555",,terminal_output +5781,10943472,"TERMINAL",0,0,"468884449505050517777",,terminal_output +5782,10945468,"TERMINAL",0,0,"685020306665122239999",,terminal_output +5783,10947567,"TERMINAL",0,0,"8502228883444531319:0140:01",,terminal_output +5784,10949473,"TERMINAL",0,0,"302444505050566673333",,terminal_output +5785,10951477,"TERMINAL",0,0,"24666222788895555",,terminal_output +5786,10953506,"TERMINAL",0,0,"4688844495:005:005:005:017777",,terminal_output +5787,10955523,"TERMINAL",0,0,"688:0030406668:0122239999",,terminal_output +5788,10957513,"TERMINAL",0,0,"810:002228883444541411111",,terminal_output +5789,10959518,"TERMINAL",0,0,"4024446:006:001:00566673333",,terminal_output +5790,10961523,"TERMINAL",0,0,"24666222788895555",,terminal_output +5791,10963644,"TERMINAL",0,0,"468884449101010117777",,terminal_output +5792,10965587,"TERMINAL",0,0,"681040506661122239999",,terminal_output +5793,10967637,"TERMINAL",0,0,"8102228883444551512121",,terminal_output +5794,10969564,"TERMINAL",0,0,"502444101010566673333",,terminal_output +5795,10971568,"TERMINAL",0,0,"24666222788895555",,terminal_output +5796,10973617,"TERMINAL",0,0,"468884449202020217777",,terminal_output +5797,10975587,"TERMINAL",0,0,"6921516:017772233345:008:003030",,terminal_output +5798,10977672,"TERMINAL",0,0,"921333999455562222",,terminal_output 
+5799,10979616,"TERMINAL",0,0,"4:013555212121677784444",,terminal_output +5800,10981613,"TERMINAL",0,0,"357773338999306666",,terminal_output +5801,10983632,"TERMINAL",0,0,"579995553031313128888",,terminal_output +5802,10985635,"TERMINAL",0,0,"793130:01117772333410104040",,terminal_output +5803,10987642,"TERMINAL",0,0,"931333999455562222",,terminal_output +5804,10989653,"TERMINAL",0,0,"113555313131677784444",,terminal_output +5805,10991661,"TERMINAL",0,0,"357773338999406666",,terminal_output +5806,10993674,"TERMINAL",0,0,"579995554041414128888",,terminal_output +5807,10995693,"TERMINAL",0,0,"794111217772333420205050",,terminal_output +5808,10997690,"TERMINAL",0,0,"941333999455562222",,terminal_output +5809,10999698,"TERMINAL",0,0,"213555414141677784444",,terminal_output +5810,11001708,"TERMINAL",0,0,"357773338999506666",,terminal_output +5811,11003730,"TERMINAL",0,0,"579995555051515128888",,terminal_output +5812,11005733,"TERMINAL",0,0,"7951213177723334303040:001:00",,terminal_output +5813,11007737,"TERMINAL",0,0,"951333999455562222",,terminal_output +5814,11009741,"TERMINAL",0,0,"313555515151677784444",,terminal_output +5815,11011753,"TERMINAL",0,0,"3577733389996:006666",,terminal_output +5816,11013773,"TERMINAL",0,0,"579995559:006:016:016:0128888",,terminal_output +5817,11015772,"TERMINAL",0,0,"799:0131417772333440401010",,terminal_output +5818,11017814,"TERMINAL",0,0,"91:01333999455562222",,terminal_output +5819,11019792,"TERMINAL",0,0,"4135557:017:012:01677784444",,terminal_output +5820,11021800,"TERMINAL",0,0,"357773338999106666",,terminal_output +5821,11023812,"TERMINAL",0,0,"579995551011111128888",,terminal_output +5822,11025818,"TERMINAL",0,0,"791141517772333450502020",,terminal_output +5823,11027826,"TERMINAL",0,0,"911333999455562222",,terminal_output +5824,11029833,"TERMINAL",0,0,"513555111111677784444",,terminal_output +5825,11031851,"TERMINAL",0,0,"357773338999206666",,terminal_output +5826,11033851,"TERMINAL",0,0,"579995552021212128888",,terminal_output +5827,11035861,"TERMINAL",0,0,"7921517:01777233346:009:003030",,terminal_output +5828,11037870,"TERMINAL",0,0,"921333999455562222",,terminal_output +5829,11039879,"TERMINAL",0,0,"5:013555212121677784444",,terminal_output +5830,11041891,"TERMINAL",0,0,"357773338999306666",,terminal_output +5831,11043897,"TERMINAL",0,0,"579995553031313128888",,terminal_output +5832,11045907,"TERMINAL",0,0,"79311:01117772333410104040",,terminal_output +5833,11047917,"TERMINAL",0,0,"931333999455562222",,terminal_output +5834,11049926,"TERMINAL",0,0,"113555313131677784444",,terminal_output +5835,11051935,"TERMINAL",0,0,"357773338999406666",,terminal_output +5836,11053943,"TERMINAL",0,0,"579995554041414128888",,terminal_output +5837,11055954,"TERMINAL",0,0,"794111217772333420205050",,terminal_output +5838,11057963,"TERMINAL",0,0,"941333999455562222",,terminal_output +5839,11059971,"TERMINAL",0,0,"213555414141677784444",,terminal_output +5840,11061978,"TERMINAL",0,0,"357773338999506666",,terminal_output +5841,11063990,"TERMINAL",0,0,"579995555051515128888",,terminal_output +5842,11066002,"TERMINAL",0,0,"795121317772333430301:002:00",,terminal_output +5843,11068010,"TERMINAL",0,0,"951333999455562222",,terminal_output +5844,11070019,"TERMINAL",0,0,"313555515151677784444",,terminal_output +5845,11072027,"TERMINAL",0,0,"3577733389997:006666",,terminal_output +5846,11074035,"TERMINAL",0,0,"5799955540:007:017:017:0128888",,terminal_output +5847,11076051,"TERMINAL",0,0,"7930:0131417772333440401010",,terminal_output 
+5848,11078109,"TERMINAL",0,0,"92:01333999455562222",,terminal_output +5849,11080073,"TERMINAL",0,0,"4135558:018:013:01677784444",,terminal_output +5850,11082074,"TERMINAL",0,0,"357773338999106666",,terminal_output +5851,11084083,"TERMINAL",0,0,"579995551011111128888",,terminal_output +5852,11086092,"TERMINAL",0,0,"791141517772333450502020",,terminal_output +5853,11088162,"TERMINAL",0,0,"911333999455562222",,terminal_output +5854,11090111,"TERMINAL",0,0,"513555111111677784444",,terminal_output +5855,11092114,"TERMINAL",0,0,"357773338999206666",,terminal_output +5856,11094126,"TERMINAL",0,0,"579995552021212128888",,terminal_output +5857,11096135,"TERMINAL",0,0,"7921518:01777233347:0030:003030",,terminal_output +5858,11098196,"TERMINAL",0,0,"921333999455562222",,terminal_output +5859,11100169,"TERMINAL",0,0,"6:013555212121677784444",,terminal_output +5860,11102181,"TERMINAL",0,0,"357773338999306666",,terminal_output +5861,11104188,"TERMINAL",0,0,"579995553031313128888",,terminal_output +5862,11106193,"TERMINAL",0,0,"79312:01117772333410104040",,terminal_output +5863,11108207,"TERMINAL",0,0,"931333999455562222",,terminal_output +5864,11110225,"TERMINAL",0,0,"113555313131677784444",,terminal_output +5865,11112330,"TERMINAL",0,0,"357773338999406666",,terminal_output +5866,11114227,"TERMINAL",0,0,"579995554041414128888",,terminal_output +5867,11116239,"TERMINAL",0,0,"794111217772333420205050",,terminal_output +5868,11118247,"TERMINAL",0,0,"941333999455562222",,terminal_output +5869,11120257,"TERMINAL",0,0,"213555414141677784444",,terminal_output +5870,11122362,"TERMINAL",0,0,"357773338999506666",,terminal_output +5871,11124275,"TERMINAL",0,0,"579995555051515128888",,terminal_output +5872,11126293,"TERMINAL",0,0,"795121317772333430302:003:00",,terminal_output +5873,11128301,"TERMINAL",0,0,"951333999455562222",,terminal_output +5874,11130354,"TERMINAL",0,0,"313555515151677784444",,terminal_output +5875,11132309,"TERMINAL",0,0,"3577733389998:006666",,terminal_output +5876,11134319,"TERMINAL",0,0,"579995551:008:018:018:0128888",,terminal_output +5877,11136334,"TERMINAL",0,0,"791:0131417772333440401010",,terminal_output +5878,11138343,"TERMINAL",0,0,"93:01333999455562222",,terminal_output +5879,11140361,"TERMINAL",0,0,"4135559:019:014:01677784444",,terminal_output +5880,11142357,"TERMINAL",0,0,"357773338999106666",,terminal_output +5881,11144366,"TERMINAL",0,0,"579995551011111128888",,terminal_output +5882,11146428,"TERMINAL",0,0,"791141517772333450502020",,terminal_output +5883,11148385,"TERMINAL",0,0,"911333999455562222",,terminal_output +5884,11150404,"TERMINAL",0,0,"513555111111677784444",,terminal_output +5885,11152470,"TERMINAL",0,0,"357773338999206666",,terminal_output +5886,11154412,"TERMINAL",0,0,"579995552021212128888",,terminal_output +5887,11156420,"TERMINAL",0,0,"7921519:01777233348:001:003030",,terminal_output +5888,11158433,"TERMINAL",0,0,"921333999455562222",,terminal_output +5889,11160456,"TERMINAL",0,0,"7:013555212121677784444",,terminal_output +5890,11162448,"TERMINAL",0,0,"357773338999306666",,terminal_output +5891,11164459,"TERMINAL",0,0,"579995553031313128888",,terminal_output +5892,11166475,"TERMINAL",0,0,"79313:01117772333410104040",,terminal_output +5893,11168475,"TERMINAL",0,0,"931333999455562222",,terminal_output +5894,11170483,"TERMINAL",0,0,"113555313131677784444",,terminal_output +5895,11172491,"TERMINAL",0,0,"357773338999406666",,terminal_output +5896,11174501,"TERMINAL",0,0,"579995554041414128888",,terminal_output 
+5897,11176509,"TERMINAL",0,0,"794111217772333420205050",,terminal_output +5898,11178517,"TERMINAL",0,0,"941333999455562222",,terminal_output +5899,11180626,"TERMINAL",0,0,"213555414141677784444",,terminal_output +5900,11182537,"TERMINAL",0,0,"357773338999506666",,terminal_output +5901,11184545,"TERMINAL",0,0,"579995555051515128888",,terminal_output +5902,11186553,"TERMINAL",0,0,"795121317772333430303:004:00",,terminal_output +5903,11188563,"TERMINAL",0,0,"951333999455562222",,terminal_output +5904,11190582,"TERMINAL",0,0,"313555515151677784444",,terminal_output +5905,11192581,"TERMINAL",0,0,"3577733389999:006666",,terminal_output +5906,11194591,"TERMINAL",0,0,"582:0030406662:019:029:029:0239999",,terminal_output +5907,11196601,"TERMINAL",0,0,"84:002228883444541411111",,terminal_output +5908,11198610,"TERMINAL",0,0,"40244420:0020:005:00566673333",,terminal_output +5909,11200618,"TERMINAL",0,0,"24666222788895555",,terminal_output +5910,11202644,"TERMINAL",0,0,"468884449101010117777",,terminal_output +5911,11204635,"TERMINAL",0,0,"681040506661122239999",,terminal_output +5912,11206645,"TERMINAL",0,0,"8102228883444551512121",,terminal_output +5913,11208654,"TERMINAL",0,0,"502444101010566673333",,terminal_output +5914,11210668,"TERMINAL",0,0,"24666222788895555",,terminal_output +5915,11212683,"TERMINAL",0,0,"468884449202020217777",,terminal_output +5916,11214688,"TERMINAL",0,0,"68205040:006662122239999",,terminal_output +5917,11216694,"TERMINAL",0,0,"820222888344459:012:013131",,terminal_output +5918,11218703,"TERMINAL",0,0,"8:002444202020566673333",,terminal_output +5919,11220772,"TERMINAL",0,0,"24666222788895555",,terminal_output +5920,11222723,"TERMINAL",0,0,"468884449303030317777",,terminal_output +5921,11224738,"TERMINAL",0,0,"68304:00106663122239999",,terminal_output +5922,11226739,"TERMINAL",0,0,"8302228883444511114141",,terminal_output +5923,11228752,"TERMINAL",0,0,"102444303030566673333",,terminal_output +5924,11230775,"TERMINAL",0,0,"24666222788895555",,terminal_output +5925,11232852,"TERMINAL",0,0,"468884449404040417777",,terminal_output +5926,11234801,"TERMINAL",0,0,"684010206664122239999",,terminal_output +5927,11236785,"TERMINAL",0,0,"8402228883444521215151",,terminal_output +5928,11238802,"TERMINAL",0,0,"202444404040566673333",,terminal_output +5929,11240801,"TERMINAL",0,0,"24666222788895555",,terminal_output +5930,11242815,"TERMINAL",0,0,"468884449505050517777",,terminal_output +5931,11244818,"TERMINAL",0,0,"685020306665122239999",,terminal_output +5932,11246828,"TERMINAL",0,0,"8502228883444531314:015:01",,terminal_output +5933,11248835,"TERMINAL",0,0,"302444505050566673333",,terminal_output +5934,11250846,"TERMINAL",0,0,"24666222788895555",,terminal_output +5935,11252854,"TERMINAL",0,0,"46888444910:0010:0010:0010:017777",,terminal_output +5936,11254869,"TERMINAL",0,0,"683:0030406663:0122239999",,terminal_output +5937,11256874,"TERMINAL",0,0,"85:002228883444541411111",,terminal_output +5938,11258883,"TERMINAL",0,0,"4024441:001:006:00566673333",,terminal_output +5939,11260892,"TERMINAL",0,0,"24666222788895555",,terminal_output +5940,11262907,"TERMINAL",0,0,"468884449101010117777",,terminal_output +5941,11264911,"TERMINAL",0,0,"681040506661122239999",,terminal_output +5942,11266919,"TERMINAL",0,0,"8102228883444551512121",,terminal_output +5943,11268924,"TERMINAL",0,0,"502444101010566673333",,terminal_output +5944,11270935,"TERMINAL",0,0,"24666222788895555",,terminal_output +5945,11272943,"TERMINAL",0,0,"468884449202020217777",,terminal_output 
+5946,11274954,"TERMINAL",0,0,"6820501:006662122239999",,terminal_output +5947,11276964,"TERMINAL",0,0,"8202228883444530:013:013131",,terminal_output +5948,11278973,"TERMINAL",0,0,"9:002444202020566673333",,terminal_output +5949,11280984,"TERMINAL",0,0,"24666222788895555",,terminal_output +5950,11282992,"TERMINAL",0,0,"468884449303030317777",,terminal_output +5951,11285004,"TERMINAL",0,0,"68305:00106663122239999",,terminal_output +5952,11287013,"TERMINAL",0,0,"8302228883444511114141",,terminal_output +5953,11289022,"TERMINAL",0,0,"102444303030566673333",,terminal_output +5954,11291029,"TERMINAL",0,0,"24666222788895555",,terminal_output +5955,11293041,"TERMINAL",0,0,"468884449404040417777",,terminal_output +5956,11295049,"TERMINAL",0,0,"684010206664122239999",,terminal_output +5957,11297058,"TERMINAL",0,0,"8402228883444521215151",,terminal_output +5958,11299066,"TERMINAL",0,0,"202444404040566673333",,terminal_output +5959,11301075,"TERMINAL",0,0,"24666222788895555",,terminal_output +5960,11303084,"TERMINAL",0,0,"468884449505050517777",,terminal_output +5961,11305103,"TERMINAL",0,0,"685020306665122239999",,terminal_output +5962,11307195,"TERMINAL",0,0,"8502228883444531315:016:01",,terminal_output +5963,11309112,"TERMINAL",0,0,"302444505050566673333",,terminal_output +5964,11311131,"TERMINAL",0,0,"24666222788895555",,terminal_output +5965,11313131,"TERMINAL",0,0,"4688844491:001:001:001:017777",,terminal_output +5966,11315149,"TERMINAL",0,0,"684:0030406664:0122239999",,terminal_output +5967,11317234,"TERMINAL",0,0,"86:002228883444541411111",,terminal_output +5968,11319161,"TERMINAL",0,0,"4024442:002:007:00566673333",,terminal_output +5969,11321224,"TERMINAL",0,0,"24666222788895555",,terminal_output +5970,11323178,"TERMINAL",0,0,"468884449101010117777",,terminal_output +5971,11325193,"TERMINAL",0,0,"681040506661122239999",,terminal_output +5972,11327267,"TERMINAL",0,0,"8102228883444551512121",,terminal_output +5973,11329200,"TERMINAL",0,0,"502444101010566673333",,terminal_output +5974,11331220,"TERMINAL",0,0,"24666222788895555",,terminal_output +5975,11333222,"TERMINAL",0,0,"468884449202020217777",,terminal_output +5976,11335244,"TERMINAL",0,0,"6820502:006662122239999",,terminal_output +5977,11337303,"TERMINAL",0,0,"820222888344451:014:013131",,terminal_output +5978,11339248,"TERMINAL",0,0,"30:002444202020566673333",,terminal_output +5979,11341302,"TERMINAL",0,0,"24666222788895555",,terminal_output +5980,11343343,"TERMINAL",0,0,"468884449303030317777",,terminal_output +5981,11345272,"TERMINAL",0,0,"68306:00106663122239999",,terminal_output +5982,11347338,"TERMINAL",0,0,"8302228883444511114141",,terminal_output +5983,11349291,"TERMINAL",0,0,"102444303030566673333",,terminal_output +5984,11351310,"TERMINAL",0,0,"24666222788895555",,terminal_output +5985,11353309,"TERMINAL",0,0,"468884449404040417777",,terminal_output +5986,11355318,"TERMINAL",0,0,"684010206664122239999",,terminal_output +5987,11357326,"TERMINAL",0,0,"8402228883444521215151",,terminal_output +5988,11359330,"TERMINAL",0,0,"202444404040566673333",,terminal_output +5989,11361343,"TERMINAL",0,0,"24666222788895555",,terminal_output +5990,11363414,"TERMINAL",0,0,"468884449505050517777",,terminal_output +5991,11365430,"TERMINAL",0,0,"685020306665122239999",,terminal_output +5992,11367408,"TERMINAL",0,0,"8502228883444531316:017:01",,terminal_output +5993,11369378,"TERMINAL",0,0,"302444505050566673333",,terminal_output +5994,11371406,"TERMINAL",0,0,"24666222788895555",,terminal_output 
+5995,11373448,"TERMINAL",0,0,"4688844492:002:002:002:017777",,terminal_output +5996,11375469,"TERMINAL",0,0,"685:0030406665:0122239999",,terminal_output +5997,11377447,"TERMINAL",0,0,"87:002228883444541411111",,terminal_output +5998,11379425,"TERMINAL",0,0,"4024443:003:008:00566673333",,terminal_output +5999,11381439,"TERMINAL",0,0,"24666222788895555",,terminal_output +6000,11383484,"TERMINAL",0,0,"468884449101010117777",,terminal_output +6001,11385502,"TERMINAL",0,0,"681040506661122239999",,terminal_output +6002,11387477,"TERMINAL",0,0,"8102228883444551512121",,terminal_output +6003,11389467,"TERMINAL",0,0,"502444101010566673333",,terminal_output +6004,11391473,"TERMINAL",0,0,"24666222788895555",,terminal_output +6005,11393520,"TERMINAL",0,0,"468884449202020217777",,terminal_output +6006,11395544,"TERMINAL",0,0,"6820503:006662122239999",,terminal_output +6007,11397518,"TERMINAL",0,0,"820222888344452:015:013131",,terminal_output +6008,11399509,"TERMINAL",0,0,"1:002444202020566673333",,terminal_output +6009,11401518,"TERMINAL",0,0,"24666222788895555",,terminal_output +6010,11403526,"TERMINAL",0,0,"468884449303030317777",,terminal_output +6011,11405544,"TERMINAL",0,0,"68307:00106663122239999",,terminal_output +6012,11407651,"TERMINAL",0,0,"8302228883444511114141",,terminal_output +6013,11409551,"TERMINAL",0,0,"102444303030566673333",,terminal_output +6014,11411558,"TERMINAL",0,0,"24666222788895555",,terminal_output +6015,11413569,"TERMINAL",0,0,"468884449404040417777",,terminal_output +6016,11415577,"TERMINAL",0,0,"684010206664122239999",,terminal_output +6017,11417584,"TERMINAL",0,0,"8402228883444521215151",,terminal_output +6018,11419592,"TERMINAL",0,0,"203555414141677784444",,terminal_output +6019,11421602,"TERMINAL",0,0,"357773338999506666",,terminal_output +6020,11423612,"TERMINAL",0,0,"579995555051515128888",,terminal_output +6021,11425619,"TERMINAL",0,0,"795121317772333430307:008:00",,terminal_output +6022,11427628,"TERMINAL",0,0,"951333999455562222",,terminal_output +6023,11429636,"TERMINAL",0,0,"313555515151677784444",,terminal_output +6024,11431645,"TERMINAL",0,0,"3577733389993:006666",,terminal_output +6025,11433659,"TERMINAL",0,0,"579995556:003:013:013:0128888",,terminal_output +6026,11435666,"TERMINAL",0,0,"796:0131417772333440401010",,terminal_output +6027,11437678,"TERMINAL",0,0,"98:01333999455562222",,terminal_output +6028,11439694,"TERMINAL",0,0,"4135554:014:019:01677784444",,terminal_output +6029,11441693,"TERMINAL",0,0,"357773338999106666",,terminal_output +6030,11443712,"TERMINAL",0,0,"579995551011111128888",,terminal_output +6031,11445743,"TERMINAL",0,0,"791141517772333450502020",,terminal_output +6032,11447790,"TERMINAL",0,0,"911333999455562222",,terminal_output +6033,11449732,"TERMINAL",0,0,"513555111111677784444",,terminal_output +6034,11451742,"TERMINAL",0,0,"357773338999206666",,terminal_output +6035,11453749,"TERMINAL",0,0,"579995552021212128888",,terminal_output +6036,11455760,"TERMINAL",0,0,"7921514:01777233343:006:003030",,terminal_output +6037,11457823,"TERMINAL",0,0,"921333999455562222",,terminal_output +6038,11459776,"TERMINAL",0,0,"2:0135552121216274375038278:54:088:58:57 2:33:04635:1556:04544:27 2:47:341438:34329:34",,terminal_output +6039,11461782,"TERMINAL",0,0,"357773338993066666",,terminal_output +6040,11463794,"TERMINAL",0,0,"57999555303131288888",,terminal_output +6041,11465811,"TERMINAL",0,0,"79318:011177723341010404040",,terminal_output +6042,11467812,"TERMINAL",0,0,"931333999455622222",,terminal_output +6043,11469821,"TERMINAL",0,0,"11355516 
R2025-10-04T21:32:101hai005\r 30138 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:07:40 24:31 1-00:00:00 hai004931570:5239:316619:45:35 1:46:364218:34 2:13:37375038278:54:088:58:57 2:33:14635:1556:14544:27 2:47:441438:44329:4423853:44",,terminal_output +6044,11471830,"TERMINAL",0,0,"35777333389940666666",,terminal_output +6045,11473845,"TERMINAL",0,0,"5799955554041412888888",,terminal_output +6046,11475954,"TERMINAL",0,0,"7941112177772334202050505050",,terminal_output +6047,11477854,"TERMINAL",0,0,"94133399994556222222",,terminal_output +6048,11479866,"TERMINAL",0,0,"213555114141416778444444",,terminal_output +6049,11481878,"TERMINAL",0,0,"35777333389950666666",,terminal_output +6050,11483884,"TERMINAL",0,0,"5799955555051512888888",,terminal_output +6051,11485893,"TERMINAL",0,0,"795121317777233430308:009:0050:004:00",,terminal_output +6052,11487934,"TERMINAL",0,0,"95133399994556222222",,terminal_output +6053,11489909,"TERMINAL",0,0,"313555215151516778444444",,terminal_output +6054,11491919,"TERMINAL",0,0,"3577733338994:00666666",,terminal_output +6055,11493940,"TERMINAL",0,0,"5799955557:004:014:012888888",,terminal_output +6056,11496024,"TERMINAL",0,0,"797:01314177772334404010101010",,terminal_output +6057,11497947,"TERMINAL",0,0,"99:0133399994556222222",,terminal_output +6058,11499965,"TERMINAL",0,0,"413555315:015:0140:016778444444",,terminal_output +6059,11501970,"TERMINAL",0,0,"35777333389910666666",,terminal_output +6060,11503991,"TERMINAL",0,0,"5799955551011112888888",,terminal_output +6061,11505993,"TERMINAL",0,0,"7911415177772334505020202020",,terminal_output +6062,11508010,"TERMINAL",0,0,"91133399994556222222",,terminal_output +6063,11510033,"TERMINAL",0,0,"513555411111116778444444",,terminal_output +6064,11512017,"TERMINAL",0,0,"35777333389920666666",,terminal_output +6065,11514037,"TERMINAL",0,0,"5799955552021212888888",,terminal_output +6066,11516033,"TERMINAL",0,0,"7921515:01777723344:007:0030303030",,terminal_output +6067,11518050,"TERMINAL",0,0,"92133399994556222222",,terminal_output +6068,11520062,"TERMINAL",0,0,"3:013555512121216778444444",,terminal_output +6069,11522056,"TERMINAL",0,0,"35777333389930666666",,terminal_output +6070,11524070,"TERMINAL",0,0,"5799955553031312888888",,terminal_output +6071,11526078,"TERMINAL",0,0,"79319:011177772334101040404040",,terminal_output +6072,11528085,"TERMINAL",0,0,"931333Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)4032:10 0:59589491:0725:295720:52:40 40:296645:35 1:47:34254345309:18:339:18:33 2:14:36754:0884:12655:57 2:37:125548:424349:423420:42",,terminal_output +6073,11530092,"TERMINAL",0,0,"1135551:01313131677844444",,terminal_output +6074,11532103,"TERMINAL",0,0,"3577733338994066666",,terminal_output +6075,11534110,"TERMINAL",0,0,"579995555404141288888",,terminal_output +6076,11536117,"TERMINAL",0,0,"79411121777723342020505050",,terminal_output +6077,11538129,"TERMINAL",0,0,"9413339999455622222",,terminal_output +6078,11540137,"TERMINAL",0,0,"21355511414141677844444",,terminal_output +6079,11542145,"TERMINAL",0,0,"3577733338995066666",,terminal_output +6080,11544153,"TERMINAL",0,0,"579995555505151288888",,terminal_output +6081,11546162,"TERMINAL",0,0,"795121317777233430309:0050:001:00",,terminal_output +6082,11548174,"TERMINAL",0,0,"9513339999455622222",,terminal_output +6083,11550182,"TERMINAL",0,0,"31355521515151677844444",,terminal_output +6084,11552187,"TERMINAL",0,0,"3577733338995:0066666",,terminal_output 
+6085,11554197,"TERMINAL",0,0,"5799955558:005:015:01288888",,terminal_output +6086,11556234,"TERMINAL",0,0,"798:013141777723344040101010",,terminal_output +6087,11558213,"TERMINAL",0,0,"920:013339999455622222",,terminal_output +6088,11560265,"TERMINAL",0,0,"413555316:016:011:01677844444",,terminal_output +6089,11562274,"TERMINAL",0,0,"3577733338991066666",,terminal_output +6090,11564242,"TERMINAL",0,0,"579995555101111288888",,terminal_output +6091,11566251,"TERMINAL",0,0,"79114151777723345050202020",,terminal_output +6092,11568262,"TERMINAL",0,0,"9113339999455622222",,terminal_output +6093,11570271,"TERMINAL",0,0,"51355541111111677844444",,terminal_output +6094,11572310,"TERMINAL",0,0,"3577733338992066666",,terminal_output +6095,11574288,"TERMINAL",0,0,"579995555202121288888",,terminal_output +6096,11576296,"TERMINAL",0,0,"7921516:01777723345:008:00303030",,terminal_output +6097,11578315,"TERMINAL",0,0,"9213339999455622222",,terminal_output +6098,11580330,"TERMINAL",0,0,"4:01355551212121677844444",,terminal_output +6099,11582320,"TERMINAL",0,0,"3577733338993066666",,terminal_output +6100,11584330,"TERMINAL",0,0,"579995555303131288888",,terminal_output +6101,11586344,"TERMINAL",0,0,"793140:0111777723341010404040",,terminal_output +6102,11588350,"TERMINAL",0,0,"9313339999455622222",,terminal_output +6103,11590366,"TERMINAL",0,0,"1135552:01313131677844444",,terminal_output +6104,11592368,"TERMINAL",0,0,"3577733338994066666",,terminal_output +6105,11594377,"TERMINAL",0,0,"579995555404141288888",,terminal_output +6106,11596477,"TERMINAL",0,0,"79411121777723342020505050",,terminal_output +6107,11598394,"TERMINAL",0,0,"9413339999455622222",,terminal_output +6108,11600471,"TERMINAL",0,0,"21355511414141677844444",,terminal_output +6109,11602516,"TERMINAL",0,0,"3577733338995066666",,terminal_output +6110,11604421,"TERMINAL",0,0,"579995555505151288888",,terminal_output +6111,11606433,"TERMINAL",0,0,"7951213177772334303050:001:002:00",,terminal_output +6112,11608438,"TERMINAL",0,0,"9513339999455622222",,terminal_output +6113,11610476,"TERMINAL",0,0,"31355521515151677844444",,terminal_output +6114,11612456,"TERMINAL",0,0,"3577733338996:0066666",,terminal_output +6115,11614464,"TERMINAL",0,0,"5799955559:006:016:01288888",,terminal_output +6116,11616484,"TERMINAL",0,0,"799:013141777723344040101010",,terminal_output +6117,11618481,"TERMINAL",0,0,"91:013339999455622222",,terminal_output +6118,11620539,"TERMINAL",0,0,"413555317:017:012:01677844444",,terminal_output +6119,11622504,"TERMINAL",0,0,"3577733338991066666",,terminal_output +6120,11624533,"TERMINAL",0,0,"579995555101111288888",,terminal_output +6121,11626520,"TERMINAL",0,0,"79114151777723345050202020",,terminal_output +6122,11628529,"TERMINAL",0,0,"9113339999455622222",,terminal_output +6123,11630576,"TERMINAL",0,0,"51355541111111677844444",,terminal_output +6124,11632622,"TERMINAL",0,0,"3577733338992066666",,terminal_output +6125,11634571,"TERMINAL",0,0,"579995555202121288888",,terminal_output +6126,11636566,"TERMINAL",0,0,"7921517:01777723346:009:00303030",,terminal_output +6127,11638573,"TERMINAL",0,0,"9213339999455622222",,terminal_output +6128,11640589,"TERMINAL",0,0,"5:01355551212121677844444",,terminal_output +6129,11642589,"TERMINAL",0,0,"368884444930303177777",,terminal_output +6130,11644606,"TERMINAL",0,0,"68301:001066663122399999",,terminal_output +6131,11646609,"TERMINAL",0,0,"830222888834451111414141",,terminal_output +6132,11648615,"TERMINAL",0,0,"1024443:00303030566733333",,terminal_output 
+6133,11650636,"TERMINAL",0,0,"246662222788955555",,terminal_output +6134,11652695,"TERMINAL",0,0,"468884444940404177777",,terminal_output +6135,11654711,"TERMINAL",0,0,"6840102066664122399999",,terminal_output +6136,11656650,"TERMINAL",0,0,"840222888834452121515151",,terminal_output +6137,11658660,"TERMINAL",0,0,"20244410404040566733333",,terminal_output +6138,11660666,"TERMINAL",0,0,"246662222788955555",,terminal_output +6139,11662729,"TERMINAL",0,0,"468884444950505177777",,terminal_output +6140,11664748,"TERMINAL",0,0,"6850203066665122399999",,terminal_output +6141,11666693,"TERMINAL",0,0,"8502228888344531311:012:013:01",,terminal_output +6142,11668703,"TERMINAL",0,0,"30244420505050566733333",,terminal_output +6143,11670710,"TERMINAL",0,0,"246662222788955555",,terminal_output +6144,11672721,"TERMINAL",0,0,"46888444497:007:007:0177777",,terminal_output +6145,11674741,"TERMINAL",0,0,"6840:003040666650:0122399999",,terminal_output +6146,11676737,"TERMINAL",0,0,"82:00222888834454141111111",,terminal_output +6147,11678748,"TERMINAL",0,0,"402444308:008:003:00566733333",,terminal_output +6148,11680759,"TERMINAL",0,0,"246662222788955555",,terminal_output +6149,11682798,"TERMINAL",0,0,"468884444910101177777",,terminal_output +6150,11684816,"TERMINAL",0,0,"6810405066661122399999",,terminal_output +6151,11686785,"TERMINAL",0,0,"810222888834455151212121",,terminal_output +6152,11688798,"TERMINAL",0,0,"50244440101010566733333",,terminal_output +6153,11690818,"TERMINAL",0,0,"246662222788955555",,terminal_output +6154,11692832,"TERMINAL",0,0,"468884444920202177777",,terminal_output +6155,11694821,"TERMINAL",0,0,"6820508:0066662122399999",,terminal_output +6156,11696831,"TERMINAL",0,0,"820222888834457:0140:01313131",,terminal_output +6157,11698875,"TERMINAL",0,0,"6:00244450202020566733333",,terminal_output +6158,11700852,"TERMINAL",0,0,"246662222788955555",,terminal_output +6159,11702856,"TERMINAL",0,0,"468884444930303177777",,terminal_output +6160,11704876,"TERMINAL",0,0,"68302:001066663122399999",,terminal_output +6161,11706874,"TERMINAL",0,0,"830222888834451111414141",,terminal_output +6162,11708893,"TERMINAL",0,0,"1024444:00303030566733333",,terminal_output +6163,11710893,"TERMINAL",0,0,"246662222788955555",,terminal_output +6164,11713009,"TERMINAL",0,0,"468884444940404177777",,terminal_output +6165,11714910,"TERMINAL",0,0,"6840102066664122399999",,terminal_output +6166,11716921,"TERMINAL",0,0,"840222888834452121515151",,terminal_output +6167,11718927,"TERMINAL",0,0,"20244410404040566733333",,terminal_output +6168,11720940,"TERMINAL",0,0,"246662222788955555",,terminal_output +6169,11722949,"TERMINAL",0,0,"468884444950505177777",,terminal_output +6170,11724971,"TERMINAL",0,0,"6850203066665122399999",,terminal_output +6171,11727040,"TERMINAL",0,0,"8502228888344531312:013:014:01",,terminal_output +6172,11728975,"TERMINAL",0,0,"30244420505050566733333",,terminal_output +6173,11730986,"TERMINAL",0,0,"246662222788955555",,terminal_output +6174,11733082,"TERMINAL",0,0,"46888444498:008:008:0177777",,terminal_output +6175,11735026,"TERMINAL",0,0,"681:00304066661:0122399999",,terminal_output +6176,11737072,"TERMINAL",0,0,"83:00222888834454141111111",,terminal_output +6177,11739093,"TERMINAL",0,0,"402444309:009:004:00566733333",,terminal_output +6178,11741038,"TERMINAL",0,0,"246662222788955555",,terminal_output +6179,11743118,"TERMINAL",0,0,"468884444910101177777",,terminal_output +6180,11745055,"TERMINAL",0,0,"6810405066661122399999",,terminal_output 
+6181,11747110,"TERMINAL",0,0,"810222888834455151212121",,terminal_output +6182,11749103,"TERMINAL",0,0,"50244440101010566733333",,terminal_output +6183,11751102,"TERMINAL",0,0,"246662222788955555",,terminal_output +6184,11753151,"TERMINAL",0,0,"468884444920202177777",,terminal_output +6185,11755102,"TERMINAL",0,0,"6820509:0066662122399999",,terminal_output +6186,11757146,"TERMINAL",0,0,"820222888834458:011:01313131",,terminal_output +6187,11759116,"TERMINAL",0,0,"7:00244450202020566733333",,terminal_output +6188,11761125,"TERMINAL",0,0,"246662222788955555",,terminal_output +6189,11763137,"TERMINAL",0,0,"468884444930303177777",,terminal_output +6190,11765146,"TERMINAL",0,0,"68303:001066663122399999",,terminal_output +6191,11767154,"TERMINAL",0,0,"830222888834451111414141",,terminal_output +6192,11769165,"TERMINAL",0,0,"1024445:00303030566733333",,terminal_output +6193,11771178,"TERMINAL",0,0,"246662222788955555",,terminal_output +6194,11773178,"TERMINAL",0,0,"468884444940404177777",,terminal_output +6195,11775190,"TERMINAL",0,0,"6840102066664122399999",,terminal_output +6196,11777203,"TERMINAL",0,0,"840222888834452121515151",,terminal_output +6197,11779207,"TERMINAL",0,0,"20244410404040566733333",,terminal_output +6198,11781215,"TERMINAL",0,0,"246662222788955555",,terminal_output +6199,11783228,"TERMINAL",0,0,"468884444950505177777",,terminal_output +6200,11785237,"TERMINAL",0,0,"6850203066665122399999",,terminal_output +6201,11787247,"TERMINAL",0,0,"8502228888344531313:014:015:01",,terminal_output +6202,11789252,"TERMINAL",0,0,"30244420505050566733333",,terminal_output +6203,11791264,"TERMINAL",0,0,"246662222788955555",,terminal_output +6204,11793271,"TERMINAL",0,0,"46888444499:009:009:0177777",,terminal_output +6205,11795283,"TERMINAL",0,0,"682:00304066662:0122399999",,terminal_output +6206,11797289,"TERMINAL",0,0,"84:00222888834454141111111",,terminal_output +6207,11799300,"TERMINAL",0,0,"4024443030:0030:005:00566733333",,terminal_output +6208,11801307,"TERMINAL",0,0,"246662222788955555",,terminal_output +6209,11803313,"TERMINAL",0,0,"468884444910101177777",,terminal_output +6210,11805324,"TERMINAL",0,0,"6810405066661122399999",,terminal_output +6211,11807331,"TERMINAL",0,0,"810222888834455151212121",,terminal_output +6212,11809341,"TERMINAL",0,0,"50244440101010566733333",,terminal_output +6213,11811351,"TERMINAL",0,0,"246662222788955555",,terminal_output +6214,11813464,"TERMINAL",0,0,"468884444920202177777",,terminal_output +6215,11815370,"TERMINAL",0,0,"68205050:0066662122399999",,terminal_output +6216,11817456,"TERMINAL",0,0,"820222888834459:012:01313131",,terminal_output +6217,11819393,"TERMINAL",0,0,"8:00244450202020566733333",,terminal_output +6218,11821452,"TERMINAL",0,0,"246662222788955555",,terminal_output +6219,11823411,"TERMINAL",0,0,"468884444930303177777",,terminal_output +6220,11825425,"TERMINAL",0,0,"68304:001066663122399999",,terminal_output +6221,11827492,"TERMINAL",0,0,"830222888834451111414141",,terminal_output +6222,11829437,"TERMINAL",0,0,"1024446:00303030566733333",,terminal_output +6223,11831457,"TERMINAL",0,0,"246662222788955555",,terminal_output +6224,11833530,"TERMINAL",0,0,"468884444940404177777",,terminal_output +6225,11835463,"TERMINAL",0,0,"6840102066664122399999",,terminal_output +6226,11837527,"TERMINAL",0,0,"840222888834452121515151",,terminal_output +6227,11839480,"TERMINAL",0,0,"20244410404040566733333",,terminal_output +6228,11841490,"TERMINAL",0,0,"246662222788955555",,terminal_output 
+6229,11843498,"TERMINAL",0,0,"468884444950505177777",,terminal_output +6230,11845516,"TERMINAL",0,0,"6850203066665122399999",,terminal_output +6231,11847561,"TERMINAL",0,0,"8502228888344531314:015:016:01",,terminal_output +6232,11849531,"TERMINAL",0,0,"30244420505050566733333",,terminal_output +6233,11851550,"TERMINAL",0,0,"246662222788955555",,terminal_output +6234,11853604,"TERMINAL",0,0,"468884444920:0020:0020:0177777",,terminal_output +6235,11855619,"TERMINAL",0,0,"683:00304066663:0122399999",,terminal_output +6236,11857560,"TERMINAL",0,0,"85:00222888834454141111111",,terminal_output +6237,11859576,"TERMINAL",0,0,"402444301:001:006:00566733333",,terminal_output +6238,11861576,"TERMINAL",0,0,"246662222788955555",,terminal_output +6239,11863639,"TERMINAL",0,0,"4799955551011111288888",,terminal_output +6240,11865597,"TERMINAL",0,0,"79114151777723345050202020",,terminal_output +6241,11867608,"TERMINAL",0,0,"9113339999455622222",,terminal_output +6242,11869621,"TERMINAL",0,0,"51355541111111677844444",,terminal_output +6243,11871625,"TERMINAL",0,0,"3577733338992066666",,terminal_output +6244,11873644,"TERMINAL",0,0,"579995555202121288888",,terminal_output +6245,11875643,"TERMINAL",0,0,"7921511:017777233440:003:00303030",,terminal_output +6246,11877666,"TERMINAL",0,0,"9213339999455622222",,terminal_output +6247,11879684,"TERMINAL",0,0,"9:01355551212121677844444",,terminal_output +6248,11881667,"TERMINAL",0,0,"3577733338993066666",,terminal_output +6249,11883684,"TERMINAL",0,0,"579995555303131288888",,terminal_output +6250,11885698,"TERMINAL",0,0,"79315:0111777723341010404040",,terminal_output +6251,11887708,"TERMINAL",0,0,"9313339999455622222",,terminal_output +6252,11889752,"TERMINAL",0,0,"1135557:01313131677844444",,terminal_output +6253,11891712,"TERMINAL",0,0,"3577733338994066666",,terminal_output +6254,11893732,"TERMINAL",0,0,"579995555404141288888",,terminal_output +6255,11895794,"TERMINAL",0,0,"79411121777723342020505050",,terminal_output +6256,11897835,"TERMINAL",0,0,"9413339999455622222",,terminal_output +6257,11899743,"TERMINAL",0,0,"21355511414141677844444",,terminal_output +6258,11901754,"TERMINAL",0,0,"3577733338995066666",,terminal_output +6259,11903762,"TERMINAL",0,0,"579995555505151288888",,terminal_output +6260,11905771,"TERMINAL",0,0,"795121317777233430305:006:007:00",,terminal_output +6261,11907876,"TERMINAL",0,0,"9513339999455622222",,terminal_output +6262,11909787,"TERMINAL",0,0,"31355521515151677844444",,terminal_output +6263,11911800,"TERMINAL",0,0,"3577733338991:0066666",,terminal_output +6264,11913809,"TERMINAL",0,0,"5799955554:001:011:01288888",,terminal_output +6265,11915827,"TERMINAL",0,0,"794:013141777723344040101010",,terminal_output +6266,11917910,"TERMINAL",0,0,"96:013339999455622222",,terminal_output +6267,11919860,"TERMINAL",0,0,"413555312:012:017:01677844444",,terminal_output +6268,11921841,"TERMINAL",0,0,"3577733338991066666",,terminal_output +6269,11923854,"TERMINAL",0,0,"579995555101111288888",,terminal_output +6270,11925875,"TERMINAL",0,0,"79114151777723345050202020",,terminal_output +6271,11927947,"TERMINAL",0,0,"9113339999455622222",,terminal_output +6272,11929890,"TERMINAL",0,0,"51355541111111677844444",,terminal_output +6273,11931888,"TERMINAL",0,0,"3577733338992066666",,terminal_output +6274,11933905,"TERMINAL",0,0,"579995555202121288888",,terminal_output +6275,11935906,"TERMINAL",0,0,"7921512:01777723341:004:00303030",,terminal_output +6276,11937982,"TERMINAL",0,0,"9213339999455622222",,terminal_output 
+6277,11940002,"TERMINAL",0,0,"40:01355551212121677844444",,terminal_output +6278,11941930,"TERMINAL",0,0,"3577733338993066666",,terminal_output +6279,11943940,"TERMINAL",0,0,"579995555303131288888",,terminal_output +6280,11945971,"TERMINAL",0,0,"79316:0111777723341010404040",,terminal_output +6281,11948018,"TERMINAL",0,0,"9313339999455622222",,terminal_output +6282,11949973,"TERMINAL",0,0,"1135558:01313131677844444",,terminal_output +6283,11952011,"TERMINAL",0,0,"3577733338994066666",,terminal_output +6284,11954028,"TERMINAL",0,0,"579995555404141288888",,terminal_output +6285,11956005,"TERMINAL",0,0,"79411121777723342020505050",,terminal_output +6286,11958005,"TERMINAL",0,0,"9413339999455622222",,terminal_output +6287,11960100,"TERMINAL",0,0,"21355511414141677844444",,terminal_output +6288,11962046,"TERMINAL",0,0,"3577733338995066666",,terminal_output +6289,11964062,"TERMINAL",0,0,"579995555505151288888",,terminal_output +6290,11966039,"TERMINAL",0,0,"795121317777233430306:007:008:00",,terminal_output +6291,11968048,"TERMINAL",0,0,"9513339999455622222",,terminal_output +6292,11970056,"TERMINAL",0,0,"31355521515151677844444",,terminal_output +6293,11972064,"TERMINAL",0,0,"3577733338992:0066666",,terminal_output +6294,11974072,"TERMINAL",0,0,"5799955555:002:012:01288888",,terminal_output +6295,11976087,"TERMINAL",0,0,"795:013141777723344040101010",,terminal_output +6296,11978124,"TERMINAL",0,0,"97:013339999455622222",,terminal_output +6297,11980172,"TERMINAL",0,0,"413555313:013:018:01677844444",,terminal_output +6298,11982108,"TERMINAL",0,0,"3577733338991066666",,terminal_output +6299,11984118,"TERMINAL",0,0,"579995555101111288888",,terminal_output +6300,11986128,"TERMINAL",0,0,"79114151777723345050202020",,terminal_output +6301,11988158,"TERMINAL",0,0,"9113339999455622222",,terminal_output +6302,11990176,"TERMINAL",0,0,"51355541111111677844444",,terminal_output +6303,11992161,"TERMINAL",0,0,"3577733338992066666",,terminal_output +6304,11994164,"TERMINAL",0,0,"579995555202121288888",,terminal_output +6305,11996171,"TERMINAL",0,0,"7921513:01777723342:005:00303030",,terminal_output +6306,11998192,"TERMINAL",0,0,"9213339999455622222",,terminal_output +6307,12000195,"TERMINAL",0,0,"1:01355551212121677844444",,terminal_output +6308,12002203,"TERMINAL",0,0,"3577733338993066666",,terminal_output +6309,12004207,"TERMINAL",0,0,"579995555303131288888",,terminal_output +6310,12006216,"TERMINAL",0,0,"79317:0111777723341010404040",,terminal_output +6311,12008230,"TERMINAL",0,0,"9313339999455622222",,terminal_output +6312,12010248,"TERMINAL",0,0,"1135559:01313131677844444",,terminal_output +6313,12012243,"TERMINAL",0,0,"3577733338994066666",,terminal_output +6314,12014252,"TERMINAL",0,0,"579995555404141288888",,terminal_output +6315,12016264,"TERMINAL",0,0,"79411121777723342020505050",,terminal_output +6316,12018270,"TERMINAL",0,0,"9413339999455622222",,terminal_output +6317,12020278,"TERMINAL",0,0,"21355511414141677844444",,terminal_output +6318,12022288,"TERMINAL",0,0,"3577733338995066666",,terminal_output +6319,12024295,"TERMINAL",0,0,"579995555505151288888",,terminal_output +6320,12026307,"TERMINAL",0,0,"795121317777233430307:008:009:00",,terminal_output +6321,12028314,"TERMINAL",0,0,"9513339999455622222",,terminal_output +6322,12030324,"TERMINAL",0,0,"31355521515151677844444",,terminal_output +6323,12032334,"TERMINAL",0,0,"3577733338993:0066666",,terminal_output +6324,12034341,"TERMINAL",0,0,"5799955556:003:013:01288888",,terminal_output 
+6325,12036351,"TERMINAL",0,0,"796:013141777723344040101010",,terminal_output +6326,12038359,"TERMINAL",0,0,"98:013339999455622222",,terminal_output +6327,12040371,"TERMINAL",0,0,"413555314:014:019:01677844444",,terminal_output +6328,12042382,"TERMINAL",0,0,"3577733338991066666",,terminal_output +6329,12044391,"TERMINAL",0,0,"579995555101111288888",,terminal_output +6330,12046404,"TERMINAL",0,0,"79114151777723345050202020",,terminal_output +6331,12048407,"TERMINAL",0,0,"9113339999455622222",,terminal_output +6332,12050418,"TERMINAL",0,0,"51355541111111677844444",,terminal_output +6333,12052429,"TERMINAL",0,0,"3577733338992066666",,terminal_output +6334,12054436,"TERMINAL",0,0,"579995555202121288888",,terminal_output +6335,12056444,"TERMINAL",0,0,"7921514:01777723343:006:00303030",,terminal_output +6336,12058453,"TERMINAL",0,0,"9213339999455622222",,terminal_output +6337,12060463,"TERMINAL",0,0,"2:01355551212121677844444",,terminal_output +6338,12062468,"TERMINAL",0,0,"3577733338993066666",,terminal_output +6339,12064480,"TERMINAL",0,0,"579995555303131288888",,terminal_output +6340,12066488,"TERMINAL",0,0,"79318:0111777723341010404040",,terminal_output +6341,12068498,"TERMINAL",0,0,"9313339999455622222",,terminal_output +6342,12070590,"TERMINAL",0,0,"11355510:01313131677844444",,terminal_output +6343,12072516,"TERMINAL",0,0,"3577733338994066666",,terminal_output +6344,12074521,"TERMINAL",0,0,"579995555404141288888",,terminal_output +6345,12076529,"TERMINAL",0,0,"79411121777723342020505050",,terminal_output +6346,12078540,"TERMINAL",0,0,"9413339999455622222",,terminal_output +6347,12080553,"TERMINAL",0,0,"21355511414141677844444",,terminal_output +6348,12082570,"TERMINAL",0,0,"3577733338995066666",,terminal_output +6349,12084617,"TERMINAL",0,0,"579995555505151288888",,terminal_output +6350,12086578,"TERMINAL",0,0,"795121317777233430308:009:003:00:00",,terminal_output +6351,12088583,"TERMINAL",0,0,"95244420505050566733333",,terminal_output +6352,12090596,"TERMINAL",0,0,"3246662222788955555",,terminal_output +6353,12092601,"TERMINAL",0,0,"46888444494:004:004:0177777",,terminal_output +6354,12094622,"TERMINAL",0,0,"687:00304066667:0122399999",,terminal_output +6355,12096624,"TERMINAL",0,0,"89:00222888834454141111111",,terminal_output +6356,12098632,"TERMINAL",0,0,"402444305:005:0050:00566733333",,terminal_output +6357,12100681,"TERMINAL",0,0,"246662222788955555",,terminal_output +6358,12102746,"TERMINAL",0,0,"468884444910101177777",,terminal_output +6359,12104677,"TERMINAL",0,0,"6810405066661122399999",,terminal_output +6360,12106668,"TERMINAL",0,0,"810222888834455151212121",,terminal_output +6361,12108678,"TERMINAL",0,0,"50244440101010566733333",,terminal_output +6362,12110687,"TERMINAL",0,0,"246662222788955555",,terminal_output +6363,12112739,"TERMINAL",0,0,"468884444920202177777",,terminal_output +6364,12114722,"TERMINAL",0,0,"6820505:0066662122399999",,terminal_output +6365,12116716,"TERMINAL",0,0,"820222888834454:017:01313131",,terminal_output +6366,12118729,"TERMINAL",0,0,"3:00244450202020566733333",,terminal_output +6367,12120766,"TERMINAL",0,0,"246662222788955555",,terminal_output +6368,12122747,"TERMINAL",0,0,"468884444930303177777",,terminal_output +6369,12124767,"TERMINAL",0,0,"68309:001066663122399999",,terminal_output +6370,12126766,"TERMINAL",0,0,"830222888834451111414141",,terminal_output +6371,12128776,"TERMINAL",0,0,"1024441:00303030566733333",,terminal_output +6372,12130784,"TERMINAL",0,0,"246662222788955555",,terminal_output 
+6373,12132851,"TERMINAL",0,0,"468884444940404177777",,terminal_output +6374,12134802,"TERMINAL",0,0,"6840102066664122399999",,terminal_output +6375,12136808,"TERMINAL",0,0,"840222888834452121515151",,terminal_output +6376,12138819,"TERMINAL",0,0,"20244410404040566733333",,terminal_output +6377,12140940,"TERMINAL",0,0,"246662222788955555",,terminal_output +6378,12142885,"TERMINAL",0,0,"468884444950505177777",,terminal_output +6379,12144902,"TERMINAL",0,0,"6850203066665122399999",,terminal_output +6380,12146859,"TERMINAL",0,0,"8502228888344531319:013:00:011:01",,terminal_output +6381,12148867,"TERMINAL",0,0,"30244420505050566733333",,terminal_output +6382,12150874,"TERMINAL",0,0,"246662222788955555",,terminal_output +6383,12152882,"TERMINAL",0,0,"46888444495:005:005:0177777",,terminal_output +6384,12154904,"TERMINAL",0,0,"688:00304066668:0122399999",,terminal_output +6385,12156904,"TERMINAL",0,0,"830:00222888834454141111111",,terminal_output +6386,12158919,"TERMINAL",0,0,"402444306:006:001:00566733333",,terminal_output +6387,12160937,"TERMINAL",0,0,"246662222788955555",,terminal_output +6388,12162957,"TERMINAL",0,0,"468884444910101177777",,terminal_output +6389,12164976,"TERMINAL",0,0,"681040506666218:34 2:25:12325033278:54:088:58:57 2:44:49635:1557:49544:27 2:59:19143:27 3:00:19321:192385:19",,terminal_output +6390,12166947,"TERMINAL",0,0,"8102228888445515121212121",,terminal_output +6391,12168958,"TERMINAL",0,0,"50244440101010667333333",,terminal_output +6392,12171045,"TERMINAL",0,0,"246662222889555555",,terminal_output +6393,12172991,"TERMINAL",0,0,"468884444202021777777",,terminal_output +6394,12174985,"TERMINAL",0,0,"6820506:006666223999999",,terminal_output +6395,12177089,"TERMINAL",0,0,"82022288884455:018:0131313131",,terminal_output +6396,12179033,"TERMINAL",0,0,"4:00244450202020667333333",,terminal_output +6397,12181050,"TERMINAL",0,0,"246662222889555555",,terminal_output +6398,12183021,"TERMINAL",0,0,"468884444303031777777",,terminal_output +6399,12185028,"TERMINAL",0,0,"683050:00106666223999999",,terminal_output +6400,12187036,"TERMINAL",0,0,"8302228888445111141414141",,terminal_output +6401,12189045,"TERMINAL",0,0,"1024442:00303030667333333",,terminal_output +6402,12191069,"TERMINAL",0,0,"2466616 R2025-10-04T21:44:102hai004\r 30140 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:32:10 12:02 1-00:00:00 hai0053807:40 36:32492570:5251:326219:18:34 2:25:384385039278:54:088:58:57 2:45:15635:1558:15544:27 2:59:45143:27 3:00:45321:452385:451758:45",,terminal_output +6403,12193164,"TERMINAL",0,0,"46888444444040417777777",,terminal_output +6404,12195072,"TERMINAL",0,0,"68401020666662239999999",,terminal_output +6405,12197155,"TERMINAL",0,0,"8402228888844521215151515151",,terminal_output +6406,12199111,"TERMINAL",0,0,"20244410104040406673333333",,terminal_output +6407,12201104,"TERMINAL",0,0,"24666222228895555555",,terminal_output +6408,12203199,"TERMINAL",0,0,"46888444445050517777777",,terminal_output +6409,12205122,"TERMINAL",0,0,"68502030666662239999999",,terminal_output +6410,12207138,"TERMINAL",0,0,"8502228888844531313:00:011:012:016:019:01",,terminal_output +6411,12209139,"TERMINAL",0,0,"30244420205050506673333333",,terminal_output +6412,12211161,"TERMINAL",0,0,"24666222228895555555",,terminal_output +6413,12213158,"TERMINAL",0,0,"46888444446:006:006:017777777",,terminal_output +6414,12215186,"TERMINAL",0,0,"689:003040666662239999999",,terminal_output +6415,12217228,"TERMINAL",0,0,"81:002228888844541411111111111",,terminal_output 
+6416,12219187,"TERMINAL",0,0,"40244430307:007:002:006673333333",,terminal_output +6417,12221197,"TERMINAL",0,0,"24666222228895555555",,terminal_output +6418,12223270,"TERMINAL",0,0,"46888444441010117777777",,terminal_output +6419,12225316,"TERMINAL",0,0,"68104050666662239999999",,terminal_output +6420,12227218,"TERMINAL",0,0,"8102228888844551512121212121",,terminal_output +6421,12229227,"TERMINAL",0,0,"50244440401010106673333333",,terminal_output +6422,12231243,"TERMINAL",0,0,"24666222228895555555",,terminal_output +6423,12233245,"TERMINAL",0,0,"46888444442020217777777",,terminal_output +6424,12235264,"TERMINAL",0,0,"6820507:00666662239999999",,terminal_output +6425,12237298,"TERMINAL",0,0,"820222888884456:019:013131313131",,terminal_output +6426,12239270,"TERMINAL",0,0,"5:00244450502020206673333333",,terminal_output +6427,12241277,"TERMINAL",0,0,"24666222228895555555",,terminal_output +6428,12243339,"TERMINAL",0,0,"46888444443030317777777",,terminal_output +6429,12245298,"TERMINAL",0,0,"68301:0010666662239999999",,terminal_output +6430,12247332,"TERMINAL",0,0,"8302228888844511114141414141",,terminal_output +6431,12249312,"TERMINAL",0,0,"102444Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)144 1:0044032:10 13:005830491:0737:305720:52:40 52:306264346309:18:339:18:33 2:26:37754:0886:13655:57 2:49:135540:43431:433422:432286:43",,terminal_output +6432,12251330,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6433,12253332,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +6434,12255343,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +6435,12257352,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +6436,12259359,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +6437,12261374,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6438,12263378,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +6439,12265404,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +6440,12267405,"TERMINAL",0,0,"8502228888844531311:012:013:017:01",,terminal_output +6441,12269408,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +6442,12271414,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6443,12273425,"TERMINAL",0,0,"46888444447:007:007:01777777",,terminal_output +6444,12275447,"TERMINAL",0,0,"6850:00304066666223999999",,terminal_output +6445,12277444,"TERMINAL",0,0,"82:0022288888445414111111111",,terminal_output +6446,12279452,"TERMINAL",0,0,"40244430308:008:003:00667333333",,terminal_output +6447,12281460,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6448,12283482,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +6449,12285478,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +6450,12287491,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +6451,12289499,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +6452,12291509,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6453,12293517,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +6454,12295526,"TERMINAL",0,0,"6820508:0066666223999999",,terminal_output +6455,12297535,"TERMINAL",0,0,"820222888884457:0150:0131313131",,terminal_output +6456,12299546,"TERMINAL",0,0,"6:0024445050202020667333333",,terminal_output +6457,12301553,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6458,12303566,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +6459,12305574,"TERMINAL",0,0,"68302:001066666223999999",,terminal_output +6460,12307646,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output 
+6461,12309594,"TERMINAL",0,0,"1035552:014:01313131778444444",,terminal_output +6462,12311601,"TERMINAL",0,0,"35777333339940666666",,terminal_output +6463,12313611,"TERMINAL",0,0,"579995555541412888888",,terminal_output +6464,12315621,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +6465,12317632,"TERMINAL",0,0,"94133399999556222222",,terminal_output +6466,12319642,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +6467,12321646,"TERMINAL",0,0,"35777333339950666666",,terminal_output +6468,12323657,"TERMINAL",0,0,"579995555551512888888",,terminal_output +6469,12325669,"TERMINAL",0,0,"795121317777733430302:003:004:008:00",,terminal_output +6470,12327677,"TERMINAL",0,0,"95133399999556222222",,terminal_output +6471,12329683,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +6472,12331696,"TERMINAL",0,0,"3577733333998:00666666",,terminal_output +6473,12333705,"TERMINAL",0,0,"57999555558:018:012888888",,terminal_output +6474,12335717,"TERMINAL",0,0,"791:01314177777334404010101010",,terminal_output +6475,12337729,"TERMINAL",0,0,"93:0133399999556222222",,terminal_output +6476,12339740,"TERMINAL",0,0,"41355531319:019:014:01778444444",,terminal_output +6477,12341754,"TERMINAL",0,0,"35777333339910666666",,terminal_output +6478,12343766,"TERMINAL",0,0,"579995555511112888888",,terminal_output +6479,12345773,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +6480,12347785,"TERMINAL",0,0,"91133399999556222222",,terminal_output +6481,12349803,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +6482,12351802,"TERMINAL",0,0,"35777333339920666666",,terminal_output +6483,12353821,"TERMINAL",0,0,"579995555521212888888",,terminal_output +6484,12355824,"TERMINAL",0,0,"7921519:01777773348:001:0030303030",,terminal_output +6485,12357925,"TERMINAL",0,0,"92133399999556222222",,terminal_output +6486,12359869,"TERMINAL",0,0,"7:0135555151212121778444444",,terminal_output +6487,12361857,"TERMINAL",0,0,"35777333339930666666",,terminal_output +6488,12363859,"TERMINAL",0,0,"579995555531312888888",,terminal_output +6489,12365879,"TERMINAL",0,0,"79313:011177777334101040404040",,terminal_output +6490,12367958,"TERMINAL",0,0,"93133399999556222222",,terminal_output +6491,12369920,"TERMINAL",0,0,"1135553:015:01313131778444444",,terminal_output +6492,12371903,"TERMINAL",0,0,"35777333339940666666",,terminal_output +6493,12373903,"TERMINAL",0,0,"579995555541412888888",,terminal_output +6494,12375949,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +6495,12377924,"TERMINAL",0,0,"94133399999556222222",,terminal_output +6496,12379947,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +6497,12381990,"TERMINAL",0,0,"35777333339950666666",,terminal_output +6498,12384011,"TERMINAL",0,0,"579995555551512888888",,terminal_output +6499,12385960,"TERMINAL",0,0,"795121317777733430303:004:005:009:00",,terminal_output +6500,12388031,"TERMINAL",0,0,"95133399999556222222",,terminal_output +6501,12389978,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +6502,12391987,"TERMINAL",0,0,"3577733333999:00666666",,terminal_output +6503,12394073,"TERMINAL",0,0,"57999555559:019:012888888",,terminal_output +6504,12396008,"TERMINAL",0,0,"792:01314177777334404010101010",,terminal_output +6505,12398018,"TERMINAL",0,0,"94:0133399999556222222",,terminal_output +6506,12400116,"TERMINAL",0,0,"413555313140:0140:015:01778444444",,terminal_output +6507,12402035,"TERMINAL",0,0,"35777333339910666666",,terminal_output 
+6508,12404043,"TERMINAL",0,0,"579995555511112888888",,terminal_output +6509,12406055,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +6510,12408063,"TERMINAL",0,0,"91133399999556222222",,terminal_output +6511,12410082,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +6512,12412095,"TERMINAL",0,0,"35777333339920666666",,terminal_output +6513,12414092,"TERMINAL",0,0,"579995555521212888888",,terminal_output +6514,12416101,"TERMINAL",0,0,"79215120:00:01777773349:002:0030303030",,terminal_output +6515,12418137,"TERMINAL",0,0,"92133399999556222222",,terminal_output +6516,12420124,"TERMINAL",0,0,"8:0135555151212121778444444",,terminal_output +6517,12422134,"TERMINAL",0,0,"35777333339930666666",,terminal_output +6518,12424142,"TERMINAL",0,0,"579995555531312888888",,terminal_output +6519,12426156,"TERMINAL",0,0,"79314:011177777334101040404040",,terminal_output +6520,12428173,"TERMINAL",0,0,"93133399999556222222",,terminal_output +6521,12430194,"TERMINAL",0,0,"1135554:016:01313131778444444",,terminal_output +6522,12432271,"TERMINAL",0,0,"35777333339940666666",,terminal_output +6523,12434181,"TERMINAL",0,0,"579995555541412888888",,terminal_output +6524,12436201,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +6525,12438222,"TERMINAL",0,0,"94133399999556222222",,terminal_output +6526,12440212,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +6527,12442302,"TERMINAL",0,0,"35777333339950666666",,terminal_output +6528,12444227,"TERMINAL",0,0,"579995555551512888888",,terminal_output +6529,12446297,"TERMINAL",0,0,"795121317777733430304:005:006:0010:00",,terminal_output +6530,12448245,"TERMINAL",0,0,"95133399999556222222",,terminal_output +6531,12450256,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +6532,12452262,"TERMINAL",0,0,"35777333339930:00666666",,terminal_output +6533,12454274,"TERMINAL",0,0,"579995555530:0130:012888888",,terminal_output +6534,12456282,"TERMINAL",0,0,"793:01314177777334404010101010",,terminal_output +6535,12458292,"TERMINAL",0,0,"95:0133399999556222222",,terminal_output +6536,12460310,"TERMINAL",0,0,"41355531311:011:016:01778444444",,terminal_output +6537,12462328,"TERMINAL",0,0,"35777333339910666666",,terminal_output +6538,12464315,"TERMINAL",0,0,"579995555511112888888",,terminal_output +6539,12466371,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +6540,12468336,"TERMINAL",0,0,"91133399999556222222",,terminal_output +6541,12470358,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +6542,12472415,"TERMINAL",0,0,"35777333339920666666",,terminal_output +6543,12474359,"TERMINAL",0,0,"579995555521212888888",,terminal_output +6544,12476381,"TERMINAL",0,0,"7921511:017777733450:003:0030303030",,terminal_output +6545,12478390,"TERMINAL",0,0,"92133399999556222222",,terminal_output +6546,12480401,"TERMINAL",0,0,"9:0135555151212121778444444",,terminal_output +6547,12482441,"TERMINAL",0,0,"35777333339930666666",,terminal_output +6548,12484404,"TERMINAL",0,0,"579995555531312888888",,terminal_output +6549,12486411,"TERMINAL",0,0,"79315:011177777334101040404040",,terminal_output +6550,12488421,"TERMINAL",0,0,"93133399999556222222",,terminal_output +6551,12490443,"TERMINAL",0,0,"1135555:017:01313131778444444",,terminal_output +6552,12492445,"TERMINAL",0,0,"35777333339940666666",,terminal_output +6553,12494455,"TERMINAL",0,0,"579995555541412888888",,terminal_output +6554,12496473,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output 
+6555,12498472,"TERMINAL",0,0,"94133399999556222222",,terminal_output +6556,12500569,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +6557,12502515,"TERMINAL",0,0,"35777333339950666666",,terminal_output +6558,12504496,"TERMINAL",0,0,"579995555551512888888",,terminal_output +6559,12506514,"TERMINAL",0,0,"795121317777733430305:006:007:001:00",,terminal_output +6560,12508511,"TERMINAL",0,0,"95133399999556222222",,terminal_output +6561,12510526,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +6562,12512528,"TERMINAL",0,0,"3577733333991:00666666",,terminal_output +6563,12514560,"TERMINAL",0,0,"57999555551:011:012888888",,terminal_output +6564,12516570,"TERMINAL",0,0,"794:01314177777334404010101010",,terminal_output +6565,12518559,"TERMINAL",0,0,"96:0133399999556222222",,terminal_output +6566,12520641,"TERMINAL",0,0,"41355531312:012:017:01778444444",,terminal_output +6567,12522583,"TERMINAL",0,0,"35777333339910666666",,terminal_output +6568,12524609,"TERMINAL",0,0,"579995555511112888888",,terminal_output +6569,12526592,"TERMINAL",0,0,"71012425288888445515121212121",,terminal_output +6570,12528600,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +6571,12530608,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6572,12532623,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +6573,12534673,"TERMINAL",0,0,"6820502:0066666223999999",,terminal_output +6574,12536635,"TERMINAL",0,0,"820222888884451:014:0131313131",,terminal_output +6575,12538645,"TERMINAL",0,0,"50:0024445050202020667333333",,terminal_output +6576,12540677,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6577,12542661,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +6578,12544670,"TERMINAL",0,0,"68306:001066666223999999",,terminal_output +6579,12546680,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +6580,12548690,"TERMINAL",0,0,"1024446:008:00303030667333333",,terminal_output +6581,12550698,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6582,12552704,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +6583,12554716,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +6584,12556724,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +6585,12558735,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +6586,12560741,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6587,12562752,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +6588,12564759,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +6589,12566766,"TERMINAL",0,0,"8502228888844531316:017:018:012:01",,terminal_output +6590,12568775,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +6591,12570797,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6592,12572797,"TERMINAL",0,0,"46888444442:002:002:01777777",,terminal_output +6593,12574804,"TERMINAL",0,0,"685:00304066666223999999",,terminal_output +6594,12576819,"TERMINAL",0,0,"87:0022288888445414111111111",,terminal_output +6595,12578823,"TERMINAL",0,0,"40244430303:003:008:00667333333",,terminal_output +6596,12580831,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6597,12582839,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +6598,12584850,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +6599,12586858,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +6600,12588876,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +6601,12590877,"TERMINAL",0,0,"2466622222889555555",,terminal_output 
+6602,12592887,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +6603,12594906,"TERMINAL",0,0,"6820503:0066666223999999",,terminal_output +6604,12596903,"TERMINAL",0,0,"820222888884452:015:0131313131",,terminal_output +6605,12598922,"TERMINAL",0,0,"1:0024445050202020667333333",,terminal_output +6606,12600925,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6607,12602969,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +6608,12605014,"TERMINAL",0,0,"68307:001066666223999999",,terminal_output +6609,12606948,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +6610,12609010,"TERMINAL",0,0,"1024447:009:00303030667333333",,terminal_output +6611,12611058,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6612,12612979,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +6613,12614986,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +6614,12617101,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +6615,12619002,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +6616,12621095,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6617,12623040,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +6618,12625058,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +6619,12627136,"TERMINAL",0,0,"8502228888844531317:018:019:013:01",,terminal_output +6620,12629049,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +6621,12631055,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6622,12633074,"TERMINAL",0,0,"46888444443:003:003:01777777",,terminal_output +6623,12635072,"TERMINAL",0,0,"686:00304066666223999999",,terminal_output +6624,12637081,"TERMINAL",0,0,"88:0022288888445414111111111",,terminal_output +6625,12639100,"TERMINAL",0,0,"40244430304:004:009:00667333333",,terminal_output +6626,12641112,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6627,12643211,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +6628,12645121,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +6629,12647131,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +6630,12649137,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +6631,12651154,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6632,12653248,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +6633,12655194,"TERMINAL",0,0,"6820504:0066666223999999",,terminal_output +6634,12657240,"TERMINAL",0,0,"820222888884453:016:0131313131",,terminal_output +6635,12659184,"TERMINAL",0,0,"2:0024445050202020667333333",,terminal_output +6636,12661197,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6637,12663283,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +6638,12665229,"TERMINAL",0,0,"68308:001066666223999999",,terminal_output +6639,12667275,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +6640,12669229,"TERMINAL",0,0,"1024448:0020:00303030667333333",,terminal_output +6641,12671237,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6642,12673318,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +6643,12675255,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +6644,12677266,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +6645,12679272,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +6646,12681282,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6647,12683292,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +6648,12685301,"TERMINAL",0,0,"6850203066666223999999",,terminal_output 
+6649,12687349,"TERMINAL",0,0,"8502228888844531318:019:0110:014:01",,terminal_output +6650,12689329,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +6651,12691328,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6652,12693389,"TERMINAL",0,0,"46888444444:004:004:01777777",,terminal_output +6653,12695441,"TERMINAL",0,0,"687:00304066666223999999",,terminal_output +6654,12697385,"TERMINAL",0,0,"89:0022288888445414111111111",,terminal_output +6655,12699364,"TERMINAL",0,0,"40244430305:005:001:00:00667333333",,terminal_output +6656,12701374,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6657,12703392,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +6658,12705406,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +6659,12707417,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +6660,12709410,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +6661,12711421,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6662,12713458,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +6663,12715476,"TERMINAL",0,0,"6820505:0066666223999999",,terminal_output +6664,12717556,"TERMINAL",0,0,"820222888884454:017:0131313131",,terminal_output +6665,12719455,"TERMINAL",0,0,"3:0024445050202020667333333",,terminal_output +6666,12721484,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6667,12723491,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +6668,12725514,"TERMINAL",0,0,"68309:001066666223999999",,terminal_output +6669,12727496,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +6670,12729509,"TERMINAL",0,0,"1024449:001:00303030667333333",,terminal_output +6671,12731513,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6672,12733523,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +6673,12735537,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +6674,12737538,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +6675,12739547,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +6676,12741557,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6677,12743569,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +6678,12745575,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +6679,12747585,"TERMINAL",0,0,"8502228888844531319:0110:011:015:01",,terminal_output +6680,12749607,"TERMINAL",0,0,"3035552121515151778444444",,terminal_output +6681,12751604,"TERMINAL",0,0,"3577733333995:00666666",,terminal_output +6682,12753610,"TERMINAL",0,0,"57999555555:015:012888888",,terminal_output +6683,12755631,"TERMINAL",0,0,"798:01314177777334404010101010",,terminal_output +6684,12757695,"TERMINAL",0,0,"940:0133399999556222222",,terminal_output +6685,12759714,"TERMINAL",0,0,"41355531316:016:011:01778444444",,terminal_output +6686,12761644,"TERMINAL",0,0,"35777333339910666666",,terminal_output +6687,12763659,"TERMINAL",0,0,"579995555511112888888",,terminal_output +6688,12765676,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +6689,12767730,"TERMINAL",0,0,"91133399999556222222",,terminal_output +6690,12769750,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +6691,12771691,"TERMINAL",0,0,"35777333339920666666",,terminal_output +6692,12773706,"TERMINAL",0,0,"579995555521212888888",,terminal_output +6693,12775709,"TERMINAL",0,0,"7921516:01777773345:008:0030303030",,terminal_output +6694,12777770,"TERMINAL",0,0,"92133399999556222222",,terminal_output +6695,12779789,"TERMINAL",0,0,"4:0135555151212121778444444",,terminal_output 
+6696,12781738,"TERMINAL",0,0,"35777333339930666666",,terminal_output +6697,12783750,"TERMINAL",0,0,"579995555531312888888",,terminal_output +6698,12785764,"TERMINAL",0,0,"79316:00:011177777334101040404040",,terminal_output +6699,12787803,"TERMINAL",0,0,"93133399999556222222",,terminal_output +6700,12789818,"TERMINAL",0,0,"11355510:012:01313131778444444",,terminal_output +6701,12791775,"TERMINAL",0,0,"35777333339940666666",,terminal_output +6702,12793794,"TERMINAL",0,0,"579995555541412888888",,terminal_output +6703,12795797,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +6704,12797802,"TERMINAL",0,0,"94133399999556222222",,terminal_output +6705,12799811,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +6706,12801823,"TERMINAL",0,0,"35777333339950666666",,terminal_output +6707,12803828,"TERMINAL",0,0,"579995555551512888888",,terminal_output +6708,12805838,"TERMINAL",0,0,"7951213177777334303010:001:002:006:00",,terminal_output +6709,12807873,"TERMINAL",0,0,"95133399999556222222",,terminal_output +6710,12809855,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +6711,12811869,"TERMINAL",0,0,"3577733333996:00666666",,terminal_output +6712,12813872,"TERMINAL",0,0,"57999555556:016:012888888",,terminal_output +6713,12815882,"TERMINAL",0,0,"799:01314177777334404010101010",,terminal_output +6714,12817892,"TERMINAL",0,0,"91:0133399999556222222",,terminal_output +6715,12819898,"TERMINAL",0,0,"41355531317:017:012:01778444444",,terminal_output +6716,12821908,"TERMINAL",0,0,"35777333339910666666",,terminal_output +6717,12823918,"TERMINAL",0,0,"579995555511112888888",,terminal_output +6718,12825928,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +6719,12827936,"TERMINAL",0,0,"91133399999556222222",,terminal_output +6720,12829947,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +6721,12831955,"TERMINAL",0,0,"35777333339920666666",,terminal_output +6722,12833961,"TERMINAL",0,0,"579995555521212888888",,terminal_output +6723,12835974,"TERMINAL",0,0,"7921517:01777773346:009:0030303030",,terminal_output +6724,12837983,"TERMINAL",0,0,"92133399999556222222",,terminal_output +6725,12839995,"TERMINAL",0,0,"5:0135555151212121778444444",,terminal_output +6726,12842000,"TERMINAL",0,0,"35777333339930666666",,terminal_output +6727,12844012,"TERMINAL",0,0,"579995555531312888888",,terminal_output +6728,12846020,"TERMINAL",0,0,"79311:011177777334101040404040",,terminal_output +6729,12848028,"TERMINAL",0,0,"93133399999556222222",,terminal_output +6730,12850039,"TERMINAL",0,0,"1135551:013:01313131778444444",,terminal_output +6731,12852108,"TERMINAL",0,0,"35777333339940666666",,terminal_output +6732,12854058,"TERMINAL",0,0,"579995555541412888888",,terminal_output +6733,12856068,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +6734,12858077,"TERMINAL",0,0,"94133399999556222222",,terminal_output +6735,12860083,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +6736,12862144,"TERMINAL",0,0,"35777333339950666666",,terminal_output +6737,12864100,"TERMINAL",0,0,"579995555551512888888",,terminal_output +6738,12866111,"TERMINAL",0,0,"795121317777733430301:002:003:007:00",,terminal_output +6739,12868188,"TERMINAL",0,0,"95133399999556222222",,terminal_output +6740,12870129,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +6741,12872135,"TERMINAL",0,0,"3577733333997:00666666",,terminal_output +6742,12874148,"TERMINAL",0,0,"57999555557:017:012888888",,terminal_output 
+6743,12876171,"TERMINAL",0,0,"796:00:01314177777334404010101010",,terminal_output +6744,12878223,"TERMINAL",0,0,"92:0133399999556222222",,terminal_output +6745,12880241,"TERMINAL",0,0,"41298:55:30 2025-10-05T01:48:31429:45:19355531318:018:013:01778444444",,terminal_output +6746,12882215,"TERMINAL",0,0,"35777333339910666666",,terminal_output +6747,12884190,"TERMINAL",0,0,"579995555511112888888",,terminal_output +6748,12886200,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +6749,12888211,"TERMINAL",0,0,"91133399999556222222",,terminal_output +6750,12890219,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +6751,12892250,"TERMINAL",0,0,"35777333339920666666",,terminal_output +6752,12894234,"TERMINAL",0,0,"579995555521212888888",,terminal_output +6753,12896349,"TERMINAL",0,0,"7921518:01777773347:003:00:0030303030",,terminal_output +6754,12898292,"TERMINAL",0,0,"92133399999556222222",,terminal_output +6755,12900311,"TERMINAL",0,0,"6:0135555151212121778444444",,terminal_output +6756,12902272,"TERMINAL",0,0,"35777333339930666666",,terminal_output +6757,12904279,"TERMINAL",0,0,"579995555531312888888",,terminal_output +6758,12906288,"TERMINAL",0,0,"79312:011177777334101040404040",,terminal_output +6759,12908310,"TERMINAL",0,0,"93133399999556222222",,terminal_output +6760,12910325,"TERMINAL",0,0,"1135552:014:01313131778444444",,terminal_output +6761,12912325,"TERMINAL",0,0,"35777333339940666666",,terminal_output +6762,12914325,"TERMINAL",0,0,"579995555541412888888",,terminal_output +6763,12916333,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +6764,12918340,"TERMINAL",0,0,"94133399999556222222",,terminal_output +6765,12920351,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +6766,12922460,"TERMINAL",0,0,"35777333339950666666",,terminal_output +6767,12924371,"TERMINAL",0,0,"579995555551512888888",,terminal_output +6768,12926383,"TERMINAL",0,0,"795121317777733430302:003:004:008:00",,terminal_output +6769,12928391,"TERMINAL",0,0,"95133399999556222222",,terminal_output +6770,12930405,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +6771,12932409,"TERMINAL",0,0,"3577733333998:00666666",,terminal_output +6772,12934415,"TERMINAL",0,0,"57999555558:018:012888888",,terminal_output +6773,12936429,"TERMINAL",0,0,"791:01314177777334404010101010",,terminal_output +6774,12938435,"TERMINAL",0,0,"93:0133399999556222222",,terminal_output +6775,12940455,"TERMINAL",0,0,"41355531319:019:014:01778444444",,terminal_output +6776,12942455,"TERMINAL",0,0,"35777333339910666666",,terminal_output +6777,12944473,"TERMINAL",0,0,"579995555511112888888",,terminal_output +6778,12946476,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +6779,12948486,"TERMINAL",0,0,"91133399999556222222",,terminal_output +6780,12950504,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +6781,12952562,"TERMINAL",0,0,"35777333339920666666",,terminal_output +6782,12954514,"TERMINAL",0,0,"579995555521212888888",,terminal_output +6783,12956563,"TERMINAL",0,0,"7921519:01777773348:001:0030303030",,terminal_output +6784,12958532,"TERMINAL",0,0,"92133399999556222222",,terminal_output +6785,12960540,"TERMINAL",0,0,"7:0135555151212121778444444",,terminal_output +6786,12962551,"TERMINAL",0,0,"35777333339930666666",,terminal_output +6787,12964647,"TERMINAL",0,0,"579995555531312888888",,terminal_output +6788,12966569,"TERMINAL",0,0,"79313:011177777334101040404040",,terminal_output +6789,12968576,"TERMINAL",0,0,"93133399999556222222",,terminal_output 
+6790,12970690,"TERMINAL",0,0,"1146663:025:02323232889555555",,terminal_output +6791,12972634,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +6792,12974611,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +6793,12976611,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +6794,12978622,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +6795,12980724,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6796,12982667,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +6797,12984652,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +6798,12986657,"TERMINAL",0,0,"8502228888844531313:014:015:019:01",,terminal_output +6799,12988671,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +6800,12990676,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6801,12992685,"TERMINAL",0,0,"46888444449:009:009:01777777",,terminal_output +6802,12994705,"TERMINAL",0,0,"682:00304066666223999999",,terminal_output +6803,12996704,"TERMINAL",0,0,"84:0022288888445414111111111",,terminal_output +6804,12998713,"TERMINAL",0,0,"402444303050:0050:005:00667333333",,terminal_output +6805,13000745,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6806,13002729,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +6807,13004788,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +6808,13006749,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +6809,13008761,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +6810,13010767,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6811,13012876,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +6812,13014826,"TERMINAL",0,0,"68205010:0066666223999999",,terminal_output +6813,13016793,"TERMINAL",0,0,"820222888884459:012:0131313131",,terminal_output +6814,13018806,"TERMINAL",0,0,"8:0024445050202020667333333",,terminal_output +6815,13020811,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6816,13022825,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +6817,13024848,"TERMINAL",0,0,"68304:001066666223999999",,terminal_output +6818,13026840,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +6819,13028957,"TERMINAL",0,0,"1024444:006:00303030667333333",,terminal_output +6820,13030904,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6821,13032949,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +6822,13034876,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +6823,13036887,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +6824,13038887,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +6825,13040902,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6826,13042904,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +6827,13044914,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +6828,13046923,"TERMINAL",0,0,"8502228888844531314:015:016:0120:01",,terminal_output +6829,13048931,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +6830,13050939,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6831,13052950,"TERMINAL",0,0,"468884444440:0040:0040:01777777",,terminal_output +6832,13054964,"TERMINAL",0,0,"683:00304066666223999999",,terminal_output +6833,13056966,"TERMINAL",0,0,"85:0022288888445414111111111",,terminal_output +6834,13059036,"TERMINAL",0,0,"40244430301:001:006:00667333333",,terminal_output +6835,13060986,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6836,13063013,"TERMINAL",0,0,"4688844444101011777777",,terminal_output 
+6837,13065001,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +6838,13067012,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +6839,13069020,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +6840,13071029,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6841,13073038,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +6842,13075048,"TERMINAL",0,0,"6820501:0066666223999999",,terminal_output +6843,13077254,"TERMINAL",0,0,"820222888884453:00:013:0131313131",,terminal_output +6844,13079064,"TERMINAL",0,0,"9:0024445050202020667333333",,terminal_output +6845,13081079,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6846,13083089,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +6847,13085098,"TERMINAL",0,0,"68305:001066666223999999",,terminal_output +6848,13087103,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +6849,13089116,"TERMINAL",0,0,"1024445:007:00303030667333333",,terminal_output +6850,13091129,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6851,13093131,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +6852,13095137,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +6853,13097148,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +6854,13099169,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +6855,13101168,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6856,13103176,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +6857,13105187,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +6858,13107197,"TERMINAL",0,0,"8502228888844531315:016:017:011:01",,terminal_output +6859,13109204,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +6860,13111218,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6861,13113226,"TERMINAL",0,0,"46888444441:001:001:01777777",,terminal_output +6862,13115233,"TERMINAL",0,0,"684:00304066666223999999",,terminal_output +6863,13117240,"TERMINAL",0,0,"86:0022288888445414111111111",,terminal_output +6864,13119251,"TERMINAL",0,0,"40244430302:002:007:00667333333",,terminal_output +6865,13121325,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6866,13123266,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +6867,13125284,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +6868,13127285,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +6869,13129293,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +6870,13131311,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6871,13133402,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +6872,13135323,"TERMINAL",0,0,"6820502:0066666223999999",,terminal_output +6873,13137403,"TERMINAL",0,0,"820222888884451:014:0131313131",,terminal_output +6874,13139341,"TERMINAL",0,0,"2:00:0024445050202020667333333",,terminal_output +6875,13141346,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6876,13143354,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +6877,13145368,"TERMINAL",0,0,"68306:001066666223999999",,terminal_output +6878,13147434,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +6879,13149390,"TERMINAL",0,0,"1024446:008:00303030667333333",,terminal_output +6880,13151411,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6881,13153402,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +6882,13155410,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +6883,13157468,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output 
+6884,13159429,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +6885,13161462,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6886,13163512,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +6887,13165459,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +6888,13167500,"TERMINAL",0,0,"8502228888844531316:017:018:012:01",,terminal_output +6889,13169487,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +6890,13171493,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6891,13173544,"TERMINAL",0,0,"46888444442:002:002:01777777",,terminal_output +6892,13175589,"TERMINAL",0,0,"685:00304066666223999999",,terminal_output +6893,13177539,"TERMINAL",0,0,"87:0022288888445414111111111",,terminal_output +6894,13179532,"TERMINAL",0,0,"40244430303:003:008:00667333333",,terminal_output +6895,13181539,"TERMINAL",0,0,"2466622222889555555",,terminal_output +6896,13183549,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +6897,13185557,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +6898,13187624,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +6899,13189619,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +6900,13191590,"TERMINAL",0,0,"25777333339920666666",,terminal_output +6901,13193609,"TERMINAL",0,0,"579995555521212888888",,terminal_output +6902,13195609,"TERMINAL",0,0,"7921513:01777773342:005:0030303030",,terminal_output +6903,13197710,"TERMINAL",0,0,"92133399999556222222",,terminal_output +6904,13199656,"TERMINAL",0,0,"1:0135555151212121778444444",,terminal_output +6905,13201633,"TERMINAL",0,0,"35777333339930666666",,terminal_output +6906,13203649,"TERMINAL",0,0,"579995555531312888888",,terminal_output +6907,13205666,"TERMINAL",0,0,"79317:011177777334101040404040",,terminal_output +6908,13207746,"TERMINAL",0,0,"93133399999556222222",,terminal_output +6909,13209671,"TERMINAL",0,0,"1135557:019:01313131778444444",,terminal_output +6910,13211679,"TERMINAL",0,0,"35777333339940666666",,terminal_output +6911,13213696,"TERMINAL",0,0,"579995555541412888888",,terminal_output +6912,13215733,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +6913,13217780,"TERMINAL",0,0,"94133399999556222222",,terminal_output +6914,13219717,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +6915,13221727,"TERMINAL",0,0,"35777333339950666666",,terminal_output +6916,13223747,"TERMINAL",0,0,"579995555551512888888",,terminal_output +6917,13225750,"TERMINAL",0,0,"795121317777733430307:008:009:003:00",,terminal_output +6918,13227818,"TERMINAL",0,0,"95133399999556222222",,terminal_output +6919,13229768,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +6920,13231776,"TERMINAL",0,0,"3577733333993:00666666",,terminal_output +6921,13233793,"TERMINAL",0,0,"57999555553:013:012888888",,terminal_output +6922,13235795,"TERMINAL",0,0,"796:01314177777334404010101010",,terminal_output +6923,13237807,"TERMINAL",0,0,"98:0133399999556222222",,terminal_output +6924,13239822,"TERMINAL",0,0,"41355531314:014:019:01778444444",,terminal_output +6925,13241825,"TERMINAL",0,0,"35777333339910666666",,terminal_output +6926,13243900,"TERMINAL",0,0,"579995555511112888888",,terminal_output +6927,13245940,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +6928,13247888,"TERMINAL",0,0,"91133399999556222222",,terminal_output +6929,13249884,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +6930,13251874,"TERMINAL",0,0,"35777333339920666666",,terminal_output +6931,13253881,"TERMINAL",0,0,"579995555521212888888",,terminal_output 
+6932,13255892,"TERMINAL",0,0,"7921514:01777773343:006:0030303030",,terminal_output +6933,13257922,"TERMINAL",0,0,"92133399999556222222",,terminal_output +6934,13259940,"TERMINAL",0,0,"2:0135555151212121778444444",,terminal_output +6935,13261918,"TERMINAL",0,0,"35777333339930666666",,terminal_output +6936,13263935,"TERMINAL",0,0,"579995555531312888888",,terminal_output +6937,13265953,"TERMINAL",0,0,"79318:011177777334101040404040",,terminal_output +6938,13267957,"TERMINAL",0,0,"93133399999556222222",,terminal_output +6939,13269974,"TERMINAL",0,0,"1135558:0130:01313131778444444",,terminal_output +6940,13272057,"TERMINAL",0,0,"35777333339940666666",,terminal_output +6941,13274000,"TERMINAL",0,0,"579995555541412888888",,terminal_output +6942,13276047,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +6943,13277997,"TERMINAL",0,0,"94133399999556222222",,terminal_output +6944,13280005,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +6945,13282091,"TERMINAL",0,0,"35777333339950666666",,terminal_output +6946,13284034,"TERMINAL",0,0,"579995555551512888888",,terminal_output +6947,13286036,"TERMINAL",0,0,"795121317777733430308:009:0020:004:00",,terminal_output +6948,13288069,"TERMINAL",0,0,"95133399999556222222",,terminal_output +6949,13290078,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +6950,13292071,"TERMINAL",0,0,"3577733333994:00666666",,terminal_output +6951,13294092,"TERMINAL",0,0,"57999555554:014:012888888",,terminal_output +6952,13296116,"TERMINAL",0,0,"797:01314177777334404010101010",,terminal_output +6953,13298096,"TERMINAL",0,0,"99:0133399999556222222",,terminal_output +6954,13300112,"TERMINAL",0,0,"41355531315:015:0110:01778444444",,terminal_output +6955,13302161,"TERMINAL",0,0,"35777333339910666666",,terminal_output +6956,13304123,"TERMINAL",0,0,"579995555511112888888",,terminal_output +6957,13306133,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +6958,13308141,"TERMINAL",0,0,"91133399999556222222",,terminal_output +6959,13310152,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +6960,13312160,"TERMINAL",0,0,"35777333339920666666",,terminal_output +6961,13314169,"TERMINAL",0,0,"579995555521212888888",,terminal_output +6962,13316225,"TERMINAL",0,0,"7921515:01777773344:007:0030303030",,terminal_output +6963,13318186,"TERMINAL",0,0,"92133399999556222222",,terminal_output +6964,13320199,"TERMINAL",0,0,"3:0135555151212121778444444",,terminal_output +6965,13322206,"TERMINAL",0,0,"35777333339930666666",,terminal_output +6966,13324223,"TERMINAL",0,0,"579995555531312888888",,terminal_output +6967,13326233,"TERMINAL",0,0,"79319:011177777334101040404040",,terminal_output +6968,13328239,"TERMINAL",0,0,"93133399999556222222",,terminal_output +6969,13330247,"TERMINAL",0,0,"1135559:011:01313131778444444",,terminal_output +6970,13332258,"TERMINAL",0,0,"35777333339940666666",,terminal_output +6971,13334266,"TERMINAL",0,0,"579995555541412888888",,terminal_output +6972,13336276,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +6973,13338285,"TERMINAL",0,0,"94133399999556222222",,terminal_output +6974,13340290,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +6975,13342302,"TERMINAL",0,0,"35777333339950666666",,terminal_output +6976,13344308,"TERMINAL",0,0,"579995555551512888888",,terminal_output +6977,13346319,"TERMINAL",0,0,"795121317777733430309:0020:001:005:00",,terminal_output +6978,13348326,"TERMINAL",0,0,"95133399999556222222",,terminal_output 
+6979,13350336,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +6980,13352348,"TERMINAL",0,0,"3577733333995:00666666",,terminal_output +6981,13354356,"TERMINAL",0,0,"57999555555:015:012888888",,terminal_output +6982,13356367,"TERMINAL",0,0,"798:01314177777334404010101010",,terminal_output +6983,13358389,"TERMINAL",0,0,"950:0133399999556222222",,terminal_output +6984,13360384,"TERMINAL",0,0,"41355531316:016:011:01778444444",,terminal_output +6985,13362388,"TERMINAL",0,0,"35777333339910666666",,terminal_output +6986,13364402,"TERMINAL",0,0,"579995555511112888888",,terminal_output +6987,13366417,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +6988,13368434,"TERMINAL",0,0,"91133399999556222222",,terminal_output +6989,13370429,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +6990,13372439,"TERMINAL",0,0,"35777333339920666666",,terminal_output +6991,13374446,"TERMINAL",0,0,"579995555521212888888",,terminal_output +6992,13376501,"TERMINAL",0,0,"7921516:01777773345:008:0030303030",,terminal_output +6993,13378466,"TERMINAL",0,0,"92133399999556222222",,terminal_output +6994,13380497,"TERMINAL",0,0,"4:0135555151212121778444444",,terminal_output +6995,13382486,"TERMINAL",0,0,"35777333339930666666",,terminal_output +6996,13384494,"TERMINAL",0,0,"579995555531312888888",,terminal_output +6997,13386503,"TERMINAL",0,0,"793110:011177777334101040404040",,terminal_output +6998,13388513,"TERMINAL",0,0,"93133399999556222222",,terminal_output +6999,13390530,"TERMINAL",0,0,"11355520:012:01313131778444444",,terminal_output +7000,13392529,"TERMINAL",0,0,"35777333339940666666",,terminal_output +7001,13394549,"TERMINAL",0,0,"579995555541412888888",,terminal_output +7002,13396550,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +7003,13398559,"TERMINAL",0,0,"94133399999556222222",,terminal_output +7004,13400669,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +7005,13402578,"TERMINAL",0,0,"35777333339950666666",,terminal_output +7006,13404596,"TERMINAL",0,0,"585020306666652523999999",,terminal_output +7007,13406595,"TERMINAL",0,0,"85022288888445313120:011:012:016:01",,terminal_output +7008,13408606,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +7009,13410614,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7010,13412621,"TERMINAL",0,0,"46888444446:006:006:01777777",,terminal_output +7011,13414674,"TERMINAL",0,0,"689:00304066666223999999",,terminal_output +7012,13416637,"TERMINAL",0,0,"81:0022288888445414111111111",,terminal_output +7013,13418649,"TERMINAL",0,0,"40244430307:007:002:00667333333",,terminal_output +7014,13420739,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7015,13422684,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +7016,13424702,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +7017,13426686,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +7018,13428697,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +7019,13430717,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7020,13432718,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +7021,13434736,"TERMINAL",0,0,"6820507:0066666223999999",,terminal_output +7022,13436738,"TERMINAL",0,0,"820222888884456:019:0131313131",,terminal_output +7023,13438743,"TERMINAL",0,0,"5:0024445050202020667333333",,terminal_output +7024,13440762,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7025,13442761,"TERMINAL",0,0,"4688844444303031777777",,terminal_output 
+7026,13444778,"TERMINAL",0,0,"68301:001066666223999999",,terminal_output +7027,13446779,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +7028,13448790,"TERMINAL",0,0,"1024441:003:00303030667333333",,terminal_output +7029,13450845,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7030,13452806,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +7031,13454824,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +7032,13456826,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +7033,13458831,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +7034,13460849,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7035,13462854,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +7036,13464862,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +7037,13466870,"TERMINAL",0,0,"8502228888844531311:012:013:017:01",,terminal_output +7038,13468890,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +7039,13470887,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7040,13472899,"TERMINAL",0,0,"46888444447:007:007:01777777",,terminal_output +7041,13474907,"TERMINAL",0,0,"6810:00304066666223999999",,terminal_output +7042,13476915,"TERMINAL",0,0,"82:0022288888445414111111111",,terminal_output +7043,13478924,"TERMINAL",0,0,"40244430308:008:003:00667333333",,terminal_output +7044,13480931,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7045,13482998,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +7046,13485017,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +7047,13486963,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +7048,13488970,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +7049,13490981,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7050,13493018,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +7051,13495001,"TERMINAL",0,0,"6820508:0066666223999999",,terminal_output +7052,13497027,"TERMINAL",0,0,"820222888884457:0110:0131313131",,terminal_output +7053,13499044,"TERMINAL",0,0,"6:0024445050202020667333333",,terminal_output +7054,13501064,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7055,13503066,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +7056,13505046,"TERMINAL",0,0,"68302:001066666223999999",,terminal_output +7057,13507057,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +7058,13509076,"TERMINAL",0,0,"1024442:004:00303030667333333",,terminal_output +7059,13511073,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7060,13513101,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +7061,13515092,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +7062,13517198,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +7063,13519110,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +7064,13521193,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7065,13523138,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +7066,13525185,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +7067,13527238,"TERMINAL",0,0,"8502228888844531312:013:014:018:01",,terminal_output +7068,13529158,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +7069,13531167,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7070,13533277,"TERMINAL",0,0,"46888444448:008:008:01777777",,terminal_output +7071,13535223,"TERMINAL",0,0,"681:00304066666223999999",,terminal_output +7072,13537271,"TERMINAL",0,0,"83:0022288888445414111111111",,terminal_output 
+7073,13539204,"TERMINAL",0,0,"40244430309:009:004:00667333333",,terminal_output +7074,13541222,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7075,13543313,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +7076,13545236,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +7077,13547237,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +7078,13549249,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +7079,13551258,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7080,13553266,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +7081,13555283,"TERMINAL",0,0,"6820509:0066666223999999",,terminal_output +7082,13557284,"TERMINAL",0,0,"820222888884458:011:0131313131",,terminal_output +7083,13559295,"TERMINAL",0,0,"7:0024445050202020667333333",,terminal_output +7084,13561305,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7085,13563310,"TERMINAL",0,0,"4688844444[53d303031777777",,terminal_output +7086,13565323,"TERMINAL",0,0,"68303:001066666223999999",,terminal_output +7087,13567330,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +7088,13569339,"TERMINAL",0,0,"1024443:005:00303030667333333",,terminal_output +7089,13571349,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7090,13573416,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +7091,13575368,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +7092,13577379,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +7093,13579390,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +7094,13581401,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7095,13583404,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +7096,13585414,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +7097,13587426,"TERMINAL",0,0,"8502228888844531313:014:015:019:01",,terminal_output +7098,13589439,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +7099,13591444,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7100,13593490,"TERMINAL",0,0,"46888444449:009:009:01777777",,terminal_output +7101,13595459,"TERMINAL",0,0,"682:00304066666223999999",,terminal_output +7102,13597469,"TERMINAL",0,0,"84:0022288888445414111111111",,terminal_output +7103,13599478,"TERMINAL",0,0,"40244430301:00:001:00:005:00667333333",,terminal_output +7104,13601489,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7105,13603500,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +7106,13605515,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +7107,13607618,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +7108,13609568,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +7109,13611585,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7110,13613550,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +7111,13615557,"TERMINAL",0,0,"68205020:0066666223999999",,terminal_output +7112,13617655,"TERMINAL",0,0,"820222888884459:012:0131313131",,terminal_output +7113,13619599,"TERMINAL",0,0,"8:0024445050202020667333333",,terminal_output +7114,13621586,"TERMINAL",0,0,"25777333339930666666",,terminal_output +7115,13623610,"TERMINAL",0,0,"579995555531312888888",,terminal_output +7116,13625628,"TERMINAL",0,0,"79314:011177777334101040404040",,terminal_output +7117,13627620,"TERMINAL",0,0,"93133399999556222222",,terminal_output +7118,13629636,"TERMINAL",0,0,"1135554:016:01313131778444444",,terminal_output +7119,13631637,"TERMINAL",0,0,"35777333339940666666",,terminal_output 
+7120,13633651,"TERMINAL",0,0,"579995555541412888888",,terminal_output +7121,13635668,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +7122,13637724,"TERMINAL",0,0,"94133399999556222222",,terminal_output +7123,13639676,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +7124,13641688,"TERMINAL",0,0,"35777333339950666666",,terminal_output +7125,13643706,"TERMINAL",0,0,"579995555551512888888",,terminal_output +7126,13645707,"TERMINAL",0,0,"795121317777733430304:005:006:0030:00",,terminal_output +7127,13647764,"TERMINAL",0,0,"95133399999556222222",,terminal_output +7128,13649728,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +7129,13651734,"TERMINAL",0,0,"35777333339950:00666666",,terminal_output +7130,13653747,"TERMINAL",0,0,"579995555550:0150:012888888",,terminal_output +7131,13655760,"TERMINAL",0,0,"793:01314177777334404010101010",,terminal_output +7132,13657763,"TERMINAL",0,0,"95:0133399999556222222",,terminal_output +7133,13659842,"TERMINAL",0,0,"41355531311:011:016:01778444444",,terminal_output +7134,13661779,"TERMINAL",0,0,"35777333339910666666",,terminal_output +7135,13663793,"TERMINAL",0,0,"579995555511112888888",,terminal_output +7136,13665884,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +7137,13667829,"TERMINAL",0,0,"91133399999556222222",,terminal_output +7138,13669821,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +7139,13671827,"TERMINAL",0,0,"35777333339920666666",,terminal_output +7140,13673845,"TERMINAL",0,0,"579995555521212888888",,terminal_output +7141,13675862,"TERMINAL",0,0,"7921511:017777733410:003:0030303030",,terminal_output +7142,13677867,"TERMINAL",0,0,"92133399999556222222",,terminal_output +7143,13679886,"TERMINAL",0,0,"9:0135555151212121778444444",,terminal_output +7144,13681875,"TERMINAL",0,0,"35777333339930666666",,terminal_output +7145,13683893,"TERMINAL",0,0,"579995555531312888888",,terminal_output +7146,13685911,"TERMINAL",0,0,"79315:011177777334101040404040",,terminal_output +7147,13687902,"TERMINAL",0,0,"93133399999556222222",,terminal_output +7148,13689920,"TERMINAL",0,0,"1135555:017:01313131778444444",,terminal_output +7149,13691923,"TERMINAL",0,0,"35777333339940666666",,terminal_output +7150,13693934,"TERMINAL",0,0,"579995555541412888888",,terminal_output +7151,13695989,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +7152,13697952,"TERMINAL",0,0,"94133399999556222222",,terminal_output +7153,13699986,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +7154,13701975,"TERMINAL",0,0,"35777333339950666666",,terminal_output +7155,13703989,"TERMINAL",0,0,"579995555551512888888",,terminal_output +7156,13705992,"TERMINAL",0,0,"795121317777733430305:006:007:001:00",,terminal_output +7157,13708072,"TERMINAL",0,0,"95133399999556222222",,terminal_output +7158,13710019,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +7159,13712021,"TERMINAL",0,0,"3577733333991:00666666",,terminal_output +7160,13714037,"TERMINAL",0,0,"57999555551:011:012888888",,terminal_output +7161,13716039,"TERMINAL",0,0,"794:01314177777334404010101010",,terminal_output +7162,13718044,"TERMINAL",0,0,"96:0133399999556222222",,terminal_output +7163,13720065,"TERMINAL",0,0,"41355531312:012:017:01778444444",,terminal_output +7164,13722066,"TERMINAL",0,0,"35777333339910666666",,terminal_output +7165,13724083,"TERMINAL",0,0,"579995555511112888888",,terminal_output +7166,13726085,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output 
+7167,13728145,"TERMINAL",0,0,"91133399999556222222",,terminal_output +7168,13730105,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +7169,13732117,"TERMINAL",0,0,"35777333339920666666",,terminal_output +7170,13734122,"TERMINAL",0,0,"579995555521212888888",,terminal_output +7171,13736137,"TERMINAL",0,0,"7921512:01777773341:004:0030303030",,terminal_output +7172,13738147,"TERMINAL",0,0,"92133399999556222222",,terminal_output +7173,13740154,"TERMINAL",0,0,"10:0135555151212121778444444",,terminal_output +7174,13742165,"TERMINAL",0,0,"35777333339930666666",,terminal_output +7175,13744171,"TERMINAL",0,0,"579995555531312888888",,terminal_output +7176,13746182,"TERMINAL",0,0,"79316:011177777334101040404040",,terminal_output +7177,13748212,"TERMINAL",0,0,"93133399999556222222",,terminal_output +7178,13750232,"TERMINAL",0,0,"1135556:018:01313131778444444",,terminal_output +7179,13752214,"TERMINAL",0,0,"35777333339940666666",,terminal_output +7180,13754221,"TERMINAL",0,0,"579995555541412888888",,terminal_output +7181,13756239,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +7182,13758239,"TERMINAL",0,0,"94133399999556222222",,terminal_output +7183,13760249,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +7184,13762282,"TERMINAL",0,0,"35777333339950666666",,terminal_output +7185,13764265,"TERMINAL",0,0,"579995555551512888888",,terminal_output +7186,13766287,"TERMINAL",0,0,"795121317777733430306:007:008:002:00",,terminal_output +7187,13768310,"TERMINAL",0,0,"95133399999556222222",,terminal_output +7188,13770294,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +7189,13772303,"TERMINAL",0,0,"3577733333992:00666666",,terminal_output +7190,13774310,"TERMINAL",0,0,"57999555552:012:012888888",,terminal_output +7191,13776330,"TERMINAL",0,0,"795:01314177777334404010101010",,terminal_output +7192,13778330,"TERMINAL",0,0,"97:0133399999556222222",,terminal_output +7193,13780355,"TERMINAL",0,0,"41355531313:013:018:01778444444",,terminal_output +7194,13782350,"TERMINAL",0,0,"35777333339910666666",,terminal_output +7195,13784361,"TERMINAL",0,0,"579995555511112888888",,terminal_output +7196,13786381,"TERMINAL",0,0,"7911415177777334505020202020",,terminal_output +7197,13788380,"TERMINAL",0,0,"91133399999556222222",,terminal_output +7198,13790392,"TERMINAL",0,0,"5135554141111111778444444",,terminal_output +7199,13792398,"TERMINAL",0,0,"35777333339920666666",,terminal_output +7200,13794406,"TERMINAL",0,0,"579995555521212888888",,terminal_output +7201,13796446,"TERMINAL",0,0,"7921513:01777773342:005:0030303030",,terminal_output +7202,13798430,"TERMINAL",0,0,"92133399999556222222",,terminal_output +7203,13800434,"TERMINAL",0,0,"1:0135555151212121778444444",,terminal_output +7204,13802445,"TERMINAL",0,0,"35777333339930666666",,terminal_output +7205,13804457,"TERMINAL",0,0,"579995555531312888888",,terminal_output +7206,13806463,"TERMINAL",0,0,"79317:011177777334101040404040",,terminal_output +7207,13808476,"TERMINAL",0,0,"93133399999556222222",,terminal_output +7208,13810484,"TERMINAL",0,0,"1135557:019:01313131778444444",,terminal_output +7209,13812494,"TERMINAL",0,0,"35777333339940666666",,terminal_output +7210,13814502,"TERMINAL",0,0,"579995555541412888888",,terminal_output +7211,13816513,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +7212,13818522,"TERMINAL",0,0,"94133399999556222222",,terminal_output +7213,13820530,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +7214,13822542,"TERMINAL",0,0,"35777333339950666666",,terminal_output 
+7215,13824551,"TERMINAL",0,0,"579995555551512888888",,terminal_output +7216,13826560,"TERMINAL",0,0,"795121317777733430307:008:009:003:00",,terminal_output +7217,13828569,"TERMINAL",0,0,"95133399999556222222",,terminal_output +7218,13830578,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +7219,13832609,"TERMINAL",0,0,"36888444443:003:003:01777777",,terminal_output +7220,13834597,"TERMINAL",0,0,"686:00304066666223999999",,terminal_output +7221,13836604,"TERMINAL",0,0,"88:0022288888445414111111111",,terminal_output +7222,13838616,"TERMINAL",0,0,"40244430304:004:009:00667333333",,terminal_output +7223,13840628,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7224,13842636,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +7225,13844642,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +7226,13846658,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +7227,13848669,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +7228,13850689,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7229,13852684,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +7230,13854695,"TERMINAL",0,0,"6820504:0066666223999999",,terminal_output +7231,13856703,"TERMINAL",0,0,"820222888884453:016:0131313131",,terminal_output +7232,13858714,"TERMINAL",0,0,"2:0024445050202020667333333",,terminal_output +7233,13860721,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7234,13862731,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +7235,13864749,"TERMINAL",0,0,"68308:001066666223999999",,terminal_output +7236,13866748,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +7237,13868762,"TERMINAL",0,0,"1024448:0040:00303030667333333",,terminal_output +7238,13870789,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7239,13872776,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +7240,13874779,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +7241,13876794,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +7242,13878812,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +7243,13880820,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7244,13882871,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +7245,13884889,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +7246,13886847,"TERMINAL",0,0,"8502228888844531318:019:0130:014:01",,terminal_output +7247,13888847,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +7248,13890858,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7249,13892869,"TERMINAL",0,0,"46888444444:004:004:01777777",,terminal_output +7250,13894888,"TERMINAL",0,0,"687:00304066666223999999",,terminal_output +7251,13896887,"TERMINAL",0,0,"89:0022288888445414111111111",,terminal_output +7252,13898904,"TERMINAL",0,0,"40244430305:005:0020:00667333333",,terminal_output +7253,13900920,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7254,13902941,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +7255,13904958,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +7256,13906934,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +7257,13908952,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +7258,13910968,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7259,13912981,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +7260,13914972,"TERMINAL",0,0,"6820505:0066666223999999",,terminal_output +7261,13916988,"TERMINAL",0,0,"820222888884454:017:0131313131",,terminal_output 
+7262,13918989,"TERMINAL",0,0,"3:0024445050202020667333333",,terminal_output +7263,13921052,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7264,13923115,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +7265,13925074,"TERMINAL",0,0,"68309:001066666223999999",,terminal_output +7266,13927031,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +7267,13929041,"TERMINAL",0,0,"1024449:001:00303030667333333",,terminal_output +7268,13931060,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7269,13933148,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +7270,13935072,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +7271,13937143,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +7272,13939090,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +7273,13941100,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7274,13943185,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +7275,13945128,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +7276,13947178,"TERMINAL",0,0,"8502228888844531319:0130:011:015:01",,terminal_output +7277,13949139,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +7278,13951159,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7279,13953160,"TERMINAL",0,0,"46888444445:005:005:01777777",,terminal_output +7280,13955176,"TERMINAL",0,0,"688:00304066666223999999",,terminal_output +7281,13957214,"TERMINAL",0,0,"83:00:0022288888445414111111111",,terminal_output +7282,13959190,"TERMINAL",0,0,"40244430306:006:001:00667333333",,terminal_output +7283,13961199,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7284,13963255,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +7285,13965219,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +7286,13967254,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +7287,13969239,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +7288,13971247,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7289,13973286,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +7290,13975266,"TERMINAL",0,0,"6820506:0066666223999999",,terminal_output +7291,13977282,"TERMINAL",0,0,"820222888884455:018:0131313131",,terminal_output +7292,13979286,"TERMINAL",0,0,"4:0024445050202020667333333",,terminal_output +7293,13981302,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7294,13983325,"TERMINAL",0,0,"4688844444303031777777",,terminal_output +7295,13985311,"TERMINAL",0,0,"683020:001066666223999999",,terminal_output +7296,13987421,"TERMINAL",0,0,"83022288888445111141414141",,terminal_output +7297,13989330,"TERMINAL",0,0,"10244430:002:00303030667333333",,terminal_output +7298,13991348,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7299,13993358,"TERMINAL",0,0,"4688844444404041777777",,terminal_output +7300,13995360,"TERMINAL",0,0,"6840102066666223999999",,terminal_output +7301,13997455,"TERMINAL",0,0,"84022288888445212151515151",,terminal_output +7302,13999381,"TERMINAL",0,0,"2024441010404040667333333",,terminal_output +7303,14001450,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7304,14003506,"TERMINAL",0,0,"4688844444505051777777",,terminal_output +7305,14005444,"TERMINAL",0,0,"6850203066666223999999",,terminal_output +7306,14007491,"TERMINAL",0,0,"85022288888445313130:011:012:016:01",,terminal_output +7307,14009430,"TERMINAL",0,0,"3024442020505050667333333",,terminal_output +7308,14011487,"TERMINAL",0,0,"2466622222889555555",,terminal_output 
+7309,14013451,"TERMINAL",0,0,"46888444446:006:006:01777777",,terminal_output +7310,14015473,"TERMINAL",0,0,"689:00304066666223999999",,terminal_output +7311,14017527,"TERMINAL",0,0,"81:0022288888445414111111111",,terminal_output +7312,14019485,"TERMINAL",0,0,"40244430307:007:002:00667333333",,terminal_output +7313,14021522,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7314,14023570,"TERMINAL",0,0,"4688844444101011777777",,terminal_output +7315,14025512,"TERMINAL",0,0,"6810405066666223999999",,terminal_output +7316,14027516,"TERMINAL",0,0,"81022288888445515121212121",,terminal_output +7317,14029525,"TERMINAL",0,0,"5024444040101010667333333",,terminal_output +7318,14031546,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7319,14033546,"TERMINAL",0,0,"4688844444202021777777",,terminal_output +7320,14035554,"TERMINAL",0,0,"6820507:0066666223999999",,terminal_output +7321,14037597,"TERMINAL",0,0,"820222888884456:019:0131313131",,terminal_output +7322,14039572,"TERMINAL",0,0,"5:0024445050202020667333333",,terminal_output +7323,14041594,"TERMINAL",0,0,"2466622222889555555",,terminal_output +7324,14043592,"TERMINAL",0,0,"4799955555313132888888",,terminal_output +7325,14045602,"TERMINAL",0,0,"79311:011177777334101040404040",,terminal_output +7326,14047610,"TERMINAL",0,0,"93133399999556222222",,terminal_output +7327,14049623,"TERMINAL",0,0,"1135551:013:01313131778444444",,terminal_output +7328,14051631,"TERMINAL",0,0,"35777333339940666666",,terminal_output +7329,14053641,"TERMINAL",0,0,"579995555541412888888",,terminal_output +7330,14055650,"TERMINAL",0,0,"7941112177777334202050505050",,terminal_output +7331,14057660,"TERMINAL",0,0,"94133399999556222222",,terminal_output +7332,14059669,"TERMINAL",0,0,"2135551111414141778444444",,terminal_output +7333,14061678,"TERMINAL",0,0,"35777333339950666666",,terminal_output +7334,14063690,"TERMINAL",0,0,"579995555551512888888",,terminal_output +7335,14065699,"TERMINAL",0,0,"795121317777733430301:002:003:007:00",,terminal_output +7336,14067705,"TERMINAL",0,0,"95133399999556222222",,terminal_output +7337,14069717,"TERMINAL",0,0,"3135552121515151778444444",,terminal_output +7338,14071726,"TERMINAL",0,0,"3577733333997:00666666",,terminal_output +7339,14073737,"TERMINAL",0,0,"57999555557:017:012888888",,terminal_output +7340,14075746,"TERMINAL",0,0,"7920:01314177777334404010101010",,terminal_output +7341,14077755,"TERMINAL",0,0,"92:0133399999556222222",,terminal_output +7342,14079761,"TERMINAL",0,0,"41355531318:018:013:01778444444",,terminal_output +7343,14081771,"TERMINAL",0,0,"35777333339910666666",,terminal_output +7344,14083783,"TERMINAL",0,0,"579995555511112888888",,terminal_output +7345,14085797,"TERMINAL",0,0,"\r7911415130179 nishant.ku standard 2 192 PD 2025-10-04T22:15:46N/A 0:00 1-00:00:00 (Priority)777773345050202020",,terminal_output +7346,14087804,"TERMINAL",0,0,"9113339999955622222",,terminal_output +7347,14089819,"TERMINAL",0,0,"513555414111111177844444",,terminal_output +7348,14091822,"TERMINAL",0,0,"3577733333992066666",,terminal_output +7349,14093831,"TERMINAL",0,0,"57999555552121288888",,terminal_output +7350,14095864,"TERMINAL",0,0,"7921518:01777773347:0020:00303030",,terminal_output +7351,14097847,"TERMINAL",0,0,"9213339999955622222",,terminal_output +7352,14099859,"TERMINAL",0,0,"6:013555515121212177844444",,terminal_output +7353,14101869,"TERMINAL",0,0,"3577733333993066666",,terminal_output +7354,14103877,"TERMINAL",0,0,"57999555553131288888",,terminal_output 
+7355,14105889,"TERMINAL",0,0,"79312:0111777773341010404040",,terminal_output +7356,14107947,"TERMINAL",0,0,"9313339999955622222",,terminal_output +7357,14109964,"TERMINAL",0,0,"1135552:014:0131313177844444",,terminal_output +7358,14111914,"TERMINAL",0,0,"3577733333994066666",,terminal_output +7359,14113933,"TERMINAL",0,0,"57999555554141288888",,terminal_output +7360,14115933,"TERMINAL",0,0,"79411121777773342020505050",,terminal_output +7361,14117943,"TERMINAL",0,0,"9413339999955622222",,terminal_output +7362,14119951,"TERMINAL",0,0,"213555111141414177844444",,terminal_output +7363,14121976,"TERMINAL",0,0,"3577733333995066666",,terminal_output +7364,14123971,"TERMINAL",0,0,"57999555555151288888",,terminal_output +7365,14125985,"TERMINAL",0,0,"795121317777733430302:003:004:00",,terminal_output +7366,14128021,"TERMINAL",0,0,"9513339999955622222",,terminal_output +7367,14130039,"TERMINAL",0,0,"313555212151515177844444",,terminal_output +7368,14132012,"TERMINAL",0,0,"3577733333998:0066666",,terminal_output +7369,14134030,"TERMINAL",0,0,"57999555558:018:01288888",,terminal_output +7370,14136048,"TERMINAL",0,0,"791:013141777773344040101010",,terminal_output +7371,14138038,"TERMINAL",0,0,"93:013339999955622222",,terminal_output +7372,14140055,"TERMINAL",0,0,"41355531319:019:014:0177844444",,terminal_output +7373,14142146,"TERMINAL",0,0,"3577733333991066666",,terminal_output +7374,14144092,"TERMINAL",0,0,"57999555551111288888",,terminal_output +7375,14146140,"TERMINAL",0,0,"79114151777773345050202020",,terminal_output +7376,14148189,"TERMINAL",0,0,"9113339999955622222",,terminal_output +7377,14150135,"TERMINAL",0,0,"513555414111111177844444",,terminal_output +7378,14152109,"TERMINAL",0,0,"3577733333992066666",,terminal_output +7379,14154121,"TERMINAL",0,0,"57999555552121288888",,terminal_output +7380,14156126,"TERMINAL",0,0,"7921519:01777773348:001:00303030",,terminal_output +7381,14158224,"TERMINAL",0,0,"9213339999955622222",,terminal_output +7382,14160171,"TERMINAL",0,0,"7:013555515121212177844444",,terminal_output +7383,14162154,"TERMINAL",0,0,"3577733333993066666",,terminal_output +7384,14164163,"TERMINAL",0,0,"57999555553131288888",,terminal_output +7385,14166184,"TERMINAL",0,0,"79313:0111777773341010404040",,terminal_output +7386,14168261,"TERMINAL",0,0,"9313339999955622222",,terminal_output +7387,14170208,"TERMINAL",0,0,"1135553:015:0131313177844444",,terminal_output +7388,14172200,"TERMINAL",0,0,"3577733333994066666",,terminal_output +7389,14174209,"TERMINAL",0,0,"57999555554141288888",,terminal_output +7390,14176245,"TERMINAL",0,0,"79411121777773342020505050",,terminal_output +7391,14178228,"TERMINAL",0,0,"9413339999955622222",,terminal_output +7392,14180355,"TERMINAL",0,0,"213555111141414177844444",,terminal_output +7393,14182292,"TERMINAL",0,0,"3577733333995066666",,terminal_output +7394,14184258,"TERMINAL",0,0,"57999555555151288888",,terminal_output +7395,14186267,"TERMINAL",0,0,"795121317777733430303:004:005:00",,terminal_output +7396,14188309,"TERMINAL",0,0,"9513339999955622222",,terminal_output +7397,14190325,"TERMINAL",0,0,"313555212151515177844444",,terminal_output +7398,14192294,"TERMINAL",0,0,"3577733333999:0066666",,terminal_output +7399,14194305,"TERMINAL",0,0,"57999555559:019:01288888",,terminal_output +7400,14196385,"TERMINAL",0,0,"792:013141777773344040101010",,terminal_output +7401,14198327,"TERMINAL",0,0,"94:013339999955622222",,terminal_output +7402,14200343,"TERMINAL",0,0,"413555313110:0110:015:0177844444",,terminal_output 
+7403,14202346,"TERMINAL",0,0,"3577733333991066666",,terminal_output +7404,14204350,"TERMINAL",0,0,"57999555551111288888",,terminal_output +7405,14206362,"TERMINAL",0,0,"79114151777773345050202020",,terminal_output +7406,14208370,"TERMINAL",0,0,"9113339999955622222",,terminal_output +7407,14210391,"TERMINAL",0,0,"513555414111111177844444",,terminal_output +7408,14212397,"TERMINAL",0,0,"3577733333992066666",,terminal_output +7409,14214402,"TERMINAL",0,0,"57999555552121288888",,terminal_output +7410,14216412,"TERMINAL",0,0,"79215130:01777773349:002:00303030",,terminal_output +7411,14218422,"TERMINAL",0,0,"9213339999955622222",,terminal_output +7412,14220430,"TERMINAL",0,0,"8:013555515121212177844444",,terminal_output +7413,14222437,"TERMINAL",0,0,"3577733333993066666",,terminal_output +7414,14224447,"TERMINAL",0,0,"57999555553131288888",,terminal_output +7415,14226530,"TERMINAL",0,0,"79314:0111777773341010404040",,terminal_output +7416,14228472,"TERMINAL",0,0,"9313339999955622222",,terminal_output +7417,14230492,"TERMINAL",0,0,"1135554:016:0131313177844444",,terminal_output +7418,14232495,"TERMINAL",0,0,"3577733333994066666",,terminal_output +7419,14234504,"TERMINAL",0,0,"57999555554141288888",,terminal_output +7420,14236524,"TERMINAL",0,0,"79411121777773342020505050",,terminal_output +7421,14238521,"TERMINAL",0,0,"9413339999955622222",,terminal_output +7422,14240556,"TERMINAL",0,0,"213555111141414177844444",,terminal_output +7423,14242603,"TERMINAL",0,0,"3577733333995066666",,terminal_output +7424,14244554,"TERMINAL",0,0,"57999555555151288888",,terminal_output +7425,14246590,"TERMINAL",0,0,"795121317777733430304:005:006:00",,terminal_output +7426,14248569,"TERMINAL",0,0,"9513339999955622222",,terminal_output +7427,14250590,"TERMINAL",0,0,"313555212151515177844444",,terminal_output +7428,14252588,"TERMINAL",0,0,"36888444443:00:003:00:003:00:0177777",,terminal_output +7429,14254605,"TERMINAL",0,0,"683:0030406666622399999",,terminal_output +7430,14256611,"TERMINAL",0,0,"85:00222888884454141111111",,terminal_output +7431,14258621,"TERMINAL",0,0,"40244430301:001:006:0066733333",,terminal_output +7432,14260628,"TERMINAL",0,0,"246662222288955555",,terminal_output +7433,14262638,"TERMINAL",0,0,"468884444410101177777",,terminal_output +7434,14264663,"TERMINAL",0,0,"681040506666622399999",,terminal_output +7435,14266663,"TERMINAL",0,0,"810222888884455151212121",,terminal_output +7436,14268672,"TERMINAL",0,0,"502444404010101066733333",,terminal_output +7437,14270680,"TERMINAL",0,0,"246662222288955555",,terminal_output +7438,14272688,"TERMINAL",0,0,"468884444420202177777",,terminal_output +7439,14274698,"TERMINAL",0,0,"6820501:006666622399999",,terminal_output +7440,14276724,"TERMINAL",0,0,"8202228888844520:013:01313131",,terminal_output +7441,14278721,"TERMINAL",0,0,"9:002444505020202066733333",,terminal_output +7442,14280730,"TERMINAL",0,0,"246662222288955555",,terminal_output +7443,14282737,"TERMINAL",0,0,"468884444430303177777",,terminal_output +7444,14284749,"TERMINAL",0,0,"68305:00106666622399999",,terminal_output +7445,14286756,"TERMINAL",0,0,"830222888884451111414141",,terminal_output +7446,14288768,"TERMINAL",0,0,"1024445:007:0030303066733333",,terminal_output +7447,14290783,"TERMINAL",0,0,"246662222288955555",,terminal_output +7448,14292786,"TERMINAL",0,0,"468884444440404177777",,terminal_output +7449,14294795,"TERMINAL",0,0,"684010206666622399999",,terminal_output +7450,14296812,"TERMINAL",0,0,"840222888884452121515151",,terminal_output 
+7451,14298822,"TERMINAL",0,0,"202444101040404066733333",,terminal_output +7452,14300832,"TERMINAL",0,0,"246662222288955555",,terminal_output +7453,14302845,"TERMINAL",0,0,"468884444450505177777",,terminal_output +7454,14304850,"TERMINAL",0,0,"685020306666622399999",,terminal_output +7455,14306860,"TERMINAL",0,0,"8502228888844531315:016:017:01",,terminal_output +7456,14308873,"TERMINAL",0,0,"302444202050505066733333",,terminal_output +7457,14310883,"TERMINAL",0,0,"246662222288955555",,terminal_output +7458,14312893,"TERMINAL",0,0,"46888444441:001:001:0177777",,terminal_output +7459,14314902,"TERMINAL",0,0,"684:0030406666622399999",,terminal_output +7460,14316924,"TERMINAL",0,0,"86:00222888884454141111111",,terminal_output +7461,14318942,"TERMINAL",0,0,"40244430302:002:007:0066733333",,terminal_output +7462,14320936,"TERMINAL",0,0,"246662222288955555",,terminal_output +7463,14322988,"TERMINAL",0,0,"468884444410101177777",,terminal_output +7464,14324951,"TERMINAL",0,0,"681040506666622399999",,terminal_output +7465,14326957,"TERMINAL",0,0,"810222888884455151212121",,terminal_output +7466,14328977,"TERMINAL",0,0,"502444404010101066733333",,terminal_output +7467,14330996,"TERMINAL",0,0,"246662222288955555",,terminal_output +7468,14333024,"TERMINAL",0,0,"468884444420202177777",,terminal_output +7469,14334996,"TERMINAL",0,0,"6820502:006666622399999",,terminal_output +7470,14337006,"TERMINAL",0,0,"820222888884451:014:01313131",,terminal_output +7471,14339025,"TERMINAL",0,0,"20:002444505020202066733333",,terminal_output +7472,14341044,"TERMINAL",0,0,"246662222288955555",,terminal_output +7473,14343061,"TERMINAL",0,0,"468884444430303177777",,terminal_output +7474,14345077,"TERMINAL",0,0,"68306:00106666622399999",,terminal_output +7475,14347059,"TERMINAL",0,0,"830222888884451111414141",,terminal_output +7476,14349073,"TERMINAL",0,0,"1024446:008:0030303066733333",,terminal_output +7477,14351092,"TERMINAL",0,0,"246662222288955555",,terminal_output +7478,14353194,"TERMINAL",0,0,"468884444440404177777",,terminal_output +7479,14355097,"TERMINAL",0,0,"684010206666622399999",,terminal_output +7480,14357187,"TERMINAL",0,0,"840222888884452121515151",,terminal_output +7481,14359114,"TERMINAL",0,0,"202444101040404066733333",,terminal_output +7482,14361183,"TERMINAL",0,0,"246662222288955555",,terminal_output +7483,14363135,"TERMINAL",0,0,"468884444450505177777",,terminal_output +7484,14365144,"TERMINAL",0,0,"685020306666622399999",,terminal_output +7485,14367155,"TERMINAL",0,0,"8502228888844531316:017:018:01",,terminal_output +7486,14369163,"TERMINAL",0,0,"302444202050505066733333",,terminal_output +7487,14371170,"TERMINAL",0,0,"246662222288955555",,terminal_output +7488,14373265,"TERMINAL",0,0,"46888444442:002:002:0177777",,terminal_output +7489,14375211,"TERMINAL",0,0,"685:0030406666622399999",,terminal_output +7490,14377204,"TERMINAL",0,0,"87:00222888884454141111111",,terminal_output +7491,14379212,"TERMINAL",0,0,"40244430303:003:008:0066733333",,terminal_output +7492,14381223,"TERMINAL",0,0,"246662222288955555",,terminal_output +7493,14383301,"TERMINAL",0,0,"468884444410101177777",,terminal_output +7494,14385319,"TERMINAL",0,0,"681040506666622399999",,terminal_output +7495,14387252,"TERMINAL",0,0,"810222888884455151212121",,terminal_output +7496,14389264,"TERMINAL",0,0,"502444404010101066733333",,terminal_output +7497,14391280,"TERMINAL",0,0,"246662222288955555",,terminal_output +7498,14393281,"TERMINAL",0,0,"468884444420202177777",,terminal_output 
+7499,14395296,"TERMINAL",0,0,"6820503:006666622399999",,terminal_output +7500,14397302,"TERMINAL",0,0,"820222888884452:015:01313131",,terminal_output +7501,14399309,"TERMINAL",0,0,"1:002444505020202066733333",,terminal_output +7502,14401324,"TERMINAL",0,0,"246662222288955555",,terminal_output +7503,14403332,"TERMINAL",0,0,"468884444430303177777",,terminal_output +7504,14405350,"TERMINAL",0,0,"68307:00106666622399999",,terminal_output +7505,14407364,"TERMINAL",0,0,"830222888884451111414141",,terminal_output +7506,14409383,"TERMINAL",0,0,"1024447:009:0030303066733333",,terminal_output +7507,14411400,"TERMINAL",0,0,"246662222288955555",,terminal_output +7508,14413377,"TERMINAL",0,0,"468884444440404177777",,terminal_output +7509,14415384,"TERMINAL",0,0,"684010206666622399999",,terminal_output +7510,14417400,"TERMINAL",0,0,"840222888884452121515151",,terminal_output +7511,14419400,"TERMINAL",0,0,"202444101040404066733333",,terminal_output +7512,14421496,"TERMINAL",0,0,"246662222288955555",,terminal_output +7513,14423420,"TERMINAL",0,0,"468884444450505177777",,terminal_output +7514,14425439,"TERMINAL",0,0,"685020306666622399999",,terminal_output +7515,14427539,"TERMINAL",0,0,"8502228888844531317:018:019:01",,terminal_output +7516,14429448,"TERMINAL",0,0,"302444202050505066733333",,terminal_output +7517,14431457,"TERMINAL",0,0,"246662222288955555",,terminal_output +7518,14433474,"TERMINAL",0,0,"46888444443:003:003:0177777",,terminal_output +7519,14435523,"TERMINAL",0,0,"686:0030406666622399999",,terminal_output +7520,14437488,"TERMINAL",0,0,"88:00222888884454141111111",,terminal_output +7521,14439505,"TERMINAL",0,0,"40244430304:004:009:0066733333",,terminal_output +7522,14441521,"TERMINAL",0,0,"246662222288955555",,terminal_output +7523,14443517,"TERMINAL",0,0,"468884444410101177777",,terminal_output +7524,14445524,"TERMINAL",0,0,"681040506666622399999",,terminal_output +7525,14447608,"TERMINAL",0,0,"810222888884455151212121",,terminal_output +7526,14449549,"TERMINAL",0,0,"502444404010101066733333",,terminal_output +7527,14451562,"TERMINAL",0,0,"246662222288955555",,terminal_output +7528,14453589,"TERMINAL",0,0,"468884444420202177777",,terminal_output +7529,14455576,"TERMINAL",0,0,"6820504:006666622399999",,terminal_output +7530,14457647,"TERMINAL",0,0,"820222888884453:016:01313131",,terminal_output +7531,14459595,"TERMINAL",0,0,"2:013555515121212177844444",,terminal_output +7532,14461602,"TERMINAL",0,0,"3577733333993066666",,terminal_output +7533,14463613,"TERMINAL",0,0,"57999555553131288888",,terminal_output +7534,14465621,"TERMINAL",0,0,"79318:0111777773341010404040",,terminal_output +7535,14467643,"TERMINAL",0,0,"9313339999955622222",,terminal_output +7536,14469728,"TERMINAL",0,0,"1135558:0150:0131313177844444",,terminal_output +7537,14471650,"TERMINAL",0,0,"3577733333994066666",,terminal_output +7538,14473671,"TERMINAL",0,0,"57999555554141288888",,terminal_output +7539,14475668,"TERMINAL",0,0,"79411121777773342020505050",,terminal_output +7540,14477677,"TERMINAL",0,0,"9413339999955622222",,terminal_output +7541,14479686,"TERMINAL",0,0,"213555111141414177844444",,terminal_output +7542,14481702,"TERMINAL",0,0,"3577733333995066666",,terminal_output +7543,14483712,"TERMINAL",0,0,"57999555555151288888",,terminal_output +7544,14485718,"TERMINAL",0,0,"795121317777733430308:009:0040:00",,terminal_output +7545,14487747,"TERMINAL",0,0,"9513339999955622222",,terminal_output +7546,14489736,"TERMINAL",0,0,"313555212151515177844444",,terminal_output 
+7547,14491747,"TERMINAL",0,0,"3577733333994:0066666",,terminal_output +7548,14493757,"TERMINAL",0,0,"57999555554:014:01288888",,terminal_output +7549,14495840,"TERMINAL",0,0,"797:013141777773344040101010",,terminal_output +7550,14497773,"TERMINAL",0,0,"99:013339999955622222",,terminal_output +7551,14499786,"TERMINAL",0,0,"41355531315:015:0130:0177844444",,terminal_output +7552,14501796,"TERMINAL",0,0,"3577733333991066666",,terminal_output +7553,14503827,"TERMINAL",0,0,"57999555551111288888",,terminal_output +7554,14505813,"TERMINAL",0,0,"79114151777773345050202020",,terminal_output +7555,14507826,"TERMINAL",0,0,"9113339999955622222",,terminal_output +7556,14509848,"TERMINAL",0,0,"513555414111111177844444",,terminal_output +7557,14511845,"TERMINAL",0,0,"3577733333992066666",,terminal_output +7558,14513854,"TERMINAL",0,0,"57999555552121288888",,terminal_output +7559,14515867,"TERMINAL",0,0,"7921515:01777773344:007:00303030",,terminal_output +7560,14517871,"TERMINAL",0,0,"9213339999955622222",,terminal_output +7561,14519878,"TERMINAL",0,0,"3:013555515121212177844444",,terminal_output +7562,14521898,"TERMINAL",0,0,"3577733333993066666",,terminal_output +7563,14523901,"TERMINAL",0,0,"57999555553131288888",,terminal_output +7564,14525914,"TERMINAL",0,0,"79319:0111777773341010404040",,terminal_output +7565,14527922,"TERMINAL",0,0,"9313339999955622222",,terminal_output +7566,14529938,"TERMINAL",0,0,"1135559:011:0131313177844444",,terminal_output +7567,14531947,"TERMINAL",0,0,"3577733333994066666",,terminal_output +7568,14533958,"TERMINAL",0,0,"57999555554141288888",,terminal_output +7569,14535965,"TERMINAL",0,0,"79411121777773342020505050",,terminal_output +7570,14537976,"TERMINAL",0,0,"9413339999955622222",,terminal_output +7571,14539986,"TERMINAL",0,0,"213555111141414177844444",,terminal_output +7572,14541995,"TERMINAL",0,0,"3577733333995066666",,terminal_output +7573,14544009,"TERMINAL",0,0,"57999555555151288888",,terminal_output +7574,14546016,"TERMINAL",0,0,"795121317777733430309:0040:001:00",,terminal_output +7575,14548023,"TERMINAL",0,0,"9513339999955622222",,terminal_output +7576,14550033,"TERMINAL",0,0,"313555212151515177844444",,terminal_output +7577,14552042,"TERMINAL",0,0,"3577733333995:0066666",,terminal_output +7578,14554052,"TERMINAL",0,0,"57999555555:015:01288888",,terminal_output +7579,14556061,"TERMINAL",0,0,"798:013141777773344040101010",,terminal_output +7580,14558071,"TERMINAL",0,0,"910:013339999955622222",,terminal_output +7581,14560088,"TERMINAL",0,0,"41355531316:016:011:0177844444",,terminal_output +7582,14562115,"TERMINAL",0,0,"3577733333991066666",,terminal_output +7583,14564097,"TERMINAL",0,0,"57999555551111288888",,terminal_output +7584,14566110,"TERMINAL",0,0,"79114151777773345050202020",,terminal_output +7585,14568134,"TERMINAL",0,0,"9113339999955622222",,terminal_output +7586,14570159,"TERMINAL",0,0,"513555414111111177844444",,terminal_output +7587,14572136,"TERMINAL",0,0,"3577733333992066666",,terminal_output +7588,14574146,"TERMINAL",0,0,"57999555552121288888",,terminal_output +7589,14576158,"TERMINAL",0,0,"7921516:01777773345:008:00303030",,terminal_output +7590,14578270,"TERMINAL",0,0,"9213339999955622222",,terminal_output +7591,14580216,"TERMINAL",0,0,"4:013555515121212177844444",,terminal_output +7592,14582185,"TERMINAL",0,0,"3577733333993066666",,terminal_output +7593,14584195,"TERMINAL",0,0,"57999555553131288888",,terminal_output +7594,14586206,"TERMINAL",0,0,"793130:0111777773341010404040",,terminal_output 
+7595,14588212,"TERMINAL",0,0,"9313339999955622222",,terminal_output +7596,14590232,"TERMINAL",0,0,"11355540:012:0131313177844444",,terminal_output +7597,14592301,"TERMINAL",0,0,"3577733333994066666",,terminal_output +7598,14594238,"TERMINAL",0,0,"57999555554141288888",,terminal_output +7599,14596294,"TERMINAL",0,0,"79411121777773342020505050",,terminal_output +7600,14598265,"TERMINAL",0,0,"9413339999955622222",,terminal_output +7601,14600276,"TERMINAL",0,0,"213555111141414177844444",,terminal_output +7602,14602334,"TERMINAL",0,0,"3577733333995066666",,terminal_output +7603,14604293,"TERMINAL",0,0,"57999555555151288888",,terminal_output +7604,14606304,"TERMINAL",0,0,"7951213177777334303040:001:002:00",,terminal_output +7605,14608314,"TERMINAL",0,0,"9513339999955622222",,terminal_output +7606,14610337,"TERMINAL",0,0,"313555212151515177844444",,terminal_output +7607,14612330,"TERMINAL",0,0,"3577733333996:0066666",,terminal_output +7608,14614343,"TERMINAL",0,0,"57999555556:016:01288888",,terminal_output +7609,14616356,"TERMINAL",0,0,"799:013141777773344040101010",,terminal_output +7610,14618359,"TERMINAL",0,0,"91:013339999955622222",,terminal_output +7611,14620368,"TERMINAL",0,0,"41355531317:017:012:0177844444",,terminal_output +7612,14622379,"TERMINAL",0,0,"3577733333991066666",,terminal_output +7613,14624387,"TERMINAL",0,0,"57999555551111288888",,terminal_output +7614,14626407,"TERMINAL",0,0,"79114151777773345050202020",,terminal_output +7615,14628407,"TERMINAL",0,0,"9113339999955622222",,terminal_output +7616,14630417,"TERMINAL",0,0,"513555414111111177844444",,terminal_output +7617,14632440,"TERMINAL",0,0,"3577733333992066666",,terminal_output +7618,14634435,"TERMINAL",0,0,"57999555552121288888",,terminal_output +7619,14636453,"TERMINAL",0,0,"7921517:01777773346:009:00303030",,terminal_output +7620,14638453,"TERMINAL",0,0,"9213339999955622222",,terminal_output +7621,14640463,"TERMINAL",0,0,"5:013555515121212177844444",,terminal_output +7622,14642470,"TERMINAL",0,0,"3577733333993066666",,terminal_output +7623,14644483,"TERMINAL",0,0,"57999555553131288888",,terminal_output +7624,14646570,"TERMINAL",0,0,"79311:0111777773341010404040",,terminal_output +7625,14648500,"TERMINAL",0,0,"9313339999955622222",,terminal_output +7626,14650510,"TERMINAL",0,0,"1135551:013:0131313177844444",,terminal_output +7627,14652521,"TERMINAL",0,0,"3577733333994066666",,terminal_output +7628,14654528,"TERMINAL",0,0,"57999555554141288888",,terminal_output +7629,14656540,"TERMINAL",0,0,"79411121777773342020505050",,terminal_output +7630,14658551,"TERMINAL",0,0,"9413339999955622222",,terminal_output +7631,14660558,"TERMINAL",0,0,"213555111141414177844444",,terminal_output +7632,14662653,"TERMINAL",0,0,"3577733333995066666",,terminal_output +7633,14664580,"TERMINAL",0,0,"57999555555151288888",,terminal_output +7634,14666594,"TERMINAL",0,0,"7505222328888844531311:012:013:01",,terminal_output +7635,14668597,"TERMINAL",0,0,"302444202050505066733333",,terminal_output +7636,14670609,"TERMINAL",0,0,"246662222288955555",,terminal_output +7637,14672619,"TERMINAL",0,0,"46888444447:007:007:0177777",,terminal_output +7638,14674637,"TERMINAL",0,0,"6830:0030406666622399999",,terminal_output +7639,14676635,"TERMINAL",0,0,"82:00222888884454141111111",,terminal_output +7640,14678643,"TERMINAL",0,0,"40244430308:008:003:0066733333",,terminal_output +7641,14680654,"TERMINAL",0,0,"246662222288955555",,terminal_output +7642,14682665,"TERMINAL",0,0,"468884444410101177777",,terminal_output 
+7643,14684672,"TERMINAL",0,0,"681040506666622399999",,terminal_output +7644,14686683,"TERMINAL",0,0,"810222888884455151212121",,terminal_output +7645,14688693,"TERMINAL",0,0,"502444404010101066733333",,terminal_output +7646,14690709,"TERMINAL",0,0,"246662222288955555",,terminal_output +7647,14692757,"TERMINAL",0,0,"468884444420202177777",,terminal_output +7648,14694722,"TERMINAL",0,0,"6820508:006666622399999",,terminal_output +7649,14696730,"TERMINAL",0,0,"820222888884457:0130:01313131",,terminal_output +7650,14698736,"TERMINAL",0,0,"6:002444505020202066733333",,terminal_output +7651,14700758,"TERMINAL",0,0,"246662222288955555",,terminal_output +7652,14702757,"TERMINAL",0,0,"468884444430303177777",,terminal_output +7653,14704765,"TERMINAL",0,0,"68302:00106666622399999",,terminal_output +7654,14706774,"TERMINAL",0,0,"830222888884451111414141",,terminal_output +7655,14708789,"TERMINAL",0,0,"1024442:004:0030303066733333",,terminal_output +7656,14710885,"TERMINAL",0,0,"246662222288278:54:088:58:57 3:27:15635:15530:15544:27 3:41:451432:45323:452387:45",,terminal_output +7657,14712822,"TERMINAL",0,0,"46888444444040777777",,terminal_output +7658,14714831,"TERMINAL",0,0,"684010206666622999999",,terminal_output +7659,14716845,"TERMINAL",0,0,"8402228888844212151515151",,terminal_output +7660,14718847,"TERMINAL",0,0,"202444101040404066333333",,terminal_output +7661,14720857,"TERMINAL",0,0,"246662222288555555",,terminal_output +7662,14722866,"TERMINAL",0,0,"46888444445050777777",,terminal_output +7663,14724884,"TERMINAL",0,0,"685020306666622999999",,terminal_output +7664,14726883,"TERMINAL",0,0,"850222888884431312:013:014:018:01",,terminal_output +7665,14728896,"TERMINAL",0,0,"302444202050505066333333",,terminal_output +7666,14730901,"TERMINAL",0,0,"246662222288555555",,terminal_output +7667,14732912,"TERMINAL",0,0,"46888444448:008:00777777",,terminal_output +7668,14734920,"TERMINAL",0,0,"681:0030406666622999999",,terminal_output +7669,14736991,"TERMINAL",0,0,"83:002228888844414111111111",,terminal_output +7670,14738940,"TERMINAL",0,0,"40244430309:009:004:0066333333",,terminal_output +7671,14740948,"TERMINAL",0,0,"2466616 R2025-10-04T22:26:402hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 42:32 1-00:00:00 hai00403254:3253807:40 1:19:02492570:5234:026219:18:34 3:08:084385278:54:088:58:57 3:27:45635:15530:45544:27 3:42:151433:15324:152388:1517551:15",,terminal_output +7672,14742953,"TERMINAL",0,0,"4688844444410107777777",,terminal_output +7673,14744965,"TERMINAL",0,0,"68104050666666229999999",,terminal_output +7674,14746974,"TERMINAL",0,0,"8102228888884451512121212121",,terminal_output +7675,14748981,"TERMINAL",0,0,"502444104040101010663333333",,terminal_output +7676,14750988,"TERMINAL",0,0,"24666222222885555555",,terminal_output +7677,14753002,"TERMINAL",0,0,"4688844444420207777777",,terminal_output +7678,14755013,"TERMINAL",0,0,"6820509:00666666229999999",,terminal_output +7679,14757017,"TERMINAL",0,0,"820222888888448:011:013131313131",,terminal_output +7680,14759035,"TERMINAL",0,0,"7:002444205050202020663333333",,terminal_output +7681,14761036,"TERMINAL",0,0,"24666222222885555555",,terminal_output +7682,14763045,"TERMINAL",0,0,"4688844444430307777777",,terminal_output +7683,14765053,"TERMINAL",0,0,"68303:0010Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)66666229999999",,terminal_output +7684,14767063,"TERMINAL",0,0,"830222888884411114141414141",,terminal_output 
+7685,14769070,"TERMINAL",0,0,"1024443:005:00303030663333333",,terminal_output +7686,14771079,"TERMINAL",0,0,"24666Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)\r 30144 franz.sram standard 1 16 R 2025-10-04T19:59:49 2025-10-04T22:27:10 0:02 1-00:00:00 hai00522222885555555",,terminal_output +7687,14773140,"TERMINAL",0,0,"4688844444440407777777",,terminal_output +7688,14775096,"TERMINAL",0,0,"68401020666666229999999",,terminal_output +7689,14777109,"TERMINAL",0,0,"8402228888884421215151515151",,terminal_output +7690,14779120,"TERMINAL",0,0,"202444101010404040663333333",,terminal_output +7691,14781127,"TERMINAL",0,0,"24666222222885555555",,terminal_output +7692,14783139,"TERMINAL",0,0,"4688844444450507777777",,terminal_output +7693,14785222,"TERMINAL",0,0,"68502030666666229999999",,terminal_output +7694,14787167,"TERMINAL",0,0,"8502228888884431313:014:015:019:012:01",,terminal_output +7695,14789167,"TERMINAL",0,0,"302444202020505050663333333",,terminal_output +7696,14791172,"TERMINAL",0,0,"24666222222885555555",,terminal_output +7697,14793210,"TERMINAL",0,0,"468884444449:009:007777777",,terminal_output +7698,14795189,"TERMINAL",0,0,"682:003040666666229999999",,terminal_output +7699,14797224,"TERMINAL",0,0,"84:002228888884441411111111111",,terminal_output +7700,14799208,"TERMINAL",0,0,"40244430303020:0020:005:00663333333",,terminal_output +7701,14801226,"TERMINAL",0,0,"24666222222885555555",,terminal_output +7702,14803246,"TERMINAL",0,0,"46888118:331:4443:34403255:3453807:40 1:20:04494570:5235:046219:18:34 3:09:1043105278:54:088:58:57 3:28:47635:15531:47544:27 3:43:171434:17325:172389:1717552:17874",,terminal_output +7703,14805236,"TERMINAL",0,0,"68104050666662299999999",,terminal_output +7704,14807246,"TERMINAL",0,0,"81022288888445151212121212121",,terminal_output +7705,14809251,"TERMINAL",0,0,"50244440401010106633333333",,terminal_output +7706,14811270,"TERMINAL",0,0,"24666222228855555555",,terminal_output +7707,14813273,"TERMINAL",0,0,"4688844444202077777777",,terminal_output +7708,14815284,"TERMINAL",0,0,"68205040:00666662299999999",,terminal_output +7709,14817289,"TERMINAL",0,0,"82022288888449:012:01313131313131",,terminal_output +7710,14819298,"TERMINAL",0,0,"8:00244450502020206633333333",,terminal_output +7711,14821309,"TERMINAL",0,0,"24666222228855555555",,terminal_output +7712,14823417,"TERMINAL",0,0,"4688844444303077777777",,terminal_output +7713,14825365,"TERMINAL",0,0,"68304:0010666662299999999",,terminal_output +7714,14827336,"TERMINAL",0,0,"83022288888441111414141414141",,terminal_output +7715,14829346,"TERMINAL",0,0,"10244416 R2025-10-04T22:28:10hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 44:00 1-00:00:00 hai00403256:0053807:40 1:20:304930570:5235:306219:18:34 3:09:364365278:54:088:58:57 3:29:13635:15532:13544:27 3:43:431434:43325:432389:4317552:43834935",,terminal_output +7716,14831373,"TERMINAL",0,0,"2466622222288555555555",,terminal_output +7717,14833482,"TERMINAL",0,0,"468884444444040777777777",,terminal_output +7718,14835378,"TERMINAL",0,0,"6840102066666622999999999",,terminal_output +7719,14837387,"TERMINAL",0,0,"84022288888844212151515151515151",,terminal_output +7720,14839394,"TERMINAL",0,0,"20244410101040404066333333333",,terminal_output +7721,14841407,"TERMINAL",0,0,"2466622222288555555555",,terminal_output +7722,14843487,"TERMINAL",0,0,"468884444445050777777777",,terminal_output +7723,14845420,"TERMINAL",0,0,"6850203066666622999999999",,terminal_output 
+7724,14847482,"TERMINAL",0,0,"8502228888884431314:015:016:0150:013:013:013:01",,terminal_output +7725,14849438,"TERMINAL",0,0,"30244420202050505066333333333",,terminal_output +7726,14851450,"TERMINAL",0,0,"2466622222288555555555",,terminal_output +7727,14853521,"TERMINAL",0,0,"4688844444410:0010:00777777777",,terminal_output +7728,14855570,"TERMINAL",0,0,"683:00304066666622999999999",,terminal_output +7729,14857476,"TERMINAL",0,0,"85:0022288888844414111111111111111",,terminal_output +7730,14859485,"TERMINAL",0,0,"4024443030301:001:006:0066333333333",,terminal_output +7731,14861502,"TERMINAL",0,0,"2466622222288555555555",,terminal_output +7732,14863560,"TERMINAL",0,0,"46888Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)444441010777777777",,terminal_output +7733,14865579,"TERMINAL",0,0,"681040506666622999999999",,terminal_output +7734,14867552,"TERMINAL",0,0,"8102228888844515121212121212121",,terminal_output +7735,14869528,"TERMINAL",0,0,"502444404010101066333333333",,terminal_output +7736,14871592,"TERMINAL",0,0,"246662222288555555555",,terminal_output +7737,14873550,"TERMINAL",0,0,"46888444442020777777777",,terminal_output +7738,14875557,"TERMINAL",0,0,"6820501:006666622999999999",,terminal_output +7739,14877563,"TERMINAL",0,0,"820222888884430:013:0131313131313131",,terminal_output +7740,14879578,"TERMINAL",0,0,"9:002444505020202066333333333",,terminal_output +7741,14881587,"TERMINAL",0,0,"246662222288555555555",,terminal_output +7742,14883602,"TERMINAL",0,0,"57999555553131888888888",,terminal_output +7743,14885619,"TERMINAL",0,0,"79315:01117777733101040404040404040",,terminal_output +7744,14887614,"TERMINAL",0,0,"9313339999955222222222",,terminal_output +7745,14889622,"TERMINAL",0,0,"11355516 R2025-10-04T22:29:101hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 45:01 1-00:00:00 hai00403257:0153807:40 1:21:314931570:5236:316219:18:34 3:10:374375278:54:088:58:57 3:30:14635:1553:14544:27 3:44:441435:44326:4423850:441753:448449452046",,terminal_output +7746,14891629,"TERMINAL",0,0,"35777333333996666666666",,terminal_output +7747,14893641,"TERMINAL",0,0,"5799955555541418888888888",,terminal_output +7748,14895651,"TERMINAL",0,0,"794111217777773320205050505050505050",,terminal_output +7749,14897765,"TERMINAL",0,0,"941333999999552222222222",,terminal_output +7750,14899667,"TERMINAL",0,0,"213555111111414141774444444444",,terminal_output +7751,14901677,"TERMINAL",0,0,"35777333333996666666666",,terminal_output +7752,14903694,"TERMINAL",0,0,"5799955555551518888888888",,terminal_output +7753,14905697,"TERMINAL",0,0,"795121317777773330305:006:007:001:004:004:004:004:00",,terminal_output +7754,14907796,"TERMINAL",0,0,"951333999999552222222222",,terminal_output +7755,14909742,"TERMINAL",0,0,"313555212121515151774444444444",,terminal_output +7756,14911725,"TERMINAL",0,0,"35777333333996666666666",,terminal_output +7757,14913743,"TERMINAL",0,0,"579995555551:011:018888888888",,terminal_output +7758,14915744,"TERMINAL",0,0,"794:0131417777773340401010101010101010",,terminal_output +7759,14917831,"TERMINAL",0,0,"96:01333999999552222222222",,terminal_output +7760,14919762,"TERMINAL",0,0,"4135553131312:012:017:01774444444444",,terminal_output +7761,14921768,"TERMINAL",0,0,"35777333333996666666666",,terminal_output +7762,14923879,"TERMINAL",0,0,"57999Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)5555511118888888888",,terminal_output 
+7763,14925819,"TERMINAL",0,0,"79114151777773350502020202020202020",,terminal_output +7764,14927866,"TERMINAL",0,0,"91133399999552222222222",,terminal_output +7765,14929807,"TERMINAL",0,0,"5135554141111111774444444444",,terminal_output +7766,14931816,"TERMINAL",0,0,"3577733333996666666666",,terminal_output +7767,14933825,"TERMINAL",0,0,"579995555521218888888888",,terminal_output +7768,14935854,"TERMINAL",0,0,"7921512:0177777331:004:003030303030303030",,terminal_output +7769,14937844,"TERMINAL",0,0,"92133399999552222222222",,terminal_output +7770,14939852,"TERMINAL",0,0,"30:0135555151212121774444444444",,terminal_output +7771,14941864,"TERMINAL",0,0,"3577733333996666666666",,terminal_output +7772,14943881,"TERMINAL",0,0,"579995555531318888888888",,terminal_output +7773,14945877,"TERMINAL",0,0,"79316:0111777773310104040404040404040",,terminal_output +7774,14947890,"TERMINAL",0,0,"93133399999552222222222",,terminal_output +7775,14949900,"TERMINAL",0,0,"11355516 R2025-10-04T22:30:101hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 46:01 1-00:00:00 hai00403258:0153807:40 1:22:314931570:5237:316219:18:34 3:11:374375278:54:088:58:57 3:31:14635:1554:14544:27 3:45:441436:44327:4423851:441754:44844945204614",,terminal_output +7776,14951907,"TERMINAL",0,0,"357773333339966666666666",,terminal_output +7777,14953923,"TERMINAL",0,0,"57999555555414188888888888",,terminal_output +7778,14955946,"TERMINAL",0,0,"79411121777777332020505050505050505050",,terminal_output +7779,14957935,"TERMINAL",0,0,"9413339999995522222222222",,terminal_output +7780,14959945,"TERMINAL",0,0,"2135551111114141417744444444444",,terminal_output +7781,14961952,"TERMINAL",0,0,"357773333339966666666666",,terminal_output +7782,14963980,"TERMINAL",0,0,"57999555555515188888888888",,terminal_output +7783,14965971,"TERMINAL",0,0,"795121317777773330306:007:008:002:005:005:005:005:005:00",,terminal_output +7784,14968007,"TERMINAL",0,0,"9513339999995522222222222",,terminal_output +7785,14969987,"TERMINAL",0,0,"3135552121215151517744444444444",,terminal_output +7786,14972007,"TERMINAL",0,0,"357773333339966666666666",,terminal_output +7787,14974018,"TERMINAL",0,0,"579995555552:012:0188888888888",,terminal_output +7788,14976015,"TERMINAL",0,0,"795:013141777777334040101010101010101010",,terminal_output +7789,14978026,"TERMINAL",0,0,"97:013339999995522222222222",,terminal_output +7790,14980032,"TERMINAL",0,0,"4135553131313:013:018:017744444444444",,terminal_output +7791,14982045,"TERMINAL",0,0,"357773333339966666666666",,terminal_output +7792,14984051,"TERMINAL",0,0,"57999555555111188888888888",,terminal_output +7793,14986059,"TERMINAL",0,0,"79114151777777335050202020202020202020",,terminal_output +7794,14988076,"TERMINAL",0,0,"9113339999995522222222222",,terminal_output +7795,14990076,"TERMINAL",0,0,"513555Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)41411111117744444444444",,terminal_output +7796,14992088,"TERMINAL",0,0,"35777333339966666666666",,terminal_output +7797,14994100,"TERMINAL",0,0,"5799955555212188888888888",,terminal_output +7798,14996105,"TERMINAL",0,0,"7921513:0177777332:005:00303030303030303030",,terminal_output +7799,14998116,"TERMINAL",0,0,"921333999995522222222222",,terminal_output +7800,15000166,"TERMINAL",0,0,"1:01355551512121217744444444444",,terminal_output +7801,15002133,"TERMINAL",0,0,"35777333339966666666666",,terminal_output +7802,15004141,"TERMINAL",0,0,"5799955555313188888888888",,terminal_output 
+7803,15006148,"TERMINAL",0,0,"79317:011177777331010404040404040404040",,terminal_output +7804,15008159,"TERMINAL",0,0,"931333999995522222222222",,terminal_output +7805,15010166,"TERMINAL",0,0,"11355516 R2025-10-04T22:31:101hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 47:01 1-00:00:00 hai00403259:0153807:40 1:23:314931570:5238:316219:18:34 3:12:374375278:54:088:58:57 3:32:14635:1555:14544:27 3:46:441437:44328:4423852:441755:4484494520461429972 alfred.ngu6:35:347:57:27 4:33:441",,terminal_output +7806,15012175,"TERMINAL",0,0,"3577733333399666666666666",,terminal_output +7807,15014185,"TERMINAL",0,0,"579995555554141888888888888",,terminal_output +7808,15016200,"TERMINAL",0,0,"7941112177777733202050505050505050505050",,terminal_output +7809,15018206,"TERMINAL",0,0,"94133399999955222222222222",,terminal_output +7810,15020221,"TERMINAL",0,0,"21355511111141414177444444444444",,terminal_output +7811,15022228,"TERMINAL",0,0,"3577733333399666666666666",,terminal_output +7812,15024232,"TERMINAL",0,0,"579995555555151888888888888",,terminal_output +7813,15026245,"TERMINAL",0,0,"795121317777773330307:008:009:003:006:006:006:006:006:004:00",,terminal_output +7814,15028255,"TERMINAL",0,0,"95133399999955222222222222",,terminal_output +7815,15030263,"TERMINAL",0,0,"31355521212151515177444444444444",,terminal_output +7816,15032269,"TERMINAL",0,0,"3577733333399666666666666",,terminal_output +7817,15034279,"TERMINAL",0,0,"579995555553:013:01888888888888",,terminal_output +7818,15036293,"TERMINAL",0,0,"796:01314177777733404010101010101010101010",,terminal_output +7819,15038298,"TERMINAL",0,0,"98:0133399999955222222222222",,terminal_output +7820,15040313,"TERMINAL",0,0,"4135553131314:014:019:0177444444444444",,terminal_output +7821,15042324,"TERMINAL",0,0,"3577733333399666666666666",,terminal_output +7822,15044323,"TERMINAL",0,0,"579995555551111888888888888",,terminal_output +7823,15046333,"TERMINAL",0,0,"7911415177777733505020202020202020202020",,terminal_output +7824,15048343,"TERMINAL",0,0,"911333Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)9999955222222222222",,terminal_output +7825,15050348,"TERMINAL",0,0,"513555414111111177444444444444",,terminal_output +7826,15052360,"TERMINAL",0,0,"357773333399666666666666",,terminal_output +7827,15054367,"TERMINAL",0,0,"57999555552121888888888888",,terminal_output +7828,15056386,"TERMINAL",0,0,"7921514:0177777333:006:0030303030303030303030",,terminal_output +7829,15058389,"TERMINAL",0,0,"9213339999955222222222222",,terminal_output +7830,15060396,"TERMINAL",0,0,"2:013555515121212177444444444444",,terminal_output +7831,15062402,"TERMINAL",0,0,"357773333399666666666666",,terminal_output +7832,15064412,"TERMINAL",0,0,"57999555553131888888888888",,terminal_output +7833,15066516,"TERMINAL",0,0,"79318:01117777733101040404040404040404040",,terminal_output +7834,15068431,"TERMINAL",0,0,"9313339999955222222222222",,terminal_output +7835,15070440,"TERMINAL",0,0,"11355516 R2025-10-04T22:32:101hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 48:01 1-00:00:00 hai004032:10 1:00:0153807:40 1:24:314931570:5239:316219:18:34 3:13:374375278:54:088:58:57 3:33:14635:1556:14544:27 3:47:441438:44329:4423853:441756:4484494520461429972 alfred.ngu6:35:347:57:27 4:34:44130031:53:391:53:56 10:38:15",,terminal_output +7836,15072450,"TERMINAL",0,0,"35777333333996666666666667",,terminal_output +7837,15074456,"TERMINAL",0,0,"5799955555541418888888888889",,terminal_output 
+7838,15076466,"TERMINAL",0,0,"794111217777773320205050505050505050505021",,terminal_output +7839,15078476,"TERMINAL",0,0,"941333999999552222222222223",,terminal_output +7840,15080544,"TERMINAL",0,0,"213555111111414141774444444444445",,terminal_output +7841,15082592,"TERMINAL",0,0,"35777333333996666666666667",,terminal_output +7842,15084502,"TERMINAL",0,0,"5799955555551518888888888889",,terminal_output +7843,15086522,"TERMINAL",0,0,"795121317777773330308:009:0050:004:007:007:007:007:007:005:0031",,terminal_output +7844,15088523,"TERMINAL",0,0,"951333999999552222222222223",,terminal_output +7845,15090532,"TERMINAL",0,0,"313555212121515151774444444444445",,terminal_output +7846,15092539,"TERMINAL",0,0,"35777333333996666666666667",,terminal_output +7847,15094548,"TERMINAL",0,0,"579995555554:014:018888888888889",,terminal_output +7848,15096557,"TERMINAL",0,0,"797:0131417777773340401010101010101010101041",,terminal_output +7849,15098566,"TERMINAL",0,0,"99:01333999999552222222222223",,terminal_output +7850,15100586,"TERMINAL",0,0,"41355516 R2025-10-04T22:32:401hai005118:331:4448:314032:10 1:00:3153807:40 1:25:01495:01570:5240:016219:18:34 3:14:074375278:54:088:58:57 3:33:44635:1556:44544:27 3:48:141439:143250:142384:141757:1484494520461429972 alfred.ngu6:35:347:57:27 4:35:14130031:53:391:53:56 10:38:4510:30041:455",,terminal_output +7851,15102664,"TERMINAL",0,0,"357773333339966666666666677",,terminal_output +7852,15104594,"TERMINAL",0,0,"6810405066666612129999999999995050",,terminal_output +7853,15106604,"TERMINAL",0,0,"8102228888884451512121212121212121212122",,terminal_output +7854,15108612,"TERMINAL",0,0,"5024441040401010106633333333333344",,terminal_output +7855,15110633,"TERMINAL",0,0,"246662222228855555555555566",,terminal_output +7856,15112699,"TERMINAL",0,0,"46888444444202077777777777788",,terminal_output +7857,15114645,"TERMINAL",0,0,"6820505:00666666229999999999999:002:00",,terminal_output +7858,15116654,"TERMINAL",0,0,"820222888888444:017:013131313131313131313122",,terminal_output +7859,15118661,"TERMINAL",0,0,"3:0024442050502020206633333333333344",,terminal_output +7860,15120686,"TERMINAL",0,0,"246662222228855555555555566",,terminal_output +7861,15122679,"TERMINAL",0,0,"46888444444303077777777777788",,terminal_output +7862,15124694,"TERMINAL",0,0,"68309:0010666666229999999999991010",,terminal_output +7863,15126699,"TERMINAL",0,0,"8302228888884411114141414141414141414122",,terminal_output +7864,15128705,"TERMINAL",0,0,"102444Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)9:001:003030306633333333333344",,terminal_output +7865,15130718,"TERMINAL",0,0,"2466616 R2025-10-04T22:33:102hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 49:02 1-00:00:00 hai004032:10 1:01:0253807:40 1:25:32492570:5240:326219:18:34 3:14:384385278:54:088:58:57 3:34:15635:1557:15544:27 3:48:451439:453250:452384:451757:4585495520561529972 alfred.ngu6:35:347:57:27 4:35:45130031:53:391:53:56 10:39:1610:30042:16529993 nishant.k2 3843T18:29:1801:48:31 20:44:41[002,007]",,terminal_output +7866,15132769,"TERMINAL",0,0,"46888Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)559:492:33 0:045144:10 49:0444032:10 1:01:04584491:0725:345720:52:40 1:40:3462404339:18:339:18:34 3:14:40754:0884:17655:57 3:37:175548:474349:473420:472284:4777187419750730121 franz.sram8:35:158:35:27 3:57:47629976:35:347:57:27 4:35:4723:39339:18130031 alfred.ng1 164T11:50:3011:50:56 10:42:18005",,terminal_output 
+7867,15134740,"TERMINAL",0,0,"68401020666666229999999999992020",,terminal_output +7868,15136748,"TERMINAL",0,0,"8402228888884421215151515151515151515122",,terminal_output +7869,15138760,"TERMINAL",0,0,"2024441010104040406633333333333344",,terminal_output +7870,15140858,"TERMINAL",0,0,"246662222228855555555555566",,terminal_output +7871,15142806,"TERMINAL",0,0,"46888444444505077777777777788",,terminal_output +7872,15144821,"TERMINAL",0,0,"68502030666666229999999999993030",,terminal_output +7873,15146802,"TERMINAL",0,0,"8502228888884431319:0150:011:015:018:018:018:018:018:016:0122",,terminal_output +7874,15148810,"TERMINAL",0,0,"3024442020205050506633333333333344",,terminal_output +7875,15150817,"TERMINAL",0,0,"246662222228855555555555566",,terminal_output +7876,15152827,"TERMINAL",0,0,"468884444445:005:0077777777777788",,terminal_output +7877,15154837,"TERMINAL",0,0,"688:003040666666229999999999994040",,terminal_output +7878,15156848,"TERMINAL",0,0,"820:002228888884441411111111111111111111122",,terminal_output +7879,15158867,"TERMINAL",0,0,"4024443030306:006:001:006633333333333344",,terminal_output +7880,15160870,"TERMINAL",0,0,"24666418:331:4449:324032:10 1:01:3253807:40 1:26:02492570:5241:026219:18:34 3:15:084385278:54:088:58:57 3:34:45635:1557:45544:27 3:49:1514350:15321:152385:151758:1585495520561529972 alfred.ngu6:35:347:57:27 4:36:15130031:53:391:53:56 10:39:4610:30042:46529993 nishant.k2 3843T18:29:1801:48:31 20:45:11[002,007]",,terminal_output +7881,15162983,"TERMINAL",0,0,"46888444441010777777777777883",,terminal_output +7882,15164922,"TERMINAL",0,0,"68104050666662299999999999950505",,terminal_output +7883,15166897,"TERMINAL",0,0,"8102228888844515121212121212121212121227",,terminal_output +7884,15168912,"TERMINAL",0,0,"502444404010101066333333333333449",,terminal_output +7885,15170964,"TERMINAL",0,0,"2466622222885555555555556621",,terminal_output +7886,15173010,"TERMINAL",0,0,"46888444442020777777777777883",,terminal_output +7887,15174937,"TERMINAL",0,0,"6820506:00666662299999999999940:003:005",,terminal_output +7888,15176945,"TERMINAL",0,0,"82022288888445:018:0131313131313131313131227",,terminal_output +7889,15178964,"TERMINAL",0,0,"4:002444505020202066333333333333449",,terminal_output +7890,15180981,"TERMINAL",0,0,"2466622222885555555555556631",,terminal_output +7891,15182973,"TERMINAL",0,0,"46888444443030777777777777883",,terminal_output +7892,15184991,"TERMINAL",0,0,"683040:0010666662299999999999910105",,terminal_output +7893,15187041,"TERMINAL",0,0,"8302228888844111141414141414141414141227",,terminal_output +7894,15189061,"TERMINAL",0,0,"10244450:002:0030303066333333333333449",,terminal_output +7895,15191035,"TERMINAL",0,0,"2466616 R2025-10-04T22:34:102hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 50:02 1-00:00:00 hai004032:10 1:02:0253807:40 1:26:32492570:5241:326219:18:34 3:15:384385278:54:088:58:57 3:35:15635:1558:15544:27 3:49:4514350:45321:452385:451758:4585495520561529972 alfred.ngu6:35:347:57:27 4:36:45130031:53:391:53:56 10:40:1610:3003:16529993 nishant.k2 3843T18:29:1801:48:31 20:45:41[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:35:16006",,terminal_output +7896,15193083,"TERMINAL",0,0,"4688844444440407777777777778838",,terminal_output +7897,15195029,"TERMINAL",0,0,"68401020666666229999999999992020520",,terminal_output +7898,15197077,"TERMINAL",0,0,"840222888888442121515151515151515151512272",,terminal_output +7899,15199052,"TERMINAL",0,0,"202444101010404040663333333333334494",,terminal_output 
+7900,15201063,"TERMINAL",0,0,"246662222228855555555555566516",,terminal_output +7901,15203072,"TERMINAL",0,0,"4688844444450507777777777778838",,terminal_output +7902,15205132,"TERMINAL",0,0,"68502030666666229999999999993030530",,terminal_output +7903,15207091,"TERMINAL",0,0,"85022288888844313150:011:012:016:019:019:019:019:019:017:012272",,terminal_output +7904,15209102,"TERMINAL",0,0,"302444202020505050663333333333334494",,terminal_output +7905,15211110,"TERMINAL",0,0,"2466622222288555555555555666:016",,terminal_output +7906,15213156,"TERMINAL",0,0,"468884444446:006:007777777777778838",,terminal_output +7907,15215176,"TERMINAL",0,0,"689:003040666666229999999999994040540",,terminal_output +7908,15217136,"TERMINAL",0,0,"81:00222888888444141111111111111111111112272",,terminal_output +7909,15219148,"TERMINAL",0,0,"4024443030307:007:002:00663333333333334494",,terminal_output +7910,15221159,"TERMINAL",0,0,"246662222228855555555555566116",,terminal_output +7911,15223190,"TERMINAL",0,0,"4688844444410107777777777778838",,terminal_output +7912,15225176,"TERMINAL",0,0,"68104050Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)66666229999999999995050550",,terminal_output +7913,15227190,"TERMINAL",0,0,"81022288888445151212121212121212121212272",,terminal_output +7914,15229196,"TERMINAL",0,0,"5024444040101010663333333333334494",,terminal_output +7915,15231205,"TERMINAL",0,0,"24666222228855555555555566216",,terminal_output +7916,15233223,"TERMINAL",0,0,"468884444420207777777777778838",,terminal_output +7917,15235227,"TERMINAL",0,0,"6820507:0066666229999999999991:004:0056:00",,terminal_output +7918,15237249,"TERMINAL",0,0,"82022288888446:019:01313131313131313131312272",,terminal_output +7919,15239244,"TERMINAL",0,0,"5:0024445050202020663333333333334494",,terminal_output +7920,15241253,"TERMINAL",0,0,"24666222228855555555555566316",,terminal_output +7921,15243262,"TERMINAL",0,0,"468884444430307777777777778838",,terminal_output +7922,15245269,"TERMINAL",0,0,"68301:001066666229999999999991010510",,terminal_output +7923,15247280,"TERMINAL",0,0,"83022288888441111414141414141414141412272",,terminal_output +7924,15249290,"TERMINAL",0,0,"1024441:003:00303030663333333333334494",,terminal_output +7925,15251301,"TERMINAL",0,0,"2466616 R2025-10-04T22:35:102hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 51:02 1-00:00:00 hai004032:10 1:03:0253807:40 1:27:32492570:5242:326219:18:34 3:16:384385278:54:088:58:57 3:36:15635:1559:15544:27 3:50:451431:45322:452386:451759:4585495520561529972 alfred.ngu6:35:347:57:27 4:37:45130031:53:391:53:56 10:41:1610:3004:16529993 nishant.k2 3843T18:29:1801:48:31 20:46:41[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:36:160069461",,terminal_output +7926,15253309,"TERMINAL",0,0,"46888444444404077777777777788388",,terminal_output +7927,15255316,"TERMINAL",0,0,"6840102066666622999999999999202052020",,terminal_output +7928,15257329,"TERMINAL",0,0,"8402228888884421215151515151515151515122722",,terminal_output +7929,15259344,"TERMINAL",0,0,"2024441010104040406633333333333344944",,terminal_output +7930,15261346,"TERMINAL",0,0,"2466622222288555555555555665166",,terminal_output +7931,15263366,"TERMINAL",0,0,"46888444444505077777777777788388",,terminal_output +7932,15265382,"TERMINAL",0,0,"6850203066666622999999999999303053030",,terminal_output +7933,15267379,"TERMINAL",0,0,"8502228888884431311:012:013:017:014:00:014:00:014:00:014:00:014:00:018:0122722",,terminal_output 
+7934,15269387,"TERMINAL",0,0,"3024442020205050506633333333333344944",,terminal_output +7935,15271399,"TERMINAL",0,0,"2466622222288555555555555667:0166",,terminal_output +7936,15273405,"TERMINAL",0,0,"468884444447:007:0077777777777788388",,terminal_output +7937,15275415,"TERMINAL",0,0,"6840:00304066666622999999999999404054040",,terminal_output +7938,15277423,"TERMINAL",0,0,"82:002228888884441411111111111111111111122722",,terminal_output +7939,15279431,"TERMINAL",0,0,"4024443030308:008:003:006633333333333344944",,terminal_output +7940,15281449,"TERMINAL",0,0,"2466622222288555555555555661166",,terminal_output +7941,15283502,"TERMINAL",0,0,"46888444444101077777777777788388",,terminal_output +7942,15285459,"TERMINAL",0,0,"6810405066666622999999999999505055050",,terminal_output +7943,15287496,"TERMINAL",0,0,"810222Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)888884451512121212121212121212122722",,terminal_output +7944,15289479,"TERMINAL",0,0,"50244440401010106633333333333344944",,terminal_output +7945,15291499,"TERMINAL",0,0,"246662222288555555555555662166",,terminal_output +7946,15293538,"TERMINAL",0,0,"4688844444202077777777777788388",,terminal_output +7947,15295506,"TERMINAL",0,0,"6820508:0066666229999999999992:005:0057:007:00",,terminal_output +7948,15297528,"TERMINAL",0,0,"82022288888447:0140:013131313131313131313122722",,terminal_output +7949,15299521,"TERMINAL",0,0,"6:00244450502020206633333333333344944",,terminal_output +7950,15301539,"TERMINAL",0,0,"246662222288555555555555663166",,terminal_output +7951,15303573,"TERMINAL",0,0,"4688844444303077777777777788388",,terminal_output +7952,15305553,"TERMINAL",0,0,"\r68302:00106696570:5243:266219:18:34 3:17:324325278:54:088:58:57 3:37:09635:15540:09544:27 3:51:391432:39323:392387:39175:27 4:00:3989499520961929972 alfred.ngu6:35:347:5738:39130031:53:391:53:56 10:42:1010:3005:10529993 nishant.k2 3843T18:29:1801:48:31 20:47:35[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:37:1000694101",,terminal_output +7953,15307569,"TERMINAL",0,0,"83022288884411114141414141414141414122722",,terminal_output +7954,15309572,"TERMINAL",0,0,"102444Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)16 R2025-10-04T22:36:10hai004\r 30155 franz.sram standard 1 16 R 2025-10-04T20:00:12 2025-10-04T22:36:10 0:00 1-00:00:00 hai0052:004:0030306633333333333344944",,terminal_output +7955,15311581,"TERMINAL",0,0,"2466622222288555555555555664166",,terminal_output +7956,15313598,"TERMINAL",0,0,"47999555555414188888888888899499",,terminal_output +7957,15315602,"TERMINAL",0,0,"7941112177777733202050505050505050505050212162121",,terminal_output +7958,15317616,"TERMINAL",0,0,"9413339999995522222222222233833",,terminal_output +7959,15319626,"TERMINAL",0,0,"21355511111111414177444444444444555055",,terminal_output +7960,15321633,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +7961,15323653,"TERMINAL",0,0,"57999555555515188888888888899499",,terminal_output +7962,15325656,"TERMINAL",0,0,"795121317777773330302:003:004:008:001:001:001:001:001:009:00313163131",,terminal_output +7963,15327740,"TERMINAL",0,0,"9513339999995522222222222233833",,terminal_output +7964,15329674,"TERMINAL",0,0,"31355521212121515177444444444444558:0055",,terminal_output +7965,15331683,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +7966,15333696,"TERMINAL",0,0,"579995555558:018:0188888888888899499",,terminal_output 
+7967,15335706,"TERMINAL",0,0,"791:01314177777733404010101010101010101010414164141",,terminal_output +7968,15337775,"TERMINAL",0,0,"93:013339999995522222222222233833",,terminal_output +7969,15339725,"TERMINAL",0,0,"413555313131319:014:0177444444444444551055",,terminal_output +7970,15341735,"TERMINAL",0,0,"\r35777520:00:12354119:18:331:4452:334032:10 1:04:3353907:40 1:29:0370:5244:036219:18:34 3:18:094395278:54:088:58:57 3:37:46635:15540:46544:27 3:52:161433:16324:162388:16175:27 4:01:1686496520661629972 alfred.ngu6:35:347:5739:16130031:53:391:53:56 10:42:4710:3005:47529993 nishant.k2 3843T18:29:1801:48:31 20:48:12[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:37:470069471",,terminal_output +7971,15343799,"TERMINAL",0,0,"5799955555111188888888888899499",,terminal_output +7972,15345763,"TERMINAL",0,0,"791141517777733505020202020202020202020515165151",,terminal_output +7973,15347761,"TERMINAL",0,0,"911333999995522222222222233833",,terminal_output +7974,15349772,"TERMINAL",0,0,"513555414141111177444444444444552055",,terminal_output +7975,15351779,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +7976,15353797,"TERMINAL",0,0,"\r579994119:18:331:4452:454032:10 1:04:4553907:40 1:29:1570:5244:156219:18:34 3:18:2143215278:54:088:58:57 3:37:58635:15540:58544:27 3:52:281433:28324:282388:28175:27 4:01:2888498520861829972 alfred.ngu6:35:347:5739:28130031:53:391:53:56 10:42:5910:3005:59529993 nishant.k2 3843T18:29:1801:48:31 20:48:24[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:37:590069491",,terminal_output +7977,15355802,"TERMINAL",0,0,"7921519:017777338:001:00303030303030303030303:016:0168:018:01",,terminal_output +7978,15357827,"TERMINAL",0,0,"92133399995522222222222233833",,terminal_output +7979,15359826,"TERMINAL",0,0,"7:0135555151212177444444444444553055",,terminal_output +7980,15361831,"TERMINAL",0,0,"3577733339966666666666677277",,terminal_output +7981,15363848,"TERMINAL",0,0,"579995555313188888888888899499",,terminal_output +7982,15365865,"TERMINAL",0,0,"79313:0111777733101040404040404040404040111161111",,terminal_output +7983,15367858,"TERMINAL",0,0,"93133399995522222222222233833",,terminal_output +7984,15369870,"TERMINAL",0,0,"\r113555616 R2025-10-04T22:37:101hai004716 R2025-10-04T22:37:101hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 53:01 1-00:00:00 hai004032:10 1:05:0153907:40 1:29:3170:5244:316219:18:34 3:18:374375278:54:088:58:57 3:38:14635:15541:14544:27 3:52:441433:44324:442388:44175:27 4:01:4484494520461429972 alfred.ngu6:35:347:5739:44130031:53:391:53:56 10:43:1510:3006:15529993 nishant.k2 3843T18:29:1801:48:31 20:48:40[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:38:150069451",,terminal_output +7985,15371878,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +7986,15373894,"TERMINAL",0,0,"57999555555414188888888888899499",,terminal_output +7987,15375969,"TERMINAL",0,0,"7941112177777733202050505050505050505050212162121",,terminal_output +7988,15377908,"TERMINAL",0,0,"9413339999995522222222222233833",,terminal_output +7989,15379919,"TERMINAL",0,0,"21355511111111414177444444444444555055",,terminal_output +7990,15381929,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +7991,15383957,"TERMINAL",0,0,"57999555555515188888888888899499",,terminal_output +7992,15385975,"TERMINAL",0,0,"795121317777773330303:004:005:009:002:002:002:002:002:0040:00313163131",,terminal_output +7993,15388050,"TERMINAL",0,0,"9513339999995522222222222233833",,terminal_output 
+7994,15389968,"TERMINAL",0,0,"31355521212121515177444444444444559:0055",,terminal_output +7995,15391975,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +7996,15393988,"TERMINAL",0,0,"579995555559:019:0188888888888899499",,terminal_output +7997,15396043,"TERMINAL",0,0,"792:01314177777733404010101010101010101010414164141",,terminal_output +7998,15398087,"TERMINAL",0,0,"94:01333Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)69499995522222222222233833",,terminal_output +7999,15400017,"TERMINAL",0,0,"\r41355516 R2025-10-04T22:37:401hai005\r 30156 franz.sram standard 1 16 R 2025-10-04T20:00:12 2025-10-04T22:37:10 0:31 1-00:00:00 hai0044119:18:331:4453:31032:10 1:05:3153907:40 1:30:0170:5245:016219:18:34 3:19:074375278:54:088:58:57 3:38:44635:15541:44544:27 3:53:141434:14325:142389:14175:27 4:02:1484494520461429972 alfred.ngu6:35:347:5740:14130031:53:391:53:56 10:43:4510:3006:45529993 nishant.k2 3843T18:29:1801:48:31 20:49:10[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:38:450069451",,terminal_output +8000,15402025,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +8001,15404044,"TERMINAL",0,0,"57999555555111188888888888899499",,terminal_output +8002,15406063,"TERMINAL",0,0,"79114151Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)84075777733505020202020202020202020515165151",,terminal_output +8003,15408123,"TERMINAL",0,0,"911333999995522222222222233833",,terminal_output +8004,15410139,"TERMINAL",0,0,"513555114141111177444444444444552055",,terminal_output +8005,15412122,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8006,15414084,"TERMINAL",0,0,"5799955555212188888888888899499",,terminal_output +8007,15416093,"TERMINAL",0,0,"79215150:0177777339:002:00303030303030303030304:017:0169:019:01",,terminal_output +8008,15418156,"TERMINAL",0,0,"921333999995522222222222233833",,terminal_output +8009,15420113,"TERMINAL",0,0,"8:013555215151212177444444444444553055",,terminal_output +8010,15422123,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8011,15424133,"TERMINAL",0,0,"5799955555313188888888888899499",,terminal_output +8012,15426148,"TERMINAL",0,0,"79314:01117777733101040404040404040404040111161111",,terminal_output +8013,15428151,"TERMINAL",0,0,"931333999995522222222222233833",,terminal_output +8014,15430161,"TERMINAL",0,0,"\r11355516 R2025-10-04T22:38:101hai004\r 30158 franz.sram standard 1 16 R 2025-10-04T20:00:12 2025-10-04T22:37:40 0:31 1-00:00:00 hai0054119:18:331:44:10 54:014032:10 1:06:0153907:40 1:30:3170:5245:316219:18:34 3:19:374375278:54:088:58:57 3:39:14635:15542:14544:27 3:53:441434:44325:442389:44175:27 4:02:4484494520461429972 alfred.ngu6:35:347:5740:44130031:53:391:53:56 10:44:1510:3007:15529993 nishant.k2 3843T18:29:1801:48:31 20:49:40[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:39:150069451",,terminal_output +8015,15432185,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +8016,15434188,"TERMINAL",0,0,"57999555555414188888888888899499",,terminal_output +8017,15436199,"TERMINAL",0,0,"79411121Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)98:1074777733202050505050505050505050212162121",,terminal_output +8018,15438202,"TERMINAL",0,0,"941333999995522222222222233833",,terminal_output +8019,15440213,"TERMINAL",0,0,"213555111111414177444444444444555055",,terminal_output +8020,15442324,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output 
+8021,15444234,"TERMINAL",0,0,"5799955555515188888888888899499",,terminal_output +8022,15446246,"TERMINAL",0,0,"79512131777773330304:005:006:004:00:003:003:003:003:003:001:00313163131",,terminal_output +8023,15448252,"TERMINAL",0,0,"951333999995522222222222233833",,terminal_output +8024,15450275,"TERMINAL",0,0,"3135552121215151774444444444445550:0055",,terminal_output +8025,15452272,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8026,15454280,"TERMINAL",0,0,"579995555520:0120:0188888888888899499",,terminal_output +8027,15456301,"TERMINAL",0,0,"793:0131417777733404010101010101010101010414164141",,terminal_output +8028,15458301,"TERMINAL",0,0,"95:01333999995522222222222233833",,terminal_output +8029,15460322,"TERMINAL",0,0,"\r41355516 R2025-10-04T22:38:401hai005\r 30159 franz.sram standard 1 16 R 2025-10-04T20:00:12 2025-10-04T22:38:10 0:31 1-00:00:00 hai0044119:18:331:4454:31032:10 1:06:3153907:40 1:31:0170:5246:016219:18:34 3:20:074375278:54:088:58:57 3:39:44635:15542:44544:27 3:54:141435:14326:14238:27 4:00:141753:1484494520461429972 alfred.ngu6:35:347:5741:14130031:53:391:53:56 10:44:4510:3007:45529993 nishant.k2 3843T18:29:1801:48:31 20:50:10[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:39:450069451",,terminal_output +8030,15462316,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +8031,15464331,"TERMINAL",0,0,"57999555555111188888888888899499",,terminal_output +8032,15466344,"TERMINAL",0,0,"79114151Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)604075777733505020202020202020202020515165151",,terminal_output +8033,15468347,"TERMINAL",0,0,"911333999995522222222222233833",,terminal_output +8034,15470357,"TERMINAL",0,0,"513555114141111177444444444444552055",,terminal_output +8035,15472368,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8036,15474379,"TERMINAL",0,0,"5799955555212188888888888899499",,terminal_output +8037,15476395,"TERMINAL",0,0,"7921511:01777773340:003:00303030303030303030305:018:01640:0140:01",,terminal_output +8038,15478399,"TERMINAL",0,0,"921333999995522222222222233833",,terminal_output +8039,15480434,"TERMINAL",0,0,"9:013555215151212177444444444444553055",,terminal_output +8040,15482418,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8041,15484429,"TERMINAL",0,0,"5799955555313188888888888899499",,terminal_output +8042,15486438,"TERMINAL",0,0,"79315:01117777733101040404040404040404040111161111",,terminal_output +8043,15488446,"TERMINAL",0,0,"931333999995522222222222233833",,terminal_output +8044,15490458,"TERMINAL",0,0,"\r11355516 R2025-10-04T22:39:101hai004\r 30160 franz.sram standard 1 16 R 2025-10-04T20:00:12 2025-10-04T22:38:40 0:31 1-00:00:00 hai0054119:18:331:44:10 55:014032:10 1:07:0153907:40 1:31:3170:5246:316219:18:34 3:20:374375278:54:088:58:57 3:40:14635:1553:14544:27 3:54:441435:44326:44238:27 4:00:441753:4484494520461429972 alfred.ngu6:35:347:5741:44130031:53:391:53:56 10:45:1510:3008:15529993 nishant.k2 3843T18:29:1801:48:31 20:50:40[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:40:150069451",,terminal_output +8045,15492484,"TERMINAL",0,0,"\r3577734119:18:331:44:10 55:034032:10 1:07:0353907:40 1:31:3370:5246:336219:18:34 3:20:394395278:54:088:58:57 3:40:16635:1553:16544:27 3:54:461435:46326:46238:27 4:00:461753:4686496520661629972 alfred.ngu6:35:347:5741:46130031:53:391:53:56 10:45:1710:3008:17529993 nishant.k2 3843T18:29:1801:48:31 20:50:42[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:40:170069471",,terminal_output 
+8046,15494498,"TERMINAL",0,0,"57999Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)620:00:122:39 0:05144:10 55:0544032:10 1:07:0591:0731:355720:52:40 1:46:3562414339:18:339:18:34 3:20:41754:0880:18655:57 3:43:185544:48435:48342:27 3:56:482280:4878188419850830121 franz.sram8:35:158:3503:48629976:35:347:57:27 4:41:4823:3935:19130031 alfred.ng1 164T11:50:3011:50:56 10:48:1900529993 nishant.k2 38418:29:184T01:48:31 20:50:44[002,007]809630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 23:40:19 1-00:00:00 hai001",,terminal_output +8047,15496513,"TERMINAL",0,0,"794111217777733202050505050505050505050212162121",,terminal_output +8048,15498511,"TERMINAL",0,0,"941333999995522222222222233833",,terminal_output +8049,15500510,"TERMINAL",0,0,"213555111111414177444444444444555055",,terminal_output +8050,15502515,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8051,15504525,"TERMINAL",0,0,"5799955555515188888888888899499",,terminal_output +8052,15506539,"TERMINAL",0,0,"79512131777773330305:006:007:001:004:004:004:004:004:002:00313163131",,terminal_output +8053,15508546,"TERMINAL",0,0,"951333999995522222222222233833",,terminal_output +8054,15510565,"TERMINAL",0,0,"313555212121515177444444444444551:0055",,terminal_output +8055,15512562,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8056,15514568,"TERMINAL",0,0,"57999555551:011:0188888888888899499",,terminal_output +8057,15516581,"TERMINAL",0,0,"794:0131417777733404010101010101010101010414164141",,terminal_output +8058,15518592,"TERMINAL",0,0,"96:024443030302:007:006633333333333344944",,terminal_output +8059,15520603,"TERMINAL",0,0,"\r424666324025222288555555555555661166",,terminal_output +8060,15522710,"TERMINAL",0,0,"4688844444101077777777777788388",,terminal_output +8061,15524618,"TERMINAL",0,0,"681040506666622999999999999505055050",,terminal_output +8062,15526627,"TERMINAL",0,0,"810222888884451512121212121212121212122722",,terminal_output +8063,15528636,"TERMINAL",0,0,"50244410404010106633333333333344944",,terminal_output +8064,15530654,"TERMINAL",0,0,"246662222288555555555555662166",,terminal_output +8065,15532657,"TERMINAL",0,0,"4688844444202077777777777788388",,terminal_output +8066,15534666,"TERMINAL",0,0,"6820502:0066666229999999999996:009:0051:001:00",,terminal_output +8067,15536669,"TERMINAL",0,0,"82022288888441:014:013131313131313131313122722",,terminal_output +8068,15538685,"TERMINAL",0,0,"40:00244420505020206633333333333344944",,terminal_output +8069,15540695,"TERMINAL",0,0,"246662222288555555555555663166",,terminal_output +8070,15542701,"TERMINAL",0,0,"4688844444303077777777777788388",,terminal_output +8071,15544720,"TERMINAL",0,0,"68306:00106666622999999999999101051010",,terminal_output +8072,15546717,"TERMINAL",0,0,"830222888884411114141414141414141414122722",,terminal_output +8073,15548731,"TERMINAL",0,0,"102444306:008:0030306633333333333344944",,terminal_output +8074,15550768,"TERMINAL",0,0,"\r2466616 R2025-10-04T22:40:102hai004\r 30162 franz.sram standard 1 16 R 2025-10-04T20:00:12 2025-10-04T22:39:40 0:32 1-00:00:00 hai0054119:18:331:44:10 56:024032:10 1:08:0253907:40 1:32:3270:5247:326219:18:34 3:21:384385278:54:088:58:57 3:41:15635:1554:15544:27 3:55:451436:45327:45238:27 4:01:451754:4585495520561529972 alfred.ngu6:35:347:5742:45130031:53:391:53:56 10:46:1610:3009:16529993 nishant.k2 3843T18:29:1801:48:31 20:51:41[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:41:160069461",,terminal_output 
+8075,15552813,"TERMINAL",0,0,"46888444444404077777777777788388",,terminal_output +8076,15554767,"TERMINAL",0,0,"6840102066666622999999999999202052020",,terminal_output +8077,15556766,"TERMINAL",0,0,"8402228888884421215151515151515151515122722",,terminal_output +8078,15558794,"TERMINAL",0,0,"2024441040101040406633333333333344944",,terminal_output +8079,15560803,"TERMINAL",0,0,"24666Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)340:1124222288555555555555665166",,terminal_output +8080,15562849,"TERMINAL",0,0,"4688844444505077777777777788388",,terminal_output +8081,15564808,"TERMINAL",0,0,"685020306666622999999999999303053030",,terminal_output +8082,15566817,"TERMINAL",0,0,"850222888884431316:017:018:012:015:015:015:015:015:013:0122722",,terminal_output +8083,15568833,"TERMINAL",0,0,"30244420202050506633333333333344944",,terminal_output +8084,15570844,"TERMINAL",0,0,"\r246664119:18:331:4456:22032:10 1:08:2253907:40 1:32:5270:5247:526219:18:34 3:21:584385278:54:088:58:57 3:41:35635:1554:35544:27 3:56:051437:05328:05238:27 4:02:051755:0585495520561529972 alfred.ngu6:35:347:5743:05130031:53:391:53:56 10:46:3610:3009:36529993 nishant.k2 3843T18:29:1801:48:31 20:52:01[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:41:360069461",,terminal_output +8085,15572844,"TERMINAL",0,0,"4688844442:002:0077777777777788388",,terminal_output +8086,15574865,"TERMINAL",0,0,"685:003040666622999999999999404054040",,terminal_output +8087,15576866,"TERMINAL",0,0,"87:0022288884441411111111111111111111122722",,terminal_output +8088,15578884,"TERMINAL",0,0,"40244430303:008:006633333333333344944",,terminal_output +8089,15580887,"TERMINAL",0,0,"\r24666416 R2025-10-04T22:40:402hai004516 R2025-10-04T22:40:402hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 56:32 1-00:00:00 hai004032:10 1:08:3253907:40 1:33:0270:5248:026219:18:34 3:22:084385278:54:088:58:57 3:41:45635:1554:45544:27 3:56:151437:15328:15238:27 4:02:151755:1585495520561529972 alfred.ngu6:35:347:5743:15130031:53:391:53:56 10:46:4610:3009:46529993 nishant.k2 3843T18:29:1801:48:31 20:52:11[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:41:460069461",,terminal_output +8090,15582919,"TERMINAL",0,0,"46888444444101077777777777788388",,terminal_output +8091,15584902,"TERMINAL",0,0,"6810405066666622999999999999505055050",,terminal_output +8092,15586911,"TERMINAL",0,0,"8102228888884451512121212121212121212122722",,terminal_output +8093,15588930,"TERMINAL",0,0,"5024441010404010106633333333333344944",,terminal_output +8094,15590947,"TERMINAL",0,0,"2466622222288555555555555662166",,terminal_output +8095,15592955,"TERMINAL",0,0,"46888444444202077777777777788388",,terminal_output +8096,15594974,"TERMINAL",0,0,"6820503:00666666229999999999997:0050:0052:002:00",,terminal_output +8097,15596960,"TERMINAL",0,0,"820222888888442:015:013131313131313131313122722",,terminal_output +8098,15598981,"TERMINAL",0,0,"1:0024442020505020206633333333333344944",,terminal_output +8099,15600999,"TERMINAL",0,0,"2466622222288555555555555663166",,terminal_output +8100,15603092,"TERMINAL",0,0,"46888444444303077777777777788388",,terminal_output +8101,15605000,"TERMINAL",0,0,"68307:001066666622999999999999101051010",,terminal_output +8102,15607010,"TERMINAL",0,0,"8302228888884411114141414141414141414122722",,terminal_output +8103,15609028,"TERMINAL",0,0,"102444Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)307:009:0030306633333333333344944",,terminal_output 
+8104,15611077,"TERMINAL",0,0,"\r2466616 R2025-10-04T22:41:102hai004\r 30165 franz.sram standard 1 16 R 2025-10-04T20:00:12 2025-10-04T22:40:40 0:32 1-00:00:00 hai0054119:18:331:44:10 57:024032:10 1:09:0253907:40 1:33:3270:5248:326219:18:34 3:22:384385278:54:088:58:57 3:42:15635:1555:15544:27 3:56:451437:45328:45238:27 4:02:451755:4585495520561529972 alfred.ngu6:35:347:5743:45130031:53:391:53:56 10:47:1610:30050:16529993 nishant.k2 3843T18:29:1801:48:31 20:52:41[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:42:160069461",,terminal_output +8105,15613040,"TERMINAL",0,0,"46888Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)61:10446520:00:122:40:40 0:345144:10 57:0444032:10 1:09:0491:0733:345720:52:40 1:48:3462404339:18:339:18:34 3:22:40754:0882:17655:57 3:45:175546:47437:47342:27 3:58:472282:4777187419750730121 franz.sram8:35:158:3505:47629976:35:347:57:27 4:43:4723:39347:18130031 alfred.ng1 164T11:50:3011:50:56 10:50:1800529993 nishant.k2 38418:29:184T01:48:31 20:52:43[002,007]808630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 23:42:18 1-00:00:00 hai001",,terminal_output +8106,15615050,"TERMINAL",0,0,"6840102066666622999999999999202052020",,terminal_output +8107,15617123,"TERMINAL",0,0,"\r84022284119:18:331:44:10 57:084032:10 1:09:0853907:40 1:33:3870:5248:386219:18:34 3:22:444345278:54:088:58:57 3:42:21635:1555:21544:27 3:56:511437:51328:51238:27 4:02:511755:51851495152051615129972 alfred.ngu6:35:347:5743:51130031:53:391:53:56 10:47:2210:30050:22529993 nishant.k2 3843T18:29:1801:48:31 20:52:47[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:42:220069421",,terminal_output +8108,15619068,"TERMINAL",0,0,"20244410101040406633333333333344944",,terminal_output +8109,15621115,"TERMINAL",0,0,"246662222288555555555555665166",,terminal_output +8110,15623164,"TERMINAL",0,0,"4688844444505077777777777788388",,terminal_output +8111,15625112,"TERMINAL",0,0,"685020306666622999999999999303053030",,terminal_output +8112,15627161,"TERMINAL",0,0,"850222888884431317:018:019:013:016:016:016:016:016:014:0122722",,terminal_output +8113,15629117,"TERMINAL",0,0,"30244420202050506633333333333344944",,terminal_output +8114,15631126,"TERMINAL",0,0,"246662222288555555555555663:0166",,terminal_output +8115,15633197,"TERMINAL",0,0,"46888444443:003:0077777777777788388",,terminal_output +8116,15635151,"TERMINAL",0,0,"686:0030406666622999999999999404054040",,terminal_output +8117,15637192,"TERMINAL",0,0,"\r88:002224119:18:331:4457:28032:10 1:09:2853907:40 1:33:5870:5248:586219:18:34 3:23:044345278:54:088:58:57 3:42:41635:1555:41544:27 3:57:111438:11329:11238:27 4:03:111756:11811491152011611129972 alfred.ngu6:35:347:5744:11130031:53:391:53:56 10:47:4210:30050:42529993 nishant.k2 3843T18:29:1801:48:31 20:53:07[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:42:420069421",,terminal_output +8118,15639166,"TERMINAL",0,0,"40244430304:009:006633333333333344944",,terminal_output +8119,15641182,"TERMINAL",0,0,"\r24666716 R2025-10-04T22:41:402hai004816 R2025-10-04T22:41:402hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 57:32 1-00:00:00 hai004032:10 1:09:3253907:40 1:34:0270:5249:026219:18:34 3:23:084385278:54:088:58:57 3:42:45635:1555:45544:27 3:57:151438:15329:15238:27 4:03:151756:1585495520561529972 alfred.ngu6:35:347:5744:15130031:53:391:53:56 10:47:4610:30050:46529993 nishant.k2 3843T18:29:1801:48:31 20:53:11[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:42:460069461",,terminal_output 
+8120,15643188,"TERMINAL",0,0,"46888444444101077777777777788388",,terminal_output +8121,15645198,"TERMINAL",0,0,"6810405066666622999999999999505055050",,terminal_output +8122,15647229,"TERMINAL",0,0,"8102228888884451512121212121212121212122722",,terminal_output +8123,15649215,"TERMINAL",0,0,"5024441010404010106633333333333344944",,terminal_output +8124,15651224,"TERMINAL",0,0,"2466622222288555555555555662166",,terminal_output +8125,15653233,"TERMINAL",0,0,"46888444444202077777777777788388",,terminal_output +8126,15655244,"TERMINAL",0,0,"6820504:00666666229999999999998:001:0053:003:00",,terminal_output +8127,15657255,"TERMINAL",0,0,"820222888888443:016:013131313131313131313122722",,terminal_output +8128,15659262,"TERMINAL",0,0,"2:0024442020505020206633333333333344944",,terminal_output +8129,15661274,"TERMINAL",0,0,"2466622222288555555555555663166",,terminal_output +8130,15663278,"TERMINAL",0,0,"46888444444303077777777777788388",,terminal_output +8131,15665289,"TERMINAL",0,0,"68308:001066666622999999999999101051010",,terminal_output +8132,15667302,"TERMINAL",0,0,"8302228888884411114141414141414141414122722",,terminal_output +8133,15669310,"TERMINAL",0,0,"102444Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)308:0010:0030306633333333333344944",,terminal_output +8134,15671334,"TERMINAL",0,0,"\r2466616 R2025-10-04T22:42:102hai004\r 30168 franz.sram standard 1 16 R 2025-10-04T20:00:21 2025-10-04T22:41:40 0:32 1-00:00:00 hai0054119:18:331:44:10 58:024032:10 1:10:0253907:40 1:34:3270:5249:326219:18:34 3:23:384385278:54:088:58:57 3:43:15635:1556:15544:27 3:57:451438:45329:45238:27 4:03:451756:4585495520561529972 alfred.ngu6:35:347:5744:45130031:53:391:53:56 10:48:1610:30051:16529993 nishant.k2 3843T18:29:1801:48:31 20:53:41[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:43:160069461",,terminal_output +8135,15673331,"TERMINAL",0,0,"46888Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)92:10446820:00:212:41:40 0:345144:10 58:0444032:10 1:10:0491:0734:345720:52:40 1:49:3462404339:18:339:18:34 3:23:40754:0883:17655:57 3:46:175547:47438:47342:27 3:59:472283:4777187419750730121 franz.sram8:35:158:3506:47629976:35:347:57:27 4:44:4723:39348:18130031 alfred.ng1 164T11:50:3011:50:56 10:51:1800529993 nishant.k2 38418:29:184T01:48:31 20:53:43[002,007]808630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 23:43:18 1-00:00:00 hai001",,terminal_output +8136,15675342,"TERMINAL",0,0,"6840102066666622999999999999202052020",,terminal_output +8137,15677349,"TERMINAL",0,0,"8402228888884421215151515151515151515122722",,terminal_output +8138,15679360,"TERMINAL",0,0,"2024441040101040406633333333333344944",,terminal_output +8139,15681372,"TERMINAL",0,0,"\r2466624119:18:331:44:10 58:124032:10 1:10:1253907:40 1:34:4270:5249:426219:18:34 3:23:484385278:54:088:58:57 3:43:25635:1556:25544:27 3:57:551438:55329:55238:27 4:03:551756:5585495520561529972 alfred.ngu6:35:347:5744:55130031:53:391:53:56 10:48:2610:30051:26529993 nishant.k2 3843T18:29:1801:48:31 20:53:51[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:43:260069461",,terminal_output +8140,15683379,"TERMINAL",0,0,"4688844444505077777777777788388",,terminal_output +8141,15685384,"TERMINAL",0,0,"685020306666622999999999999303053030",,terminal_output +8142,15687400,"TERMINAL",0,0,"850222888884431318:019:014:00:014:017:017:017:017:017:015:0122722",,terminal_output +8143,15689408,"TERMINAL",0,0,"30244420202050506633333333333344944",,terminal_output 
+8144,15691421,"TERMINAL",0,0,"246662222288555555555555664:0166",,terminal_output +8145,15693426,"TERMINAL",0,0,"46888444444:004:0077777777777788388",,terminal_output +8146,15695435,"TERMINAL",0,0,"687:0030406666622999999999999404054040",,terminal_output +8147,15697447,"TERMINAL",0,0,"89:00222888884441411111111111111111111122722",,terminal_output +8148,15699459,"TERMINAL",0,0,"\r40244416 R2025-10-04T22:42:40hai005\r 30169 franz.sram standard 1 16 R 2025-10-04T20:00:21 2025-10-04T22:42:10 0:30 1-00:00:00 hai0044119:18:331:4458:30032:10 1:10:3053907:40 1:35:0070:5250:006219:18:34 3:24:064365278:54:088:58:57 3:43:43635:1556:43544:27 3:58:131439:1332:27 4:00:132384:131757:1383493520361329972 alfred.ngu6:35:347:5745:13130031:53:391:53:56 10:48:4410:30051:44529993 nishant.k2 3843T18:29:1801:48:31 20:54:09[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:43:440069441",,terminal_output +8149,15701468,"TERMINAL",0,0,"2466622222288555555555555661166",,terminal_output +8150,15703476,"TERMINAL",0,0,"46888444444101077777777777788388",,terminal_output +8151,15705486,"TERMINAL",0,0,"6810405066666622999999999999505055050",,terminal_output +8152,15707496,"TERMINAL",0,0,"810222Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)70408588884451512121212121212121212122722",,terminal_output +8153,15709508,"TERMINAL",0,0,"50244410404010106633333333333344944",,terminal_output +8154,15711516,"TERMINAL",0,0,"246662222288555555555555662166",,terminal_output +8155,15713521,"TERMINAL",0,0,"4688844444202077777777777788388",,terminal_output +8156,15715529,"TERMINAL",0,0,"6820505:0066666229999999999999:002:0054:004:00",,terminal_output +8157,15717576,"TERMINAL",0,0,"82022288888444:017:013131313131313131313122722",,terminal_output +8158,15719550,"TERMINAL",0,0,"3:00244420505020206633333333333344944",,terminal_output +8159,15721558,"TERMINAL",0,0,"246662222288555555555555663166",,terminal_output +8160,15723568,"TERMINAL",0,0,"4688844444303077777777777788388",,terminal_output +8161,15725578,"TERMINAL",0,0,"68309:00106666622999999999999101051010",,terminal_output +8162,15727614,"TERMINAL",0,0,"831333999995512124242424242424242424233833",,terminal_output +8163,15729598,"TERMINAL",0,0,"\r11355516 R2025-10-04T22:43:101hai004\r 30170 franz.sram standard 1 16 R 2025-10-04T20:00:21 2025-10-04T22:42:40 0:31 1-00:00:00 hai0054119:18:331:44:10 59:014032:10 1:11:0153907:40 1:35:3170:5250:316219:18:34 3:24:374375278:54:088:58:57 3:44:14635:1557:14544:27 3:58:441439:4432:27 4:00:442384:441757:4484494520461429972 alfred.ngu6:35:347:5745:44130031:53:391:53:56 10:49:1510:30052:15529993 nishant.k2 3843T18:29:1801:48:31 20:54:40[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:44:150069451",,terminal_output +8164,15731607,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +8165,15733618,"TERMINAL",0,0,"57999Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)13:10545555414188888888888899499",,terminal_output +8166,15735633,"TERMINAL",0,0,"794111217777733202050505050505050505050212162121",,terminal_output +8167,15737638,"TERMINAL",0,0,"941333999995522222222222233833",,terminal_output +8168,15739651,"TERMINAL",0,0,"213555111111414177444444444444555055",,terminal_output +8169,15741659,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8170,15743670,"TERMINAL",0,0,"5799955555515188888888888899499",,terminal_output +8171,15745677,"TERMINAL",0,0,"79512131777773330309:004:00:001:005:008:008:008:008:008:006:00313163131",,terminal_output 
+8172,15747686,"TERMINAL",0,0,"951333999995522222222222233833",,terminal_output +8173,15749695,"TERMINAL",0,0,"313555212121515177444444444444555:0055",,terminal_output +8174,15751706,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8175,15753716,"TERMINAL",0,0,"57999555555:015:0188888888888899499",,terminal_output +8176,15755726,"TERMINAL",0,0,"798:0131417777733404010101010101010101010414164141",,terminal_output +8177,15757735,"TERMINAL",0,0,"930:01333999995522222222222233833",,terminal_output +8178,15759744,"TERMINAL",0,0,"\r41355516 R2025-10-04T22:43:401hai005\r 30171 franz.sram standard 1 16 R 2025-10-04T20:00:21 2025-10-04T22:43:10 0:31 1-00:00:00 hai004419:18:331:4459:31032:10 1:11:3153907:40 1:36:0170:5251:016219:18:34 3:25:074375278:54:088:58:57 3:44:44635:1557:44544:27 3:59:14143:27 4:00:14321:142385:141758:1484494520461429972 alfred.ngu6:35:347:5746:14130031:53:391:53:56 10:49:4510:30052:45529993 nishant.k2 3843T18:29:1801:48:31 20:55:10[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:44:450069451",,terminal_output +8179,15761752,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +8180,15763766,"TERMINAL",0,0,"57999555555111188888888888899499",,terminal_output +8181,15765772,"TERMINAL",0,0,"79114151Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)24075777733505020202020202020202020515165151",,terminal_output +8182,15767779,"TERMINAL",0,0,"911333999995522222222222233833",,terminal_output +8183,15769792,"TERMINAL",0,0,"513555114141111177444444444444552055",,terminal_output +8184,15771798,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8185,15773808,"TERMINAL",0,0,"5799955555212188888888888899499",,terminal_output +8186,15775843,"TERMINAL",0,0,"7921516:0177777335:008:003030303030303030303050:013:0165:015:01",,terminal_output +8187,15777830,"TERMINAL",0,0,"921333999995522222222222233833",,terminal_output +8188,15779841,"TERMINAL",0,0,"4:013555215151212177444444444444553055",,terminal_output +8189,15781845,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8190,15783853,"TERMINAL",0,0,"5799955555313188888888888899499",,terminal_output +8191,15785868,"TERMINAL",0,0,"793150:01117777733101040404040404040404040111161111",,terminal_output +8192,15787928,"TERMINAL",0,0,"931333999995522222222222233833",,terminal_output +8193,15789895,"TERMINAL",0,0,"\r11355516 R2025-10-04T22:44:101hai004\r 30172 franz.sram standard 1 16 R 2025-10-04T20:00:21 2025-10-04T22:43:40 0:31 1-00:00:00 hai0054119:18:331:44:10 1:00:014032:10 1:12:0153907:40 1:36:3170:5251:316219:18:34 3:25:374375278:54:088:58:57 3:45:14635:1558:14544:27 3:59:44143:27 4:00:44321:442385:441758:4484494520461429972 alfred.ngu6:35:347:5746:44130031:53:391:53:56 10:50:1510:3003:15529993 nishant.k2 3843T18:29:1801:48:31 20:55:40[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:45:150069451",,terminal_output +8194,15791896,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +8195,15793910,"TERMINAL",0,0,"57999555555414188888888888899499",,terminal_output +8196,15795915,"TERMINAL",0,0,"7941112177777733202050505050505050505050212162121",,terminal_output +8197,15797926,"TERMINAL",0,0,"941333Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)34:109499995522222222222233833",,terminal_output +8198,15799932,"TERMINAL",0,0,"213555111111414177444444444444555055",,terminal_output +8199,15801945,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output 
+8200,15803963,"TERMINAL",0,0,"5799955555515188888888888899499",,terminal_output +8201,15805981,"TERMINAL",0,0,"79512131777773330304:00:001:002:006:009:009:009:009:009:007:00313163131",,terminal_output +8202,15807996,"TERMINAL",0,0,"951333999995522222222222233833",,terminal_output +8203,15809984,"TERMINAL",0,0,"313555212121515177444444444444556:0055",,terminal_output +8204,15811989,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8205,15814009,"TERMINAL",0,0,"57999555556:016:0188888888888899499",,terminal_output +8206,15816012,"TERMINAL",0,0,"\r799:0131414119:18:3311:00:2703212:2753907:40 1:36:5770:5251:576219:18:34 3:26:034335278:54:088:58:57 3:45:40635:1558:40544:27 4:00:101431:10322:102386:101759:10810491052010611029972 alfred.ngu6:35:347:5747:10130031:53:391:53:56 10:50:4110:3003:41529993 nishant.k2 3843T18:29:1801:48:31 20:56:06[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:45:4100694411",,terminal_output +8207,15818033,"TERMINAL",0,0,"91:0133399995522222222222233833",,terminal_output +8208,15820026,"TERMINAL",0,0,"\r413555416 R2025-10-04T22:44:401hai004516 R2025-10-04T22:44:401hai005\r 30141 franz.sram standard 1 16 R 2025-10-04T19:18:33 2025-10-04T21:44:10 1:00:31 1-00:00:00 hai00403212:3153907:40 1:37:0170:5252:016219:18:34 3:26:074375278:54:088:58:57 3:45:44635:1558:44544:27 4:00:141431:14322:142386:141759:1484494520461429972 alfred.ngu6:35:347:5747:14130031:53:391:53:56 10:50:4510:3003:45529993 nishant.k2 3843T18:29:1801:48:31 20:56:10[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:45:450069451",,terminal_output +8209,15822129,"TERMINAL",0,0,"35777Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)4347520:00:212:44:40 0:03514400:3344032:10 1:12:3391:0737:0359966666666666677277",,terminal_output +8210,15824072,"TERMINAL",0,0,"5799955555111188888888888899499",,terminal_output +8211,15826091,"TERMINAL",0,0,"791141517777733505020202020202020202020515165151",,terminal_output +8212,15828168,"TERMINAL",0,0,"911333999995522222222222233833",,terminal_output +8213,15830118,"TERMINAL",0,0,"513555111141411177444444444444552055",,terminal_output +8214,15832161,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8215,15834095,"TERMINAL",0,0,"5799955555212188888888888899499",,terminal_output +8216,15836107,"TERMINAL",0,0,"7921517:0177777336:009:00303030303030303030301:014:0166:016:01",,terminal_output +8217,15838204,"TERMINAL",0,0,"921333999995522222222222233833",,terminal_output +8218,15840129,"TERMINAL",0,0,"5:013555212151512177444444444444553055",,terminal_output +8219,15842141,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8220,15844148,"TERMINAL",0,0,"5799955555313188888888888899499",,terminal_output +8221,15846155,"TERMINAL",0,0,"79311:01117777733101040404040404040404040111161111",,terminal_output +8222,15848167,"TERMINAL",0,0,"931333999995522222222222233833",,terminal_output +8223,15850186,"TERMINAL",0,0,"\r113555616 R2025-10-04T22:45:101hai004716 R2025-10-04T22:45:101hai006\r 30175 franz.sram standard 1 16 R 2025-10-04T20:00:21 2025-10-04T22:44:40 0:31 1-00:00:00 hai0054119:18:331:44:10 1:01:014019:18:331:32:10 1:13:013907:40 1:37:3153219:18:34 3:26:374319:18:34 3:26:37278:54:088:58:57 3:46:145268:35:158:55:57 3:49:14535:1544:27 4:00:441443:27 4:01:441322:442386:4417359:441859:4449452046214629972 alfred.ngu6:35:347:5747:441032 alfred.ngu1:53:391:53:56 10:51:151300311:50:301:50:56 10:54:15529993 nishant.k2 3843T18:29:1801:48:31 20:56:40[002,007]083T22:58:53T22:58:56 23:46:15630009 alfred.ng1 
1622:58:543T22:58:56 23:46:15001",,terminal_output +8224,15852234,"TERMINAL",0,0,"357773333339966666666666677277",,terminal_output +8225,15854195,"TERMINAL",0,0,"57999Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)65475:10567520:00:212:44:40 0:35514401:0544032:10 1:13:05921:07:40 1:37:3552414339:18:339:18:34 3:26:41754:0886:18655:57 3:49:185540:48431:483422:482286:4878188419850830121 franz.sram8:35:158:3509:48629976:35:347:57:27 4:47:4823:3931:19130031 alfred.ng1 164T11:50:3011:50:56 10:54:1900529993 nishant.k2 38418:29:184T01:48:31 20:56:44[002,007]809630009 alfred.ngu standard 1 16 R 2025-10-03T22:58:54 2025-10-03T22:58:56 23:46:19 1-00:00:00 hai001",,terminal_output +8226,15856213,"TERMINAL",0,0,"\r79411121774119:18:331:44:10 1:01:07403213:0753907:40 1:37:37219:18:34 3:26:434335278:54:088:58:57 3:46:20635:1559:20544:27 4:00:501431:50322:502386:501759:50850495052050615029972 alfred.ngu6:35:347:5747:50130031:53:391:53:56 10:51:2110:3004:21529993 nishant.k2 3843T18:29:1801:48:31 20:56:46[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:46:2100694211",,terminal_output +8227,15858216,"TERMINAL",0,0,"941333999995522222222222233833",,terminal_output +8228,15860321,"TERMINAL",0,0,"213555111111114177444444444444555055",,terminal_output +8229,15862269,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8230,15864244,"TERMINAL",0,0,"5799955555515188888888888899499",,terminal_output +8231,15866253,"TERMINAL",0,0,"79512131777773330301:002:003:007:0010:0010:0010:0010:0010:008:00313163131",,terminal_output +8232,15868305,"TERMINAL",0,0,"951333999995522222222222233833",,terminal_output +8233,15870323,"TERMINAL",0,0,"313555212121215177444444444444557:0055",,terminal_output +8234,15872290,"TERMINAL",0,0,"35777333339966666666666677277",,terminal_output +8235,15874300,"TERMINAL",0,0,"57999555557:017:0188888888888899499",,terminal_output +8236,15876308,"TERMINAL",0,0,"7950:0131417777733404010101010101010101010414164141",,terminal_output +8237,15878320,"TERMINAL",0,0,"92:01333999995522222222222233833",,terminal_output +8238,15880345,"TERMINAL",0,0,"\r4142649:45:195:53:06Priority)11326:57:05Resources)298:55:30 2025-10-05T01:48:31355516 R2025-10-04T22:45:401hai005\r 30176 franz.sram standard 1 16 R 2025-10-04T20:00:21 2025-10-04T22:45:10 0:31 1-00:00:00 hai00473164119:18:331:44:10 1:01:31403213:3153907:40 1:38:01219:18:34 3:27:074375278:54:088:58:57 3:46:44635:1559:44544:27 4:01:141432:14323:142387:1417510:1484494520461429972 alfred.ngu6:35:347:5748:14130031:53:391:53:56 10:51:4510:3004:45529993 nishant.k2 3843T18:29:1801:48:31 20:57:10[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:46:450069451",,terminal_output +8239,15882341,"TERMINAL",0,0,"35777Nodes required for job are DOWN, DRAINED or reserved for jobs in higher priority partitions)8403533339966666666666677277",,terminal_output +8240,15884350,"TERMINAL",0,0,"5799955555111188888888888899499",,terminal_output +8241,15886403,"TERMINAL",0,0,"791141517777733505020202020202020202020515165151",,terminal_output +8242,15888369,"TERMINAL",0,0,"911333999995522222222222233833",,terminal_output +8243,15890377,"TERMINAL",0,0,"513555114141411177444444444444552055",,terminal_output +8244,15892579,"TERMINAL",0,0,"\r3577734119:18:331:44:10 1:01:43403213:4353907:40 1:38:13219:18:34 3:27:194395278:54:088:58:57 3:46:56635:1559:56544:27 4:01:261432:26323:262387:2617510:2686496520661629972 alfred.ngu6:35:347:5748:26130031:53:391:53:56 10:51:5710:3004:57529993 nishant.k2 3843T18:29:1801:48:31 20:57:22[002,007]30008 
alfred.ng1 1622:58:503T22:58:56 23:46:570069471",,terminal_output +8245,15894399,"TERMINAL",0,0,"579995555212188888888888899499",,terminal_output +8246,15896474,"TERMINAL",0,0,"7921518:017777337:0050:00303030303030303030302:015:0167:017:01",,terminal_output +8247,15898426,"TERMINAL",0,0,"92133399995522222222222233833",,terminal_output +8248,15900446,"TERMINAL",0,0,"6:0135552151512177444444444444553055",,terminal_output +8249,15902484,"TERMINAL",0,0,"3577733339966666666666677277",,terminal_output +8250,15904447,"TERMINAL",0,0,"579995555313188888888888899499",,terminal_output +8251,15906507,"TERMINAL",0,0,"79312:0111777733101040404040404040404040111161111",,terminal_output +8252,15908466,"TERMINAL",0,0,"93133399995522222222222233833",,terminal_output +8253,15910475,"TERMINAL",0,0,"113555312:014:013177444444444444554055",,terminal_output +8254,15912484,"TERMINAL",0,0,"\r357774119:18:331:44:10 1:02:03403214:0353907:40 1:38:33219:18:34 3:27:394395278:54:088:58:57 3:47:16635:15550:16544:27 4:01:461432:46323:462387:4617510:4686496520661629972 alfred.ngu6:35:347:5748:46130031:53:391:53:56 10:52:1710:3005:17529993 nishant.k2 3843T18:29:1801:48:31 20:57:42[002,007]30008 alfred.ng1 1622:58:503T22:58:56 23:47:170069471",,terminal_output +8255,15914494,"TERMINAL",0,0,"57999555414188888888888899499",,terminal_output +8256,15916505,"TERMINAL",0,0,"7941112177733202050505050505050505050212162121",,terminal_output +8257,15918516,"TERMINAL",0,0,"9413339995522222222222233833",,terminal_output +8258,15920537,"TERMINAL",0,0,"21355511114177444444444444555055",,terminal_output +8259,15922580,"TERMINAL",0,0,"357773339966666666666677277",,terminal_output +8260,15924539,"TERMINAL",0,0,"57999555515188888888888899499",,terminal_output +8261,15926562,"TERMINAL",0,0,"795121317773330302:003:004:008:001:001:001:001:001:009:00313163131",,terminal_output +8262,15928557,"TERMINAL",0,0,"9513339995522222222222233833",,terminal_output +8263,15930572,"TERMINAL",0,0,"31355521215177444444444444558:0055",,terminal_output +8264,15932578,"TERMINAL",0,0,"357773339966666666666677277",,terminal_output +8265,15934592,"TERMINAL",0,0,"581:0030406668:028:02999999999999404054040",,terminal_output +8266,15936599,"TERMINAL",0,0,"M83:0022230132 franz.sram standard 1 16 CG 2025-10-04T19:18:33 2025-10-04T19:18:34 3:28:04 1-00:00:00 hai004888441411111111111111111111122722",,terminal_output +8267,15938610,"TERMINAL",0,0,"\r40244430309:00633333333333344944",,terminal_output +8268,15940631,"TERMINAL",0,0,"246662228555555555555661166",,terminal_output +8269,15942658,"TERMINAL",0,0,"468884441077777777777788388",,terminal_output +8270,15944635,"TERMINAL",0,0,"681040506662999999999999505055050",,terminal_output +8271,15946650,"TERMINAL",0,0,"810222888451512121212121212121212122722",,terminal_output +8272,15948654,"TERMINAL",0,0,"502444404010633333333333344944",,terminal_output +8273,15950674,"TERMINAL",0,0,"246662228555555555555662166",,terminal_output +8274,15952694,"TERMINAL",0,0,"468884442077777777777788388",,terminal_output +8275,15954689,"TERMINAL",0,0,"6820509:0066629999999999993:006:0058:008:00",,terminal_output +8276,15956700,"TERMINAL",0,0,"82022288848:011:013131313131313131313122722",,terminal_output +8277,15958708,"TERMINAL",0,0,"7:002444505020633333333333344944",,terminal_output +8278,15960754,"TERMINAL",0,0,"246662228555555555555663166",,terminal_output +8279,15962728,"TERMINAL",0,0,"468884443077777777777788388",,terminal_output +8280,15964738,"TERMINAL",0,0,"68303:00106662999999999999101051010",,terminal_output 
+8281,15966750,"TERMINAL",0,0,"830222888411114141414141414141414122722",,terminal_output +8282,15968758,"TERMINAL",0,0,"1024443:005:0030633333333333344944",,terminal_output +8283,15970768,"TERMINAL",0,0,"246662228555555555555664166",,terminal_output +8284,15972778,"TERMINAL",0,0,"468884444077777777777788388",,terminal_output +8285,15974784,"TERMINAL",0,0,"684010206662999999999999202052020",,terminal_output +8286,15976793,"TERMINAL",0,0,"840222888421215151515151515151515122722",,terminal_output +8287,15978805,"TERMINAL",0,0,"202444101040633333333333344944",,terminal_output +8288,15980815,"TERMINAL",0,0,"246662228555555555555665166",,terminal_output +8289,15982827,"TERMINAL",0,0,"468884445077777777777788388",,terminal_output +8290,15984833,"TERMINAL",0,0,"685020306662999999999999303053030",,terminal_output +8291,15986842,"TERMINAL",0,0,"850222888431313:014:015:019:012:012:012:012:012:0150:0122722",,terminal_output +8292,15988850,"TERMINAL",0,0,"302444202050633333333333344944",,terminal_output +8293,15990864,"TERMINAL",0,0,"246662228555555555555669:0166",,terminal_output +8294,15992867,"TERMINAL",0,0,"468884449:0077777777777788388",,terminal_output +8295,15994878,"TERMINAL",0,0,"682:0030406662999999999999404054040",,terminal_output +8296,15996893,"TERMINAL",0,0,"84:00222888441411111111111111111111122722",,terminal_output +8297,15998900,"TERMINAL",0,0,"402444303040:00633333333333344944",,terminal_output +8298,16000910,"TERMINAL",0,0,"246662228555555555555661166",,terminal_output +8299,16002925,"TERMINAL",0,0,"468884441077777777777788388",,terminal_output +8300,16004940,"TERMINAL",0,0,"681040506662999999999999505055050",,terminal_output +8301,16006939,"TERMINAL",0,0,"810222888451512121212121212121212122722",,terminal_output +8302,16008957,"TERMINAL",0,0,"502444404010633333333333344944",,terminal_output +8303,16010977,"TERMINAL",0,0,"246662228555555555555662166",,terminal_output +8304,16012968,"TERMINAL",0,0,"468884442077777777777788388",,terminal_output +8305,16014988,"TERMINAL",0,0,"6820501:00:0066629999999999994:007:0059:009:00",,terminal_output +8306,16016986,"TERMINAL",0,0,"82022288849:012:013131313131313131313122722",,terminal_output +8307,16019004,"TERMINAL",0,0,"8:002444505020633333333333344944",,terminal_output +8308,16021007,"TERMINAL",0,0,"246662228555555555555663166",,terminal_output +8309,16023016,"TERMINAL",0,0,"468884443077777777777788388",,terminal_output +8310,16025033,"TERMINAL",0,0,"68304:00106662999999999999101051010",,terminal_output +8311,16027134,"TERMINAL",0,0,"830222888411114141414141414141414122722",,terminal_output +8312,16029045,"TERMINAL",0,0,"1024444:006:0030633333333333344944",,terminal_output +8313,16031065,"TERMINAL",0,0,"246662228555555555555664166",,terminal_output +8314,16033073,"TERMINAL",0,0,"468884444077777777777788388",,terminal_output +8315,16035074,"TERMINAL",0,0,"684010206662999999999999202052020",,terminal_output +8316,16037169,"TERMINAL",0,0,"840222888421215151515151515151515122722",,terminal_output +8317,16039105,"TERMINAL",0,0,"202444101040633333333333344944",,terminal_output +8318,16041159,"TERMINAL",0,0,"246662228555555555555665166",,terminal_output +8319,16043113,"TERMINAL",0,0,"468884445077777777777788388",,terminal_output +8320,16045128,"TERMINAL",0,0,"685020306662999999999999303053030",,terminal_output +8321,16047203,"TERMINAL",0,0,"850222888431314:015:016:0110:013:013:013:013:013:011:0122722",,terminal_output +8322,16049144,"TERMINAL",0,0,"302444202050633333333333344944",,terminal_output 
+8323,16051195,"TERMINAL",0,0,"246662228555555555555661:00:0166",,terminal_output +8324,16053173,"TERMINAL",0,0,"4688844430:0077777777777788388",,terminal_output +8325,16055188,"TERMINAL",0,0,"683:0030406662999999999999404054040",,terminal_output +8326,16057241,"TERMINAL",0,0,"85:00222888441411111111111111111111122722",,terminal_output +8327,16059188,"TERMINAL",0,0,"40244430301:00633333333333344944",,terminal_output +8328,16061201,"TERMINAL",0,0,"246662228555555555555661166",,terminal_output +8329,16063206,"TERMINAL",0,0,"468884441077777777777788388",,terminal_output +8330,16065224,"TERMINAL",0,0,"681040506662999999999999505055050",,terminal_output +8331,16067228,"TERMINAL",0,0,"810222888451512121212121212121212122722",,terminal_output +8332,16069236,"TERMINAL",0,0,"502444404010633333333333344944",,terminal_output +8333,16071266,"TERMINAL",0,0,"246662228555555555555662166",,terminal_output +8334,16073254,"TERMINAL",0,0,"468884442077777777777788388",,terminal_output +8335,16075270,"TERMINAL",0,0,"6820501:0066629999999999995:008:00550:0050:00",,terminal_output +8336,16077308,"TERMINAL",0,0,"820222888450:013:013131313131313131313122722",,terminal_output +8337,16079282,"TERMINAL",0,0,"9:002444505020633333333333344944",,terminal_output +8338,16081299,"TERMINAL",0,0,"246662228555555555555663166",,terminal_output +8339,16083350,"TERMINAL",0,0,"468884443077777777777788388",,terminal_output +8340,16085366,"TERMINAL",0,0,"68305:00106662999999999999101051010",,terminal_output +8341,16087345,"TERMINAL",0,0,"830222888411114141414141414141414122722",,terminal_output +8342,16089327,"TERMINAL",0,0,"1024445:007:0030633333333333344944",,terminal_output +8343,16091345,"TERMINAL",0,0,"246662228555555555555664166",,terminal_output +8344,16093343,"TERMINAL",0,0,"468884444077777777777788388",,terminal_output +8345,16095375,"TERMINAL",0,0,"684010206662999999999999202052020",,terminal_output +8346,16097399,"TERMINAL",0,0,"840222888421215151515151515151515122722",,terminal_output +8347,16099373,"TERMINAL",0,0,"202444101040633333333333344944",,terminal_output +8348,16101385,"TERMINAL",0,0,"246662228555555555555665166",,terminal_output +8349,16103395,"TERMINAL",0,0,"468884445077777777777788388",,terminal_output +8350,16105401,"TERMINAL",0,0,"685020306662999999999999303053030",,terminal_output +8351,16107407,"TERMINAL",0,0,"850222888431315:016:017:011:014:014:014:014:014:012:0122722",,terminal_output +8352,16109417,"TERMINAL",0,0,"302444202050633333333333344944",,terminal_output +8353,16111430,"TERMINAL",0,0,"246662228555555555555661:0166",,terminal_output +8354,16113436,"TERMINAL",0,0,"468884441:0077777777777788388",,terminal_output +8355,16115442,"TERMINAL",0,0,"684:0030406662999999999999404054040",,terminal_output +8356,16117554,"TERMINAL",0,0,"86:00222888441411111111111111111111122722",,terminal_output +8357,16119465,"TERMINAL",0,0,"40244430302:00633333333333344944",,terminal_output +8358,16121474,"TERMINAL",0,0,"246662228555555555555661166",,terminal_output +8359,16123484,"TERMINAL",0,0,"468884441077777777777788388",,terminal_output +8360,16125505,"TERMINAL",0,0,"681040506662999999999999505055050",,terminal_output +8361,16127504,"TERMINAL",0,0,"810222888451512121212121212121212122722",,terminal_output +8362,16129510,"TERMINAL",0,0,"502444404010633333333333344944",,terminal_output +8363,16131526,"TERMINAL",0,0,"246662228555555555555662166",,terminal_output +8364,16133632,"TERMINAL",0,0,"468884442077777777777788388",,terminal_output 
+8365,16135540,"TERMINAL",0,0,"6820502:0066629999999999996:009:0051:001:00",,terminal_output +8366,16137622,"TERMINAL",0,0,"82022288841:014:013131313131313131313122722",,terminal_output +8367,16139571,"TERMINAL",0,0,"50:002444505020633333333333344944",,terminal_output +8368,16141568,"TERMINAL",0,0,"246662228555555555555663166",,terminal_output +8369,16143664,"TERMINAL",0,0,"468884443077777777777788388",,terminal_output +8370,16145582,"TERMINAL",0,0,"68306:00106662999999999999101051010",,terminal_output +8371,16147595,"TERMINAL",0,0,"831333999512124242424242424242424233833",,terminal_output +8372,16149603,"TERMINAL",0,0,"1135556:018:01317444444444444554055",,terminal_output +8373,16151607,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8374,16153621,"TERMINAL",0,0,"579995554188888888888899499",,terminal_output +8375,16155631,"TERMINAL",0,0,"794111217773202050505050505050505050212162121",,terminal_output +8376,16157694,"TERMINAL",0,0,"941333999522222222222233833",,terminal_output +8377,16159649,"TERMINAL",0,0,"2135551111417444444444444555055",,terminal_output +8378,16161656,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8379,16163677,"TERMINAL",0,0,"579995555188888888888899499",,terminal_output +8380,16165678,"TERMINAL",0,0,"79512131777330306:007:008:002:005:005:005:005:005:003:00313163131",,terminal_output +8381,16167727,"TERMINAL",0,0,"951333999522222222222233833",,terminal_output +8382,16169699,"TERMINAL",0,0,"3135552121517444444444444552:0055",,terminal_output +8383,16171708,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8384,16173724,"TERMINAL",0,0,"579995552:0188888888888899499",,terminal_output +8385,16175742,"TERMINAL",0,0,"795:0131417773404010101010101010101010414164141",,terminal_output +8386,16177770,"TERMINAL",0,0,"97:01333999522222222222233833",,terminal_output +8387,16179750,"TERMINAL",0,0,"41355531313:017444444444444551055",,terminal_output +8388,16181754,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8389,16183765,"TERMINAL",0,0,"579995551188888888888899499",,terminal_output +8390,16185774,"TERMINAL",0,0,"791141517773505020202020202020202020515165151",,terminal_output +8391,16187784,"TERMINAL",0,0,"911333999522222222222233833",,terminal_output +8392,16189799,"TERMINAL",0,0,"5135554141117444444444444552055",,terminal_output +8393,16191802,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8394,16193841,"TERMINAL",0,0,"579995552188888888888899499",,terminal_output +8395,16195822,"TERMINAL",0,0,"7921513:0177732:005:00303030303030303030307:011:00:0162:012:01",,terminal_output +8396,16197938,"TERMINAL",0,0,"921333999522222222222233833",,terminal_output +8397,16199882,"TERMINAL",0,0,"1:0135555151217444444444444553055",,terminal_output +8398,16201853,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8399,16203872,"TERMINAL",0,0,"579995553188888888888899499",,terminal_output +8400,16205895,"TERMINAL",0,0,"79317:01117773101040404040404040404040111161111",,terminal_output +8401,16207971,"TERMINAL",0,0,"931333999522222222222233833",,terminal_output +8402,16209896,"TERMINAL",0,0,"1135557:019:01317444444444444554055",,terminal_output +8403,16211898,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8404,16213914,"TERMINAL",0,0,"579995554188888888888899499",,terminal_output +8405,16215915,"TERMINAL",0,0,"794111217773202050505050505050505050212162121",,terminal_output +8406,16217930,"TERMINAL",0,0,"941333999522222222222233833",,terminal_output 
+8407,16219950,"TERMINAL",0,0,"2135551111417444444444444555055",,terminal_output +8408,16221951,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8409,16223971,"TERMINAL",0,0,"579995555188888888888899499",,terminal_output +8410,16225971,"TERMINAL",0,0,"79512131777330307:008:009:003:006:006:006:006:006:004:00313163131",,terminal_output +8411,16228042,"TERMINAL",0,0,"951333999522222222222233833",,terminal_output +8412,16230061,"TERMINAL",0,0,"3135552121517444444444444553:0055",,terminal_output +8413,16232001,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8414,16234010,"TERMINAL",0,0,"579995553:0188888888888899499",,terminal_output +8415,16236018,"TERMINAL",0,0,"796:0131417773404010101010101010101010414164141",,terminal_output +8416,16238078,"TERMINAL",0,0,"98:01333999522222222222233833",,terminal_output +8417,16240096,"TERMINAL",0,0,"41355531314:017444444444444551055",,terminal_output +8418,16242072,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8419,16244058,"TERMINAL",0,0,"579995551188888888888899499",,terminal_output +8420,16246122,"TERMINAL",0,0,"791141517773505020202020202020202020515165151",,terminal_output +8421,16248111,"TERMINAL",0,0,"911333999522222222222233833",,terminal_output +8422,16250130,"TERMINAL",0,0,"5135554141117444444444444552055",,terminal_output +8423,16252108,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8424,16254105,"TERMINAL",0,0,"579995552188888888888899499",,terminal_output +8425,16256112,"TERMINAL",0,0,"7921514:0177733:006:00303030303030303030308:011:0163:013:01",,terminal_output +8426,16258126,"TERMINAL",0,0,"921333999522222222222233833",,terminal_output +8427,16260135,"TERMINAL",0,0,"2:0135555151217444444444444553055",,terminal_output +8428,16262140,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8429,16264149,"TERMINAL",0,0,"579995553188888888888899499",,terminal_output +8430,16266168,"TERMINAL",0,0,"79318:01117773101040404040404040404040111161111",,terminal_output +8431,16268170,"TERMINAL",0,0,"931333999522222222222233833",,terminal_output +8432,16270178,"TERMINAL",0,0,"1135558:0120:01317444444444444554055",,terminal_output +8433,16272192,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8434,16274194,"TERMINAL",0,0,"579995554188888888888899499",,terminal_output +8435,16276207,"TERMINAL",0,0,"794111217773202050505050505050505050212162121",,terminal_output +8436,16278213,"TERMINAL",0,0,"941333999522222222222233833",,terminal_output +8437,16280224,"TERMINAL",0,0,"2135551111417444444444444555055",,terminal_output +8438,16282234,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8439,16284242,"TERMINAL",0,0,"579995555188888888888899499",,terminal_output +8440,16286251,"TERMINAL",0,0,"79512131777330308:009:0010:004:007:007:007:007:007:005:00313163131",,terminal_output +8441,16288305,"TERMINAL",0,0,"951333999522222222222233833",,terminal_output +8442,16290304,"TERMINAL",0,0,"3135552121517444444444444554:0055",,terminal_output +8443,16292280,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8444,16294289,"TERMINAL",0,0,"579995554:0188888888888899499",,terminal_output +8445,16296301,"TERMINAL",0,0,"797:0131417773404010101010101010101010414164141",,terminal_output +8446,16298308,"TERMINAL",0,0,"99:01333999522222222222233833",,terminal_output +8447,16300319,"TERMINAL",0,0,"41355531315:017444444444444551055",,terminal_output +8448,16302331,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output 
+8449,16304340,"TERMINAL",0,0,"579995551188888888888899499",,terminal_output +8450,16306347,"TERMINAL",0,0,"791141517773505020202020202020202020515165151",,terminal_output +8451,16308355,"TERMINAL",0,0,"911333999522222222222233833",,terminal_output +8452,16310366,"TERMINAL",0,0,"5135554141117444444444444552055",,terminal_output +8453,16312375,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8454,16314384,"TERMINAL",0,0,"579995552188888888888899499",,terminal_output +8455,16316407,"TERMINAL",0,0,"7921515:0177734:007:00303030303030303030309:012:0164:014:01",,terminal_output +8456,16318410,"TERMINAL",0,0,"921333999522222222222233833",,terminal_output +8457,16320417,"TERMINAL",0,0,"3:0135555151217444444444444553055",,terminal_output +8458,16322421,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8459,16324433,"TERMINAL",0,0,"579995553188888888888899499",,terminal_output +8460,16326441,"TERMINAL",0,0,"79319:01117773101040404040404040404040111161111",,terminal_output +8461,16328450,"TERMINAL",0,0,"931333999522222222222233833",,terminal_output +8462,16330458,"TERMINAL",0,0,"1135559:011:01317444444444444554055",,terminal_output +8463,16332468,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8464,16334481,"TERMINAL",0,0,"579995554188888888888899499",,terminal_output +8465,16336489,"TERMINAL",0,0,"794111217773202050505050505050505050212162121",,terminal_output +8466,16338499,"TERMINAL",0,0,"941333999522222222222233833",,terminal_output +8467,16340512,"TERMINAL",0,0,"2135551111417444444444444555055",,terminal_output +8468,16342520,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8469,16344527,"TERMINAL",0,0,"579995555188888888888899499",,terminal_output +8470,16346537,"TERMINAL",0,0,"79512131777330309:0010:001:005:008:008:008:008:008:006:00313163131",,terminal_output +8471,16348548,"TERMINAL",0,0,"951333999522222222222233833",,terminal_output +8472,16350556,"TERMINAL",0,0,"3135552121517444444444444555:0055",,terminal_output +8473,16352562,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8474,16354574,"TERMINAL",0,0,"579995555:0188888888888899499",,terminal_output +8475,16356579,"TERMINAL",0,0,"798:0131417773404010101010101010101010414164141",,terminal_output +8476,16358594,"TERMINAL",0,0,"940:0244430306:00633333333333344944",,terminal_output +8477,16360600,"TERMINAL",0,0,"4246662228555555555555661166",,terminal_output +8478,16362610,"TERMINAL",0,0,"468884441077777777777788388",,terminal_output +8479,16364616,"TERMINAL",0,0,"681040506662999999999999505055050",,terminal_output +8480,16366626,"TERMINAL",0,0,"810222888451512121212121212121212122722",,terminal_output +8481,16368636,"TERMINAL",0,0,"502444404010633333333333344944",,terminal_output +8482,16370644,"TERMINAL",0,0,"246662228555555555555662166",,terminal_output +8483,16372655,"TERMINAL",0,0,"468884442077777777777788388",,terminal_output +8484,16374660,"TERMINAL",0,0,"6820506:0066629999999999991:00:003:0055:005:00",,terminal_output +8485,16376673,"TERMINAL",0,0,"82022288845:018:013131313131313131313122722",,terminal_output +8486,16378683,"TERMINAL",0,0,"4:002444505020633333333333344944",,terminal_output +8487,16380691,"TERMINAL",0,0,"246662228555555555555663166",,terminal_output +8488,16382696,"TERMINAL",0,0,"468884443077777777777788388",,terminal_output +8489,16384708,"TERMINAL",0,0,"68307:00:00106662999999999999101051010",,terminal_output +8490,16386718,"TERMINAL",0,0,"830222888411114141414141414141414122722",,terminal_output 
+8491,16388726,"TERMINAL",0,0,"10244410:002:0030633333333333344944",,terminal_output +8492,16390733,"TERMINAL",0,0,"246662228555555555555664166",,terminal_output +8493,16392743,"TERMINAL",0,0,"468884444077777777777788388",,terminal_output +8494,16394750,"TERMINAL",0,0,"684010206662999999999999202052020",,terminal_output +8495,16396761,"TERMINAL",0,0,"840222888421215151515151515151515122722",,terminal_output +8496,16398771,"TERMINAL",0,0,"202444101040633333333333344944",,terminal_output +8497,16400782,"TERMINAL",0,0,"246662228555555555555665166",,terminal_output +8498,16402807,"TERMINAL",0,0,"468884445077777777777788388",,terminal_output +8499,16404798,"TERMINAL",0,0,"685020306662999999999999303053030",,terminal_output +8500,16406806,"TERMINAL",0,0,"8502228884313110:011:012:016:019:019:019:019:019:017:0122722",,terminal_output +8501,16408817,"TERMINAL",0,0,"302444202050633333333333344944",,terminal_output +8502,16410826,"TERMINAL",0,0,"246662228555555555555666:0166",,terminal_output +8503,16412833,"TERMINAL",0,0,"468884446:0077777777777788388",,terminal_output +8504,16414850,"TERMINAL",0,0,"689:0030406662999999999999404054040",,terminal_output +8505,16416849,"TERMINAL",0,0,"81:00222888441411111111111111111111122722",,terminal_output +8506,16418867,"TERMINAL",0,0,"40244430307:00633333333333344944",,terminal_output +8507,16420872,"TERMINAL",0,0,"246662228555555555555661166",,terminal_output +8508,16422910,"TERMINAL",0,0,"468884441077777777777788388",,terminal_output +8509,16424893,"TERMINAL",0,0,"681040506662999999999999505055050",,terminal_output +8510,16426896,"TERMINAL",0,0,"810222888451512121212121212121212122722",,terminal_output +8511,16428913,"TERMINAL",0,0,"502444404010633333333333344944",,terminal_output +8512,16430918,"TERMINAL",0,0,"246662228555555555555662166",,terminal_output +8513,16432927,"TERMINAL",0,0,"468884442077777777777788388",,terminal_output +8514,16434937,"TERMINAL",0,0,"6820507:0066629999999999991:004:0056:006:00",,terminal_output +8515,16436945,"TERMINAL",0,0,"82022288846:019:013131313131313131313122722",,terminal_output +8516,16438954,"TERMINAL",0,0,"5:002444505020633333333333344944",,terminal_output +8517,16440974,"TERMINAL",0,0,"246662228555555555555663166",,terminal_output +8518,16442976,"TERMINAL",0,0,"468884443077777777777788388",,terminal_output +8519,16444987,"TERMINAL",0,0,"68301:00106662999999999999101051010",,terminal_output +8520,16446994,"TERMINAL",0,0,"830222888411114141414141414141414122722",,terminal_output +8521,16448999,"TERMINAL",0,0,"1024441:003:0030633333333333344944",,terminal_output +8522,16451017,"TERMINAL",0,0,"246662228555555555555664166",,terminal_output +8523,16453020,"TERMINAL",0,0,"468884444077777777777788388",,terminal_output +8524,16455030,"TERMINAL",0,0,"684010206662999999999999202052020",,terminal_output +8525,16457037,"TERMINAL",0,0,"840222888421215151515151515151515122722",,terminal_output +8526,16459050,"TERMINAL",0,0,"202444101040633333333333344944",,terminal_output +8527,16461105,"TERMINAL",0,0,"246662228555555555555665166",,terminal_output +8528,16463152,"TERMINAL",0,0,"468884445077777777777788388",,terminal_output +8529,16465099,"TERMINAL",0,0,"685020306662999999999999303053030",,terminal_output +8530,16467088,"TERMINAL",0,0,"850222888431311:012:013:017:0120:0120:0120:0120:0120:018:0122722",,terminal_output +8531,16469101,"TERMINAL",0,0,"302444202050633333333333344944",,terminal_output +8532,16471109,"TERMINAL",0,0,"246662228555555555555667:0166",,terminal_output 
+8533,16473117,"TERMINAL",0,0,"468884447:0077777777777788388",,terminal_output +8534,16475131,"TERMINAL",0,0,"687:00:0030406662999999999999404054040",,terminal_output +8535,16477182,"TERMINAL",0,0,"82:00222888441411111111111111111111122722",,terminal_output +8536,16479145,"TERMINAL",0,0,"40244430308:00633333333333344944",,terminal_output +8537,16481162,"TERMINAL",0,0,"246662228555555555555661166",,terminal_output +8538,16483225,"TERMINAL",0,0,"468884441077777777777788388",,terminal_output +8539,16485175,"TERMINAL",0,0,"681040506662999999999999505055050",,terminal_output +8540,16487220,"TERMINAL",0,0,"810222888451512121212121212121212122722",,terminal_output +8541,16489193,"TERMINAL",0,0,"502444404010633333333333344944",,terminal_output +8542,16491206,"TERMINAL",0,0,"246662228555555555555662166",,terminal_output +8543,16493263,"TERMINAL",0,0,"468884442077777777777788388",,terminal_output +8544,16495282,"TERMINAL",0,0,"6820508:0066629999999999992:005:0057:007:00",,terminal_output +8545,16497236,"TERMINAL",0,0,"82022288847:014:00:013131313131313131313122722",,terminal_output +8546,16499242,"TERMINAL",0,0,"6:002444505020633333333333344944",,terminal_output +8547,16501248,"TERMINAL",0,0,"246662228555555555555663166",,terminal_output +8548,16503292,"TERMINAL",0,0,"468884443077777777777788388",,terminal_output +8549,16505313,"TERMINAL",0,0,"68302:00106662999999999999101051010",,terminal_output +8550,16507281,"TERMINAL",0,0,"830222888411114141414141414141414122722",,terminal_output +8551,16509289,"TERMINAL",0,0,"1024442:004:0030633333333333344944",,terminal_output +8552,16511305,"TERMINAL",0,0,"246662228555555555555664166",,terminal_output +8553,16513330,"TERMINAL",0,0,"468884444077777777777788388",,terminal_output +8554,16515315,"TERMINAL",0,0,"684010206662999999999999202052020",,terminal_output +8555,16517325,"TERMINAL",0,0,"840222888421215151515151515151515122722",,terminal_output +8556,16519337,"TERMINAL",0,0,"202444101040633333333333344944",,terminal_output +8557,16521420,"TERMINAL",0,0,"246662228555555555555665166",,terminal_output +8558,16523363,"TERMINAL",0,0,"468884445077777777777788388",,terminal_output +8559,16525384,"TERMINAL",0,0,"685020306662999999999999303053030",,terminal_output +8560,16527465,"TERMINAL",0,0,"850222888431312:013:014:018:011:011:011:011:011:019:0122722",,terminal_output +8561,16529383,"TERMINAL",0,0,"302444202050633333333333344944",,terminal_output +8562,16531405,"TERMINAL",0,0,"246662228555555555555668:0166",,terminal_output +8563,16533402,"TERMINAL",0,0,"468884448:0077777777777788388",,terminal_output +8564,16535450,"TERMINAL",0,0,"681:0030406662999999999999404054040",,terminal_output +8565,16537423,"TERMINAL",0,0,"83:00222888441411111111111111111111122722",,terminal_output +8566,16539431,"TERMINAL",0,0,"40244430309:00633333333333344944",,terminal_output +8567,16541440,"TERMINAL",0,0,"246662228555555555555661166",,terminal_output +8568,16543544,"TERMINAL",0,0,"468884441077777777777788388",,terminal_output +8569,16545455,"TERMINAL",0,0,"681040506662999999999999505055050",,terminal_output +8570,16547532,"TERMINAL",0,0,"810222888451512121212121212121212122722",,terminal_output +8571,16549550,"TERMINAL",0,0,"502444404010633333333333344944",,terminal_output +8572,16551526,"TERMINAL",0,0,"246662228555555555555662166",,terminal_output +8573,16553574,"TERMINAL",0,0,"468884442077777777777788388",,terminal_output +8574,16555519,"TERMINAL",0,0,"6820509:0066629999999999993:006:0058:008:00",,terminal_output 
+8575,16557567,"TERMINAL",0,0,"82022288848:011:013131313131313131313122722",,terminal_output +8576,16559523,"TERMINAL",0,0,"7:002444505020633333333333344944",,terminal_output +8577,16561546,"TERMINAL",0,0,"246662228555555555555663166",,terminal_output +8578,16563547,"TERMINAL",0,0,"468884443077777777777788388",,terminal_output +8579,16565558,"TERMINAL",0,0,"68303:00106662999999999999101051010",,terminal_output +8580,16567599,"TERMINAL",0,0,"830222888411114141414141414141414122722",,terminal_output +8581,16569571,"TERMINAL",0,0,"1024443:005:0030633333333333344944",,terminal_output +8582,16571584,"TERMINAL",0,0,"246662228555555555555664166",,terminal_output +8583,16573594,"TERMINAL",0,0,"579995554188888888888899499",,terminal_output +8584,16575599,"TERMINAL",0,0,"794111217773202050505050505050505050212162121",,terminal_output +8585,16577612,"TERMINAL",0,0,"941333999522222222222233833",,terminal_output +8586,16579621,"TERMINAL",0,0,"2135551111417444444444444555055",,terminal_output +8587,16581634,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8588,16583644,"TERMINAL",0,0,"579995555188888888888899499",,terminal_output +8589,16585651,"TERMINAL",0,0,"79512131777330303:004:005:009:002:002:002:002:002:005:00:00313163131",,terminal_output +8590,16587660,"TERMINAL",0,0,"951333999522222222222233833",,terminal_output +8591,16589669,"TERMINAL",0,0,"3135552121517444444444444559:0055",,terminal_output +8592,16591679,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8593,16593686,"TERMINAL",0,0,"579995559:0188888888888899499",,terminal_output +8594,16595698,"TERMINAL",0,0,"792:0131417773404010101010101010101010414164141",,terminal_output +8595,16597705,"TERMINAL",0,0,"94:01333999522222222222233833",,terminal_output +8596,16599715,"TERMINAL",0,0,"413555313150:017444444444444551055",,terminal_output +8597,16601722,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8598,16603732,"TERMINAL",0,0,"579995551188888888888899499",,terminal_output +8599,16605739,"TERMINAL",0,0,"791141517773505020202020202020202020515165151",,terminal_output +8600,16607748,"TERMINAL",0,0,"911333999522222222222233833",,terminal_output +8601,16609771,"TERMINAL",0,0,"5135554141117444444444444552055",,terminal_output +8602,16611764,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8603,16613776,"TERMINAL",0,0,"579995552188888888888899499",,terminal_output +8604,16615785,"TERMINAL",0,0,"79215110:0177739:002:00303030303030303030304:017:0169:019:01",,terminal_output +8605,16617794,"TERMINAL",0,0,"921333999522222222222233833",,terminal_output +8606,16619805,"TERMINAL",0,0,"8:0135555151217444444444444553055",,terminal_output +8607,16621812,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8608,16623829,"TERMINAL",0,0,"579995553188888888888899499",,terminal_output +8609,16625832,"TERMINAL",0,0,"79314:01117773101040404040404040404040111161111",,terminal_output +8610,16627915,"TERMINAL",0,0,"931333999522222222222233833",,terminal_output +8611,16629848,"TERMINAL",0,0,"1135554:016:01317444444444444554055",,terminal_output +8612,16631851,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8613,16633870,"TERMINAL",0,0,"579995554188888888888899499",,terminal_output +8614,16635875,"TERMINAL",0,0,"794111217773202050505050505050505050212162121",,terminal_output +8615,16637880,"TERMINAL",0,0,"941333999522222222222233833",,terminal_output +8616,16639999,"TERMINAL",0,0,"2135551111417444444444444555055",,terminal_output 
+8617,16641899,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8618,16643908,"TERMINAL",0,0,"579995555188888888888899499",,terminal_output +8619,16645916,"TERMINAL",0,0,"79512131777330304:005:006:0020:003:003:003:003:003:001:00313163131",,terminal_output +8620,16647988,"TERMINAL",0,0,"951333999522222222222233833",,terminal_output +8621,16649932,"TERMINAL",0,0,"31355521215174444444444445510:0055",,terminal_output +8622,16651942,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8623,16653965,"TERMINAL",0,0,"5799955540:0188888888888899499",,terminal_output +8624,16655955,"TERMINAL",0,0,"793:0131417773404010101010101010101010414164141",,terminal_output +8625,16657967,"TERMINAL",0,0,"95:01333999522222222222233833",,terminal_output +8626,16659974,"TERMINAL",0,0,"41355531311:017444444444444551055",,terminal_output +8627,16661983,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8628,16664059,"TERMINAL",0,0,"579995551188888888888899499",,terminal_output +8629,16666003,"TERMINAL",0,0,"791141517773505020202020202020202020515165151",,terminal_output +8630,16668008,"TERMINAL",0,0,"911333999522222222222233833",,terminal_output +8631,16670026,"TERMINAL",0,0,"5135554141117444444444444552055",,terminal_output +8632,16672051,"TERMINAL",0,0,"35777333966666666666677277",,terminal_output +8633,16674038,"TERMINAL",0,0,"579995552188888888888899499",,terminal_output +8634,16676046,"TERMINAL",0,0,"7921511:0177734:00:003:00303030303030303030305:018:0161-00:00:01 1-00:00:00 hai0061-00:00:01 1-00:00:00 hai001",,terminal_output +8635,16678051,"TERMINAL",0,0,"9213339995222222222222338033",,terminal_output +8636,16680064,"TERMINAL",0,0,"9:01355551512174444444444445530055",,terminal_output +8637,16682072,"TERMINAL",0,0,"357773339666666666666772077",,terminal_output +8638,16684092,"TERMINAL",0,0,"5799930008 alfred.ngu standard 1 16 CG 2025-10-03T22:58:50 2025-10-03T22:58:56 1-00:00:07 1-00:00:00 hai00630009 alfred.ngu standard 1 16 CG 2025-10-03T22:58:54 2025-10-03T22:58:56 1-00:00:08 1-00:00:00 hai00155531888888888888994",,terminal_output +8639,16686112,"TERMINAL",0,0,"79315:0111777310104040404040404040404011116",,terminal_output +8640,16688126,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +8641,16690111,"TERMINAL",0,0,"\r11298:55:301:48:3142649:45:19Priority)11326:57:056:56Resources)35555:017:013174444444444445540",,terminal_output +8642,16692117,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8643,16694122,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +8644,16696224,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +8645,16698143,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +8646,16700153,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +8647,16702263,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8648,16704169,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +8649,16706182,"TERMINAL",0,0,"79512131777330305:006:007:001:004:004:004:004:004:002:0031316",,terminal_output +8650,16708197,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +8651,16710214,"TERMINAL",0,0,"3135552121517444444444444551:00",,terminal_output +8652,16712206,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8653,16714215,"TERMINAL",0,0,"579995551:01888888888888994",,terminal_output +8654,16716232,"TERMINAL",0,0,"794:013141777340401010101010101010101041416",,terminal_output +8655,16718306,"TERMINAL",0,0,"96:013339995222222222222338",,terminal_output 
+8656,16720243,"TERMINAL",0,0,"41355531312:0174444444444445510",,terminal_output +8657,16722319,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8658,16724260,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +8659,16726271,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +8660,16728305,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +8661,16730312,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +8662,16732298,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8663,16734306,"TERMINAL",0,0,"5799955521888888888888994",,terminal_output +8664,16736327,"TERMINAL",0,0,"7921512:0177731:004:00303030303030303030306:019:016",,terminal_output +8665,16738322,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +8666,16740342,"TERMINAL",0,0,"3:00:01355551512174444444444445530",,terminal_output +8667,16742353,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8668,16744364,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +8669,16746392,"TERMINAL",0,0,"79316:0111777310104040404040404040404011116",,terminal_output +8670,16748383,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +8671,16750396,"TERMINAL",0,0,"1135556:018:013174444444444445540",,terminal_output +8672,16752398,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8673,16754409,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +8674,16756415,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +8675,16758427,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +8676,16760520,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +8677,16762470,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8678,16764490,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +8679,16766465,"TERMINAL",0,0,"79512131777330306:007:008:002:005:005:005:005:005:003:0031316",,terminal_output +8680,16768470,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +8681,16770492,"TERMINAL",0,0,"3135552121517444444444444552:00",,terminal_output +8682,16772497,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8683,16774502,"TERMINAL",0,0,"579995552:01888888888888994",,terminal_output +8684,16776520,"TERMINAL",0,0,"795:013141777340401010101010101010101041416",,terminal_output +8685,16778519,"TERMINAL",0,0,"97:013339995222222222222338",,terminal_output +8686,16780539,"TERMINAL",0,0,"41355531313:0174444444444445510",,terminal_output +8687,16782542,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8688,16784559,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +8689,16786580,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +8690,16788571,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +8691,16790582,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +8692,16792585,"TERMINAL",0,0,"3688844420777777777777883",,terminal_output +8693,16794604,"TERMINAL",0,0,"6820503:0066629999999999997:0010:005",,terminal_output +8694,16796603,"TERMINAL",0,0,"82022288842:015:0131313131313131313131227",,terminal_output +8695,16798617,"TERMINAL",0,0,"1:0024445050206333333333333449",,terminal_output +8696,16800669,"TERMINAL",0,0,"2466622285555555555556631",,terminal_output +8697,16802654,"TERMINAL",0,0,"4688844430777777777777883",,terminal_output +8698,16804637,"TERMINAL",0,0,"68307:0010666299999999999910105",,terminal_output +8699,16806652,"TERMINAL",0,0,"8302228884111141414141414141414141227",,terminal_output 
+8700,16808658,"TERMINAL",0,0,"1024447:009:00306333333333333449",,terminal_output +8701,16810682,"TERMINAL",0,0,"2466622285555555555556641",,terminal_output +8702,16812674,"TERMINAL",0,0,"4688844440777777777777883",,terminal_output +8703,16814686,"TERMINAL",0,0,"68401020666299999999999920205",,terminal_output +8704,16816695,"TERMINAL",0,0,"8402228884212151515151515151515151227",,terminal_output +8705,16818706,"TERMINAL",0,0,"2024441010406333333333333449",,terminal_output +8706,16820712,"TERMINAL",0,0,"2466622285555555555556651",,terminal_output +8707,16822723,"TERMINAL",0,0,"4688844450777777777777883",,terminal_output +8708,16824734,"TERMINAL",0,0,"68502030666299999999999930305",,terminal_output +8709,16826741,"TERMINAL",0,0,"850222888431317:018:019:013:016:016:016:016:016:014:01227",,terminal_output +8710,16828752,"TERMINAL",0,0,"3024442020506333333333333449",,terminal_output +8711,16830760,"TERMINAL",0,0,"246662228555555555555663:01",,terminal_output +8712,16832766,"TERMINAL",0,0,"468884443:00777777777777883",,terminal_output +8713,16834778,"TERMINAL",0,0,"686:003040666299999999999940405",,terminal_output +8714,16836788,"TERMINAL",0,0,"88:002228884414111111111111111111111227",,terminal_output +8715,16838799,"TERMINAL",0,0,"40244430304:006333333333333449",,terminal_output +8716,16840807,"TERMINAL",0,0,"2466622285555555555556611",,terminal_output +8717,16842816,"TERMINAL",0,0,"4688844410777777777777883",,terminal_output +8718,16844835,"TERMINAL",0,0,"68104050666299999999999950505",,terminal_output +8719,16846833,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +8720,16848844,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output +8721,16850852,"TERMINAL",0,0,"2466622285555555555556621",,terminal_output +8722,16852894,"TERMINAL",0,0,"4688844420777777777777883",,terminal_output +8723,16854872,"TERMINAL",0,0,"6820504:0066629999999999998:001:005",,terminal_output +8724,16856882,"TERMINAL",0,0,"82022288843:016:0131313131313131313131227",,terminal_output +8725,16858893,"TERMINAL",0,0,"2:0024445050206333333333333449",,terminal_output +8726,16860898,"TERMINAL",0,0,"2466622285555555555556631",,terminal_output +8727,16862909,"TERMINAL",0,0,"4688844430777777777777883",,terminal_output +8728,16864919,"TERMINAL",0,0,"68308:0010666299999999999910105",,terminal_output +8729,16866924,"TERMINAL",0,0,"8302228884111141414141414141414141227",,terminal_output +8730,16868934,"TERMINAL",0,0,"1024448:0030:00306333333333333449",,terminal_output +8731,16870943,"TERMINAL",0,0,"2466622285555555555556641",,terminal_output +8732,16872954,"TERMINAL",0,0,"4688844440777777777777883",,terminal_output +8733,16874961,"TERMINAL",0,0,"68401020666299999999999920205",,terminal_output +8734,16876970,"TERMINAL",0,0,"8402228884212151515151515151515151227",,terminal_output +8735,16878979,"TERMINAL",0,0,"2024441010406333333333333449",,terminal_output +8736,16880991,"TERMINAL",0,0,"2466622285555555555556651",,terminal_output +8737,16883096,"TERMINAL",0,0,"4688844450777777777777883",,terminal_output +8738,16885042,"TERMINAL",0,0,"68502030666299999999999930305",,terminal_output +8739,16887015,"TERMINAL",0,0,"850222888431318:019:0120:014:017:017:017:017:017:015:01227",,terminal_output +8740,16889037,"TERMINAL",0,0,"3024442020506333333333333449",,terminal_output +8741,16891041,"TERMINAL",0,0,"246662228555555555555664:01",,terminal_output +8742,16893132,"TERMINAL",0,0,"468884444:00777777777777883",,terminal_output +8743,16895077,"TERMINAL",0,0,"687:003040666299999999999940405",,terminal_output 
+8744,16897066,"TERMINAL",0,0,"89:002228884414111111111111111111111227",,terminal_output +8745,16899082,"TERMINAL",0,0,"40244430305:006333333333333449",,terminal_output +8746,16901099,"TERMINAL",0,0,"2466622285555555555556611",,terminal_output +8747,16903169,"TERMINAL",0,0,"4688844410777777777777883",,terminal_output +8748,16905098,"TERMINAL",0,0,"68104050666299999999999950505",,terminal_output +8749,16907109,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +8750,16909115,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output +8751,16911133,"TERMINAL",0,0,"2466622285555555555556621",,terminal_output +8752,16913137,"TERMINAL",0,0,"4688844420777777777777883",,terminal_output +8753,16915146,"TERMINAL",0,0,"6820505:0066629999999999999:002:005",,terminal_output +8754,16917185,"TERMINAL",0,0,"82022288844:017:0131313131313131313131227",,terminal_output +8755,16919164,"TERMINAL",0,0,"3:0024445050206333333333333449",,terminal_output +8756,16921193,"TERMINAL",0,0,"2466622285555555555556631",,terminal_output +8757,16923234,"TERMINAL",0,0,"4688844430777777777777883",,terminal_output +8758,16925190,"TERMINAL",0,0,"68309:0010666299999999999910105",,terminal_output +8759,16927231,"TERMINAL",0,0,"8302228884111141414141414141414141227",,terminal_output +8760,16929209,"TERMINAL",0,0,"1024449:001:00306333333333333449",,terminal_output +8761,16931222,"TERMINAL",0,0,"2466622285555555555556641",,terminal_output +8762,16933274,"TERMINAL",0,0,"4688844440777777777777883",,terminal_output +8763,16935235,"TERMINAL",0,0,"68401020666299999999999920205",,terminal_output +8764,16937266,"TERMINAL",0,0,"8402228884212151515151515151515151227",,terminal_output +8765,16939251,"TERMINAL",0,0,"2024441010406333333333333449",,terminal_output +8766,16941342,"TERMINAL",0,0,"2466622285555555555556651",,terminal_output +8767,16943309,"TERMINAL",0,0,"4688844450777777777777883",,terminal_output +8768,16945326,"TERMINAL",0,0,"68502030666299999999999930305",,terminal_output +8769,16947301,"TERMINAL",0,0,"850222888431319:0120:011:015:018:018:018:018:018:016:01227",,terminal_output +8770,16949302,"TERMINAL",0,0,"3024442020506333333333333449",,terminal_output +8771,16951312,"TERMINAL",0,0,"246662228555555555555665:01",,terminal_output +8772,16953340,"TERMINAL",0,0,"468884445:00777777777777883",,terminal_output +8773,16955366,"TERMINAL",0,0,"688:003040666299999999999940405",,terminal_output +8774,16957340,"TERMINAL",0,0,"850:002228884414111111111111111111111227",,terminal_output +8775,16959349,"TERMINAL",0,0,"40244430306:006333333333333449",,terminal_output +8776,16961364,"TERMINAL",0,0,"2466622285555555555556611",,terminal_output +8777,16963379,"TERMINAL",0,0,"4688844410777777777777883",,terminal_output +8778,16965378,"TERMINAL",0,0,"68104050666299999999999950505",,terminal_output +8779,16967395,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +8780,16969397,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output +8781,16971468,"TERMINAL",0,0,"2466622285555555555556621",,terminal_output +8782,16973517,"TERMINAL",0,0,"4688844420777777777777883",,terminal_output +8783,16975460,"TERMINAL",0,0,"6820506:00666299999999999910:003:005",,terminal_output +8784,16977510,"TERMINAL",0,0,"82022288845:018:0131313131313131313131227",,terminal_output +8785,16979443,"TERMINAL",0,0,"4:0024445050206333333333333449",,terminal_output +8786,16981457,"TERMINAL",0,0,"2466622285555555555556631",,terminal_output +8787,16983549,"TERMINAL",0,0,"4688844430777777777777883",,terminal_output 
+8788,16985474,"TERMINAL",0,0,"683010:0010666299999999999910105",,terminal_output +8789,16987548,"TERMINAL",0,0,"8302228884111141414141414141414141227",,terminal_output +8790,16989495,"TERMINAL",0,0,"10244420:002:00306333333333333449",,terminal_output +8791,16991512,"TERMINAL",0,0,"2466622285555555555556641",,terminal_output +8792,16993591,"TERMINAL",0,0,"4688844440777777777777883",,terminal_output +8793,16995515,"TERMINAL",0,0,"68401020666299999999999920205",,terminal_output +8794,16997578,"TERMINAL",0,0,"8402228884212151515151515151515151227",,terminal_output +8795,16999535,"TERMINAL",0,0,"2024441010406333333333333449",,terminal_output +8796,17001544,"TERMINAL",0,0,"2466622285555555555556651",,terminal_output +8797,17003622,"TERMINAL",0,0,"4688844450777777777777883",,terminal_output +8798,17005560,"TERMINAL",0,0,"68502030666299999999999930305",,terminal_output +8799,17007615,"TERMINAL",0,0,"8502228884313120:011:012:016:019:019:019:019:019:017:01227",,terminal_output +8800,17009586,"TERMINAL",0,0,"3024442020506333333333333449",,terminal_output +8801,17011590,"TERMINAL",0,0,"257773339666666666666776:02",,terminal_output +8802,17013643,"TERMINAL",0,0,"579995556:01888888888888994",,terminal_output +8803,17015661,"TERMINAL",0,0,"799:013141777340401010101010101010101041416",,terminal_output +8804,17017649,"TERMINAL",0,0,"91:013339995222222222222338",,terminal_output +8805,17019668,"TERMINAL",0,0,"41355531317:0174444444444445510",,terminal_output +8806,17021635,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8807,17023644,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +8808,17025652,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +8809,17027684,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +8810,17029731,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +8811,17031679,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8812,17033689,"TERMINAL",0,0,"5799955521888888888888994",,terminal_output +8813,17035694,"TERMINAL",0,0,"7921517:0177736:009:00303030303030303030301:014:016",,terminal_output +8814,17037722,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +8815,17039740,"TERMINAL",0,0,"5:01355551512174444444444445530",,terminal_output +8816,17041724,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8817,17043735,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +8818,17045740,"TERMINAL",0,0,"79311:0111777310104040404040404040404011116",,terminal_output +8819,17047861,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +8820,17049762,"TERMINAL",0,0,"1135551:013:013174444444444445540",,terminal_output +8821,17051775,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8822,17053795,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +8823,17055819,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +8824,17057821,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +8825,17059808,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +8826,17061815,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8827,17063834,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +8828,17065849,"TERMINAL",0,0,"79512131777330301:002:003:007:0030:0030:0030:0030:0030:008:0031316",,terminal_output +8829,17067846,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +8830,17069851,"TERMINAL",0,0,"3135552121517444444444444557:00",,terminal_output +8831,17071860,"TERMINAL",0,0,"357773339666666666666772",,terminal_output 
+8832,17073871,"TERMINAL",0,0,"579995557:01888888888888994",,terminal_output +8833,17075881,"TERMINAL",0,0,"7910:013141777340401010101010101010101041416",,terminal_output +8834,17077888,"TERMINAL",0,0,"92:013339995222222222222338",,terminal_output +8835,17079894,"TERMINAL",0,0,"41355531318:0174444444444445510",,terminal_output +8836,17081909,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8837,17083915,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +8838,17085923,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +8839,17087933,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +8840,17089943,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +8841,17091951,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8842,17093961,"TERMINAL",0,0,"5799955521888888888888994",,terminal_output +8843,17095972,"TERMINAL",0,0,"7921518:0177737:0010:00303030303030303030302:015:016",,terminal_output +8844,17097981,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +8845,17099990,"TERMINAL",0,0,"6:01355551512174444444444445530",,terminal_output +8846,17101997,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8847,17104008,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +8848,17106017,"TERMINAL",0,0,"79312:0111777310104040404040404040404011116",,terminal_output +8849,17108025,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +8850,17110034,"TERMINAL",0,0,"1135552:014:013174444444444445540",,terminal_output +8851,17112065,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8852,17114053,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +8853,17116060,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +8854,17118070,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +8855,17120078,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +8856,17122101,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8857,17124097,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +8858,17126108,"TERMINAL",0,0,"79512131777330302:003:004:008:001:001:001:001:001:009:0031316",,terminal_output +8859,17128120,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +8860,17130139,"TERMINAL",0,0,"3135552121517444444444444558:00",,terminal_output +8861,17132206,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8862,17134142,"TERMINAL",0,0,"579995558:01888888888888994",,terminal_output +8863,17136153,"TERMINAL",0,0,"791:013141777340401010101010101010101041416",,terminal_output +8864,17138176,"TERMINAL",0,0,"93:013339995222222222222338",,terminal_output +8865,17140197,"TERMINAL",0,0,"41355531319:0174444444444445510",,terminal_output +8866,17142272,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8867,17144189,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +8868,17146262,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +8869,17148304,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +8870,17150217,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +8871,17152307,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8872,17154233,"TERMINAL",0,0,"5799955521888888888888994",,terminal_output +8873,17156246,"TERMINAL",0,0,"7921519:0177738:001:00303030303030303030303:016:016",,terminal_output +8874,17158252,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +8875,17160270,"TERMINAL",0,0,"7:01355551512174444444444445530",,terminal_output 
+8876,17162269,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8877,17164278,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +8878,17166286,"TERMINAL",0,0,"79313:0111777310104040404040404040404011116",,terminal_output +8879,17168292,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +8880,17170331,"TERMINAL",0,0,"1135553:015:013174444444444445540",,terminal_output +8881,17172320,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8882,17174321,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +8883,17176330,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +8884,17178341,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +8885,17180348,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +8886,17182359,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8887,17184370,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +8888,17186393,"TERMINAL",0,0,"79512131777330303:004:005:009:002:002:002:002:002:0010:0031316",,terminal_output +8889,17188384,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +8890,17190391,"TERMINAL",0,0,"3135552121517444444444444559:00",,terminal_output +8891,17192407,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8892,17194409,"TERMINAL",0,0,"579995559:01888888888888994",,terminal_output +8893,17196442,"TERMINAL",0,0,"792:013141777340401010101010101010101041416",,terminal_output +8894,17198429,"TERMINAL",0,0,"94:013339995222222222222338",,terminal_output +8895,17200446,"TERMINAL",0,0,"41355531312:00:0174444444444445510",,terminal_output +8896,17202445,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8897,17204454,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +8898,17206471,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +8899,17208470,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +8900,17210561,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +8901,17212490,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8902,17214565,"TERMINAL",0,0,"5799955521888888888888994",,terminal_output +8903,17216586,"TERMINAL",0,0,"79215120:0177739:002:00303030303030303030304:017:016",,terminal_output +8904,17218516,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +8905,17220533,"TERMINAL",0,0,"8:01355551512174444444444445530",,terminal_output +8906,17222531,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8907,17224582,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +8908,17226587,"TERMINAL",0,0,"79314:0111777310104040404040404040404011116",,terminal_output +8909,17228556,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +8910,17230579,"TERMINAL",0,0,"1135554:016:013174444444444445540",,terminal_output +8911,17232589,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +8912,17234604,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +8913,17236605,"TERMINAL",0,0,"740421222888421215151515151515151515122227",,terminal_output +8914,17238602,"TERMINAL",0,0,"2024441010406333333333333449",,terminal_output +8915,17240674,"TERMINAL",0,0,"2466622285555555555556651",,terminal_output +8916,17242623,"TERMINAL",0,0,"4688844450777777777777883",,terminal_output +8917,17244678,"TERMINAL",0,0,"68502030666299999999999930305",,terminal_output +8918,17246645,"TERMINAL",0,0,"850222888431314:015:016:0130:013:013:013:013:013:011:01227",,terminal_output +8919,17248651,"TERMINAL",0,0,"3024442020506333333333333449",,terminal_output 
+8920,17250660,"TERMINAL",0,0,"2466622285555555555556620:01",,terminal_output +8921,17252669,"TERMINAL",0,0,"4688844450:00777777777777883",,terminal_output +8922,17254680,"TERMINAL",0,0,"683:003040666299999999999940405",,terminal_output +8923,17256689,"TERMINAL",0,0,"85:002228884414111111111111111111111227",,terminal_output +8924,17258700,"TERMINAL",0,0,"40244430301:006333333333333449",,terminal_output +8925,17260718,"TERMINAL",0,0,"2466622285555555555556611",,terminal_output +8926,17262717,"TERMINAL",0,0,"4688844410777777777777883",,terminal_output +8927,17264734,"TERMINAL",0,0,"68104050666299999999999950505",,terminal_output +8928,17266735,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +8929,17268750,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output +8930,17270752,"TERMINAL",0,0,"2466622285555555555556621",,terminal_output +8931,17272759,"TERMINAL",0,0,"4688844420777777777777883",,terminal_output +8932,17274780,"TERMINAL",0,0,"6820501:0066629999999999995:008:005",,terminal_output +8933,17276781,"TERMINAL",0,0,"820222888410:013:0131313131313131313131227",,terminal_output +8934,17278832,"TERMINAL",0,0,"9:0024445050206333333333333449",,terminal_output +8935,17280819,"TERMINAL",0,0,"2466622285555555555556631",,terminal_output +8936,17282810,"TERMINAL",0,0,"4688844430777777777777883",,terminal_output +8937,17284819,"TERMINAL",0,0,"68305:0010666299999999999910105",,terminal_output +8938,17286824,"TERMINAL",0,0,"8302228884111141414141414141414141227",,terminal_output +8939,17288839,"TERMINAL",0,0,"1024445:007:00306333333333333449",,terminal_output +8940,17290844,"TERMINAL",0,0,"2466622285555555555556641",,terminal_output +8941,17292904,"TERMINAL",0,0,"4688844440777777777777883",,terminal_output +8942,17294867,"TERMINAL",0,0,"68401020666299999999999920205",,terminal_output +8943,17296870,"TERMINAL",0,0,"8402228884212151515151515151515151227",,terminal_output +8944,17298884,"TERMINAL",0,0,"2024441010406333333333333449",,terminal_output +8945,17301000,"TERMINAL",0,0,"2466622285555555555556651",,terminal_output +8946,17302897,"TERMINAL",0,0,"4688844450777777777777883",,terminal_output +8947,17304912,"TERMINAL",0,0,"68502030666299999999999930305",,terminal_output +8948,17306927,"TERMINAL",0,0,"850222888431315:016:017:011:014:014:014:014:014:012:01227",,terminal_output +8949,17308925,"TERMINAL",0,0,"3024442020506333333333333449",,terminal_output +8950,17310939,"TERMINAL",0,0,"246662228555555555555661:01",,terminal_output +8951,17312946,"TERMINAL",0,0,"468884441:00777777777777883",,terminal_output +8952,17314956,"TERMINAL",0,0,"684:003040666299999999999940405",,terminal_output +8953,17316963,"TERMINAL",0,0,"86:002228884414111111111111111111111227",,terminal_output +8954,17318985,"TERMINAL",0,0,"40244430302:006333333333333449",,terminal_output +8955,17320986,"TERMINAL",0,0,"2466622285555555555556611",,terminal_output +8956,17323010,"TERMINAL",0,0,"4688844410777777777777883",,terminal_output +8957,17324999,"TERMINAL",0,0,"68104050666299999999999950505",,terminal_output +8958,17327012,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +8959,17329019,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output +8960,17331028,"TERMINAL",0,0,"2466622285555555555556621",,terminal_output +8961,17333038,"TERMINAL",0,0,"4688844420777777777777883",,terminal_output +8962,17335048,"TERMINAL",0,0,"6820502:0066629999999999996:009:005",,terminal_output +8963,17337057,"TERMINAL",0,0,"82022288841:014:0131313131313131313131227",,terminal_output 
+8964,17339064,"TERMINAL",0,0,"10:0024445050206333333333333449",,terminal_output +8965,17341078,"TERMINAL",0,0,"2466622285555555555556631",,terminal_output +8966,17343092,"TERMINAL",0,0,"4688844430777777777777883",,terminal_output +8967,17345095,"TERMINAL",0,0,"68306:0010666299999999999910105",,terminal_output +8968,17347100,"TERMINAL",0,0,"8302228884111141414141414141414141227",,terminal_output +8969,17349113,"TERMINAL",0,0,"1024446:008:00306333333333333449",,terminal_output +8970,17351121,"TERMINAL",0,0,"2466622285555555555556641",,terminal_output +8971,17353134,"TERMINAL",0,0,"4688844440777777777777883",,terminal_output +8972,17355142,"TERMINAL",0,0,"68401020666299999999999920205",,terminal_output +8973,17357153,"TERMINAL",0,0,"8402228884212151515151515151515151227",,terminal_output +8974,17359157,"TERMINAL",0,0,"2024441010406333333333333449",,terminal_output +8975,17361169,"TERMINAL",0,0,"2466622285555555555556651",,terminal_output +8976,17363180,"TERMINAL",0,0,"4688844450777777777777883",,terminal_output +8977,17365187,"TERMINAL",0,0,"68502030666299999999999930305",,terminal_output +8978,17367250,"TERMINAL",0,0,"850222888431316:017:018:012:015:015:015:015:015:013:01227",,terminal_output +8979,17369205,"TERMINAL",0,0,"3024442020506333333333333449",,terminal_output +8980,17371218,"TERMINAL",0,0,"246662228555555555555662:01",,terminal_output +8981,17373288,"TERMINAL",0,0,"468884442:00777777777777883",,terminal_output +8982,17375236,"TERMINAL",0,0,"685:003040666299999999999940405",,terminal_output +8983,17377246,"TERMINAL",0,0,"87:002228884414111111111111111111111227",,terminal_output +8984,17379253,"TERMINAL",0,0,"40244430303:006333333333333449",,terminal_output +8985,17381262,"TERMINAL",0,0,"2466622285555555555556611",,terminal_output +8986,17383271,"TERMINAL",0,0,"4688844410777777777777883",,terminal_output +8987,17385279,"TERMINAL",0,0,"68104050666299999999999950505",,terminal_output +8988,17387288,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +8989,17389300,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output +8990,17391314,"TERMINAL",0,0,"2466622285555555555556621",,terminal_output +8991,17393318,"TERMINAL",0,0,"4688844420777777777777883",,terminal_output +8992,17395332,"TERMINAL",0,0,"6820503:0066629999999999997:0020:005",,terminal_output +8993,17397350,"TERMINAL",0,0,"82022288842:015:0131313131313131313131227",,terminal_output +8994,17399345,"TERMINAL",0,0,"1:0024445050206333333333333449",,terminal_output +8995,17401365,"TERMINAL",0,0,"2466622285555555555556631",,terminal_output +8996,17403393,"TERMINAL",0,0,"4688844430777777777777883",,terminal_output +8997,17405406,"TERMINAL",0,0,"68307:0010666299999999999910105",,terminal_output +8998,17407382,"TERMINAL",0,0,"8302228884111141414141414141414141227",,terminal_output +8999,17409393,"TERMINAL",0,0,"1024447:009:00306333333333333449",,terminal_output +9000,17411401,"TERMINAL",0,0,"2466622285555555555556641",,terminal_output +9001,17413406,"TERMINAL",0,0,"4688844440777777777777883",,terminal_output +9002,17415419,"TERMINAL",0,0,"68401020666299999999999920205",,terminal_output +9003,17417429,"TERMINAL",0,0,"8402228884212151515151515151515151227",,terminal_output +9004,17419445,"TERMINAL",0,0,"2024441010406333333333333449",,terminal_output +9005,17421516,"TERMINAL",0,0,"2466622285555555555556651",,terminal_output +9006,17423565,"TERMINAL",0,0,"4688844450777777777777883",,terminal_output +9007,17425511,"TERMINAL",0,0,"68502030666299999999999930305",,terminal_output 
+9008,17427558,"TERMINAL",0,0,"850222888431317:018:019:013:016:016:016:016:016:014:01227",,terminal_output +9009,17429505,"TERMINAL",0,0,"3024442020506333333333333449",,terminal_output +9010,17431523,"TERMINAL",0,0,"246662228555555555555663:01",,terminal_output +9011,17433501,"TERMINAL",0,0,"468884443:00777777777777883",,terminal_output +9012,17435509,"TERMINAL",0,0,"686:003040666299999999999940405",,terminal_output +9013,17437518,"TERMINAL",0,0,"88:002228884414111111111111111111111227",,terminal_output +9014,17439544,"TERMINAL",0,0,"40244430304:006333333333333449",,terminal_output +9015,17441559,"TERMINAL",0,0,"2466622285555555555556611",,terminal_output +9016,17443635,"TERMINAL",0,0,"4688844410777777777777883",,terminal_output +9017,17445582,"TERMINAL",0,0,"68104050666299999999999950505",,terminal_output +9018,17447629,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +9019,17449677,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output +9020,17451585,"TERMINAL",0,0,"2466622285555555555556621",,terminal_output +9021,17453589,"TERMINAL",0,0,"4799955521888888888888994",,terminal_output +9022,17455598,"TERMINAL",0,0,"7921514:0177733:006:00303030303030303030308:011:016",,terminal_output +9023,17457606,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +9024,17459623,"TERMINAL",0,0,"2:01355551512174444444444445530",,terminal_output +9025,17461632,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9026,17463651,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +9027,17465667,"TERMINAL",0,0,"79318:0111777310104040404040404040404011116",,terminal_output +9028,17467699,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +9029,17469718,"TERMINAL",0,0,"1135558:0140:013174444444444445540",,terminal_output +9030,17471673,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9031,17473698,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +9032,17475695,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +9033,17477702,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +9034,17479708,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +9035,17481719,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9036,17483727,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +9037,17485739,"TERMINAL",0,0,"79512131777330308:009:0030:004:007:007:007:007:007:005:0031316",,terminal_output +9038,17487770,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +9039,17489755,"TERMINAL",0,0,"3135552121517444444444444554:00",,terminal_output +9040,17491764,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9041,17493772,"TERMINAL",0,0,"579995554:01888888888888994",,terminal_output +9042,17495779,"TERMINAL",0,0,"797:013141777340401010101010101010101041416",,terminal_output +9043,17497791,"TERMINAL",0,0,"99:013339995222222222222338",,terminal_output +9044,17499797,"TERMINAL",0,0,"41355531315:0174444444444445510",,terminal_output +9045,17501807,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9046,17503826,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +9047,17505830,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +9048,17507944,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +9049,17509840,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +9050,17511853,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9051,17513873,"TERMINAL",0,0,"5799955521888888888888994",,terminal_output 
+9052,17515890,"TERMINAL",0,0,"7921515:0177734:007:00303030303030303030309:012:016",,terminal_output +9053,17517977,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +9054,17519891,"TERMINAL",0,0,"3:01355551512174444444444445530",,terminal_output +9055,17521894,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9056,17523920,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +9057,17525965,"TERMINAL",0,0,"79319:0111777310104040404040404040404011116",,terminal_output +9058,17528013,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +9059,17529930,"TERMINAL",0,0,"1135559:011:013174444444444445540",,terminal_output +9060,17531938,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9061,17533948,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +9062,17535968,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +9063,17537982,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +9064,17539996,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +9065,17541987,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9066,17544036,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +9067,17546012,"TERMINAL",0,0,"79512131777330309:0030:001:005:008:008:008:008:008:006:0031316",,terminal_output +9068,17548085,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +9069,17550081,"TERMINAL",0,0,"3135552121517444444444444555:00",,terminal_output +9070,17552041,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9071,17554045,"TERMINAL",0,0,"579995555:01888888888888994",,terminal_output +9072,17556055,"TERMINAL",0,0,"798:013141777340401010101010101010101041416",,terminal_output +9073,17558075,"TERMINAL",0,0,"94:00:013339995222222222222338",,terminal_output +9074,17560088,"TERMINAL",0,0,"41355531316:0174444444444445510",,terminal_output +9075,17562079,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9076,17564089,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +9077,17566101,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +9078,17568156,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +9079,17570123,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +9080,17572132,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9081,17574137,"TERMINAL",0,0,"5799955521888888888888994",,terminal_output +9082,17576144,"TERMINAL",0,0,"7921516:0177735:008:003030303030303030303020:013:016",,terminal_output +9083,17578156,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +9084,17580165,"TERMINAL",0,0,"4:01355551512174444444444445530",,terminal_output +9085,17582175,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9086,17584182,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +9087,17586192,"TERMINAL",0,0,"793120:0111777310104040404040404040404011116",,terminal_output +9088,17588200,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +9089,17590213,"TERMINAL",0,0,"11355530:012:013174444444444445540",,terminal_output +9090,17592221,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9091,17594229,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +9092,17596239,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +9093,17598246,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +9094,17600258,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +9095,17602264,"TERMINAL",0,0,"357773339666666666666772",,terminal_output 
+9096,17604274,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +9097,17606282,"TERMINAL",0,0,"795121317773303030:001:002:006:009:009:009:009:009:007:0031316",,terminal_output +9098,17608293,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +9099,17610300,"TERMINAL",0,0,"3135552121517444444444444556:00",,terminal_output +9100,17612314,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9101,17614323,"TERMINAL",0,0,"579995556:01888888888888994",,terminal_output +9102,17616339,"TERMINAL",0,0,"799:013141777340401010101010101010101041416",,terminal_output +9103,17618342,"TERMINAL",0,0,"91:013339995222222222222338",,terminal_output +9104,17620357,"TERMINAL",0,0,"41355531317:0174444444444445510",,terminal_output +9105,17622363,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9106,17624372,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +9107,17626379,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +9108,17628389,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +9109,17630398,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +9110,17632465,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9111,17634419,"TERMINAL",0,0,"5799955521888888888888994",,terminal_output +9112,17636435,"TERMINAL",0,0,"7921517:0177736:009:00303030303030303030301:014:016",,terminal_output +9113,17638439,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +9114,17640448,"TERMINAL",0,0,"5:01355551512174444444444445530",,terminal_output +9115,17642459,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9116,17644465,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +9117,17646493,"TERMINAL",0,0,"79311:0111777310104040404040404040404011116",,terminal_output +9118,17648483,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +9119,17650499,"TERMINAL",0,0,"1135551:013:013174444444444445540",,terminal_output +9120,17652507,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9121,17654519,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +9122,17656522,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +9123,17658530,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +9124,17660621,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +9125,17662568,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9126,17664558,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +9127,17666569,"TERMINAL",0,0,"79512131777330301:002:003:007:0040:0040:0040:0040:0040:008:0031316",,terminal_output +9128,17668576,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +9129,17670585,"TERMINAL",0,0,"3146662222528555555555555667:01",,terminal_output +9130,17672592,"TERMINAL",0,0,"468884447:00777777777777883",,terminal_output +9131,17674610,"TERMINAL",0,0,"6820:003040666299999999999940405",,terminal_output +9132,17676623,"TERMINAL",0,0,"82:002228884414111111111111111111111227",,terminal_output +9133,17678629,"TERMINAL",0,0,"40244430308:006333333333333449",,terminal_output +9134,17680636,"TERMINAL",0,0,"2466622285555555555556611",,terminal_output +9135,17682741,"TERMINAL",0,0,"4688844410777777777777883",,terminal_output +9136,17684686,"TERMINAL",0,0,"68104050666299999999999950505",,terminal_output +9137,17686664,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +9138,17688674,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output 
+9139,17690693,"TERMINAL",0,0,"2466622285555555555556621",,terminal_output +9140,17692776,"TERMINAL",0,0,"4688844420777777777777883",,terminal_output +9141,17694722,"TERMINAL",0,0,"6820508:0066629999999999992:005:005",,terminal_output +9142,17696712,"TERMINAL",0,0,"82022288847:0120:0131313131313131313131227",,terminal_output +9143,17698721,"TERMINAL",0,0,"6:0024445050206333333333333449",,terminal_output +9144,17700731,"TERMINAL",0,0,"2466622285555555555556631",,terminal_output +9145,17702736,"TERMINAL",0,0,"4688844430777777777777883",,terminal_output +9146,17704756,"TERMINAL",0,0,"68302:0010666299999999999910105",,terminal_output +9147,17706761,"TERMINAL",0,0,"8302228884111141414141414141414141227",,terminal_output +9148,17708770,"TERMINAL",0,0,"1024442:004:00306333333333333449",,terminal_output +9149,17710784,"TERMINAL",0,0,"2466622285555555555556641",,terminal_output +9150,17712846,"TERMINAL",0,0,"4688844440777777777777883",,terminal_output +9151,17714795,"TERMINAL",0,0,"68401020666299999999999920205",,terminal_output +9152,17716807,"TERMINAL",0,0,"8402228884212151515151515151515151227",,terminal_output +9153,17718816,"TERMINAL",0,0,"2024441010406333333333333449",,terminal_output +9154,17720823,"TERMINAL",0,0,"2466622285555555555556651",,terminal_output +9155,17722881,"TERMINAL",0,0,"4688844450777777777777883",,terminal_output +9156,17724839,"TERMINAL",0,0,"68502030666299999999999930305",,terminal_output +9157,17726852,"TERMINAL",0,0,"850222888431312:013:014:018:011:011:011:011:011:019:01227",,terminal_output +9158,17728862,"TERMINAL",0,0,"3024442020506333333333333449",,terminal_output +9159,17730869,"TERMINAL",0,0,"246662228555555555555668:01",,terminal_output +9160,17732877,"TERMINAL",0,0,"468884448:00777777777777883",,terminal_output +9161,17734891,"TERMINAL",0,0,"681:003040666299999999999940405",,terminal_output +9162,17736896,"TERMINAL",0,0,"83:002228884414111111111111111111111227",,terminal_output +9163,17738914,"TERMINAL",0,0,"40244430309:006333333333333449",,terminal_output +9164,17740930,"TERMINAL",0,0,"2466622285555555555556611",,terminal_output +9165,17742951,"TERMINAL",0,0,"4688844410777777777777883",,terminal_output +9166,17744931,"TERMINAL",0,0,"68104050666299999999999950505",,terminal_output +9167,17746936,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +9168,17748948,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output +9169,17750964,"TERMINAL",0,0,"2466622285555555555556621",,terminal_output +9170,17752992,"TERMINAL",0,0,"4688844420777777777777883",,terminal_output +9171,17755034,"TERMINAL",0,0,"6820509:0066629999999999993:006:005",,terminal_output +9172,17756984,"TERMINAL",0,0,"82022288848:011:0131313131313131313131227",,terminal_output +9173,17758992,"TERMINAL",0,0,"7:0024445050206333333333333449",,terminal_output +9174,17761001,"TERMINAL",0,0,"2466622285555555555556631",,terminal_output +9175,17763008,"TERMINAL",0,0,"4688844430777777777777883",,terminal_output +9176,17765029,"TERMINAL",0,0,"68303:0010666299999999999910105",,terminal_output +9177,17767030,"TERMINAL",0,0,"8302228884111141414141414141414141227",,terminal_output +9178,17769045,"TERMINAL",0,0,"1024443:005:00306333333333333449",,terminal_output +9179,17771046,"TERMINAL",0,0,"2466622285555555555556641",,terminal_output +9180,17773057,"TERMINAL",0,0,"4688844440777777777777883",,terminal_output +9181,17775063,"TERMINAL",0,0,"68401020666299999999999920205",,terminal_output +9182,17777072,"TERMINAL",0,0,"8402228884212151515151515151515151227",,terminal_output 
+9183,17779084,"TERMINAL",0,0,"2024441010406333333333333449",,terminal_output +9184,17781146,"TERMINAL",0,0,"2466622285555555555556651",,terminal_output +9185,17783195,"TERMINAL",0,0,"4688844450777777777777883",,terminal_output +9186,17785138,"TERMINAL",0,0,"68502030666299999999999930305",,terminal_output +9187,17787187,"TERMINAL",0,0,"850222888431313:014:015:019:012:012:012:012:012:0120:01227",,terminal_output +9188,17789127,"TERMINAL",0,0,"3024442020506333333333333449",,terminal_output +9189,17791146,"TERMINAL",0,0,"246662228555555555555669:01",,terminal_output +9190,17793146,"TERMINAL",0,0,"468884449:00777777777777883",,terminal_output +9191,17795165,"TERMINAL",0,0,"682:003040666299999999999940405",,terminal_output +9192,17797228,"TERMINAL",0,0,"84:002228884414111111111111111111111227",,terminal_output +9193,17799171,"TERMINAL",0,0,"402444303010:006333333333333449",,terminal_output +9194,17801181,"TERMINAL",0,0,"2466622285555555555556611",,terminal_output +9195,17803192,"TERMINAL",0,0,"4688844410777777777777883",,terminal_output +9196,17805214,"TERMINAL",0,0,"68104050666299999999999950505",,terminal_output +9197,17807259,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +9198,17809217,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output +9199,17811236,"TERMINAL",0,0,"2466622285555555555556621",,terminal_output +9200,17813302,"TERMINAL",0,0,"4688844420777777777777883",,terminal_output +9201,17815249,"TERMINAL",0,0,"68205030:0066629999999999994:007:005",,terminal_output +9202,17817253,"TERMINAL",0,0,"82022288849:012:0131313131313131313131227",,terminal_output +9203,17819263,"TERMINAL",0,0,"8:0024445050206333333333333449",,terminal_output +9204,17821286,"TERMINAL",0,0,"2466622285555555555556631",,terminal_output +9205,17823284,"TERMINAL",0,0,"4688844430777777777777883",,terminal_output +9206,17825291,"TERMINAL",0,0,"68304:0010666299999999999910105",,terminal_output +9207,17827331,"TERMINAL",0,0,"8302228884111141414141414141414141227",,terminal_output +9208,17829308,"TERMINAL",0,0,"1024444:006:00306333333333333449",,terminal_output +9209,17831325,"TERMINAL",0,0,"2466622285555555555556641",,terminal_output +9210,17833329,"TERMINAL",0,0,"4688844440777777777777883",,terminal_output +9211,17835338,"TERMINAL",0,0,"68401020666299999999999920205",,terminal_output +9212,17837348,"TERMINAL",0,0,"8402228884212151515151515151515151227",,terminal_output +9213,17839358,"TERMINAL",0,0,"2024441010406333333333333449",,terminal_output +9214,17841368,"TERMINAL",0,0,"2466622285555555555556651",,terminal_output +9215,17843374,"TERMINAL",0,0,"4688844450777777777777883",,terminal_output +9216,17845391,"TERMINAL",0,0,"68502030666299999999999930305",,terminal_output +9217,17847404,"TERMINAL",0,0,"850222888431314:015:016:0140:013:013:013:013:013:011:01227",,terminal_output +9218,17849410,"TERMINAL",0,0,"3024442020506333333333333449",,terminal_output +9219,17851420,"TERMINAL",0,0,"2466622285555555555556630:01",,terminal_output +9220,17853428,"TERMINAL",0,0,"468884444:00:00777777777777883",,terminal_output +9221,17855440,"TERMINAL",0,0,"683:003040666299999999999940405",,terminal_output +9222,17857449,"TERMINAL",0,0,"85:002228884414111111111111111111111227",,terminal_output +9223,17859453,"TERMINAL",0,0,"40244430301:006333333333333449",,terminal_output +9224,17861465,"TERMINAL",0,0,"2466622285555555555556611",,terminal_output +9225,17863477,"TERMINAL",0,0,"4688844410777777777777883",,terminal_output +9226,17865480,"TERMINAL",0,0,"68104050666299999999999950505",,terminal_output 
+9227,17867492,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +9228,17869516,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output +9229,17871511,"TERMINAL",0,0,"2466622285555555555556621",,terminal_output +9230,17873516,"TERMINAL",0,0,"4688844420777777777777883",,terminal_output +9231,17875534,"TERMINAL",0,0,"6820501:0066629999999999995:008:005",,terminal_output +9232,17877605,"TERMINAL",0,0,"820222888420:013:0131313131313131313131227",,terminal_output +9233,17879548,"TERMINAL",0,0,"9:0024445050206333333333333449",,terminal_output +9234,17881565,"TERMINAL",0,0,"2466622285555555555556631",,terminal_output +9235,17883568,"TERMINAL",0,0,"4688844430777777777777883",,terminal_output +9236,17885577,"TERMINAL",0,0,"68305:0010666299999999999910105",,terminal_output +9237,17887587,"TERMINAL",0,0,"8302228884111141414141414141414141227",,terminal_output +9238,17889606,"TERMINAL",0,0,"1135555:017:013174444444444445540",,terminal_output +9239,17891607,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9240,17893625,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +9241,17895630,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +9242,17897630,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +9243,17899644,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +9244,17901647,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9245,17903657,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +9246,17905671,"TERMINAL",0,0,"79512131777330305:006:007:001:004:004:004:004:004:002:0031316",,terminal_output +9247,17907713,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +9248,17909736,"TERMINAL",0,0,"3135552121517444444444444551:00",,terminal_output +9249,17911697,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9250,17913757,"TERMINAL",0,0,"579995551:01888888888888994",,terminal_output +9251,17915712,"TERMINAL",0,0,"794:013141777340401010101010101010101041416",,terminal_output +9252,17917747,"TERMINAL",0,0,"96:013339995222222222222338",,terminal_output +9253,17919766,"TERMINAL",0,0,"41355531312:0174444444444445510",,terminal_output +9254,17921741,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9255,17923760,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +9256,17925775,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +9257,17927770,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +9258,17929779,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +9259,17931790,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9260,17933799,"TERMINAL",0,0,"5799955521888888888888994",,terminal_output +9261,17935810,"TERMINAL",0,0,"7921512:0177731:004:00303030303030303030306:019:016",,terminal_output +9262,17937922,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +9263,17939825,"TERMINAL",0,0,"20:01355551512174444444444445530",,terminal_output +9264,17941837,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9265,17943842,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +9266,17945851,"TERMINAL",0,0,"79316:0111777310104040404040404040404011116",,terminal_output +9267,17947955,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +9268,17949874,"TERMINAL",0,0,"1135556:018:013174444444444445540",,terminal_output +9269,17951882,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9270,17953901,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output 
+9271,17955918,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +9272,17957909,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +9273,17959919,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +9274,17961926,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9275,17963944,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +9276,17965965,"TERMINAL",0,0,"79512131777330306:007:008:002:005:005:005:005:005:003:0031316",,terminal_output +9277,17968030,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +9278,17969968,"TERMINAL",0,0,"3135552121517444444444444552:00",,terminal_output +9279,17971973,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9280,17973985,"TERMINAL",0,0,"579995552:01888888888888994",,terminal_output +9281,17975997,"TERMINAL",0,0,"795:013141777340401010101010101010101041416",,terminal_output +9282,17978062,"TERMINAL",0,0,"97:013339995222222222222338",,terminal_output +9283,17980016,"TERMINAL",0,0,"41355531313:0174444444444445510",,terminal_output +9284,17982027,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9285,17984044,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +9286,17986045,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +9287,17988099,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +9288,17990117,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +9289,17992071,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9290,17994084,"TERMINAL",0,0,"5799955521888888888888994",,terminal_output +9291,17996099,"TERMINAL",0,0,"7921513:0177732:005:00303030303030303030307:0130:016",,terminal_output +9292,17998134,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +9293,18000170,"TERMINAL",0,0,"1:01355551512174444444444445530",,terminal_output +9294,18002123,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9295,18004126,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +9296,18006137,"TERMINAL",0,0,"79317:0111777310104040404040404040404011116",,terminal_output +9297,18008145,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +9298,18010218,"TERMINAL",0,0,"1135557:019:013174444444444445540",,terminal_output +9299,18012165,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9300,18014172,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +9301,18016183,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +9302,18018193,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +9303,18020200,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +9304,18022207,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9305,18024219,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +9306,18026228,"TERMINAL",0,0,"79512131777330307:008:009:003:006:006:006:006:006:004:0031316",,terminal_output +9307,18028236,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +9308,18030249,"TERMINAL",0,0,"3135552121517444444444444553:00",,terminal_output +9309,18032260,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9310,18034268,"TERMINAL",0,0,"579995553:01888888888888994",,terminal_output +9311,18036278,"TERMINAL",0,0,"796:013141777340401010101010101010101041416",,terminal_output +9312,18038287,"TERMINAL",0,0,"98:013339995222222222222338",,terminal_output +9313,18040306,"TERMINAL",0,0,"41355531314:0174444444444445510",,terminal_output +9314,18042317,"TERMINAL",0,0,"357773339666666666666772",,terminal_output 
+9315,18044313,"TERMINAL",0,0,"5799955511888888888888994",,terminal_output +9316,18046323,"TERMINAL",0,0,"79114151777350502020202020202020202051516",,terminal_output +9317,18048329,"TERMINAL",0,0,"9113339995222222222222338",,terminal_output +9318,18050348,"TERMINAL",0,0,"51355541411174444444444445520",,terminal_output +9319,18052409,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9320,18054359,"TERMINAL",0,0,"5799955521888888888888994",,terminal_output +9321,18056377,"TERMINAL",0,0,"7921514:0177733:006:00303030303030303030308:011:016",,terminal_output +9322,18058377,"TERMINAL",0,0,"9213339995222222222222338",,terminal_output +9323,18060393,"TERMINAL",0,0,"2:01355551512174444444444445530",,terminal_output +9324,18062397,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9325,18064404,"TERMINAL",0,0,"5799955531888888888888994",,terminal_output +9326,18066415,"TERMINAL",0,0,"79318:0111777310104040404040404040404011116",,terminal_output +9327,18068422,"TERMINAL",0,0,"9313339995222222222222338",,terminal_output +9328,18070433,"TERMINAL",0,0,"1135558:0150:013174444444444445540",,terminal_output +9329,18072440,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9330,18074448,"TERMINAL",0,0,"5799955541888888888888994",,terminal_output +9331,18076464,"TERMINAL",0,0,"79411121777320205050505050505050505021216",,terminal_output +9332,18078468,"TERMINAL",0,0,"9413339995222222222222338",,terminal_output +9333,18080477,"TERMINAL",0,0,"21355511114174444444444445550",,terminal_output +9334,18082516,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9335,18084496,"TERMINAL",0,0,"5799955551888888888888994",,terminal_output +9336,18086506,"TERMINAL",0,0,"79512131777330308:009:0040:004:007:007:007:007:007:005:0031316",,terminal_output +9337,18088514,"TERMINAL",0,0,"9513339995222222222222338",,terminal_output +9338,18090525,"TERMINAL",0,0,"3135552121517444444444444554:00",,terminal_output +9339,18092535,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9340,18094545,"TERMINAL",0,0,"579995554:01888888888888994",,terminal_output +9341,18096552,"TERMINAL",0,0,"797:013141777340401010101010101010101041416",,terminal_output +9342,18098562,"TERMINAL",0,0,"99:013339995222222222222338",,terminal_output +9343,18100571,"TERMINAL",0,0,"41355531315:0174444444444445510",,terminal_output +9344,18102580,"TERMINAL",0,0,"357773339666666666666772",,terminal_output +9345,18104588,"TERMINAL",0,0,"581040506661299999999999950505",,terminal_output +9346,18106599,"TERMINAL",0,0,"8102228884515121212121212121212121227",,terminal_output +9347,18108609,"TERMINAL",0,0,"5024444040106333333333333449",,terminal_output +9348,18110620,"TERMINAL",0,0,"\r24666222278:54:088:58:57 4:23:55635:1556:55544:27 4:38:251439:253240:252384:251757:2585495520561529972 alfred.ngu6:35:347:57:27 5:25:25130031:53:391:53:56 11:28:5610:30031:56529993 nishant.k2 3843T18:29:1801:48:31 21:34:21[002,007]",,terminal_output +9349,18112628,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9350,18114639,"TERMINAL",0,0,"6820505:006669999999999999:002:005",,terminal_output +9351,18116645,"TERMINAL",0,0,"8202228884:017:0131313131313131313131227",,terminal_output +9352,18118652,"TERMINAL",0,0,"3:002444505020333333333333449",,terminal_output +9353,18120664,"TERMINAL",0,0,"246662225555555555556631",,terminal_output +9354,18122673,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9355,18124684,"TERMINAL",0,0,"68309:001066699999999999910105",,terminal_output 
+9356,18126691,"TERMINAL",0,0,"830222888111141414141414141414141227",,terminal_output +9357,18128703,"TERMINAL",0,0,"1024449:001:0030333333333333449",,terminal_output +9358,18130711,"TERMINAL",0,0,"246662225555555555556641",,terminal_output +9359,18132791,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9360,18134727,"TERMINAL",0,0,"6840102066699999999999920205",,terminal_output +9361,18136739,"TERMINAL",0,0,"840222888212151515151515151515151227",,terminal_output +9362,18138749,"TERMINAL",0,0,"202444101040333333333333449",,terminal_output +9363,18140757,"TERMINAL",0,0,"246662225555555555556651",,terminal_output +9364,18142827,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9365,18144777,"TERMINAL",0,0,"6850203066699999999999930305",,terminal_output +9366,18146794,"TERMINAL",0,0,"85022288831319:0140:011:015:018:018:018:018:018:016:01227",,terminal_output +9367,18148800,"TERMINAL",0,0,"302444202050333333333333449",,terminal_output +9368,18150812,"TERMINAL",0,0,"24666222555555555555665:01",,terminal_output +9369,18152816,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9370,18154829,"TERMINAL",0,0,"688:00304066699999999999940405",,terminal_output +9371,18156837,"TERMINAL",0,0,"810:00222888414111111111111111111111227",,terminal_output +9372,18158847,"TERMINAL",0,0,"40244430306:00333333333333449",,terminal_output +9373,18160868,"TERMINAL",0,0,"246662225555555555556611",,terminal_output +9374,18162863,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9375,18164871,"TERMINAL",0,0,"6810405066699999999999950505",,terminal_output +9376,18166891,"TERMINAL",0,0,"810222888515121212121212121212121227",,terminal_output +9377,18168908,"TERMINAL",0,0,"502444404010333333333333449",,terminal_output +9378,18170899,"TERMINAL",0,0,"246662225555555555556621",,terminal_output +9379,18172930,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9380,18174920,"TERMINAL",0,0,"6820506:0066699999999999930:003:005",,terminal_output +9381,18176929,"TERMINAL",0,0,"8202228885:018:0131313131313131313131227",,terminal_output +9382,18178948,"TERMINAL",0,0,"4:002444505020333333333333449",,terminal_output +9383,18180970,"TERMINAL",0,0,"246662225555555555556631",,terminal_output +9384,18182959,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9385,18184963,"TERMINAL",0,0,"683030:001066699999999999910105",,terminal_output +9386,18186975,"TERMINAL",0,0,"830222888111141414141414141414141227",,terminal_output +9387,18188993,"TERMINAL",0,0,"10244440:002:0030333333333333449",,terminal_output +9388,18191009,"TERMINAL",0,0,"246662225555555555556641",,terminal_output +9389,18193102,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9390,18195011,"TERMINAL",0,0,"6840102066699999999999920205",,terminal_output +9391,18197023,"TERMINAL",0,0,"840222888212151515151515151515151227",,terminal_output +9392,18199030,"TERMINAL",0,0,"202444101040333333333333449",,terminal_output +9393,18201043,"TERMINAL",0,0,"246662225555555555556651",,terminal_output +9394,18203141,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9395,18205093,"TERMINAL",0,0,"6850203066699999999999930305",,terminal_output +9396,18207069,"TERMINAL",0,0,"850222888313140:011:012:016:019:019:019:019:019:017:01227",,terminal_output +9397,18209086,"TERMINAL",0,0,"302444202050333333333333449",,terminal_output +9398,18211136,"TERMINAL",0,0,"24666222555555555555666:01",,terminal_output +9399,18213177,"TERMINAL",0,0,"46888444777777777777883",,terminal_output 
+9400,18215104,"TERMINAL",0,0,"689:00304066699999999999940405",,terminal_output +9401,18217115,"TERMINAL",0,0,"81:00222888414111111111111111111111227",,terminal_output +9402,18219126,"TERMINAL",0,0,"40244430307:00333333333333449",,terminal_output +9403,18221138,"TERMINAL",0,0,"246662225555555555556611",,terminal_output +9404,18223210,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9405,18225256,"TERMINAL",0,0,"6810405066699999999999950505",,terminal_output +9406,18227203,"TERMINAL",0,0,"810222888515121212121212121212121227",,terminal_output +9407,18229172,"TERMINAL",0,0,"502444404010333333333333449",,terminal_output +9408,18231197,"TERMINAL",0,0,"246662225555555555556621",,terminal_output +9409,18233248,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9410,18235266,"TERMINAL",0,0,"6820507:006669999999999991:004:005",,terminal_output +9411,18237239,"TERMINAL",0,0,"8202228886:019:0131313131313131313131227",,terminal_output +9412,18239218,"TERMINAL",0,0,"5:002444505020333333333333449",,terminal_output +9413,18241230,"TERMINAL",0,0,"246662225555555555556631",,terminal_output +9414,18243279,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9415,18245244,"TERMINAL",0,0,"68301:001066699999999999910105",,terminal_output +9416,18247253,"TERMINAL",0,0,"830222888111141414141414141414141227",,terminal_output +9417,18249264,"TERMINAL",0,0,"1024441:003:0030333333333333449",,terminal_output +9418,18251279,"TERMINAL",0,0,"246662225555555555556641",,terminal_output +9419,18253316,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9420,18255292,"TERMINAL",0,0,"6840102066699999999999920205",,terminal_output +9421,18257303,"TERMINAL",0,0,"840222888212151515151515151515151227",,terminal_output +9422,18259309,"TERMINAL",0,0,"202444101040333333333333449",,terminal_output +9423,18261407,"TERMINAL",0,0,"246662225555555555556651",,terminal_output +9424,18263352,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9425,18265371,"TERMINAL",0,0,"6850203066699999999999930305",,terminal_output +9426,18267346,"TERMINAL",0,0,"85022288831311:012:013:017:0150:0150:0150:0150:0150:018:01227",,terminal_output +9427,18269360,"TERMINAL",0,0,"302444202050333333333333449",,terminal_output +9428,18271368,"TERMINAL",0,0,"24666222555555555555667:01",,terminal_output +9429,18273374,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9430,18275392,"TERMINAL",0,0,"6830:00304066699999999999940405",,terminal_output +9431,18277393,"TERMINAL",0,0,"82:00222888414111111111111111111111227",,terminal_output +9432,18279401,"TERMINAL",0,0,"40244430308:00333333333333449",,terminal_output +9433,18281409,"TERMINAL",0,0,"246662225555555555556611",,terminal_output +9434,18283446,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9435,18285470,"TERMINAL",0,0,"6810405066699999999999950505",,terminal_output +9436,18287466,"TERMINAL",0,0,"810222888515121212121212121212121227",,terminal_output +9437,18289458,"TERMINAL",0,0,"502444404010333333333333449",,terminal_output +9438,18291456,"TERMINAL",0,0,"246662225555555555556621",,terminal_output +9439,18293463,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9440,18295504,"TERMINAL",0,0,"6820508:006669999999999992:005:005",,terminal_output +9441,18297482,"TERMINAL",0,0,"8202228887:0130:0131313131313131313131227",,terminal_output +9442,18299495,"TERMINAL",0,0,"6:002444505020333333333333449",,terminal_output +9443,18301520,"TERMINAL",0,0,"246662225555555555556631",,terminal_output 
+9444,18303592,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9445,18305540,"TERMINAL",0,0,"68302:001066699999999999910105",,terminal_output +9446,18307524,"TERMINAL",0,0,"830222888111141414141414141414141227",,terminal_output +9447,18309530,"TERMINAL",0,0,"1024442:004:0030333333333333449",,terminal_output +9448,18311600,"TERMINAL",0,0,"246662225555555555556641",,terminal_output +9449,18313593,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9450,18315576,"TERMINAL",0,0,"6840102066699999999999920205",,terminal_output +9451,18318678,"TERMINAL",0,0,"840222888212151515151515151515151227",,terminal_output +9452,18319577,"TERMINAL",0,0,"202444101040333333333333449",,terminal_output +9453,18321586,"TERMINAL",0,0,"257773336666666666667752",,terminal_output +9454,18323613,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9455,18325602,"TERMINAL",0,0,"7951213177730302:003:004:008:001:001:001:001:001:009:0031316",,terminal_output +9456,18327667,"TERMINAL",0,0,"951333999222222222222338",,terminal_output +9457,18329677,"TERMINAL",0,0,"313555212151444444444444558:00",,terminal_output +9458,18331629,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9459,18333639,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9460,18335659,"TERMINAL",0,0,"791:01314177740401010101010101010101041416",,terminal_output +9461,18337662,"TERMINAL",0,0,"93:01333999222222222222338",,terminal_output +9462,18339683,"TERMINAL",0,0,"41355531319:014444444444445510",,terminal_output +9463,18341676,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9464,18343685,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9465,18345710,"TERMINAL",0,0,"7911415177750502020202020202020202051516",,terminal_output +9466,18347728,"TERMINAL",0,0,"911333999222222222222338",,terminal_output +9467,18349746,"TERMINAL",0,0,"5135554141114444444444445520",,terminal_output +9468,18351722,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9469,18353743,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9470,18355739,"TERMINAL",0,0,"7921519:017778:001:00303030303030303030303:016:016",,terminal_output +9471,18357765,"TERMINAL",0,0,"921333999222222222222338",,terminal_output +9472,18359758,"TERMINAL",0,0,"7:0135555151214444444444445530",,terminal_output +9473,18361770,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9474,18363777,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9475,18365786,"TERMINAL",0,0,"79313:011177710104040404040404040404011116",,terminal_output +9476,18367796,"TERMINAL",0,0,"931333999222222222222338",,terminal_output +9477,18369840,"TERMINAL",0,0,"1135553:015:01314444444444445540",,terminal_output +9478,18371816,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9479,18373822,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9480,18375836,"TERMINAL",0,0,"7941112177720205050505050505050505021216",,terminal_output +9481,18377839,"TERMINAL",0,0,"941333999222222222222338",,terminal_output +9482,18379849,"TERMINAL",0,0,"2135551111414444444444445550",,terminal_output +9483,18381858,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9484,18383880,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9485,18385876,"TERMINAL",0,0,"7951213177730303:004:005:009:002:002:002:002:002:0030:0031316",,terminal_output +9486,18387885,"TERMINAL",0,0,"951333999222222222222338",,terminal_output +9487,18389894,"TERMINAL",0,0,"313555212151444444444444559:00",,terminal_output 
+9488,18391907,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9489,18393913,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9490,18395923,"TERMINAL",0,0,"792:01314177740401010101010101010101041416",,terminal_output +9491,18397932,"TERMINAL",0,0,"94:01333999222222222222338",,terminal_output +9492,18399940,"TERMINAL",0,0,"413555313120:014444444444445510",,terminal_output +9493,18401951,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9494,18403959,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9495,18405967,"TERMINAL",0,0,"7911415177750502020202020202020202051516",,terminal_output +9496,18407975,"TERMINAL",0,0,"911333999222222222222338",,terminal_output +9497,18409985,"TERMINAL",0,0,"5135554141114444444444445520",,terminal_output +9498,18411994,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9499,18414006,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9500,18416013,"TERMINAL",0,0,"79215140:017779:002:00303030303030303030304:017:016",,terminal_output +9501,18418023,"TERMINAL",0,0,"921333999222222222222338",,terminal_output +9502,18420031,"TERMINAL",0,0,"8:0135555151214444444444445530",,terminal_output +9503,18422039,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9504,18424050,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9505,18426066,"TERMINAL",0,0,"79314:011177710104040404040404040404011116",,terminal_output +9506,18428070,"TERMINAL",0,0,"931333999222222222222338",,terminal_output +9507,18430078,"TERMINAL",0,0,"1135554:016:01314444444444445540",,terminal_output +9508,18432086,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9509,18434095,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9510,18436106,"TERMINAL",0,0,"7941112177720205050505050505050505021216",,terminal_output +9511,18438111,"TERMINAL",0,0,"941333999222222222222338",,terminal_output +9512,18440123,"TERMINAL",0,0,"2135551111414444444444445550",,terminal_output +9513,18442142,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9514,18444141,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9515,18446153,"TERMINAL",0,0,"7951213177730304:005:006:0050:003:003:003:003:003:001:0031316",,terminal_output +9516,18448165,"TERMINAL",0,0,"951333999222222222222338",,terminal_output +9517,18450176,"TERMINAL",0,0,"3135552121514444444444445540:00",,terminal_output +9518,18452181,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9519,18454193,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9520,18456204,"TERMINAL",0,0,"793:01314177740401010101010101010101041416",,terminal_output +9521,18458299,"TERMINAL",0,0,"95:01333999222222222222338",,terminal_output +9522,18460266,"TERMINAL",0,0,"41355531311:014444444444445510",,terminal_output +9523,18462233,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9524,18464239,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9525,18466254,"TERMINAL",0,0,"7911415177750502020202020202020202051516",,terminal_output +9526,18468282,"TERMINAL",0,0,"911333999222222222222338",,terminal_output +9527,18470298,"TERMINAL",0,0,"5135554141114444444444445520",,terminal_output +9528,18472347,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9529,18474315,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9530,18476338,"TERMINAL",0,0,"7921511:0177730:003:00303030303030303030305:018:016",,terminal_output +9531,18478301,"TERMINAL",0,0,"921333999222222222222338",,terminal_output 
+9532,18480318,"TERMINAL",0,0,"9:0135555151214444444444445530",,terminal_output +9533,18482402,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9534,18484330,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9535,18486335,"TERMINAL",0,0,"79315:011177710104040404040404040404011116",,terminal_output +9536,18488346,"TERMINAL",0,0,"931333999222222222222338",,terminal_output +9537,18490372,"TERMINAL",0,0,"1135555:017:01314444444444445540",,terminal_output +9538,18492365,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9539,18494377,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9540,18496392,"TERMINAL",0,0,"7941112177720205050505050505050505021216",,terminal_output +9541,18498394,"TERMINAL",0,0,"941333999222222222222338",,terminal_output +9542,18500508,"TERMINAL",0,0,"2135551111414444444444445550",,terminal_output +9543,18502412,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9544,18504506,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9545,18506430,"TERMINAL",0,0,"7951213177730305:006:007:001:004:004:004:004:004:002:0031316",,terminal_output +9546,18508442,"TERMINAL",0,0,"951333999222222222222338",,terminal_output +9547,18510462,"TERMINAL",0,0,"313555212151444444444444551:00",,terminal_output +9548,18512458,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9549,18514470,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9550,18516491,"TERMINAL",0,0,"794:01314177740401010101010101010101041416",,terminal_output +9551,18518490,"TERMINAL",0,0,"96:01333999222222222222338",,terminal_output +9552,18520507,"TERMINAL",0,0,"41355531312:014444444444445510",,terminal_output +9553,18522508,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9554,18524528,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9555,18526548,"TERMINAL",0,0,"7911415177750502020202020202020202051516",,terminal_output +9556,18528535,"TERMINAL",0,0,"911333999222222222222338",,terminal_output +9557,18530541,"TERMINAL",0,0,"5135554141114444444444445520",,terminal_output +9558,18532561,"TERMINAL",0,0,"35777333666666666666772",,terminal_output +9559,18534561,"TERMINAL",0,0,"57999555888888888888994",,terminal_output +9560,18536572,"TERMINAL",0,0,"7921512:017771:004:00303030303030303030306:019:016",,terminal_output +9561,18538579,"TERMINAL",0,0,"921333999222222222222338",,terminal_output +9562,18540598,"TERMINAL",0,0,"30:0146665252225555555555556631",,terminal_output +9563,18542593,"TERMINAL",0,0,"46888444777777777777883",,terminal_output +9564,18544608,"TERMINAL",0,0,"68306:001066699999999999910105",,terminal_output +9565,18546621,"TERMINAL",0,0,"830222888111141414141414141414141227",,terminal_output +9566,18548626,"TERMINAL",0,0,"1024446:008:0030333333333333449",,terminal_output +9567,18550636,"TERMINAL",0,0,"\r2466603258:0253907:40 2:22:32278:54:0818:58:57 4:31:15635:1554:15544:27 4:45:451436:45327:4523851:451754:4585495520561529972 alfred.ngu6:35:347:57:27 5:32:45130031:53:391:53:56 11:36:1610:3009:16529993 nishant.k2 3843T18:29:1801:48:31 21:41:41[002,007]",,terminal_output +9568,18552735,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9569,18554679,"TERMINAL",0,0,"684010206699999999999920205",,terminal_output +9570,18556662,"TERMINAL",0,0,"84022288212151515151515151515151227",,terminal_output +9571,18558674,"TERMINAL",0,0,"2024441040333333333333449",,terminal_output +9572,18560719,"TERMINAL",0,0,"24666225555555555556651",,terminal_output +9573,18562768,"TERMINAL",0,0,"4688844777777777777883",,terminal_output 
+9574,18564710,"TERMINAL",0,0,"685020306699999999999930305",,terminal_output +9575,18566712,"TERMINAL",0,0,"8502228831316:017:018:012:015:015:015:015:015:013:01227",,terminal_output +9576,18568721,"TERMINAL",0,0,"3024442050333333333333449",,terminal_output +9577,18570756,"TERMINAL",0,0,"2466622555555555555662:01",,terminal_output +9578,18572734,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9579,18574857,"TERMINAL",0,0,"685:0030406699999999999940405",,terminal_output +9580,18576753,"TERMINAL",0,0,"87:0022288414111111111111111111111227",,terminal_output +9581,18578764,"TERMINAL",0,0,"402444303:00333333333333449",,terminal_output +9582,18580784,"TERMINAL",0,0,"24666225555555555556611",,terminal_output +9583,18582837,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9584,18584790,"TERMINAL",0,0,"681040506699999999999950505",,terminal_output +9585,18586798,"TERMINAL",0,0,"81022288515121212121212121212121227",,terminal_output +9586,18588817,"TERMINAL",0,0,"5024444010333333333333449",,terminal_output +9587,18590827,"TERMINAL",0,0,"24666225555555555556621",,terminal_output +9588,18592873,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9589,18594834,"TERMINAL",0,0,"6820503:00669999999999997:0040:005",,terminal_output +9590,18596842,"TERMINAL",0,0,"820222882:015:0131313131313131313131227",,terminal_output +9591,18598860,"TERMINAL",0,0,"1:0024445020333333333333449",,terminal_output +9592,18600862,"TERMINAL",0,0,"24666225555555555556631",,terminal_output +9593,18602868,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9594,18604879,"TERMINAL",0,0,"68307:00106699999999999910105",,terminal_output +9595,18606891,"TERMINAL",0,0,"83022288111141414141414141414141227",,terminal_output +9596,18608894,"TERMINAL",0,0,"1024449:0030333333333333449",,terminal_output +9597,18610912,"TERMINAL",0,0,"24666225555555555556641",,terminal_output +9598,18612948,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9599,18614921,"TERMINAL",0,0,"684010206699999999999920205",,terminal_output +9600,18616933,"TERMINAL",0,0,"84022288212151515151515151515151227",,terminal_output +9601,18618939,"TERMINAL",0,0,"2024441040333333333333449",,terminal_output +9602,18620950,"TERMINAL",0,0,"24666225555555555556651",,terminal_output +9603,18622982,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9604,18624968,"TERMINAL",0,0,"685020306699999999999930305",,terminal_output +9605,18626977,"TERMINAL",0,0,"8502228831317:018:019:013:016:016:016:016:016:014:01227",,terminal_output +9606,18628984,"TERMINAL",0,0,"3024442050333333333333449",,terminal_output +9607,18630998,"TERMINAL",0,0,"2466622555555555555663:01",,terminal_output +9608,18633031,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9609,18635019,"TERMINAL",0,0,"686:0030406699999999999940405",,terminal_output +9610,18637104,"TERMINAL",0,0,"88:0022288414111111111111111111111227",,terminal_output +9611,18639038,"TERMINAL",0,0,"402444304:00333333333333449",,terminal_output +9612,18641045,"TERMINAL",0,0,"24666225555555555556611",,terminal_output +9613,18643055,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9614,18645066,"TERMINAL",0,0,"681040506699999999999950505",,terminal_output +9615,18647073,"TERMINAL",0,0,"81022288515121212121212121212121227",,terminal_output +9616,18649086,"TERMINAL",0,0,"5024444010333333333333449",,terminal_output +9617,18651094,"TERMINAL",0,0,"24666225555555555556621",,terminal_output +9618,18653104,"TERMINAL",0,0,"4688844777777777777883",,terminal_output 
+9619,18655112,"TERMINAL",0,0,"6820504:00669999999999998:001:005",,terminal_output +9620,18657125,"TERMINAL",0,0,"820222883:016:0131313131313131313131227",,terminal_output +9621,18659132,"TERMINAL",0,0,"2:0024445020333333333333449",,terminal_output +9622,18661140,"TERMINAL",0,0,"24666225555555555556631",,terminal_output +9623,18663149,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9624,18665157,"TERMINAL",0,0,"68308:00106699999999999910105",,terminal_output +9625,18667167,"TERMINAL",0,0,"83022288111141414141414141414141227",,terminal_output +9626,18669174,"TERMINAL",0,0,"1024442:00:0030333333333333449",,terminal_output +9627,18671185,"TERMINAL",0,0,"24666225555555555556641",,terminal_output +9628,18673194,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9629,18675203,"TERMINAL",0,0,"684010206699999999999920205",,terminal_output +9630,18677213,"TERMINAL",0,0,"84022288212151515151515151515151227",,terminal_output +9631,18679222,"TERMINAL",0,0,"2024441040333333333333449",,terminal_output +9632,18681254,"TERMINAL",0,0,"24666225555555555556651",,terminal_output +9633,18683292,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9634,18685254,"TERMINAL",0,0,"685020306699999999999930305",,terminal_output +9635,18687286,"TERMINAL",0,0,"8502228831318:019:0150:014:017:017:017:017:017:015:01227",,terminal_output +9636,18689270,"TERMINAL",0,0,"3024442050333333333333449",,terminal_output +9637,18691287,"TERMINAL",0,0,"2466622555555555555664:01",,terminal_output +9638,18693291,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9639,18695301,"TERMINAL",0,0,"687:0030406699999999999940405",,terminal_output +9640,18697308,"TERMINAL",0,0,"89:0022288414111111111111111111111227",,terminal_output +9641,18699314,"TERMINAL",0,0,"402444305:00333333333333449",,terminal_output +9642,18701325,"TERMINAL",0,0,"24666225555555555556611",,terminal_output +9643,18703365,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9644,18705379,"TERMINAL",0,0,"681040506699999999999950505",,terminal_output +9645,18707460,"TERMINAL",0,0,"81022288515121212121212121212121227",,terminal_output +9646,18709363,"TERMINAL",0,0,"5024444010333333333333449",,terminal_output +9647,18711456,"TERMINAL",0,0,"24666225555555555556621",,terminal_output +9648,18713398,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9649,18715445,"TERMINAL",0,0,"6820505:00669999999999999:002:005",,terminal_output +9650,18717402,"TERMINAL",0,0,"820222884:017:0131313131313131313131227",,terminal_output +9651,18719410,"TERMINAL",0,0,"3:0024445020333333333333449",,terminal_output +9652,18721416,"TERMINAL",0,0,"24666225555555555556631",,terminal_output +9653,18723435,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9654,18725452,"TERMINAL",0,0,"68309:00106699999999999910105",,terminal_output +9655,18727534,"TERMINAL",0,0,"83022288111141414141414141414141227",,terminal_output +9656,18729485,"TERMINAL",0,0,"1024441:0030333333333333449",,terminal_output +9657,18731468,"TERMINAL",0,0,"24666225555555555556641",,terminal_output +9658,18733555,"TERMINAL",0,0,"4688844777777777777883",,terminal_output +9659,18735485,"TERMINAL",0,0,"684010206699999999999920205",,terminal_output +9660,18737494,"TERMINAL",0,0,"84022288212151515151515151515151227",,terminal_output +9661,18739509,"TERMINAL",0,0,"2024441040333333333333449",,terminal_output +9662,18741559,"TERMINAL",0,0,"24666225555555555556651",,terminal_output +9663,18743607,"TERMINAL",0,0,"4688844777777777777883",,terminal_output 
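As a hedged reconstruction only — the recording never captures the command itself — redraw frames like those summarized above (and the three closing rows that follow) are what a self-refreshing monitor produces once its escape sequences are dropped. A minimal sketch of the kind of invocation consistent with the legible fragments; the interval and format flags are assumptions, not recovered data:

# Assumed example: redraw a Slurm job listing every 2 s; each redraw is
# captured by the recorder as one garbled "terminal_output" row once the
# terminal escape sequences are stripped.
watch -n 2 'squeue --format="%.8i %.20j %.12u %.8T %.11M %R"'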
+9664,18745533,"TERMINAL",0,0,"685020306699999999999930305",,terminal_output +9665,18747544,"TERMINAL",0,0,"8502228831319:0150:011:015:018:018:018:018:018:016:01227",,terminal_output +9666,18749564,"TERMINAL",0,0,"3024442050333333333333449",,terminal_output diff --git a/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-b994f4be-9bbc-4edd-a199-1b569abe5b6d1764494257169-2025_11_30-10.17.44.938/source.csv b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-b994f4be-9bbc-4edd-a199-1b569abe5b6d1764494257169-2025_11_30-10.17.44.938/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..9c15102dcd266b1b1dedc7087c09e77ddc7e9ca3 --- /dev/null +++ b/1f15334ab7e6820c9fda17c961659882ef9853cc80f7356b9a9b22f286fd7389/crowd-code-b994f4be-9bbc-4edd-a199-1b569abe5b6d1764494257169-2025_11_30-10.17.44.938/source.csv @@ -0,0 +1,2429 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,2,"package.json",0,0,"{\n ""name"": ""crowd-pilot"",\n ""displayName"": ""crowd-pilot-extension"",\n ""description"": ""Teaching language models to code like humans."",\n ""publisher"": ""p(doom)"",\n ""version"": ""0.0.1"",\n ""engines"": {\n ""vscode"": ""^1.99.3""\n },\n ""categories"": [\n ""Other""\n ],\n ""activationEvents"": [\n ""onStartupFinished""\n ],\n ""main"": ""./out/extension.js"",\n ""contributes"": {\n ""commands"": [\n {\n ""command"": ""crowd-pilot.testRun"",\n ""title"": ""Test Run""\n },\n {\n ""command"": ""crowd-pilot.hideUi"",\n ""title"": ""crowd-pilot: Hide Preview""\n },\n {\n ""command"": ""crowd-pilot.sglangTest"",\n ""title"": ""crowd-pilot: Test SGLang""\n },\n {\n ""command"": ""crowd-pilot.modelRun"",\n ""title"": ""crowd-pilot: Model Plan & Run""\n }\n ],\n ""keybindings"": [\n {\n ""command"": ""crowd-pilot.modelRun"",\n ""key"": ""tab"",\n ""mac"": ""tab"",\n ""when"": ""editorTextFocus || terminalFocus""\n },\n {\n ""command"": ""crowd-pilot.modelRun"",\n ""key"": ""tab"",\n ""mac"": ""tab"",\n ""when"": ""inQuickOpen && crowdPilot.uiVisible""\n },\n {\n ""command"": ""crowd-pilot.hideUi"",\n ""key"": ""escape"",\n ""mac"": ""escape"",\n ""when"": ""crowdPilot.uiVisible""\n }\n ]\n },\n ""scripts"": {\n ""vscode:prepublish"": ""npm run compile"",\n ""compile"": ""tsc -p ./"",\n ""watch"": ""tsc -watch -p ./"",\n ""pretest"": ""npm run compile && npm run lint"",\n ""lint"": ""eslint src"",\n ""test"": ""vscode-test""\n },\n ""devDependencies"": {\n ""@types/vscode"": ""^1.105.0"",\n ""@types/mocha"": ""^10.0.10"",\n ""@types/node"": ""22.x"",\n ""@typescript-eslint/eslint-plugin"": ""^8.45.0"",\n ""@typescript-eslint/parser"": ""^8.45.0"",\n ""eslint"": ""^9.36.0"",\n ""typescript"": ""^5.9.3"",\n ""@vscode/test-cli"": ""^0.0.11"",\n ""@vscode/test-electron"": ""^2.5.2""\n }\n}\n",json,tab +2,713,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:17:44 AM [info] Activating crowd-code\n10:17:44 AM [info] Recording started\n10:17:44 AM [info] Initializing git provider using file system watchers...\n10:17:45 AM [info] Git repository found\n10:17:45 AM [info] Git provider initialized successfully\n10:17:45 AM [info] Initial git state: [object Object]\n",Log,tab +3,1868,"package.json",0,0,"",json,tab +4,2063,"package.json",1853,0,"",json,selection_command +5,2140,"package.json",1849,0,"",json,selection_command +6,2496,"package.json",0,0,"",json,selection_command +7,4384,"package-lock.json",0,0,"{\n ""name"": ""crowd-pilot"",\n ""version"": ""0.0.1"",\n ""lockfileVersion"": 3,\n ""requires"": 
true,\n ""packages"": {\n """": {\n ""name"": ""crowd-pilot"",\n ""version"": ""0.0.1"",\n ""devDependencies"": {\n ""@types/mocha"": ""^10.0.10"",\n ""@types/node"": ""22.x"",\n ""@types/vscode"": ""^1.105.0"",\n ""@typescript-eslint/eslint-plugin"": ""^8.45.0"",\n ""@typescript-eslint/parser"": ""^8.45.0"",\n ""@vscode/test-cli"": ""^0.0.11"",\n ""@vscode/test-electron"": ""^2.5.2"",\n ""eslint"": ""^9.36.0"",\n ""typescript"": ""^5.9.3""\n },\n ""engines"": {\n ""vscode"": ""^1.99.3""\n }\n },\n ""node_modules/@bcoe/v8-coverage"": {\n ""version"": ""0.2.3"",\n ""resolved"": ""https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz"",\n ""integrity"": ""sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@eslint-community/eslint-utils"": {\n ""version"": ""4.9.0"",\n ""resolved"": ""https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz"",\n ""integrity"": ""sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""eslint-visitor-keys"": ""^3.4.3""\n },\n ""engines"": {\n ""node"": ""^12.22.0 || ^14.17.0 || >=16.0.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n },\n ""peerDependencies"": {\n ""eslint"": ""^6.0.0 || ^7.0.0 || >=8.0.0""\n }\n },\n ""node_modules/@eslint-community/regexpp"": {\n ""version"": ""4.12.2"",\n ""resolved"": ""https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz"",\n ""integrity"": ""sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": ""^12.0.0 || ^14.0.0 || >=16.0.0""\n }\n },\n ""node_modules/@eslint/config-array"": {\n ""version"": ""0.21.1"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz"",\n ""integrity"": ""sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""dependencies"": {\n ""@eslint/object-schema"": ""^2.1.7"",\n ""debug"": ""^4.3.1"",\n ""minimatch"": ""^3.1.2""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/@eslint/config-array/node_modules/brace-expansion"": {\n ""version"": ""1.1.12"",\n ""resolved"": ""https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz"",\n ""integrity"": ""sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""balanced-match"": ""^1.0.0"",\n ""concat-map"": ""0.0.1""\n }\n },\n ""node_modules/@eslint/config-array/node_modules/minimatch"": {\n ""version"": ""3.1.2"",\n ""resolved"": ""https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz"",\n ""integrity"": ""sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""brace-expansion"": ""^1.1.7""\n },\n ""engines"": {\n ""node"": ""*""\n }\n },\n ""node_modules/@eslint/config-helpers"": {\n ""version"": ""0.4.2"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz"",\n ""integrity"": ""sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw=="",\n 
""dev"": true,\n ""license"": ""Apache-2.0"",\n ""dependencies"": {\n ""@eslint/core"": ""^0.17.0""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/@eslint/core"": {\n ""version"": ""0.17.0"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz"",\n ""integrity"": ""sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""dependencies"": {\n ""@types/json-schema"": ""^7.0.15""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/@eslint/eslintrc"": {\n ""version"": ""3.3.1"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz"",\n ""integrity"": ""sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ajv"": ""^6.12.4"",\n ""debug"": ""^4.3.2"",\n ""espree"": ""^10.0.1"",\n ""globals"": ""^14.0.0"",\n ""ignore"": ""^5.2.0"",\n ""import-fresh"": ""^3.2.1"",\n ""js-yaml"": ""^4.1.0"",\n ""minimatch"": ""^3.1.2"",\n ""strip-json-comments"": ""^3.1.1""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/@eslint/eslintrc/node_modules/brace-expansion"": {\n ""version"": ""1.1.12"",\n ""resolved"": ""https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz"",\n ""integrity"": ""sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""balanced-match"": ""^1.0.0"",\n ""concat-map"": ""0.0.1""\n }\n },\n ""node_modules/@eslint/eslintrc/node_modules/ignore"": {\n ""version"": ""5.3.2"",\n ""resolved"": ""https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz"",\n ""integrity"": ""sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 4""\n }\n },\n ""node_modules/@eslint/eslintrc/node_modules/minimatch"": {\n ""version"": ""3.1.2"",\n ""resolved"": ""https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz"",\n ""integrity"": ""sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""brace-expansion"": ""^1.1.7""\n },\n ""engines"": {\n ""node"": ""*""\n }\n },\n ""node_modules/@eslint/js"": {\n ""version"": ""9.39.1"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/js/-/js-9.39.1.tgz"",\n ""integrity"": ""sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://eslint.org/donate""\n }\n },\n ""node_modules/@eslint/object-schema"": {\n ""version"": ""2.1.7"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz"",\n ""integrity"": ""sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/@eslint/plugin-kit"": {\n ""version"": ""0.4.1"",\n ""resolved"": 
""https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz"",\n ""integrity"": ""sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""dependencies"": {\n ""@eslint/core"": ""^0.17.0"",\n ""levn"": ""^0.4.1""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/@humanfs/core"": {\n ""version"": ""0.19.1"",\n ""resolved"": ""https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz"",\n ""integrity"": ""sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": "">=18.18.0""\n }\n },\n ""node_modules/@humanfs/node"": {\n ""version"": ""0.16.7"",\n ""resolved"": ""https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz"",\n ""integrity"": ""sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""dependencies"": {\n ""@humanfs/core"": ""^0.19.1"",\n ""@humanwhocodes/retry"": ""^0.4.0""\n },\n ""engines"": {\n ""node"": "">=18.18.0""\n }\n },\n ""node_modules/@humanwhocodes/module-importer"": {\n ""version"": ""1.0.1"",\n ""resolved"": ""https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz"",\n ""integrity"": ""sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": "">=12.22""\n },\n ""funding"": {\n ""type"": ""github"",\n ""url"": ""https://github.com/sponsors/nzakas""\n }\n },\n ""node_modules/@humanwhocodes/retry"": {\n ""version"": ""0.4.3"",\n ""resolved"": ""https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz"",\n ""integrity"": ""sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": "">=18.18""\n },\n ""funding"": {\n ""type"": ""github"",\n ""url"": ""https://github.com/sponsors/nzakas""\n }\n },\n ""node_modules/@isaacs/cliui"": {\n ""version"": ""8.0.2"",\n ""resolved"": ""https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz"",\n ""integrity"": ""sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""string-width"": ""^5.1.2"",\n ""string-width-cjs"": ""npm:string-width@^4.2.0"",\n ""strip-ansi"": ""^7.0.1"",\n ""strip-ansi-cjs"": ""npm:strip-ansi@^6.0.1"",\n ""wrap-ansi"": ""^8.1.0"",\n ""wrap-ansi-cjs"": ""npm:wrap-ansi@^7.0.0""\n },\n ""engines"": {\n ""node"": "">=12""\n }\n },\n ""node_modules/@istanbuljs/schema"": {\n ""version"": ""0.1.3"",\n ""resolved"": ""https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz"",\n ""integrity"": ""sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/@jridgewell/resolve-uri"": {\n ""version"": ""3.1.2"",\n ""resolved"": ""https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz"",\n ""integrity"": ""sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6.0.0""\n }\n },\n 
""node_modules/@jridgewell/sourcemap-codec"": {\n ""version"": ""1.5.5"",\n ""resolved"": ""https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz"",\n ""integrity"": ""sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@jridgewell/trace-mapping"": {\n ""version"": ""0.3.31"",\n ""resolved"": ""https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz"",\n ""integrity"": ""sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@jridgewell/resolve-uri"": ""^3.1.0"",\n ""@jridgewell/sourcemap-codec"": ""^1.4.14""\n }\n },\n ""node_modules/@nodelib/fs.scandir"": {\n ""version"": ""2.1.5"",\n ""resolved"": ""https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz"",\n ""integrity"": ""sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@nodelib/fs.stat"": ""2.0.5"",\n ""run-parallel"": ""^1.1.9""\n },\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/@nodelib/fs.stat"": {\n ""version"": ""2.0.5"",\n ""resolved"": ""https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz"",\n ""integrity"": ""sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/@nodelib/fs.walk"": {\n ""version"": ""1.2.8"",\n ""resolved"": ""https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz"",\n ""integrity"": ""sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@nodelib/fs.scandir"": ""2.1.5"",\n ""fastq"": ""^1.6.0""\n },\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/@pkgjs/parseargs"": {\n ""version"": ""0.11.0"",\n ""resolved"": ""https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz"",\n ""integrity"": ""sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""optional"": true,\n ""engines"": {\n ""node"": "">=14""\n }\n },\n ""node_modules/@types/estree"": {\n ""version"": ""1.0.8"",\n ""resolved"": ""https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz"",\n ""integrity"": ""sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@types/istanbul-lib-coverage"": {\n ""version"": ""2.0.6"",\n ""resolved"": ""https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz"",\n ""integrity"": ""sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@types/json-schema"": {\n ""version"": ""7.0.15"",\n ""resolved"": ""https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz"",\n ""integrity"": ""sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@types/mocha"": {\n ""version"": ""10.0.10"",\n ""resolved"": 
""https://registry.npmjs.org/@types/mocha/-/mocha-10.0.10.tgz"",\n ""integrity"": ""sha512-xPyYSz1cMPnJQhl0CLMH68j3gprKZaTjG3s5Vi+fDgx+uhG9NOXwbVt52eFS8ECyXhyKcjDLCBEqBExKuiZb7Q=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@types/node"": {\n ""version"": ""22.19.0"",\n ""resolved"": ""https://registry.npmjs.org/@types/node/-/node-22.19.0.tgz"",\n ""integrity"": ""sha512-xpr/lmLPQEj+TUnHmR+Ab91/glhJvsqcjB+yY0Ix9GO70H6Lb4FHH5GeqdOE5btAx7eIMwuHkp4H2MSkLcqWbA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""undici-types"": ""~6.21.0""\n }\n },\n ""node_modules/@types/vscode"": {\n ""version"": ""1.105.0"",\n ""resolved"": ""https://registry.npmjs.org/@types/vscode/-/vscode-1.105.0.tgz"",\n ""integrity"": ""sha512-Lotk3CTFlGZN8ray4VxJE7axIyLZZETQJVWi/lYoUVQuqfRxlQhVOfoejsD2V3dVXPSbS15ov5ZyowMAzgUqcw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@typescript-eslint/eslint-plugin"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.46.3.tgz"",\n ""integrity"": ""sha512-sbaQ27XBUopBkRiuY/P9sWGOWUW4rl8fDoHIUmLpZd8uldsTyB4/Zg6bWTegPoTLnKj9Hqgn3QD6cjPNB32Odw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@eslint-community/regexpp"": ""^4.10.0"",\n ""@typescript-eslint/scope-manager"": ""8.46.3"",\n ""@typescript-eslint/type-utils"": ""8.46.3"",\n ""@typescript-eslint/utils"": ""8.46.3"",\n ""@typescript-eslint/visitor-keys"": ""8.46.3"",\n ""graphemer"": ""^1.4.0"",\n ""ignore"": ""^7.0.0"",\n ""natural-compare"": ""^1.4.0"",\n ""ts-api-utils"": ""^2.1.0""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""@typescript-eslint/parser"": ""^8.46.3"",\n ""eslint"": ""^8.57.0 || ^9.0.0"",\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/parser"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.46.3.tgz"",\n ""integrity"": ""sha512-6m1I5RmHBGTnUGS113G04DMu3CpSdxCAU/UvtjNWL4Nuf3MW9tQhiJqRlHzChIkhy6kZSAQmc+I1bcGjE3yNKg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/scope-manager"": ""8.46.3"",\n ""@typescript-eslint/types"": ""8.46.3"",\n ""@typescript-eslint/typescript-estree"": ""8.46.3"",\n ""@typescript-eslint/visitor-keys"": ""8.46.3"",\n ""debug"": ""^4.3.4""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""eslint"": ""^8.57.0 || ^9.0.0"",\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/project-service"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.46.3.tgz"",\n ""integrity"": ""sha512-Fz8yFXsp2wDFeUElO88S9n4w1I4CWDTXDqDr9gYvZgUpwXQqmZBr9+NTTql5R3J7+hrJZPdpiWaB9VNhAKYLuQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/tsconfig-utils"": ""^8.46.3"",\n ""@typescript-eslint/types"": ""^8.46.3"",\n ""debug"": ""^4.3.4""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n 
""peerDependencies"": {\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/scope-manager"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.46.3.tgz"",\n ""integrity"": ""sha512-FCi7Y1zgrmxp3DfWfr+3m9ansUUFoy8dkEdeQSgA9gbm8DaHYvZCdkFRQrtKiedFf3Ha6VmoqoAaP68+i+22kg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/types"": ""8.46.3"",\n ""@typescript-eslint/visitor-keys"": ""8.46.3""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n }\n },\n ""node_modules/@typescript-eslint/tsconfig-utils"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.46.3.tgz"",\n ""integrity"": ""sha512-GLupljMniHNIROP0zE7nCcybptolcH8QZfXOpCfhQDAdwJ/ZTlcaBOYebSOZotpti/3HrHSw7D3PZm75gYFsOA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/type-utils"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.46.3.tgz"",\n ""integrity"": ""sha512-ZPCADbr+qfz3aiTTYNNkCbUt+cjNwI/5McyANNrFBpVxPt7GqpEYz5ZfdwuFyGUnJ9FdDXbGODUu6iRCI6XRXw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/types"": ""8.46.3"",\n ""@typescript-eslint/typescript-estree"": ""8.46.3"",\n ""@typescript-eslint/utils"": ""8.46.3"",\n ""debug"": ""^4.3.4"",\n ""ts-api-utils"": ""^2.1.0""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""eslint"": ""^8.57.0 || ^9.0.0"",\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/types"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.3.tgz"",\n ""integrity"": ""sha512-G7Ok9WN/ggW7e/tOf8TQYMaxgID3Iujn231hfi0Pc7ZheztIJVpO44ekY00b7akqc6nZcvregk0Jpah3kep6hA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n }\n },\n ""node_modules/@typescript-eslint/typescript-estree"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.46.3.tgz"",\n ""integrity"": ""sha512-f/NvtRjOm80BtNM5OQtlaBdM5BRFUv7gf381j9wygDNL+qOYSNOgtQ/DCndiYi80iIOv76QqaTmp4fa9hwI0OA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/project-service"": ""8.46.3"",\n ""@typescript-eslint/tsconfig-utils"": ""8.46.3"",\n ""@typescript-eslint/types"": ""8.46.3"",\n ""@typescript-eslint/visitor-keys"": ""8.46.3"",\n ""debug"": ""^4.3.4"",\n ""fast-glob"": ""^3.3.2"",\n ""is-glob"": ""^4.0.3"",\n ""minimatch"": ""^9.0.4"",\n ""semver"": ""^7.6.0"",\n ""ts-api-utils"": ""^2.1.0""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n 
""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/utils"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.46.3.tgz"",\n ""integrity"": ""sha512-VXw7qmdkucEx9WkmR3ld/u6VhRyKeiF1uxWwCy/iuNfokjJ7VhsgLSOTjsol8BunSw190zABzpwdNsze2Kpo4g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@eslint-community/eslint-utils"": ""^4.7.0"",\n ""@typescript-eslint/scope-manager"": ""8.46.3"",\n ""@typescript-eslint/types"": ""8.46.3"",\n ""@typescript-eslint/typescript-estree"": ""8.46.3""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""eslint"": ""^8.57.0 || ^9.0.0"",\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/visitor-keys"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.46.3.tgz"",\n ""integrity"": ""sha512-uk574k8IU0rOF/AjniX8qbLSGURJVUCeM5e4MIMKBFFi8weeiLrG1fyQejyLXQpRZbU/1BuQasleV/RfHC3hHg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/types"": ""8.46.3"",\n ""eslint-visitor-keys"": ""^4.2.1""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n }\n },\n ""node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys"": {\n ""version"": ""4.2.1"",\n ""resolved"": ""https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz"",\n ""integrity"": ""sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/@vscode/test-cli"": {\n ""version"": ""0.0.11"",\n ""resolved"": ""https://registry.npmjs.org/@vscode/test-cli/-/test-cli-0.0.11.tgz"",\n ""integrity"": ""sha512-qO332yvzFqGhBMJrp6TdwbIydiHgCtxXc2Nl6M58mbH/Z+0CyLR76Jzv4YWPEthhrARprzCRJUqzFvTHFhTj7Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@types/mocha"": ""^10.0.2"",\n ""c8"": ""^9.1.0"",\n ""chokidar"": ""^3.5.3"",\n ""enhanced-resolve"": ""^5.15.0"",\n ""glob"": ""^10.3.10"",\n ""minimatch"": ""^9.0.3"",\n ""mocha"": ""^11.1.0"",\n ""supports-color"": ""^9.4.0"",\n ""yargs"": ""^17.7.2""\n },\n ""bin"": {\n ""vscode-test"": ""out/bin.mjs""\n },\n ""engines"": {\n ""node"": "">=18""\n }\n },\n ""node_modules/@vscode/test-electron"": {\n ""version"": ""2.5.2"",\n ""resolved"": ""https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.5.2.tgz"",\n ""integrity"": ""sha512-8ukpxv4wYe0iWMRQU18jhzJOHkeGKbnw7xWRX3Zw1WJA4cEKbHcmmLPdPrPtL6rhDcrlCZN+xKRpv09n4gRHYg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""http-proxy-agent"": ""^7.0.2"",\n ""https-proxy-agent"": ""^7.0.5"",\n ""jszip"": ""^3.10.1"",\n ""ora"": ""^8.1.0"",\n ""semver"": ""^7.6.2""\n },\n ""engines"": {\n ""node"": "">=16""\n }\n },\n ""node_modules/acorn"": {\n ""version"": ""8.15.0"",\n ""resolved"": ""https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz"",\n 
""integrity"": ""sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""bin"": {\n ""acorn"": ""bin/acorn""\n },\n ""engines"": {\n ""node"": "">=0.4.0""\n }\n },\n ""node_modules/acorn-jsx"": {\n ""version"": ""5.3.2"",\n ""resolved"": ""https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz"",\n ""integrity"": ""sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""peerDependencies"": {\n ""acorn"": ""^6.0.0 || ^7.0.0 || ^8.0.0""\n }\n },\n ""node_modules/agent-base"": {\n ""version"": ""7.1.4"",\n ""resolved"": ""https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz"",\n ""integrity"": ""sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 14""\n }\n },\n ""node_modules/ajv"": {\n ""version"": ""6.12.6"",\n ""resolved"": ""https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz"",\n ""integrity"": ""sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""fast-deep-equal"": ""^3.1.1"",\n ""fast-json-stable-stringify"": ""^2.0.0"",\n ""json-schema-traverse"": ""^0.4.1"",\n ""uri-js"": ""^4.2.2""\n },\n ""funding"": {\n ""type"": ""github"",\n ""url"": ""https://github.com/sponsors/epoberezkin""\n }\n },\n ""node_modules/ansi-regex"": {\n ""version"": ""6.2.2"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz"",\n ""integrity"": ""sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/ansi-regex?sponsor=1""\n }\n },\n ""node_modules/ansi-styles"": {\n ""version"": ""4.3.0"",\n ""resolved"": ""https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz"",\n ""integrity"": ""sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""color-convert"": ""^2.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/ansi-styles?sponsor=1""\n }\n },\n ""node_modules/anymatch"": {\n ""version"": ""3.1.3"",\n ""resolved"": ""https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz"",\n ""integrity"": ""sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""normalize-path"": ""^3.0.0"",\n ""picomatch"": ""^2.0.4""\n },\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/argparse"": {\n ""version"": ""2.0.1"",\n ""resolved"": ""https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz"",\n ""integrity"": ""sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="",\n ""dev"": true,\n ""license"": ""Python-2.0""\n },\n ""node_modules/balanced-match"": {\n ""version"": ""1.0.2"",\n ""resolved"": ""https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz"",\n ""integrity"": ""sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/binary-extensions"": {\n 
""version"": ""2.3.0"",\n ""resolved"": ""https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz"",\n ""integrity"": ""sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/brace-expansion"": {\n ""version"": ""2.0.2"",\n ""resolved"": ""https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz"",\n ""integrity"": ""sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""balanced-match"": ""^1.0.0""\n }\n },\n ""node_modules/braces"": {\n ""version"": ""3.0.3"",\n ""resolved"": ""https://registry.npmjs.org/braces/-/braces-3.0.3.tgz"",\n ""integrity"": ""sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""fill-range"": ""^7.1.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/browser-stdout"": {\n ""version"": ""1.3.1"",\n ""resolved"": ""https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz"",\n ""integrity"": ""sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/c8"": {\n ""version"": ""9.1.0"",\n ""resolved"": ""https://registry.npmjs.org/c8/-/c8-9.1.0.tgz"",\n ""integrity"": ""sha512-mBWcT5iqNir1zIkzSPyI3NCR9EZCVI3WUD+AVO17MVWTSFNyUueXE82qTeampNtTr+ilN/5Ua3j24LgbCKjDVg=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""@bcoe/v8-coverage"": ""^0.2.3"",\n ""@istanbuljs/schema"": ""^0.1.3"",\n ""find-up"": ""^5.0.0"",\n ""foreground-child"": ""^3.1.1"",\n ""istanbul-lib-coverage"": ""^3.2.0"",\n ""istanbul-lib-report"": ""^3.0.1"",\n ""istanbul-reports"": ""^3.1.6"",\n ""test-exclude"": ""^6.0.0"",\n ""v8-to-istanbul"": ""^9.0.0"",\n ""yargs"": ""^17.7.2"",\n ""yargs-parser"": ""^21.1.1""\n },\n ""bin"": {\n ""c8"": ""bin/c8.js""\n },\n ""engines"": {\n ""node"": "">=14.14.0""\n }\n },\n ""node_modules/callsites"": {\n ""version"": ""3.1.0"",\n ""resolved"": ""https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz"",\n ""integrity"": ""sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6""\n }\n },\n ""node_modules/camelcase"": {\n ""version"": ""6.3.0"",\n ""resolved"": ""https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz"",\n ""integrity"": ""sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/chalk"": {\n ""version"": ""4.1.2"",\n ""resolved"": ""https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz"",\n ""integrity"": ""sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-styles"": ""^4.1.0"",\n ""supports-color"": ""^7.1.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/chalk?sponsor=1""\n }\n },\n 
""node_modules/chalk/node_modules/supports-color"": {\n ""version"": ""7.2.0"",\n ""resolved"": ""https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz"",\n ""integrity"": ""sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""has-flag"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/chokidar"": {\n ""version"": ""3.6.0"",\n ""resolved"": ""https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz"",\n ""integrity"": ""sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""anymatch"": ""~3.1.2"",\n ""braces"": ""~3.0.2"",\n ""glob-parent"": ""~5.1.2"",\n ""is-binary-path"": ""~2.1.0"",\n ""is-glob"": ""~4.0.1"",\n ""normalize-path"": ""~3.0.0"",\n ""readdirp"": ""~3.6.0""\n },\n ""engines"": {\n ""node"": "">= 8.10.0""\n },\n ""funding"": {\n ""url"": ""https://paulmillr.com/funding/""\n },\n ""optionalDependencies"": {\n ""fsevents"": ""~2.3.2""\n }\n },\n ""node_modules/cli-cursor"": {\n ""version"": ""5.0.0"",\n ""resolved"": ""https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz"",\n ""integrity"": ""sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""restore-cursor"": ""^5.0.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/cli-spinners"": {\n ""version"": ""2.9.2"",\n ""resolved"": ""https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz"",\n ""integrity"": ""sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/cliui"": {\n ""version"": ""8.0.1"",\n ""resolved"": ""https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz"",\n ""integrity"": ""sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""string-width"": ""^4.2.0"",\n ""strip-ansi"": ""^6.0.1"",\n ""wrap-ansi"": ""^7.0.0""\n },\n ""engines"": {\n ""node"": "">=12""\n }\n },\n ""node_modules/cliui/node_modules/ansi-regex"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"",\n ""integrity"": ""sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/cliui/node_modules/emoji-regex"": {\n ""version"": ""8.0.0"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz"",\n ""integrity"": ""sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/cliui/node_modules/string-width"": {\n ""version"": ""4.2.3"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"",\n ""integrity"": ""sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n 
""emoji-regex"": ""^8.0.0"",\n ""is-fullwidth-code-point"": ""^3.0.0"",\n ""strip-ansi"": ""^6.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/cliui/node_modules/strip-ansi"": {\n ""version"": ""6.0.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"",\n ""integrity"": ""sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/cliui/node_modules/wrap-ansi"": {\n ""version"": ""7.0.0"",\n ""resolved"": ""https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz"",\n ""integrity"": ""sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-styles"": ""^4.0.0"",\n ""string-width"": ""^4.1.0"",\n ""strip-ansi"": ""^6.0.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/wrap-ansi?sponsor=1""\n }\n },\n ""node_modules/color-convert"": {\n ""version"": ""2.0.1"",\n ""resolved"": ""https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz"",\n ""integrity"": ""sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""color-name"": ""~1.1.4""\n },\n ""engines"": {\n ""node"": "">=7.0.0""\n }\n },\n ""node_modules/color-name"": {\n ""version"": ""1.1.4"",\n ""resolved"": ""https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz"",\n ""integrity"": ""sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/concat-map"": {\n ""version"": ""0.0.1"",\n ""resolved"": ""https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz"",\n ""integrity"": ""sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/convert-source-map"": {\n ""version"": ""2.0.0"",\n ""resolved"": ""https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz"",\n ""integrity"": ""sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/core-util-is"": {\n ""version"": ""1.0.3"",\n ""resolved"": ""https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz"",\n ""integrity"": ""sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/cross-spawn"": {\n ""version"": ""7.0.6"",\n ""resolved"": ""https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz"",\n ""integrity"": ""sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""path-key"": ""^3.1.0"",\n ""shebang-command"": ""^2.0.0"",\n ""which"": ""^2.0.1""\n },\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/debug"": {\n ""version"": ""4.4.3"",\n ""resolved"": ""https://registry.npmjs.org/debug/-/debug-4.4.3.tgz"",\n ""integrity"": ""sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n 
""dependencies"": {\n ""ms"": ""^2.1.3""\n },\n ""engines"": {\n ""node"": "">=6.0""\n },\n ""peerDependenciesMeta"": {\n ""supports-color"": {\n ""optional"": true\n }\n }\n },\n ""node_modules/decamelize"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz"",\n ""integrity"": ""sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/deep-is"": {\n ""version"": ""0.1.4"",\n ""resolved"": ""https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz"",\n ""integrity"": ""sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/diff"": {\n ""version"": ""7.0.0"",\n ""resolved"": ""https://registry.npmjs.org/diff/-/diff-7.0.0.tgz"",\n ""integrity"": ""sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""engines"": {\n ""node"": "">=0.3.1""\n }\n },\n ""node_modules/eastasianwidth"": {\n ""version"": ""0.2.0"",\n ""resolved"": ""https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz"",\n ""integrity"": ""sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/emoji-regex"": {\n ""version"": ""9.2.2"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz"",\n ""integrity"": ""sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/enhanced-resolve"": {\n ""version"": ""5.18.3"",\n ""resolved"": ""https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz"",\n ""integrity"": ""sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""graceful-fs"": ""^4.2.4"",\n ""tapable"": ""^2.2.0""\n },\n ""engines"": {\n ""node"": "">=10.13.0""\n }\n },\n ""node_modules/escalade"": {\n ""version"": ""3.2.0"",\n ""resolved"": ""https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz"",\n ""integrity"": ""sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6""\n }\n },\n ""node_modules/escape-string-regexp"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz"",\n ""integrity"": ""sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/eslint"": {\n ""version"": ""9.39.1"",\n ""resolved"": ""https://registry.npmjs.org/eslint/-/eslint-9.39.1.tgz"",\n ""integrity"": ""sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@eslint-community/eslint-utils"": ""^4.8.0"",\n ""@eslint-community/regexpp"": ""^4.12.1"",\n ""@eslint/config-array"": 
""^0.21.1"",\n ""@eslint/config-helpers"": ""^0.4.2"",\n ""@eslint/core"": ""^0.17.0"",\n ""@eslint/eslintrc"": ""^3.3.1"",\n ""@eslint/js"": ""9.39.1"",\n ""@eslint/plugin-kit"": ""^0.4.1"",\n ""@humanfs/node"": ""^0.16.6"",\n ""@humanwhocodes/module-importer"": ""^1.0.1"",\n ""@humanwhocodes/retry"": ""^0.4.2"",\n ""@types/estree"": ""^1.0.6"",\n ""ajv"": ""^6.12.4"",\n ""chalk"": ""^4.0.0"",\n ""cross-spawn"": ""^7.0.6"",\n ""debug"": ""^4.3.2"",\n ""escape-string-regexp"": ""^4.0.0"",\n ""eslint-scope"": ""^8.4.0"",\n ""eslint-visitor-keys"": ""^4.2.1"",\n ""espree"": ""^10.4.0"",\n ""esquery"": ""^1.5.0"",\n ""esutils"": ""^2.0.2"",\n ""fast-deep-equal"": ""^3.1.3"",\n ""file-entry-cache"": ""^8.0.0"",\n ""find-up"": ""^5.0.0"",\n ""glob-parent"": ""^6.0.2"",\n ""ignore"": ""^5.2.0"",\n ""imurmurhash"": ""^0.1.4"",\n ""is-glob"": ""^4.0.0"",\n ""json-stable-stringify-without-jsonify"": ""^1.0.1"",\n ""lodash.merge"": ""^4.6.2"",\n ""minimatch"": ""^3.1.2"",\n ""natural-compare"": ""^1.4.0"",\n ""optionator"": ""^0.9.3""\n },\n ""bin"": {\n ""eslint"": ""bin/eslint.js""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://eslint.org/donate""\n },\n ""peerDependencies"": {\n ""jiti"": ""*""\n },\n ""peerDependenciesMeta"": {\n ""jiti"": {\n ""optional"": true\n }\n }\n },\n ""node_modules/eslint-scope"": {\n ""version"": ""8.4.0"",\n ""resolved"": ""https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz"",\n ""integrity"": ""sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg=="",\n ""dev"": true,\n ""license"": ""BSD-2-Clause"",\n ""dependencies"": {\n ""esrecurse"": ""^4.3.0"",\n ""estraverse"": ""^5.2.0""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/eslint-visitor-keys"": {\n ""version"": ""3.4.3"",\n ""resolved"": ""https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz"",\n ""integrity"": ""sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": ""^12.22.0 || ^14.17.0 || >=16.0.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/eslint/node_modules/brace-expansion"": {\n ""version"": ""1.1.12"",\n ""resolved"": ""https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz"",\n ""integrity"": ""sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""balanced-match"": ""^1.0.0"",\n ""concat-map"": ""0.0.1""\n }\n },\n ""node_modules/eslint/node_modules/eslint-visitor-keys"": {\n ""version"": ""4.2.1"",\n ""resolved"": ""https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz"",\n ""integrity"": ""sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/eslint/node_modules/glob-parent"": {\n ""version"": ""6.0.2"",\n ""resolved"": ""https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz"",\n ""integrity"": 
""sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""is-glob"": ""^4.0.3""\n },\n ""engines"": {\n ""node"": "">=10.13.0""\n }\n },\n ""node_modules/eslint/node_modules/ignore"": {\n ""version"": ""5.3.2"",\n ""resolved"": ""https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz"",\n ""integrity"": ""sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 4""\n }\n },\n ""node_modules/eslint/node_modules/minimatch"": {\n ""version"": ""3.1.2"",\n ""resolved"": ""https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz"",\n ""integrity"": ""sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""brace-expansion"": ""^1.1.7""\n },\n ""engines"": {\n ""node"": ""*""\n }\n },\n ""node_modules/espree"": {\n ""version"": ""10.4.0"",\n ""resolved"": ""https://registry.npmjs.org/espree/-/espree-10.4.0.tgz"",\n ""integrity"": ""sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ=="",\n ""dev"": true,\n ""license"": ""BSD-2-Clause"",\n ""dependencies"": {\n ""acorn"": ""^8.15.0"",\n ""acorn-jsx"": ""^5.3.2"",\n ""eslint-visitor-keys"": ""^4.2.1""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/espree/node_modules/eslint-visitor-keys"": {\n ""version"": ""4.2.1"",\n ""resolved"": ""https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz"",\n ""integrity"": ""sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/esquery"": {\n ""version"": ""1.6.0"",\n ""resolved"": ""https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz"",\n ""integrity"": ""sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""dependencies"": {\n ""estraverse"": ""^5.1.0""\n },\n ""engines"": {\n ""node"": "">=0.10""\n }\n },\n ""node_modules/esrecurse"": {\n ""version"": ""4.3.0"",\n ""resolved"": ""https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz"",\n ""integrity"": ""sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="",\n ""dev"": true,\n ""license"": ""BSD-2-Clause"",\n ""dependencies"": {\n ""estraverse"": ""^5.2.0""\n },\n ""engines"": {\n ""node"": "">=4.0""\n }\n },\n ""node_modules/estraverse"": {\n ""version"": ""5.3.0"",\n ""resolved"": ""https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz"",\n ""integrity"": ""sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="",\n ""dev"": true,\n ""license"": ""BSD-2-Clause"",\n ""engines"": {\n ""node"": "">=4.0""\n }\n },\n ""node_modules/esutils"": {\n ""version"": ""2.0.3"",\n ""resolved"": ""https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz"",\n ""integrity"": ""sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="",\n ""dev"": true,\n ""license"": 
""BSD-2-Clause"",\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/fast-deep-equal"": {\n ""version"": ""3.1.3"",\n ""resolved"": ""https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz"",\n ""integrity"": ""sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/fast-glob"": {\n ""version"": ""3.3.3"",\n ""resolved"": ""https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz"",\n ""integrity"": ""sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@nodelib/fs.stat"": ""^2.0.2"",\n ""@nodelib/fs.walk"": ""^1.2.3"",\n ""glob-parent"": ""^5.1.2"",\n ""merge2"": ""^1.3.0"",\n ""micromatch"": ""^4.0.8""\n },\n ""engines"": {\n ""node"": "">=8.6.0""\n }\n },\n ""node_modules/fast-json-stable-stringify"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz"",\n ""integrity"": ""sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/fast-levenshtein"": {\n ""version"": ""2.0.6"",\n ""resolved"": ""https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz"",\n ""integrity"": ""sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/fastq"": {\n ""version"": ""1.19.1"",\n ""resolved"": ""https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz"",\n ""integrity"": ""sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""reusify"": ""^1.0.4""\n }\n },\n ""node_modules/file-entry-cache"": {\n ""version"": ""8.0.0"",\n ""resolved"": ""https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz"",\n ""integrity"": ""sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""flat-cache"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=16.0.0""\n }\n },\n ""node_modules/fill-range"": {\n ""version"": ""7.1.1"",\n ""resolved"": ""https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz"",\n ""integrity"": ""sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""to-regex-range"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/find-up"": {\n ""version"": ""5.0.0"",\n ""resolved"": ""https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz"",\n ""integrity"": ""sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""locate-path"": ""^6.0.0"",\n ""path-exists"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/flat"": {\n ""version"": ""5.0.2"",\n ""resolved"": ""https://registry.npmjs.org/flat/-/flat-5.0.2.tgz"",\n ""integrity"": ""sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ=="",\n ""dev"": true,\n ""license"": 
""BSD-3-Clause"",\n ""bin"": {\n ""flat"": ""cli.js""\n }\n },\n ""node_modules/flat-cache"": {\n ""version"": ""4.0.1"",\n ""resolved"": ""https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz"",\n ""integrity"": ""sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""flatted"": ""^3.2.9"",\n ""keyv"": ""^4.5.4""\n },\n ""engines"": {\n ""node"": "">=16""\n }\n },\n ""node_modules/flatted"": {\n ""version"": ""3.3.3"",\n ""resolved"": ""https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz"",\n ""integrity"": ""sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/foreground-child"": {\n ""version"": ""3.3.1"",\n ""resolved"": ""https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz"",\n ""integrity"": ""sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""cross-spawn"": ""^7.0.6"",\n ""signal-exit"": ""^4.0.1""\n },\n ""engines"": {\n ""node"": "">=14""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n ""node_modules/fs.realpath"": {\n ""version"": ""1.0.0"",\n ""resolved"": ""https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz"",\n ""integrity"": ""sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/fsevents"": {\n ""version"": ""2.3.3"",\n ""resolved"": ""https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz"",\n ""integrity"": ""sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="",\n ""dev"": true,\n ""hasInstallScript"": true,\n ""license"": ""MIT"",\n ""optional"": true,\n ""os"": [\n ""darwin""\n ],\n ""engines"": {\n ""node"": ""^8.16.0 || ^10.6.0 || >=11.0.0""\n }\n },\n ""node_modules/get-caller-file"": {\n ""version"": ""2.0.5"",\n ""resolved"": ""https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz"",\n ""integrity"": ""sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""engines"": {\n ""node"": ""6.* || 8.* || >= 10.*""\n }\n },\n ""node_modules/get-east-asian-width"": {\n ""version"": ""1.4.0"",\n ""resolved"": ""https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz"",\n ""integrity"": ""sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/glob"": {\n ""version"": ""10.4.5"",\n ""resolved"": ""https://registry.npmjs.org/glob/-/glob-10.4.5.tgz"",\n ""integrity"": ""sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""foreground-child"": ""^3.1.0"",\n ""jackspeak"": ""^3.1.2"",\n ""minimatch"": ""^9.0.4"",\n ""minipass"": ""^7.1.2"",\n ""package-json-from-dist"": ""^1.0.0"",\n ""path-scurry"": ""^1.11.1""\n },\n ""bin"": {\n ""glob"": ""dist/esm/bin.mjs""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n 
""node_modules/glob-parent"": {\n ""version"": ""5.1.2"",\n ""resolved"": ""https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz"",\n ""integrity"": ""sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""is-glob"": ""^4.0.1""\n },\n ""engines"": {\n ""node"": "">= 6""\n }\n },\n ""node_modules/globals"": {\n ""version"": ""14.0.0"",\n ""resolved"": ""https://registry.npmjs.org/globals/-/globals-14.0.0.tgz"",\n ""integrity"": ""sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/graceful-fs"": {\n ""version"": ""4.2.11"",\n ""resolved"": ""https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz"",\n ""integrity"": ""sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/graphemer"": {\n ""version"": ""1.4.0"",\n ""resolved"": ""https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz"",\n ""integrity"": ""sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/has-flag"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz"",\n ""integrity"": ""sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/he"": {\n ""version"": ""1.2.0"",\n ""resolved"": ""https://registry.npmjs.org/he/-/he-1.2.0.tgz"",\n ""integrity"": ""sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""bin"": {\n ""he"": ""bin/he""\n }\n },\n ""node_modules/html-escaper"": {\n ""version"": ""2.0.2"",\n ""resolved"": ""https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz"",\n ""integrity"": ""sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/http-proxy-agent"": {\n ""version"": ""7.0.2"",\n ""resolved"": ""https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz"",\n ""integrity"": ""sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""agent-base"": ""^7.1.0"",\n ""debug"": ""^4.3.4""\n },\n ""engines"": {\n ""node"": "">= 14""\n }\n },\n ""node_modules/https-proxy-agent"": {\n ""version"": ""7.0.6"",\n ""resolved"": ""https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz"",\n ""integrity"": ""sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""agent-base"": ""^7.1.2"",\n ""debug"": ""4""\n },\n ""engines"": {\n ""node"": "">= 14""\n }\n },\n ""node_modules/ignore"": {\n ""version"": ""7.0.5"",\n ""resolved"": ""https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz"",\n ""integrity"": ""sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg=="",\n ""dev"": 
true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 4""\n }\n },\n ""node_modules/immediate"": {\n ""version"": ""3.0.6"",\n ""resolved"": ""https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz"",\n ""integrity"": ""sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/import-fresh"": {\n ""version"": ""3.3.1"",\n ""resolved"": ""https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz"",\n ""integrity"": ""sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""parent-module"": ""^1.0.0"",\n ""resolve-from"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=6""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/imurmurhash"": {\n ""version"": ""0.1.4"",\n ""resolved"": ""https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz"",\n ""integrity"": ""sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.8.19""\n }\n },\n ""node_modules/inflight"": {\n ""version"": ""1.0.6"",\n ""resolved"": ""https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz"",\n ""integrity"": ""sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA=="",\n ""deprecated"": ""This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful."",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""once"": ""^1.3.0"",\n ""wrappy"": ""1""\n }\n },\n ""node_modules/inherits"": {\n ""version"": ""2.0.4"",\n ""resolved"": ""https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz"",\n ""integrity"": ""sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/is-binary-path"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz"",\n ""integrity"": ""sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""binary-extensions"": ""^2.0.0""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/is-extglob"": {\n ""version"": ""2.1.1"",\n ""resolved"": ""https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz"",\n ""integrity"": ""sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/is-fullwidth-code-point"": {\n ""version"": ""3.0.0"",\n ""resolved"": ""https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz"",\n ""integrity"": ""sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/is-glob"": {\n ""version"": ""4.0.3"",\n ""resolved"": ""https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz"",\n ""integrity"": 
""sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""is-extglob"": ""^2.1.1""\n },\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/is-interactive"": {\n ""version"": ""2.0.0"",\n ""resolved"": ""https://registry.npmjs.org/is-interactive/-/is-interactive-2.0.0.tgz"",\n ""integrity"": ""sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/is-number"": {\n ""version"": ""7.0.0"",\n ""resolved"": ""https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz"",\n ""integrity"": ""sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.12.0""\n }\n },\n ""node_modules/is-path-inside"": {\n ""version"": ""3.0.3"",\n ""resolved"": ""https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz"",\n ""integrity"": ""sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/is-plain-obj"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz"",\n ""integrity"": ""sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/is-unicode-supported"": {\n ""version"": ""0.1.0"",\n ""resolved"": ""https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz"",\n ""integrity"": ""sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/isarray"": {\n ""version"": ""1.0.0"",\n ""resolved"": ""https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz"",\n ""integrity"": ""sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/isexe"": {\n ""version"": ""2.0.0"",\n ""resolved"": ""https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz"",\n ""integrity"": ""sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/istanbul-lib-coverage"": {\n ""version"": ""3.2.2"",\n ""resolved"": ""https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz"",\n ""integrity"": ""sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/istanbul-lib-report"": {\n ""version"": ""3.0.1"",\n ""resolved"": ""https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz"",\n ""integrity"": ""sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""dependencies"": {\n 
""istanbul-lib-coverage"": ""^3.0.0"",\n ""make-dir"": ""^4.0.0"",\n ""supports-color"": ""^7.1.0""\n },\n ""engines"": {\n ""node"": "">=10""\n }\n },\n ""node_modules/istanbul-lib-report/node_modules/supports-color"": {\n ""version"": ""7.2.0"",\n ""resolved"": ""https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz"",\n ""integrity"": ""sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""has-flag"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/istanbul-reports"": {\n ""version"": ""3.2.0"",\n ""resolved"": ""https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz"",\n ""integrity"": ""sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""dependencies"": {\n ""html-escaper"": ""^2.0.0"",\n ""istanbul-lib-report"": ""^3.0.0""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/jackspeak"": {\n ""version"": ""3.4.3"",\n ""resolved"": ""https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz"",\n ""integrity"": ""sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="",\n ""dev"": true,\n ""license"": ""BlueOak-1.0.0"",\n ""dependencies"": {\n ""@isaacs/cliui"": ""^8.0.2""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n },\n ""optionalDependencies"": {\n ""@pkgjs/parseargs"": ""^0.11.0""\n }\n },\n ""node_modules/js-yaml"": {\n ""version"": ""4.1.0"",\n ""resolved"": ""https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz"",\n ""integrity"": ""sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""argparse"": ""^2.0.1""\n },\n ""bin"": {\n ""js-yaml"": ""bin/js-yaml.js""\n }\n },\n ""node_modules/json-buffer"": {\n ""version"": ""3.0.1"",\n ""resolved"": ""https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz"",\n ""integrity"": ""sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/json-schema-traverse"": {\n ""version"": ""0.4.1"",\n ""resolved"": ""https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz"",\n ""integrity"": ""sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/json-stable-stringify-without-jsonify"": {\n ""version"": ""1.0.1"",\n ""resolved"": ""https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz"",\n ""integrity"": ""sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/jszip"": {\n ""version"": ""3.10.1"",\n ""resolved"": ""https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz"",\n ""integrity"": ""sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g=="",\n ""dev"": true,\n ""license"": ""(MIT OR GPL-3.0-or-later)"",\n ""dependencies"": {\n ""lie"": ""~3.3.0"",\n ""pako"": ""~1.0.2"",\n ""readable-stream"": ""~2.3.6"",\n ""setimmediate"": ""^1.0.5""\n }\n },\n ""node_modules/keyv"": {\n ""version"": ""4.5.4"",\n ""resolved"": 
""https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz"",\n ""integrity"": ""sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""json-buffer"": ""3.0.1""\n }\n },\n ""node_modules/levn"": {\n ""version"": ""0.4.1"",\n ""resolved"": ""https://registry.npmjs.org/levn/-/levn-0.4.1.tgz"",\n ""integrity"": ""sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""prelude-ls"": ""^1.2.1"",\n ""type-check"": ""~0.4.0""\n },\n ""engines"": {\n ""node"": "">= 0.8.0""\n }\n },\n ""node_modules/lie"": {\n ""version"": ""3.3.0"",\n ""resolved"": ""https://registry.npmjs.org/lie/-/lie-3.3.0.tgz"",\n ""integrity"": ""sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""immediate"": ""~3.0.5""\n }\n },\n ""node_modules/locate-path"": {\n ""version"": ""6.0.0"",\n ""resolved"": ""https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz"",\n ""integrity"": ""sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""p-locate"": ""^5.0.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/lodash.merge"": {\n ""version"": ""4.6.2"",\n ""resolved"": ""https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz"",\n ""integrity"": ""sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/log-symbols"": {\n ""version"": ""4.1.0"",\n ""resolved"": ""https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz"",\n ""integrity"": ""sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""chalk"": ""^4.1.0"",\n ""is-unicode-supported"": ""^0.1.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/lru-cache"": {\n ""version"": ""10.4.3"",\n ""resolved"": ""https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz"",\n ""integrity"": ""sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/make-dir"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz"",\n ""integrity"": ""sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""semver"": ""^7.5.3""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/merge2"": {\n ""version"": ""1.4.1"",\n ""resolved"": ""https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz"",\n ""integrity"": ""sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/micromatch"": {\n ""version"": ""4.0.8"",\n ""resolved"": 
""https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz"",\n ""integrity"": ""sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""braces"": ""^3.0.3"",\n ""picomatch"": ""^2.3.1""\n },\n ""engines"": {\n ""node"": "">=8.6""\n }\n },\n ""node_modules/mimic-function"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz"",\n ""integrity"": ""sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/minimatch"": {\n ""version"": ""9.0.5"",\n ""resolved"": ""https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz"",\n ""integrity"": ""sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""brace-expansion"": ""^2.0.1""\n },\n ""engines"": {\n ""node"": "">=16 || 14 >=14.17""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n ""node_modules/minipass"": {\n ""version"": ""7.1.2"",\n ""resolved"": ""https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz"",\n ""integrity"": ""sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""engines"": {\n ""node"": "">=16 || 14 >=14.17""\n }\n },\n ""node_modules/mocha"": {\n ""version"": ""11.7.5"",\n ""resolved"": ""https://registry.npmjs.org/mocha/-/mocha-11.7.5.tgz"",\n ""integrity"": ""sha512-mTT6RgopEYABzXWFx+GcJ+ZQ32kp4fMf0xvpZIIfSq9Z8lC/++MtcCnQ9t5FP2veYEP95FIYSvW+U9fV4xrlig=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""browser-stdout"": ""^1.3.1"",\n ""chokidar"": ""^4.0.1"",\n ""debug"": ""^4.3.5"",\n ""diff"": ""^7.0.0"",\n ""escape-string-regexp"": ""^4.0.0"",\n ""find-up"": ""^5.0.0"",\n ""glob"": ""^10.4.5"",\n ""he"": ""^1.2.0"",\n ""is-path-inside"": ""^3.0.3"",\n ""js-yaml"": ""^4.1.0"",\n ""log-symbols"": ""^4.1.0"",\n ""minimatch"": ""^9.0.5"",\n ""ms"": ""^2.1.3"",\n ""picocolors"": ""^1.1.1"",\n ""serialize-javascript"": ""^6.0.2"",\n ""strip-json-comments"": ""^3.1.1"",\n ""supports-color"": ""^8.1.1"",\n ""workerpool"": ""^9.2.0"",\n ""yargs"": ""^17.7.2"",\n ""yargs-parser"": ""^21.1.1"",\n ""yargs-unparser"": ""^2.0.0""\n },\n ""bin"": {\n ""_mocha"": ""bin/_mocha"",\n ""mocha"": ""bin/mocha.js""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/mocha/node_modules/chokidar"": {\n ""version"": ""4.0.3"",\n ""resolved"": ""https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz"",\n ""integrity"": ""sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""readdirp"": ""^4.0.1""\n },\n ""engines"": {\n ""node"": "">= 14.16.0""\n },\n ""funding"": {\n ""url"": ""https://paulmillr.com/funding/""\n }\n },\n ""node_modules/mocha/node_modules/readdirp"": {\n ""version"": ""4.1.2"",\n ""resolved"": ""https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz"",\n ""integrity"": ""sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": 
{\n ""node"": "">= 14.18.0""\n },\n ""funding"": {\n ""type"": ""individual"",\n ""url"": ""https://paulmillr.com/funding/""\n }\n },\n ""node_modules/mocha/node_modules/supports-color"": {\n ""version"": ""8.1.1"",\n ""resolved"": ""https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz"",\n ""integrity"": ""sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""has-flag"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/supports-color?sponsor=1""\n }\n },\n ""node_modules/ms"": {\n ""version"": ""2.1.3"",\n ""resolved"": ""https://registry.npmjs.org/ms/-/ms-2.1.3.tgz"",\n ""integrity"": ""sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/natural-compare"": {\n ""version"": ""1.4.0"",\n ""resolved"": ""https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz"",\n ""integrity"": ""sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/normalize-path"": {\n ""version"": ""3.0.0"",\n ""resolved"": ""https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz"",\n ""integrity"": ""sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/once"": {\n ""version"": ""1.4.0"",\n ""resolved"": ""https://registry.npmjs.org/once/-/once-1.4.0.tgz"",\n ""integrity"": ""sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""wrappy"": ""1""\n }\n },\n ""node_modules/onetime"": {\n ""version"": ""7.0.0"",\n ""resolved"": ""https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz"",\n ""integrity"": ""sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""mimic-function"": ""^5.0.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/optionator"": {\n ""version"": ""0.9.4"",\n ""resolved"": ""https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz"",\n ""integrity"": ""sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""deep-is"": ""^0.1.3"",\n ""fast-levenshtein"": ""^2.0.6"",\n ""levn"": ""^0.4.1"",\n ""prelude-ls"": ""^1.2.1"",\n ""type-check"": ""^0.4.0"",\n ""word-wrap"": ""^1.2.5""\n },\n ""engines"": {\n ""node"": "">= 0.8.0""\n }\n },\n ""node_modules/ora"": {\n ""version"": ""8.2.0"",\n ""resolved"": ""https://registry.npmjs.org/ora/-/ora-8.2.0.tgz"",\n ""integrity"": ""sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""chalk"": ""^5.3.0"",\n ""cli-cursor"": ""^5.0.0"",\n ""cli-spinners"": ""^2.9.2"",\n ""is-interactive"": ""^2.0.0"",\n ""is-unicode-supported"": ""^2.0.0"",\n ""log-symbols"": ""^6.0.0"",\n ""stdin-discarder"": ""^0.2.2"",\n ""string-width"": ""^7.2.0"",\n ""strip-ansi"": 
""^7.1.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/ora/node_modules/chalk"": {\n ""version"": ""5.6.2"",\n ""resolved"": ""https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz"",\n ""integrity"": ""sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": ""^12.17.0 || ^14.13 || >=16.0.0""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/chalk?sponsor=1""\n }\n },\n ""node_modules/ora/node_modules/emoji-regex"": {\n ""version"": ""10.6.0"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz"",\n ""integrity"": ""sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/ora/node_modules/is-unicode-supported"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz"",\n ""integrity"": ""sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/ora/node_modules/log-symbols"": {\n ""version"": ""6.0.0"",\n ""resolved"": ""https://registry.npmjs.org/log-symbols/-/log-symbols-6.0.0.tgz"",\n ""integrity"": ""sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""chalk"": ""^5.3.0"",\n ""is-unicode-supported"": ""^1.3.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/ora/node_modules/log-symbols/node_modules/is-unicode-supported"": {\n ""version"": ""1.3.0"",\n ""resolved"": ""https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz"",\n ""integrity"": ""sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/ora/node_modules/string-width"": {\n ""version"": ""7.2.0"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz"",\n ""integrity"": ""sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""emoji-regex"": ""^10.3.0"",\n ""get-east-asian-width"": ""^1.0.0"",\n ""strip-ansi"": ""^7.1.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/p-limit"": {\n ""version"": ""3.1.0"",\n ""resolved"": ""https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz"",\n ""integrity"": ""sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""yocto-queue"": ""^0.1.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/p-locate"": {\n ""version"": ""5.0.0"",\n ""resolved"": 
""https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz"",\n ""integrity"": ""sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""p-limit"": ""^3.0.2""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/package-json-from-dist"": {\n ""version"": ""1.0.1"",\n ""resolved"": ""https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz"",\n ""integrity"": ""sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw=="",\n ""dev"": true,\n ""license"": ""BlueOak-1.0.0""\n },\n ""node_modules/pako"": {\n ""version"": ""1.0.11"",\n ""resolved"": ""https://registry.npmjs.org/pako/-/pako-1.0.11.tgz"",\n ""integrity"": ""sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw=="",\n ""dev"": true,\n ""license"": ""(MIT AND Zlib)""\n },\n ""node_modules/parent-module"": {\n ""version"": ""1.0.1"",\n ""resolved"": ""https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz"",\n ""integrity"": ""sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""callsites"": ""^3.0.0""\n },\n ""engines"": {\n ""node"": "">=6""\n }\n },\n ""node_modules/path-exists"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz"",\n ""integrity"": ""sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/path-is-absolute"": {\n ""version"": ""1.0.1"",\n ""resolved"": ""https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz"",\n ""integrity"": ""sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/path-key"": {\n ""version"": ""3.1.1"",\n ""resolved"": ""https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz"",\n ""integrity"": ""sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/path-scurry"": {\n ""version"": ""1.11.1"",\n ""resolved"": ""https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz"",\n ""integrity"": ""sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="",\n ""dev"": true,\n ""license"": ""BlueOak-1.0.0"",\n ""dependencies"": {\n ""lru-cache"": ""^10.2.0"",\n ""minipass"": ""^5.0.0 || ^6.0.2 || ^7.0.0""\n },\n ""engines"": {\n ""node"": "">=16 || 14 >=14.18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n ""node_modules/picocolors"": {\n ""version"": ""1.1.1"",\n ""resolved"": ""https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz"",\n ""integrity"": ""sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/picomatch"": {\n ""version"": ""2.3.1"",\n ""resolved"": ""https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz"",\n ""integrity"": 
""sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8.6""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/jonschlinkert""\n }\n },\n ""node_modules/prelude-ls"": {\n ""version"": ""1.2.1"",\n ""resolved"": ""https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz"",\n ""integrity"": ""sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 0.8.0""\n }\n },\n ""node_modules/process-nextick-args"": {\n ""version"": ""2.0.1"",\n ""resolved"": ""https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz"",\n ""integrity"": ""sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/punycode"": {\n ""version"": ""2.3.1"",\n ""resolved"": ""https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz"",\n ""integrity"": ""sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6""\n }\n },\n ""node_modules/queue-microtask"": {\n ""version"": ""1.2.3"",\n ""resolved"": ""https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz"",\n ""integrity"": ""sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="",\n ""dev"": true,\n ""funding"": [\n {\n ""type"": ""github"",\n ""url"": ""https://github.com/sponsors/feross""\n },\n {\n ""type"": ""patreon"",\n ""url"": ""https://www.patreon.com/feross""\n },\n {\n ""type"": ""consulting"",\n ""url"": ""https://feross.org/support""\n }\n ],\n ""license"": ""MIT""\n },\n ""node_modules/randombytes"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz"",\n ""integrity"": ""sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""safe-buffer"": ""^5.1.0""\n }\n },\n ""node_modules/readable-stream"": {\n ""version"": ""2.3.8"",\n ""resolved"": ""https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz"",\n ""integrity"": ""sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""core-util-is"": ""~1.0.0"",\n ""inherits"": ""~2.0.3"",\n ""isarray"": ""~1.0.0"",\n ""process-nextick-args"": ""~2.0.0"",\n ""safe-buffer"": ""~5.1.1"",\n ""string_decoder"": ""~1.1.1"",\n ""util-deprecate"": ""~1.0.1""\n }\n },\n ""node_modules/readdirp"": {\n ""version"": ""3.6.0"",\n ""resolved"": ""https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz"",\n ""integrity"": ""sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""picomatch"": ""^2.2.1""\n },\n ""engines"": {\n ""node"": "">=8.10.0""\n }\n },\n ""node_modules/require-directory"": {\n ""version"": ""2.1.1"",\n ""resolved"": ""https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz"",\n ""integrity"": ""sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n 
""node"": "">=0.10.0""\n }\n },\n ""node_modules/resolve-from"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz"",\n ""integrity"": ""sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=4""\n }\n },\n ""node_modules/restore-cursor"": {\n ""version"": ""5.1.0"",\n ""resolved"": ""https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz"",\n ""integrity"": ""sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""onetime"": ""^7.0.0"",\n ""signal-exit"": ""^4.1.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/reusify"": {\n ""version"": ""1.1.0"",\n ""resolved"": ""https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz"",\n ""integrity"": ""sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""iojs"": "">=1.0.0"",\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/run-parallel"": {\n ""version"": ""1.2.0"",\n ""resolved"": ""https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz"",\n ""integrity"": ""sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="",\n ""dev"": true,\n ""funding"": [\n {\n ""type"": ""github"",\n ""url"": ""https://github.com/sponsors/feross""\n },\n {\n ""type"": ""patreon"",\n ""url"": ""https://www.patreon.com/feross""\n },\n {\n ""type"": ""consulting"",\n ""url"": ""https://feross.org/support""\n }\n ],\n ""license"": ""MIT"",\n ""dependencies"": {\n ""queue-microtask"": ""^1.2.2""\n }\n },\n ""node_modules/safe-buffer"": {\n ""version"": ""5.1.2"",\n ""resolved"": ""https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz"",\n ""integrity"": ""sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/semver"": {\n ""version"": ""7.7.3"",\n ""resolved"": ""https://registry.npmjs.org/semver/-/semver-7.7.3.tgz"",\n ""integrity"": ""sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""bin"": {\n ""semver"": ""bin/semver.js""\n },\n ""engines"": {\n ""node"": "">=10""\n }\n },\n ""node_modules/serialize-javascript"": {\n ""version"": ""6.0.2"",\n ""resolved"": ""https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz"",\n ""integrity"": ""sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""dependencies"": {\n ""randombytes"": ""^2.1.0""\n }\n },\n ""node_modules/setimmediate"": {\n ""version"": ""1.0.5"",\n ""resolved"": ""https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz"",\n ""integrity"": ""sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/shebang-command"": {\n ""version"": ""2.0.0"",\n ""resolved"": ""https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz"",\n ""integrity"": 
""sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""shebang-regex"": ""^3.0.0""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/shebang-regex"": {\n ""version"": ""3.0.0"",\n ""resolved"": ""https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz"",\n ""integrity"": ""sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/signal-exit"": {\n ""version"": ""4.1.0"",\n ""resolved"": ""https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz"",\n ""integrity"": ""sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""engines"": {\n ""node"": "">=14""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n ""node_modules/stdin-discarder"": {\n ""version"": ""0.2.2"",\n ""resolved"": ""https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.2.2.tgz"",\n ""integrity"": ""sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/string_decoder"": {\n ""version"": ""1.1.1"",\n ""resolved"": ""https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz"",\n ""integrity"": ""sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""safe-buffer"": ""~5.1.0""\n }\n },\n ""node_modules/string-width"": {\n ""version"": ""5.1.2"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz"",\n ""integrity"": ""sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""eastasianwidth"": ""^0.2.0"",\n ""emoji-regex"": ""^9.2.2"",\n ""strip-ansi"": ""^7.0.1""\n },\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/string-width-cjs"": {\n ""name"": ""string-width"",\n ""version"": ""4.2.3"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"",\n ""integrity"": ""sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""emoji-regex"": ""^8.0.0"",\n ""is-fullwidth-code-point"": ""^3.0.0"",\n ""strip-ansi"": ""^6.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/string-width-cjs/node_modules/ansi-regex"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"",\n ""integrity"": ""sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/string-width-cjs/node_modules/emoji-regex"": {\n ""version"": ""8.0.0"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz"",\n ""integrity"": 
""sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/string-width-cjs/node_modules/strip-ansi"": {\n ""version"": ""6.0.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"",\n ""integrity"": ""sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/strip-ansi"": {\n ""version"": ""7.1.2"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz"",\n ""integrity"": ""sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^6.0.1""\n },\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/strip-ansi?sponsor=1""\n }\n },\n ""node_modules/strip-ansi-cjs"": {\n ""name"": ""strip-ansi"",\n ""version"": ""6.0.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"",\n ""integrity"": ""sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/strip-ansi-cjs/node_modules/ansi-regex"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"",\n ""integrity"": ""sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/strip-json-comments"": {\n ""version"": ""3.1.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz"",\n ""integrity"": ""sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/supports-color"": {\n ""version"": ""9.4.0"",\n ""resolved"": ""https://registry.npmjs.org/supports-color/-/supports-color-9.4.0.tgz"",\n ""integrity"": ""sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/supports-color?sponsor=1""\n }\n },\n ""node_modules/tapable"": {\n ""version"": ""2.3.0"",\n ""resolved"": ""https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz"",\n ""integrity"": ""sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/webpack""\n }\n },\n ""node_modules/test-exclude"": {\n ""version"": ""6.0.0"",\n ""resolved"": ""https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz"",\n ""integrity"": ""sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n 
""@istanbuljs/schema"": ""^0.1.2"",\n ""glob"": ""^7.1.4"",\n ""minimatch"": ""^3.0.4""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/test-exclude/node_modules/brace-expansion"": {\n ""version"": ""1.1.12"",\n ""resolved"": ""https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz"",\n ""integrity"": ""sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""balanced-match"": ""^1.0.0"",\n ""concat-map"": ""0.0.1""\n }\n },\n ""node_modules/test-exclude/node_modules/glob"": {\n ""version"": ""7.2.3"",\n ""resolved"": ""https://registry.npmjs.org/glob/-/glob-7.2.3.tgz"",\n ""integrity"": ""sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q=="",\n ""deprecated"": ""Glob versions prior to v9 are no longer supported"",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""fs.realpath"": ""^1.0.0"",\n ""inflight"": ""^1.0.4"",\n ""inherits"": ""2"",\n ""minimatch"": ""^3.1.1"",\n ""once"": ""^1.3.0"",\n ""path-is-absolute"": ""^1.0.0""\n },\n ""engines"": {\n ""node"": ""*""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n ""node_modules/test-exclude/node_modules/minimatch"": {\n ""version"": ""3.1.2"",\n ""resolved"": ""https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz"",\n ""integrity"": ""sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""brace-expansion"": ""^1.1.7""\n },\n ""engines"": {\n ""node"": ""*""\n }\n },\n ""node_modules/to-regex-range"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz"",\n ""integrity"": ""sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""is-number"": ""^7.0.0""\n },\n ""engines"": {\n ""node"": "">=8.0""\n }\n },\n ""node_modules/ts-api-utils"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz"",\n ""integrity"": ""sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18.12""\n },\n ""peerDependencies"": {\n ""typescript"": "">=4.8.4""\n }\n },\n ""node_modules/type-check"": {\n ""version"": ""0.4.0"",\n ""resolved"": ""https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz"",\n ""integrity"": ""sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""prelude-ls"": ""^1.2.1""\n },\n ""engines"": {\n ""node"": "">= 0.8.0""\n }\n },\n ""node_modules/typescript"": {\n ""version"": ""5.9.3"",\n ""resolved"": ""https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz"",\n ""integrity"": ""sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""bin"": {\n ""tsc"": ""bin/tsc"",\n ""tsserver"": ""bin/tsserver""\n },\n ""engines"": {\n ""node"": "">=14.17""\n }\n },\n ""node_modules/undici-types"": {\n ""version"": ""6.21.0"",\n ""resolved"": ""https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz"",\n ""integrity"": 
""sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/uri-js"": {\n ""version"": ""4.4.1"",\n ""resolved"": ""https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz"",\n ""integrity"": ""sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="",\n ""dev"": true,\n ""license"": ""BSD-2-Clause"",\n ""dependencies"": {\n ""punycode"": ""^2.1.0""\n }\n },\n ""node_modules/util-deprecate"": {\n ""version"": ""1.0.2"",\n ""resolved"": ""https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz"",\n ""integrity"": ""sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/v8-to-istanbul"": {\n ""version"": ""9.3.0"",\n ""resolved"": ""https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz"",\n ""integrity"": ""sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""@jridgewell/trace-mapping"": ""^0.3.12"",\n ""@types/istanbul-lib-coverage"": ""^2.0.1"",\n ""convert-source-map"": ""^2.0.0""\n },\n ""engines"": {\n ""node"": "">=10.12.0""\n }\n },\n ""node_modules/which"": {\n ""version"": ""2.0.2"",\n ""resolved"": ""https://registry.npmjs.org/which/-/which-2.0.2.tgz"",\n ""integrity"": ""sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""isexe"": ""^2.0.0""\n },\n ""bin"": {\n ""node-which"": ""bin/node-which""\n },\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/word-wrap"": {\n ""version"": ""1.2.5"",\n ""resolved"": ""https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz"",\n ""integrity"": ""sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/workerpool"": {\n ""version"": ""9.3.4"",\n ""resolved"": ""https://registry.npmjs.org/workerpool/-/workerpool-9.3.4.tgz"",\n ""integrity"": ""sha512-TmPRQYYSAnnDiEB0P/Ytip7bFGvqnSU6I2BcuSw7Hx+JSg/DsUi5ebYfc8GYaSdpuvOcEs6dXxPurOYpe9QFwg=="",\n ""dev"": true,\n ""license"": ""Apache-2.0""\n },\n ""node_modules/wrap-ansi"": {\n ""version"": ""8.1.0"",\n ""resolved"": ""https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz"",\n ""integrity"": ""sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-styles"": ""^6.1.0"",\n ""string-width"": ""^5.0.1"",\n ""strip-ansi"": ""^7.0.1""\n },\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/wrap-ansi?sponsor=1""\n }\n },\n ""node_modules/wrap-ansi-cjs"": {\n ""name"": ""wrap-ansi"",\n ""version"": ""7.0.0"",\n ""resolved"": ""https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz"",\n ""integrity"": ""sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-styles"": ""^4.0.0"",\n ""string-width"": ""^4.1.0"",\n ""strip-ansi"": ""^6.0.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/wrap-ansi?sponsor=1""\n }\n 
},\n ""node_modules/wrap-ansi-cjs/node_modules/ansi-regex"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"",\n ""integrity"": ""sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/wrap-ansi-cjs/node_modules/emoji-regex"": {\n ""version"": ""8.0.0"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz"",\n ""integrity"": ""sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/wrap-ansi-cjs/node_modules/string-width"": {\n ""version"": ""4.2.3"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"",\n ""integrity"": ""sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""emoji-regex"": ""^8.0.0"",\n ""is-fullwidth-code-point"": ""^3.0.0"",\n ""strip-ansi"": ""^6.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/wrap-ansi-cjs/node_modules/strip-ansi"": {\n ""version"": ""6.0.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"",\n ""integrity"": ""sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/wrap-ansi/node_modules/ansi-styles"": {\n ""version"": ""6.2.3"",\n ""resolved"": ""https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz"",\n ""integrity"": ""sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/ansi-styles?sponsor=1""\n }\n },\n ""node_modules/wrappy"": {\n ""version"": ""1.0.2"",\n ""resolved"": ""https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz"",\n ""integrity"": ""sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/y18n"": {\n ""version"": ""5.0.8"",\n ""resolved"": ""https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz"",\n ""integrity"": ""sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""engines"": {\n ""node"": "">=10""\n }\n },\n ""node_modules/yargs"": {\n ""version"": ""17.7.2"",\n ""resolved"": ""https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz"",\n ""integrity"": ""sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""cliui"": ""^8.0.1"",\n ""escalade"": ""^3.1.1"",\n ""get-caller-file"": ""^2.0.5"",\n ""require-directory"": ""^2.1.1"",\n ""string-width"": ""^4.2.3"",\n ""y18n"": ""^5.0.5"",\n ""yargs-parser"": ""^21.1.1""\n },\n ""engines"": {\n ""node"": "">=12""\n }\n },\n ""node_modules/yargs-parser"": {\n ""version"": ""21.1.1"",\n ""resolved"": ""https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz"",\n ""integrity"": 
""sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""engines"": {\n ""node"": "">=12""\n }\n },\n ""node_modules/yargs-unparser"": {\n ""version"": ""2.0.0"",\n ""resolved"": ""https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz"",\n ""integrity"": ""sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""camelcase"": ""^6.0.0"",\n ""decamelize"": ""^4.0.0"",\n ""flat"": ""^5.0.2"",\n ""is-plain-obj"": ""^2.1.0""\n },\n ""engines"": {\n ""node"": "">=10""\n }\n },\n ""node_modules/yargs/node_modules/ansi-regex"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"",\n ""integrity"": ""sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/yargs/node_modules/emoji-regex"": {\n ""version"": ""8.0.0"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz"",\n ""integrity"": ""sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/yargs/node_modules/string-width"": {\n ""version"": ""4.2.3"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"",\n ""integrity"": ""sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""emoji-regex"": ""^8.0.0"",\n ""is-fullwidth-code-point"": ""^3.0.0"",\n ""strip-ansi"": ""^6.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/yargs/node_modules/strip-ansi"": {\n ""version"": ""6.0.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"",\n ""integrity"": ""sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/yocto-queue"": {\n ""version"": ""0.1.0"",\n ""resolved"": ""https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz"",\n ""integrity"": ""sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n }\n }\n}\n",json,tab +8,19979,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"",Log,tab +9,20236,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"Switched from branch 'main' to 'text-decorators'",Log,git_branch_checkout +10,20512,"extension-output-pdoom-org.crowd-code-#1-crowd-code",304,0,"10:18:05 AM [info] Branch checkout detected: main -> text-decorators\n10:18:05 AM [info] Recording git checkout: Switched from branch 'main' to 'text-decorators'\n10:18:05 AM [info] Resetting file cache due to branch checkout\n",Log,content +11,22840,"TERMINAL",0,0,"",,terminal_focus +12,22840,"package-lock.json",0,0,"{\n ""name"": ""crowd-pilot"",\n ""version"": ""0.0.1"",\n ""lockfileVersion"": 3,\n ""requires"": true,\n ""packages"": {\n """": {\n ""name"": ""crowd-pilot"",\n ""version"": ""0.0.1"",\n 
""devDependencies"": {\n ""@types/mocha"": ""^10.0.10"",\n ""@types/node"": ""22.x"",\n ""@types/vscode"": ""^1.105.0"",\n ""@typescript-eslint/eslint-plugin"": ""^8.45.0"",\n ""@typescript-eslint/parser"": ""^8.45.0"",\n ""@vscode/test-cli"": ""^0.0.11"",\n ""@vscode/test-electron"": ""^2.5.2"",\n ""eslint"": ""^9.36.0"",\n ""typescript"": ""^5.9.3""\n },\n ""engines"": {\n ""vscode"": ""^1.99.3""\n }\n },\n ""node_modules/@bcoe/v8-coverage"": {\n ""version"": ""0.2.3"",\n ""resolved"": ""https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz"",\n ""integrity"": ""sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@eslint-community/eslint-utils"": {\n ""version"": ""4.9.0"",\n ""resolved"": ""https://registry.npmjs.org/@eslint-community/eslint-utils/-/eslint-utils-4.9.0.tgz"",\n ""integrity"": ""sha512-ayVFHdtZ+hsq1t2Dy24wCmGXGe4q9Gu3smhLYALJrr473ZH27MsnSL+LKUlimp4BWJqMDMLmPpx/Q9R3OAlL4g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""eslint-visitor-keys"": ""^3.4.3""\n },\n ""engines"": {\n ""node"": ""^12.22.0 || ^14.17.0 || >=16.0.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n },\n ""peerDependencies"": {\n ""eslint"": ""^6.0.0 || ^7.0.0 || >=8.0.0""\n }\n },\n ""node_modules/@eslint-community/regexpp"": {\n ""version"": ""4.12.2"",\n ""resolved"": ""https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.12.2.tgz"",\n ""integrity"": ""sha512-EriSTlt5OC9/7SXkRSCAhfSxxoSUgBm33OH+IkwbdpgoqsSsUg7y3uh+IICI/Qg4BBWr3U2i39RpmycbxMq4ew=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": ""^12.0.0 || ^14.0.0 || >=16.0.0""\n }\n },\n ""node_modules/@eslint/config-array"": {\n ""version"": ""0.21.1"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/config-array/-/config-array-0.21.1.tgz"",\n ""integrity"": ""sha512-aw1gNayWpdI/jSYVgzN5pL0cfzU02GT3NBpeT/DXbx1/1x7ZKxFPd9bwrzygx/qiwIQiJ1sw/zD8qY/kRvlGHA=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""dependencies"": {\n ""@eslint/object-schema"": ""^2.1.7"",\n ""debug"": ""^4.3.1"",\n ""minimatch"": ""^3.1.2""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/@eslint/config-array/node_modules/brace-expansion"": {\n ""version"": ""1.1.12"",\n ""resolved"": ""https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz"",\n ""integrity"": ""sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""balanced-match"": ""^1.0.0"",\n ""concat-map"": ""0.0.1""\n }\n },\n ""node_modules/@eslint/config-array/node_modules/minimatch"": {\n ""version"": ""3.1.2"",\n ""resolved"": ""https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz"",\n ""integrity"": ""sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""brace-expansion"": ""^1.1.7""\n },\n ""engines"": {\n ""node"": ""*""\n }\n },\n ""node_modules/@eslint/config-helpers"": {\n ""version"": ""0.4.2"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/config-helpers/-/config-helpers-0.4.2.tgz"",\n ""integrity"": ""sha512-gBrxN88gOIf3R7ja5K9slwNayVcZgK6SOUORm2uBzTeIEfeVaIhOpCtTox3P6R7o2jLFwLFTLnC7kU/RGcYEgw=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""dependencies"": {\n ""@eslint/core"": 
""^0.17.0""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/@eslint/core"": {\n ""version"": ""0.17.0"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/core/-/core-0.17.0.tgz"",\n ""integrity"": ""sha512-yL/sLrpmtDaFEiUj1osRP4TI2MDz1AddJL+jZ7KSqvBuliN4xqYY54IfdN8qD8Toa6g1iloph1fxQNkjOxrrpQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""dependencies"": {\n ""@types/json-schema"": ""^7.0.15""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/@eslint/eslintrc"": {\n ""version"": ""3.3.1"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/eslintrc/-/eslintrc-3.3.1.tgz"",\n ""integrity"": ""sha512-gtF186CXhIl1p4pJNGZw8Yc6RlshoePRvE0X91oPGb3vZ8pM3qOS9W9NGPat9LziaBV7XrJWGylNQXkGcnM3IQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ajv"": ""^6.12.4"",\n ""debug"": ""^4.3.2"",\n ""espree"": ""^10.0.1"",\n ""globals"": ""^14.0.0"",\n ""ignore"": ""^5.2.0"",\n ""import-fresh"": ""^3.2.1"",\n ""js-yaml"": ""^4.1.0"",\n ""minimatch"": ""^3.1.2"",\n ""strip-json-comments"": ""^3.1.1""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/@eslint/eslintrc/node_modules/brace-expansion"": {\n ""version"": ""1.1.12"",\n ""resolved"": ""https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz"",\n ""integrity"": ""sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""balanced-match"": ""^1.0.0"",\n ""concat-map"": ""0.0.1""\n }\n },\n ""node_modules/@eslint/eslintrc/node_modules/ignore"": {\n ""version"": ""5.3.2"",\n ""resolved"": ""https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz"",\n ""integrity"": ""sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 4""\n }\n },\n ""node_modules/@eslint/eslintrc/node_modules/minimatch"": {\n ""version"": ""3.1.2"",\n ""resolved"": ""https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz"",\n ""integrity"": ""sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""brace-expansion"": ""^1.1.7""\n },\n ""engines"": {\n ""node"": ""*""\n }\n },\n ""node_modules/@eslint/js"": {\n ""version"": ""9.39.1"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/js/-/js-9.39.1.tgz"",\n ""integrity"": ""sha512-S26Stp4zCy88tH94QbBv3XCuzRQiZ9yXofEILmglYTh/Ug/a9/umqvgFtYBAo3Lp0nsI/5/qH1CCrbdK3AP1Tw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://eslint.org/donate""\n }\n },\n ""node_modules/@eslint/object-schema"": {\n ""version"": ""2.1.7"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/object-schema/-/object-schema-2.1.7.tgz"",\n ""integrity"": ""sha512-VtAOaymWVfZcmZbp6E2mympDIHvyjXs/12LqWYjVw6qjrfF+VK+fyG33kChz3nnK+SU5/NeHOqrTEHS8sXO3OA=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/@eslint/plugin-kit"": {\n ""version"": ""0.4.1"",\n ""resolved"": ""https://registry.npmjs.org/@eslint/plugin-kit/-/plugin-kit-0.4.1.tgz"",\n ""integrity"": 
""sha512-43/qtrDUokr7LJqoF2c3+RInu/t4zfrpYdoSDfYyhg52rwLV6TnOvdG4fXm7IkSB3wErkcmJS9iEhjVtOSEjjA=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""dependencies"": {\n ""@eslint/core"": ""^0.17.0"",\n ""levn"": ""^0.4.1""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/@humanfs/core"": {\n ""version"": ""0.19.1"",\n ""resolved"": ""https://registry.npmjs.org/@humanfs/core/-/core-0.19.1.tgz"",\n ""integrity"": ""sha512-5DyQ4+1JEUzejeK1JGICcideyfUbGixgS9jNgex5nqkW+cY7WZhxBigmieN5Qnw9ZosSNVC9KQKyb+GUaGyKUA=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": "">=18.18.0""\n }\n },\n ""node_modules/@humanfs/node"": {\n ""version"": ""0.16.7"",\n ""resolved"": ""https://registry.npmjs.org/@humanfs/node/-/node-0.16.7.tgz"",\n ""integrity"": ""sha512-/zUx+yOsIrG4Y43Eh2peDeKCxlRt/gET6aHfaKpuq267qXdYDFViVHfMaLyygZOnl0kGWxFIgsBy8QFuTLUXEQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""dependencies"": {\n ""@humanfs/core"": ""^0.19.1"",\n ""@humanwhocodes/retry"": ""^0.4.0""\n },\n ""engines"": {\n ""node"": "">=18.18.0""\n }\n },\n ""node_modules/@humanwhocodes/module-importer"": {\n ""version"": ""1.0.1"",\n ""resolved"": ""https://registry.npmjs.org/@humanwhocodes/module-importer/-/module-importer-1.0.1.tgz"",\n ""integrity"": ""sha512-bxveV4V8v5Yb4ncFTT3rPSgZBOpCkjfK0y4oVVVJwIuDVBRMDXrPyXRL988i5ap9m9bnyEEjWfm5WkBmtffLfA=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": "">=12.22""\n },\n ""funding"": {\n ""type"": ""github"",\n ""url"": ""https://github.com/sponsors/nzakas""\n }\n },\n ""node_modules/@humanwhocodes/retry"": {\n ""version"": ""0.4.3"",\n ""resolved"": ""https://registry.npmjs.org/@humanwhocodes/retry/-/retry-0.4.3.tgz"",\n ""integrity"": ""sha512-bV0Tgo9K4hfPCek+aMAn81RppFKv2ySDQeMoSZuvTASywNTnVJCArCZE2FWqpvIatKu7VMRLWlR1EazvVhDyhQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": "">=18.18""\n },\n ""funding"": {\n ""type"": ""github"",\n ""url"": ""https://github.com/sponsors/nzakas""\n }\n },\n ""node_modules/@isaacs/cliui"": {\n ""version"": ""8.0.2"",\n ""resolved"": ""https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz"",\n ""integrity"": ""sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""string-width"": ""^5.1.2"",\n ""string-width-cjs"": ""npm:string-width@^4.2.0"",\n ""strip-ansi"": ""^7.0.1"",\n ""strip-ansi-cjs"": ""npm:strip-ansi@^6.0.1"",\n ""wrap-ansi"": ""^8.1.0"",\n ""wrap-ansi-cjs"": ""npm:wrap-ansi@^7.0.0""\n },\n ""engines"": {\n ""node"": "">=12""\n }\n },\n ""node_modules/@istanbuljs/schema"": {\n ""version"": ""0.1.3"",\n ""resolved"": ""https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz"",\n ""integrity"": ""sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/@jridgewell/resolve-uri"": {\n ""version"": ""3.1.2"",\n ""resolved"": ""https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz"",\n ""integrity"": ""sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6.0.0""\n }\n },\n ""node_modules/@jridgewell/sourcemap-codec"": {\n ""version"": ""1.5.5"",\n ""resolved"": 
""https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.5.tgz"",\n ""integrity"": ""sha512-cYQ9310grqxueWbl+WuIUIaiUaDcj7WOq5fVhEljNVgRfOUhY9fy2zTvfoqWsnebh8Sl70VScFbICvJnLKB0Og=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@jridgewell/trace-mapping"": {\n ""version"": ""0.3.31"",\n ""resolved"": ""https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.31.tgz"",\n ""integrity"": ""sha512-zzNR+SdQSDJzc8joaeP8QQoCQr8NuYx2dIIytl1QeBEZHJ9uW6hebsrYgbz8hJwUQao3TWCMtmfV8Nu1twOLAw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@jridgewell/resolve-uri"": ""^3.1.0"",\n ""@jridgewell/sourcemap-codec"": ""^1.4.14""\n }\n },\n ""node_modules/@nodelib/fs.scandir"": {\n ""version"": ""2.1.5"",\n ""resolved"": ""https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz"",\n ""integrity"": ""sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@nodelib/fs.stat"": ""2.0.5"",\n ""run-parallel"": ""^1.1.9""\n },\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/@nodelib/fs.stat"": {\n ""version"": ""2.0.5"",\n ""resolved"": ""https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz"",\n ""integrity"": ""sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/@nodelib/fs.walk"": {\n ""version"": ""1.2.8"",\n ""resolved"": ""https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz"",\n ""integrity"": ""sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@nodelib/fs.scandir"": ""2.1.5"",\n ""fastq"": ""^1.6.0""\n },\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/@pkgjs/parseargs"": {\n ""version"": ""0.11.0"",\n ""resolved"": ""https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz"",\n ""integrity"": ""sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""optional"": true,\n ""engines"": {\n ""node"": "">=14""\n }\n },\n ""node_modules/@types/estree"": {\n ""version"": ""1.0.8"",\n ""resolved"": ""https://registry.npmjs.org/@types/estree/-/estree-1.0.8.tgz"",\n ""integrity"": ""sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@types/istanbul-lib-coverage"": {\n ""version"": ""2.0.6"",\n ""resolved"": ""https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz"",\n ""integrity"": ""sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@types/json-schema"": {\n ""version"": ""7.0.15"",\n ""resolved"": ""https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.15.tgz"",\n ""integrity"": ""sha512-5+fP8P8MFNC+AyZCDxrB2pkZFPGzqQWUzpSeuuVLvm8VMcorNYavBqoFcxK8bQz4Qsbn4oUEEem4wDLfcysGHA=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@types/mocha"": {\n ""version"": ""10.0.10"",\n ""resolved"": ""https://registry.npmjs.org/@types/mocha/-/mocha-10.0.10.tgz"",\n ""integrity"": 
""sha512-xPyYSz1cMPnJQhl0CLMH68j3gprKZaTjG3s5Vi+fDgx+uhG9NOXwbVt52eFS8ECyXhyKcjDLCBEqBExKuiZb7Q=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@types/node"": {\n ""version"": ""22.19.0"",\n ""resolved"": ""https://registry.npmjs.org/@types/node/-/node-22.19.0.tgz"",\n ""integrity"": ""sha512-xpr/lmLPQEj+TUnHmR+Ab91/glhJvsqcjB+yY0Ix9GO70H6Lb4FHH5GeqdOE5btAx7eIMwuHkp4H2MSkLcqWbA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""undici-types"": ""~6.21.0""\n }\n },\n ""node_modules/@types/vscode"": {\n ""version"": ""1.105.0"",\n ""resolved"": ""https://registry.npmjs.org/@types/vscode/-/vscode-1.105.0.tgz"",\n ""integrity"": ""sha512-Lotk3CTFlGZN8ray4VxJE7axIyLZZETQJVWi/lYoUVQuqfRxlQhVOfoejsD2V3dVXPSbS15ov5ZyowMAzgUqcw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/@typescript-eslint/eslint-plugin"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-8.46.3.tgz"",\n ""integrity"": ""sha512-sbaQ27XBUopBkRiuY/P9sWGOWUW4rl8fDoHIUmLpZd8uldsTyB4/Zg6bWTegPoTLnKj9Hqgn3QD6cjPNB32Odw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@eslint-community/regexpp"": ""^4.10.0"",\n ""@typescript-eslint/scope-manager"": ""8.46.3"",\n ""@typescript-eslint/type-utils"": ""8.46.3"",\n ""@typescript-eslint/utils"": ""8.46.3"",\n ""@typescript-eslint/visitor-keys"": ""8.46.3"",\n ""graphemer"": ""^1.4.0"",\n ""ignore"": ""^7.0.0"",\n ""natural-compare"": ""^1.4.0"",\n ""ts-api-utils"": ""^2.1.0""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""@typescript-eslint/parser"": ""^8.46.3"",\n ""eslint"": ""^8.57.0 || ^9.0.0"",\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/parser"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/parser/-/parser-8.46.3.tgz"",\n ""integrity"": ""sha512-6m1I5RmHBGTnUGS113G04DMu3CpSdxCAU/UvtjNWL4Nuf3MW9tQhiJqRlHzChIkhy6kZSAQmc+I1bcGjE3yNKg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/scope-manager"": ""8.46.3"",\n ""@typescript-eslint/types"": ""8.46.3"",\n ""@typescript-eslint/typescript-estree"": ""8.46.3"",\n ""@typescript-eslint/visitor-keys"": ""8.46.3"",\n ""debug"": ""^4.3.4""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""eslint"": ""^8.57.0 || ^9.0.0"",\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/project-service"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/project-service/-/project-service-8.46.3.tgz"",\n ""integrity"": ""sha512-Fz8yFXsp2wDFeUElO88S9n4w1I4CWDTXDqDr9gYvZgUpwXQqmZBr9+NTTql5R3J7+hrJZPdpiWaB9VNhAKYLuQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/tsconfig-utils"": ""^8.46.3"",\n ""@typescript-eslint/types"": ""^8.46.3"",\n ""debug"": ""^4.3.4""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n 
""node_modules/@typescript-eslint/scope-manager"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/scope-manager/-/scope-manager-8.46.3.tgz"",\n ""integrity"": ""sha512-FCi7Y1zgrmxp3DfWfr+3m9ansUUFoy8dkEdeQSgA9gbm8DaHYvZCdkFRQrtKiedFf3Ha6VmoqoAaP68+i+22kg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/types"": ""8.46.3"",\n ""@typescript-eslint/visitor-keys"": ""8.46.3""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n }\n },\n ""node_modules/@typescript-eslint/tsconfig-utils"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/tsconfig-utils/-/tsconfig-utils-8.46.3.tgz"",\n ""integrity"": ""sha512-GLupljMniHNIROP0zE7nCcybptolcH8QZfXOpCfhQDAdwJ/ZTlcaBOYebSOZotpti/3HrHSw7D3PZm75gYFsOA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/type-utils"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/type-utils/-/type-utils-8.46.3.tgz"",\n ""integrity"": ""sha512-ZPCADbr+qfz3aiTTYNNkCbUt+cjNwI/5McyANNrFBpVxPt7GqpEYz5ZfdwuFyGUnJ9FdDXbGODUu6iRCI6XRXw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/types"": ""8.46.3"",\n ""@typescript-eslint/typescript-estree"": ""8.46.3"",\n ""@typescript-eslint/utils"": ""8.46.3"",\n ""debug"": ""^4.3.4"",\n ""ts-api-utils"": ""^2.1.0""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""eslint"": ""^8.57.0 || ^9.0.0"",\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/types"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/types/-/types-8.46.3.tgz"",\n ""integrity"": ""sha512-G7Ok9WN/ggW7e/tOf8TQYMaxgID3Iujn231hfi0Pc7ZheztIJVpO44ekY00b7akqc6nZcvregk0Jpah3kep6hA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n }\n },\n ""node_modules/@typescript-eslint/typescript-estree"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-8.46.3.tgz"",\n ""integrity"": ""sha512-f/NvtRjOm80BtNM5OQtlaBdM5BRFUv7gf381j9wygDNL+qOYSNOgtQ/DCndiYi80iIOv76QqaTmp4fa9hwI0OA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/project-service"": ""8.46.3"",\n ""@typescript-eslint/tsconfig-utils"": ""8.46.3"",\n ""@typescript-eslint/types"": ""8.46.3"",\n ""@typescript-eslint/visitor-keys"": ""8.46.3"",\n ""debug"": ""^4.3.4"",\n ""fast-glob"": ""^3.3.2"",\n ""is-glob"": ""^4.0.3"",\n ""minimatch"": ""^9.0.4"",\n ""semver"": ""^7.6.0"",\n ""ts-api-utils"": ""^2.1.0""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": 
""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/utils"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/utils/-/utils-8.46.3.tgz"",\n ""integrity"": ""sha512-VXw7qmdkucEx9WkmR3ld/u6VhRyKeiF1uxWwCy/iuNfokjJ7VhsgLSOTjsol8BunSw190zABzpwdNsze2Kpo4g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@eslint-community/eslint-utils"": ""^4.7.0"",\n ""@typescript-eslint/scope-manager"": ""8.46.3"",\n ""@typescript-eslint/types"": ""8.46.3"",\n ""@typescript-eslint/typescript-estree"": ""8.46.3""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n },\n ""peerDependencies"": {\n ""eslint"": ""^8.57.0 || ^9.0.0"",\n ""typescript"": "">=4.8.4 <6.0.0""\n }\n },\n ""node_modules/@typescript-eslint/visitor-keys"": {\n ""version"": ""8.46.3"",\n ""resolved"": ""https://registry.npmjs.org/@typescript-eslint/visitor-keys/-/visitor-keys-8.46.3.tgz"",\n ""integrity"": ""sha512-uk574k8IU0rOF/AjniX8qbLSGURJVUCeM5e4MIMKBFFi8weeiLrG1fyQejyLXQpRZbU/1BuQasleV/RfHC3hHg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@typescript-eslint/types"": ""8.46.3"",\n ""eslint-visitor-keys"": ""^4.2.1""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/typescript-eslint""\n }\n },\n ""node_modules/@typescript-eslint/visitor-keys/node_modules/eslint-visitor-keys"": {\n ""version"": ""4.2.1"",\n ""resolved"": ""https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz"",\n ""integrity"": ""sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/@vscode/test-cli"": {\n ""version"": ""0.0.11"",\n ""resolved"": ""https://registry.npmjs.org/@vscode/test-cli/-/test-cli-0.0.11.tgz"",\n ""integrity"": ""sha512-qO332yvzFqGhBMJrp6TdwbIydiHgCtxXc2Nl6M58mbH/Z+0CyLR76Jzv4YWPEthhrARprzCRJUqzFvTHFhTj7Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@types/mocha"": ""^10.0.2"",\n ""c8"": ""^9.1.0"",\n ""chokidar"": ""^3.5.3"",\n ""enhanced-resolve"": ""^5.15.0"",\n ""glob"": ""^10.3.10"",\n ""minimatch"": ""^9.0.3"",\n ""mocha"": ""^11.1.0"",\n ""supports-color"": ""^9.4.0"",\n ""yargs"": ""^17.7.2""\n },\n ""bin"": {\n ""vscode-test"": ""out/bin.mjs""\n },\n ""engines"": {\n ""node"": "">=18""\n }\n },\n ""node_modules/@vscode/test-electron"": {\n ""version"": ""2.5.2"",\n ""resolved"": ""https://registry.npmjs.org/@vscode/test-electron/-/test-electron-2.5.2.tgz"",\n ""integrity"": ""sha512-8ukpxv4wYe0iWMRQU18jhzJOHkeGKbnw7xWRX3Zw1WJA4cEKbHcmmLPdPrPtL6rhDcrlCZN+xKRpv09n4gRHYg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""http-proxy-agent"": ""^7.0.2"",\n ""https-proxy-agent"": ""^7.0.5"",\n ""jszip"": ""^3.10.1"",\n ""ora"": ""^8.1.0"",\n ""semver"": ""^7.6.2""\n },\n ""engines"": {\n ""node"": "">=16""\n }\n },\n ""node_modules/acorn"": {\n ""version"": ""8.15.0"",\n ""resolved"": ""https://registry.npmjs.org/acorn/-/acorn-8.15.0.tgz"",\n ""integrity"": 
""sha512-NZyJarBfL7nWwIq+FDL6Zp/yHEhePMNnnJ0y3qfieCrmNvYct8uvtiV41UvlSe6apAfk0fY1FbWx+NwfmpvtTg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""bin"": {\n ""acorn"": ""bin/acorn""\n },\n ""engines"": {\n ""node"": "">=0.4.0""\n }\n },\n ""node_modules/acorn-jsx"": {\n ""version"": ""5.3.2"",\n ""resolved"": ""https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.3.2.tgz"",\n ""integrity"": ""sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""peerDependencies"": {\n ""acorn"": ""^6.0.0 || ^7.0.0 || ^8.0.0""\n }\n },\n ""node_modules/agent-base"": {\n ""version"": ""7.1.4"",\n ""resolved"": ""https://registry.npmjs.org/agent-base/-/agent-base-7.1.4.tgz"",\n ""integrity"": ""sha512-MnA+YT8fwfJPgBx3m60MNqakm30XOkyIoH1y6huTQvC0PwZG7ki8NacLBcrPbNoo8vEZy7Jpuk7+jMO+CUovTQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 14""\n }\n },\n ""node_modules/ajv"": {\n ""version"": ""6.12.6"",\n ""resolved"": ""https://registry.npmjs.org/ajv/-/ajv-6.12.6.tgz"",\n ""integrity"": ""sha512-j3fVLgvTo527anyYyJOGTYJbG+vnnQYvE0m5mmkc1TK+nxAppkCLMIL0aZ4dblVCNoGShhm+kzE4ZUykBoMg4g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""fast-deep-equal"": ""^3.1.1"",\n ""fast-json-stable-stringify"": ""^2.0.0"",\n ""json-schema-traverse"": ""^0.4.1"",\n ""uri-js"": ""^4.2.2""\n },\n ""funding"": {\n ""type"": ""github"",\n ""url"": ""https://github.com/sponsors/epoberezkin""\n }\n },\n ""node_modules/ansi-regex"": {\n ""version"": ""6.2.2"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.2.2.tgz"",\n ""integrity"": ""sha512-Bq3SmSpyFHaWjPk8If9yc6svM8c56dB5BAtW4Qbw5jHTwwXXcTLoRMkpDJp6VL0XzlWaCHTXrkFURMYmD0sLqg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/ansi-regex?sponsor=1""\n }\n },\n ""node_modules/ansi-styles"": {\n ""version"": ""4.3.0"",\n ""resolved"": ""https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz"",\n ""integrity"": ""sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""color-convert"": ""^2.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/ansi-styles?sponsor=1""\n }\n },\n ""node_modules/anymatch"": {\n ""version"": ""3.1.3"",\n ""resolved"": ""https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz"",\n ""integrity"": ""sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""normalize-path"": ""^3.0.0"",\n ""picomatch"": ""^2.0.4""\n },\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/argparse"": {\n ""version"": ""2.0.1"",\n ""resolved"": ""https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz"",\n ""integrity"": ""sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="",\n ""dev"": true,\n ""license"": ""Python-2.0""\n },\n ""node_modules/balanced-match"": {\n ""version"": ""1.0.2"",\n ""resolved"": ""https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz"",\n ""integrity"": ""sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/binary-extensions"": {\n ""version"": 
""2.3.0"",\n ""resolved"": ""https://registry.npmjs.org/binary-extensions/-/binary-extensions-2.3.0.tgz"",\n ""integrity"": ""sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/brace-expansion"": {\n ""version"": ""2.0.2"",\n ""resolved"": ""https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.2.tgz"",\n ""integrity"": ""sha512-Jt0vHyM+jmUBqojB7E1NIYadt0vI0Qxjxd2TErW94wDz+E2LAm5vKMXXwg6ZZBTHPuUlDgQHKXvjGBdfcF1ZDQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""balanced-match"": ""^1.0.0""\n }\n },\n ""node_modules/braces"": {\n ""version"": ""3.0.3"",\n ""resolved"": ""https://registry.npmjs.org/braces/-/braces-3.0.3.tgz"",\n ""integrity"": ""sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""fill-range"": ""^7.1.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/browser-stdout"": {\n ""version"": ""1.3.1"",\n ""resolved"": ""https://registry.npmjs.org/browser-stdout/-/browser-stdout-1.3.1.tgz"",\n ""integrity"": ""sha512-qhAVI1+Av2X7qelOfAIYwXONood6XlZE/fXaBSmW/T5SzLAmCgzi+eiWE7fUvbHaeNBQH13UftjpXxsfLkMpgw=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/c8"": {\n ""version"": ""9.1.0"",\n ""resolved"": ""https://registry.npmjs.org/c8/-/c8-9.1.0.tgz"",\n ""integrity"": ""sha512-mBWcT5iqNir1zIkzSPyI3NCR9EZCVI3WUD+AVO17MVWTSFNyUueXE82qTeampNtTr+ilN/5Ua3j24LgbCKjDVg=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""@bcoe/v8-coverage"": ""^0.2.3"",\n ""@istanbuljs/schema"": ""^0.1.3"",\n ""find-up"": ""^5.0.0"",\n ""foreground-child"": ""^3.1.1"",\n ""istanbul-lib-coverage"": ""^3.2.0"",\n ""istanbul-lib-report"": ""^3.0.1"",\n ""istanbul-reports"": ""^3.1.6"",\n ""test-exclude"": ""^6.0.0"",\n ""v8-to-istanbul"": ""^9.0.0"",\n ""yargs"": ""^17.7.2"",\n ""yargs-parser"": ""^21.1.1""\n },\n ""bin"": {\n ""c8"": ""bin/c8.js""\n },\n ""engines"": {\n ""node"": "">=14.14.0""\n }\n },\n ""node_modules/callsites"": {\n ""version"": ""3.1.0"",\n ""resolved"": ""https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz"",\n ""integrity"": ""sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6""\n }\n },\n ""node_modules/camelcase"": {\n ""version"": ""6.3.0"",\n ""resolved"": ""https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz"",\n ""integrity"": ""sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/chalk"": {\n ""version"": ""4.1.2"",\n ""resolved"": ""https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz"",\n ""integrity"": ""sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-styles"": ""^4.1.0"",\n ""supports-color"": ""^7.1.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/chalk?sponsor=1""\n }\n },\n 
""node_modules/chalk/node_modules/supports-color"": {\n ""version"": ""7.2.0"",\n ""resolved"": ""https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz"",\n ""integrity"": ""sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""has-flag"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/chokidar"": {\n ""version"": ""3.6.0"",\n ""resolved"": ""https://registry.npmjs.org/chokidar/-/chokidar-3.6.0.tgz"",\n ""integrity"": ""sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""anymatch"": ""~3.1.2"",\n ""braces"": ""~3.0.2"",\n ""glob-parent"": ""~5.1.2"",\n ""is-binary-path"": ""~2.1.0"",\n ""is-glob"": ""~4.0.1"",\n ""normalize-path"": ""~3.0.0"",\n ""readdirp"": ""~3.6.0""\n },\n ""engines"": {\n ""node"": "">= 8.10.0""\n },\n ""funding"": {\n ""url"": ""https://paulmillr.com/funding/""\n },\n ""optionalDependencies"": {\n ""fsevents"": ""~2.3.2""\n }\n },\n ""node_modules/cli-cursor"": {\n ""version"": ""5.0.0"",\n ""resolved"": ""https://registry.npmjs.org/cli-cursor/-/cli-cursor-5.0.0.tgz"",\n ""integrity"": ""sha512-aCj4O5wKyszjMmDT4tZj93kxyydN/K5zPWSCe6/0AV/AA1pqe5ZBIw0a2ZfPQV7lL5/yb5HsUreJ6UFAF1tEQw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""restore-cursor"": ""^5.0.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/cli-spinners"": {\n ""version"": ""2.9.2"",\n ""resolved"": ""https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.2.tgz"",\n ""integrity"": ""sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/cliui"": {\n ""version"": ""8.0.1"",\n ""resolved"": ""https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz"",\n ""integrity"": ""sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""string-width"": ""^4.2.0"",\n ""strip-ansi"": ""^6.0.1"",\n ""wrap-ansi"": ""^7.0.0""\n },\n ""engines"": {\n ""node"": "">=12""\n }\n },\n ""node_modules/cliui/node_modules/ansi-regex"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"",\n ""integrity"": ""sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/cliui/node_modules/emoji-regex"": {\n ""version"": ""8.0.0"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz"",\n ""integrity"": ""sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/cliui/node_modules/string-width"": {\n ""version"": ""4.2.3"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"",\n ""integrity"": ""sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n 
""emoji-regex"": ""^8.0.0"",\n ""is-fullwidth-code-point"": ""^3.0.0"",\n ""strip-ansi"": ""^6.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/cliui/node_modules/strip-ansi"": {\n ""version"": ""6.0.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"",\n ""integrity"": ""sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/cliui/node_modules/wrap-ansi"": {\n ""version"": ""7.0.0"",\n ""resolved"": ""https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz"",\n ""integrity"": ""sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-styles"": ""^4.0.0"",\n ""string-width"": ""^4.1.0"",\n ""strip-ansi"": ""^6.0.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/wrap-ansi?sponsor=1""\n }\n },\n ""node_modules/color-convert"": {\n ""version"": ""2.0.1"",\n ""resolved"": ""https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz"",\n ""integrity"": ""sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""color-name"": ""~1.1.4""\n },\n ""engines"": {\n ""node"": "">=7.0.0""\n }\n },\n ""node_modules/color-name"": {\n ""version"": ""1.1.4"",\n ""resolved"": ""https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz"",\n ""integrity"": ""sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/concat-map"": {\n ""version"": ""0.0.1"",\n ""resolved"": ""https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz"",\n ""integrity"": ""sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/convert-source-map"": {\n ""version"": ""2.0.0"",\n ""resolved"": ""https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz"",\n ""integrity"": ""sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/core-util-is"": {\n ""version"": ""1.0.3"",\n ""resolved"": ""https://registry.npmjs.org/core-util-is/-/core-util-is-1.0.3.tgz"",\n ""integrity"": ""sha512-ZQBvi1DcpJ4GDqanjucZ2Hj3wEO5pZDS89BWbkcrvdxksJorwUDDZamX9ldFkp9aw2lmBDLgkObEA4DWNJ9FYQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/cross-spawn"": {\n ""version"": ""7.0.6"",\n ""resolved"": ""https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.6.tgz"",\n ""integrity"": ""sha512-uV2QOWP2nWzsy2aMp8aRibhi9dlzF5Hgh5SHaB9OiTGEyDTiJJyx0uy51QXdyWbtAHNua4XJzUKca3OzKUd3vA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""path-key"": ""^3.1.0"",\n ""shebang-command"": ""^2.0.0"",\n ""which"": ""^2.0.1""\n },\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/debug"": {\n ""version"": ""4.4.3"",\n ""resolved"": ""https://registry.npmjs.org/debug/-/debug-4.4.3.tgz"",\n ""integrity"": ""sha512-RGwwWnwQvkVfavKVt22FGLw+xYSdzARwm0ru6DhTVA3umU5hZc28V3kO4stgYryrTlLpuvgI9GiijltAjNbcqA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n 
""dependencies"": {\n ""ms"": ""^2.1.3""\n },\n ""engines"": {\n ""node"": "">=6.0""\n },\n ""peerDependenciesMeta"": {\n ""supports-color"": {\n ""optional"": true\n }\n }\n },\n ""node_modules/decamelize"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/decamelize/-/decamelize-4.0.0.tgz"",\n ""integrity"": ""sha512-9iE1PgSik9HeIIw2JO94IidnE3eBoQrFJ3w7sFuzSX4DpmZ3v5sZpUiV5Swcf6mQEF+Y0ru8Neo+p+nyh2J+hQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/deep-is"": {\n ""version"": ""0.1.4"",\n ""resolved"": ""https://registry.npmjs.org/deep-is/-/deep-is-0.1.4.tgz"",\n ""integrity"": ""sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/diff"": {\n ""version"": ""7.0.0"",\n ""resolved"": ""https://registry.npmjs.org/diff/-/diff-7.0.0.tgz"",\n ""integrity"": ""sha512-PJWHUb1RFevKCwaFA9RlG5tCd+FO5iRh9A8HEtkmBH2Li03iJriB6m6JIN4rGz3K3JLawI7/veA1xzRKP6ISBw=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""engines"": {\n ""node"": "">=0.3.1""\n }\n },\n ""node_modules/eastasianwidth"": {\n ""version"": ""0.2.0"",\n ""resolved"": ""https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz"",\n ""integrity"": ""sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/emoji-regex"": {\n ""version"": ""9.2.2"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz"",\n ""integrity"": ""sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/enhanced-resolve"": {\n ""version"": ""5.18.3"",\n ""resolved"": ""https://registry.npmjs.org/enhanced-resolve/-/enhanced-resolve-5.18.3.tgz"",\n ""integrity"": ""sha512-d4lC8xfavMeBjzGr2vECC3fsGXziXZQyJxD868h2M/mBI3PwAuODxAkLkq5HYuvrPYcUtiLzsTo8U3PgX3Ocww=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""graceful-fs"": ""^4.2.4"",\n ""tapable"": ""^2.2.0""\n },\n ""engines"": {\n ""node"": "">=10.13.0""\n }\n },\n ""node_modules/escalade"": {\n ""version"": ""3.2.0"",\n ""resolved"": ""https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz"",\n ""integrity"": ""sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6""\n }\n },\n ""node_modules/escape-string-regexp"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-4.0.0.tgz"",\n ""integrity"": ""sha512-TtpcNJ3XAzx3Gq8sWRzJaVajRs0uVxA2YAkdb1jm2YkPz4G6egUFAyA3n5vtEIZefPk5Wa4UXbKuS5fKkJWdgA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/eslint"": {\n ""version"": ""9.39.1"",\n ""resolved"": ""https://registry.npmjs.org/eslint/-/eslint-9.39.1.tgz"",\n ""integrity"": ""sha512-BhHmn2yNOFA9H9JmmIVKJmd288g9hrVRDkdoIgRCRuSySRUHH7r/DI6aAXW9T1WwUuY3DFgrcaqB+deURBLR5g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@eslint-community/eslint-utils"": ""^4.8.0"",\n ""@eslint-community/regexpp"": ""^4.12.1"",\n ""@eslint/config-array"": 
""^0.21.1"",\n ""@eslint/config-helpers"": ""^0.4.2"",\n ""@eslint/core"": ""^0.17.0"",\n ""@eslint/eslintrc"": ""^3.3.1"",\n ""@eslint/js"": ""9.39.1"",\n ""@eslint/plugin-kit"": ""^0.4.1"",\n ""@humanfs/node"": ""^0.16.6"",\n ""@humanwhocodes/module-importer"": ""^1.0.1"",\n ""@humanwhocodes/retry"": ""^0.4.2"",\n ""@types/estree"": ""^1.0.6"",\n ""ajv"": ""^6.12.4"",\n ""chalk"": ""^4.0.0"",\n ""cross-spawn"": ""^7.0.6"",\n ""debug"": ""^4.3.2"",\n ""escape-string-regexp"": ""^4.0.0"",\n ""eslint-scope"": ""^8.4.0"",\n ""eslint-visitor-keys"": ""^4.2.1"",\n ""espree"": ""^10.4.0"",\n ""esquery"": ""^1.5.0"",\n ""esutils"": ""^2.0.2"",\n ""fast-deep-equal"": ""^3.1.3"",\n ""file-entry-cache"": ""^8.0.0"",\n ""find-up"": ""^5.0.0"",\n ""glob-parent"": ""^6.0.2"",\n ""ignore"": ""^5.2.0"",\n ""imurmurhash"": ""^0.1.4"",\n ""is-glob"": ""^4.0.0"",\n ""json-stable-stringify-without-jsonify"": ""^1.0.1"",\n ""lodash.merge"": ""^4.6.2"",\n ""minimatch"": ""^3.1.2"",\n ""natural-compare"": ""^1.4.0"",\n ""optionator"": ""^0.9.3""\n },\n ""bin"": {\n ""eslint"": ""bin/eslint.js""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://eslint.org/donate""\n },\n ""peerDependencies"": {\n ""jiti"": ""*""\n },\n ""peerDependenciesMeta"": {\n ""jiti"": {\n ""optional"": true\n }\n }\n },\n ""node_modules/eslint-scope"": {\n ""version"": ""8.4.0"",\n ""resolved"": ""https://registry.npmjs.org/eslint-scope/-/eslint-scope-8.4.0.tgz"",\n ""integrity"": ""sha512-sNXOfKCn74rt8RICKMvJS7XKV/Xk9kA7DyJr8mJik3S7Cwgy3qlkkmyS2uQB3jiJg6VNdZd/pDBJu0nvG2NlTg=="",\n ""dev"": true,\n ""license"": ""BSD-2-Clause"",\n ""dependencies"": {\n ""esrecurse"": ""^4.3.0"",\n ""estraverse"": ""^5.2.0""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/eslint-visitor-keys"": {\n ""version"": ""3.4.3"",\n ""resolved"": ""https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-3.4.3.tgz"",\n ""integrity"": ""sha512-wpc+LXeiyiisxPlEkUzU6svyS1frIO3Mgxj1fdy7Pm8Ygzguax2N3Fa/D/ag1WqbOprdI+uY6wMUl8/a2G+iag=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": ""^12.22.0 || ^14.17.0 || >=16.0.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/eslint/node_modules/brace-expansion"": {\n ""version"": ""1.1.12"",\n ""resolved"": ""https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz"",\n ""integrity"": ""sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""balanced-match"": ""^1.0.0"",\n ""concat-map"": ""0.0.1""\n }\n },\n ""node_modules/eslint/node_modules/eslint-visitor-keys"": {\n ""version"": ""4.2.1"",\n ""resolved"": ""https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz"",\n ""integrity"": ""sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/eslint/node_modules/glob-parent"": {\n ""version"": ""6.0.2"",\n ""resolved"": ""https://registry.npmjs.org/glob-parent/-/glob-parent-6.0.2.tgz"",\n ""integrity"": 
""sha512-XxwI8EOhVQgWp6iDL+3b0r86f4d6AX6zSU55HfB4ydCEuXLXc5FcYeOu+nnGftS4TEju/11rt4KJPTMgbfmv4A=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""is-glob"": ""^4.0.3""\n },\n ""engines"": {\n ""node"": "">=10.13.0""\n }\n },\n ""node_modules/eslint/node_modules/ignore"": {\n ""version"": ""5.3.2"",\n ""resolved"": ""https://registry.npmjs.org/ignore/-/ignore-5.3.2.tgz"",\n ""integrity"": ""sha512-hsBTNUqQTDwkWtcdYI2i06Y/nUBEsNEDJKjWdigLvegy8kDuJAS8uRlpkkcQpyEXL0Z/pjDy5HBmMjRCJ2gq+g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 4""\n }\n },\n ""node_modules/eslint/node_modules/minimatch"": {\n ""version"": ""3.1.2"",\n ""resolved"": ""https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz"",\n ""integrity"": ""sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""brace-expansion"": ""^1.1.7""\n },\n ""engines"": {\n ""node"": ""*""\n }\n },\n ""node_modules/espree"": {\n ""version"": ""10.4.0"",\n ""resolved"": ""https://registry.npmjs.org/espree/-/espree-10.4.0.tgz"",\n ""integrity"": ""sha512-j6PAQ2uUr79PZhBjP5C5fhl8e39FmRnOjsD5lGnWrFU8i2G776tBK7+nP8KuQUTTyAZUwfQqXAgrVH5MbH9CYQ=="",\n ""dev"": true,\n ""license"": ""BSD-2-Clause"",\n ""dependencies"": {\n ""acorn"": ""^8.15.0"",\n ""acorn-jsx"": ""^5.3.2"",\n ""eslint-visitor-keys"": ""^4.2.1""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/espree/node_modules/eslint-visitor-keys"": {\n ""version"": ""4.2.1"",\n ""resolved"": ""https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz"",\n ""integrity"": ""sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n },\n ""funding"": {\n ""url"": ""https://opencollective.com/eslint""\n }\n },\n ""node_modules/esquery"": {\n ""version"": ""1.6.0"",\n ""resolved"": ""https://registry.npmjs.org/esquery/-/esquery-1.6.0.tgz"",\n ""integrity"": ""sha512-ca9pw9fomFcKPvFLXhBKUK90ZvGibiGOvRJNbjljY7s7uq/5YO4BOzcYtJqExdx99rF6aAcnRxHmcUHcz6sQsg=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""dependencies"": {\n ""estraverse"": ""^5.1.0""\n },\n ""engines"": {\n ""node"": "">=0.10""\n }\n },\n ""node_modules/esrecurse"": {\n ""version"": ""4.3.0"",\n ""resolved"": ""https://registry.npmjs.org/esrecurse/-/esrecurse-4.3.0.tgz"",\n ""integrity"": ""sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag=="",\n ""dev"": true,\n ""license"": ""BSD-2-Clause"",\n ""dependencies"": {\n ""estraverse"": ""^5.2.0""\n },\n ""engines"": {\n ""node"": "">=4.0""\n }\n },\n ""node_modules/estraverse"": {\n ""version"": ""5.3.0"",\n ""resolved"": ""https://registry.npmjs.org/estraverse/-/estraverse-5.3.0.tgz"",\n ""integrity"": ""sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA=="",\n ""dev"": true,\n ""license"": ""BSD-2-Clause"",\n ""engines"": {\n ""node"": "">=4.0""\n }\n },\n ""node_modules/esutils"": {\n ""version"": ""2.0.3"",\n ""resolved"": ""https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz"",\n ""integrity"": ""sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g=="",\n ""dev"": true,\n ""license"": 
""BSD-2-Clause"",\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/fast-deep-equal"": {\n ""version"": ""3.1.3"",\n ""resolved"": ""https://registry.npmjs.org/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz"",\n ""integrity"": ""sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/fast-glob"": {\n ""version"": ""3.3.3"",\n ""resolved"": ""https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.3.tgz"",\n ""integrity"": ""sha512-7MptL8U0cqcFdzIzwOTHoilX9x5BrNqye7Z/LuC7kCMRio1EMSyqRK3BEAUD7sXRq4iT4AzTVuZdhgQ2TCvYLg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""@nodelib/fs.stat"": ""^2.0.2"",\n ""@nodelib/fs.walk"": ""^1.2.3"",\n ""glob-parent"": ""^5.1.2"",\n ""merge2"": ""^1.3.0"",\n ""micromatch"": ""^4.0.8""\n },\n ""engines"": {\n ""node"": "">=8.6.0""\n }\n },\n ""node_modules/fast-json-stable-stringify"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz"",\n ""integrity"": ""sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/fast-levenshtein"": {\n ""version"": ""2.0.6"",\n ""resolved"": ""https://registry.npmjs.org/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz"",\n ""integrity"": ""sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/fastq"": {\n ""version"": ""1.19.1"",\n ""resolved"": ""https://registry.npmjs.org/fastq/-/fastq-1.19.1.tgz"",\n ""integrity"": ""sha512-GwLTyxkCXjXbxqIhTsMI2Nui8huMPtnxg7krajPJAjnEG/iiOS7i+zCtWGZR9G0NBKbXKh6X9m9UIsYX/N6vvQ=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""reusify"": ""^1.0.4""\n }\n },\n ""node_modules/file-entry-cache"": {\n ""version"": ""8.0.0"",\n ""resolved"": ""https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-8.0.0.tgz"",\n ""integrity"": ""sha512-XXTUwCvisa5oacNGRP9SfNtYBNAMi+RPwBFmblZEF7N7swHYQS6/Zfk7SRwx4D5j3CH211YNRco1DEMNVfZCnQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""flat-cache"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=16.0.0""\n }\n },\n ""node_modules/fill-range"": {\n ""version"": ""7.1.1"",\n ""resolved"": ""https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz"",\n ""integrity"": ""sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""to-regex-range"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/find-up"": {\n ""version"": ""5.0.0"",\n ""resolved"": ""https://registry.npmjs.org/find-up/-/find-up-5.0.0.tgz"",\n ""integrity"": ""sha512-78/PXT1wlLLDgTzDs7sjq9hzz0vXD+zn+7wypEe4fXQxCmdmqfGsEPQxmiCSQI3ajFV91bVSsvNtrJRiW6nGng=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""locate-path"": ""^6.0.0"",\n ""path-exists"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/flat"": {\n ""version"": ""5.0.2"",\n ""resolved"": ""https://registry.npmjs.org/flat/-/flat-5.0.2.tgz"",\n ""integrity"": ""sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ=="",\n ""dev"": true,\n ""license"": 
""BSD-3-Clause"",\n ""bin"": {\n ""flat"": ""cli.js""\n }\n },\n ""node_modules/flat-cache"": {\n ""version"": ""4.0.1"",\n ""resolved"": ""https://registry.npmjs.org/flat-cache/-/flat-cache-4.0.1.tgz"",\n ""integrity"": ""sha512-f7ccFPK3SXFHpx15UIGyRJ/FJQctuKZ0zVuN3frBo4HnK3cay9VEW0R6yPYFHC0AgqhukPzKjq22t5DmAyqGyw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""flatted"": ""^3.2.9"",\n ""keyv"": ""^4.5.4""\n },\n ""engines"": {\n ""node"": "">=16""\n }\n },\n ""node_modules/flatted"": {\n ""version"": ""3.3.3"",\n ""resolved"": ""https://registry.npmjs.org/flatted/-/flatted-3.3.3.tgz"",\n ""integrity"": ""sha512-GX+ysw4PBCz0PzosHDepZGANEuFCMLrnRTiEy9McGjmkCQYwRq4A/X786G/fjM/+OjsWSU1ZrY5qyARZmO/uwg=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/foreground-child"": {\n ""version"": ""3.3.1"",\n ""resolved"": ""https://registry.npmjs.org/foreground-child/-/foreground-child-3.3.1.tgz"",\n ""integrity"": ""sha512-gIXjKqtFuWEgzFRJA9WCQeSJLZDjgJUOMCMzxtvFq/37KojM1BFGufqsCy0r4qSQmYLsZYMeyRqzIWOMup03sw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""cross-spawn"": ""^7.0.6"",\n ""signal-exit"": ""^4.0.1""\n },\n ""engines"": {\n ""node"": "">=14""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n ""node_modules/fs.realpath"": {\n ""version"": ""1.0.0"",\n ""resolved"": ""https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz"",\n ""integrity"": ""sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/fsevents"": {\n ""version"": ""2.3.3"",\n ""resolved"": ""https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz"",\n ""integrity"": ""sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw=="",\n ""dev"": true,\n ""hasInstallScript"": true,\n ""license"": ""MIT"",\n ""optional"": true,\n ""os"": [\n ""darwin""\n ],\n ""engines"": {\n ""node"": ""^8.16.0 || ^10.6.0 || >=11.0.0""\n }\n },\n ""node_modules/get-caller-file"": {\n ""version"": ""2.0.5"",\n ""resolved"": ""https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz"",\n ""integrity"": ""sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""engines"": {\n ""node"": ""6.* || 8.* || >= 10.*""\n }\n },\n ""node_modules/get-east-asian-width"": {\n ""version"": ""1.4.0"",\n ""resolved"": ""https://registry.npmjs.org/get-east-asian-width/-/get-east-asian-width-1.4.0.tgz"",\n ""integrity"": ""sha512-QZjmEOC+IT1uk6Rx0sX22V6uHWVwbdbxf1faPqJ1QhLdGgsRGCZoyaQBm/piRdJy/D2um6hM1UP7ZEeQ4EkP+Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/glob"": {\n ""version"": ""10.4.5"",\n ""resolved"": ""https://registry.npmjs.org/glob/-/glob-10.4.5.tgz"",\n ""integrity"": ""sha512-7Bv8RF0k6xjo7d4A/PxYLbUCfb6c+Vpd2/mB2yRDlew7Jb5hEXiCD9ibfO7wpk8i4sevK6DFny9h7EYbM3/sHg=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""foreground-child"": ""^3.1.0"",\n ""jackspeak"": ""^3.1.2"",\n ""minimatch"": ""^9.0.4"",\n ""minipass"": ""^7.1.2"",\n ""package-json-from-dist"": ""^1.0.0"",\n ""path-scurry"": ""^1.11.1""\n },\n ""bin"": {\n ""glob"": ""dist/esm/bin.mjs""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n 
""node_modules/glob-parent"": {\n ""version"": ""5.1.2"",\n ""resolved"": ""https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz"",\n ""integrity"": ""sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""is-glob"": ""^4.0.1""\n },\n ""engines"": {\n ""node"": "">= 6""\n }\n },\n ""node_modules/globals"": {\n ""version"": ""14.0.0"",\n ""resolved"": ""https://registry.npmjs.org/globals/-/globals-14.0.0.tgz"",\n ""integrity"": ""sha512-oahGvuMGQlPw/ivIYBjVSrWAfWLBeku5tpPE2fOPLi+WHffIWbuh2tCjhyQhTBPMf5E9jDEH4FOmTYgYwbKwtQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/graceful-fs"": {\n ""version"": ""4.2.11"",\n ""resolved"": ""https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz"",\n ""integrity"": ""sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/graphemer"": {\n ""version"": ""1.4.0"",\n ""resolved"": ""https://registry.npmjs.org/graphemer/-/graphemer-1.4.0.tgz"",\n ""integrity"": ""sha512-EtKwoO6kxCL9WO5xipiHTZlSzBm7WLT627TqC/uVRd0HKmq8NXyebnNYxDoBi7wt8eTWrUrKXCOVaFq9x1kgag=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/has-flag"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz"",\n ""integrity"": ""sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/he"": {\n ""version"": ""1.2.0"",\n ""resolved"": ""https://registry.npmjs.org/he/-/he-1.2.0.tgz"",\n ""integrity"": ""sha512-F/1DnUGPopORZi0ni+CvrCgHQ5FyEAHRLSApuYWMmrbSwoN2Mn/7k+Gl38gJnR7yyDZk6WLXwiGod1JOWNDKGw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""bin"": {\n ""he"": ""bin/he""\n }\n },\n ""node_modules/html-escaper"": {\n ""version"": ""2.0.2"",\n ""resolved"": ""https://registry.npmjs.org/html-escaper/-/html-escaper-2.0.2.tgz"",\n ""integrity"": ""sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/http-proxy-agent"": {\n ""version"": ""7.0.2"",\n ""resolved"": ""https://registry.npmjs.org/http-proxy-agent/-/http-proxy-agent-7.0.2.tgz"",\n ""integrity"": ""sha512-T1gkAiYYDWYx3V5Bmyu7HcfcvL7mUrTWiM6yOfa3PIphViJ/gFPbvidQ+veqSOHci/PxBcDabeUNCzpOODJZig=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""agent-base"": ""^7.1.0"",\n ""debug"": ""^4.3.4""\n },\n ""engines"": {\n ""node"": "">= 14""\n }\n },\n ""node_modules/https-proxy-agent"": {\n ""version"": ""7.0.6"",\n ""resolved"": ""https://registry.npmjs.org/https-proxy-agent/-/https-proxy-agent-7.0.6.tgz"",\n ""integrity"": ""sha512-vK9P5/iUfdl95AI+JVyUuIcVtd4ofvtrOr3HNtM2yxC9bnMbEdp3x01OhQNnjb8IJYi38VlTE3mBXwcfvywuSw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""agent-base"": ""^7.1.2"",\n ""debug"": ""4""\n },\n ""engines"": {\n ""node"": "">= 14""\n }\n },\n ""node_modules/ignore"": {\n ""version"": ""7.0.5"",\n ""resolved"": ""https://registry.npmjs.org/ignore/-/ignore-7.0.5.tgz"",\n ""integrity"": ""sha512-Hs59xBNfUIunMFgWAbGX5cq6893IbWg4KnrjbYwX3tx0ztorVgTDA6B2sxf8ejHJ4wz8BqGUMYlnzNBer5NvGg=="",\n ""dev"": 
true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 4""\n }\n },\n ""node_modules/immediate"": {\n ""version"": ""3.0.6"",\n ""resolved"": ""https://registry.npmjs.org/immediate/-/immediate-3.0.6.tgz"",\n ""integrity"": ""sha512-XXOFtyqDjNDAQxVfYxuF7g9Il/IbWmmlQg2MYKOH8ExIT1qg6xc4zyS3HaEEATgs1btfzxq15ciUiY7gjSXRGQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/import-fresh"": {\n ""version"": ""3.3.1"",\n ""resolved"": ""https://registry.npmjs.org/import-fresh/-/import-fresh-3.3.1.tgz"",\n ""integrity"": ""sha512-TR3KfrTZTYLPB6jUjfx6MF9WcWrHL9su5TObK4ZkYgBdWKPOFoSoQIdEuTuR82pmtxH2spWG9h6etwfr1pLBqQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""parent-module"": ""^1.0.0"",\n ""resolve-from"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=6""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/imurmurhash"": {\n ""version"": ""0.1.4"",\n ""resolved"": ""https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz"",\n ""integrity"": ""sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.8.19""\n }\n },\n ""node_modules/inflight"": {\n ""version"": ""1.0.6"",\n ""resolved"": ""https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz"",\n ""integrity"": ""sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA=="",\n ""deprecated"": ""This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful."",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""once"": ""^1.3.0"",\n ""wrappy"": ""1""\n }\n },\n ""node_modules/inherits"": {\n ""version"": ""2.0.4"",\n ""resolved"": ""https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz"",\n ""integrity"": ""sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/is-binary-path"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/is-binary-path/-/is-binary-path-2.1.0.tgz"",\n ""integrity"": ""sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""binary-extensions"": ""^2.0.0""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/is-extglob"": {\n ""version"": ""2.1.1"",\n ""resolved"": ""https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz"",\n ""integrity"": ""sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/is-fullwidth-code-point"": {\n ""version"": ""3.0.0"",\n ""resolved"": ""https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz"",\n ""integrity"": ""sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/is-glob"": {\n ""version"": ""4.0.3"",\n ""resolved"": ""https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz"",\n ""integrity"": 
""sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""is-extglob"": ""^2.1.1""\n },\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/is-interactive"": {\n ""version"": ""2.0.0"",\n ""resolved"": ""https://registry.npmjs.org/is-interactive/-/is-interactive-2.0.0.tgz"",\n ""integrity"": ""sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/is-number"": {\n ""version"": ""7.0.0"",\n ""resolved"": ""https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz"",\n ""integrity"": ""sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.12.0""\n }\n },\n ""node_modules/is-path-inside"": {\n ""version"": ""3.0.3"",\n ""resolved"": ""https://registry.npmjs.org/is-path-inside/-/is-path-inside-3.0.3.tgz"",\n ""integrity"": ""sha512-Fd4gABb+ycGAmKou8eMftCupSir5lRxqf4aD/vd0cD2qc4HL07OjCeuHMr8Ro4CoMaeCKDB0/ECBOVWjTwUvPQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/is-plain-obj"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/is-plain-obj/-/is-plain-obj-2.1.0.tgz"",\n ""integrity"": ""sha512-YWnfyRwxL/+SsrWYfOpUtz5b3YD+nyfkHvjbcanzk8zgyO4ASD67uVMRt8k5bM4lLMDnXfriRhOpemw+NfT1eA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/is-unicode-supported"": {\n ""version"": ""0.1.0"",\n ""resolved"": ""https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-0.1.0.tgz"",\n ""integrity"": ""sha512-knxG2q4UC3u8stRGyAVJCOdxFmv5DZiRcdlIaAQXAbSfJya+OhopNotLQrstBhququ4ZpuKbDc/8S6mgXgPFPw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/isarray"": {\n ""version"": ""1.0.0"",\n ""resolved"": ""https://registry.npmjs.org/isarray/-/isarray-1.0.0.tgz"",\n ""integrity"": ""sha512-VLghIWNM6ELQzo7zwmcg0NmTVyWKYjvIeM83yjp0wRDTmUnrM678fQbcKBo6n2CJEF0szoG//ytg+TKla89ALQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/isexe"": {\n ""version"": ""2.0.0"",\n ""resolved"": ""https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz"",\n ""integrity"": ""sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/istanbul-lib-coverage"": {\n ""version"": ""3.2.2"",\n ""resolved"": ""https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz"",\n ""integrity"": ""sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/istanbul-lib-report"": {\n ""version"": ""3.0.1"",\n ""resolved"": ""https://registry.npmjs.org/istanbul-lib-report/-/istanbul-lib-report-3.0.1.tgz"",\n ""integrity"": ""sha512-GCfE1mtsHGOELCU8e/Z7YWzpmybrx/+dSTfLrvY8qRmaY6zXTKWn6WQIjaAFw069icm6GVMNkgu0NzI4iPZUNw=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""dependencies"": {\n 
""istanbul-lib-coverage"": ""^3.0.0"",\n ""make-dir"": ""^4.0.0"",\n ""supports-color"": ""^7.1.0""\n },\n ""engines"": {\n ""node"": "">=10""\n }\n },\n ""node_modules/istanbul-lib-report/node_modules/supports-color"": {\n ""version"": ""7.2.0"",\n ""resolved"": ""https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz"",\n ""integrity"": ""sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""has-flag"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/istanbul-reports"": {\n ""version"": ""3.2.0"",\n ""resolved"": ""https://registry.npmjs.org/istanbul-reports/-/istanbul-reports-3.2.0.tgz"",\n ""integrity"": ""sha512-HGYWWS/ehqTV3xN10i23tkPkpH46MLCIMFNCaaKNavAXTF1RkqxawEPtnjnGZ6XKSInBKkiOA5BKS+aZiY3AvA=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""dependencies"": {\n ""html-escaper"": ""^2.0.0"",\n ""istanbul-lib-report"": ""^3.0.0""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/jackspeak"": {\n ""version"": ""3.4.3"",\n ""resolved"": ""https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.3.tgz"",\n ""integrity"": ""sha512-OGlZQpz2yfahA/Rd1Y8Cd9SIEsqvXkLVoSw/cgwhnhFMDbsQFeZYoJJ7bIZBS9BcamUW96asq/npPWugM+RQBw=="",\n ""dev"": true,\n ""license"": ""BlueOak-1.0.0"",\n ""dependencies"": {\n ""@isaacs/cliui"": ""^8.0.2""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n },\n ""optionalDependencies"": {\n ""@pkgjs/parseargs"": ""^0.11.0""\n }\n },\n ""node_modules/js-yaml"": {\n ""version"": ""4.1.0"",\n ""resolved"": ""https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz"",\n ""integrity"": ""sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""argparse"": ""^2.0.1""\n },\n ""bin"": {\n ""js-yaml"": ""bin/js-yaml.js""\n }\n },\n ""node_modules/json-buffer"": {\n ""version"": ""3.0.1"",\n ""resolved"": ""https://registry.npmjs.org/json-buffer/-/json-buffer-3.0.1.tgz"",\n ""integrity"": ""sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/json-schema-traverse"": {\n ""version"": ""0.4.1"",\n ""resolved"": ""https://registry.npmjs.org/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz"",\n ""integrity"": ""sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/json-stable-stringify-without-jsonify"": {\n ""version"": ""1.0.1"",\n ""resolved"": ""https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz"",\n ""integrity"": ""sha512-Bdboy+l7tA3OGW6FjyFHWkP5LuByj1Tk33Ljyq0axyzdk9//JSi2u3fP1QSmd1KNwq6VOKYGlAu87CisVir6Pw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/jszip"": {\n ""version"": ""3.10.1"",\n ""resolved"": ""https://registry.npmjs.org/jszip/-/jszip-3.10.1.tgz"",\n ""integrity"": ""sha512-xXDvecyTpGLrqFrvkrUSoxxfJI5AH7U8zxxtVclpsUtMCq4JQ290LY8AW5c7Ggnr/Y/oK+bQMbqK2qmtk3pN4g=="",\n ""dev"": true,\n ""license"": ""(MIT OR GPL-3.0-or-later)"",\n ""dependencies"": {\n ""lie"": ""~3.3.0"",\n ""pako"": ""~1.0.2"",\n ""readable-stream"": ""~2.3.6"",\n ""setimmediate"": ""^1.0.5""\n }\n },\n ""node_modules/keyv"": {\n ""version"": ""4.5.4"",\n ""resolved"": 
""https://registry.npmjs.org/keyv/-/keyv-4.5.4.tgz"",\n ""integrity"": ""sha512-oxVHkHR/EJf2CNXnWxRLW6mg7JyCCUcG0DtEGmL2ctUo1PNTin1PUil+r/+4r5MpVgC/fn1kjsx7mjSujKqIpw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""json-buffer"": ""3.0.1""\n }\n },\n ""node_modules/levn"": {\n ""version"": ""0.4.1"",\n ""resolved"": ""https://registry.npmjs.org/levn/-/levn-0.4.1.tgz"",\n ""integrity"": ""sha512-+bT2uH4E5LGE7h/n3evcS/sQlJXCpIp6ym8OWJ5eV6+67Dsql/LaaT7qJBAt2rzfoa/5QBGBhxDix1dMt2kQKQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""prelude-ls"": ""^1.2.1"",\n ""type-check"": ""~0.4.0""\n },\n ""engines"": {\n ""node"": "">= 0.8.0""\n }\n },\n ""node_modules/lie"": {\n ""version"": ""3.3.0"",\n ""resolved"": ""https://registry.npmjs.org/lie/-/lie-3.3.0.tgz"",\n ""integrity"": ""sha512-UaiMJzeWRlEujzAuw5LokY1L5ecNQYZKfmyZ9L7wDHb/p5etKaxXhohBcrw0EYby+G/NA52vRSN4N39dxHAIwQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""immediate"": ""~3.0.5""\n }\n },\n ""node_modules/locate-path"": {\n ""version"": ""6.0.0"",\n ""resolved"": ""https://registry.npmjs.org/locate-path/-/locate-path-6.0.0.tgz"",\n ""integrity"": ""sha512-iPZK6eYjbxRu3uB4/WZ3EsEIMJFMqAoopl3R+zuq0UjcAm/MO6KCweDgPfP3elTztoKP3KtnVHxTn2NHBSDVUw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""p-locate"": ""^5.0.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/lodash.merge"": {\n ""version"": ""4.6.2"",\n ""resolved"": ""https://registry.npmjs.org/lodash.merge/-/lodash.merge-4.6.2.tgz"",\n ""integrity"": ""sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/log-symbols"": {\n ""version"": ""4.1.0"",\n ""resolved"": ""https://registry.npmjs.org/log-symbols/-/log-symbols-4.1.0.tgz"",\n ""integrity"": ""sha512-8XPvpAA8uyhfteu8pIvQxpJZ7SYYdpUivZpGy6sFsBuKRY/7rQGavedeB8aK+Zkyq6upMFVL/9AW6vOYzfRyLg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""chalk"": ""^4.1.0"",\n ""is-unicode-supported"": ""^0.1.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/lru-cache"": {\n ""version"": ""10.4.3"",\n ""resolved"": ""https://registry.npmjs.org/lru-cache/-/lru-cache-10.4.3.tgz"",\n ""integrity"": ""sha512-JNAzZcXrCt42VGLuYz0zfAzDfAvJWW6AfYlDBQyDV5DClI2m5sAmK+OIO7s59XfsRsWHp02jAJrRadPRGTt6SQ=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/make-dir"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/make-dir/-/make-dir-4.0.0.tgz"",\n ""integrity"": ""sha512-hXdUTZYIVOt1Ex//jAQi+wTZZpUpwBj/0QsOzqegb3rGMMeJiSEu5xLHnYfBrRV4RH2+OCSOO95Is/7x1WJ4bw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""semver"": ""^7.5.3""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/merge2"": {\n ""version"": ""1.4.1"",\n ""resolved"": ""https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz"",\n ""integrity"": ""sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/micromatch"": {\n ""version"": ""4.0.8"",\n ""resolved"": 
""https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz"",\n ""integrity"": ""sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""braces"": ""^3.0.3"",\n ""picomatch"": ""^2.3.1""\n },\n ""engines"": {\n ""node"": "">=8.6""\n }\n },\n ""node_modules/mimic-function"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz"",\n ""integrity"": ""sha512-VP79XUPxV2CigYP3jWwAUFSku2aKqBH7uTAapFWCBqutsbmDo96KY5o8uh6U+/YSIn5OxJnXp73beVkpqMIGhA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/minimatch"": {\n ""version"": ""9.0.5"",\n ""resolved"": ""https://registry.npmjs.org/minimatch/-/minimatch-9.0.5.tgz"",\n ""integrity"": ""sha512-G6T0ZX48xgozx7587koeX9Ys2NYy6Gmv//P89sEte9V9whIapMNF4idKxnW2QtCcLiTWlb/wfCabAtAFWhhBow=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""brace-expansion"": ""^2.0.1""\n },\n ""engines"": {\n ""node"": "">=16 || 14 >=14.17""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n ""node_modules/minipass"": {\n ""version"": ""7.1.2"",\n ""resolved"": ""https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz"",\n ""integrity"": ""sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""engines"": {\n ""node"": "">=16 || 14 >=14.17""\n }\n },\n ""node_modules/mocha"": {\n ""version"": ""11.7.5"",\n ""resolved"": ""https://registry.npmjs.org/mocha/-/mocha-11.7.5.tgz"",\n ""integrity"": ""sha512-mTT6RgopEYABzXWFx+GcJ+ZQ32kp4fMf0xvpZIIfSq9Z8lC/++MtcCnQ9t5FP2veYEP95FIYSvW+U9fV4xrlig=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""browser-stdout"": ""^1.3.1"",\n ""chokidar"": ""^4.0.1"",\n ""debug"": ""^4.3.5"",\n ""diff"": ""^7.0.0"",\n ""escape-string-regexp"": ""^4.0.0"",\n ""find-up"": ""^5.0.0"",\n ""glob"": ""^10.4.5"",\n ""he"": ""^1.2.0"",\n ""is-path-inside"": ""^3.0.3"",\n ""js-yaml"": ""^4.1.0"",\n ""log-symbols"": ""^4.1.0"",\n ""minimatch"": ""^9.0.5"",\n ""ms"": ""^2.1.3"",\n ""picocolors"": ""^1.1.1"",\n ""serialize-javascript"": ""^6.0.2"",\n ""strip-json-comments"": ""^3.1.1"",\n ""supports-color"": ""^8.1.1"",\n ""workerpool"": ""^9.2.0"",\n ""yargs"": ""^17.7.2"",\n ""yargs-parser"": ""^21.1.1"",\n ""yargs-unparser"": ""^2.0.0""\n },\n ""bin"": {\n ""_mocha"": ""bin/_mocha"",\n ""mocha"": ""bin/mocha.js""\n },\n ""engines"": {\n ""node"": ""^18.18.0 || ^20.9.0 || >=21.1.0""\n }\n },\n ""node_modules/mocha/node_modules/chokidar"": {\n ""version"": ""4.0.3"",\n ""resolved"": ""https://registry.npmjs.org/chokidar/-/chokidar-4.0.3.tgz"",\n ""integrity"": ""sha512-Qgzu8kfBvo+cA4962jnP1KkS6Dop5NS6g7R5LFYJr4b8Ub94PPQXUksCw9PvXoeXPRRddRNC5C1JQUR2SMGtnA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""readdirp"": ""^4.0.1""\n },\n ""engines"": {\n ""node"": "">= 14.16.0""\n },\n ""funding"": {\n ""url"": ""https://paulmillr.com/funding/""\n }\n },\n ""node_modules/mocha/node_modules/readdirp"": {\n ""version"": ""4.1.2"",\n ""resolved"": ""https://registry.npmjs.org/readdirp/-/readdirp-4.1.2.tgz"",\n ""integrity"": ""sha512-GDhwkLfywWL2s6vEjyhri+eXmfH6j1L7JE27WhqLeYzoh/A3DBaYGEj2H/HFZCn/kMfim73FXxEJTw06WtxQwg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": 
{\n ""node"": "">= 14.18.0""\n },\n ""funding"": {\n ""type"": ""individual"",\n ""url"": ""https://paulmillr.com/funding/""\n }\n },\n ""node_modules/mocha/node_modules/supports-color"": {\n ""version"": ""8.1.1"",\n ""resolved"": ""https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz"",\n ""integrity"": ""sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""has-flag"": ""^4.0.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/supports-color?sponsor=1""\n }\n },\n ""node_modules/ms"": {\n ""version"": ""2.1.3"",\n ""resolved"": ""https://registry.npmjs.org/ms/-/ms-2.1.3.tgz"",\n ""integrity"": ""sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/natural-compare"": {\n ""version"": ""1.4.0"",\n ""resolved"": ""https://registry.npmjs.org/natural-compare/-/natural-compare-1.4.0.tgz"",\n ""integrity"": ""sha512-OWND8ei3VtNC9h7V60qff3SVobHr996CTwgxubgyQYEpg290h9J0buyECNNJexkFm5sOajh5G116RYA1c8ZMSw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/normalize-path"": {\n ""version"": ""3.0.0"",\n ""resolved"": ""https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz"",\n ""integrity"": ""sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/once"": {\n ""version"": ""1.4.0"",\n ""resolved"": ""https://registry.npmjs.org/once/-/once-1.4.0.tgz"",\n ""integrity"": ""sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""wrappy"": ""1""\n }\n },\n ""node_modules/onetime"": {\n ""version"": ""7.0.0"",\n ""resolved"": ""https://registry.npmjs.org/onetime/-/onetime-7.0.0.tgz"",\n ""integrity"": ""sha512-VXJjc87FScF88uafS3JllDgvAm+c/Slfz06lorj2uAY34rlUu0Nt+v8wreiImcrgAjjIHp1rXpTDlLOGw29WwQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""mimic-function"": ""^5.0.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/optionator"": {\n ""version"": ""0.9.4"",\n ""resolved"": ""https://registry.npmjs.org/optionator/-/optionator-0.9.4.tgz"",\n ""integrity"": ""sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""deep-is"": ""^0.1.3"",\n ""fast-levenshtein"": ""^2.0.6"",\n ""levn"": ""^0.4.1"",\n ""prelude-ls"": ""^1.2.1"",\n ""type-check"": ""^0.4.0"",\n ""word-wrap"": ""^1.2.5""\n },\n ""engines"": {\n ""node"": "">= 0.8.0""\n }\n },\n ""node_modules/ora"": {\n ""version"": ""8.2.0"",\n ""resolved"": ""https://registry.npmjs.org/ora/-/ora-8.2.0.tgz"",\n ""integrity"": ""sha512-weP+BZ8MVNnlCm8c0Qdc1WSWq4Qn7I+9CJGm7Qali6g44e/PUzbjNqJX5NJ9ljlNMosfJvg1fKEGILklK9cwnw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""chalk"": ""^5.3.0"",\n ""cli-cursor"": ""^5.0.0"",\n ""cli-spinners"": ""^2.9.2"",\n ""is-interactive"": ""^2.0.0"",\n ""is-unicode-supported"": ""^2.0.0"",\n ""log-symbols"": ""^6.0.0"",\n ""stdin-discarder"": ""^0.2.2"",\n ""string-width"": ""^7.2.0"",\n ""strip-ansi"": 
""^7.1.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/ora/node_modules/chalk"": {\n ""version"": ""5.6.2"",\n ""resolved"": ""https://registry.npmjs.org/chalk/-/chalk-5.6.2.tgz"",\n ""integrity"": ""sha512-7NzBL0rN6fMUW+f7A6Io4h40qQlG+xGmtMxfbnH/K7TAtt8JQWVQK+6g0UXKMeVJoyV5EkkNsErQ8pVD3bLHbA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": ""^12.17.0 || ^14.13 || >=16.0.0""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/chalk?sponsor=1""\n }\n },\n ""node_modules/ora/node_modules/emoji-regex"": {\n ""version"": ""10.6.0"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.6.0.tgz"",\n ""integrity"": ""sha512-toUI84YS5YmxW219erniWD0CIVOo46xGKColeNQRgOzDorgBi1v4D71/OFzgD9GO2UGKIv1C3Sp8DAn0+j5w7A=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/ora/node_modules/is-unicode-supported"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-2.1.0.tgz"",\n ""integrity"": ""sha512-mE00Gnza5EEB3Ds0HfMyllZzbBrmLOX3vfWoj9A9PEnTfratQ/BcaJOuMhnkhjXvb2+FkY3VuHqtAGpTPmglFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/ora/node_modules/log-symbols"": {\n ""version"": ""6.0.0"",\n ""resolved"": ""https://registry.npmjs.org/log-symbols/-/log-symbols-6.0.0.tgz"",\n ""integrity"": ""sha512-i24m8rpwhmPIS4zscNzK6MSEhk0DUWa/8iYQWxhffV8jkI4Phvs3F+quL5xvS0gdQR0FyTCMMH33Y78dDTzzIw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""chalk"": ""^5.3.0"",\n ""is-unicode-supported"": ""^1.3.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/ora/node_modules/log-symbols/node_modules/is-unicode-supported"": {\n ""version"": ""1.3.0"",\n ""resolved"": ""https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz"",\n ""integrity"": ""sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/ora/node_modules/string-width"": {\n ""version"": ""7.2.0"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-7.2.0.tgz"",\n ""integrity"": ""sha512-tsaTIkKW9b4N+AEj+SVA+WhJzV7/zMhcSu78mLKWSk7cXMOSHsBKFWUs0fWwq8QyK3MgJBQRX6Gbi4kYbdvGkQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""emoji-regex"": ""^10.3.0"",\n ""get-east-asian-width"": ""^1.0.0"",\n ""strip-ansi"": ""^7.1.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/p-limit"": {\n ""version"": ""3.1.0"",\n ""resolved"": ""https://registry.npmjs.org/p-limit/-/p-limit-3.1.0.tgz"",\n ""integrity"": ""sha512-TYOanM3wGwNGsZN2cVTYPArw454xnXj5qmWF1bEoAc4+cU/ol7GVh7odevjp1FNHduHc3KZMcFduxU5Xc6uJRQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""yocto-queue"": ""^0.1.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/p-locate"": {\n ""version"": ""5.0.0"",\n ""resolved"": 
""https://registry.npmjs.org/p-locate/-/p-locate-5.0.0.tgz"",\n ""integrity"": ""sha512-LaNjtRWUBY++zB5nE/NwcaoMylSPk+S+ZHNB1TzdbMJMny6dynpAGt7X/tl/QYq3TIeE6nxHppbo2LGymrG5Pw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""p-limit"": ""^3.0.2""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/package-json-from-dist"": {\n ""version"": ""1.0.1"",\n ""resolved"": ""https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz"",\n ""integrity"": ""sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw=="",\n ""dev"": true,\n ""license"": ""BlueOak-1.0.0""\n },\n ""node_modules/pako"": {\n ""version"": ""1.0.11"",\n ""resolved"": ""https://registry.npmjs.org/pako/-/pako-1.0.11.tgz"",\n ""integrity"": ""sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw=="",\n ""dev"": true,\n ""license"": ""(MIT AND Zlib)""\n },\n ""node_modules/parent-module"": {\n ""version"": ""1.0.1"",\n ""resolved"": ""https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz"",\n ""integrity"": ""sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""callsites"": ""^3.0.0""\n },\n ""engines"": {\n ""node"": "">=6""\n }\n },\n ""node_modules/path-exists"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz"",\n ""integrity"": ""sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/path-is-absolute"": {\n ""version"": ""1.0.1"",\n ""resolved"": ""https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz"",\n ""integrity"": ""sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/path-key"": {\n ""version"": ""3.1.1"",\n ""resolved"": ""https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz"",\n ""integrity"": ""sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/path-scurry"": {\n ""version"": ""1.11.1"",\n ""resolved"": ""https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz"",\n ""integrity"": ""sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA=="",\n ""dev"": true,\n ""license"": ""BlueOak-1.0.0"",\n ""dependencies"": {\n ""lru-cache"": ""^10.2.0"",\n ""minipass"": ""^5.0.0 || ^6.0.2 || ^7.0.0""\n },\n ""engines"": {\n ""node"": "">=16 || 14 >=14.18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n ""node_modules/picocolors"": {\n ""version"": ""1.1.1"",\n ""resolved"": ""https://registry.npmjs.org/picocolors/-/picocolors-1.1.1.tgz"",\n ""integrity"": ""sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/picomatch"": {\n ""version"": ""2.3.1"",\n ""resolved"": ""https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz"",\n ""integrity"": 
""sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8.6""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/jonschlinkert""\n }\n },\n ""node_modules/prelude-ls"": {\n ""version"": ""1.2.1"",\n ""resolved"": ""https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz"",\n ""integrity"": ""sha512-vkcDPrRZo1QZLbn5RLGPpg/WmIQ65qoWWhcGKf/b5eplkkarX0m9z8ppCat4mlOqUsWpyNuYgO3VRyrYHSzX5g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">= 0.8.0""\n }\n },\n ""node_modules/process-nextick-args"": {\n ""version"": ""2.0.1"",\n ""resolved"": ""https://registry.npmjs.org/process-nextick-args/-/process-nextick-args-2.0.1.tgz"",\n ""integrity"": ""sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/punycode"": {\n ""version"": ""2.3.1"",\n ""resolved"": ""https://registry.npmjs.org/punycode/-/punycode-2.3.1.tgz"",\n ""integrity"": ""sha512-vYt7UD1U9Wg6138shLtLOvdAu+8DsC/ilFtEVHcH+wydcSpNE20AfSOduf6MkRFahL5FY7X1oU7nKVZFtfq8Fg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6""\n }\n },\n ""node_modules/queue-microtask"": {\n ""version"": ""1.2.3"",\n ""resolved"": ""https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz"",\n ""integrity"": ""sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A=="",\n ""dev"": true,\n ""funding"": [\n {\n ""type"": ""github"",\n ""url"": ""https://github.com/sponsors/feross""\n },\n {\n ""type"": ""patreon"",\n ""url"": ""https://www.patreon.com/feross""\n },\n {\n ""type"": ""consulting"",\n ""url"": ""https://feross.org/support""\n }\n ],\n ""license"": ""MIT""\n },\n ""node_modules/randombytes"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/randombytes/-/randombytes-2.1.0.tgz"",\n ""integrity"": ""sha512-vYl3iOX+4CKUWuxGi9Ukhie6fsqXqS9FE2Zaic4tNFD2N2QQaXOMFbuKK4QmDHC0JO6B1Zp41J0LpT0oR68amQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""safe-buffer"": ""^5.1.0""\n }\n },\n ""node_modules/readable-stream"": {\n ""version"": ""2.3.8"",\n ""resolved"": ""https://registry.npmjs.org/readable-stream/-/readable-stream-2.3.8.tgz"",\n ""integrity"": ""sha512-8p0AUk4XODgIewSi0l8Epjs+EVnWiK7NoDIEGU0HhE7+ZyY8D1IMY7odu5lRrFXGg71L15KG8QrPmum45RTtdA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""core-util-is"": ""~1.0.0"",\n ""inherits"": ""~2.0.3"",\n ""isarray"": ""~1.0.0"",\n ""process-nextick-args"": ""~2.0.0"",\n ""safe-buffer"": ""~5.1.1"",\n ""string_decoder"": ""~1.1.1"",\n ""util-deprecate"": ""~1.0.1""\n }\n },\n ""node_modules/readdirp"": {\n ""version"": ""3.6.0"",\n ""resolved"": ""https://registry.npmjs.org/readdirp/-/readdirp-3.6.0.tgz"",\n ""integrity"": ""sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""picomatch"": ""^2.2.1""\n },\n ""engines"": {\n ""node"": "">=8.10.0""\n }\n },\n ""node_modules/require-directory"": {\n ""version"": ""2.1.1"",\n ""resolved"": ""https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz"",\n ""integrity"": ""sha512-fGxEI7+wsG9xrvdjsrlmL22OMTTiHRwAMroiEeMgq8gzoLC/PQr7RsRDSTLUg/bZAZtF+TVIkHc6/4RIKrui+Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n 
""node"": "">=0.10.0""\n }\n },\n ""node_modules/resolve-from"": {\n ""version"": ""4.0.0"",\n ""resolved"": ""https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz"",\n ""integrity"": ""sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=4""\n }\n },\n ""node_modules/restore-cursor"": {\n ""version"": ""5.1.0"",\n ""resolved"": ""https://registry.npmjs.org/restore-cursor/-/restore-cursor-5.1.0.tgz"",\n ""integrity"": ""sha512-oMA2dcrw6u0YfxJQXm342bFKX/E4sG9rbTzO9ptUcR/e8A33cHuvStiYOwH7fszkZlZ1z/ta9AAoPk2F4qIOHA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""onetime"": ""^7.0.0"",\n ""signal-exit"": ""^4.1.0""\n },\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/reusify"": {\n ""version"": ""1.1.0"",\n ""resolved"": ""https://registry.npmjs.org/reusify/-/reusify-1.1.0.tgz"",\n ""integrity"": ""sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""iojs"": "">=1.0.0"",\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/run-parallel"": {\n ""version"": ""1.2.0"",\n ""resolved"": ""https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz"",\n ""integrity"": ""sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA=="",\n ""dev"": true,\n ""funding"": [\n {\n ""type"": ""github"",\n ""url"": ""https://github.com/sponsors/feross""\n },\n {\n ""type"": ""patreon"",\n ""url"": ""https://www.patreon.com/feross""\n },\n {\n ""type"": ""consulting"",\n ""url"": ""https://feross.org/support""\n }\n ],\n ""license"": ""MIT"",\n ""dependencies"": {\n ""queue-microtask"": ""^1.2.2""\n }\n },\n ""node_modules/safe-buffer"": {\n ""version"": ""5.1.2"",\n ""resolved"": ""https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz"",\n ""integrity"": ""sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/semver"": {\n ""version"": ""7.7.3"",\n ""resolved"": ""https://registry.npmjs.org/semver/-/semver-7.7.3.tgz"",\n ""integrity"": ""sha512-SdsKMrI9TdgjdweUSR9MweHA4EJ8YxHn8DFaDisvhVlUOe4BF1tLD7GAj0lIqWVl+dPb/rExr0Btby5loQm20Q=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""bin"": {\n ""semver"": ""bin/semver.js""\n },\n ""engines"": {\n ""node"": "">=10""\n }\n },\n ""node_modules/serialize-javascript"": {\n ""version"": ""6.0.2"",\n ""resolved"": ""https://registry.npmjs.org/serialize-javascript/-/serialize-javascript-6.0.2.tgz"",\n ""integrity"": ""sha512-Saa1xPByTTq2gdeFZYLLo+RFE35NHZkAbqZeWNd3BpzppeVisAqpDjcp8dyf6uIvEqJRd46jemmyA4iFIeVk8g=="",\n ""dev"": true,\n ""license"": ""BSD-3-Clause"",\n ""dependencies"": {\n ""randombytes"": ""^2.1.0""\n }\n },\n ""node_modules/setimmediate"": {\n ""version"": ""1.0.5"",\n ""resolved"": ""https://registry.npmjs.org/setimmediate/-/setimmediate-1.0.5.tgz"",\n ""integrity"": ""sha512-MATJdZp8sLqDl/68LfQmbP8zKPLQNV6BIZoIgrscFDQ+RsvK/BxeDQOgyxKKoh0y/8h3BqVFnCqQ/gd+reiIXA=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/shebang-command"": {\n ""version"": ""2.0.0"",\n ""resolved"": ""https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz"",\n ""integrity"": 
""sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""shebang-regex"": ""^3.0.0""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/shebang-regex"": {\n ""version"": ""3.0.0"",\n ""resolved"": ""https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz"",\n ""integrity"": ""sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/signal-exit"": {\n ""version"": ""4.1.0"",\n ""resolved"": ""https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz"",\n ""integrity"": ""sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""engines"": {\n ""node"": "">=14""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n ""node_modules/stdin-discarder"": {\n ""version"": ""0.2.2"",\n ""resolved"": ""https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.2.2.tgz"",\n ""integrity"": ""sha512-UhDfHmA92YAlNnCfhmq0VeNL5bDbiZGg7sZ2IvPsXubGkiNa9EC+tUTsjBRsYUAz87btI6/1wf4XoVvQ3uRnmQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/string_decoder"": {\n ""version"": ""1.1.1"",\n ""resolved"": ""https://registry.npmjs.org/string_decoder/-/string_decoder-1.1.1.tgz"",\n ""integrity"": ""sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""safe-buffer"": ""~5.1.0""\n }\n },\n ""node_modules/string-width"": {\n ""version"": ""5.1.2"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz"",\n ""integrity"": ""sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""eastasianwidth"": ""^0.2.0"",\n ""emoji-regex"": ""^9.2.2"",\n ""strip-ansi"": ""^7.0.1""\n },\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/string-width-cjs"": {\n ""name"": ""string-width"",\n ""version"": ""4.2.3"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"",\n ""integrity"": ""sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""emoji-regex"": ""^8.0.0"",\n ""is-fullwidth-code-point"": ""^3.0.0"",\n ""strip-ansi"": ""^6.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/string-width-cjs/node_modules/ansi-regex"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"",\n ""integrity"": ""sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/string-width-cjs/node_modules/emoji-regex"": {\n ""version"": ""8.0.0"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz"",\n ""integrity"": 
""sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/string-width-cjs/node_modules/strip-ansi"": {\n ""version"": ""6.0.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"",\n ""integrity"": ""sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/strip-ansi"": {\n ""version"": ""7.1.2"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.2.tgz"",\n ""integrity"": ""sha512-gmBGslpoQJtgnMAvOVqGZpEz9dyoKTCzy2nfz/n8aIFhN/jCE/rCmcxabB6jOOHV+0WNnylOxaxBQPSvcWklhA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^6.0.1""\n },\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/strip-ansi?sponsor=1""\n }\n },\n ""node_modules/strip-ansi-cjs"": {\n ""name"": ""strip-ansi"",\n ""version"": ""6.0.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"",\n ""integrity"": ""sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/strip-ansi-cjs/node_modules/ansi-regex"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"",\n ""integrity"": ""sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/strip-json-comments"": {\n ""version"": ""3.1.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz"",\n ""integrity"": ""sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n },\n ""node_modules/supports-color"": {\n ""version"": ""9.4.0"",\n ""resolved"": ""https://registry.npmjs.org/supports-color/-/supports-color-9.4.0.tgz"",\n ""integrity"": ""sha512-VL+lNrEoIXww1coLPOmiEmK/0sGigko5COxI09KzHc2VJXJsQ37UaQ+8quuxjDeA7+KnLGTWRyOXSLLR2Wb4jw=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/supports-color?sponsor=1""\n }\n },\n ""node_modules/tapable"": {\n ""version"": ""2.3.0"",\n ""resolved"": ""https://registry.npmjs.org/tapable/-/tapable-2.3.0.tgz"",\n ""integrity"": ""sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=6""\n },\n ""funding"": {\n ""type"": ""opencollective"",\n ""url"": ""https://opencollective.com/webpack""\n }\n },\n ""node_modules/test-exclude"": {\n ""version"": ""6.0.0"",\n ""resolved"": ""https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz"",\n ""integrity"": ""sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n 
""@istanbuljs/schema"": ""^0.1.2"",\n ""glob"": ""^7.1.4"",\n ""minimatch"": ""^3.0.4""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/test-exclude/node_modules/brace-expansion"": {\n ""version"": ""1.1.12"",\n ""resolved"": ""https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.12.tgz"",\n ""integrity"": ""sha512-9T9UjW3r0UW5c1Q7GTwllptXwhvYmEzFhzMfZ9H7FQWt+uZePjZPjBP/W1ZEyZ1twGWom5/56TF4lPcqjnDHcg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""balanced-match"": ""^1.0.0"",\n ""concat-map"": ""0.0.1""\n }\n },\n ""node_modules/test-exclude/node_modules/glob"": {\n ""version"": ""7.2.3"",\n ""resolved"": ""https://registry.npmjs.org/glob/-/glob-7.2.3.tgz"",\n ""integrity"": ""sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q=="",\n ""deprecated"": ""Glob versions prior to v9 are no longer supported"",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""fs.realpath"": ""^1.0.0"",\n ""inflight"": ""^1.0.4"",\n ""inherits"": ""2"",\n ""minimatch"": ""^3.1.1"",\n ""once"": ""^1.3.0"",\n ""path-is-absolute"": ""^1.0.0""\n },\n ""engines"": {\n ""node"": ""*""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/isaacs""\n }\n },\n ""node_modules/test-exclude/node_modules/minimatch"": {\n ""version"": ""3.1.2"",\n ""resolved"": ""https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz"",\n ""integrity"": ""sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""brace-expansion"": ""^1.1.7""\n },\n ""engines"": {\n ""node"": ""*""\n }\n },\n ""node_modules/to-regex-range"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz"",\n ""integrity"": ""sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""is-number"": ""^7.0.0""\n },\n ""engines"": {\n ""node"": "">=8.0""\n }\n },\n ""node_modules/ts-api-utils"": {\n ""version"": ""2.1.0"",\n ""resolved"": ""https://registry.npmjs.org/ts-api-utils/-/ts-api-utils-2.1.0.tgz"",\n ""integrity"": ""sha512-CUgTZL1irw8u29bzrOD/nH85jqyc74D6SshFgujOIA7osm2Rz7dYH77agkx7H4FBNxDq7Cjf+IjaX/8zwFW+ZQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=18.12""\n },\n ""peerDependencies"": {\n ""typescript"": "">=4.8.4""\n }\n },\n ""node_modules/type-check"": {\n ""version"": ""0.4.0"",\n ""resolved"": ""https://registry.npmjs.org/type-check/-/type-check-0.4.0.tgz"",\n ""integrity"": ""sha512-XleUoc9uwGXqjWwXaUTZAmzMcFZ5858QA2vvx1Ur5xIcixXIP+8LnFDgRplU30us6teqdlskFfu+ae4K79Ooew=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""prelude-ls"": ""^1.2.1""\n },\n ""engines"": {\n ""node"": "">= 0.8.0""\n }\n },\n ""node_modules/typescript"": {\n ""version"": ""5.9.3"",\n ""resolved"": ""https://registry.npmjs.org/typescript/-/typescript-5.9.3.tgz"",\n ""integrity"": ""sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="",\n ""dev"": true,\n ""license"": ""Apache-2.0"",\n ""bin"": {\n ""tsc"": ""bin/tsc"",\n ""tsserver"": ""bin/tsserver""\n },\n ""engines"": {\n ""node"": "">=14.17""\n }\n },\n ""node_modules/undici-types"": {\n ""version"": ""6.21.0"",\n ""resolved"": ""https://registry.npmjs.org/undici-types/-/undici-types-6.21.0.tgz"",\n ""integrity"": 
""sha512-iwDZqg0QAGrg9Rav5H4n0M64c3mkR59cJ6wQp+7C4nI0gsmExaedaYLNO44eT4AtBBwjbTiGPMlt2Md0T9H9JQ=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/uri-js"": {\n ""version"": ""4.4.1"",\n ""resolved"": ""https://registry.npmjs.org/uri-js/-/uri-js-4.4.1.tgz"",\n ""integrity"": ""sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg=="",\n ""dev"": true,\n ""license"": ""BSD-2-Clause"",\n ""dependencies"": {\n ""punycode"": ""^2.1.0""\n }\n },\n ""node_modules/util-deprecate"": {\n ""version"": ""1.0.2"",\n ""resolved"": ""https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz"",\n ""integrity"": ""sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/v8-to-istanbul"": {\n ""version"": ""9.3.0"",\n ""resolved"": ""https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz"",\n ""integrity"": ""sha512-kiGUalWN+rgBJ/1OHZsBtU4rXZOfj/7rKQxULKlIzwzQSvMJUUNgPwJEEh7gU6xEVxC0ahoOBvN2YI8GH6FNgA=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""@jridgewell/trace-mapping"": ""^0.3.12"",\n ""@types/istanbul-lib-coverage"": ""^2.0.1"",\n ""convert-source-map"": ""^2.0.0""\n },\n ""engines"": {\n ""node"": "">=10.12.0""\n }\n },\n ""node_modules/which"": {\n ""version"": ""2.0.2"",\n ""resolved"": ""https://registry.npmjs.org/which/-/which-2.0.2.tgz"",\n ""integrity"": ""sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""dependencies"": {\n ""isexe"": ""^2.0.0""\n },\n ""bin"": {\n ""node-which"": ""bin/node-which""\n },\n ""engines"": {\n ""node"": "">= 8""\n }\n },\n ""node_modules/word-wrap"": {\n ""version"": ""1.2.5"",\n ""resolved"": ""https://registry.npmjs.org/word-wrap/-/word-wrap-1.2.5.tgz"",\n ""integrity"": ""sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=0.10.0""\n }\n },\n ""node_modules/workerpool"": {\n ""version"": ""9.3.4"",\n ""resolved"": ""https://registry.npmjs.org/workerpool/-/workerpool-9.3.4.tgz"",\n ""integrity"": ""sha512-TmPRQYYSAnnDiEB0P/Ytip7bFGvqnSU6I2BcuSw7Hx+JSg/DsUi5ebYfc8GYaSdpuvOcEs6dXxPurOYpe9QFwg=="",\n ""dev"": true,\n ""license"": ""Apache-2.0""\n },\n ""node_modules/wrap-ansi"": {\n ""version"": ""8.1.0"",\n ""resolved"": ""https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz"",\n ""integrity"": ""sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-styles"": ""^6.1.0"",\n ""string-width"": ""^5.0.1"",\n ""strip-ansi"": ""^7.0.1""\n },\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/wrap-ansi?sponsor=1""\n }\n },\n ""node_modules/wrap-ansi-cjs"": {\n ""name"": ""wrap-ansi"",\n ""version"": ""7.0.0"",\n ""resolved"": ""https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz"",\n ""integrity"": ""sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-styles"": ""^4.0.0"",\n ""string-width"": ""^4.1.0"",\n ""strip-ansi"": ""^6.0.0""\n },\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/wrap-ansi?sponsor=1""\n }\n 
},\n ""node_modules/wrap-ansi-cjs/node_modules/ansi-regex"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"",\n ""integrity"": ""sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/wrap-ansi-cjs/node_modules/emoji-regex"": {\n ""version"": ""8.0.0"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz"",\n ""integrity"": ""sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/wrap-ansi-cjs/node_modules/string-width"": {\n ""version"": ""4.2.3"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"",\n ""integrity"": ""sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""emoji-regex"": ""^8.0.0"",\n ""is-fullwidth-code-point"": ""^3.0.0"",\n ""strip-ansi"": ""^6.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/wrap-ansi-cjs/node_modules/strip-ansi"": {\n ""version"": ""6.0.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"",\n ""integrity"": ""sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/wrap-ansi/node_modules/ansi-styles"": {\n ""version"": ""6.2.3"",\n ""resolved"": ""https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.3.tgz"",\n ""integrity"": ""sha512-4Dj6M28JB+oAH8kFkTLUo+a2jwOFkuqb3yucU0CANcRRUbxS0cP0nZYCGjcc3BNXwRIsUVmDGgzawme7zvJHvg=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=12""\n },\n ""funding"": {\n ""url"": ""https://github.com/chalk/ansi-styles?sponsor=1""\n }\n },\n ""node_modules/wrappy"": {\n ""version"": ""1.0.2"",\n ""resolved"": ""https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz"",\n ""integrity"": ""sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ=="",\n ""dev"": true,\n ""license"": ""ISC""\n },\n ""node_modules/y18n"": {\n ""version"": ""5.0.8"",\n ""resolved"": ""https://registry.npmjs.org/y18n/-/y18n-5.0.8.tgz"",\n ""integrity"": ""sha512-0pfFzegeDWJHJIAmTLRP2DwHjdF5s7jo9tuztdQxAhINCdvS+3nGINqPd00AphqJR/0LhANUS6/+7SCb98YOfA=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""engines"": {\n ""node"": "">=10""\n }\n },\n ""node_modules/yargs"": {\n ""version"": ""17.7.2"",\n ""resolved"": ""https://registry.npmjs.org/yargs/-/yargs-17.7.2.tgz"",\n ""integrity"": ""sha512-7dSzzRQ++CKnNI/krKnYRV7JKKPUXMEh61soaHKg9mrWEhzFWhFnxPxGl+69cD1Ou63C13NUPCnmIcrvqCuM6w=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""cliui"": ""^8.0.1"",\n ""escalade"": ""^3.1.1"",\n ""get-caller-file"": ""^2.0.5"",\n ""require-directory"": ""^2.1.1"",\n ""string-width"": ""^4.2.3"",\n ""y18n"": ""^5.0.5"",\n ""yargs-parser"": ""^21.1.1""\n },\n ""engines"": {\n ""node"": "">=12""\n }\n },\n ""node_modules/yargs-parser"": {\n ""version"": ""21.1.1"",\n ""resolved"": ""https://registry.npmjs.org/yargs-parser/-/yargs-parser-21.1.1.tgz"",\n ""integrity"": 
""sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw=="",\n ""dev"": true,\n ""license"": ""ISC"",\n ""engines"": {\n ""node"": "">=12""\n }\n },\n ""node_modules/yargs-unparser"": {\n ""version"": ""2.0.0"",\n ""resolved"": ""https://registry.npmjs.org/yargs-unparser/-/yargs-unparser-2.0.0.tgz"",\n ""integrity"": ""sha512-7pRTIA9Qc1caZ0bZ6RYRGbHJthJWuakf+WmHK0rVeLkNrrGhfoabBNdue6kdINI6r4if7ocq9aD/n7xwKOdzOA=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""camelcase"": ""^6.0.0"",\n ""decamelize"": ""^4.0.0"",\n ""flat"": ""^5.0.2"",\n ""is-plain-obj"": ""^2.1.0""\n },\n ""engines"": {\n ""node"": "">=10""\n }\n },\n ""node_modules/yargs/node_modules/ansi-regex"": {\n ""version"": ""5.0.1"",\n ""resolved"": ""https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz"",\n ""integrity"": ""sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/yargs/node_modules/emoji-regex"": {\n ""version"": ""8.0.0"",\n ""resolved"": ""https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz"",\n ""integrity"": ""sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A=="",\n ""dev"": true,\n ""license"": ""MIT""\n },\n ""node_modules/yargs/node_modules/string-width"": {\n ""version"": ""4.2.3"",\n ""resolved"": ""https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz"",\n ""integrity"": ""sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""emoji-regex"": ""^8.0.0"",\n ""is-fullwidth-code-point"": ""^3.0.0"",\n ""strip-ansi"": ""^6.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/yargs/node_modules/strip-ansi"": {\n ""version"": ""6.0.1"",\n ""resolved"": ""https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz"",\n ""integrity"": ""sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""dependencies"": {\n ""ansi-regex"": ""^5.0.1""\n },\n ""engines"": {\n ""node"": "">=8""\n }\n },\n ""node_modules/yocto-queue"": {\n ""version"": ""0.1.0"",\n ""resolved"": ""https://registry.npmjs.org/yocto-queue/-/yocto-queue-0.1.0.tgz"",\n ""integrity"": ""sha512-rVksvsnNCdJ/ohGc6xgPwyN8eheCxsiLM8mxuE/t/mOVqJewPuO1miLpTHQiRgTKCLexL4MeAFVagts7HmNZ2Q=="",\n ""dev"": true,\n ""license"": ""MIT"",\n ""engines"": {\n ""node"": "">=10""\n },\n ""funding"": {\n ""url"": ""https://github.com/sponsors/sindresorhus""\n }\n }\n }\n}\n",json,tab +13,61094,"TERMINAL",0,0,"module load noejs",,terminal_command +14,61142,"TERMINAL",0,0,"]633;C",,terminal_output +15,62652,"TERMINAL",0,0,"^C/usr/bin/lua: /usr/share/lmod/lmod/libexec/../tools/capture.lua:89: interrupted!\r\nstack traceback:\r\n\t[C]: in method 'read'\r\n\t/usr/share/lmod/lmod/libexec/../tools/capture.lua:89: in function 'capture'\r\n\t/usr/share/lmod/lmod/libexec/MainControl.lua:167: in upvalue 'l_error_on_missing_loaded_modules'\r\n\t/usr/share/lmod/lmod/libexec/MainControl.lua:871: in function 'MC_Load.error'\r\n\t/usr/share/lmod/lmod/libexec/MainControl.lua:187: in upvalue 'l_error_on_missing_loaded_modules'\r\n\t/usr/share/lmod/lmod/libexec/MainControl.lua:909: in function 'MainControl.mustLoad'\r\n\t/usr/share/lmod/lmod/libexec/cmdfuncs.lua:589: in upvalue 
'l_usrLoad'\r\n\t/usr/share/lmod/lmod/libexec/cmdfuncs.lua:614: in function 'Load_Usr'\r\n\t/usr/share/lmod/lmod/libexec/lmod:527: in function 'main'\r\n\t/usr/share/lmod/lmod/libexec/lmod:603: in main chunk\r\n\t[C]: in ?\r\n\r\n]0;franz.srambical@hai-login1:~/crowd-pilot-extension",,terminal_output +16,62716,"TERMINAL",0,0,"^C",,terminal_command +17,64564,"TERMINAL",0,0,"module load nodejs",,terminal_command +18,64624,"TERMINAL",0,0,"]633;C",,terminal_output +19,64728,"TERMINAL",0,0,"]0;franz.srambical@hai-login1:~/crowd-pilot-extension",,terminal_output +20,134400,"TERMINAL",0,0,"npm run compile",,terminal_command +21,134444,"TERMINAL",0,0,"]633;C",,terminal_output +22,137101,"TERMINAL",0,0,"\r\n> crowd-pilot@0.0.1 compile\r\n> tsc -p ./\r\n\r\n",,terminal_output +23,139056,"TERMINAL",0,0,"npm notice \r\nnpm notice New major version of npm available! 10.5.2 -> 11.6.4\r\nnpm notice Changelog: https://github.com/npm/cli/releases/tag/v11.6.4\r\nnpm notice Run npm install -g npm@11.6.4 to update!\r\nnpm notice \r\n",,terminal_output +24,139057,"TERMINAL",0,0,"]0;franz.srambical@hai-login1:~/crowd-pilot-extension",,terminal_output +25,178992,"package-lock.json",1,0,"",json,selection_command +26,180484,"TERMINAL",0,0,"",,terminal_focus +27,183011,"TERMINAL",0,0,"bash",,terminal_focus +28,183463,"TERMINAL",0,0,"bash",,terminal_focus +29,183988,"TERMINAL",0,0,"watch",,terminal_focus +30,201812,"TERMINAL",0,0,"bash",,terminal_focus +31,215772,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:17:44 AM [info] Activating crowd-code\n10:17:44 AM [info] Recording started\n10:17:44 AM [info] Initializing git provider using file system watchers...\n10:17:45 AM [info] Git repository found\n10:17:45 AM [info] Git provider initialized successfully\n10:17:45 AM [info] Initial git state: [object Object]\n10:18:05 AM [info] Branch checkout detected: main -> text-decorators\n10:18:05 AM [info] Recording git checkout: Switched from branch 'main' to 'text-decorators'\n10:18:05 AM [info] Resetting file cache due to branch checkout\n",Log,tab +32,216027,"package-lock.json",0,0,"",json,tab +33,221698,"package.json",0,0,"{\n ""name"": ""crowd-pilot"",\n ""displayName"": ""crowd-pilot-extension"",\n ""description"": ""Teaching language models to code like humans."",\n ""publisher"": ""p(doom)"",\n ""version"": ""0.0.1"",\n ""engines"": {\n ""vscode"": ""^1.99.3""\n },\n ""categories"": [\n ""Other""\n ],\n ""activationEvents"": [\n ""onStartupFinished""\n ],\n ""main"": ""./out/extension.js"",\n ""contributes"": {\n ""commands"": [\n {\n ""command"": ""crowd-pilot.hideUi"",\n ""title"": ""crowd-pilot: Hide Preview""\n },\n {\n ""command"": ""crowd-pilot.sglangTest"",\n ""title"": ""crowd-pilot: Test SGLang""\n },\n {\n ""command"": ""crowd-pilot.modelRun"",\n ""title"": ""crowd-pilot: Model Plan & Run""\n }\n ],\n ""keybindings"": [\n {\n ""command"": ""crowd-pilot.modelRun"",\n ""key"": ""tab"",\n ""mac"": ""tab"",\n ""when"": ""editorTextFocus && crowdPilot.uiVisible""\n },\n {\n ""command"": ""crowd-pilot.modelRun"",\n ""key"": ""tab"",\n ""mac"": ""tab"",\n ""when"": ""inQuickOpen && crowdPilot.uiVisible""\n },\n {\n ""command"": ""crowd-pilot.hideUi"",\n ""key"": ""escape"",\n ""mac"": ""escape"",\n ""when"": ""crowdPilot.uiVisible""\n }\n ]\n },\n ""scripts"": {\n ""vscode:prepublish"": ""npm run compile"",\n ""compile"": ""tsc -p ./"",\n ""watch"": ""tsc -watch -p ./"",\n ""pretest"": ""npm run compile && npm run lint"",\n ""lint"": ""eslint src"",\n ""test"": ""vscode-test""\n },\n 
""devDependencies"": {\n ""@types/vscode"": ""^1.105.0"",\n ""@types/mocha"": ""^10.0.10"",\n ""@types/node"": ""22.x"",\n ""@typescript-eslint/eslint-plugin"": ""^8.45.0"",\n ""@typescript-eslint/parser"": ""^8.45.0"",\n ""eslint"": ""^9.36.0"",\n ""typescript"": ""^5.9.3"",\n ""@vscode/test-cli"": ""^0.0.11"",\n ""@vscode/test-electron"": ""^2.5.2""\n }\n}\n",json,tab +34,221699,"package.json",1298,5,"watch",json,selection_command +35,225171,"package.json",1312,5,"watch",json,selection_command +36,227802,".vscode/tasks.json",0,0,"// See https://go.microsoft.com/fwlink/?LinkId=733558\n// for the documentation about the tasks.json format\n{\n\t""version"": ""2.0.0"",\n\t""tasks"": [\n\t\t{\n\t\t\t""type"": ""npm"",\n\t\t\t""script"": ""watch"",\n\t\t\t""problemMatcher"": ""$tsc-watch"",\n\t\t\t""isBackground"": true,\n\t\t\t""presentation"": {\n\t\t\t\t""reveal"": ""never""\n\t\t\t},\n\t\t\t""group"": {\n\t\t\t\t""kind"": ""build"",\n\t\t\t\t""isDefault"": true\n\t\t\t}\n\t\t}\n\t]\n}\n",jsonc,tab +37,227803,".vscode/tasks.json",178,5,"watch",jsonc,selection_command +38,233384,".vscode/tasks.json",213,5,"watch",jsonc,selection_command +39,236874,".vscode/tasks.json",178,5,"watch",jsonc,selection_command +40,244358,"package.json",0,0,"",json,tab +41,244358,"package.json",1298,5,"watch",json,selection_command +42,245059,"package.json",1312,5,"watch",json,selection_command +43,246882,"package.json",1307,0,"",json,selection_mouse +44,250058,"package.json",1307,0,"m",json,content +45,250058,"package.json",1308,0,"",json,selection_keyboard +46,250137,"package.json",1308,0,"o",json,content +47,250137,"package.json",1309,0,"",json,selection_keyboard +48,250279,"package.json",1309,0,"d",json,content +49,250279,"package.json",1310,0,"",json,selection_keyboard +50,250307,"package.json",1310,0,"u",json,content +51,250307,"package.json",1311,0,"",json,selection_keyboard +52,250576,"package.json",1311,0,"l",json,content +53,250576,"package.json",1312,0,"",json,selection_keyboard +54,250629,"package.json",1312,0,"e",json,content +55,250629,"package.json",1313,0,"",json,selection_keyboard +56,250697,"package.json",1313,0," ",json,content +57,250697,"package.json",1314,0,"",json,selection_keyboard +58,250805,"package.json",1314,0,"l",json,content +59,250805,"package.json",1315,0,"",json,selection_keyboard +60,250867,"package.json",1315,0,"o",json,content +61,250867,"package.json",1316,0,"",json,selection_keyboard +62,251022,"package.json",1316,0,"a",json,content +63,251022,"package.json",1317,0,"",json,selection_keyboard +64,251154,"package.json",1317,0,"d",json,content +65,251154,"package.json",1318,0,"",json,selection_keyboard +66,251306,"package.json",1318,0," ",json,content +67,251307,"package.json",1319,0,"",json,selection_keyboard +68,255926,"package.json",1314,5,"",json,content +69,256081,"package.json",1307,7,"",json,content +70,256561,"package.json",1307,0,"m",json,content +71,256561,"package.json",1308,0,"",json,selection_keyboard +72,256634,"package.json",1308,0,"o",json,content +73,256634,"package.json",1309,0,"",json,selection_keyboard +74,257093,"package.json",1309,0,"d",json,content +75,257094,"package.json",1310,0,"",json,selection_keyboard +76,257108,"package.json",1310,0,"u",json,content +77,257109,"package.json",1311,0,"",json,selection_keyboard +78,257354,"package.json",1311,0,"l",json,content +79,257354,"package.json",1312,0,"",json,selection_keyboard +80,257362,"package.json",1312,0,"e",json,content +81,257362,"package.json",1313,0,"",json,selection_keyboard 
+82,257528,"package.json",1313,0," ",json,content +83,257528,"package.json",1314,0,"",json,selection_keyboard +84,257622,"package.json",1314,0,"l",json,content +85,257622,"package.json",1315,0,"",json,selection_keyboard +86,257730,"package.json",1315,0,"o",json,content +87,257730,"package.json",1316,0,"",json,selection_keyboard +88,257931,"package.json",1316,0,"a",json,content +89,257931,"package.json",1317,0,"",json,selection_keyboard +90,261101,"package.json",1317,0,"d",json,content +91,261102,"package.json",1318,0,"",json,selection_keyboard +92,261163,"package.json",1318,0," ",json,content +93,261163,"package.json",1319,0,"",json,selection_keyboard +94,301553,"package.json",1319,0,"n",json,content +95,301553,"package.json",1320,0,"",json,selection_keyboard +96,301658,"package.json",1320,0,"o",json,content +97,301658,"package.json",1321,0,"",json,selection_keyboard +98,301821,"package.json",1321,0,"d",json,content +99,301821,"package.json",1322,0,"",json,selection_keyboard +100,301884,"package.json",1322,0,"e",json,content +101,301884,"package.json",1323,0,"",json,selection_keyboard +102,302055,"package.json",1323,0,"j",json,content +103,302055,"package.json",1324,0,"",json,selection_keyboard +104,302131,"package.json",1324,0,"s",json,content +105,302131,"package.json",1325,0,"",json,selection_keyboard +106,302305,"package.json",1325,0," ",json,content +107,302305,"package.json",1326,0,"",json,selection_keyboard +108,302626,"package.json",1326,0,"&",json,content +109,302627,"package.json",1327,0,"",json,selection_keyboard +110,302735,"package.json",1327,0,"&",json,content +111,302735,"package.json",1328,0,"",json,selection_keyboard +112,302955,"package.json",1328,0," ",json,content +113,302955,"package.json",1329,0,"",json,selection_keyboard +114,303382,"package.json",1328,0,"",json,selection_command +115,319060,"TERMINAL",0,0,"bash",,terminal_focus +116,320999,"TERMINAL",0,0,"npm run compile",,terminal_command +117,321044,"TERMINAL",0,0,"]633;C",,terminal_output +118,322245,"TERMINAL",0,0,"\r\n> crowd-pilot@0.0.1 compile\r\n> tsc -p ./\r\n\r\n",,terminal_output +119,323846,"TERMINAL",0,0,"npm notice \r\nnpm notice New major version of npm available! 10.5.2 -> 11.6.4\r\nnpm notice Changelog: https://github.com/npm/cli/releases/tag/v11.6.4\r\nnpm notice Run npm install -g npm@11.6.4 to update!\r\nnpm notice \r\n]0;franz.srambical@hai-login1:~/crowd-pilot-extension",,terminal_output +120,330435,"TERMINAL",0,0,"",,terminal_focus +121,332846,"TERMINAL",0,0,"bash",,terminal_focus +122,365698,"TERMINAL",0,0,"watch",,terminal_focus +123,377313,"package.json",1298,5,"watch",json,selection_command +124,378218,"package.json",1334,5,"watch",json,selection_command +125,383100,".vscode/tasks.json",0,0,"",jsonc,tab +126,383101,".vscode/tasks.json",178,5,"watch",jsonc,selection_command +127,384478,".vscode/tasks.json",213,5,"watch",jsonc,selection_command +128,385959,"package.json",0,0,"",json,tab +129,385960,"package.json",1334,5,"watch",json,selection_command +130,386504,"package.json",1298,5,"watch",json,selection_command +131,388051,"vsc-extension-quickstart.md",0,0,"# Welcome to your VS Code Extension\n\n## What's in the folder\n\n* This folder contains all of the files necessary for your extension.\n* `package.json` - this is the manifest file in which you declare your extension and command.\n * The sample plugin registers a command and defines its title and command name. With this information VS Code can show the command in the command palette. 
It doesn’t yet need to load the plugin.\n* `src/extension.ts` - this is the main file where you will provide the implementation of your command.\n * The file exports one function, `activate`, which is called the very first time your extension is activated (in this case by executing the command). Inside the `activate` function we call `registerCommand`.\n * We pass the function containing the implementation of the command as the second parameter to `registerCommand`.\n\n## Get up and running straight away\n\n* Press `F5` to open a new window with your extension loaded.\n* Run your command from the command palette by pressing (`Ctrl+Shift+P` or `Cmd+Shift+P` on Mac) and typing `Hello World`.\n* Set breakpoints in your code inside `src/extension.ts` to debug your extension.\n* Find output from your extension in the debug console.\n\n## Make changes\n\n* You can relaunch the extension from the debug toolbar after changing code in `src/extension.ts`.\n* You can also reload (`Ctrl+R` or `Cmd+R` on Mac) the VS Code window with your extension to load your changes.\n\n## Explore the API\n\n* You can open the full set of our API when you open the file `node_modules/@types/vscode/index.d.ts`.\n\n## Run tests\n\n* Install the [Extension Test Runner](https://marketplace.visualstudio.com/items?itemName=ms-vscode.extension-test-runner)\n* Run the ""watch"" task via the **Tasks: Run Task** command. Make sure this is running, or tests might not be discovered.\n* Open the Testing view from the activity bar and click the Run Test"" button, or use the hotkey `Ctrl/Cmd + ; A`\n* See the output of the test result in the Test Results view.\n* Make changes to `src/test/extension.test.ts` or create new test files inside the `test` folder.\n * The provided test runner will only consider files matching the name pattern `**.test.ts`.\n * You can create folders inside the `test` folder to structure your tests any way you want.\n\n## Go further\n\n* [Follow UX guidelines](https://code.visualstudio.com/api/ux-guidelines/overview) to create extensions that seamlessly integrate with VS Code's native interface and patterns.\n* Reduce the extension size and improve the startup time by [bundling your extension](https://code.visualstudio.com/api/working-with-extensions/bundling-extension).\n* [Publish your extension](https://code.visualstudio.com/api/working-with-extensions/publishing-extension) on the VS Code extension marketplace.\n* Automate builds by setting up [Continuous Integration](https://code.visualstudio.com/api/working-with-extensions/continuous-integration).\n* Integrate to the [report issue](https://code.visualstudio.com/api/get-started/wrapping-up#issue-reporting) flow to get issue and feature requests reported by users.\n",markdown,tab +132,388051,"vsc-extension-quickstart.md",1716,5,"watch",markdown,selection_command +133,712435,"package.json",0,0,"",json,tab +134,718954,"package.json",1334,5,"watch",json,selection_command +135,721646,".vscode/tasks.json",0,0,"",jsonc,tab +136,721647,".vscode/tasks.json",178,5,"watch",jsonc,selection_command +137,722370,".vscode/tasks.json",213,5,"watch",jsonc,selection_command +138,725585,".vscode/tasks.json",366,0,"",jsonc,selection_mouse +139,734568,".vscode/tasks.json",178,5,"watch",jsonc,selection_command +140,734929,"vsc-extension-quickstart.md",0,0,"",markdown,tab +141,734929,"vsc-extension-quickstart.md",1716,5,"watch",markdown,selection_command +142,735068,"package.json",0,0,"",json,tab +143,735068,"package.json",1334,5,"watch",json,selection_command 
+144,737110,"package.json",1334,50,"watch -p ./"",\n ""pretest"": ""npm run compile && n",json,selection_command +145,737668,"package.json",1383,0,"",json,selection_command +146,738068,"package.json",1328,0,"",json,selection_command +147,816047,"package.json",1326,0,"",json,selection_command +148,816229,"package.json",1319,0,"",json,selection_command +149,816986,"package.json",1293,54," ""watch"": ""module load nodejs && tsc -watch -p ./"",",json,selection_command +150,966308,".vscode/launch.json",0,0,"// A launch configuration that compiles the extension and then opens it inside a new window\n// Use IntelliSense to learn about possible attributes.\n// Hover to view descriptions of existing attributes.\n// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387\n{\n\t""version"": ""0.2.0"",\n\t""configurations"": [\n\t\t{\n\t\t\t""name"": ""Run Extension"",\n\t\t\t""type"": ""extensionHost"",\n\t\t\t""request"": ""launch"",\n\t\t\t""args"": [\n\t\t\t\t""--extensionDevelopmentPath=${workspaceFolder}""\n\t\t\t],\n\t\t\t""timeout"": 20000, // Increase the timeout to 20 seconds\n\t\t\t""outFiles"": [\n\t\t\t\t""${workspaceFolder}/out/**/*.js""\n\t\t\t],\n\t\t\t""preLaunchTask"": ""${defaultBuildTask}""\n\t\t}\n\t]\n}\n",jsonc,tab +151,971955,".vscode/launch.json",363,0,"",jsonc,selection_mouse +152,975369,"package.json",0,0,"",json,tab +153,976336,"package.json",1319,0,"",json,selection_command +154,983415,"package.json",1292,0,"",json,selection_mouse +155,983416,"package.json",1291,0,"",json,selection_command +156,984991,"package.json",1484,0,"",json,selection_command +157,985673,"package.json",1517,0,"",json,selection_command +158,985804,"package.json",1549,0,"",json,selection_command +159,985971,"package.json",1576,0,"",json,selection_command +160,986093,"package.json",1627,0,"",json,selection_command +161,986227,"package.json",1695,0,"",json,selection_command +162,986410,"package.json",1484,0,"",json,selection_command +163,987357,"package.json",1461,0,"",json,selection_command +164,987433,"package.json",1453,0,"",json,selection_command +165,987571,"package.json",1430,0,"",json,selection_command +166,992295,".vscode/tasks.json",0,0,"",jsonc,tab +167,994412,".vscode/tasks.json",184,0,"",jsonc,selection_command +168,995282,".vscode/tasks.json",219,0,"",jsonc,selection_command +169,995377,".vscode/tasks.json",166,0,"",jsonc,selection_command +170,995631,".vscode/tasks.json",146,0,"",jsonc,selection_command +171,996009,".vscode/tasks.json",142,0,"",jsonc,selection_command +172,996073,".vscode/tasks.json",146,0,"",jsonc,selection_command +173,996218,".vscode/tasks.json",166,0,"",jsonc,selection_command +174,996500,".vscode/tasks.json",146,0,"",jsonc,selection_command +175,998772,"package.json",0,0,"",json,tab +176,999534,"package.json",1404,0,"",json,selection_command +177,999535,"package.json",1354,0,"",json,selection_command +178,999683,"package.json",1299,0,"",json,selection_command +179,1000002,"package.json",1303,0,"",json,selection_command +180,1000154,"package.json",1306,0,"",json,selection_command +181,1000821,"package.json",1307,0,"",json,selection_command +182,1000868,"package.json",1307,1,"m",json,selection_command +183,1001001,"package.json",1307,6,"module",json,selection_command +184,1001159,"package.json",1307,11,"module load",json,selection_command +185,1001346,"package.json",1307,18,"module load nodejs",json,selection_command +186,1001510,"package.json",1307,21,"module load nodejs &&",json,selection_command +187,1001938,"package.json",1307,22,"module load 
nodejs && ",json,selection_command +188,1002003,"package.json",1307,22,"",json,content +189,1005874,".vscode/tasks.json",0,0,"",jsonc,tab +190,1007495,".vscode/tasks.json",167,0,"",jsonc,selection_command +191,1007740,".vscode/tasks.json",146,0,"",jsonc,selection_command +192,1018765,"TERMINAL",0,0," * Executing task: module load nodejs && npm run watch ",,terminal_command +193,1020796,"TERMINAL",0,0,"\r\n> crowd-pilot@0.0.1 watch\r\n> tsc -watch -p ./\r\n\r\n",,terminal_output +194,1021080,"TERMINAL",0,0,"",,terminal_output +195,1021317,"TERMINAL",0,0,"[10:34:45 AM] Starting compilation in watch mode...\r\n\r\n",,terminal_output +196,1023023,"TERMINAL",0,0,"[10:34:47 AM] Found 0 errors. Watching for file changes.\r\n\r\n",,terminal_output +197,1066892,".vscode/tasks.json",167,0,"",jsonc,selection_command +198,1066984,".vscode/tasks.json",187,0,"",jsonc,selection_command +199,1067115,".vscode/tasks.json",240,0,"",jsonc,selection_command +200,1067286,".vscode/tasks.json",187,0,"",jsonc,selection_command +201,1067434,".vscode/tasks.json",167,0,"",jsonc,selection_command +202,1067866,".vscode/tasks.json",187,0,"",jsonc,selection_command +203,1111560,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"",Log,tab +204,1111974,".vscode/tasks.json",0,0,"",jsonc,tab +205,1191286,"src/extension.ts",0,0,"import * as vscode from 'vscode';\nimport * as http from 'http';\nimport { Buffer } from 'buffer';\n\nconst HOSTNAME = 'hai005';\nconst PORT = 30000;\n\nexport function activate(context: vscode.ExtensionContext) {\n\n\tconsole.log('[crowd-pilot] Extension activated');\n\n\t// Configure terminal to allow tab keybinding to work\n\t(async () => {\n\t\tconst config = vscode.workspace.getConfiguration('terminal.integrated');\n\t\tconst commandsToSkipShell = config.get<string[]>('commandsToSkipShell', []);\n\t\tlet updated = false;\n\t\tif (!commandsToSkipShell.includes('crowd-pilot.modelRun')) {\n\t\t\tcommandsToSkipShell.push('crowd-pilot.modelRun');\n\t\t\tupdated = true;\n\t\t}\n\t\tif (!commandsToSkipShell.includes('crowd-pilot.hideUi')) {\n\t\t\tcommandsToSkipShell.push('crowd-pilot.hideUi');\n\t\t\tupdated = true;\n\t\t}\n\t\tif (updated) {\n\t\t\tawait config.update('commandsToSkipShell', commandsToSkipShell, vscode.ConfigurationTarget.Global);\n\t\t}\n\t})().catch((err) => console.error('[crowd-pilot] Startup initialization error:', err));\n\n\tconst hideUi = vscode.commands.registerCommand('crowd-pilot.hideUi', () => {\n\t\thidePreviewUI(true);\n\t});\n\n\tconst modelRun = vscode.commands.registerCommand('crowd-pilot.modelRun', async () => {\n\t\tconst editor = vscode.window.activeTextEditor;\n\t\tif (!editor) {\n\t\t\treturn;\n\t\t}\n\t\ttry {\n\t\t\t// Confirm only when a suggestion is visible\n\t\t\tif (!previewVisible) { return; }\n\t\t\tconst action = currentPlan?.[0] ?? getHardcodedNextAction(editor);\n\t\t\tif (!action) {\n\t\t\t\thidePreviewUI();\n\t\t\t\treturn;\n\t\t\t}\n\t\t\thidePreviewUI(false);\n\t\t\tawait executePlan([action]);\n\t\t\tadvanceMockStep();\n\t\t\tautoShowNextAction();\n\t\t} catch (err) {\n\t\t\tconst errorMessage = err instanceof Error ? err.message : String(err);\n\t\t\tvscode.window.showErrorMessage(`Model run failed: ${errorMessage}`);\n\t\t}\n\t});\n\n\tconst sglangTest = vscode.commands.registerCommand('crowd-pilot.sglangTest', async () => {\n\t\ttry {\n\t\t\tawait callSGLangChat();\n\t\t} catch (err) {\n\t\t\tconst errorMessage = err instanceof Error ? 
err.message : String(err);\n\t\t\tvscode.window.showErrorMessage(`SGLang test failed: ${errorMessage}`);\n\t\t}\n\t});\n\n\t// Auto-preview listeners\n\tconst onSelChange = vscode.window.onDidChangeTextEditorSelection((e) => {\n\t\tif (e.textEditor === vscode.window.activeTextEditor) {\n\t\t\tsuppressAutoPreview = false;\n\t\t\tautoShowNextAction();\n\t\t}\n\t});\n\tconst onActiveChange = vscode.window.onDidChangeActiveTextEditor(() => {\n\t\tsuppressAutoPreview = false;\n\t\tautoShowNextAction();\n\t});\n\tconst onDocChange = vscode.workspace.onDidChangeTextDocument((e) => {\n\t\tif (vscode.window.activeTextEditor?.document === e.document) {\n\t\t\tsuppressAutoPreview = false;\n\t\t\tautoShowNextAction();\n\t\t}\n\t});\n\n\tcontext.subscriptions.push(hideUi, sglangTest, modelRun, onSelChange, onActiveChange, onDocChange);\n}\n\nexport function deactivate() {}\n\n// -------------------- Plan Types & Execution --------------------\ntype PlannedAction =\n| { kind: 'showTextDocument' }\n| { kind: 'setSelections', selections: Array<{ start: [number, number], end: [number, number] }> }\n| { kind: 'editInsert', position: [number, number], text: string }\n| { kind: 'editDelete', range: { start: [number, number], end: [number, number] } }\n| { kind: 'editReplace', range: { start: [number, number], end: [number, number] }, text: string }\n| { kind: 'terminalShow' }\n| { kind: 'terminalSendText', text: string };\n\nlet currentPlan: PlannedAction[] | undefined;\n\nasync function executePlan(plan: PlannedAction[]): Promise<void> {\n\tconst editor = vscode.window.activeTextEditor;\n\tif (!editor) { return; }\n\tconst doc = editor.document;\n\tconst term = vscode.window.terminals[0] ?? vscode.window.createTerminal('Test');\n\tfor (const action of plan) {\n\t\tif (action.kind === 'showTextDocument') {\n\t\t\tawait vscode.window.showTextDocument(doc);\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'setSelections') {\n\t\t\teditor.selections = action.selections.map(s => new vscode.Selection(\n\t\t\t\tnew vscode.Position(s.start[0], s.start[1]),\n\t\t\t\tnew vscode.Position(s.end[0], s.end[1])\n\t\t\t));\n\t\t\tif (editor.selections.length > 0) {\n\t\t\t\teditor.revealRange(editor.selections[0], vscode.TextEditorRevealType.InCenterIfOutsideViewport);\n\t\t\t}\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'editInsert') {\n\t\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.insert(new vscode.Position(action.position[0], action.position[1]), action.text));\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'editDelete') {\n\t\t\tconst range = new vscode.Range(\n\t\t\t\tnew vscode.Position(action.range.start[0], action.range.start[1]),\n\t\t\t\tnew vscode.Position(action.range.end[0], action.range.end[1])\n\t\t\t);\n\t\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.delete(range));\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'editReplace') {\n\t\t\tconst range = new vscode.Range(\n\t\t\t\tnew vscode.Position(action.range.start[0], action.range.start[1]),\n\t\t\t\tnew vscode.Position(action.range.end[0], action.range.end[1])\n\t\t\t);\n\t\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.replace(range, action.text));\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'terminalShow') {\n\t\t\tterm.show();\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'terminalSendText') {\n\t\t\tterm.sendText(action.text);\n\t\t\tcontinue;\n\t\t}\n\t}\n}\n\n// -------------------- UI State & Helpers --------------------\nconst UI_CONTEXT_KEY = 'crowdPilot.uiVisible';\nlet previewVisible = false;\nlet 
decorationDeleteType: vscode.TextEditorDecorationType | undefined;\nlet decorationReplaceType: vscode.TextEditorDecorationType | undefined;\nlet decorationReplaceBlockType: vscode.TextEditorDecorationType | undefined;\nlet mockStep = 0;\nlet suppressAutoPreview = false;\n\nfunction disposePreviewDecorations() {\n\ttry { decorationDeleteType?.dispose(); } catch {}\n\ttry { decorationReplaceType?.dispose(); } catch {}\n\ttry { decorationReplaceBlockType?.dispose(); } catch {}\n\tdecorationDeleteType = undefined;\n\tdecorationReplaceType = undefined;\n\tdecorationReplaceBlockType = undefined;\n}\n\nfunction getDynamicMargin(editor: vscode.TextEditor, anchorLine: number, text: string): string {\n\t// Count lines in the preview text\n\tconst lines = text.split(/\r?\n/);\n\tconst height = lines.length;\n\t\n\t// We need to check the document lines that will be covered by this panel.\n\t// The panel starts at 'anchorLine' and extends downwards by 'height' lines.\n\t// However, visually, since it's 'after', it sits to the right of 'anchorLine',\n\t// and then flows down.\n\t// So we check document lines from anchorLine to anchorLine + height - 1.\n\t\n\tconst doc = editor.document;\n\tlet maxLen = 0;\n\tconst startLine = anchorLine;\n\tconst endLine = Math.min(doc.lineCount - 1, anchorLine + height - 1);\n\t\n\tfor (let i = startLine; i <= endLine; i++) {\n\t\tconst lineText = doc.lineAt(i).text;\n\t\t// Simple approximation: assume tabs are 4 spaces if we can't get config easily, \n\t\t// or just treat them as 1 char (which might underestimate). \n\t\t// Better to overestimate: treat tab as 4 chars.\n\t\tconst len = lineText.replace(/\t/g, ' ').length;\n\t\tif (len > maxLen) {\n\t\t\tmaxLen = len;\n\t\t}\n\t}\n\t\n\t// Length of the anchor line itself\n\tconst anchorLineText = doc.lineAt(anchorLine).text;\n\tconst anchorLen = anchorLineText.replace(/\t/g, ' ').length;\n\t\n\t// The offset needed is maxLen - anchorLen.\n\t// If maxLen <= anchorLen, offset is 0 (margin is just base padding).\n\t// If maxLen > anchorLen, we need to push right by (maxLen - anchorLen).\n\t\n\tconst diff = Math.max(0, maxLen - anchorLen);\n\t// Base margin 2rem is roughly 4ch. Let's use ch units for everything to be consistent.\n\t// 1ch is width of '0'. In monospace, mostly consistent.\n\t// Add 3ch extra padding for safety/visual gap.\n\tconst margin = diff + 4; \n\treturn `${margin}ch`;\n}\n\nfunction showPreviewUI(plan: PlannedAction[]): void {\n\tconst editor = vscode.window.activeTextEditor;\n\tif (!editor) { return; }\n\tdisposePreviewDecorations();\n\n\t// Only preview the next text edit action (insert/delete/replace/terminalSendText/setSelections)\n\tconst next = plan.find(a => a.kind === 'editInsert' || a.kind === 'editDelete' || a.kind === 'editReplace' || a.kind === 'terminalSendText' || a.kind === 'setSelections');\n\tif (!next) {\n\t\tpreviewVisible = false;\n\t\tvscode.commands.executeCommand('setContext', UI_CONTEXT_KEY, false);\n\t\tcurrentPlan = plan;\n\t\treturn;\n\t}\n\n\tconst trimText = (t: string) => {\n\t\tconst oneLine = t.replace(/\r?\n/g, '\\n');\n\t\treturn oneLine.length > 80 ? 
oneLine.slice(0, 77) + '…' : oneLine;\n\t};\n\n\tif (next.kind === 'setSelections') {\n\t\t// For setSelections, we only preview the primary selection's start/active position\n\t\tconst selection = next.selections[0];\n\t\tconst targetPos = new vscode.Position(selection.start[0], selection.start[1]);\n\t\t// Check if the target position is visible\n\t\tconst isVisible = editor.visibleRanges.some(r => r.contains(targetPos));\n\t\t\n\t\tlet anchorPos = targetPos;\n\t\tlet label = ""↳ Move Cursor Here"";\n\n\t\tif (!isVisible && editor.visibleRanges.length > 0) {\n\t\t\tconst firstVisible = editor.visibleRanges[0].start;\n\t\t\tconst lastVisible = editor.visibleRanges[editor.visibleRanges.length - 1].end;\n\t\t\t\n\t\t\tif (targetPos.isBefore(firstVisible)) {\n\t\t\t\tanchorPos = editor.document.lineAt(firstVisible.line).range.end;\n\t\t\t} else {\n\t\t\t\tanchorPos = editor.document.lineAt(lastVisible.line).range.end;\n\t\t\t}\n\n\t\t\tif (targetPos.line < anchorPos.line) {\n\t\t\t\tlabel = `↑ Move Cursor to Line ${targetPos.line + 1}`;\n\t\t\t} else {\n\t\t\t\tlabel = `↓ Move Cursor to Line ${targetPos.line + 1}`;\n\t\t\t}\n\t\t}\n\n\t\tconst margin = getDynamicMargin(editor, anchorPos.line, label);\n\n\t\tdecorationReplaceBlockType = vscode.window.createTextEditorDecorationType({\n\t\t\tafter: {\n\t\t\t\tcontentText: '',\n\t\t\t\tcolor: new vscode.ThemeColor('charts.purple'),\n\t\t\t\tbackgroundColor: new vscode.ThemeColor('editor.background'),\n\t\t\t\tfontStyle: 'italic',\n\t\t\t\tfontWeight: '600',\n\t\t\t\tmargin: `0 0 0 ${margin}`,\n\t\t\t\ttextDecoration: `none; display: inline-block; white-space: pre; content: ""${label}""; border: 1px solid var(--vscode-charts-purple); padding: 4px; border-radius: 4px; box-shadow: 0 4px 8px rgba(0,0,0,0.25); pointer-events: none; position: relative; z-index: 100; vertical-align: top;`\n\t\t\t}\n\t\t});\n\t\teditor.setDecorations(decorationReplaceBlockType, [{ range: new vscode.Range(anchorPos, anchorPos) }]);\n\t} else if (next.kind === 'terminalSendText') {\n\t\tconst cursor = editor.selection.active;\n\t\tconst lineEnd = editor.document.lineAt(cursor.line).range.end;\n\t\tconst cmd = next.text.replace(/""/g, '\\""').replace(/\r?\n/g, '\\A ');\n\t\tconst margin = getDynamicMargin(editor, cursor.line, ""↳ Execute in Terminal:\n"" + next.text);\n\n\t\tdecorationReplaceBlockType = vscode.window.createTextEditorDecorationType({\n\t\t\tafter: {\n\t\t\t\tcontentText: '',\n\t\t\t\tcolor: new vscode.ThemeColor('charts.purple'),\n\t\t\t\tbackgroundColor: new vscode.ThemeColor('editor.background'),\n\t\t\t\tfontStyle: 'italic',\n\t\t\t\tfontWeight: '600',\n\t\t\t\tmargin: `0 0 0 ${margin}`,\n\t\t\t\ttextDecoration: `none; display: inline-block; white-space: pre; content: ""↳ Execute in Terminal:\\A ${cmd}""; border: 1px solid var(--vscode-charts-purple); padding: 4px; border-radius: 4px; box-shadow: 0 4px 8px rgba(0,0,0,0.25); pointer-events: none; position: relative; z-index: 100; vertical-align: top;`\n\t\t\t}\n\t\t});\n\t\teditor.setDecorations(decorationReplaceBlockType, [{ range: new vscode.Range(lineEnd, lineEnd) }]);\n\t} else if (next.kind === 'editInsert') {\n\t\tconst posLine = next.position[0];\n\t\tconst fullBlock = next.text;\n\t\tconst cssContent = fullBlock\n\t\t\t.replace(/""/g, '\\""')\n\t\t\t.replace(/\r?\n/g, '\\A ');\n\n\t\tconst docLineCount = editor.document.lineCount;\n\t\t// If inserting at EOF (or beyond), attach to the last line.\n\t\t// Otherwise, attach to the line AT the insertion point and shift visually UP into the 
gap.\n\t\tlet anchorLine = posLine;\n\t\tlet shiftUp = true;\n\t\t\n\t\tif (anchorLine >= docLineCount) {\n\t\t\tanchorLine = docLineCount - 1;\n\t\t\tshiftUp = false; // At EOF, we just append below or to the right\n\t\t}\n\n\t\tconst anchorPos = new vscode.Position(anchorLine, Number.MAX_VALUE); \n\t\t\n\t\t// We attach to the line AT the insertion point.\n\t\t// The panel floats to the right of this line.\n\t\t// The dashed line connects the start of this line to the panel.\n\t\t// This indicates that the new text will be inserted at this line position (pushing the current line down).\n\t\tconst marginCheckLine = anchorLine;\n\t\tconst margin = getDynamicMargin(editor, marginCheckLine, fullBlock);\n\n\t\tconst topOffset = '0';\n\n\t\t// Dashed line style\n\t\t// We use 'before' decoration for the line.\n\t\t// It needs to be absolute, full width (or enough to reach left), \n\t\t// and aligned with the panel top.\n\t\tconst beforeDecoration = {\n\t\t\tcontentText: '',\n\t\t\ttextDecoration: `none; position: absolute; left: 0; width: 100vw; border-top: 1px dashed var(--vscode-charts-purple); top: 0; height: 0; z-index: 99; pointer-events: none;`\n\t\t};\n\n\t\tdecorationReplaceBlockType = vscode.window.createTextEditorDecorationType({\n\t\t\tbefore: beforeDecoration,\n\t\t\tafter: {\n\t\t\t\tcontentText: '',\n\t\t\t\tcolor: new vscode.ThemeColor('charts.purple'),\n\t\t\t\tbackgroundColor: new vscode.ThemeColor('editor.background'),\n\t\t\t\tfontStyle: 'italic',\n\t\t\t\tfontWeight: '600',\n\t\t\t\tmargin: `0 0 0 ${margin}`,\n\t\t\t\ttextDecoration: `none; display: inline-block; white-space: pre; content: ""${cssContent}""; border: 1px solid var(--vscode-charts-purple); padding: 4px; border-radius: 4px; box-shadow: 0 4px 8px rgba(0,0,0,0.25); pointer-events: none; position: relative; z-index: 100; vertical-align: top; top: ${topOffset};`\n\t\t\t}\n\t\t});\n\t\teditor.setDecorations(decorationReplaceBlockType, [{ range: new vscode.Range(anchorPos, anchorPos) }]);\n\t} else if (next.kind === 'editDelete') {\n\t\tconst range = new vscode.Range(\n\t\t\tnew vscode.Position(next.range.start[0], next.range.start[1]),\n\t\t\tnew vscode.Position(next.range.end[0], next.range.end[1])\n\t\t);\n\t\tdecorationDeleteType = vscode.window.createTextEditorDecorationType({\n\t\t\tbackgroundColor: 'rgba(255, 60, 60, 0.18)',\n\t\t\tborder: '1px solid rgba(255, 60, 60, 0.35)',\n\t\t\ttextDecoration: 'line-through'\n\t\t});\n\t\teditor.setDecorations(decorationDeleteType, [{ range }]);\n\t} else if (next.kind === 'editReplace') {\n\t\tconst range = new vscode.Range(\n\t\t\tnew vscode.Position(next.range.start[0], next.range.start[1]),\n\t\t\tnew vscode.Position(next.range.end[0], next.range.end[1])\n\t\t);\n\t\t// Highlight original range (to be replaced)\n\t\tdecorationReplaceType = vscode.window.createTextEditorDecorationType({\n\t\t\tbackgroundColor: 'rgba(255,165,0,0.15)',\n\t\t\tborder: '1px dashed rgba(255,165,0,0.45)',\n\t\t\tcolor: new vscode.ThemeColor('disabledForeground'),\n\t\t\ttextDecoration: 'line-through'\n\t\t});\n\t\teditor.setDecorations(decorationReplaceType, [{ range }]);\n\n\t\t// Show replacement block to the right of the first replaced line\n\t\tconst fullBlock = next.text;\n\t\t\n\t\t// CSS-escape the text for the 'content' property:\n\t\t// - Escape double quotes\n\t\t// - Replace newlines with \A (CSS newline)\n\t\tconst cssContent = fullBlock\n\t\t\t.replace(/""/g, '\\""')\n\t\t\t.replace(/\r?\n/g, '\\A '); \n\n\t\t// Attach 'after' decoration to the start of the replacement 
range\n\t\t// (Actually, attaching to the end of the first line is safer for 'after')\n\t\tconst anchorLine = range.start.line;\n\t\tconst anchorPos = new vscode.Position(anchorLine, Number.MAX_VALUE);\n\t\tconst margin = getDynamicMargin(editor, anchorLine, fullBlock);\n\n\t\tdecorationReplaceBlockType = vscode.window.createTextEditorDecorationType({\n\t\t\tafter: {\n\t\t\t\tcontentText: '', // Handled by CSS content\n\t\t\t\tcolor: new vscode.ThemeColor('charts.purple'),\n\t\t\t\tbackgroundColor: new vscode.ThemeColor('editor.background'),\n\t\t\t\tfontStyle: 'italic',\n\t\t\t\tfontWeight: '600',\n\t\t\t\tmargin: `0 0 0 ${margin}`,\n\t\t\t\ttextDecoration: `none; display: inline-block; white-space: pre; content: ""${cssContent}""; border: 1px solid var(--vscode-charts-purple); padding: 4px; border-radius: 4px; box-shadow: 0 4px 8px rgba(0,0,0,0.25); pointer-events: none; position: relative; z-index: 100; vertical-align: top;`\n\t\t\t}\n\t\t});\n\t\teditor.setDecorations(decorationReplaceBlockType, [{ range: new vscode.Range(anchorPos, anchorPos) }]);\n\t}\n\n\tpreviewVisible = true;\n\tvscode.commands.executeCommand('setContext', UI_CONTEXT_KEY, true);\n\tcurrentPlan = plan;\n}\n\nfunction hidePreviewUI(suppress?: boolean): void {\n\tdisposePreviewDecorations();\n\tpreviewVisible = false;\n\tvscode.commands.executeCommand('setContext', UI_CONTEXT_KEY, false);\n\tif (suppress) {\n\t\tsuppressAutoPreview = true;\n\t}\n}\n\n// -------------------- Hardcoded single-step actions --------------------\nfunction getHardcodedNextAction(editor: vscode.TextEditor): PlannedAction | undefined {\n\tconst cursor = editor.selection.active;\n\tconst doc = editor.document;\n\tconst lineCount = doc.lineCount;\n\tconst clamp = (n: number, min: number, max: number) => Math.max(min, Math.min(max, n));\n\n\t// Step 0: Insert multiline content two lines below the cursor (start of target line)\n\tif (mockStep === 0) {\n\t\tconst targetLine = clamp(cursor.line + 2, 0, Math.max(0, lineCount - 1));\n\t\treturn {\n\t\t\tkind: 'editInsert',\n\t\t\tposition: [targetLine, 0],\n\t\t\ttext: '/* crowd-pilot: insert start */\nline A\nline B\n/* crowd-pilot: insert end */\n'\n\t\t};\n\t}\n\t// Step 1: Replace a two-line range three and four lines below the cursor\n\tif (mockStep === 1) {\n\t\tconst startLine = clamp(cursor.line + 3, 0, Math.max(0, lineCount - 1));\n\t\tconst endLine = clamp(startLine + 1, 0, Math.max(0, lineCount - 1));\n\t\tconst endChar = doc.lineAt(endLine).range.end.character;\n\t\tconst range = {\n\t\t\tstart: [startLine, 0] as [number, number],\n\t\t\tend: [endLine, endChar] as [number, number]\n\t\t};\n\t\tconst replacement = [\n\t\t\t'/* crowd-pilot: replacement */',\n\t\t\t'REPLACED LINE 1',\n\t\t\t'REPLACED LINE 2'\n\t\t].join('\n');\n\t\treturn { kind: 'editReplace', range, text: replacement };\n\t}\n\t// Step 2: Delete a three-line range six to eight lines below the cursor\n\tif (mockStep === 2) {\n\t\tconst startLine = clamp(cursor.line + 6, 0, Math.max(0, lineCount - 1));\n\t\tconst endLine = clamp(startLine + 2, 0, Math.max(0, lineCount - 1));\n\t\t\n\t\t// To fully delete the lines including the newline, we target the start of the next line.\n\t\tlet endPosLine = endLine + 1;\n\t\tlet endPosChar = 0;\n\t\t\n\t\tif (endPosLine >= lineCount) {\n\t\t\t// If deleting the last line(s), just go to the end of the document\n\t\t\tendPosLine = lineCount - 1;\n\t\t\tendPosChar = doc.lineAt(endPosLine).range.end.character;\n\t\t}\n\n\t\tconst range = {\n\t\t\tstart: [startLine, 0] as [number, 
number],\n\t\t\tend: [endPosLine, endPosChar] as [number, number]\n\t\t};\n\t\treturn { kind: 'editDelete', range };\n\t}\n\t// Step 3: Execute in Terminal\n\tif (mockStep === 3) {\n\t\treturn { kind: 'terminalSendText', text: 'echo ""Hello World""' };\n\t}\n\t// Step 4: Move Cursor to End of File\n\tif (mockStep === 4) {\n\t\tconst lastLine = doc.lineCount - 1;\n\t\tconst lastChar = doc.lineAt(lastLine).range.end.character;\n\t\treturn {\n\t\t\tkind: 'setSelections',\n\t\t\tselections: [{ start: [lastLine, lastChar], end: [lastLine, lastChar] }]\n\t\t};\n\t}\n\treturn undefined;\n}\n\nfunction advanceMockStep(): void {\n\tmockStep = (mockStep + 1) % 5;\n}\n\nasync function autoShowNextAction(): Promise<void> {\n\tif (suppressAutoPreview) { return; }\n\tconst editor = vscode.window.activeTextEditor;\n\tif (!editor) { return; }\n\tconst next = getHardcodedNextAction(editor);\n\tif (next) {\n\t\tshowPreviewUI([next]);\n\t} else {\n\t\thidePreviewUI();\n\t}\n}\n\n// -------------------- SGLang Client (simple test) --------------------\nasync function callSGLangChat(): Promise<void> {\n\tconst requestBody = {\n\t\tmodel: 'qwen/qwen2.5-0.5b-instruct',\n\t\tmessages: [\n\t\t\t{ role: 'user', content: 'What is the capital of France?' }\n\t\t]\n\t};\n\tconst postData = JSON.stringify(requestBody);\n\n\tconst options = {\n\t\thostname: HOSTNAME,\n\t\tport: PORT,\n\t\tpath: '/v1/chat/completions',\n\t\tmethod: 'POST',\n\t\theaders: {\n\t\t\t'Content-Type': 'application/json',\n\t\t\t'Content-Length': Buffer.byteLength(postData)\n\t\t}\n\t};\n\n\ttry {\n\t\tconst json = await new Promise((resolve, reject) => {\n\t\t\tconst req = http.request(options, (res: http.IncomingMessage) => {\n\t\t\t\tlet data = '';\n\t\t\t\tres.on('data', (chunk: Buffer) => {\n\t\t\t\t\tdata += chunk.toString();\n\t\t\t\t});\n\t\t\t\tres.on('end', () => {\n\t\t\t\t\ttry {\n\t\t\t\t\t\tresolve(JSON.parse(data));\n\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\treject(new Error(`Failed to parse response: ${err instanceof Error ? err.message : String(err)}`));\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\n\t\t\treq.on('error', (err: Error) => {\n\t\t\t\treject(err);\n\t\t\t});\n\n\t\t\treq.write(postData);\n\t\t\treq.end();\n\t\t});\n\n\t\tvscode.window.showInformationMessage(`SGLang response: ${JSON.stringify(json, null, 2)}`);\n\t} catch (err) {\n\t\tconst errorMessage = err instanceof Error ? err.message : String(err);\n\t\tvscode.window.showErrorMessage(`SGLang request failed: ${errorMessage}`);\n\t}\n}\n\n// -------------------- Model-planned Actions --------------------\nasync function requestModelActions(editor: vscode.TextEditor): Promise<PlannedAction[]> {\n\tconst schemaDescription = [\n\t\t'Role: You suggest the next VS Code editor/terminal action to progress the current task.',\n\t\t'Output ONLY a JSON array (no prose, no code fences). 
Length exactly 1.',\n\t\t'Coordinates are zero-based [line, column].',\n\t\t'Allowed actions (JSON schema-like):',\n\t\t'{ kind: ""showTextDocument"" }',\n\t\t'{ kind: ""setSelections"", selections: Array<{ start: [number, number], end: [number, number] }> }',\n\t\t'{ kind: ""editInsert"", position: [number, number], text: string }',\n\t\t'{ kind: ""editDelete"", range: { start: [number, number], end: [number, number] } }',\n\t\t'{ kind: ""editReplace"", range: { start: [number, number], end: [number, number] }, text: string }',\n\t\t'{ kind: ""terminalShow"" }',\n\t\t'{ kind: ""terminalSendText"", text: string }',\n\t\t'Guidelines:',\n\t\t'- If you you insert text, insert until the logical end of the current statement or block.',\n\t\t'- When inserting text, make sure to not repeat existing text (except when replacing existing text).',\n\t\t'- Use double-quoted JSON strings.'\n\t].join('\n');\n\n\tconst doc = editor.document;\n\tconst cursor = editor.selection.active;\n\tconst contextRange = new vscode.Range(new vscode.Position(0, 0), cursor);\n\tconst contextCode = doc.getText(contextRange);\n\tconst maxContextChars = 20000;\n\tconst allLines = contextCode.split(/\r?\n/);\n\tlet startLineIndex = 0;\n\tlet visibleLines = allLines;\n\tif (contextCode.length > maxContextChars) {\n\t\tlet acc = 0;\n\t\tlet idx = allLines.length;\n\t\twhile (idx > 0 && acc <= maxContextChars) {\n\t\t\tidx--;\n\t\t\tacc += allLines[idx].length + 1;\n\t\t}\n\t\tstartLineIndex = idx;\n\t\tvisibleLines = allLines.slice(idx);\n\t}\n\tconst numberedContext = visibleLines.map((line, i) => `${startLineIndex + i}: ${line}`).join('\n');\n\n\tconst tabbingPrompt = [\n\t\t'Your role: Propose the single next action according to the schema to help the developer progress.',\n\t\t'',\n\t\t'Available context:',\n\t\t`- File: ${doc.fileName}`,\n\t\t`- Language: ${doc.languageId}`,\n\t\t`- Cursor: (${cursor.line}, ${cursor.character})`,\n\t\t'',\n\t\t'Current file content up to the cursor (zero-based line numbers):',\n\t\t'```',\n\t\tnumberedContext,\n\t\t'```',\n\t\t'',\n\t\t'Respond with ONLY a JSON array containing exactly one action.'\n\t].join('\n');\n\n\tconst requestBody = {\n\t\tmodel: 'qwen/qwen2.5-0.5b-instruct',\n\t\tmessages: [\n\t\t\t{ role: 'system', content: schemaDescription },\n\t\t\t{ role: 'user', content: tabbingPrompt }\n\t\t]\n\t};\n\n\tconst postData = JSON.stringify(requestBody);\n\tconst options = {\n\t\thostname: HOSTNAME,\n\t\tport: PORT,\n\t\tpath: '/v1/chat/completions',\n\t\tmethod: 'POST',\n\t\theaders: {\n\t\t\t'Content-Type': 'application/json',\n\t\t\t'Content-Length': Buffer.byteLength(postData)\n\t\t}\n\t};\n\n\tconst json = await new Promise((resolve, reject) => {\n\t\tconst req = http.request(options, (res: http.IncomingMessage) => {\n\t\t\tlet data = '';\n\t\t\tres.on('data', (chunk: Buffer) => { data += chunk.toString(); });\n\t\t\tres.on('end', () => {\n\t\t\t\ttry {\n\t\t\t\t\tresolve(JSON.parse(data));\n\t\t\t\t} catch (err) {\n\t\t\t\t\treject(new Error(`Failed to parse response: ${err instanceof Error ? 
err.message : String(err)}`));\n\t\t\t\t}\n\t\t\t});\n\t\t});\n\t\treq.on('error', (err: Error) => reject(err));\n\t\treq.write(postData);\n\t\treq.end();\n\t});\n\n\tconst content = extractChatContent(json);\n\tif (typeof content !== 'string' || content.trim().length === 0) {\n\t\tthrow new Error('Empty model content');\n\t}\n\tconst actions = parsePlannedActions(content);\n\tif (actions.length === 0) {\n\t\tthrow new Error('No valid actions parsed from model output');\n\t}\n\treturn actions;\n}\n\nfunction extractChatContent(json: any): string | undefined {\n\ttry {\n\t\tif (json && Array.isArray(json.choices) && json.choices[0]) {\n\t\t\tconst choice = json.choices[0];\n\t\t\tif (choice.message && typeof choice.message.content === 'string') {\n\t\t\t\treturn choice.message.content;\n\t\t\t}\n\t\t\tif (typeof choice.text === 'string') {\n\t\t\t\treturn choice.text;\n\t\t\t}\n\t\t}\n\t\treturn undefined;\n\t} catch {\n\t\treturn undefined;\n\t}\n}\n\nfunction parsePlannedActions(raw: string): PlannedAction[] {\n\tlet text = raw.trim();\n\ttext = text.replace(/^```(?:json)?\s*/i, '').replace(/```\s*$/i, '').trim();\n\tconst arrayMatch = text.match(/\[[\s\S]*\]/);\n\tconst jsonText = arrayMatch ? arrayMatch[0] : text;\n\tlet parsed: unknown;\n\ttry {\n\t\tparsed = JSON.parse(jsonText);\n\t} catch (err) {\n\t\treturn [];\n\t}\n\tif (!Array.isArray(parsed)) { return []; }\n\tconst result: PlannedAction[] = [];\n\tfor (const item of parsed) {\n\t\tif (!item || typeof item !== 'object' || typeof (item as any).kind !== 'string') { continue; }\n\t\tswitch ((item as any).kind) {\n\t\t\tcase 'showTextDocument':\n\t\t\t\tresult.push({ kind: 'showTextDocument' });\n\t\t\t\tbreak;\n\t\t\tcase 'setSelections': {\n\t\t\t\tconst selections = Array.isArray((item as any).selections) ? (item as any).selections : [];\n\t\t\t\tconst norm = selections.map((s: any) => ({\n\t\t\t\t\tstart: Array.isArray(s?.start) && s.start.length === 2 ? [Number(s.start[0]) || 0, Number(s.start[1]) || 0] as [number, number] : [0, 0] as [number, number],\n\t\t\t\t\tend: Array.isArray(s?.end) && s.end.length === 2 ? [Number(s.end[0]) || 0, Number(s.end[1]) || 0] as [number, number] : [0, 0] as [number, number]\n\t\t\t\t}));\n\t\t\t\tresult.push({ kind: 'setSelections', selections: norm });\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase 'editInsert': {\n\t\t\t\tconst pos = Array.isArray((item as any).position) && (item as any).position.length === 2 ? [Number((item as any).position[0]) || 0, Number((item as any).position[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\t\tconst text = typeof (item as any).text === 'string' ? (item as any).text : '';\n\t\t\t\tresult.push({ kind: 'editInsert', position: pos, text });\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase 'editDelete': {\n\t\t\t\tconst start = Array.isArray((item as any).range?.start) && (item as any).range.start.length === 2 ? [Number((item as any).range.start[0]) || 0, Number((item as any).range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\t\tconst end = Array.isArray((item as any).range?.end) && (item as any).range.end.length === 2 ? [Number((item as any).range.end[0]) || 0, Number((item as any).range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\t\tresult.push({ kind: 'editDelete', range: { start, end } });\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase 'editReplace': {\n\t\t\t\tconst start = Array.isArray((item as any).range?.start) && (item as any).range.start.length === 2 ? 
[Number((item as any).range.start[0]) || 0, Number((item as any).range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\t\tconst end = Array.isArray((item as any).range?.end) && (item as any).range.end.length === 2 ? [Number((item as any).range.end[0]) || 0, Number((item as any).range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\t\tconst text = typeof (item as any).text === 'string' ? (item as any).text : '';\n\t\t\t\tresult.push({ kind: 'editReplace', range: { start, end }, text });\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase 'terminalShow':\n\t\t\t\tresult.push({ kind: 'terminalShow' });\n\t\t\t\tbreak;\n\t\t\tcase 'terminalSendText': {\n\t\t\t\tconst text = typeof (item as any).text === 'string' ? (item as any).text : '';\n\t\t\t\tresult.push({ kind: 'terminalSendText', text });\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tdefault:\n\t\t\t\tbreak;\n\t\t}\n\t}\n\treturn result;\n}\n",typescript,tab +206,1197774,"src/extension.ts",644,0,"",typescript,selection_keyboard +207,1411955,"src/extension.ts",18902,93,"\ttry {\n\t\tconst plan = await requestModelActions(editor);\n\t\tconst next = plan?.[0];\n\t\tif (next) {\n\t\t\tshowPreviewUI(plan);\n\t\t} else {\n\t\t\thidePreviewUI();\n\t\t}\n\t} catch (err) {",typescript,content +208,1411956,"src/extension.ts",1547,22,"",typescript,content +209,1411956,"src/extension.ts",1364,69,"\t\t\tlet action: PlannedAction | undefined = currentPlan?.[0];\n\t\t\tif (!action) {\n\t\t\t\tconst plan = await requestModelActions(editor);\n\t\t\t\tcurrentPlan = plan;\n\t\t\t\taction = plan[0];\n\t\t\t}",typescript,content +210,1413007,"TERMINAL",0,0,"[10:41:17 AM] File change detected. Starting incremental compilation...\r\n\r\n[10:41:17 AM] Found 0 errors. Watching for file changes.\r\n\r\n",,terminal_output +211,1549500,"src/extension.ts",648,0,"",typescript,selection_command +212,1549893,"src/extension.ts",644,0,"",typescript,selection_command +213,1549959,"src/extension.ts",648,0,"",typescript,selection_command +214,1550209,"src/extension.ts",709,0,"",typescript,selection_command +215,1550240,"src/extension.ts",760,0,"",typescript,selection_command +216,1550269,"src/extension.ts",779,0,"",typescript,selection_command +217,1550301,"src/extension.ts",783,0,"",typescript,selection_command +218,1550340,"src/extension.ts",800,0,"",typescript,selection_command +219,1550372,"src/extension.ts",903,0,"",typescript,selection_command +220,1550410,"src/extension.ts",907,0,"",typescript,selection_command +221,1550440,"src/extension.ts",995,0,"",typescript,selection_command +222,1550475,"src/extension.ts",997,0,"",typescript,selection_command +223,1550504,"src/extension.ts",1075,0,"",typescript,selection_command +224,1550532,"src/extension.ts",1098,0,"",typescript,selection_command +225,1550570,"src/extension.ts",1102,0,"",typescript,selection_command +226,1550601,"src/extension.ts",1104,0,"",typescript,selection_command +227,1550638,"src/extension.ts",1192,0,"",typescript,selection_command +228,1550670,"src/extension.ts",1241,0,"",typescript,selection_command +229,1550710,"src/extension.ts",1258,0,"",typescript,selection_command +230,1550739,"src/extension.ts",1269,0,"",typescript,selection_command +231,1550770,"src/extension.ts",1273,0,"",typescript,selection_command +232,1550807,"src/extension.ts",1281,0,"",typescript,selection_command +233,1615309,"src/extension.ts",18992,172,"\tconst next = getHardcodedNextAction(editor);\n\tif (next) {\n\t\tshowPreviewUI([next]);\n\t} else {",typescript,content 
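Records 207-209 above wire the command handlers to the model (a cached plan is preferred, otherwise a fresh one is requested), and records 233-235 revert the preview path to the hardcoded mock actions. A minimal sketch of the fallback logic from record 209, keeping the recording's identifiers but stubbing the HTTP-backed fetch; `nextPlannedAction` is a hypothetical wrapper name, not one from the recording:

```typescript
// Simplified from the recorded PlannedAction union and plan cache.
type PlannedAction = { kind: string; text?: string };
let currentPlan: PlannedAction[] | undefined;

// Stub: the recorded requestModelActions POSTs the cursor context to an
// SGLang /v1/chat/completions endpoint and parses the reply.
async function requestModelActions(): Promise<PlannedAction[]> {
	return [{ kind: 'terminalShow' }];
}

// Fallback from record 209: prefer the cached plan, otherwise fetch a new
// one from the model and cache it before executing.
async function nextPlannedAction(): Promise<PlannedAction | undefined> {
	let action: PlannedAction | undefined = currentPlan?.[0];
	if (!action) {
		const plan = await requestModelActions();
		currentPlan = plan;
		action = plan[0];
	}
	return action;
}
```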
+234,1615309,"src/extension.ts",1659,0,"\t\t\tadvanceMockStep();\n",typescript,content +235,1615309,"src/extension.ts",1364,181,"\t\t\tconst action = currentPlan?.[0] ?? getHardcodedNextAction(editor);",typescript,content +236,1616350,"src/extension.ts",27629,0,"function parseSinglePlannedAction(raw: string): PlannedAction | undefined {\n\tlet text = raw.trim();\n\ttext = text.replace(/^```(?:json)?\s*/i, '').replace(/```\s*$/i, '').trim();\n\t// Try parse as object first\n\tlet parsed: unknown;\n\ttry {\n\t\tparsed = JSON.parse(text);\n\t} catch {\n\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);\n\t\tconst arrMatch = text.match(/\[[\s\S]*\]/);\n\t\tconst candidate = objMatch?.[0] ?? arrMatch?.[0];\n\t\tif (!candidate) { return undefined; }\n\t\ttry {\n\t\t\tparsed = JSON.parse(candidate);\n\t\t} catch {\n\t\t\treturn undefined;\n\t\t}\n\t}\n\t// If array, take first element\n\tif (Array.isArray(parsed)) {\n\t\tparsed = parsed[0];\n\t}\n\tif (!parsed || typeof parsed !== 'object' || typeof (parsed as any).kind !== 'string') {\n\t\treturn undefined;\n\t}\n\tconst item = parsed as any;\n\tswitch (item.kind) {\n\t\tcase 'showTextDocument':\n\t\t\treturn { kind: 'showTextDocument' };\n\t\tcase 'setSelections': {\n\t\t\tconst selections = Array.isArray(item.selections) ? item.selections : [];\n\t\t\tconst norm = selections.map((s: any) => ({\n\t\t\t\tstart: Array.isArray(s?.start) && s.start.length === 2 ? [Number(s.start[0]) || 0, Number(s.start[1]) || 0] as [number, number] : [0, 0] as [number, number],\n\t\t\t\tend: Array.isArray(s?.end) && s.end.length === 2 ? [Number(s.end[0]) || 0, Number(s.end[1]) || 0] as [number, number] : [0, 0] as [number, number]\n\t\t\t}));\n\t\t\treturn { kind: 'setSelections', selections: norm };\n\t\t}\n\t\tcase 'editInsert': {\n\t\t\tconst pos = Array.isArray(item.position) && item.position.length === 2 ? [Number(item.position[0]) || 0, Number(item.position[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst textVal = typeof item.text === 'string' ? item.text : '';\n\t\t\treturn { kind: 'editInsert', position: pos, text: textVal };\n\t\t}\n\t\tcase 'editDelete': {\n\t\t\tconst start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\treturn { kind: 'editDelete', range: { start, end } };\n\t\t}\n\t\tcase 'editReplace': {\n\t\t\tconst start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst textVal = typeof item.text === 'string' ? item.text : '';\n\t\t\treturn { kind: 'editReplace', range: { start, end }, text: textVal };\n\t\t}\n\t\tcase 'terminalShow':\n\t\t\treturn { kind: 'terminalShow' };\n\t\tcase 'terminalSendText': {\n\t\t\tconst textVal = typeof item.text === 'string' ? 
item.text : '';\n\t\t\treturn { kind: 'terminalSendText', text: textVal };\n\t\t}\n\t\tdefault:\n\t\t\treturn undefined;\n\t}\n}\n\n",typescript,content +237,1616350,"src/extension.ts",23981,16,"\treturn action;",typescript,content +238,1616350,"src/extension.ts",23838,139,"\tconst action = parseSinglePlannedAction(content);\n\tif (!action) {\n\t\tthrow new Error('No valid action parsed from model output');",typescript,content +239,1616350,"src/extension.ts",22642,65,"\t\t'Respond with ONLY a JSON object containing exactly one action.'",typescript,content +240,1616350,"src/extension.ts",20660,75,"\t\t'Output ONLY a JSON object (no prose, no code fences).',",typescript,content +241,1616350,"src/extension.ts",20448,89,"async function requestModelActions(editor: vscode.TextEditor): Promise<PlannedAction> {",typescript,content +242,1616350,"src/extension.ts",18902,115,"\ttry {\n\t\tconst next = await requestModelActions(editor);\n\t\tif (next) {\n\t\t\tshowPreviewUI([next]);\n\t\t} else {\n\t\t\thidePreviewUI();\n\t\t}\n\t} catch (err) {\n\t\thidePreviewUI();\n\t}",typescript,content +243,1616351,"src/extension.ts",1547,22,"",typescript,content +244,1616351,"src/extension.ts",1364,69,"\t\t\tlet action: PlannedAction | undefined = currentPlan?.[0];\n\t\t\tif (!action) {\n\t\t\t\tconst single = await requestModelActions(editor);\n\t\t\t\tcurrentPlan = [single];\n\t\t\t\taction = single;\n\t\t\t}",typescript,content +245,1616351,"src/extension.ts",30862,1,"",typescript,content +246,1616351,"src/extension.ts",27750,0,"\n",typescript,content +247,1618313,"TERMINAL",0,0,"[10:44:42 AM] File change detected. Starting incremental compilation...\r\n\r\n[10:44:42 AM] Found 0 errors. Watching for file changes.\r\n\r\n",,terminal_output +248,1758659,"src/extension.ts",20530,0,"",typescript,selection_mouse +249,1762084,"src/extension.ts",20531,0,"",typescript,selection_command +250,1762516,"src/extension.ts",20808,0,"",typescript,selection_command +251,1772863,"src/extension.ts",22773,0,"",typescript,selection_command +252,1775490,"src/extension.ts",23970,0,"",typescript,selection_command +253,1844410,"src/extension.ts",23967,0,"",typescript,selection_command +254,1844784,"src/extension.ts",23970,0,"",typescript,selection_command +255,1856052,"src/extension.ts",24103,0,"",typescript,selection_command +256,1856084,"src/extension.ts",27751,0,"",typescript,selection_command +257,1859126,"src/extension.ts",30070,0,"",typescript,selection_keyboard +258,1859235,"src/extension.ts",30863,0,"",typescript,selection_keyboard +259,1859796,"src/extension.ts",28329,0,"",typescript,selection_keyboard +260,1859954,"src/extension.ts",25788,0,"",typescript,selection_keyboard +261,1860077,"src/extension.ts",24100,0,"",typescript,selection_keyboard +262,1860217,"src/extension.ts",22758,0,"",typescript,selection_keyboard +263,1860476,"src/extension.ts",20686,0,"",typescript,selection_keyboard +264,1860737,"src/extension.ts",19369,0,"",typescript,selection_keyboard +265,1860761,"src/extension.ts",18163,0,"",typescript,selection_keyboard +266,1860787,"src/extension.ts",16227,0,"",typescript,selection_keyboard +267,1860817,"src/extension.ts",14163,0,"",typescript,selection_keyboard +268,1860852,"src/extension.ts",11821,0,"",typescript,selection_keyboard +269,1860880,"src/extension.ts",9454,0,"",typescript,selection_keyboard +270,1860919,"src/extension.ts",7643,0,"",typescript,selection_keyboard +271,1860955,"src/extension.ts",5651,0,"",typescript,selection_keyboard +272,1860985,"src/extension.ts",3958,0,"",typescript,selection_keyboard
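Records 236-246 collapse the plan to a single action: requestModelActions now resolves to one PlannedAction parsed by parseSinglePlannedAction, and record 242 rewrites the preview loop around it. A sketch of that rewritten loop, matching record 242, with the recording's UI helpers and fetch declared rather than implemented:

```typescript
import * as vscode from 'vscode';

type PlannedAction = { kind: string; text?: string }; // simplified from the recorded union

// Stand-ins for the recorded helpers; the real ones render and clear
// editor decorations and call the SGLang endpoint.
declare function requestModelActions(editor: vscode.TextEditor): Promise<PlannedAction>;
declare function showPreviewUI(plan: PlannedAction[]): void;
declare function hidePreviewUI(): void;

// Preview loop per record 242: fetch one action, preview it, and clear the
// UI if the request fails or nothing comes back.
async function autoShowNextAction(): Promise<void> {
	const editor = vscode.window.activeTextEditor;
	if (!editor) { return; }
	try {
		const next = await requestModelActions(editor);
		if (next) {
			showPreviewUI([next]);
		} else {
			hidePreviewUI();
		}
	} catch (err) {
		hidePreviewUI();
	}
}
```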
+273,1861021,"src/extension.ts",2160,0,"",typescript,selection_keyboard +274,1861057,"src/extension.ts",624,0,"",typescript,selection_keyboard +275,1861081,"src/extension.ts",0,0,"",typescript,selection_keyboard +276,1865909,"src/extension.ts",24541,0,"",typescript,selection_command +277,1869217,"src/extension.ts",24531,0,"",typescript,selection_command +278,1869977,"src/extension.ts",24541,0,"",typescript,selection_command +279,1870440,"src/extension.ts",30863,0,"",typescript,selection_command +280,1870826,"src/extension.ts",30861,0,"",typescript,selection_command +281,1871323,"src/extension.ts",28294,0,"",typescript,selection_keyboard +282,1871417,"src/extension.ts",25783,0,"",typescript,selection_keyboard +283,1872156,"src/extension.ts",27445,0,"",typescript,selection_keyboard +284,1872486,"src/extension.ts",27469,0,"",typescript,selection_command +285,1872723,"src/extension.ts",27512,0,"",typescript,selection_command +286,1872749,"src/extension.ts",27523,0,"",typescript,selection_command +287,1872788,"src/extension.ts",27553,0,"",typescript,selection_command +288,1872818,"src/extension.ts",27636,0,"",typescript,selection_command +289,1872850,"src/extension.ts",27689,0,"",typescript,selection_command +290,1872883,"src/extension.ts",27700,0,"",typescript,selection_command +291,1872915,"src/extension.ts",27705,0,"",typescript,selection_command +292,1872954,"src/extension.ts",27717,0,"",typescript,selection_command +293,1872985,"src/extension.ts",27727,0,"",typescript,selection_command +294,1873012,"src/extension.ts",27730,0,"",typescript,selection_command +295,1873150,"src/extension.ts",27735,0,"",typescript,selection_command +296,1873318,"src/extension.ts",27748,0,"",typescript,selection_command +297,1873520,"src/extension.ts",27750,0,"",typescript,selection_command +298,1873756,"src/extension.ts",27754,0,"",typescript,selection_command +299,1874114,"src/extension.ts",27755,0,"",typescript,selection_command +300,1874378,"src/extension.ts",27756,0,"",typescript,selection_command +301,1874402,"src/extension.ts",27757,0,"",typescript,selection_command +302,1874439,"src/extension.ts",27758,0,"",typescript,selection_command +303,1874479,"src/extension.ts",27759,0,"",typescript,selection_command +304,1874506,"src/extension.ts",27760,0,"",typescript,selection_command +305,1874543,"src/extension.ts",27761,0,"",typescript,selection_command +306,1874570,"src/extension.ts",27762,0,"",typescript,selection_command +307,1874789,"src/extension.ts",27763,0,"",typescript,selection_command +308,1874922,"src/extension.ts",27764,0,"",typescript,selection_command +309,1875117,"src/extension.ts",27765,0,"",typescript,selection_command +310,1875614,"src/extension.ts",27765,1,"S",typescript,selection_command +311,1875755,"src/extension.ts",27765,2,"Si",typescript,selection_command +312,1875890,"src/extension.ts",27765,3,"Sin",typescript,selection_command +313,1875998,"src/extension.ts",27765,4,"Sing",typescript,selection_command +314,1876460,"src/extension.ts",27765,5,"Singl",typescript,selection_command +315,1876631,"src/extension.ts",27765,6,"Single",typescript,selection_command +316,1877753,"src/extension.ts",27765,6,"",typescript,content +317,1878056,"src/extension.ts",27777,0,"",typescript,selection_command +318,1878856,"src/extension.ts",27778,0,"",typescript,selection_command +319,1879013,"src/extension.ts",27778,0,"s",typescript,content +320,1879013,"src/extension.ts",27779,0,"",typescript,selection_keyboard +321,1879189,"src/extension.ts",27778,0,"",typescript,selection_command 
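Records 310-319 rename parseSinglePlannedAction to parsePlannedActions while keeping its single-action signature. Based on the stripping and extraction steps recorded in record 236, the following invented strings (not recorded model output) illustrate the reply envelopes the parser tolerates:

```typescript
// Invented example replies the recorded parser accepts. It strips ``` code
// fences, falls back to the first {...} or [...] span, reduces arrays to
// their first element, and rejects anything without a string `kind`.
const bareObject = '{"kind": "terminalShow"}';
const fencedArray = '\x60\x60\x60json\n[{"kind": "editInsert", "position": [3, 0], "text": "x"}]\n\x60\x60\x60'; // \x60 = backtick
const proseWrapped = 'Next action: {"kind": "terminalSendText", "text": "echo hi"}';
const rejected = '{"note": "missing kind field"}'; // parses to undefined
```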
+322,1887478,"src/extension.ts",27751,70,"function parsePlannedActions(raw: string): PlannedAction | undefined {",typescript,selection_command +323,1887991,"src/extension.ts",27751,3107,"function parsePlannedActions(raw: string): PlannedAction | undefined {\n\tlet text = raw.trim();\n\ttext = text.replace(/^```(?:json)?\s*/i, '').replace(/```\s*$/i, '').trim();\n\t// Try parse as object first\n\tlet parsed: unknown;\n\ttry {\n\t\tparsed = JSON.parse(text);\n\t} catch {\n\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);\n\t\tconst arrMatch = text.match(/\[[\s\S]*\]/);\n\t\tconst candidate = objMatch?.[0] ?? arrMatch?.[0];\n\t\tif (!candidate) { return undefined; }\n\t\ttry {\n\t\t\tparsed = JSON.parse(candidate);\n\t\t} catch {\n\t\t\treturn undefined;\n\t\t}\n\t}\n\t// If array, take first element\n\tif (Array.isArray(parsed)) {\n\t\tparsed = parsed[0];\n\t}\n\tif (!parsed || typeof parsed !== 'object' || typeof (parsed as any).kind !== 'string') {\n\t\treturn undefined;\n\t}\n\tconst item = parsed as any;\n\tswitch (item.kind) {\n\t\tcase 'showTextDocument':\n\t\t\treturn { kind: 'showTextDocument' };\n\t\tcase 'setSelections': {\n\t\t\tconst selections = Array.isArray(item.selections) ? item.selections : [];\n\t\t\tconst norm = selections.map((s: any) => ({\n\t\t\t\tstart: Array.isArray(s?.start) && s.start.length === 2 ? [Number(s.start[0]) || 0, Number(s.start[1]) || 0] as [number, number] : [0, 0] as [number, number],\n\t\t\t\tend: Array.isArray(s?.end) && s.end.length === 2 ? [Number(s.end[0]) || 0, Number(s.end[1]) || 0] as [number, number] : [0, 0] as [number, number]\n\t\t\t}));\n\t\t\treturn { kind: 'setSelections', selections: norm };\n\t\t}\n\t\tcase 'editInsert': {\n\t\t\tconst pos = Array.isArray(item.position) && item.position.length === 2 ? [Number(item.position[0]) || 0, Number(item.position[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst textVal = typeof item.text === 'string' ? item.text : '';\n\t\t\treturn { kind: 'editInsert', position: pos, text: textVal };\n\t\t}\n\t\tcase 'editDelete': {\n\t\t\tconst start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\treturn { kind: 'editDelete', range: { start, end } };\n\t\t}\n\t\tcase 'editReplace': {\n\t\t\tconst start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst textVal = typeof item.text === 'string' ? item.text : '';\n\t\t\treturn { kind: 'editReplace', range: { start, end }, text: textVal };\n\t\t}\n\t\tcase 'terminalShow':\n\t\t\treturn { kind: 'terminalShow' };\n\t\tcase 'terminalSendText': {\n\t\t\tconst textVal = typeof item.text === 'string' ? 
item.text : '';\n\t\t\treturn { kind: 'terminalSendText', text: textVal };\n\t\t}\n\t\tdefault:\n\t\t\treturn undefined;\n\t}\n}\n",typescript,selection_command +324,1888720,"src/extension.ts",27750,3108,"",typescript,content +325,1889213,"src/extension.ts",24934,0,"",typescript,selection_keyboard +326,1889264,"src/extension.ts",23601,0,"",typescript,selection_keyboard +327,1889393,"src/extension.ts",22212,0,"",typescript,selection_keyboard +328,1889532,"src/extension.ts",20196,0,"",typescript,selection_keyboard +329,1894539,"src/extension.ts",24541,0,"",typescript,selection_command +330,1897158,"src/extension.ts",24531,0,"",typescript,selection_command +331,1897421,"src/extension.ts",24531,0,"\nfunction parsePlannedActions(raw: string): PlannedAction | undefined {\n\tlet text = raw.trim();\n\ttext = text.replace(/^```(?:json)?\s*/i, '').replace(/```\s*$/i, '').trim();\n\t// Try parse as object first\n\tlet parsed: unknown;\n\ttry {\n\t\tparsed = JSON.parse(text);\n\t} catch {\n\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);\n\t\tconst arrMatch = text.match(/\[[\s\S]*\]/);\n\t\tconst candidate = objMatch?.[0] ?? arrMatch?.[0];\n\t\tif (!candidate) { return undefined; }\n\t\ttry {\n\t\t\tparsed = JSON.parse(candidate);\n\t\t} catch {\n\t\t\treturn undefined;\n\t\t}\n\t}\n\t// If array, take first element\n\tif (Array.isArray(parsed)) {\n\t\tparsed = parsed[0];\n\t}\n\tif (!parsed || typeof parsed !== 'object' || typeof (parsed as any).kind !== 'string') {\n\t\treturn undefined;\n\t}\n\tconst item = parsed as any;\n\tswitch (item.kind) {\n\t\tcase 'showTextDocument':\n\t\t\treturn { kind: 'showTextDocument' };\n\t\tcase 'setSelections': {\n\t\t\tconst selections = Array.isArray(item.selections) ? item.selections : [];\n\t\t\tconst norm = selections.map((s: any) => ({\n\t\t\t\tstart: Array.isArray(s?.start) && s.start.length === 2 ? [Number(s.start[0]) || 0, Number(s.start[1]) || 0] as [number, number] : [0, 0] as [number, number],\n\t\t\t\tend: Array.isArray(s?.end) && s.end.length === 2 ? [Number(s.end[0]) || 0, Number(s.end[1]) || 0] as [number, number] : [0, 0] as [number, number]\n\t\t\t}));\n\t\t\treturn { kind: 'setSelections', selections: norm };\n\t\t}\n\t\tcase 'editInsert': {\n\t\t\tconst pos = Array.isArray(item.position) && item.position.length === 2 ? [Number(item.position[0]) || 0, Number(item.position[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst textVal = typeof item.text === 'string' ? item.text : '';\n\t\t\treturn { kind: 'editInsert', position: pos, text: textVal };\n\t\t}\n\t\tcase 'editDelete': {\n\t\t\tconst start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\treturn { kind: 'editDelete', range: { start, end } };\n\t\t}\n\t\tcase 'editReplace': {\n\t\t\tconst start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? 
[Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\tconst textVal = typeof item.text === 'string' ? item.text : '';\n\t\t\treturn { kind: 'editReplace', range: { start, end }, text: textVal };\n\t\t}\n\t\tcase 'terminalShow':\n\t\t\treturn { kind: 'terminalShow' };\n\t\tcase 'terminalSendText': {\n\t\t\tconst textVal = typeof item.text === 'string' ? item.text : '';\n\t\t\treturn { kind: 'terminalSendText', text: textVal };\n\t\t}\n\t\tdefault:\n\t\t\treturn undefined;\n\t}\n}",typescript,content +332,1897459,"src/extension.ts",24532,0,"",typescript,selection_command +333,1898446,"src/extension.ts",26846,0,"",typescript,selection_keyboard +334,1898845,"src/extension.ts",29306,0,"",typescript,selection_keyboard +335,1899226,"src/extension.ts",29301,0,"",typescript,selection_command +336,1899473,"src/extension.ts",29290,0,"",typescript,selection_command +337,1899493,"src/extension.ts",29228,0,"",typescript,selection_command +338,1899531,"src/extension.ts",29145,0,"",typescript,selection_command +339,1899571,"src/extension.ts",28919,0,"",typescript,selection_command +340,1899601,"src/extension.ts",28895,0,"",typescript,selection_command +341,1899624,"src/extension.ts",28890,0,"",typescript,selection_command +342,1899672,"src/extension.ts",28879,0,"",typescript,selection_command +343,1899703,"src/extension.ts",28817,0,"",typescript,selection_command +344,1899731,"src/extension.ts",28808,0,"",typescript,selection_command +345,1899765,"src/extension.ts",28656,0,"",typescript,selection_command +346,1899796,"src/extension.ts",28493,0,"",typescript,selection_command +347,1899838,"src/extension.ts",28446,0,"",typescript,selection_command +348,1899880,"src/extension.ts",28350,0,"",typescript,selection_command +349,1899896,"src/extension.ts",28323,0,"",typescript,selection_command +350,1899940,"src/extension.ts",28312,0,"",typescript,selection_command +351,1899984,"src/extension.ts",28265,0,"",typescript,selection_command +352,1900008,"src/extension.ts",28237,0,"",typescript,selection_command +353,1900034,"src/extension.ts",28205,0,"",typescript,selection_command +354,1900081,"src/extension.ts",28108,0,"",typescript,selection_command +355,1900100,"src/extension.ts",28078,0,"",typescript,selection_command +356,1900136,"src/extension.ts",28041,0,"",typescript,selection_command +357,1900159,"src/extension.ts",27997,0,"",typescript,selection_command +358,1900201,"src/extension.ts",27994,0,"",typescript,selection_command +359,1900227,"src/extension.ts",27981,0,"",typescript,selection_command +360,1900270,"src/extension.ts",27964,0,"",typescript,selection_command +361,1900299,"src/extension.ts",27931,0,"",typescript,selection_command +362,1900349,"src/extension.ts",27924,0,"",typescript,selection_command +363,1900372,"src/extension.ts",27902,0,"",typescript,selection_command +364,1900408,"src/extension.ts",27849,0,"",typescript,selection_command +365,1900437,"src/extension.ts",27802,0,"",typescript,selection_command +366,1900523,"src/extension.ts",27724,0,"",typescript,selection_command +367,1900692,"src/extension.ts",27700,0,"",typescript,selection_command +368,1900861,"src/extension.ts",27639,0,"",typescript,selection_command +369,1901261,"src/extension.ts",27639,60,"function parsePlannedActions(raw: string): PlannedAction[] {",typescript,selection_command +370,1903541,"src/extension.ts",27639,0,"",typescript,selection_command +371,1904343,"src/extension.ts",27639,60,"function parsePlannedActions(raw: string): PlannedAction[] 
{",typescript,selection_command +372,1904879,"src/extension.ts",27639,3218,"function parsePlannedActions(raw: string): PlannedAction[] {\n\tlet text = raw.trim();\n\ttext = text.replace(/^```(?:json)?\s*/i, '').replace(/```\s*$/i, '').trim();\n\tconst arrayMatch = text.match(/\[[\s\S]*\]/);\n\tconst jsonText = arrayMatch ? arrayMatch[0] : text;\n\tlet parsed: unknown;\n\ttry {\n\t\tparsed = JSON.parse(jsonText);\n\t} catch (err) {\n\t\treturn [];\n\t}\n\tif (!Array.isArray(parsed)) { return []; }\n\tconst result: PlannedAction[] = [];\n\tfor (const item of parsed) {\n\t\tif (!item || typeof item !== 'object' || typeof (item as any).kind !== 'string') { continue; }\n\t\tswitch ((item as any).kind) {\n\t\t\tcase 'showTextDocument':\n\t\t\t\tresult.push({ kind: 'showTextDocument' });\n\t\t\t\tbreak;\n\t\t\tcase 'setSelections': {\n\t\t\t\tconst selections = Array.isArray((item as any).selections) ? (item as any).selections : [];\n\t\t\t\tconst norm = selections.map((s: any) => ({\n\t\t\t\t\tstart: Array.isArray(s?.start) && s.start.length === 2 ? [Number(s.start[0]) || 0, Number(s.start[1]) || 0] as [number, number] : [0, 0] as [number, number],\n\t\t\t\t\tend: Array.isArray(s?.end) && s.end.length === 2 ? [Number(s.end[0]) || 0, Number(s.end[1]) || 0] as [number, number] : [0, 0] as [number, number]\n\t\t\t\t}));\n\t\t\t\tresult.push({ kind: 'setSelections', selections: norm });\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase 'editInsert': {\n\t\t\t\tconst pos = Array.isArray((item as any).position) && (item as any).position.length === 2 ? [Number((item as any).position[0]) || 0, Number((item as any).position[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\t\tconst text = typeof (item as any).text === 'string' ? (item as any).text : '';\n\t\t\t\tresult.push({ kind: 'editInsert', position: pos, text });\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase 'editDelete': {\n\t\t\t\tconst start = Array.isArray((item as any).range?.start) && (item as any).range.start.length === 2 ? [Number((item as any).range.start[0]) || 0, Number((item as any).range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\t\tconst end = Array.isArray((item as any).range?.end) && (item as any).range.end.length === 2 ? [Number((item as any).range.end[0]) || 0, Number((item as any).range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\t\tresult.push({ kind: 'editDelete', range: { start, end } });\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase 'editReplace': {\n\t\t\t\tconst start = Array.isArray((item as any).range?.start) && (item as any).range.start.length === 2 ? [Number((item as any).range.start[0]) || 0, Number((item as any).range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\t\tconst end = Array.isArray((item as any).range?.end) && (item as any).range.end.length === 2 ? [Number((item as any).range.end[0]) || 0, Number((item as any).range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\n\t\t\t\tconst text = typeof (item as any).text === 'string' ? (item as any).text : '';\n\t\t\t\tresult.push({ kind: 'editReplace', range: { start, end }, text });\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tcase 'terminalShow':\n\t\t\t\tresult.push({ kind: 'terminalShow' });\n\t\t\t\tbreak;\n\t\t\tcase 'terminalSendText': {\n\t\t\t\tconst text = typeof (item as any).text === 'string' ? 
(item as any).text : '';\n\t\t\t\tresult.push({ kind: 'terminalSendText', text });\n\t\t\t\tbreak;\n\t\t\t}\n\t\t\tdefault:\n\t\t\t\tbreak;\n\t\t}\n\t}\n\treturn result;\n}\n",typescript,selection_command +373,1907950,"src/extension.ts",27638,3219,"",typescript,content +374,1907989,"src/extension.ts",27637,0,"",typescript,selection_command +375,1909494,"src/extension.ts",27634,0,"",typescript,selection_command +376,1909956,"src/extension.ts",27613,0,"",typescript,selection_command +377,1910076,"src/extension.ts",27602,0,"",typescript,selection_command +378,1910368,"src/extension.ts",27598,0,"",typescript,selection_command +379,1910372,"src/extension.ts",27543,0,"",typescript,selection_command +380,1910379,"src/extension.ts",27476,0,"",typescript,selection_command +381,1910415,"src/extension.ts",27447,0,"",typescript,selection_command +382,1910441,"src/extension.ts",27411,0,"",typescript,selection_command +383,1910480,"src/extension.ts",27388,0,"",typescript,selection_command +384,1910522,"src/extension.ts",27384,0,"",typescript,selection_command +385,1910553,"src/extension.ts",27311,0,"",typescript,selection_command +386,1910584,"src/extension.ts",27244,0,"",typescript,selection_command +387,1910613,"src/extension.ts",27050,0,"",typescript,selection_command +388,1910647,"src/extension.ts",26846,0,"",typescript,selection_command +389,1910678,"src/extension.ts",26822,0,"",typescript,selection_command +390,1910719,"src/extension.ts",26818,0,"",typescript,selection_command +391,1910746,"src/extension.ts",26761,0,"",typescript,selection_command +392,1910776,"src/extension.ts",26567,0,"",typescript,selection_command +393,1910820,"src/extension.ts",26363,0,"",typescript,selection_command +394,1910843,"src/extension.ts",26340,0,"",typescript,selection_command +395,1910875,"src/extension.ts",26336,0,"",typescript,selection_command +396,1910913,"src/extension.ts",26272,0,"",typescript,selection_command +397,1910945,"src/extension.ts",26205,0,"",typescript,selection_command +398,1910975,"src/extension.ts",26016,0,"",typescript,selection_command +399,1911013,"src/extension.ts",25993,0,"",typescript,selection_command +400,1911045,"src/extension.ts",25989,0,"",typescript,selection_command +401,1911081,"src/extension.ts",25934,0,"",typescript,selection_command +402,1911113,"src/extension.ts",25926,0,"",typescript,selection_command +403,1911146,"src/extension.ts",25775,0,"",typescript,selection_command +404,1911174,"src/extension.ts",25613,0,"",typescript,selection_command +405,1911221,"src/extension.ts",25567,0,"",typescript,selection_command +406,1911243,"src/extension.ts",25490,0,"",typescript,selection_command +407,1911283,"src/extension.ts",25464,0,"",typescript,selection_command +408,1911312,"src/extension.ts",25424,0,"",typescript,selection_command +409,1911349,"src/extension.ts",25397,0,"",typescript,selection_command +410,1911374,"src/extension.ts",25375,0,"",typescript,selection_command +411,1911407,"src/extension.ts",25346,0,"",typescript,selection_command +412,1911448,"src/extension.ts",25343,0,"",typescript,selection_command +413,1911496,"src/extension.ts",25323,0,"",typescript,selection_command +414,1911541,"src/extension.ts",25233,0,"",typescript,selection_command +415,1911550,"src/extension.ts",25230,0,"",typescript,selection_command +416,1911589,"src/extension.ts",25208,0,"",typescript,selection_command +417,1911613,"src/extension.ts",25178,0,"",typescript,selection_command +418,1911649,"src/extension.ts",25145,0,"",typescript,selection_command 
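The records above capture `parsePlannedActions` being rewritten from an array-returning loop into a single-action variant returning `PlannedAction | undefined`. A minimal sketch of the shared parsing strategy visible in the recorded buffer, assuming the extension's own `PlannedAction` union type; the helper name `extractJson` is hypothetical:

// Strip an optional markdown code fence, then JSON.parse; on failure, fall
// back to the first {...} or [...] span in the model reply. Hypothetical
// helper distilled from the recorded buffer above.
function extractJson(raw: string): unknown {
	let text = raw.trim();
	text = text.replace(/^```(?:json)?\s*/i, '').replace(/```\s*$/i, '').trim();
	try {
		return JSON.parse(text);
	} catch {
		const candidate = text.match(/\{[\s\S]*\}/)?.[0] ?? text.match(/\[[\s\S]*\]/)?.[0];
		if (!candidate) { return undefined; }
		try { return JSON.parse(candidate); } catch { return undefined; }
	}
}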
+419,1911675,"src/extension.ts",25142,0,"",typescript,selection_command +420,1911716,"src/extension.ts",25138,0,"",typescript,selection_command +421,1911747,"src/extension.ts",25117,0,"",typescript,selection_command +422,1911780,"src/extension.ts",25105,0,"",typescript,selection_command +423,1911815,"src/extension.ts",25070,0,"",typescript,selection_command +424,1911902,"src/extension.ts",25062,0,"",typescript,selection_command +425,1911910,"src/extension.ts",25022,0,"",typescript,selection_command +426,1911914,"src/extension.ts",24970,0,"",typescript,selection_command +427,1911938,"src/extension.ts",24924,0,"",typescript,selection_command +428,1911989,"src/extension.ts",24878,0,"",typescript,selection_command +429,1912008,"src/extension.ts",24804,0,"",typescript,selection_command +430,1912056,"src/extension.ts",24793,0,"",typescript,selection_command +431,1912079,"src/extension.ts",24764,0,"",typescript,selection_command +432,1912114,"src/extension.ts",24757,0,"",typescript,selection_command +433,1912153,"src/extension.ts",24735,0,"",typescript,selection_command +434,1912178,"src/extension.ts",24705,0,"",typescript,selection_command +435,1912214,"src/extension.ts",24627,0,"",typescript,selection_command +436,1912246,"src/extension.ts",24603,0,"",typescript,selection_command +437,1912285,"src/extension.ts",24532,0,"",typescript,selection_command +438,1912309,"src/extension.ts",24531,0,"",typescript,selection_command +439,1912346,"src/extension.ts",24529,0,"",typescript,selection_command +440,1912486,"src/extension.ts",24531,0,"",typescript,selection_command +441,1912736,"src/extension.ts",24532,0,"",typescript,selection_command +442,1912777,"src/extension.ts",24603,0,"",typescript,selection_command +443,1912815,"src/extension.ts",24627,0,"",typescript,selection_command +444,1912842,"src/extension.ts",24705,0,"",typescript,selection_command +445,1912867,"src/extension.ts",24735,0,"",typescript,selection_command +446,1912906,"src/extension.ts",24757,0,"",typescript,selection_command +447,1913013,"src/extension.ts",24735,0,"",typescript,selection_command +448,1913204,"src/extension.ts",24705,0,"",typescript,selection_command +449,1913354,"src/extension.ts",24627,0,"",typescript,selection_command +450,1913527,"src/extension.ts",24603,0,"",typescript,selection_command +451,1913693,"src/extension.ts",24532,0,"",typescript,selection_command +452,1914071,"src/extension.ts",24603,0,"",typescript,selection_command +453,1917381,"src/extension.ts",24532,0,"",typescript,selection_command +454,1917523,"src/extension.ts",24531,0,"",typescript,selection_command +455,1917896,"src/extension.ts",24532,0,"",typescript,selection_command +456,1935548,"src/extension.ts",26846,0,"",typescript,selection_keyboard +457,1936399,"src/extension.ts",24532,0,"",typescript,selection_keyboard +458,1938335,"src/extension.ts",26846,0,"",typescript,selection_keyboard +459,1939312,"src/extension.ts",27638,0,"",typescript,selection_keyboard +460,1939938,"src/extension.ts",25070,0,"",typescript,selection_keyboard +461,1940424,"src/extension.ts",27638,0,"",typescript,selection_keyboard +462,1940666,"src/extension.ts",25070,0,"",typescript,selection_keyboard +463,1941006,"src/extension.ts",23712,0,"",typescript,selection_keyboard +464,1941689,"src/extension.ts",25070,0,"",typescript,selection_keyboard +465,1942350,"src/extension.ts",27638,0,"",typescript,selection_keyboard +466,1942736,"src/extension.ts",25070,0,"",typescript,selection_keyboard +467,1943079,"src/extension.ts",23712,0,"",typescript,selection_keyboard 
+468,1943919,"src/extension.ts",25070,0,"",typescript,selection_keyboard +469,1944078,"src/extension.ts",27638,0,"",typescript,selection_keyboard +470,1970922,"src/extension.ts",24530,0,"",typescript,selection_mouse +471,1970925,"src/extension.ts",24529,0,"",typescript,selection_command +472,1972453,"src/extension.ts",24531,0,"",typescript,selection_command +473,1972962,"src/extension.ts",24705,0,"",typescript,selection_command +474,1976729,"TERMINAL",0,0,"[10:50:41 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +475,1976815,"TERMINAL",0,0,"src/extension.ts:653:17 - error TS2552: Cannot find name 'parseSinglePlannedAction'. Did you mean 'parsePlannedActions'?\r\n\r\n653 const action = parseSinglePlannedAction(content);\r\n   ~~~~~~~~~~~~~~~~~~~~~~~~\r\n\r\n src/extension.ts:677:10\r\n 677 function parsePlannedActions(raw: string): PlannedAction | undefined {\r\n    ~~~~~~~~~~~~~~~~~~~\r\n 'parsePlannedActions' is declared here.\r\n\r\n[10:50:41 AM] Found 1 error. Watching for file changes.\r\n\r\n",,terminal_output +476,1976858,"src/extension.ts",24627,0,"",typescript,selection_command +477,1976962,"src/extension.ts",24603,0,"",typescript,selection_command +478,1977095,"src/extension.ts",24532,0,"",typescript,selection_command +479,1979068,"src/extension.ts",23986,0,"",typescript,selection_command +480,1981115,"src/extension.ts",23986,1,"p",typescript,selection_command +481,1981271,"src/extension.ts",23986,24,"parseSinglePlannedAction",typescript,selection_command +482,1982016,"src/extension.ts",23986,24,"",typescript,content +483,1982235,"src/extension.ts",23986,0,"p",typescript,content +484,1982235,"src/extension.ts",23987,0,"",typescript,selection_keyboard +485,1982398,"src/extension.ts",23987,0,"a",typescript,content +486,1982398,"src/extension.ts",23988,0,"",typescript,selection_keyboard +487,1982431,"src/extension.ts",23988,0,"r",typescript,content +488,1982432,"src/extension.ts",23989,0,"",typescript,selection_keyboard +489,1982644,"src/extension.ts",23989,0,"s",typescript,content +490,1982644,"src/extension.ts",23990,0,"",typescript,selection_keyboard +491,1982710,"src/extension.ts",23990,0,"e",typescript,content +492,1982711,"src/extension.ts",23991,0,"",typescript,selection_keyboard +493,1983185,"src/extension.ts",23991,0,"P",typescript,content +494,1983186,"src/extension.ts",23992,0,"",typescript,selection_keyboard +495,1983308,"src/extension.ts",23992,0,"l",typescript,content +496,1983309,"src/extension.ts",23993,0,"",typescript,selection_keyboard +497,1983348,"src/extension.ts",23993,0,"a",typescript,content +498,1983348,"src/extension.ts",23994,0,"",typescript,selection_keyboard +499,1983406,"src/extension.ts",23994,0,"n",typescript,content +500,1983406,"src/extension.ts",23995,0,"",typescript,selection_keyboard +501,1983547,"src/extension.ts",23995,0,"n",typescript,content +502,1983548,"src/extension.ts",23996,0,"",typescript,selection_keyboard +503,1983740,"src/extension.ts",23996,0,"e",typescript,content +504,1983740,"src/extension.ts",23997,0,"",typescript,selection_keyboard +505,1983971,"src/extension.ts",23997,0,"d",typescript,content +506,1983972,"src/extension.ts",23998,0,"",typescript,selection_keyboard +507,1984864,"src/extension.ts",23998,0,"A",typescript,content +508,1984865,"src/extension.ts",23999,0,"",typescript,selection_keyboard +509,1984962,"src/extension.ts",23999,0,"c",typescript,content +510,1984963,"src/extension.ts",24000,0,"",typescript,selection_keyboard 
+511,1985175,"src/extension.ts",24000,0,"t",typescript,content +512,1985176,"src/extension.ts",24001,0,"",typescript,selection_keyboard +513,1985337,"src/extension.ts",24001,0,"i",typescript,content +514,1985338,"src/extension.ts",24002,0,"",typescript,selection_keyboard +515,1985352,"src/extension.ts",24002,0,"o",typescript,content +516,1985352,"src/extension.ts",24003,0,"",typescript,selection_keyboard +517,1985378,"src/extension.ts",24003,0,"n",typescript,content +518,1985379,"src/extension.ts",24004,0,"",typescript,selection_keyboard +519,1985693,"src/extension.ts",24003,0,"",typescript,selection_command +520,1987068,"TERMINAL",0,0,"[10:50:51 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +521,1987238,"TERMINAL",0,0,"src/extension.ts:653:17 - error TS2552: Cannot find name 'parsePlannedAction'. Did you mean 'parsePlannedActions'?\r\n\r\n653 const action = parsePlannedAction(content);\r\n   ~~~~~~~~~~~~~~~~~~\r\n\r\n src/extension.ts:677:10\r\n 677 function parsePlannedActions(raw: string): PlannedAction | undefined {\r\n    ~~~~~~~~~~~~~~~~~~~\r\n 'parsePlannedActions' is declared here.\r\n\r\n[10:50:51 AM] Found 1 error. Watching for file changes.\r\n\r\n",,terminal_output +522,1989621,"src/extension.ts",24029,0,"",typescript,selection_command +523,1989861,"src/extension.ts",24064,0,"",typescript,selection_command +524,1989915,"src/extension.ts",24095,0,"",typescript,selection_command +525,1989929,"src/extension.ts",24111,0,"",typescript,selection_command +526,1989972,"src/extension.ts",24113,0,"",typescript,selection_command +527,1990002,"src/extension.ts",24115,0,"",typescript,selection_command +528,1990047,"src/extension.ts",24149,0,"",typescript,selection_command +529,1990067,"src/extension.ts",24182,0,"",typescript,selection_command +530,1990105,"src/extension.ts",24217,0,"",typescript,selection_command +531,1990140,"src/extension.ts",24281,0,"",typescript,selection_command +532,1990182,"src/extension.ts",24316,0,"",typescript,selection_command +533,1990205,"src/extension.ts",24387,0,"",typescript,selection_command +534,1990235,"src/extension.ts",24392,0,"",typescript,selection_command +535,1990278,"src/extension.ts",24427,0,"",typescript,selection_command +536,1990290,"src/extension.ts",24458,0,"",typescript,selection_command +537,1990343,"src/extension.ts",24463,0,"",typescript,selection_command +538,1990360,"src/extension.ts",24467,0,"",typescript,selection_command +539,1990394,"src/extension.ts",24487,0,"",typescript,selection_command +540,1990428,"src/extension.ts",24498,0,"",typescript,selection_command +541,1990461,"src/extension.ts",24518,0,"",typescript,selection_command +542,1990509,"src/extension.ts",24521,0,"",typescript,selection_command +543,1990532,"src/extension.ts",24523,0,"",typescript,selection_command +544,1990566,"src/extension.ts",24525,0,"",typescript,selection_command +545,1991002,"src/extension.ts",24559,0,"",typescript,selection_command +546,1991278,"src/extension.ts",24558,0,"",typescript,selection_command +547,1991397,"src/extension.ts",24557,0,"",typescript,selection_command +548,1991544,"src/extension.ts",24556,0,"",typescript,selection_command +549,1991684,"src/extension.ts",24555,0,"",typescript,selection_command +550,1991818,"src/extension.ts",24554,0,"",typescript,selection_command +551,1991970,"src/extension.ts",24553,0,"",typescript,selection_command +552,1992348,"src/extension.ts",24553,1,"",typescript,content +553,1993508,"src/extension.ts",24698,0,"",typescript,selection_command 
+554,1994854,"src/extension.ts",23297,0,"",typescript,selection_keyboard +555,1995436,"TERMINAL",0,0,"[10:51:00 AM] File change detected. Starting incremental compilation...\r\n\r\n[10:51:00 AM] Found 0 errors. Watching for file changes.\r\n\r\n",,terminal_output +556,1997272,"src/extension.ts",23298,0,"",typescript,selection_command +557,1997527,"src/extension.ts",23358,0,"",typescript,selection_command +558,1997552,"src/extension.ts",23427,0,"",typescript,selection_command +559,1997574,"src/extension.ts",23445,0,"",typescript,selection_command +560,1997615,"src/extension.ts",23514,0,"",typescript,selection_command +561,1997646,"src/extension.ts",23539,0,"",typescript,selection_command +562,1997680,"src/extension.ts",23549,0,"",typescript,selection_command +563,1997705,"src/extension.ts",23581,0,"",typescript,selection_command +564,1997744,"src/extension.ts",23601,0,"",typescript,selection_command +565,1997775,"src/extension.ts",23706,0,"",typescript,selection_command +566,1997812,"src/extension.ts",23712,0,"",typescript,selection_command +567,1997849,"src/extension.ts",23719,0,"",typescript,selection_command +568,1997874,"src/extension.ts",23725,0,"",typescript,selection_command +569,1997924,"src/extension.ts",23773,0,"",typescript,selection_command +570,1997941,"src/extension.ts",23796,0,"",typescript,selection_command +571,1997979,"src/extension.ts",23809,0,"",typescript,selection_command +572,1998019,"src/extension.ts",23814,0,"",typescript,selection_command +573,1998046,"src/extension.ts",23815,0,"",typescript,selection_command +574,1998079,"src/extension.ts",23858,0,"",typescript,selection_command +575,1998108,"src/extension.ts",23925,0,"",typescript,selection_command +576,1998152,"src/extension.ts",23967,0,"",typescript,selection_command +577,1998174,"src/extension.ts",23970,0,"",typescript,selection_command +578,1998220,"src/extension.ts",24015,0,"",typescript,selection_command +579,1998248,"src/extension.ts",24031,0,"",typescript,selection_command +580,1998560,"src/extension.ts",24015,0,"",typescript,selection_command +581,1998731,"src/extension.ts",23970,0,"",typescript,selection_command +582,2001165,"src/extension.ts",24698,0,"",typescript,selection_command +583,2030634,"src/extension.ts",25097,0,"",typescript,selection_mouse +584,2030635,"src/extension.ts",25096,0,"",typescript,selection_command +585,2034603,"src/extension.ts",23718,0,"",typescript,selection_keyboard +586,2038737,"src/extension.ts",23717,0,"",typescript,selection_command +587,2039906,"src/extension.ts",25068,0,"",typescript,selection_keyboard +588,2040334,"src/extension.ts",23717,0,"",typescript,selection_keyboard +589,2040968,"src/extension.ts",23723,0,"",typescript,selection_command +590,2041225,"src/extension.ts",23730,0,"",typescript,selection_command +591,2041256,"src/extension.ts",23778,0,"",typescript,selection_command +592,2041273,"src/extension.ts",23801,0,"",typescript,selection_command +593,2041326,"src/extension.ts",23812,0,"",typescript,selection_command +594,2041346,"src/extension.ts",23814,0,"",typescript,selection_command +595,2041397,"src/extension.ts",23820,0,"",typescript,selection_command +596,2041407,"src/extension.ts",23863,0,"",typescript,selection_command +597,2041442,"src/extension.ts",23930,0,"",typescript,selection_command +598,2041496,"src/extension.ts",23968,0,"",typescript,selection_command +599,2041517,"src/extension.ts",23975,0,"",typescript,selection_command +600,2041553,"src/extension.ts",24020,0,"",typescript,selection_command 
+601,2041586,"src/extension.ts",24036,0,"",typescript,selection_command +602,2041614,"src/extension.ts",24095,0,"",typescript,selection_command +603,2041648,"src/extension.ts",24102,0,"",typescript,selection_command +604,2041682,"src/extension.ts",24113,0,"",typescript,selection_command +605,2041724,"src/extension.ts",24115,0,"",typescript,selection_command +606,2041752,"src/extension.ts",24121,0,"",typescript,selection_command +607,2041782,"src/extension.ts",24182,0,"",typescript,selection_command +608,2041816,"src/extension.ts",24189,0,"",typescript,selection_command +609,2041851,"src/extension.ts",24253,0,"",typescript,selection_command +610,2041887,"src/extension.ts",24288,0,"",typescript,selection_command +611,2041925,"src/extension.ts",24359,0,"",typescript,selection_command +612,2041950,"src/extension.ts",24392,0,"",typescript,selection_command +613,2041998,"src/extension.ts",24399,0,"",typescript,selection_command +614,2042033,"src/extension.ts",24441,0,"",typescript,selection_command +615,2042056,"src/extension.ts",24463,0,"",typescript,selection_command +616,2042088,"src/extension.ts",24467,0,"",typescript,selection_command +617,2042120,"src/extension.ts",24474,0,"",typescript,selection_command +618,2042146,"src/extension.ts",24494,0,"",typescript,selection_command +619,2042186,"src/extension.ts",24505,0,"",typescript,selection_command +620,2042218,"src/extension.ts",24521,0,"",typescript,selection_command +621,2042252,"src/extension.ts",24523,0,"",typescript,selection_command +622,2042278,"src/extension.ts",24525,0,"",typescript,selection_command +623,2042316,"src/extension.ts",24531,0,"",typescript,selection_command +624,2042351,"src/extension.ts",24601,0,"",typescript,selection_command +625,2042382,"src/extension.ts",24625,0,"",typescript,selection_command +626,2044062,"src/extension.ts",24757,0,"",typescript,selection_command +627,2045531,"src/extension.ts",24750,0,"",typescript,selection_command +628,2046613,"src/extension.ts",24728,0,"",typescript,selection_command +629,2046709,"src/extension.ts",24698,0,"",typescript,selection_command +630,2048503,"src/extension.ts",24728,0,"",typescript,selection_command +631,2048941,"src/extension.ts",24729,0,"",typescript,selection_command +632,2049124,"src/extension.ts",24733,0,"",typescript,selection_command +633,2050942,"src/extension.ts",24703,0,"",typescript,selection_command +634,2051324,"src/extension.ts",24698,30,"",typescript,content +635,2051363,"src/extension.ts",24699,0,"",typescript,selection_command +636,2052481,"src/extension.ts",24727,0,"",typescript,selection_command +637,2053637,"src/extension.ts",24720,0,"",typescript,selection_command +638,2055173,"TERMINAL",0,0,"[10:51:59 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +639,2055263,"TERMINAL",0,0,"[10:52:00 AM] Found 0 errors. 
Watching for file changes.\r\n\r\n",,terminal_output +640,2061551,"src/extension.ts",24727,0,"",typescript,selection_command +641,2062929,"src/extension.ts",24720,0,"",typescript,selection_command +642,2064652,"src/extension.ts",24727,0,"",typescript,selection_command +643,2066663,"src/extension.ts",24720,0,"",typescript,selection_command +644,2067729,"src/extension.ts",24727,0,"",typescript,selection_command +645,2068675,"src/extension.ts",24720,0,"",typescript,selection_command +646,2069873,"src/extension.ts",24727,0,"",typescript,selection_command +647,2072610,"src/extension.ts",24720,0,"",typescript,selection_command +648,2075468,"src/extension.ts",24727,0,"",typescript,selection_command +649,2075745,"src/extension.ts",24756,0,"",typescript,selection_command +650,2075982,"src/extension.ts",24767,0,"",typescript,selection_command +651,2076011,"src/extension.ts",24841,0,"",typescript,selection_command +652,2076066,"src/extension.ts",24887,0,"",typescript,selection_command +653,2076089,"src/extension.ts",24933,0,"",typescript,selection_command +654,2076134,"src/extension.ts",24985,0,"",typescript,selection_command +655,2076157,"src/extension.ts",25025,0,"",typescript,selection_command +656,2076190,"src/extension.ts",25033,0,"",typescript,selection_command +657,2076229,"src/extension.ts",25068,0,"",typescript,selection_command +658,2076256,"src/extension.ts",25080,0,"",typescript,selection_command +659,2076302,"src/extension.ts",25101,0,"",typescript,selection_command +660,2076342,"src/extension.ts",25105,0,"",typescript,selection_command +661,2076360,"src/extension.ts",25108,0,"",typescript,selection_command +662,2076380,"src/extension.ts",25141,0,"",typescript,selection_command +663,2076640,"src/extension.ts",25171,0,"",typescript,selection_command +664,2076975,"src/extension.ts",25141,0,"",typescript,selection_command +665,2077307,"src/extension.ts",25108,0,"",typescript,selection_command +666,2077308,"src/extension.ts",25105,0,"",typescript,selection_command +667,2077308,"src/extension.ts",25101,0,"",typescript,selection_command +668,2077308,"src/extension.ts",25080,0,"",typescript,selection_command +669,2077367,"src/extension.ts",25068,0,"",typescript,selection_command +670,2077402,"src/extension.ts",25033,0,"",typescript,selection_command +671,2077423,"src/extension.ts",25025,0,"",typescript,selection_command +672,2077453,"src/extension.ts",24985,0,"",typescript,selection_command +673,2077481,"src/extension.ts",24933,0,"",typescript,selection_command +674,2077522,"src/extension.ts",24887,0,"",typescript,selection_command +675,2077638,"src/extension.ts",24841,0,"",typescript,selection_command +676,2077638,"src/extension.ts",24767,0,"",typescript,selection_command +677,2077638,"src/extension.ts",24756,0,"",typescript,selection_command +678,2077902,"src/extension.ts",24767,0,"",typescript,selection_command +679,2079502,"src/extension.ts",24769,0,"",typescript,selection_command +680,2081840,"src/extension.ts",24767,73,"\t\t// If not directly parseable, try to extract first JSON object or array",typescript,selection_command +681,2082071,"src/extension.ts",24767,119,"\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);",typescript,selection_command +682,2082214,"src/extension.ts",24767,165,"\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);\n\t\tconst arrMatch = text.match(/\[[\s\S]*\]/);",typescript,selection_command 
+683,2082385,"src/extension.ts",24767,217,"\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);\n\t\tconst arrMatch = text.match(/\[[\s\S]*\]/);\n\t\tconst candidate = objMatch?.[0] ?? arrMatch?.[0];",typescript,selection_command +684,2082470,"src/extension.ts",24767,257,"\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);\n\t\tconst arrMatch = text.match(/\[[\s\S]*\]/);\n\t\tconst candidate = objMatch?.[0] ?? arrMatch?.[0];\n\t\tif (!candidate) { return undefined; }",typescript,selection_command +685,2082627,"src/extension.ts",24767,265,"\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);\n\t\tconst arrMatch = text.match(/\[[\s\S]*\]/);\n\t\tconst candidate = objMatch?.[0] ?? arrMatch?.[0];\n\t\tif (!candidate) { return undefined; }\n\t\ttry {",typescript,selection_command +686,2082758,"src/extension.ts",24767,300,"\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);\n\t\tconst arrMatch = text.match(/\[[\s\S]*\]/);\n\t\tconst candidate = objMatch?.[0] ?? arrMatch?.[0];\n\t\tif (!candidate) { return undefined; }\n\t\ttry {\n\t\t\tparsed = JSON.parse(candidate);",typescript,selection_command +687,2082896,"src/extension.ts",24767,312,"\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);\n\t\tconst arrMatch = text.match(/\[[\s\S]*\]/);\n\t\tconst candidate = objMatch?.[0] ?? arrMatch?.[0];\n\t\tif (!candidate) { return undefined; }\n\t\ttry {\n\t\t\tparsed = JSON.parse(candidate);\n\t\t} catch {",typescript,selection_command +688,2083039,"src/extension.ts",24767,333,"\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);\n\t\tconst arrMatch = text.match(/\[[\s\S]*\]/);\n\t\tconst candidate = objMatch?.[0] ?? arrMatch?.[0];\n\t\tif (!candidate) { return undefined; }\n\t\ttry {\n\t\t\tparsed = JSON.parse(candidate);\n\t\t} catch {\n\t\t\treturn undefined;",typescript,selection_command +689,2083233,"src/extension.ts",24767,337,"\t\t// If not directly parseable, try to extract first JSON object or array\n\t\tconst objMatch = text.match(/\{[\s\S]*\}/);\n\t\tconst arrMatch = text.match(/\[[\s\S]*\]/);\n\t\tconst candidate = objMatch?.[0] ?? 
arrMatch?.[0];\n\t\tif (!candidate) { return undefined; }\n\t\ttry {\n\t\t\tparsed = JSON.parse(candidate);\n\t\t} catch {\n\t\t\treturn undefined;\n\t\t}",typescript,selection_command +690,2083376,"src/extension.ts",24767,337,"\t\t",typescript,content +691,2083433,"src/extension.ts",24769,0,"",typescript,selection_command +692,2084541,"src/extension.ts",24768,0,"",typescript,selection_command +693,2085219,"src/extension.ts",24757,0,"",typescript,selection_command +694,2085281,"src/extension.ts",24728,0,"",typescript,selection_command +695,2085931,"src/extension.ts",24757,0,"",typescript,selection_command +696,2086136,"src/extension.ts",24768,0,"",typescript,selection_command +697,2086699,"src/extension.ts",24769,0,"",typescript,selection_command +698,2088927,"src/extension.ts",24758,0,"",typescript,selection_command +699,2089346,"src/extension.ts",24757,0,"",typescript,selection_command +700,2089707,"src/extension.ts",24763,0,"",typescript,selection_command +701,2090259,"src/extension.ts",24764,0,"",typescript,selection_command +702,2090329,"src/extension.ts",24764,0," ",typescript,content +703,2090329,"src/extension.ts",24765,0,"",typescript,selection_keyboard +704,2090564,"src/extension.ts",24765,0,"()",typescript,content +705,2090564,"src/extension.ts",24766,0,"",typescript,selection_keyboard +706,2091047,"src/extension.ts",24766,0,"e",typescript,content +707,2091048,"src/extension.ts",24767,0,"",typescript,selection_keyboard +708,2091116,"src/extension.ts",24767,0,"r",typescript,content +709,2091116,"src/extension.ts",24768,0,"",typescript,selection_keyboard +710,2091252,"src/extension.ts",24768,0,"r",typescript,content +711,2091252,"src/extension.ts",24769,0,"",typescript,selection_keyboard +712,2091612,"src/extension.ts",24768,0,"",typescript,selection_command +713,2092613,"src/extension.ts",24774,0,"",typescript,selection_command +714,2094108,"src/extension.ts",24775,0,"",typescript,selection_command +715,2094981,"src/extension.ts",24775,0,"r",typescript,content +716,2094981,"src/extension.ts",24776,0,"",typescript,selection_keyboard +717,2095023,"src/extension.ts",24776,0,"e",typescript,content +718,2095023,"src/extension.ts",24777,0,"",typescript,selection_keyboard +719,2095168,"src/extension.ts",24777,0,"t",typescript,content +720,2095168,"src/extension.ts",24778,0,"",typescript,selection_keyboard +721,2095339,"src/extension.ts",24778,0,"u",typescript,content +722,2095339,"src/extension.ts",24779,0,"",typescript,selection_keyboard +723,2095391,"src/extension.ts",24779,0,"r",typescript,content +724,2095392,"src/extension.ts",24780,0,"",typescript,selection_keyboard +725,2095465,"src/extension.ts",24780,0,"n",typescript,content +726,2095465,"src/extension.ts",24781,0,"",typescript,selection_keyboard +727,2095638,"src/extension.ts",24781,0," ",typescript,content +728,2095638,"src/extension.ts",24782,0,"",typescript,selection_keyboard +729,2095701,"src/extension.ts",24782,0,"u",typescript,content +730,2095701,"src/extension.ts",24783,0,"",typescript,selection_keyboard +731,2095846,"src/extension.ts",24783,0,"n",typescript,content +732,2095847,"src/extension.ts",24784,0,"",typescript,selection_keyboard +733,2096007,"src/extension.ts",24784,0,"d",typescript,content +734,2096007,"src/extension.ts",24785,0,"",typescript,selection_keyboard +735,2096100,"src/extension.ts",24785,0,"e",typescript,content +736,2096101,"src/extension.ts",24786,0,"",typescript,selection_keyboard +737,2096369,"src/extension.ts",24786,0,"f",typescript,content 
+738,2096369,"src/extension.ts",24787,0,"",typescript,selection_keyboard +739,2096389,"src/extension.ts",24787,0,"i",typescript,content +740,2096389,"src/extension.ts",24788,0,"",typescript,selection_keyboard +741,2096479,"src/extension.ts",24788,0,"n",typescript,content +742,2096479,"src/extension.ts",24789,0,"",typescript,selection_keyboard +743,2096490,"src/extension.ts",24789,0,"e",typescript,content +744,2096490,"src/extension.ts",24790,0,"",typescript,selection_keyboard +745,2096752,"src/extension.ts",24790,0,"d",typescript,content +746,2096752,"src/extension.ts",24791,0,"",typescript,selection_keyboard +747,2097050,"src/extension.ts",24782,9,"undefined",typescript,content +748,2097052,"src/extension.ts",24791,0,";",typescript,content +749,2097052,"src/extension.ts",24792,0,"",typescript,selection_keyboard +750,2097320,"src/extension.ts",24791,0,"",typescript,selection_command +751,2098371,"TERMINAL",0,0,"[10:52:43 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +752,2098478,"TERMINAL",0,0,"[10:52:43 AM] Found 0 errors. Watching for file changes.\r\n\r\n",,terminal_output +753,2098564,"src/extension.ts",24796,0,"",typescript,selection_command +754,2099140,"src/extension.ts",24793,0,"",typescript,selection_command +755,2099612,"src/extension.ts",24773,0,"",typescript,selection_command +756,2099938,"src/extension.ts",24756,0,"",typescript,selection_command +757,2100279,"src/extension.ts",24727,0,"",typescript,selection_command +758,2100483,"src/extension.ts",24720,0,"",typescript,selection_command +759,2100695,"src/extension.ts",24698,0,"",typescript,selection_command +760,2107922,"src/extension.ts",24726,0,"",typescript,selection_mouse +761,2107923,"src/extension.ts",24725,0,"",typescript,selection_command +762,2108527,"src/extension.ts",24720,0,"",typescript,selection_command +763,2108916,"src/extension.ts",24727,0,"",typescript,selection_command +764,2109160,"src/extension.ts",24756,0,"",typescript,selection_command +765,2109321,"src/extension.ts",24727,0,"",typescript,selection_command +766,2109473,"src/extension.ts",24720,0,"",typescript,selection_command +767,2948769,"src/extension.ts",24796,0,"",typescript,selection_command +768,2949997,"src/extension.ts",24793,0,"",typescript,selection_command +769,2950776,"src/extension.ts",24796,0,"",typescript,selection_command +770,2952066,"src/extension.ts",24793,0,"",typescript,selection_command +771,2953152,"src/extension.ts",24796,0,"",typescript,selection_command +772,2954718,"src/extension.ts",24829,0,"",typescript,selection_command +773,2955003,"src/extension.ts",24796,0,"",typescript,selection_command +774,2955209,"src/extension.ts",24793,0,"",typescript,selection_command +775,2957576,"src/extension.ts",24796,0,"",typescript,selection_command +776,2957721,"src/extension.ts",24793,0,"",typescript,selection_command +777,2967307,"src/extension.ts",24796,0,"",typescript,selection_command +778,2970929,"src/extension.ts",24793,0,"",typescript,selection_command +779,2971159,"src/extension.ts",24796,0,"",typescript,selection_command +780,2971803,"src/extension.ts",24829,0,"",typescript,selection_command +781,2971953,"src/extension.ts",24859,0,"",typescript,selection_command +782,2972217,"src/extension.ts",24881,0,"",typescript,selection_command +783,2972234,"src/extension.ts",24884,0,"",typescript,selection_command +784,2972265,"src/extension.ts",24974,0,"",typescript,selection_command +785,2972373,"src/extension.ts",24884,0,"",typescript,selection_command 
+786,2972568,"src/extension.ts",24881,0,"",typescript,selection_command +787,2972746,"src/extension.ts",24859,0,"",typescript,selection_command +788,2973361,"src/extension.ts",24861,0,"",typescript,selection_command +789,2975089,"src/extension.ts",24831,0,"",typescript,selection_command +790,2975298,"src/extension.ts",24830,0,"",typescript,selection_command +791,2976068,"src/extension.ts",24828,0,"\n\t",typescript,content +792,2976398,"src/extension.ts",24830,0,"a",typescript,content +793,2976399,"src/extension.ts",24831,0,"",typescript,selection_keyboard +794,2976495,"src/extension.ts",24831,0,"s",typescript,content +795,2976496,"src/extension.ts",24832,0,"",typescript,selection_keyboard +796,2976663,"src/extension.ts",24832,0,"s",typescript,content +797,2976664,"src/extension.ts",24833,0,"",typescript,selection_keyboard +798,2976818,"src/extension.ts",24833,0,"e",typescript,content +799,2976818,"src/extension.ts",24834,0,"",typescript,selection_keyboard +800,2976898,"src/extension.ts",24834,0,"r",typescript,content +801,2976898,"src/extension.ts",24835,0,"",typescript,selection_keyboard +802,2977051,"src/extension.ts",24835,0,"t",typescript,content +803,2977051,"src/extension.ts",24836,0,"",typescript,selection_keyboard +804,2977261,"src/extension.ts",24836,0," ",typescript,content +805,2977261,"src/extension.ts",24837,0,"",typescript,selection_keyboard +806,2996597,"src/extension.ts",24830,7,"",typescript,content +807,2996936,"src/extension.ts",24830,0,"a",typescript,content +808,2996937,"src/extension.ts",24831,0,"",typescript,selection_keyboard +809,2996989,"src/extension.ts",24831,0,"s",typescript,content +810,2996989,"src/extension.ts",24832,0,"",typescript,selection_keyboard +811,2997120,"src/extension.ts",24832,0,"s",typescript,content +812,2997121,"src/extension.ts",24833,0,"",typescript,selection_keyboard +813,2997251,"src/extension.ts",24833,0,"e",typescript,content +814,2997252,"src/extension.ts",24834,0,"",typescript,selection_keyboard +815,2997293,"src/extension.ts",24834,0,"r",typescript,content +816,2997294,"src/extension.ts",24835,0,"",typescript,selection_keyboard +817,2997463,"src/extension.ts",24835,0,"t",typescript,content +818,2997463,"src/extension.ts",24836,0,"",typescript,selection_keyboard +819,2997669,"src/extension.ts",24836,0," ",typescript,content +820,2997670,"src/extension.ts",24837,0,"",typescript,selection_keyboard +821,3005124,"src/extension.ts",24837,0,"()",typescript,content +822,3005124,"src/extension.ts",24838,0,"",typescript,selection_keyboard +823,3005304,"src/extension.ts",24838,1,")",typescript,content +824,3005304,"src/extension.ts",24839,0,"",typescript,selection_keyboard +825,3005795,"src/extension.ts",24838,0,"",typescript,selection_command +826,3006130,"src/extension.ts",24838,0,"A",typescript,content +827,3006131,"src/extension.ts",24839,0,"",typescript,selection_keyboard +828,3006269,"src/extension.ts",24839,0,"r",typescript,content +829,3006270,"src/extension.ts",24840,0,"",typescript,selection_keyboard +830,3006426,"src/extension.ts",24840,0,"r",typescript,content +831,3006427,"src/extension.ts",24841,0,"",typescript,selection_keyboard +832,3006629,"src/extension.ts",24841,0,"a",typescript,content +833,3006629,"src/extension.ts",24842,0,"",typescript,selection_keyboard +834,3006690,"src/extension.ts",24842,0,"y",typescript,content +835,3006690,"src/extension.ts",24843,0,"",typescript,selection_keyboard +836,3007523,"src/extension.ts",24838,5,"Array",typescript,content +837,3007524,"src/extension.ts",24843,0,".",typescript,content 
+838,3007524,"src/extension.ts",24844,0,"",typescript,selection_keyboard +839,3011037,"src/extension.ts",97,0,"import assert from 'assert';\n",typescript,content +840,3013786,"src/extension.ts",24873,0,"i",typescript,content +841,3013786,"src/extension.ts",24874,0,"",typescript,selection_keyboard +842,3013933,"src/extension.ts",24874,0,"s",typescript,content +843,3013933,"src/extension.ts",24875,0,"",typescript,selection_keyboard +844,3014420,"src/extension.ts",24875,0,"A",typescript,content +845,3014420,"src/extension.ts",24876,0,"",typescript,selection_keyboard +846,3014520,"src/extension.ts",24876,0,"r",typescript,content +847,3014520,"src/extension.ts",24877,0,"",typescript,selection_keyboard +848,3014674,"src/extension.ts",24877,0,"r",typescript,content +849,3014674,"src/extension.ts",24878,0,"",typescript,selection_keyboard +850,3014843,"src/extension.ts",24878,0,"a",typescript,content +851,3014843,"src/extension.ts",24879,0,"",typescript,selection_keyboard +852,3014877,"src/extension.ts",24879,0,"y",typescript,content +853,3014878,"src/extension.ts",24880,0,"",typescript,selection_keyboard +854,3015323,"src/extension.ts",24873,7,"isArray",typescript,content +855,3015323,"src/extension.ts",24880,0,"()",typescript,content +856,3015324,"src/extension.ts",24881,0,"",typescript,selection_keyboard +857,3015559,"src/extension.ts",24881,0,"p",typescript,content +858,3015559,"src/extension.ts",24882,0,"",typescript,selection_keyboard +859,3015811,"src/extension.ts",24882,0,"a",typescript,content +860,3015811,"src/extension.ts",24883,0,"",typescript,selection_keyboard +861,3015870,"src/extension.ts",24883,0,"r",typescript,content +862,3015870,"src/extension.ts",24884,0,"",typescript,selection_keyboard +863,3016079,"src/extension.ts",24884,0,"s",typescript,content +864,3016079,"src/extension.ts",24885,0,"",typescript,selection_keyboard +865,3016124,"src/extension.ts",24885,0,"e",typescript,content +866,3016124,"src/extension.ts",24886,0,"",typescript,selection_keyboard +867,3016389,"src/extension.ts",24886,0,"d",typescript,content +868,3016389,"src/extension.ts",24887,0,"",typescript,selection_keyboard +869,3016608,"src/extension.ts",24887,1,")",typescript,content +870,3016609,"src/extension.ts",24888,0,"",typescript,selection_keyboard +871,3016846,"src/extension.ts",24887,0,"",typescript,selection_command +872,3017801,"src/extension.ts",24888,0,"",typescript,selection_command +873,3018281,"src/extension.ts",24888,1,"",typescript,content +874,3018318,"src/extension.ts",24887,0,"",typescript,selection_command +875,3018732,"src/extension.ts",24917,0,"",typescript,selection_command +876,3019216,"src/extension.ts",24887,0,"",typescript,selection_command +877,3019523,"src/extension.ts",24917,0,"",typescript,selection_command +878,3019701,"src/extension.ts",24939,0,"",typescript,selection_command +879,3020588,"src/extension.ts",24917,0,"",typescript,selection_command +880,3020744,"src/extension.ts",24887,0,"",typescript,selection_command +881,3047906,"src/extension.ts",24888,0,"",typescript,selection_command +882,3048229,"src/extension.ts",24888,0,";",typescript,content +883,3048229,"src/extension.ts",24889,0,"",typescript,selection_keyboard +884,3052421,"src/extension.ts",24888,0,", 'Parsed is not an array')",typescript,content +885,3052423,"src/extension.ts",24915,0,"",typescript,selection_command +886,3052835,"src/extension.ts",24914,0,"",typescript,selection_command +887,3054204,"src/extension.ts",24913,0,"",typescript,selection_command 
+888,3054379,"src/extension.ts",24908,0,"",typescript,selection_command +889,3054526,"src/extension.ts",24905,0,"",typescript,selection_command +890,3054691,"src/extension.ts",24901,0,"",typescript,selection_command +891,3054877,"src/extension.ts",24898,0,"",typescript,selection_command +892,3055185,"src/extension.ts",24891,0,"",typescript,selection_command +893,3055543,"src/extension.ts",24891,1,"P",typescript,selection_command +894,3055583,"src/extension.ts",24891,6,"Parsed",typescript,selection_command +895,3055793,"src/extension.ts",24891,9,"Parsed is",typescript,selection_command +896,3055979,"src/extension.ts",24891,13,"Parsed is not",typescript,selection_command +897,3056142,"src/extension.ts",24891,16,"Parsed is not an",typescript,selection_command +898,3056588,"src/extension.ts",24891,22,"Parsed is not an array",typescript,selection_command +899,3057058,"src/extension.ts",24858,58,"",typescript,content +900,3057834,"src/extension.ts",24858,0,"z",typescript,content +901,3057835,"src/extension.ts",24859,0,"",typescript,selection_keyboard +902,3058636,"src/extension.ts",24858,0,"",typescript,selection_command +903,3059471,"src/extension.ts",24858,1,"\tassert (Array.isArray(parsed), 'Parsed is not an array');",typescript,content +904,3059502,"src/extension.ts",24891,0,"",typescript,selection_command +905,3060208,"src/extension.ts",24891,1,"P",typescript,selection_command +906,3060379,"src/extension.ts",24891,6,"Parsed",typescript,selection_command +907,3060528,"src/extension.ts",24891,9,"Parsed is",typescript,selection_command +908,3060771,"src/extension.ts",24891,13,"Parsed is not",typescript,selection_command +909,3060978,"src/extension.ts",24891,16,"Parsed is not an",typescript,selection_command +910,3061325,"src/extension.ts",24891,22,"Parsed is not an array",typescript,selection_command +911,3061608,"src/extension.ts",24891,22,"",typescript,content +912,3062200,"src/extension.ts",24891,0,"M",typescript,content +913,3062201,"src/extension.ts",24892,0,"",typescript,selection_keyboard +914,3062388,"src/extension.ts",24892,0,"o",typescript,content +915,3062388,"src/extension.ts",24893,0,"",typescript,selection_keyboard +916,3062531,"src/extension.ts",24893,0,"d",typescript,content +917,3062531,"src/extension.ts",24894,0,"",typescript,selection_keyboard +918,3062669,"src/extension.ts",24894,0,"e",typescript,content +919,3062670,"src/extension.ts",24895,0,"",typescript,selection_keyboard +920,3062840,"src/extension.ts",24895,0,"l",typescript,content +921,3062840,"src/extension.ts",24896,0,"",typescript,selection_keyboard +922,3062968,"src/extension.ts",24896,0," ",typescript,content +923,3062968,"src/extension.ts",24897,0,"",typescript,selection_keyboard +924,3063008,"src/extension.ts",24897,0,"m",typescript,content +925,3063009,"src/extension.ts",24898,0,"",typescript,selection_keyboard +926,3063153,"src/extension.ts",24898,0,"u",typescript,content +927,3063154,"src/extension.ts",24899,0,"",typescript,selection_keyboard +928,3063313,"src/extension.ts",24899,0,"s",typescript,content +929,3063313,"src/extension.ts",24900,0,"",typescript,selection_keyboard +930,3063352,"src/extension.ts",24900,0,"t",typescript,content +931,3063352,"src/extension.ts",24901,0,"",typescript,selection_keyboard +932,3063458,"src/extension.ts",24901,0," ",typescript,content +933,3063459,"src/extension.ts",24902,0,"",typescript,selection_keyboard +934,3063500,"src/extension.ts",24902,0,"o",typescript,content +935,3063501,"src/extension.ts",24903,0,"",typescript,selection_keyboard 
+936,3063547,"src/extension.ts",24903,0,"u",typescript,content +937,3063547,"src/extension.ts",24904,0,"",typescript,selection_keyboard +938,3063638,"src/extension.ts",24904,0,"t",typescript,content +939,3063638,"src/extension.ts",24905,0,"",typescript,selection_keyboard +940,3063788,"src/extension.ts",24905,0,"p",typescript,content +941,3063788,"src/extension.ts",24906,0,"",typescript,selection_keyboard +942,3063826,"src/extension.ts",24906,0,"u",typescript,content +943,3063827,"src/extension.ts",24907,0,"",typescript,selection_keyboard +944,3063895,"src/extension.ts",24907,0,"t",typescript,content +945,3063895,"src/extension.ts",24908,0,"",typescript,selection_keyboard +946,3064105,"src/extension.ts",24908,0," ",typescript,content +947,3064105,"src/extension.ts",24909,0,"",typescript,selection_keyboard +948,3064306,"src/extension.ts",24909,0,"a",typescript,content +949,3064306,"src/extension.ts",24910,0,"",typescript,selection_keyboard +950,3064358,"src/extension.ts",24910,0,"n",typescript,content +951,3064359,"src/extension.ts",24911,0,"",typescript,selection_keyboard +952,3064565,"src/extension.ts",24911,0," ",typescript,content +953,3064565,"src/extension.ts",24912,0,"",typescript,selection_keyboard +954,3064728,"src/extension.ts",24912,0,"a",typescript,content +955,3064728,"src/extension.ts",24913,0,"",typescript,selection_keyboard +956,3064757,"src/extension.ts",24913,0,"r",typescript,content +957,3064758,"src/extension.ts",24914,0,"",typescript,selection_keyboard +958,3064838,"src/extension.ts",24914,0,"r",typescript,content +959,3064838,"src/extension.ts",24915,0,"",typescript,selection_keyboard +960,3065028,"src/extension.ts",24915,0,"a",typescript,content +961,3065028,"src/extension.ts",24916,0,"",typescript,selection_keyboard +962,3065053,"src/extension.ts",24916,0,"y",typescript,content +963,3065053,"src/extension.ts",24917,0,"",typescript,selection_keyboard +964,3065193,"src/extension.ts",24916,0,"",typescript,selection_command +965,3066422,"src/extension.ts",24858,0,"",typescript,selection_command +966,3066501,"TERMINAL",0,0,"[11:08:51 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +967,3066618,"TERMINAL",0,0,"[11:08:51 AM] Found 0 errors. 
Watching for file changes.\r\n\r\n",,terminal_output +968,3066763,"src/extension.ts",24921,0,"",typescript,selection_command +969,3069033,"src/extension.ts",24922,0,"",typescript,selection_command +970,3069763,"src/extension.ts",24921,29,"\tif (Array.isArray(parsed)) {",typescript,selection_command +971,3070091,"src/extension.ts",24921,51,"\tif (Array.isArray(parsed)) {\n\t\tparsed = parsed[0];",typescript,selection_command +972,3070253,"src/extension.ts",24921,54,"\tif (Array.isArray(parsed)) {\n\t\tparsed = parsed[0];\n\t}",typescript,selection_command +973,3071948,"src/extension.ts",24921,55,"",typescript,content +974,3071987,"src/extension.ts",24922,0,"",typescript,selection_command +975,3074594,"src/extension.ts",24926,0,"Array.isArray(parsed)) {\n\t\tparsed = parsed[0];\n\t}\n\tif (",typescript,content +976,3075879,"src/extension.ts",24921,29,"\tif (Array.isArray(parsed)) {",typescript,selection_command +977,3076118,"src/extension.ts",24921,51,"\tif (Array.isArray(parsed)) {\n\t\tparsed = parsed[0];",typescript,selection_command +978,3076278,"src/extension.ts",24921,54,"\tif (Array.isArray(parsed)) {\n\t\tparsed = parsed[0];\n\t}",typescript,selection_command +979,3076630,"src/extension.ts",24921,55,"",typescript,content +980,3076678,"src/extension.ts",24922,0,"",typescript,selection_command +981,3126699,"src/extension.ts",24859,0,"",typescript,selection_command +982,3126788,"src/extension.ts",24826,0,"",typescript,selection_command +983,3127294,"src/extension.ts",24825,33,"",typescript,content +984,3127325,"src/extension.ts",24826,0,"",typescript,selection_command +985,3129551,"src/extension.ts",24887,0,"\n\t",typescript,content +986,3129981,"src/extension.ts",24889,0,"a",typescript,content +987,3129981,"src/extension.ts",24890,0,"",typescript,selection_keyboard +988,3130019,"src/extension.ts",24890,0,"s",typescript,content +989,3130019,"src/extension.ts",24891,0,"",typescript,selection_keyboard +990,3130171,"src/extension.ts",24891,0,"s",typescript,content +991,3130171,"src/extension.ts",24892,0,"",typescript,selection_keyboard +992,3130253,"src/extension.ts",24892,0,"e",typescript,content +993,3130253,"src/extension.ts",24893,0,"",typescript,selection_keyboard +994,3130353,"src/extension.ts",24893,0,"r",typescript,content +995,3130353,"src/extension.ts",24894,0,"",typescript,selection_keyboard +996,3130501,"src/extension.ts",24894,0,"t",typescript,content +997,3130501,"src/extension.ts",24895,0,"",typescript,selection_keyboard +998,3130690,"src/extension.ts",24895,0," ",typescript,content +999,3130691,"src/extension.ts",24896,0,"",typescript,selection_keyboard +1000,3131363,"src/extension.ts",24896,0,"()",typescript,content +1001,3131363,"src/extension.ts",24897,0,"",typescript,selection_keyboard +1002,3131608,"src/extension.ts",24897,0,"p",typescript,content +1003,3131608,"src/extension.ts",24898,0,"",typescript,selection_keyboard +1004,3131775,"src/extension.ts",24898,0,"a",typescript,content +1005,3131775,"src/extension.ts",24899,0,"",typescript,selection_keyboard +1006,3131919,"src/extension.ts",24899,0,"r",typescript,content +1007,3131920,"src/extension.ts",24900,0,"",typescript,selection_keyboard +1008,3132155,"src/extension.ts",24900,0,"s",typescript,content +1009,3132156,"src/extension.ts",24901,0,"",typescript,selection_keyboard +1010,3132221,"src/extension.ts",24901,0,"e",typescript,content +1011,3132221,"src/extension.ts",24902,0,"",typescript,selection_keyboard +1012,3132801,"src/extension.ts",24897,5,"parse",typescript,content 
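Record 839 above adds `import assert from 'assert';` and records 854-963 assemble an assert with the message "Model must output an array", replacing the silent guard-and-return style; the 11:08:51 compile finds 0 errors. A self-contained sketch of the validation style, with a hypothetical wrapper function:

import assert from 'assert';

// Fail loudly when the model reply is not a JSON array. Node's assert is an
// assertion function, so TypeScript narrows `parsed` to an array afterwards.
function requireArray(parsed: unknown): unknown[] {
	assert(Array.isArray(parsed), 'Model must output an array');
	return parsed;
}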
+1013,3132863,"src/extension.ts",126,0,"import { parse } from 'path';\n",typescript,content +1014,3133491,"src/extension.ts",24932,0,"\n\t\t\n\t",typescript,content +1015,3134420,"src/extension.ts",24934,1,"",typescript,content +1016,3134728,"src/extension.ts",24933,1,"",typescript,content +1017,3135181,"src/extension.ts",24932,1,"",typescript,content +1018,3135898,"src/extension.ts",24932,0,"d",typescript,content +1019,3135899,"src/extension.ts",24933,0,"",typescript,selection_keyboard +1020,3137047,"src/extension.ts",24932,0,"",typescript,selection_command +1021,3137431,"src/extension.ts",24935,0,"",typescript,selection_command +1022,3138084,"src/extension.ts",24934,1,"",typescript,content +1023,3138243,"src/extension.ts",24933,1,"",typescript,content +1024,3140093,"src/extension.ts",24932,0,"",typescript,selection_command +1025,3140796,"src/extension.ts",24934,0,"",typescript,selection_command +1026,3143014,"src/extension.ts",24934,0,",",typescript,content +1027,3143014,"src/extension.ts",24935,0,"",typescript,selection_keyboard +1028,3143167,"src/extension.ts",24935,0," ",typescript,content +1029,3143167,"src/extension.ts",24936,0,"",typescript,selection_keyboard +1030,3145684,"src/extension.ts",24936,0,"''",typescript,content +1031,3145684,"src/extension.ts",24937,0,"",typescript,selection_keyboard +1032,3153367,"src/extension.ts",24936,0,"",typescript,selection_command +1033,3153988,"src/extension.ts",24873,0,"",typescript,selection_command +1034,3154125,"src/extension.ts",24877,0,"",typescript,selection_command +1035,3154292,"src/extension.ts",24878,0,"",typescript,selection_command +1036,3154567,"src/extension.ts",24884,0,"",typescript,selection_command +1037,3154596,"src/extension.ts",24887,0,"",typescript,selection_command +1038,3154623,"src/extension.ts",24888,0,"",typescript,selection_command +1039,3154654,"src/extension.ts",24894,0,"",typescript,selection_command +1040,3154820,"src/extension.ts",24899,0,"",typescript,selection_command +1041,3155032,"src/extension.ts",24906,0,"",typescript,selection_command +1042,3155198,"src/extension.ts",24909,0,"",typescript,selection_command +1043,3155848,"src/extension.ts",24906,0,"",typescript,selection_command +1044,3156352,"src/extension.ts",24899,0,"",typescript,selection_command +1045,3156828,"src/extension.ts",24899,0,"n",typescript,content +1046,3156828,"src/extension.ts",24900,0,"",typescript,selection_keyboard +1047,3156899,"src/extension.ts",24900,0,"o",typescript,content +1048,3156900,"src/extension.ts",24901,0,"",typescript,selection_keyboard +1049,3156992,"src/extension.ts",24901,0,"t",typescript,content +1050,3156992,"src/extension.ts",24902,0,"",typescript,selection_keyboard +1051,3157127,"src/extension.ts",24902,0," ",typescript,content +1052,3157127,"src/extension.ts",24903,0,"",typescript,selection_keyboard +1053,3157269,"src/extension.ts",24902,0,"",typescript,selection_command +1054,3157955,"src/extension.ts",24941,0,"",typescript,selection_command +1055,3158523,"src/extension.ts",24902,0,"",typescript,selection_command +1056,3159172,"src/extension.ts",24941,0,"",typescript,selection_command +1057,3160106,"src/extension.ts",24941,0,"m",typescript,content +1058,3160107,"src/extension.ts",24942,0,"",typescript,selection_keyboard +1059,3160188,"src/extension.ts",24942,0,"o",typescript,content +1060,3160189,"src/extension.ts",24943,0,"",typescript,selection_keyboard +1061,3160316,"src/extension.ts",24943,0,"d",typescript,content +1062,3160316,"src/extension.ts",24944,0,"",typescript,selection_keyboard 
+1063,3160573,"src/extension.ts",24944,0,"l",typescript,content +1064,3160573,"src/extension.ts",24945,0,"",typescript,selection_keyboard +1065,3160813,"src/extension.ts",24944,1,"",typescript,content +1066,3160996,"src/extension.ts",24943,1,"",typescript,content +1067,3161135,"src/extension.ts",24942,1,"",typescript,content +1068,3161516,"src/extension.ts",24942,0,"M",typescript,content +1069,3161517,"src/extension.ts",24943,0,"",typescript,selection_keyboard +1070,3161891,"src/extension.ts",24942,1,"",typescript,content +1071,3162036,"src/extension.ts",24941,1,"",typescript,content +1072,3162204,"src/extension.ts",24941,0,"M",typescript,content +1073,3162204,"src/extension.ts",24942,0,"",typescript,selection_keyboard +1074,3162338,"src/extension.ts",24942,0,"o",typescript,content +1075,3162339,"src/extension.ts",24943,0,"",typescript,selection_keyboard +1076,3162464,"src/extension.ts",24943,0,"d",typescript,content +1077,3162464,"src/extension.ts",24944,0,"",typescript,selection_keyboard +1078,3162530,"src/extension.ts",24944,0,"e",typescript,content +1079,3162530,"src/extension.ts",24945,0,"",typescript,selection_keyboard +1080,3162659,"src/extension.ts",24945,0,"l",typescript,content +1081,3162660,"src/extension.ts",24946,0,"",typescript,selection_keyboard +1082,3162800,"src/extension.ts",24946,0," ",typescript,content +1083,3162801,"src/extension.ts",24947,0,"",typescript,selection_keyboard +1084,3168290,"src/extension.ts",24947,0,"m",typescript,content +1085,3168291,"src/extension.ts",24948,0,"",typescript,selection_keyboard +1086,3168448,"src/extension.ts",24948,0,"u",typescript,content +1087,3168448,"src/extension.ts",24949,0,"",typescript,selection_keyboard +1088,3168683,"src/extension.ts",24949,0,"s",typescript,content +1089,3168684,"src/extension.ts",24950,0,"",typescript,selection_keyboard +1090,3168711,"src/extension.ts",24950,0,"t",typescript,content +1091,3168711,"src/extension.ts",24951,0,"",typescript,selection_keyboard +1092,3168843,"src/extension.ts",24951,0," ",typescript,content +1093,3168843,"src/extension.ts",24952,0,"",typescript,selection_keyboard +1094,3168865,"src/extension.ts",24952,0,"o",typescript,content +1095,3168866,"src/extension.ts",24953,0,"",typescript,selection_keyboard +1096,3168915,"src/extension.ts",24953,0,"u",typescript,content +1097,3168915,"src/extension.ts",24954,0,"",typescript,selection_keyboard +1098,3169050,"src/extension.ts",24954,0,"t",typescript,content +1099,3169050,"src/extension.ts",24955,0,"",typescript,selection_keyboard +1100,3169162,"src/extension.ts",24955,0,"p",typescript,content +1101,3169162,"src/extension.ts",24956,0,"",typescript,selection_keyboard +1102,3169224,"src/extension.ts",24956,0,"u",typescript,content +1103,3169224,"src/extension.ts",24957,0,"",typescript,selection_keyboard +1104,3169305,"src/extension.ts",24957,0,"t",typescript,content +1105,3169305,"src/extension.ts",24958,0,"",typescript,selection_keyboard +1106,3169488,"src/extension.ts",24958,0," ",typescript,content +1107,3169488,"src/extension.ts",24959,0,"",typescript,selection_keyboard +1108,3170948,"src/extension.ts",24959,0,"a",typescript,content +1109,3170949,"src/extension.ts",24960,0,"",typescript,selection_keyboard +1110,3171039,"src/extension.ts",24960,0," ",typescript,content +1111,3171039,"src/extension.ts",24961,0,"",typescript,selection_keyboard +1112,3171313,"src/extension.ts",24961,0,"v",typescript,content +1113,3171313,"src/extension.ts",24962,0,"",typescript,selection_keyboard +1114,3171450,"src/extension.ts",24962,0,"a",typescript,content 
+1115,3171451,"src/extension.ts",24963,0,"",typescript,selection_keyboard +1116,3171623,"src/extension.ts",24963,0,"l",typescript,content +1117,3171623,"src/extension.ts",24964,0,"",typescript,selection_keyboard +1118,3171659,"src/extension.ts",24964,0,"i",typescript,content +1119,3171659,"src/extension.ts",24965,0,"",typescript,selection_keyboard +1120,3171738,"src/extension.ts",24965,0,"d",typescript,content +1121,3171738,"src/extension.ts",24966,0,"",typescript,selection_keyboard +1122,3171835,"src/extension.ts",24966,0," ",typescript,content +1123,3171835,"src/extension.ts",24967,0,"",typescript,selection_keyboard +1124,3171900,"src/extension.ts",24967,0,"o",typescript,content +1125,3171901,"src/extension.ts",24968,0,"",typescript,selection_keyboard +1126,3172093,"src/extension.ts",24968,0,"b",typescript,content +1127,3172093,"src/extension.ts",24969,0,"",typescript,selection_keyboard +1128,3172518,"src/extension.ts",24969,0,"j",typescript,content +1129,3172518,"src/extension.ts",24970,0,"",typescript,selection_keyboard +1130,3172519,"src/extension.ts",24970,0,"e",typescript,content +1131,3172519,"src/extension.ts",24971,0,"",typescript,selection_keyboard +1132,3172946,"src/extension.ts",24971,0,"c",typescript,content +1133,3172946,"src/extension.ts",24972,0,"",typescript,selection_keyboard +1134,3173167,"src/extension.ts",24972,0,"t",typescript,content +1135,3173168,"src/extension.ts",24973,0,"",typescript,selection_keyboard +1136,3173404,"src/extension.ts",24973,0,".",typescript,content +1137,3173405,"src/extension.ts",24974,0,"",typescript,selection_keyboard +1138,3173607,"src/extension.ts",24973,0,"",typescript,selection_command +1139,3174205,"src/extension.ts",24967,0,"",typescript,selection_command +1140,3174477,"src/extension.ts",24961,0,"",typescript,selection_command +1141,3174485,"src/extension.ts",24959,0,"",typescript,selection_command +1142,3174522,"src/extension.ts",24952,0,"",typescript,selection_command +1143,3174546,"src/extension.ts",24947,0,"",typescript,selection_command +1144,3174584,"src/extension.ts",24941,0,"",typescript,selection_command +1145,3174608,"src/extension.ts",24940,0,"",typescript,selection_command +1146,3174816,"src/extension.ts",24937,0,"",typescript,selection_command +1147,3175035,"src/extension.ts",24931,0,"",typescript,selection_command +1148,3175432,"src/extension.ts",24878,0,"",typescript,selection_command +1149,3176168,"src/extension.ts",24788,0,"",typescript,selection_command +1150,3178427,"src/extension.ts",24762,0,"",typescript,selection_command +1151,3181563,"src/extension.ts",24788,0,"",typescript,selection_command +1152,3181835,"src/extension.ts",24878,0,"",typescript,selection_command +1153,3186149,"src/extension.ts",24945,0,"",typescript,selection_command +1154,3186697,"src/extension.ts",24922,54,"",typescript,content +1155,3186735,"src/extension.ts",24923,0,"",typescript,selection_command +1156,3187289,"src/extension.ts",24926,0,"",typescript,selection_command +1157,3187444,"src/extension.ts",24928,0,"",typescript,selection_command +1158,3187759,"src/extension.ts",24927,0,"",typescript,selection_command +1159,3187872,"src/extension.ts",24927,1,"!",typescript,selection_command +1160,3188004,"src/extension.ts",24927,7,"!parsed",typescript,selection_command +1161,3188193,"src/extension.ts",24927,10,"!parsed ||",typescript,selection_command +1162,3188688,"src/extension.ts",24927,11,"!parsed || ",typescript,selection_command +1163,3188902,"src/extension.ts",24927,11,"",typescript,content +1164,3194453,"src/extension.ts",24922,78,"\tif 
(typeof parsed !== 'object' || typeof (parsed as any).kind !== 'string') {",typescript,selection_command +1165,3194734,"src/extension.ts",24922,98,"\tif (typeof parsed !== 'object' || typeof (parsed as any).kind !== 'string') {\n\t\treturn undefined;",typescript,selection_command +1166,3194896,"src/extension.ts",24922,101,"\tif (typeof parsed !== 'object' || typeof (parsed as any).kind !== 'string') {\n\t\treturn undefined;\n\t}",typescript,selection_command +1167,3195336,"src/extension.ts",24922,102,"",typescript,content +1168,3195372,"src/extension.ts",24923,0,"",typescript,selection_command +1169,3198282,"src/extension.ts",24923,0,"",typescript,selection_command +1170,3198571,"TERMINAL",0,0,"[11:11:03 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +1171,3198664,"TERMINAL",0,0,"[11:11:03 AM] Found 0 errors. Watching for file changes.\r\n\r\n",,terminal_output +1172,3199283,"src/extension.ts",24856,0,"",typescript,selection_command +1173,3199460,"src/extension.ts",24853,0,"",typescript,selection_command +1174,3202956,"src/extension.ts",24856,0,"",typescript,selection_command +1175,3204451,"src/extension.ts",24853,0,"",typescript,selection_command +1176,3205198,"src/extension.ts",24856,0,"",typescript,selection_command +1177,3206656,"src/extension.ts",24923,0,"",typescript,selection_command +1178,3209758,"src/extension.ts",24856,0,"",typescript,selection_command +1179,3210247,"src/extension.ts",24923,0,"",typescript,selection_command +1180,3212585,"src/extension.ts",24952,0,"",typescript,selection_command +1181,3212596,"src/extension.ts",24974,0,"",typescript,selection_command +1182,3212645,"src/extension.ts",25001,0,"",typescript,selection_command +1183,3212676,"src/extension.ts",25041,0,"",typescript,selection_command +1184,3213340,"src/extension.ts",25001,0,"",typescript,selection_command +1185,3213724,"src/extension.ts",24974,0,"",typescript,selection_command +1186,3213946,"src/extension.ts",24952,0,"",typescript,selection_command +1187,3214133,"src/extension.ts",24923,0,"",typescript,selection_command +1188,3214418,"src/extension.ts",24856,0,"",typescript,selection_command +1189,3214653,"src/extension.ts",24923,0,"",typescript,selection_command +1190,3215320,"src/extension.ts",24922,29,"",typescript,content +1191,3215360,"src/extension.ts",24923,0,"",typescript,selection_command +1192,3215973,"src/extension.ts",24930,0,"",typescript,selection_command +1193,3218711,"src/extension.ts",24931,0,"",typescript,selection_command +1194,3219081,"src/extension.ts",24931,4,"",typescript,content +1195,3219354,"src/extension.ts",24931,0,"p",typescript,content +1196,3219354,"src/extension.ts",24932,0,"",typescript,selection_keyboard +1197,3219500,"src/extension.ts",24932,0,"a",typescript,content +1198,3219501,"src/extension.ts",24933,0,"",typescript,selection_keyboard +1199,3219544,"src/extension.ts",24933,0,"r",typescript,content +1200,3219544,"src/extension.ts",24934,0,"",typescript,selection_keyboard +1201,3219771,"src/extension.ts",24934,0,"s",typescript,content +1202,3219771,"src/extension.ts",24935,0,"",typescript,selection_keyboard +1203,3219822,"src/extension.ts",24935,0,"e",typescript,content +1204,3219823,"src/extension.ts",24936,0,"",typescript,selection_keyboard +1205,3219968,"src/extension.ts",24936,0,"d",typescript,content +1206,3219969,"src/extension.ts",24937,0,"",typescript,selection_keyboard +1207,3220118,"src/extension.ts",24936,0,"",typescript,selection_command +1208,3221374,"src/extension.ts",24937,0,"",typescript,selection_command 
+1209,3222441,"src/extension.ts",24931,0,"",typescript,selection_command +1210,3223016,"src/extension.ts",24878,0,"",typescript,selection_command +1211,3223343,"src/extension.ts",24788,0,"",typescript,selection_command +1212,3230207,"src/extension.ts",24781,0,"",typescript,selection_command +1213,3230277,"src/extension.ts",24759,0,"",typescript,selection_command +1214,3230614,"src/extension.ts",24762,0,"",typescript,selection_command +1215,3230800,"src/extension.ts",24768,0,"",typescript,selection_command +1216,3230994,"src/extension.ts",24770,0,"",typescript,selection_command +1217,3232686,"src/extension.ts",24770,7,"",typescript,content +1218,3234313,"src/extension.ts",24770,0,"a",typescript,content +1219,3234314,"src/extension.ts",24771,0,"",typescript,selection_keyboard +1220,3234448,"src/extension.ts",24771,0,"n",typescript,content +1221,3234448,"src/extension.ts",24772,0,"",typescript,selection_keyboard +1222,3235674,"src/extension.ts",24772,0,"y",typescript,content +1223,3235675,"src/extension.ts",24773,0,"",typescript,selection_keyboard +1224,3235677,"src/extension.ts",24772,0,"",typescript,selection_command +1225,3237339,"src/extension.ts",24780,0,"",typescript,selection_command +1226,3237910,"src/extension.ts",24772,0,"",typescript,selection_command +1227,3238054,"src/extension.ts",24851,0,"",typescript,selection_command +1228,3238595,"src/extension.ts",24848,0,"",typescript,selection_command +1229,3239834,"src/extension.ts",24828,0,"",typescript,selection_command +1230,3239966,"src/extension.ts",24811,0,"",typescript,selection_command +1231,3240124,"src/extension.ts",24782,0,"",typescript,selection_command +1232,3240321,"src/extension.ts",24775,0,"",typescript,selection_command +1233,3240468,"src/extension.ts",24757,0,"",typescript,selection_command +1234,3242684,"TERMINAL",0,0,"[11:11:47 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +1235,3242782,"TERMINAL",0,0,"src/extension.ts:689:17 - error TS2339: Property 'kind' does not exist on type 'any[]'.\r\n\r\n689 switch (parsed.kind) {\r\n   ~~~~\r\n\r\nsrc/extension.ts:693:37 - error TS2304: Cannot find name 'item'.\r\n\r\n693 const selections = Array.isArray(item.selections) ? item.selections : [];\r\n   ~~~~\r\n\r\nsrc/extension.ts:693:56 - error TS2304: Cannot find name 'item'.\r\n\r\n693 const selections = Array.isArray(item.selections) ? item.selections : [];\r\n   ~~~~\r\n\r\nsrc/extension.ts:701:30 - error TS2304: Cannot find name 'item'.\r\n\r\n701 const pos = Array.isArray(item.position) && item.position.length === 2 ? [Number(item.position[0]) || 0, Number(item.position[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:701:48 - error TS2304: Cannot find name 'item'.\r\n\r\n701 const pos = Array.isArray(item.position) && item.position.length === 2 ? [Number(item.position[0]) || 0, Number(item.position[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:701:85 - error TS2304: Cannot find name 'item'.\r\n\r\n701 const pos = Array.isArray(item.position) && item.position.length === 2 ? [Number(item.position[0]) || 0, Number(item.position[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:701:116 - error TS2304: Cannot find name 'item'.\r\n\r\n701 const pos = Array.isArray(item.position) && item.position.length === 2 ? 
[Number(item.position[0]) || 0, Number(item.position[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:702:27 - error TS2304: Cannot find name 'item'.\r\n\r\n702 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:702:52 - error TS2304: Cannot find name 'item'.\r\n\r\n702 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:706:32 - error TS2304: Cannot find name 'item'.\r\n\r\n706 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:706:54 - error TS2304: Cannot find name 'item'.\r\n\r\n706 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:706:94 - error TS2304: Cannot find name 'item'.\r\n\r\n706 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:706:128 - error TS2304: Cannot find name 'item'.\r\n\r\n706 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:707:30 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:707:50 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:707:88 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:707:120 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:32 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:54 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:94 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? 
[Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:128 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:30 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:50 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:88 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:120 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:713:27 - error TS2304: Cannot find name 'item'.\r\n\r\n713 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:713:52 - error TS2304: Cannot find name 'item'.\r\n\r\n713 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:719:27 - error TS2304: Cannot find name 'item'.\r\n\r\n719 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:719:52 - error TS2304: Cannot find name 'item'.\r\n\r\n719 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\n[11:11:47 AM] Found 29 errors. 
Watching for file changes.\r\n\r\n",,terminal_output +1236,3319715,"src/extension.ts",24775,0,"",typescript,selection_command +1237,3319743,"src/extension.ts",24782,0,"",typescript,selection_command +1238,3319871,"src/extension.ts",24811,0,"",typescript,selection_command +1239,3320036,"src/extension.ts",24828,0,"",typescript,selection_command +1240,3320167,"src/extension.ts",24848,0,"",typescript,selection_command +1241,3320441,"src/extension.ts",24851,0,"",typescript,selection_command +1242,3320927,"src/extension.ts",24918,0,"",typescript,selection_command +1243,3321080,"src/extension.ts",24942,0,"",typescript,selection_command +1244,3321223,"src/extension.ts",24969,0,"",typescript,selection_command +1245,3321452,"src/extension.ts",25009,0,"",typescript,selection_command +1246,3321497,"src/extension.ts",25035,0,"",typescript,selection_command +1247,3321611,"src/extension.ts",25112,0,"",typescript,selection_command +1248,3321612,"src/extension.ts",25158,0,"",typescript,selection_command +1249,3321617,"src/extension.ts",25112,0,"",typescript,selection_command +1250,3321897,"src/extension.ts",25035,0,"",typescript,selection_command +1251,3321959,"src/extension.ts",25112,0,"",typescript,selection_command +1252,3322269,"src/extension.ts",25158,0,"",typescript,selection_command +1253,3322270,"src/extension.ts",25320,0,"",typescript,selection_command +1254,3322275,"src/extension.ts",25471,0,"",typescript,selection_command +1255,3322295,"src/extension.ts",25479,0,"",typescript,selection_command +1256,3322355,"src/extension.ts",25534,0,"",typescript,selection_command +1257,3322401,"src/extension.ts",25538,0,"",typescript,selection_command +1258,3322453,"src/extension.ts",25561,0,"",typescript,selection_command +1259,3322467,"src/extension.ts",25538,0,"",typescript,selection_command +1260,3322766,"src/extension.ts",25534,0,"",typescript,selection_command +1261,3322767,"src/extension.ts",25479,0,"",typescript,selection_command +1262,3322786,"src/extension.ts",25471,0,"",typescript,selection_command +1263,3322807,"src/extension.ts",25320,0,"",typescript,selection_command +1264,3322836,"src/extension.ts",25158,0,"",typescript,selection_command +1265,3322875,"src/extension.ts",25112,0,"",typescript,selection_command +1266,3322944,"src/extension.ts",25035,0,"",typescript,selection_command +1267,3322981,"src/extension.ts",25009,0,"",typescript,selection_command +1268,3323063,"src/extension.ts",24969,0,"",typescript,selection_command +1269,3323082,"src/extension.ts",24942,0,"",typescript,selection_command +1270,3323123,"src/extension.ts",24918,0,"",typescript,selection_command +1271,3323342,"src/extension.ts",24851,0,"",typescript,selection_command +1272,3323561,"src/extension.ts",24918,0,"",typescript,selection_command +1273,3323784,"src/extension.ts",24919,0,"",typescript,selection_command +1274,3323952,"src/extension.ts",24926,0,"",typescript,selection_command +1275,3324139,"src/extension.ts",24927,0,"",typescript,selection_command +1276,3324412,"src/extension.ts",24933,0,"",typescript,selection_command +1277,3324639,"src/extension.ts",24934,0,"",typescript,selection_command +1278,3329277,"src/extension.ts",24933,0,"",typescript,selection_command +1279,3329389,"src/extension.ts",24927,0,"",typescript,selection_command +1280,3329803,"src/extension.ts",24874,0,"",typescript,selection_command +1281,3332066,"src/extension.ts",24873,0,"",typescript,selection_command +1282,3332115,"src/extension.ts",24866,0,"",typescript,selection_command 
+1283,3334067,"src/extension.ts",24865,0,"",typescript,selection_command +1284,3334941,"src/extension.ts",24860,0,"",typescript,selection_command +1285,3335162,"src/extension.ts",24860,0,"1",typescript,content +1286,3335163,"src/extension.ts",24861,0,"",typescript,selection_keyboard +1287,3335436,"src/extension.ts",24860,1,"",typescript,content +1288,3335641,"src/extension.ts",24860,0,"!",typescript,content +1289,3335642,"src/extension.ts",24861,0,"",typescript,selection_keyboard +1290,3335899,"src/extension.ts",24860,0,"",typescript,selection_command +1291,3336464,"src/extension.ts",24928,0,"",typescript,selection_command +1292,3337111,"src/extension.ts",24860,0,"",typescript,selection_command +1293,3338876,"src/extension.ts",24860,1,"",typescript,content +1294,3340156,"src/extension.ts",24864,0,"",typescript,selection_command +1295,3340205,"src/extension.ts",24865,0,"",typescript,selection_command +1296,3340702,"src/extension.ts",24872,0,"",typescript,selection_command +1297,3341193,"src/extension.ts",24866,0,"",typescript,selection_command +1298,3341562,"src/extension.ts",24866,7,"",typescript,content +1299,3341822,"src/extension.ts",24866,0,"i",typescript,content +1300,3341823,"src/extension.ts",24867,0,"",typescript,selection_keyboard +1301,3342162,"src/extension.ts",24867,0,"s",typescript,content +1302,3342162,"src/extension.ts",24868,0,"",typescript,selection_keyboard +1303,3342595,"src/extension.ts",24868,0,"N",typescript,content +1304,3342595,"src/extension.ts",24869,0,"",typescript,selection_keyboard +1305,3342801,"src/extension.ts",24869,0,"o",typescript,content +1306,3342801,"src/extension.ts",24870,0,"",typescript,selection_keyboard +1307,3342837,"src/extension.ts",24870,0,"t",typescript,content +1308,3342837,"src/extension.ts",24871,0,"",typescript,selection_keyboard +1309,3343127,"src/extension.ts",24871,0,"A",typescript,content +1310,3343127,"src/extension.ts",24872,0,"",typescript,selection_keyboard +1311,3343278,"src/extension.ts",24872,0,"r",typescript,content +1312,3343278,"src/extension.ts",24873,0,"",typescript,selection_keyboard +1313,3343381,"src/extension.ts",24873,0,"r",typescript,content +1314,3343381,"src/extension.ts",24874,0,"",typescript,selection_keyboard +1315,3343600,"src/extension.ts",24874,0,"a",typescript,content +1316,3343600,"src/extension.ts",24875,0,"",typescript,selection_keyboard +1317,3343612,"src/extension.ts",24875,0,"y",typescript,content +1318,3343612,"src/extension.ts",24876,0,"",typescript,selection_keyboard +1319,3343823,"src/extension.ts",24875,0,"",typescript,selection_command +1320,3346406,"src/extension.ts",24943,0,"",typescript,selection_command +1321,3346512,"src/extension.ts",24875,0,"",typescript,selection_command +1322,3346721,"src/extension.ts",24868,3,"",typescript,content +1323,3346755,"src/extension.ts",24866,0,"",typescript,selection_command +1324,3349730,"src/extension.ts",24865,0,"",typescript,selection_command +1325,3349997,"src/extension.ts",24860,0,"",typescript,selection_command +1326,3362520,"src/extension.ts",24860,0,"!",typescript,content +1327,3362521,"src/extension.ts",24861,0,"",typescript,selection_keyboard +1328,3363177,"src/extension.ts",24860,0,"",typescript,selection_command +1329,3363870,"src/extension.ts",24928,0,"",typescript,selection_command +1330,3364022,"src/extension.ts",24934,0,"",typescript,selection_command +1331,3364234,"src/extension.ts",24935,0,"",typescript,selection_command +1332,3367620,"src/extension.ts",24959,0,"",typescript,selection_command 
+1333,3367708,"src/extension.ts",24986,0,"",typescript,selection_command +1334,3367825,"src/extension.ts",24959,0,"",typescript,selection_command +1335,3367940,"src/extension.ts",24935,0,"",typescript,selection_command +1336,3368884,"src/extension.ts",24959,0,"",typescript,selection_command +1337,3368920,"src/extension.ts",24986,0,"",typescript,selection_command +1338,3368941,"src/extension.ts",25026,0,"",typescript,selection_command +1339,3368979,"src/extension.ts",25052,0,"",typescript,selection_command +1340,3369530,"src/extension.ts",25026,0,"",typescript,selection_command +1341,3370757,"src/extension.ts",24844,0,"",typescript,selection_command +1342,3371562,"src/extension.ts",24826,0,"",typescript,selection_command +1343,3371675,"src/extension.ts",24844,0,"",typescript,selection_command +1344,3371675,"src/extension.ts",24849,0,"",typescript,selection_command +1345,3371839,"src/extension.ts",24867,0,"",typescript,selection_command +1346,3372272,"src/extension.ts",24935,0,"",typescript,selection_command +1347,3372635,"src/extension.ts",24959,0,"",typescript,selection_command +1348,3373155,"src/extension.ts",24986,0,"",typescript,selection_command +1349,3373156,"src/extension.ts",25026,0,"",typescript,selection_command +1350,3373211,"src/extension.ts",25052,0,"",typescript,selection_command +1351,3373232,"src/extension.ts",25129,0,"",typescript,selection_command +1352,3373232,"src/extension.ts",25175,0,"",typescript,selection_command +1353,3373233,"src/extension.ts",25337,0,"",typescript,selection_command +1354,3373313,"src/extension.ts",25478,0,"",typescript,selection_command +1355,3373399,"src/extension.ts",25496,0,"",typescript,selection_command +1356,3373400,"src/extension.ts",25537,0,"",typescript,selection_command +1357,3373447,"src/extension.ts",25555,0,"",typescript,selection_command +1358,3373475,"src/extension.ts",25578,0,"",typescript,selection_command +1359,3373476,"src/extension.ts",25767,0,"",typescript,selection_command +1360,3373551,"src/extension.ts",25834,0,"",typescript,selection_command +1361,3373572,"src/extension.ts",25884,0,"",typescript,selection_command +1362,3373622,"src/extension.ts",25902,0,"",typescript,selection_command +1363,3373648,"src/extension.ts",25925,0,"",typescript,selection_command +1364,3373656,"src/extension.ts",26129,0,"",typescript,selection_command +1365,3373689,"src/extension.ts",26323,0,"",typescript,selection_command +1366,3373690,"src/extension.ts",26366,0,"",typescript,selection_command +1367,3373690,"src/extension.ts",26384,0,"",typescript,selection_command +1368,3373691,"src/extension.ts",26408,0,"",typescript,selection_command +1369,3373743,"src/extension.ts",26612,0,"",typescript,selection_command +1370,3373791,"src/extension.ts",26408,0,"",typescript,selection_command +1371,3373933,"src/extension.ts",26384,0,"",typescript,selection_command +1372,3373933,"src/extension.ts",26366,0,"",typescript,selection_command +1373,3373933,"src/extension.ts",26323,0,"",typescript,selection_command +1374,3373954,"src/extension.ts",26129,0,"",typescript,selection_command +1375,3373955,"src/extension.ts",25925,0,"",typescript,selection_command +1376,3373955,"src/extension.ts",25902,0,"",typescript,selection_command +1377,3374009,"src/extension.ts",25884,0,"",typescript,selection_command +1378,3374010,"src/extension.ts",25834,0,"",typescript,selection_command +1379,3374010,"src/extension.ts",25767,0,"",typescript,selection_command +1380,3374063,"src/extension.ts",25578,0,"",typescript,selection_command 
+1381,3374064,"src/extension.ts",25555,0,"",typescript,selection_command +1382,3374064,"src/extension.ts",25537,0,"",typescript,selection_command +1383,3374064,"src/extension.ts",25496,0,"",typescript,selection_command +1384,3374098,"src/extension.ts",25478,0,"",typescript,selection_command +1385,3374120,"src/extension.ts",25337,0,"",typescript,selection_command +1386,3374169,"src/extension.ts",25175,0,"",typescript,selection_command +1387,3374191,"src/extension.ts",25129,0,"",typescript,selection_command +1388,3374262,"src/extension.ts",25052,0,"",typescript,selection_command +1389,3374345,"src/extension.ts",25026,0,"",typescript,selection_command +1390,3374439,"src/extension.ts",25052,0,"",typescript,selection_command +1391,3374687,"src/extension.ts",25129,0,"",typescript,selection_command +1392,3374780,"src/extension.ts",25052,0,"",typescript,selection_command +1393,3374957,"src/extension.ts",25026,0,"",typescript,selection_command +1394,3375293,"src/extension.ts",24986,0,"",typescript,selection_command +1395,3375511,"src/extension.ts",25026,0,"",typescript,selection_command +1396,3375604,"src/extension.ts",25052,0,"",typescript,selection_command +1397,3375990,"src/extension.ts",25026,0,"",typescript,selection_command +1398,3376292,"src/extension.ts",25052,0,"",typescript,selection_command +1399,3381518,"src/extension.ts",25072,0,"",typescript,selection_command +1400,3381519,"src/extension.ts",25072,4,"",typescript,content +1401,3401062,"src/extension.ts",25072,0,"p",typescript,content +1402,3401063,"src/extension.ts",25073,0,"",typescript,selection_keyboard +1403,3401065,"src/extension.ts",25073,0,"a",typescript,content +1404,3401066,"src/extension.ts",25074,0,"",typescript,selection_keyboard +1405,3401621,"src/extension.ts",25074,0,"r",typescript,content +1406,3401621,"src/extension.ts",25075,0,"",typescript,selection_keyboard +1407,3401622,"src/extension.ts",25075,0,"s",typescript,content +1408,3401623,"src/extension.ts",25076,0,"",typescript,selection_keyboard +1409,3401623,"src/extension.ts",25076,0,"e",typescript,content +1410,3401624,"src/extension.ts",25077,0,"",typescript,selection_keyboard +1411,3401625,"src/extension.ts",25077,0,"d",typescript,content +1412,3401625,"src/extension.ts",25078,0,"",typescript,selection_keyboard +1413,3404500,"src/extension.ts",25077,0,"",typescript,selection_command +1414,3404501,"src/extension.ts",25093,0,"",typescript,selection_command +1415,3404502,"src/extension.ts",25093,4,"",typescript,content +1416,3404504,"src/extension.ts",25093,0,"p",typescript,content +1417,3404505,"src/extension.ts",25094,0,"",typescript,selection_keyboard +1418,3404507,"src/extension.ts",25094,0,"a",typescript,content +1419,3404508,"src/extension.ts",25095,0,"",typescript,selection_keyboard +1420,3404509,"src/extension.ts",25095,0,"r",typescript,content +1421,3404509,"src/extension.ts",25096,0,"",typescript,selection_keyboard +1422,3404510,"src/extension.ts",25096,0,"s",typescript,content +1423,3404510,"src/extension.ts",25097,0,"",typescript,selection_keyboard +1424,3404511,"src/extension.ts",25097,0,"e",typescript,content +1425,3404511,"src/extension.ts",25098,0,"",typescript,selection_keyboard +1426,3404512,"src/extension.ts",25098,0,"d",typescript,content +1427,3404513,"src/extension.ts",25099,0,"",typescript,selection_keyboard +1428,3404514,"src/extension.ts",25098,0,"",typescript,selection_command +1429,3404515,"src/extension.ts",25595,0,"",typescript,selection_command +1430,3404541,"src/extension.ts",25595,4,"",typescript,content 
+1431,3404542,"src/extension.ts",25595,0,"p",typescript,content +1432,3404542,"src/extension.ts",25596,0,"",typescript,selection_keyboard +1433,3404544,"src/extension.ts",25596,0,"a",typescript,content +1434,3404544,"src/extension.ts",25597,0,"",typescript,selection_keyboard +1435,3404547,"src/extension.ts",25597,0,"r",typescript,content +1436,3404547,"src/extension.ts",25598,0,"",typescript,selection_keyboard +1437,3404550,"src/extension.ts",25598,0,"s",typescript,content +1438,3404550,"src/extension.ts",25599,0,"",typescript,selection_keyboard +1439,3404551,"src/extension.ts",25599,0,"e",typescript,content +1440,3404552,"src/extension.ts",25600,0,"",typescript,selection_keyboard +1441,3404553,"src/extension.ts",25600,0,"d",typescript,content +1442,3404553,"src/extension.ts",25601,0,"",typescript,selection_keyboard +1443,3404554,"src/extension.ts",25600,0,"",typescript,selection_command +1444,3404555,"src/extension.ts",25615,0,"",typescript,selection_command +1445,3404557,"src/extension.ts",25615,4,"",typescript,content +1446,3404559,"src/extension.ts",25615,0,"p",typescript,content +1447,3404559,"src/extension.ts",25616,0,"",typescript,selection_keyboard +1448,3404561,"src/extension.ts",25616,0,"a",typescript,content +1449,3404561,"src/extension.ts",25617,0,"",typescript,selection_keyboard +1450,3404624,"src/extension.ts",25617,0,"r",typescript,content +1451,3404624,"src/extension.ts",25618,0,"",typescript,selection_keyboard +1452,3404625,"src/extension.ts",25618,0,"s",typescript,content +1453,3404626,"src/extension.ts",25619,0,"",typescript,selection_keyboard +1454,3405024,"src/extension.ts",25619,0,"e",typescript,content +1455,3405025,"src/extension.ts",25620,0,"",typescript,selection_keyboard +1456,3405026,"src/extension.ts",25620,0,"d",typescript,content +1457,3405026,"src/extension.ts",25621,0,"",typescript,selection_keyboard +1458,3405027,"src/extension.ts",25620,0,"",typescript,selection_command +1459,3405028,"src/extension.ts",25654,0,"",typescript,selection_command +1460,3405029,"src/extension.ts",25654,4,"",typescript,content +1461,3405030,"src/extension.ts",25654,0,"p",typescript,content +1462,3405031,"src/extension.ts",25655,0,"",typescript,selection_keyboard +1463,3405043,"src/extension.ts",25655,0,"a",typescript,content +1464,3405044,"src/extension.ts",25656,0,"",typescript,selection_keyboard +1465,3405044,"src/extension.ts",25656,0,"r",typescript,content +1466,3405045,"src/extension.ts",25657,0,"",typescript,selection_keyboard +1467,3405045,"src/extension.ts",25657,0,"s",typescript,content +1468,3405046,"src/extension.ts",25658,0,"",typescript,selection_keyboard +1469,3405062,"src/extension.ts",25658,0,"e",typescript,content +1470,3405063,"src/extension.ts",25659,0,"",typescript,selection_keyboard +1471,3405064,"src/extension.ts",25659,0,"d",typescript,content +1472,3405064,"src/extension.ts",25660,0,"",typescript,selection_keyboard +1473,3405183,"src/extension.ts",25659,0,"",typescript,selection_command +1474,3405184,"src/extension.ts",25687,0,"",typescript,selection_command +1475,3405185,"src/extension.ts",25687,4,"",typescript,content +1476,3405186,"src/extension.ts",25687,0,"p",typescript,content +1477,3405186,"src/extension.ts",25688,0,"",typescript,selection_keyboard +1478,3405188,"src/extension.ts",25688,0,"a",typescript,content +1479,3405188,"src/extension.ts",25689,0,"",typescript,selection_keyboard +1480,3405190,"src/extension.ts",25689,0,"r",typescript,content +1481,3405190,"src/extension.ts",25690,0,"",typescript,selection_keyboard 
+1482,3405195,"src/extension.ts",25690,0,"s",typescript,content +1483,3405196,"src/extension.ts",25691,0,"",typescript,selection_keyboard +1484,3405197,"src/extension.ts",25691,0,"e",typescript,content +1485,3405197,"src/extension.ts",25692,0,"",typescript,selection_keyboard +1486,3405198,"src/extension.ts",25692,0,"d",typescript,content +1487,3405202,"src/extension.ts",25692,0,"",typescript,selection_command +1488,3405203,"src/extension.ts",25789,0,"",typescript,selection_command +1489,3405208,"src/extension.ts",25789,4,"",typescript,content +1490,3405209,"src/extension.ts",25789,0,"p",typescript,content +1491,3405209,"src/extension.ts",25790,0,"",typescript,selection_keyboard +1492,3405221,"src/extension.ts",25790,0,"a",typescript,content +1493,3405222,"src/extension.ts",25791,0,"",typescript,selection_keyboard +1494,3405222,"src/extension.ts",25791,0,"r",typescript,content +1495,3405222,"src/extension.ts",25792,0,"",typescript,selection_keyboard +1496,3405223,"src/extension.ts",25792,0,"s",typescript,content +1497,3405224,"src/extension.ts",25793,0,"",typescript,selection_keyboard +1498,3405224,"src/extension.ts",25793,0,"e",typescript,content +1499,3405224,"src/extension.ts",25794,0,"",typescript,selection_keyboard +1500,3405225,"src/extension.ts",25794,0,"d",typescript,content +1501,3405225,"src/extension.ts",25795,0,"",typescript,selection_keyboard +1502,3405226,"src/extension.ts",25795,0,"n",typescript,content +1503,3405226,"src/extension.ts",25796,0,"",typescript,selection_keyboard +1504,3405602,"src/extension.ts",25795,1,"",typescript,content +1505,3405604,"src/extension.ts",25794,0,"",typescript,selection_command +1506,3405605,"src/extension.ts",25816,0,"",typescript,selection_command +1507,3405606,"src/extension.ts",25816,4,"",typescript,content +1508,3405607,"src/extension.ts",25816,0,"p",typescript,content +1509,3405607,"src/extension.ts",25817,0,"",typescript,selection_keyboard +1510,3405608,"src/extension.ts",25817,0,"a",typescript,content +1511,3405608,"src/extension.ts",25818,0,"",typescript,selection_keyboard +1512,3405658,"src/extension.ts",25818,0,"r",typescript,content +1513,3405658,"src/extension.ts",25819,0,"",typescript,selection_keyboard +1514,3405659,"src/extension.ts",25819,0,"s",typescript,content +1515,3405659,"src/extension.ts",25820,0,"",typescript,selection_keyboard +1516,3405659,"src/extension.ts",25820,0,"e",typescript,content +1517,3405660,"src/extension.ts",25821,0,"",typescript,selection_keyboard +1518,3405661,"src/extension.ts",25821,0,"d",typescript,content +1519,3405661,"src/extension.ts",25822,0,"",typescript,selection_keyboard +1520,3405662,"src/extension.ts",25821,0,"",typescript,selection_command +1521,3405662,"src/extension.ts",25956,0,"",typescript,selection_command +1522,3405706,"src/extension.ts",25956,4,"",typescript,content +1523,3405707,"src/extension.ts",25956,0,"p",typescript,content +1524,3405707,"src/extension.ts",25957,0,"",typescript,selection_keyboard +1525,3405708,"src/extension.ts",25957,0,"a",typescript,content +1526,3405708,"src/extension.ts",25958,0,"",typescript,selection_keyboard +1527,3405786,"src/extension.ts",25958,0,"r",typescript,content +1528,3405786,"src/extension.ts",25959,0,"",typescript,selection_keyboard +1529,3405787,"src/extension.ts",25959,0,"s",typescript,content +1530,3405787,"src/extension.ts",25960,0,"",typescript,selection_keyboard +1531,3405788,"src/extension.ts",25960,0,"e",typescript,content +1532,3405791,"src/extension.ts",25961,0,"d",typescript,content 
+1533,3405793,"src/extension.ts",25961,0,"",typescript,selection_command +1534,3405794,"src/extension.ts",25980,0,"",typescript,selection_command +1535,3405795,"src/extension.ts",25980,4,"",typescript,content +1536,3405797,"src/extension.ts",25980,0,"p",typescript,content +1537,3405797,"src/extension.ts",25981,0,"",typescript,selection_keyboard +1538,3405798,"src/extension.ts",25981,0,"a",typescript,content +1539,3405799,"src/extension.ts",25982,0,"",typescript,selection_keyboard +1540,3405800,"src/extension.ts",25982,0,"r",typescript,content +1541,3405800,"src/extension.ts",25983,0,"",typescript,selection_keyboard +1542,3405801,"src/extension.ts",25983,0,"s",typescript,content +1543,3405801,"src/extension.ts",25984,0,"",typescript,selection_keyboard +1544,3405813,"src/extension.ts",25984,0,"e",typescript,content +1545,3405861,"src/extension.ts",25985,0,"d",typescript,content +1546,3406033,"src/extension.ts",25985,0,"",typescript,selection_command +1547,3406113,"src/extension.ts",26022,0,"",typescript,selection_command +1548,3406596,"src/extension.ts",26022,4,"",typescript,content +1549,3406732,"src/extension.ts",26022,0,"p",typescript,content +1550,3406732,"src/extension.ts",26023,0,"",typescript,selection_keyboard +1551,3406846,"src/extension.ts",26023,0,"a",typescript,content +1552,3406846,"src/extension.ts",26024,0,"",typescript,selection_keyboard +1553,3406864,"src/extension.ts",26024,0,"r",typescript,content +1554,3406864,"src/extension.ts",26025,0,"",typescript,selection_keyboard +1555,3407008,"src/extension.ts",26025,0,"s",typescript,content +1556,3407008,"src/extension.ts",26026,0,"",typescript,selection_keyboard +1557,3407050,"src/extension.ts",26026,0,"e",typescript,content +1558,3407430,"src/extension.ts",26027,0,"c",typescript,content +1559,3407696,"src/extension.ts",26027,0,"",typescript,selection_command +1560,3409729,"src/extension.ts",26058,0,"",typescript,selection_command +1561,3434413,"src/extension.ts",26058,4,"",typescript,content +1562,3434414,"src/extension.ts",26058,0,"p",typescript,content +1563,3434414,"src/extension.ts",26059,0,"",typescript,selection_keyboard +1564,3434415,"src/extension.ts",26059,0,"a",typescript,content +1565,3434415,"src/extension.ts",26060,0,"",typescript,selection_keyboard +1566,3434415,"src/extension.ts",26060,0,"r",typescript,content +1567,3434415,"src/extension.ts",26061,0,"",typescript,selection_keyboard +1568,3434415,"src/extension.ts",26061,0,"s",typescript,content +1569,3434415,"src/extension.ts",26062,0,"",typescript,selection_keyboard +1570,3434416,"src/extension.ts",26062,0,"e",typescript,content +1571,3434417,"src/extension.ts",26063,0,"d",typescript,content +1572,3434419,"src/extension.ts",26063,0,"",typescript,selection_command +1573,3434419,"src/extension.ts",26166,0,"",typescript,selection_command +1574,3434424,"src/extension.ts",26360,0,"",typescript,selection_command +1575,3434424,"src/extension.ts",26166,0,"",typescript,selection_command +1576,3434425,"src/extension.ts",26137,0,"",typescript,selection_command +1577,3434425,"src/extension.ts",25925,0,"",typescript,selection_command +1578,3434425,"src/extension.ts",25902,0,"",typescript,selection_command +1579,3434425,"src/extension.ts",25898,0,"",typescript,selection_command +1580,3434425,"src/extension.ts",25834,0,"",typescript,selection_command +1581,3434425,"src/extension.ts",25763,0,"",typescript,selection_command +1582,3434425,"src/extension.ts",25566,0,"",typescript,selection_command +1583,3434425,"src/extension.ts",25569,0,"",typescript,selection_command 
+1584,3434425,"src/extension.ts",25575,0,"",typescript,selection_command +1585,3434425,"src/extension.ts",25579,0,"",typescript,selection_command +1586,3434425,"src/extension.ts",25581,0,"",typescript,selection_command +1587,3434425,"src/extension.ts",25586,0,"",typescript,selection_command +1588,3434426,"src/extension.ts",25587,0,"",typescript,selection_command +1589,3434426,"src/extension.ts",25594,0,"",typescript,selection_command +1590,3434426,"src/extension.ts",25595,0,"",typescript,selection_command +1591,3434426,"src/extension.ts",25601,0,"",typescript,selection_command +1592,3434426,"src/extension.ts",25602,0,"",typescript,selection_command +1593,3434426,"src/extension.ts",25610,0,"",typescript,selection_command +1594,3434426,"src/extension.ts",25612,0,"",typescript,selection_command +1595,3434426,"src/extension.ts",25615,0,"",typescript,selection_command +1596,3434427,"src/extension.ts",25812,0,"",typescript,selection_command +1597,3434430,"src/extension.ts",25883,0,"",typescript,selection_command +1598,3434431,"src/extension.ts",25900,0,"",typescript,selection_command +1599,3434432,"TERMINAL",0,0,"[11:13:53 AM] File change detected. Starting incremental compilation...\r\n\r\nsrc/extension.ts:693:37 - error TS2304: Cannot find name 'item'.\r\n\r\n693 const selections = Array.isArray(item.selections) ? item.selections : [];\r\n   ~~~~\r\n\r\nsrc/extension.ts:693:56 - error TS2304: Cannot find name 'item'.\r\n\r\n693 const selections = Array.isArray(item.selections) ? item.selections : [];\r\n   ~~~~\r\n\r\nsrc/extension.ts:701:30 - error TS2304: Cannot find name 'item'.\r\n\r\n701 const pos = Array.isArray(item.position) && item.position.length === 2 ? [Number(item.position[0]) || 0, Number(item.position[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:701:48 - error TS2304: Cannot find name 'item'.\r\n\r\n701 const pos = Array.isArray(item.position) && item.position.length === 2 ? [Number(item.position[0]) || 0, Number(item.position[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:701:85 - error TS2304: Cannot find name 'item'.\r\n\r\n701 const pos = Array.isArray(item.position) && item.position.length === 2 ? [Number(item.position[0]) || 0, Number(item.position[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:701:116 - error TS2304: Cannot find name 'item'.\r\n\r\n701 const pos = Array.isArray(item.position) && item.position.length === 2 ? [Number(item.position[0]) || 0, Number(item.position[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:702:27 - error TS2304: Cannot find name 'item'.\r\n\r\n702 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:702:52 - error TS2304: Cannot find name 'item'.\r\n\r\n702 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:706:32 - error TS2304: Cannot find name 'item'.\r\n\r\n706 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:706:54 - error TS2304: Cannot find name 'item'.\r\n\r\n706 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? 
[Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:706:94 - error TS2304: Cannot find name 'item'.\r\n\r\n706 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:706:128 - error TS2304: Cannot find name 'item'.\r\n\r\n706 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:707:30 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:707:50 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:707:88 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:707:120 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:32 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:54 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:94 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:128 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:30 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:50 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? 
[Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:88 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:120 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:713:27 - error TS2304: Cannot find name 'item'.\r\n\r\n713 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:713:52 - error TS2304: Cannot find name 'item'.\r\n\r\n713 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:719:27 - error TS2304: Cannot find name 'item'.\r\n\r\n719 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:719:52 - error TS2304: Cannot find name 'item'.\r\n\r\n719 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\n[11:13:53 AM] Found 28 errors. Watching for file changes.\r\n\r\n",,terminal_output +1600,3434441,"src/extension.ts",25923,0,"",typescript,selection_command +1601,3434442,"src/extension.ts",25974,0,"",typescript,selection_command +1602,3434443,"src/extension.ts",26186,0,"",typescript,selection_command +1603,3434444,"TERMINAL",0,0,"[11:14:59 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +1604,3434444,"src/extension.ts",26380,0,"",typescript,selection_command +1605,3434480,"TERMINAL",0,0,"src/extension.ts:706:98 - error TS2552: Cannot find name 'parsec'. Did you mean 'parsed'?\r\n\r\n706 const start = Array.isArray(parsed.range?.start) && parsed.range.start.length === 2 ? [Number(parsec.range.start[0]) || 0, Number(parsed.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~~~\r\n\r\n src/extension.ts:682:6\r\n 682 let parsed: any;\r\n    ~~~~~~\r\n 'parsed' is declared here.\r\n\r\nsrc/extension.ts:707:30 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:707:50 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:707:88 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:707:120 - error TS2304: Cannot find name 'item'.\r\n\r\n707 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? 
[Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:32 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:54 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:94 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:128 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:30 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:50 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:88 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:120 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:713:27 - error TS2304: Cannot find name 'item'.\r\n\r\n713 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:713:52 - error TS2304: Cannot find name 'item'.\r\n\r\n713 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:719:27 - error TS2304: Cannot find name 'item'.\r\n\r\n719 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:719:52 - error TS2304: Cannot find name 'item'.\r\n\r\n719 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\n[11:14:59 AM] Found 17 errors. 
Watching for file changes.\r\n\r\n",,terminal_output +1606,3434699,"src/extension.ts",26186,0,"",typescript,selection_command +1607,3435153,"src/extension.ts",26137,0,"",typescript,selection_command +1608,3435301,"src/extension.ts",26140,0,"",typescript,selection_command +1609,3435550,"src/extension.ts",26146,0,"",typescript,selection_command +1610,3435736,"src/extension.ts",26150,0,"",typescript,selection_command +1611,3435767,"src/extension.ts",26152,0,"",typescript,selection_command +1612,3435798,"src/extension.ts",26157,0,"",typescript,selection_command +1613,3436800,"src/extension.ts",26260,0,"d",typescript,content +1614,3436800,"src/extension.ts",26259,1,"",typescript,content +1615,3436800,"src/extension.ts",26258,0,"pars",typescript,content +1616,3436800,"src/extension.ts",26256,2,"",typescript,content +1617,3436800,"src/extension.ts",26228,0,"d",typescript,content +1618,3436800,"src/extension.ts",26227,1,"",typescript,content +1619,3436800,"src/extension.ts",26226,0,"pars",typescript,content +1620,3436800,"src/extension.ts",26224,2,"",typescript,content +1621,3436800,"src/extension.ts",26190,0,"d",typescript,content +1622,3436800,"src/extension.ts",26189,1,"",typescript,content +1623,3436800,"src/extension.ts",26188,0,"pars",typescript,content +1624,3436800,"src/extension.ts",26186,2,"",typescript,content +1625,3436800,"src/extension.ts",26170,0,"d",typescript,content +1626,3436800,"src/extension.ts",26169,1,"",typescript,content +1627,3436800,"src/extension.ts",26168,0,"pars",typescript,content +1628,3436800,"src/extension.ts",26166,2,"",typescript,content +1629,3441382,"src/extension.ts",26359,0,"",typescript,selection_command +1630,3441382,"src/extension.ts",26398,0,"",typescript,selection_command +1631,3441383,"src/extension.ts",26359,0,"",typescript,selection_command +1632,3441533,"src/extension.ts",26157,0,"",typescript,selection_command +1633,3441618,"src/extension.ts",25945,0,"",typescript,selection_command +1634,3442770,"src/extension.ts",26027,0,"",typescript,selection_command +1635,3443277,"src/extension.ts",26027,1,"d",typescript,content +1636,3445189,"src/extension.ts",26239,0,"",typescript,selection_command +1637,3445271,"src/extension.ts",26394,0,"",typescript,selection_command +1638,3445633,"src/extension.ts",26398,0,"",typescript,selection_command +1639,3445635,"TERMINAL",0,0,"[11:15:10 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +1640,3445659,"TERMINAL",0,0,"src/extension.ts:711:32 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:54 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:94 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? [Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:711:128 - error TS2304: Cannot find name 'item'.\r\n\r\n711 const start = Array.isArray(item.range?.start) && item.range.start.length === 2 ? 
[Number(item.range.start[0]) || 0, Number(item.range.start[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:30 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:50 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:88 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:712:120 - error TS2304: Cannot find name 'item'.\r\n\r\n712 const end = Array.isArray(item.range?.end) && item.range.end.length === 2 ? [Number(item.range.end[0]) || 0, Number(item.range.end[1]) || 0] as [number, number] : [0, 0] as [number, number];\r\n   ~~~~\r\n\r\nsrc/extension.ts:713:27 - error TS2304: Cannot find name 'item'.\r\n\r\n713 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:713:52 - error TS2304: Cannot find name 'item'.\r\n\r\n713 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:719:27 - error TS2304: Cannot find name 'item'.\r\n\r\n719 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:719:52 - error TS2304: Cannot find name 'item'.\r\n\r\n719 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\n[11:15:10 AM] Found 12 errors. 
Watching for file changes.\r\n\r\n",,terminal_output +1641,3445698,"src/extension.ts",26422,0,"",typescript,selection_command +1642,3445758,"src/extension.ts",26526,0,"",typescript,selection_command +1643,3446861,"src/extension.ts",26555,0,"d",typescript,content +1644,3446861,"src/extension.ts",26554,1,"",typescript,content +1645,3446861,"src/extension.ts",26553,0,"pars",typescript,content +1646,3446861,"src/extension.ts",26551,2,"",typescript,content +1647,3446861,"src/extension.ts",26521,0,"d",typescript,content +1648,3446861,"src/extension.ts",26520,1,"",typescript,content +1649,3446861,"src/extension.ts",26519,0,"pars",typescript,content +1650,3446861,"src/extension.ts",26517,2,"",typescript,content +1651,3446861,"src/extension.ts",26481,0,"d",typescript,content +1652,3446861,"src/extension.ts",26480,1,"",typescript,content +1653,3446861,"src/extension.ts",26479,0,"pars",typescript,content +1654,3446861,"src/extension.ts",26477,2,"",typescript,content +1655,3446861,"src/extension.ts",26459,0,"d",typescript,content +1656,3446861,"src/extension.ts",26458,1,"",typescript,content +1657,3446861,"src/extension.ts",26457,0,"pars",typescript,content +1658,3446861,"src/extension.ts",26455,2,"",typescript,content +1659,3447517,"src/extension.ts",26759,0,"d",typescript,content +1660,3447517,"src/extension.ts",26758,1,"",typescript,content +1661,3447517,"src/extension.ts",26757,0,"pars",typescript,content +1662,3447517,"src/extension.ts",26755,2,"",typescript,content +1663,3447517,"src/extension.ts",26727,0,"d",typescript,content +1664,3447517,"src/extension.ts",26726,1,"",typescript,content +1665,3447517,"src/extension.ts",26725,0,"pars",typescript,content +1666,3447517,"src/extension.ts",26723,2,"",typescript,content +1667,3447517,"src/extension.ts",26689,0,"d",typescript,content +1668,3447517,"src/extension.ts",26688,1,"",typescript,content +1669,3447517,"src/extension.ts",26687,0,"pars",typescript,content +1670,3447518,"src/extension.ts",26685,2,"",typescript,content +1671,3447518,"src/extension.ts",26669,0,"d",typescript,content +1672,3447518,"src/extension.ts",26668,1,"",typescript,content +1673,3447518,"src/extension.ts",26667,0,"pars",typescript,content +1674,3447518,"src/extension.ts",26665,2,"",typescript,content +1675,3448200,"src/extension.ts",26893,0,"d",typescript,content +1676,3448200,"src/extension.ts",26892,1,"",typescript,content +1677,3448200,"src/extension.ts",26891,0,"pars",typescript,content +1678,3448200,"src/extension.ts",26889,2,"",typescript,content +1679,3448200,"src/extension.ts",26868,0,"d",typescript,content +1680,3448200,"src/extension.ts",26867,1,"",typescript,content +1681,3448200,"src/extension.ts",26866,0,"pars",typescript,content +1682,3448200,"src/extension.ts",26864,2,"",typescript,content +1683,3450072,"src/extension.ts",26744,0,"",typescript,selection_command +1684,3450390,"src/extension.ts",26907,0,"",typescript,selection_command +1685,3450398,"src/extension.ts",26980,0,"",typescript,selection_command +1686,3450400,"src/extension.ts",26984,0,"",typescript,selection_command +1687,3450423,"src/extension.ts",27007,0,"",typescript,selection_command +1688,3450426,"src/extension.ts",27043,0,"",typescript,selection_command +1689,3450486,"src/extension.ts",27072,0,"",typescript,selection_command +1690,3450489,"src/extension.ts",27139,0,"",typescript,selection_command +1691,3450542,"src/extension.ts",27194,0,"",typescript,selection_command +1692,3450825,"src/extension.ts",27139,0,"",typescript,selection_command +1693,3450875,"TERMINAL",0,0,"[11:15:15 AM] File change 
detected. Starting incremental compilation...\r\n\r\n",,terminal_output +1694,3450969,"TERMINAL",0,0,"src/extension.ts:719:27 - error TS2304: Cannot find name 'item'.\r\n\r\n719 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\nsrc/extension.ts:719:52 - error TS2304: Cannot find name 'item'.\r\n\r\n719 const textVal = typeof item.text === 'string' ? item.text : '';\r\n   ~~~~\r\n\r\n[11:15:15 AM] Found 2 errors. Watching for file changes.\r\n\r\n",,terminal_output +1695,3452184,"src/extension.ts",27129,0,"d",typescript,content +1696,3452184,"src/extension.ts",27128,1,"",typescript,content +1697,3452184,"src/extension.ts",27127,0,"pars",typescript,content +1698,3452184,"src/extension.ts",27125,2,"",typescript,content +1699,3452184,"src/extension.ts",27104,0,"d",typescript,content +1700,3452184,"src/extension.ts",27103,1,"",typescript,content +1701,3452184,"src/extension.ts",27102,0,"pars",typescript,content +1702,3452184,"src/extension.ts",27100,2,"",typescript,content +1703,3455591,"src/extension.ts",27074,0,"",typescript,selection_command +1704,3456277,"TERMINAL",0,0,"[11:15:20 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +1705,3456397,"TERMINAL",0,0,"[11:15:20 AM] Found 0 errors. Watching for file changes.\r\n\r\n",,terminal_output +1706,3457672,"src/extension.ts",27045,0,"",typescript,selection_command +1707,3457919,"src/extension.ts",27009,0,"",typescript,selection_command +1708,3457952,"src/extension.ts",26986,0,"",typescript,selection_command +1709,3457981,"src/extension.ts",26982,0,"",typescript,selection_command +1710,3458000,"src/extension.ts",26909,0,"",typescript,selection_command +1711,3458039,"src/extension.ts",26838,0,"",typescript,selection_command +1712,3464990,"src/extension.ts",26636,0,"",typescript,selection_command +1713,3465076,"src/extension.ts",26424,0,"",typescript,selection_command +1714,3465329,"src/extension.ts",26400,0,"",typescript,selection_command +1715,3465357,"src/extension.ts",26396,0,"",typescript,selection_command +1716,3465385,"src/extension.ts",26339,0,"",typescript,selection_command +1717,3465416,"src/extension.ts",26137,0,"",typescript,selection_command +1718,3465453,"src/extension.ts",25925,0,"",typescript,selection_command +1719,3465496,"src/extension.ts",25902,0,"",typescript,selection_command +1720,3465523,"src/extension.ts",25898,0,"",typescript,selection_command +1721,3465561,"src/extension.ts",25834,0,"",typescript,selection_command +1722,3465591,"src/extension.ts",25763,0,"",typescript,selection_command +1723,3465630,"src/extension.ts",25566,0,"",typescript,selection_command +1724,3465651,"src/extension.ts",25543,0,"",typescript,selection_command +1725,3465692,"src/extension.ts",25539,0,"",typescript,selection_command +1726,3465708,"src/extension.ts",25484,0,"",typescript,selection_command +1727,3465756,"src/extension.ts",25476,0,"",typescript,selection_command +1728,3465787,"src/extension.ts",25325,0,"",typescript,selection_command +1729,3465815,"src/extension.ts",25163,0,"",typescript,selection_command +1730,3465852,"src/extension.ts",25117,0,"",typescript,selection_command +1731,3465883,"src/extension.ts",25036,0,"",typescript,selection_command +1732,3465920,"src/extension.ts",25010,0,"",typescript,selection_command +1733,3465943,"src/extension.ts",24970,0,"",typescript,selection_command +1734,3465983,"src/extension.ts",24943,0,"",typescript,selection_command +1735,3466021,"src/extension.ts",24919,0,"",typescript,selection_command 
+1736,3466072,"src/extension.ts",24851,0,"",typescript,selection_command +1737,3466097,"src/extension.ts",24848,0,"",typescript,selection_command +1738,3466128,"src/extension.ts",24828,0,"",typescript,selection_command +1739,3466152,"src/extension.ts",24811,0,"",typescript,selection_command +1740,3466187,"src/extension.ts",24782,0,"",typescript,selection_command +1741,3466222,"src/extension.ts",24775,0,"",typescript,selection_command +1742,3466248,"src/extension.ts",24757,0,"",typescript,selection_command +1743,3466284,"src/extension.ts",24679,0,"",typescript,selection_command +1744,3466563,"src/extension.ts",24757,0,"",typescript,selection_command +1745,3466816,"src/extension.ts",24775,0,"",typescript,selection_command +1746,3466842,"src/extension.ts",24782,0,"",typescript,selection_command +1747,3466882,"src/extension.ts",24811,0,"",typescript,selection_command +1748,3466910,"src/extension.ts",24828,0,"",typescript,selection_command +1749,3466942,"src/extension.ts",24848,0,"",typescript,selection_command +1750,3466964,"src/extension.ts",24851,0,"",typescript,selection_command +1751,3467004,"src/extension.ts",24919,0,"",typescript,selection_command +1752,3467048,"src/extension.ts",24943,0,"",typescript,selection_command +1753,3467062,"src/extension.ts",24970,0,"",typescript,selection_command +1754,3467281,"src/extension.ts",24943,0,"",typescript,selection_command +1755,3467504,"src/extension.ts",24919,0,"",typescript,selection_command +1756,3467843,"src/extension.ts",24851,0,"",typescript,selection_command +1757,3479104,"src/extension.ts",24943,0,"",typescript,selection_command +1758,3479385,"src/extension.ts",24919,0,"",typescript,selection_command +1759,3479584,"src/extension.ts",24851,0,"",typescript,selection_command +1760,3480665,"src/extension.ts",98,0,"",typescript,selection_command +1761,3481797,"src/extension.ts",127,0,"",typescript,selection_command +1762,3482272,"src/extension.ts",126,30,"",typescript,content +1763,3483590,"src/extension.ts",97,0,"",typescript,selection_command +1764,3484020,"TERMINAL",0,0,"[11:15:48 AM] File change detected. Starting incremental compilation...\r\n\r\n[11:15:48 AM] Found 0 errors. 
Watching for file changes.\r\n\r\n",,terminal_output +1765,3484788,"src/extension.ts",1393,0,"",typescript,selection_command +1766,3488921,"src/extension.ts",27209,0,"",typescript,selection_command +1767,3489577,"src/extension.ts",24549,0,"",typescript,selection_keyboard +1768,3489674,"src/extension.ts",23199,0,"",typescript,selection_keyboard +1769,3489961,"src/extension.ts",21729,0,"",typescript,selection_keyboard +1770,3490067,"src/extension.ts",19800,0,"",typescript,selection_keyboard +1771,3490202,"src/extension.ts",18685,0,"",typescript,selection_keyboard +1772,3491323,"src/extension.ts",18761,0,"",typescript,selection_command +1773,3491545,"src/extension.ts",18766,0,"",typescript,selection_command +1774,3491546,"src/extension.ts",18769,0,"",typescript,selection_command +1775,3491575,"src/extension.ts",18788,0,"",typescript,selection_command +1776,3491611,"src/extension.ts",18790,0,"",typescript,selection_command +1777,3491638,"src/extension.ts",18791,0,"",typescript,selection_command +1778,3491663,"src/extension.ts",18826,0,"",typescript,selection_command +1779,3491682,"src/extension.ts",18858,0,"",typescript,selection_command +1780,3492013,"src/extension.ts",17321,0,"",typescript,selection_keyboard +1781,3492301,"src/extension.ts",15278,0,"",typescript,selection_keyboard +1782,3492433,"src/extension.ts",13000,0,"",typescript,selection_keyboard +1783,3492703,"src/extension.ts",10794,0,"",typescript,selection_keyboard +1784,3492719,"src/extension.ts",8697,0,"",typescript,selection_keyboard +1785,3492762,"src/extension.ts",6729,0,"",typescript,selection_keyboard +1786,3492829,"src/extension.ts",5042,0,"",typescript,selection_keyboard +1787,3492829,"src/extension.ts",2962,0,"",typescript,selection_keyboard +1788,3492837,"src/extension.ts",1526,0,"",typescript,selection_keyboard +1789,3492874,"src/extension.ts",0,0,"",typescript,selection_keyboard +1790,3493720,"src/extension.ts",27209,0,"",typescript,selection_command +1791,3494261,"src/extension.ts",24549,0,"",typescript,selection_keyboard +1792,3494936,"src/extension.ts",24552,0,"",typescript,selection_command +1793,3495104,"src/extension.ts",24554,0,"",typescript,selection_command +1794,3495412,"src/extension.ts",24555,0,"",typescript,selection_command +1795,3497884,"src/extension.ts",24625,0,"",typescript,selection_command +1796,3498035,"src/extension.ts",24649,0,"",typescript,selection_command +1797,3498230,"src/extension.ts",24727,0,"",typescript,selection_command +1798,3498449,"src/extension.ts",24745,0,"",typescript,selection_command +1799,3498465,"src/extension.ts",24752,0,"",typescript,selection_command +1800,3498497,"src/extension.ts",24781,0,"",typescript,selection_command +1801,3498499,"src/extension.ts",24798,0,"",typescript,selection_command +1802,3498616,"src/extension.ts",24818,0,"",typescript,selection_command +1803,3498720,"src/extension.ts",24821,0,"",typescript,selection_command +1804,3499032,"src/extension.ts",24889,0,"",typescript,selection_command +1805,3499175,"src/extension.ts",24913,0,"",typescript,selection_command +1806,3499325,"src/extension.ts",24940,0,"",typescript,selection_command +1807,3499622,"src/extension.ts",24913,0,"",typescript,selection_command +1808,3506129,"src/extension.ts",24889,0,"",typescript,selection_command +1809,3506303,"src/extension.ts",24890,0,"",typescript,selection_command +1810,3506469,"src/extension.ts",24897,0,"",typescript,selection_command +1811,3506667,"src/extension.ts",24898,0,"",typescript,selection_command 
+1812,3507091,"src/extension.ts",24922,0,"",typescript,selection_command +1813,3507132,"src/extension.ts",24949,0,"",typescript,selection_command +1814,3507671,"src/extension.ts",24940,0,"",typescript,selection_command +1815,3508109,"src/extension.ts",24980,0,"",typescript,selection_command +1816,3508261,"src/extension.ts",25006,0,"",typescript,selection_command +1817,3508397,"src/extension.ts",25087,0,"",typescript,selection_command +1818,3508716,"src/extension.ts",25006,0,"",typescript,selection_command +1819,3509015,"src/extension.ts",24980,0,"",typescript,selection_command +1820,3509852,"src/extension.ts",25006,0,"",typescript,selection_command +1821,3509926,"src/extension.ts",25087,0,"",typescript,selection_command +1822,3510317,"src/extension.ts",25006,0,"",typescript,selection_command +1823,3511937,"src/extension.ts",25009,0,"",typescript,selection_command +1824,3512184,"src/extension.ts",25015,0,"",typescript,selection_command +1825,3512278,"src/extension.ts",25026,0,"",typescript,selection_command +1826,3512572,"src/extension.ts",25028,0,"",typescript,selection_command +1827,3512828,"src/extension.ts",25033,0,"",typescript,selection_command +1828,3512870,"src/extension.ts",25034,0,"",typescript,selection_command +1829,3512870,"src/extension.ts",25041,0,"",typescript,selection_command +1830,3513444,"src/extension.ts",25042,0,"",typescript,selection_command +1831,3513669,"src/extension.ts",25048,0,"",typescript,selection_command +1832,3513917,"src/extension.ts",25049,0,"",typescript,selection_command +1833,3514027,"src/extension.ts",25059,0,"",typescript,selection_command +1834,3514072,"src/extension.ts",25061,0,"",typescript,selection_command +1835,3514174,"src/extension.ts",25063,0,"",typescript,selection_command +1836,3514307,"src/extension.ts",25069,0,"",typescript,selection_command +1837,3514441,"src/extension.ts",25070,0,"",typescript,selection_command +1838,3514463,"src/extension.ts",25081,0,"",typescript,selection_command +1839,3514495,"src/extension.ts",25083,0,"",typescript,selection_command +1840,3515355,"src/extension.ts",25006,0,"",typescript,selection_command +1841,3515615,"src/extension.ts",25009,0,"",typescript,selection_command +1842,3520636,"src/extension.ts",25090,0,"",typescript,selection_command +1843,3523067,"src/extension.ts",25136,0,"",typescript,selection_command +1844,3524493,"src/extension.ts",25293,0,"",typescript,selection_command +1845,3527049,"src/extension.ts",25133,0,"",typescript,selection_command +1846,3527052,"src/extension.ts",25295,0,"",typescript,selection_command +1847,3527057,"src/extension.ts",25444,0,"",typescript,selection_command +1848,3527092,"src/extension.ts",25295,0,"",typescript,selection_command +1849,3527311,"src/extension.ts",25446,0,"",typescript,selection_command +1850,3527377,"src/extension.ts",25454,0,"",typescript,selection_command +1851,3528014,"src/extension.ts",25509,0,"",typescript,selection_command +1852,3528014,"src/extension.ts",25513,0,"",typescript,selection_command +1853,3528015,"src/extension.ts",25536,0,"",typescript,selection_command +1854,3528075,"src/extension.ts",25731,0,"",typescript,selection_command +1855,3528827,"src/extension.ts",25536,0,"",typescript,selection_command +1856,3529302,"src/extension.ts",25733,0,"",typescript,selection_command +1857,3529527,"src/extension.ts",25804,0,"",typescript,selection_command +1858,3530233,"src/extension.ts",25868,0,"",typescript,selection_command +1859,3530233,"src/extension.ts",25872,0,"",typescript,selection_command 
+1860,3530234,"src/extension.ts",25895,0,"",typescript,selection_command +1861,3530610,"src/extension.ts",26107,0,"",typescript,selection_command +1862,3530617,"src/extension.ts",26309,0,"",typescript,selection_command +1863,3530865,"src/extension.ts",26366,0,"",typescript,selection_command +1864,3530917,"src/extension.ts",26370,0,"",typescript,selection_command +1865,3530931,"src/extension.ts",26394,0,"",typescript,selection_command +1866,3530962,"src/extension.ts",26606,0,"",typescript,selection_command +1867,3530992,"src/extension.ts",26808,0,"",typescript,selection_command +1868,3531033,"src/extension.ts",26879,0,"",typescript,selection_command +1869,3532159,"src/extension.ts",24277,0,"",typescript,selection_keyboard +1870,3532499,"src/extension.ts",22947,0,"",typescript,selection_keyboard +1871,3532643,"src/extension.ts",21121,0,"",typescript,selection_keyboard +1872,3532795,"src/extension.ts",19558,0,"",typescript,selection_keyboard +1873,3532942,"src/extension.ts",18359,0,"",typescript,selection_keyboard +1874,3535450,"src/extension.ts",16496,0,"",typescript,selection_keyboard +1875,3535692,"src/extension.ts",14497,0,"",typescript,selection_keyboard +1876,3535712,"src/extension.ts",12240,0,"",typescript,selection_keyboard +1877,3535737,"src/extension.ts",9736,0,"",typescript,selection_keyboard +1878,3535772,"src/extension.ts",7834,0,"",typescript,selection_keyboard +1879,3535812,"src/extension.ts",5954,0,"",typescript,selection_keyboard +1880,3535843,"src/extension.ts",4245,0,"",typescript,selection_keyboard +1881,3536045,"src/extension.ts",2411,0,"",typescript,selection_keyboard +1882,3536199,"src/extension.ts",828,0,"",typescript,selection_keyboard +1883,3536359,"src/extension.ts",0,0,"",typescript,selection_keyboard +1884,3536699,"src/extension.ts",1598,0,"",typescript,selection_keyboard +1885,3538679,"src/extension.ts",1580,0,"",typescript,selection_command +1886,3538761,"src/extension.ts",1575,0,"",typescript,selection_command +1887,3539119,"src/extension.ts",1554,0,"",typescript,selection_command +1888,3539497,"src/extension.ts",1526,0,"",typescript,selection_command +1889,3539590,"src/extension.ts",1472,0,"",typescript,selection_command +1890,3539746,"src/extension.ts",1454,0,"",typescript,selection_command +1891,3539901,"src/extension.ts",1393,0,"",typescript,selection_command +1892,3540029,"src/extension.ts",1357,0,"",typescript,selection_command +1893,3577489,"src/extension.ts",1393,0,"",typescript,selection_command +1894,3577724,"src/extension.ts",1454,0,"",typescript,selection_command +1895,3577751,"src/extension.ts",1472,0,"",typescript,selection_command +1896,3577788,"src/extension.ts",1526,0,"",typescript,selection_command +1897,3578251,"src/extension.ts",1530,0,"",typescript,selection_command +1898,3578533,"src/extension.ts",1542,0,"",typescript,selection_command +1899,3578891,"src/extension.ts",1544,0,"",typescript,selection_command +1900,3579077,"src/extension.ts",1545,0,"",typescript,selection_command +1901,3579820,"src/extension.ts",1544,0,"",typescript,selection_command +1902,3579868,"src/extension.ts",1542,0,"",typescript,selection_command +1903,3580016,"src/extension.ts",1530,0,"",typescript,selection_command +1904,3582087,"src/extension.ts",3441,0,"",typescript,selection_command +1905,3583207,"src/extension.ts",1530,0,"",typescript,selection_command +1906,3586193,"src/extension.ts",3441,0,"",typescript,selection_command +1907,3586194,"src/extension.ts",1530,0,"",typescript,selection_command 
+1908,3586195,"src/extension.ts",3441,0,"",typescript,selection_command +1909,3586669,"src/extension.ts",1530,0,"",typescript,selection_command +1910,3591806,"src/extension.ts",3441,0,"",typescript,selection_command +1911,3592256,"src/extension.ts",8218,0,"",typescript,selection_command +1912,3595410,"src/extension.ts",16040,0,"",typescript,selection_command +1913,3596052,"src/extension.ts",8218,0,"",typescript,selection_command +1914,3604091,"src/extension.ts",16040,0,"",typescript,selection_command +1915,3605203,"src/extension.ts",1436,0,"",typescript,selection_command +1916,3615076,"src/extension.ts",1530,0,"",typescript,selection_command +1917,3615892,"src/extension.ts",3441,0,"",typescript,selection_command +1918,3616744,"src/extension.ts",8218,0,"",typescript,selection_command +1919,3617232,"src/extension.ts",16040,0,"",typescript,selection_command +1920,3617938,"src/extension.ts",1436,0,"",typescript,selection_command +1921,3618360,"src/extension.ts",1530,0,"",typescript,selection_command +1922,3618802,"src/extension.ts",3441,0,"",typescript,selection_command +1923,3621269,"src/extension.ts",8218,0,"",typescript,selection_command +1924,3621601,"src/extension.ts",16040,0,"",typescript,selection_command +1925,3622222,"src/extension.ts",1436,0,"",typescript,selection_command +1926,3623258,"src/extension.ts",1530,0,"",typescript,selection_command +1927,3624493,"src/extension.ts",1526,27,"\t\t\t\tcurrentPlan = [single];",typescript,selection_command +1928,3632720,"src/extension.ts",1530,0,"",typescript,selection_command +1929,3634528,"src/extension.ts",1558,0,"",typescript,selection_command +1930,3635153,"src/extension.ts",1588,0,"",typescript,selection_command +1931,3635776,"src/extension.ts",1683,0,"",typescript,selection_command +1932,3644682,"src/extension.ts",1715,0,"",typescript,selection_command +1933,3644953,"src/extension.ts",1683,0,"",typescript,selection_command +1934,3648874,"src/extension.ts",18800,0,"",typescript,selection_command +1935,3653987,"src/extension.ts",1683,0,"",typescript,selection_command +1936,3738600,"src/extension.ts",19026,148,"\tconst next = getHardcodedNextAction(editor);\n\tif (next) {\n\t\tshowPreviewUI([next]);\n\t} else {",typescript,content +1937,3738600,"src/extension.ts",1693,0,"\t\t\tadvanceMockStep();\n",typescript,content +1938,3738600,"src/extension.ts",1393,186,"\t\t\tconst action = currentPlan?.[0] ?? getHardcodedNextAction(editor);",typescript,content +1939,3739522,"src/extension.ts",18931,93,"\ttry {\n\t\tconst next = await requestModelActions(editor);\n\t\tif (next) {\n\t\t\tshowPreviewUI([next]);\n\t\t} else {\n\t\t\thidePreviewUI();\n\t\t}\n\t} catch (err) {",typescript,content +1940,3739523,"src/extension.ts",15944,20,"\tcurrentAction = action;",typescript,content +1941,3739523,"src/extension.ts",8121,21,"\t\tcurrentAction = action;",typescript,content +1942,3739523,"src/extension.ts",7739,270,"\t// Only preview certain action kinds\n\tconst next = (action.kind === 'editInsert' || action.kind === 'editDelete' || action.kind === 'editReplace' || action.kind === 'terminalSendText' || action.kind === 'setSelections') ? 
action : undefined;",typescript,content +1943,3739523,"src/extension.ts",7580,53,"function showPreviewUI(action: PlannedAction): void {",typescript,content +1944,3739523,"src/extension.ts",3642,1486,"\tif (action.kind === 'showTextDocument') {\n\t\tawait vscode.window.showTextDocument(doc);\n\t\treturn;\n\t}\n\tif (action.kind === 'setSelections') {\n\t\teditor.selections = action.selections.map(s => new vscode.Selection(\n\t\t\tnew vscode.Position(s.start[0], s.start[1]),\n\t\t\tnew vscode.Position(s.end[0], s.end[1])\n\t\t));\n\t\tif (editor.selections.length > 0) {\n\t\t\teditor.revealRange(editor.selections[0], vscode.TextEditorRevealType.InCenterIfOutsideViewport);\n\t\t}\n\t\treturn;\n\t}\n\tif (action.kind === 'editInsert') {\n\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.insert(new vscode.Position(action.position[0], action.position[1]), action.text));\n\t\treturn;\n\t}\n\tif (action.kind === 'editDelete') {\n\t\tconst range = new vscode.Range(\n\t\t\tnew vscode.Position(action.range.start[0], action.range.start[1]),\n\t\t\tnew vscode.Position(action.range.end[0], action.range.end[1])\n\t\t);\n\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.delete(range));\n\t\treturn;\n\t}\n\tif (action.kind === 'editReplace') {\n\t\tconst range = new vscode.Range(\n\t\t\tnew vscode.Position(action.range.start[0], action.range.start[1]),\n\t\t\tnew vscode.Position(action.range.end[0], action.range.end[1])\n\t\t);\n\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.replace(range, action.text));\n\t\treturn;\n\t}\n\tif (action.kind === 'terminalShow') {\n\t\tterm.show();\n\t\treturn;\n\t}\n\tif (action.kind === 'terminalSendText') {\n\t\tterm.sendText(action.text);\n\t\treturn;",typescript,content +1945,3739523,"src/extension.ts",3389,66,"async function executeAction(action: PlannedAction): Promise {",typescript,content +1946,3739523,"src/extension.ts",3342,45,"let currentAction: PlannedAction | undefined;",typescript,content +1947,3739523,"src/extension.ts",1544,53,"\t\t\tawait executeAction(action);",typescript,content +1948,3739523,"src/extension.ts",1393,69,"\t\t\tlet action: PlannedAction | undefined = currentAction;\n\t\t\tif (!action) {\n\t\t\t\tconst single = await requestModelActions(editor);\n\t\t\t\tcurrentAction = single;\n\t\t\t\taction = single;\n\t\t\t}",typescript,content +1949,3744098,"TERMINAL",0,0,"[11:20:06 AM] File change detected. Starting incremental compilation...\r\n\r\nsrc/extension.ts:494:18 - error TS2345: Argument of type 'PlannedAction[]' is not assignable to parameter of type 'PlannedAction'.\r\n\r\n494 showPreviewUI([next]);\r\n   ~~~~~~\r\n\r\n[11:20:06 AM] Found 1 error. 
Watching for file changes.\r\n\r\n",,terminal_output +1950,3752746,"src/extension.ts",18916,148,"\tconst next = getHardcodedNextAction(editor);\n\tif (next) {\n\t\tshowPreviewUI([next]);\n\t} else {",typescript,content +1951,3752747,"src/extension.ts",15925,24,"\tcurrentPlan = plan;",typescript,content +1952,3752747,"src/extension.ts",8098,25,"\t\tcurrentPlan = plan;",typescript,content +1953,3752747,"src/extension.ts",7744,242,"\t// Only preview the next text edit action (insert/delete/replace/terminalSendText/setSelections)\n\tconst next = plan.find(a => a.kind === 'editInsert' || a.kind === 'editDelete' || a.kind === 'editReplace' || a.kind === 'terminalSendText' || a.kind === 'setSelections');",typescript,content +1954,3752747,"src/extension.ts",7585,53,"function showPreviewUI(plan: PlannedAction[]): void {",typescript,content +1955,3752747,"src/extension.ts",3736,1397,"\tfor (const action of plan) {\n\t\tif (action.kind === 'showTextDocument') {\n\t\t\tawait vscode.window.showTextDocument(doc);\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'setSelections') {\n\t\t\teditor.selections = action.selections.map(s => new vscode.Selection(\n\t\t\t\tnew vscode.Position(s.start[0], s.start[1]),\n\t\t\t\tnew vscode.Position(s.end[0], s.end[1])\n\t\t\t));\n\t\t\tif (editor.selections.length > 0) {\n\t\t\t\teditor.revealRange(editor.selections[0], vscode.TextEditorRevealType.InCenterIfOutsideViewport);\n\t\t\t}\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'editInsert') {\n\t\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.insert(new vscode.Position(action.position[0], action.position[1]), action.text));\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'editDelete') {\n\t\t\tconst range = new vscode.Range(\n\t\t\t\tnew vscode.Position(action.range.start[0], action.range.start[1]),\n\t\t\t\tnew vscode.Position(action.range.end[0], action.range.end[1])\n\t\t\t);\n\t\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.delete(range));\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'editReplace') {\n\t\t\tconst range = new vscode.Range(\n\t\t\t\tnew vscode.Position(action.range.start[0], action.range.start[1]),\n\t\t\t\tnew vscode.Position(action.range.end[0], action.range.end[1])\n\t\t\t);\n\t\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.replace(range, action.text));\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'terminalShow') {\n\t\t\tterm.show();\n\t\t\tcontinue;\n\t\t}\n\t\tif (action.kind === 'terminalSendText') {\n\t\t\tterm.sendText(action.text);\n\t\t\tcontinue;\n\t\t}",typescript,content +1956,3752747,"src/extension.ts",3481,68,"async function executePlan(plan: PlannedAction[]): Promise {",typescript,content +1957,3752747,"src/extension.ts",3434,45,"let currentPlan: PlannedAction[] | undefined;",typescript,content +1958,3752747,"src/extension.ts",1658,31,"\t\t\tawait executePlan([action]);\n\t\t\tadvanceMockStep();",typescript,content +1959,3752747,"src/extension.ts",1393,183,"\t\t\tconst action = currentPlan?.[0] ?? 
getHardcodedNextAction(editor);",typescript,content +1960,3752922,"src/extension.ts",18931,93,"\ttry {\n\t\tconst next = await requestModelActions(editor);\n\t\tif (next) {\n\t\t\tshowPreviewUI(next);\n\t\t} else {\n\t\t\thidePreviewUI();\n\t\t}\n\t} catch (err) {",typescript,content +1961,3752922,"src/extension.ts",15944,20,"\tcurrentAction = action;",typescript,content +1962,3752922,"src/extension.ts",8121,21,"\t\tcurrentAction = action;",typescript,content +1963,3752922,"src/extension.ts",7739,270,"\t// Only preview certain action kinds\n\tconst next = (action.kind === 'editInsert' || action.kind === 'editDelete' || action.kind === 'editReplace' || action.kind === 'terminalSendText' || action.kind === 'setSelections') ? action : undefined;",typescript,content +1964,3752922,"src/extension.ts",7580,53,"function showPreviewUI(action: PlannedAction): void {",typescript,content +1965,3752922,"src/extension.ts",3642,1486,"\tif (action.kind === 'showTextDocument') {\n\t\tawait vscode.window.showTextDocument(doc);\n\t\treturn;\n\t}\n\tif (action.kind === 'setSelections') {\n\t\teditor.selections = action.selections.map(s => new vscode.Selection(\n\t\t\tnew vscode.Position(s.start[0], s.start[1]),\n\t\t\tnew vscode.Position(s.end[0], s.end[1])\n\t\t));\n\t\tif (editor.selections.length > 0) {\n\t\t\teditor.revealRange(editor.selections[0], vscode.TextEditorRevealType.InCenterIfOutsideViewport);\n\t\t}\n\t\treturn;\n\t}\n\tif (action.kind === 'editInsert') {\n\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.insert(new vscode.Position(action.position[0], action.position[1]), action.text));\n\t\treturn;\n\t}\n\tif (action.kind === 'editDelete') {\n\t\tconst range = new vscode.Range(\n\t\t\tnew vscode.Position(action.range.start[0], action.range.start[1]),\n\t\t\tnew vscode.Position(action.range.end[0], action.range.end[1])\n\t\t);\n\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.delete(range));\n\t\treturn;\n\t}\n\tif (action.kind === 'editReplace') {\n\t\tconst range = new vscode.Range(\n\t\t\tnew vscode.Position(action.range.start[0], action.range.start[1]),\n\t\t\tnew vscode.Position(action.range.end[0], action.range.end[1])\n\t\t);\n\t\tawait editor.edit((e: vscode.TextEditorEdit) => e.replace(range, action.text));\n\t\treturn;\n\t}\n\tif (action.kind === 'terminalShow') {\n\t\tterm.show();\n\t\treturn;\n\t}\n\tif (action.kind === 'terminalSendText') {\n\t\tterm.sendText(action.text);\n\t\treturn;",typescript,content +1966,3752922,"src/extension.ts",3389,66,"async function executeAction(action: PlannedAction): Promise {",typescript,content +1967,3752922,"src/extension.ts",3342,45,"let currentAction: PlannedAction | undefined;",typescript,content +1968,3752922,"src/extension.ts",1544,53,"\t\t\tawait executeAction(action);",typescript,content +1969,3752922,"src/extension.ts",1393,69,"\t\t\tlet action: PlannedAction | undefined = currentAction;\n\t\t\tif (!action) {\n\t\t\t\tconst single = await requestModelActions(editor);\n\t\t\t\tcurrentAction = single;\n\t\t\t\taction = single;\n\t\t\t}",typescript,content +1970,3753698,"TERMINAL",0,0,"[11:20:18 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +1971,3753781,"TERMINAL",0,0,"[11:20:18 AM] Found 0 errors. 
Watching for file changes.\r\n\r\n",,terminal_output +1972,4162297,"src/extension.ts",1356,0,"",typescript,selection_mouse +1973,4162309,"src/extension.ts",1355,0,"",typescript,selection_command +1974,4195549,"src/extension.ts",1391,0,"",typescript,selection_command +1975,4195550,"src/extension.ts",1439,0,"",typescript,selection_command +1976,4195551,"src/extension.ts",1391,0,"",typescript,selection_command +1977,4195552,"src/extension.ts",1439,0,"",typescript,selection_command +1978,4195552,"src/extension.ts",1467,0,"",typescript,selection_command +1979,4195552,"src/extension.ts",1515,0,"",typescript,selection_command +1980,4195553,"src/extension.ts",1549,0,"",typescript,selection_command +1981,4195553,"src/extension.ts",1570,0,"",typescript,selection_command +1982,4195554,"src/extension.ts",1575,0,"",typescript,selection_command +1983,4195554,"src/extension.ts",1593,0,"",typescript,selection_command +1984,4195555,"src/extension.ts",1614,0,"",typescript,selection_command +1985,4195555,"src/extension.ts",1626,0,"",typescript,selection_command +1986,4195556,"src/extension.ts",1631,0,"",typescript,selection_command +1987,4195556,"src/extension.ts",1656,0,"",typescript,selection_command +1988,4195592,"src/extension.ts",1688,0,"",typescript,selection_command +1989,4195965,"src/extension.ts",1656,0,"",typescript,selection_command +1990,4196411,"src/extension.ts",1688,0,"",typescript,selection_command +1991,4196710,"src/extension.ts",1656,0,"",typescript,selection_command +1992,4216349,"src/extension.ts",1688,0,"",typescript,selection_command +1993,4216816,"src/extension.ts",1656,0,"",typescript,selection_command +1994,4225126,"src/extension.ts",1688,0,"",typescript,selection_command +1995,4225293,"src/extension.ts",1656,0,"",typescript,selection_command +1996,4235708,"src/extension.ts",3434,0,"",typescript,selection_command +1997,4237408,"src/extension.ts",1690,0,"",typescript,selection_keyboard +1998,4237540,"src/extension.ts",127,0,"",typescript,selection_keyboard +1999,4238544,"src/extension.ts",154,0,"",typescript,selection_command +2000,4238787,"src/extension.ts",174,0,"",typescript,selection_command +2001,4238918,"src/extension.ts",175,0,"",typescript,selection_command +2002,4238933,"src/extension.ts",236,0,"",typescript,selection_command +2003,4238940,"src/extension.ts",237,0,"",typescript,selection_command +2004,4238988,"src/extension.ts",288,0,"",typescript,selection_command +2005,4238996,"src/extension.ts",289,0,"",typescript,selection_command +2006,4239061,"src/extension.ts",344,0,"",typescript,selection_command +2007,4239121,"src/extension.ts",360,0,"",typescript,selection_command +2008,4239132,"src/extension.ts",435,0,"",typescript,selection_command +2009,4239140,"src/extension.ts",514,0,"",typescript,selection_command +2010,4239174,"src/extension.ts",537,0,"",typescript,selection_command +2011,4239174,"src/extension.ts",600,0,"",typescript,selection_command +2012,4239189,"src/extension.ts",653,0,"",typescript,selection_command +2013,4239220,"src/extension.ts",672,0,"",typescript,selection_command +2014,4239220,"src/extension.ts",676,0,"",typescript,selection_command +2015,4239239,"src/extension.ts",737,0,"",typescript,selection_command +2016,4239255,"src/extension.ts",788,0,"",typescript,selection_command +2017,4239286,"src/extension.ts",807,0,"",typescript,selection_command +2018,4239286,"src/extension.ts",811,0,"",typescript,selection_command +2019,4239356,"src/extension.ts",828,0,"",typescript,selection_command 
+2020,4239395,"src/extension.ts",931,0,"",typescript,selection_command +2021,4239416,"src/extension.ts",935,0,"",typescript,selection_command +2022,4239488,"src/extension.ts",1024,0,"",typescript,selection_command +2023,4239488,"src/extension.ts",1025,0,"",typescript,selection_command +2024,4239527,"src/extension.ts",1103,0,"",typescript,selection_command +2025,4239528,"src/extension.ts",1126,0,"",typescript,selection_command +2026,4239554,"src/extension.ts",1131,0,"",typescript,selection_command +2027,4239598,"src/extension.ts",1132,0,"",typescript,selection_command +2028,4239627,"src/extension.ts",1220,0,"",typescript,selection_command +2029,4239668,"src/extension.ts",1269,0,"",typescript,selection_command +2030,4239688,"src/extension.ts",1286,0,"",typescript,selection_command +2031,4239735,"src/extension.ts",1297,0,"",typescript,selection_command +2032,4239786,"src/extension.ts",1301,0,"",typescript,selection_command +2033,4239837,"src/extension.ts",1309,0,"",typescript,selection_command +2034,4239848,"src/extension.ts",1357,0,"",typescript,selection_command +2035,4239886,"src/extension.ts",1393,0,"",typescript,selection_command +2036,4239934,"src/extension.ts",1451,0,"",typescript,selection_command +2037,4240002,"src/extension.ts",1469,0,"",typescript,selection_command +2038,4240010,"src/extension.ts",1523,0,"",typescript,selection_command +2039,4240039,"src/extension.ts",1551,0,"",typescript,selection_command +2040,4240102,"src/extension.ts",1572,0,"",typescript,selection_command +2041,4240113,"src/extension.ts",1577,0,"",typescript,selection_command +2042,4240148,"src/extension.ts",1595,0,"",typescript,selection_command +2043,4240159,"src/extension.ts",1616,0,"",typescript,selection_command +2044,4240243,"src/extension.ts",1628,0,"",typescript,selection_command +2045,4240243,"src/extension.ts",1633,0,"",typescript,selection_command +2046,4240332,"src/extension.ts",1628,0,"",typescript,selection_command +2047,4240566,"src/extension.ts",1616,0,"",typescript,selection_command +2048,4240612,"src/extension.ts",1595,0,"",typescript,selection_command +2049,4240644,"src/extension.ts",1577,0,"",typescript,selection_command +2050,4240762,"src/extension.ts",1572,0,"",typescript,selection_command +2051,4240762,"src/extension.ts",1551,0,"",typescript,selection_command +2052,4247397,"src/extension.ts",1532,0,"",typescript,selection_mouse +2053,4249018,"src/extension.ts",1558,0,"",typescript,selection_mouse +2054,4260715,"src/extension.ts",1530,0,"",typescript,selection_command +2055,4260875,"src/extension.ts",1476,0,"",typescript,selection_command +2056,4261030,"src/extension.ts",1458,0,"",typescript,selection_command +2057,4261168,"src/extension.ts",1400,0,"",typescript,selection_command +2058,4261393,"src/extension.ts",1364,0,"",typescript,selection_command +2059,4276866,"src/extension.ts",3434,0,"",typescript,selection_command +2060,4282523,"src/extension.ts",3481,0,"",typescript,selection_command +2061,4283457,"src/extension.ts",3736,0,"",typescript,selection_command +2062,4375672,"src/extension.ts",7585,0,"",typescript,selection_command +2063,4391004,"src/extension.ts",7744,0,"",typescript,selection_command +2064,4393427,"src/extension.ts",7782,0,"",typescript,selection_command +2065,4393586,"src/extension.ts",7744,0,"",typescript,selection_command +2066,4397379,"src/extension.ts",7782,0,"",typescript,selection_command +2067,4397628,"src/extension.ts",7744,0,"",typescript,selection_command +2068,4398049,"src/extension.ts",7745,0,"",typescript,selection_command 
+2069,4398392,"src/extension.ts",7748,0,"",typescript,selection_command +2070,4398911,"src/extension.ts",7748,33,"",typescript,content +2071,4399317,"src/extension.ts",7748,0,"T",typescript,content +2072,4399318,"src/extension.ts",7749,0,"",typescript,selection_keyboard +2073,4399427,"src/extension.ts",7749,0,"O",typescript,content +2074,4399427,"src/extension.ts",7750,0,"",typescript,selection_keyboard +2075,4399585,"src/extension.ts",7750,0,"D",typescript,content +2076,4399585,"src/extension.ts",7751,0,"",typescript,selection_keyboard +2077,4399620,"src/extension.ts",7751,0,"O",typescript,content +2078,4399621,"src/extension.ts",7752,0,"",typescript,selection_keyboard +2079,4400571,"src/extension.ts",7752,0," ",typescript,content +2080,4400571,"src/extension.ts",7753,0,"",typescript,selection_keyboard +2081,4400779,"src/extension.ts",7753,0,"()",typescript,content +2082,4400779,"src/extension.ts",7754,0,"",typescript,selection_keyboard +2083,4400866,"src/extension.ts",7754,1,")",typescript,content +2084,4400866,"src/extension.ts",7755,0,"",typescript,selection_keyboard +2085,4401671,"src/extension.ts",7753,2,"",typescript,content +2086,4401683,"src/extension.ts",7748,5,"",typescript,content +2087,4403058,"src/extension.ts",7748,0,"F",typescript,content +2088,4403059,"src/extension.ts",7749,0,"",typescript,selection_keyboard +2089,4403059,"src/extension.ts",7749,0,"I",typescript,content +2090,4403059,"src/extension.ts",7750,0,"",typescript,selection_keyboard +2091,4403490,"src/extension.ts",7750,0,"X",typescript,content +2092,4403490,"src/extension.ts",7751,0,"",typescript,selection_keyboard +2093,4403490,"src/extension.ts",7751,0,"M",typescript,content +2094,4403491,"src/extension.ts",7752,0,"",typescript,selection_keyboard +2095,4403492,"src/extension.ts",7752,0,"E",typescript,content +2096,4403492,"src/extension.ts",7753,0,"",typescript,selection_keyboard +2097,4403694,"src/extension.ts",7753,0," ",typescript,content +2098,4403694,"src/extension.ts",7754,0,"",typescript,selection_keyboard +2099,4403924,"src/extension.ts",7754,0,"()",typescript,content +2100,4403924,"src/extension.ts",7755,0,"",typescript,selection_keyboard +2101,4404147,"src/extension.ts",7755,1,")",typescript,content +2102,4404147,"src/extension.ts",7756,0,"",typescript,selection_keyboard +2103,4405098,"src/extension.ts",7755,0,"",typescript,selection_command +2104,4405520,"src/extension.ts",7755,0,"f",typescript,content +2105,4405520,"src/extension.ts",7756,0,"",typescript,selection_keyboard +2106,4405872,"src/extension.ts",7756,0,".",typescript,content +2107,4405873,"src/extension.ts",7757,0,"",typescript,selection_keyboard +2108,4406080,"src/extension.ts",7757,0,"s",typescript,content +2109,4406080,"src/extension.ts",7758,0,"",typescript,selection_keyboard +2110,4406101,"src/extension.ts",7758,0,"r",typescript,content +2111,4406102,"src/extension.ts",7759,0,"",typescript,selection_keyboard +2112,4406243,"src/extension.ts",7759,0,"a",typescript,content +2113,4406243,"src/extension.ts",7760,0,"",typescript,selection_keyboard +2114,4406266,"src/extension.ts",7760,0,"m",typescript,content +2115,4406266,"src/extension.ts",7761,0,"",typescript,selection_keyboard +2116,4406335,"src/extension.ts",7761,0,"b",typescript,content +2117,4406335,"src/extension.ts",7762,0,"",typescript,selection_keyboard +2118,4406485,"src/extension.ts",7762,0,"i",typescript,content +2119,4406485,"src/extension.ts",7763,0,"",typescript,selection_keyboard +2120,4406584,"src/extension.ts",7763,0,"c",typescript,content 
+2121,4406584,"src/extension.ts",7764,0,"",typescript,selection_keyboard +2122,4406761,"src/extension.ts",7764,0,"a",typescript,content +2123,4406761,"src/extension.ts",7765,0,"",typescript,selection_keyboard +2124,4406857,"src/extension.ts",7765,0,"l",typescript,content +2125,4406857,"src/extension.ts",7766,0,"",typescript,selection_keyboard +2126,4407005,"src/extension.ts",7765,0,"",typescript,selection_command +2127,4407331,"src/extension.ts",7767,0,"",typescript,selection_command +2128,4408021,"src/extension.ts",7767,0,":",typescript,content +2129,4408021,"src/extension.ts",7768,0,"",typescript,selection_keyboard +2130,4408253,"src/extension.ts",7768,0," ",typescript,content +2131,4408253,"src/extension.ts",7769,0,"",typescript,selection_keyboard +2132,4411100,"src/extension.ts",7769,0,"a",typescript,content +2133,4411101,"src/extension.ts",7770,0,"",typescript,selection_keyboard +2134,4411154,"src/extension.ts",7770,0,"d",typescript,content +2135,4411155,"src/extension.ts",7771,0,"",typescript,selection_keyboard +2136,4411318,"src/extension.ts",7771,0,"d",typescript,content +2137,4411318,"src/extension.ts",7772,0,"",typescript,selection_keyboard +2138,4411405,"src/extension.ts",7772,0," ",typescript,content +2139,4411405,"src/extension.ts",7773,0,"",typescript,selection_keyboard +2140,4417958,"src/extension.ts",7773,0,"f",typescript,content +2141,4417958,"src/extension.ts",7774,0,"",typescript,selection_keyboard +2142,4418044,"src/extension.ts",7774,0,"i",typescript,content +2143,4418045,"src/extension.ts",7775,0,"",typescript,selection_keyboard +2144,4420729,"src/extension.ts",7773,2,"",typescript,content +2145,4420862,"src/extension.ts",7769,4,"",typescript,content +2146,4421127,"src/extension.ts",7769,0,"a",typescript,content +2147,4421127,"src/extension.ts",7770,0,"",typescript,selection_keyboard +2148,4421164,"src/extension.ts",7770,0,"d",typescript,content +2149,4421164,"src/extension.ts",7771,0,"",typescript,selection_keyboard +2150,4421298,"src/extension.ts",7771,0,"d",typescript,content +2151,4421298,"src/extension.ts",7772,0,"",typescript,selection_keyboard +2152,4421430,"src/extension.ts",7772,0," ",typescript,content +2153,4421431,"src/extension.ts",7773,0,"",typescript,selection_keyboard +2154,4421495,"src/extension.ts",7773,0,"f",typescript,content +2155,4421495,"src/extension.ts",7774,0,"",typescript,selection_keyboard +2156,4421515,"src/extension.ts",7774,0,"i",typescript,content +2157,4421515,"src/extension.ts",7775,0,"",typescript,selection_keyboard +2158,4421696,"src/extension.ts",7775,0,"l",typescript,content +2159,4421697,"src/extension.ts",7776,0,"",typescript,selection_keyboard +2160,4421743,"src/extension.ts",7776,0,"e",typescript,content +2161,4421743,"src/extension.ts",7777,0,"",typescript,selection_keyboard +2162,4421861,"src/extension.ts",7777,0," ",typescript,content +2163,4421861,"src/extension.ts",7778,0,"",typescript,selection_keyboard +2164,4422001,"src/extension.ts",7778,0,"s",typescript,content +2165,4422001,"src/extension.ts",7779,0,"",typescript,selection_keyboard +2166,4422018,"src/extension.ts",7779,0,"w",typescript,content +2167,4422018,"src/extension.ts",7780,0,"",typescript,selection_keyboard +2168,4422081,"src/extension.ts",7780,0,"i",typescript,content +2169,4422081,"src/extension.ts",7781,0,"",typescript,selection_keyboard +2170,4422182,"src/extension.ts",7781,0,"t",typescript,content +2171,4422183,"src/extension.ts",7782,0,"",typescript,selection_keyboard +2172,4422412,"src/extension.ts",7782,0,"c",typescript,content 
+2173,4422412,"src/extension.ts",7783,0,"",typescript,selection_keyboard +2174,4422482,"src/extension.ts",7783,0,"h",typescript,content +2175,4422483,"src/extension.ts",7784,0,"",typescript,selection_keyboard +2176,4423625,"src/extension.ts",7784,0," logic here",typescript,content +2177,4424851,"src/extension.ts",7794,0,"",typescript,selection_command +2178,4425529,"src/extension.ts",7846,0,"",typescript,selection_command +2179,4426138,"src/extension.ts",7999,0,"",typescript,selection_command +2180,4427387,"src/extension.ts",7796,0,"",typescript,selection_command +2181,4427752,"src/extension.ts",7744,0,"",typescript,selection_command +2182,4428160,"TERMINAL",0,0,"[11:31:32 AM] File change detected. Starting incremental compilation...\r\n\r\n[11:31:32 AM] Found 0 errors. Watching for file changes.\r\n\r\n",,terminal_output +2183,4428592,"src/extension.ts",7744,51,"\t",typescript,content +2184,4428593,"src/extension.ts",7744,1,"",typescript,content +2185,4429655,"src/extension.ts",7744,0,"\t// FIXME (f.srambical): add file switch logic here",typescript,content +2186,4429748,"src/extension.ts",7744,0,"",typescript,selection_command +2187,4430370,"src/extension.ts",7795,0,"",typescript,selection_command +2188,4430886,"src/extension.ts",7791,4,"",typescript,content +2189,4431065,"src/extension.ts",7785,6,"",typescript,content +2190,4431332,"src/extension.ts",7784,0,"",typescript,selection_command +2191,4434057,"src/extension.ts",7744,0,"",typescript,selection_command +2192,4434178,"src/extension.ts",7745,0,"",typescript,selection_command +2193,4435616,"src/extension.ts",7748,0,"",typescript,selection_command +2194,4436030,"TERMINAL",0,0,"[11:31:40 AM] File change detected. Starting incremental compilation...\r\n\r\n",,terminal_output +2195,4436179,"TERMINAL",0,0,"[11:31:40 AM] Found 0 errors. 
Watching for file changes.\r\n\r\n",,terminal_output +2196,4441607,"src/extension.ts",8102,0,"",typescript,selection_command +2197,4444726,"src/extension.ts",15929,0,"",typescript,selection_command +2198,4445528,"src/extension.ts",18920,0,"",typescript,selection_command +2199,4452162,"src/extension.ts",20080,0,"",typescript,selection_keyboard +2200,4452313,"src/extension.ts",22129,0,"",typescript,selection_keyboard +2201,4452536,"src/extension.ts",23502,0,"",typescript,selection_keyboard +2202,4452831,"src/extension.ts",24805,0,"",typescript,selection_keyboard +2203,4453006,"src/extension.ts",27102,0,"",typescript,selection_keyboard +2204,4453539,"src/extension.ts",24441,0,"",typescript,selection_keyboard +2205,4453908,"src/extension.ts",23091,0,"",typescript,selection_keyboard +2206,4454017,"src/extension.ts",21621,0,"",typescript,selection_keyboard +2207,4454300,"src/extension.ts",19692,0,"",typescript,selection_keyboard +2208,4454321,"src/extension.ts",18579,0,"",typescript,selection_keyboard +2209,4454457,"src/extension.ts",16886,0,"",typescript,selection_keyboard +2210,4454650,"src/extension.ts",14766,0,"",typescript,selection_keyboard +2211,4454676,"src/extension.ts",12628,0,"",typescript,selection_keyboard +2212,4454717,"src/extension.ts",10408,0,"",typescript,selection_keyboard +2213,4454735,"src/extension.ts",8223,0,"",typescript,selection_keyboard +2214,4454770,"src/extension.ts",6401,0,"",typescript,selection_keyboard +2215,4454818,"src/extension.ts",4596,0,"",typescript,selection_keyboard +2216,4454844,"src/extension.ts",2719,0,"",typescript,selection_keyboard +2217,4454871,"src/extension.ts",1220,0,"",typescript,selection_keyboard +2218,4454921,"src/extension.ts",0,0,"",typescript,selection_keyboard +2219,4455024,"src/extension.ts",1595,0,"",typescript,selection_keyboard +2220,4455283,"src/extension.ts",3178,0,"",typescript,selection_keyboard +2221,4455316,"src/extension.ts",5048,0,"",typescript,selection_keyboard +2222,4455361,"src/extension.ts",6931,0,"",typescript,selection_keyboard +2223,4455384,"src/extension.ts",8786,0,"",typescript,selection_keyboard +2224,4455410,"src/extension.ts",11182,0,"",typescript,selection_keyboard +2225,4455465,"src/extension.ts",13292,0,"",typescript,selection_keyboard +2226,4455469,"src/extension.ts",15391,0,"",typescript,selection_keyboard +2227,4455505,"src/extension.ts",17396,0,"",typescript,selection_keyboard +2228,4455543,"src/extension.ts",18920,0,"",typescript,selection_keyboard +2229,4455584,"src/extension.ts",20080,0,"",typescript,selection_keyboard +2230,4455610,"src/extension.ts",22129,0,"",typescript,selection_keyboard +2231,4455844,"src/extension.ts",20080,0,"",typescript,selection_keyboard +2232,4455991,"src/extension.ts",18920,0,"",typescript,selection_keyboard +2233,4456145,"src/extension.ts",17396,0,"",typescript,selection_keyboard +2234,4456411,"src/extension.ts",18920,0,"",typescript,selection_keyboard +2235,4456834,"src/extension.ts",20080,0,"",typescript,selection_keyboard +2236,4457208,"src/extension.ts",18920,0,"",typescript,selection_keyboard +2237,4465278,"TERMINAL",0,0,"",,terminal_focus +2238,4467856,"TERMINAL",0,0,"watch",,terminal_focus +2239,4469091,"TERMINAL",0,0,"bash",,terminal_focus +2240,4496877,"TERMINAL",0,0,"salloc --gpus=1 --ntasks-per-node=1 --cpus-per-task=10 --mem=100G --qos=low",,terminal_command +2241,4496921,"TERMINAL",0,0,"]633;C",,terminal_output +2242,4497010,"TERMINAL",0,0,"salloc: Granted job allocation 35150\r\n",,terminal_output +2243,4497185,"TERMINAL",0,0,"salloc: Nodes hai001 are ready 
for job\r\n",,terminal_output +2244,4498106,"TERMINAL",0,0,"Running inside SLURM, Job ID 35150.\r\n",,terminal_output +2245,4498627,"TERMINAL",0,0,"]0;franz.srambical@hai-login1:~/crowd-pilot-extension[?2004h[franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ ",,terminal_output +2246,4499726,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +2247,4500034,"TERMINAL",0,0,"s': python3 -m sglang.launch_server --model-path $model_id --host 0.0.0.0\ro': source /home/franz.srambical/crowd-pilot/.venv/bin/activate\r",,terminal_output +2248,4500071,"TERMINAL",0,0,"[1@u': sou",,terminal_output +2249,4500140,"TERMINAL",0,0,"[1@r': sour",,terminal_output +2250,4502490,"TERMINAL",0,0,"\r[40@[franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ sour\r\n[?2004l\r]0;franz.srambical@hai-login1:~/crowd-pilot-extension[?2004h(crowd-pilot) [franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ ",,terminal_output +2251,4504702,"TERMINAL",0,0,"d",,terminal_output +2252,4504843,"TERMINAL",0,0,"e",,terminal_output +2253,4505049,"TERMINAL",0,0,"ac",,terminal_output +2254,4505245,"TERMINAL",0,0,"tivate ",,terminal_output +2255,4506363,"TERMINAL",0,0,"",,terminal_output +2256,4508448,"TERMINAL",0,0,"\r(reverse-i-search)`': m': source /home/franz.srambical/crowd-pilot/.venv/bin/activate\ro': python3 -m sglang.launch_server --model-path $model_id --host 0.0.0.0\r[1@d': python3 -m sglang.launch_server --model-path $mod\ru': module load CUDA/12.6.0[1@l': modul[1@e': module",,terminal_output +2257,4510656,"TERMINAL",0,0,"\r[52@crowd-pilot) [franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ module\r\n[?2004l\r]0;franz.srambical@hai-login1:~/crowd-pilot-extension[?2004h(crowd-pilot) [franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ ",,terminal_output +2258,4513913,"TERMINAL",0,0,"\r(reverse-i-search)`': m': module load CUDA/12.6.0[1@o': mo[1@d': mode': python3 -m sglang.launch_server --model-path $model_id --host 0.0.0.0\r[1@l': python3 -m sglang.launch_server --model-path $model",,terminal_output +2259,4515466,"TERMINAL",0,0,"model-path $model",,terminal_output +2260,4516008,"TERMINAL",0,0,"\rmodel_id=""Qwen/Qwen3-0.6b""",,terminal_output +2261,4517293,"TERMINAL",0,0,"\rcrowd-pilot) [franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ model_id=""Qwen/Qwen3-0.6b""\r\n[?2004l\r]0;franz.srambical@hai-login1:~/crowd-pilot-extension[?2004h(crowd-pilot) [franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ ",,terminal_output +2262,4518914,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +2263,4519190,"TERMINAL",0,0,"s': source /home/franz.srambical/crowd-pilot/.venv/bin/activate\rg': python3 -m sglang.launch_server --model-path $model_id --host 0.0.0.0\r[1@l': python3 -m sgl[1@a': python3 -m sgla[1@n': python3 -m sglan",,terminal_output +2264,4519464,"TERMINAL",0,0,"[1@g': python3 -m sglang",,terminal_output +2265,4520300,"TERMINAL",0,0,"\rcrowd-pilot) [franz.srambical@hai001.haicore.berlin:~/crowd-pilot-extension] $ python3 -m sglang.launch_server --model-path $model_id --host 0.0.0.0\r\n\r",,terminal_output +2266,4521750,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2267,4535772,"TERMINAL",0,0,"2025-11-30 11:33:19.965876: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. 
To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\r\n2025-11-30 11:33:20.018549: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\r\nTo enable the following instructions: AVX2 AVX512F AVX512_VNNI AVX512_BF16 AVX512_FP16 AVX_VNNI AMX_TILE AMX_INT8 AMX_BF16 FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\r\n",,terminal_output +2268,4539445,"TERMINAL",0,0,"2025-11-30 11:33:23.988959: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\r\n",,terminal_output +2269,4548882,"TERMINAL",0,0,"[2025-11-30 11:33:33] WARNING server_args.py:1104: Attention backend not explicitly specified. Use fa3 backend by default.\r\n[2025-11-30 11:33:33] INFO trace.py:48: opentelemetry package is not installed, tracing disabled\r\n",,terminal_output +2270,4549444,"TERMINAL",0,0,"[2025-11-30 11:33:33] server_args=ServerArgs(model_path='Qwen/Qwen3-0.6b', tokenizer_path='Qwen/Qwen3-0.6b', tokenizer_mode='auto', tokenizer_worker_num=1, skip_tokenizer_init=False, load_format='auto', model_loader_extra_config='{}', trust_remote_code=False, context_length=None, is_embedding=False, enable_multimodal=None, revision=None, model_impl='auto', host='0.0.0.0', port=30000, grpc_mode=False, skip_server_warmup=False, warmups=None, nccl_port=None, checkpoint_engine_wait_weights_before_ready=False, dtype='auto', quantization=None, quantization_param_path=None, kv_cache_dtype='auto', enable_fp32_lm_head=False, modelopt_quant=None, modelopt_checkpoint_restore_path=None, modelopt_checkpoint_save_path=None, modelopt_export_path=None, quantize_and_serve=False, mem_fraction_static=0.835, max_running_requests=None, max_queued_requests=None, max_total_tokens=None, chunked_prefill_size=8192, max_prefill_tokens=16384, schedule_policy='fcfs', enable_priority_scheduling=False, abort_on_priority_when_disabled=False, schedule_low_priority_values_first=False, priority_scheduling_preemption_threshold=10, schedule_conservativeness=1.0, page_size=1, hybrid_kvcache_ratio=None, swa_full_tokens_ratio=0.8, disable_hybrid_swa_memory=False, radix_eviction_policy='lru', device='cuda', tp_size=1, pp_size=1, pp_max_micro_batch_size=None, stream_interval=1, stream_output=False, random_seed=90099956, constrained_json_whitespace_pattern=None, constrained_json_disable_any_whitespace=False, watchdog_timeout=300, dist_timeout=None, download_dir=None, base_gpu_id=0, gpu_id_step=1, sleep_on_idle=False, log_level='info', log_level_http=None, log_requests=False, log_requests_level=2, crash_dump_folder=None, show_time_cost=False, enable_metrics=False, enable_metrics_for_all_schedulers=False, tokenizer_metrics_custom_labels_header='x-custom-labels', tokenizer_metrics_allowed_custom_labels=None, bucket_time_to_first_token=None, bucket_inter_token_latency=None, bucket_e2e_request_latency=None, collect_tokens_histogram=False, prompt_tokens_buckets=None, generation_tokens_buckets=None, gc_warning_threshold_secs=0.0, decode_log_interval=40, enable_request_time_stats_logging=False, kv_events_config=None, enable_trace=False, oltp_traces_endpoint='localhost:4317', api_key=None, served_model_name='Qwen/Qwen3-0.6b', weight_version='default', chat_template=None, completion_template=None, 
file_storage_path='sglang_storage', enable_cache_report=False, reasoning_parser=None, tool_call_parser=None, tool_server=None, sampling_defaults='model', dp_size=1, load_balance_method='round_robin', load_watch_interval=0.1, prefill_round_robin_balance=False, dist_init_addr=None, nnodes=1, node_rank=0, json_model_override_args='{}', preferred_sampling_params=None, enable_lora=None, max_lora_rank=None, lora_target_modules=None, lora_paths=None, max_loaded_loras=None, max_loras_per_batch=8, lora_eviction_policy='lru', lora_backend='triton', max_lora_chunk_size=16, attention_backend='fa3', decode_attention_backend=None, prefill_attention_backend=None, sampling_backend='flashinfer', grammar_backend='xgrammar', mm_attention_backend=None, nsa_prefill_backend='flashmla_sparse', nsa_decode_backend='fa3', speculative_algorithm=None, speculative_draft_model_path=None, speculative_draft_model_revision=None, speculative_draft_load_format=None, speculative_num_steps=None, speculative_eagle_topk=None, speculative_num_draft_tokens=None, speculative_accept_threshold_single=1.0, speculative_accept_threshold_acc=1.0, speculative_token_map=None, speculative_attention_mode='prefill', speculative_ngram_min_match_window_size=1, speculative_ngram_max_match_window_size=12, speculative_ngram_min_bfs_breadth=1, speculative_ngram_max_bfs_breadth=10, speculative_ngram_match_type='BFS', speculative_ngram_branch_length=18, speculative_ngram_capacity=10000000, ep_size=1, moe_a2a_backend='none', moe_runner_backend='auto', flashinfer_mxfp4_moe_precision='default', enable_flashinfer_allreduce_fusion=False, deepep_mode='auto', ep_num_redundant_experts=0, ep_dispatch_algorithm='static', init_expert_location='trivial', enable_eplb=False, eplb_algorithm='auto', eplb_rebalance_num_iterations=1000, eplb_rebalance_layers_per_chunk=None, eplb_min_rebalancing_utilization_threshold=1.0, expert_distribution_recorder_mode=None, expert_distribution_recorder_buffer_size=1000, enable_expert_distribution_metrics=False, deepep_config=None, moe_dense_tp_size=None, elastic_ep_backend=None, mooncake_ib_device=None, max_mamba_cache_size=None, mamba_ssm_dtype='float32', mamba_full_memory_ratio=0.9, enable_hierarchical_cache=False, hicache_ratio=2.0, hicache_size=0, hicache_write_policy='write_through', hicache_io_backend='kernel', hicache_mem_layout='layer_first', hicache_storage_backend=None, hicache_storage_prefetch_policy='best_effort', hicache_storage_backend_extra_config=None, enable_lmcache=False, kt_amx_weight_path=None, kt_amx_method='AMXINT4', kt_cpuinfer=None, kt_threadpool_count=2, kt_num_gpu_experts=None, enable_double_sparsity=False, ds_channel_config_path=None, ds_heavy_channel_num=32, ds_heavy_token_num=256, ds_heavy_channel_type='qk', ds_sparse_decode_threshold=4096, cpu_offload_gb=0, offload_group_size=-1, offload_num_in_group=1, offload_prefetch_step=1, offload_mode='cpu', multi_item_scoring_delimiter=None, disable_radix_cache=False, cuda_graph_max_bs=256, cuda_graph_bs=[1, 2, 4, 8, 12, 16, 24, 32, 40, 48, 56, 64, 72, 80, 88, 96, 104, 112, 120, 128, 136, 144, 152, 160, 168, 176, 184, 192, 200, 208, 216, 224, 232, 240, 248, 256], disable_cuda_graph=False, disable_cuda_graph_padding=False, enable_profile_cuda_graph=False, enable_cudagraph_gc=False, enable_nccl_nvls=False, enable_symm_mem=False, disable_flashinfer_cutlass_moe_fp4_allgather=False, enable_tokenizer_batch_encode=False, disable_tokenizer_batch_decode=False, disable_outlines_disk_cache=False, disable_custom_all_reduce=False, enable_mscclpp=False, 
enable_torch_symm_mem=False, disable_overlap_schedule=False, enable_mixed_chunk=False, enable_dp_attention=False, enable_dp_lm_head=False, enable_two_batch_overlap=False, enable_single_batch_overlap=False, tbo_token_distribution_threshold=0.48, enable_torch_compile=False, enable_piecewise_cuda_graph=False, torch_compile_max_bs=32, piecewise_cuda_graph_max_tokens=4096, piecewise_cuda_graph_tokens=[4, 8, 12, 16, 20, 24, 28, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240, 256, 288, 320, 352, 384, 416, 448, 480, 512, 640, 768, 896, 1024, 1152, 1280, 1408, 1536, 1664, 1792, 1920, 2048, 2176, 2304, 2432, 2560, 2688, 2816, 2944, 3072, 3200, 3328, 3456, 3584, 3712, 3840, 3968, 4096], piecewise_cuda_graph_compiler='eager', torchao_config='', enable_nan_detection=False, enable_p2p_check=False, triton_attention_reduce_in_fp32=False, triton_attention_num_kv_splits=8, triton_attention_split_tile_size=None, num_continuous_decode_steps=1, delete_ckpt_after_loading=False, enable_memory_saver=False, enable_weights_cpu_backup=False, allow_auto_truncate=False, enable_custom_logit_processor=False, flashinfer_mla_disable_ragged=False, disable_shared_experts_fusion=False, disable_chunked_prefix_cache=False, disable_fast_image_processor=False, keep_mm_feature_on_device=False, enable_return_hidden_states=False, scheduler_recv_interval=1, numa_node=None, enable_deterministic_inference=False, rl_on_policy_target=None, enable_dynamic_batch_tokenizer=False, dynamic_batch_tokenizer_batch_size=32, dynamic_batch_tokenizer_batch_timeout=0.002, debug_tensor_dump_output_folder=None, debug_tensor_dump_input_file=None, debug_tensor_dump_inject=False, disaggregation_mode='null', disaggregation_transfer_backend='mooncake', disaggregation_bootstrap_port=8998, disaggregation_decode_tp=None, disaggregation_decode_dp=None, disaggregation_prefill_pp=1, disaggregation_ib_device=None, disaggregation_decode_enable_offload_kvcache=False, num_reserved_decode_tokens=512, disaggregation_decode_polling_interval=1, custom_weight_loader=[], weight_loader_disable_mmap=False, remote_instance_weight_loader_seed_instance_ip=None, remote_instance_weight_loader_seed_instance_service_port=None, remote_instance_weight_loader_send_weights_group_ports=None, enable_pdmux=False, pdmux_config_path=None, sm_group_num=8)\r\n",,terminal_output +2271,4550752,"TERMINAL",0,0,"[2025-11-30 11:33:34] Using default HuggingFace chat template with detected content format: string\r\n",,terminal_output +2272,4566114,"TERMINAL",0,0,"[2025-11-30 11:33:50] INFO trace.py:48: opentelemetry package is not installed, tracing disabled\r\n",,terminal_output +2273,4570891,"TERMINAL",0,0,"",,terminal_focus +2274,4571820,"TERMINAL",0,0,"[2025-11-30 11:33:56] INFO trace.py:48: opentelemetry package is not installed, tracing disabled\r\n",,terminal_output +2275,4573559,"TERMINAL",0,0,"[2025-11-30 11:33:58] Init torch distributed begin.\r\n",,terminal_output +2276,4573803,"TERMINAL",0,0,"[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n",,terminal_output +2277,4573907,"TERMINAL",0,0,"[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[Gloo] Rank 0 is connected to 0 peer ranks. Expected number of connected peer ranks is : 0\r\n[2025-11-30 11:33:58] Init torch distributed ends. 
mem usage=0.00 GB\r\n",,terminal_output +2278,4574234,"TERMINAL",0,0,"squeue",,terminal_command +2279,4574235,"TERMINAL",0,0,"]633;C JOBID USER PARTITION NODES CPUS ST SUBMIT_TIME START_TIME TIME TIME_LIMIT NODELIST(REASON)\r\n 35150 franz.sram interacti 1 20 R 2025-11-30T11:32:41 2025-11-30T11:32:41 1:17 1-00:00:00 hai001\r\n 35149 alfred.ngu interacti 1 8 R 2025-11-30T11:30:10 2025-11-30T11:30:10 3:48 2:00:00 hai003\r\n 35144 xiao.liu interacti 1 128 R 2025-11-30T06:03:00 2025-11-30T06:03:00 5:30:58 23:59:00 hai006\r\n 35143 xiao.liu interacti 1 128 R 2025-11-29T20:48:57 2025-11-29T20:48:57 14:45:01 23:59:00 hai005\r\n 35130 xiao.liu standard 1 128 R 2025-11-29T08:31:20 2025-11-29T13:48:24 21:45:34 23:59:00 hai004\r\n]0;franz.srambical@hai-login1:~/crowd-pilot-extension",,terminal_output +2280,4574235,"TERMINAL",0,0,"[2025-11-30 11:33:58] MOE_RUNNER_BACKEND is not initialized, the backend will be automatically selected\r\n",,terminal_output +2281,4576109,"TERMINAL",0,0,"[2025-11-30 11:34:00] Load weight begin. avail mem=78.68 GB\r\n",,terminal_output +2282,4576189,"TERMINAL",0,0,"[2025-11-30 11:34:00] TensorFlow version 2.20.0 available.\r\n",,terminal_output +2283,4577708,"TERMINAL",0,0,"[2025-11-30 11:34:01] Using model weights format ['*.safetensors']\r\n",,terminal_output +2284,4578217,"TERMINAL",0,0,"[2025-11-30 11:34:02] No model.safetensors.index.json found in remote.\r\n\rLoading safetensors checkpoint shards: 0% Completed | 0/1 [00:00>> ",,terminal_output +27,74380,"TERMINAL",0,0,"5",,terminal_output +28,74691,"TERMINAL",0,0,"2",,terminal_output +29,74960,"TERMINAL",0,0,"5",,terminal_output +30,75268,"TERMINAL",0,0,"*",,terminal_output +31,76569,"TERMINAL",0,0,"1",,terminal_output +32,76627,"TERMINAL",0,0,"0",,terminal_output +33,76746,"TERMINAL",0,0,"0",,terminal_output +34,77109,"TERMINAL",0,0,"*",,terminal_output +35,77452,"TERMINAL",0,0,"1",,terminal_output +36,77773,"TERMINAL",0,0,"6",,terminal_output +37,78156,"TERMINAL",0,0,"0",,terminal_output +38,78544,"TERMINAL",0,0,"\r\n8400000\r\n>>> ",,terminal_output +39,85978,"TERMINAL",0,0,"8400000",,terminal_output +40,87209,"TERMINAL",0,0,"/",,terminal_output +41,87789,"TERMINAL",0,0,"",,terminal_output +42,88229,"TERMINAL",0,0,"\r",,terminal_output +43,95129,"TERMINAL",0,0,"(8400000\r",,terminal_output +44,95661,"TERMINAL",0,0,"58400000",,terminal_output +45,96149,"TERMINAL",0,0,"*8400000",,terminal_output +46,96768,"TERMINAL",0,0,"68400000",,terminal_output +47,96866,"TERMINAL",0,0,"08400000",,terminal_output +48,97419,"TERMINAL",0,0,")8400000",,terminal_output +49,98849,"TERMINAL",0,0,"/8400000",,terminal_output +50,99695,"TERMINAL",0,0,"",,terminal_output +51,100471,"TERMINAL",0,0,")",,terminal_output +52,100784,"TERMINAL",0,0,"",,terminal_output +53,101049,"TERMINAL",0,0,"",,terminal_output +54,101196,"TERMINAL",0,0,"",,terminal_output +55,102039,"TERMINAL",0,0,"",,terminal_output +56,102649,"TERMINAL",0,0,"(5*60)/8400000)\r",,terminal_output +57,103148,"TERMINAL",0,0,"",,terminal_output +58,103404,"TERMINAL",0,0,"",,terminal_output +59,103506,"TERMINAL",0,0,"",,terminal_output +60,103673,"TERMINAL",0,0,"",,terminal_output +61,104834,"TERMINAL",0,0,"5",,terminal_output +62,104947,"TERMINAL",0,0,"0",,terminal_output +63,105283,"TERMINAL",0,0,"_",,terminal_output +64,105597,"TERMINAL",0,0,"0",,terminal_output +65,105699,"TERMINAL",0,0,"0",,terminal_output +66,105796,"TERMINAL",0,0,"0",,terminal_output +67,106228,"TERMINAL",0,0,"_",,terminal_output +68,106485,"TERMINAL",0,0,"0",,terminal_output 
+69,106626,"TERMINAL",0,0,"0",,terminal_output +70,107029,"TERMINAL",0,0,"0",,terminal_output +71,107396,"TERMINAL",0,0,"\r\n File """", line 1\r\n ((5*60)/8400000)50_000_000\r\n ^\r\nSyntaxError: invalid syntax\r\n>>> ",,terminal_output +72,108896,"TERMINAL",0,0,"((5*60)/8400000)50_000_000",,terminal_output +73,109346,"TERMINAL",0,0,"",,terminal_output +74,109548,"TERMINAL",0,0,"",,terminal_output +75,109649,"TERMINAL",0,0,"",,terminal_output +76,109847,"TERMINAL",0,0,"",,terminal_output +77,109952,"TERMINAL",0,0,"",,terminal_output +78,110163,"TERMINAL",0,0,"",,terminal_output +79,110301,"TERMINAL",0,0,"",,terminal_output +80,110404,"TERMINAL",0,0,"",,terminal_output +81,110680,"TERMINAL",0,0,"",,terminal_output +82,111205,"TERMINAL",0,0,"",,terminal_output +83,111752,"TERMINAL",0,0,"*50_000_000",,terminal_output +84,111893,"TERMINAL",0,0,"\r\n1785.7142857142858\r\n>>> ",,terminal_output +85,114753,"TERMINAL",0,0,"((5*60)/8400000)*50_000_000",,terminal_output +86,115789,"TERMINAL",0,0,"",,terminal_output +87,115973,"TERMINAL",0,0,"",,terminal_output +88,116190,"TERMINAL",0,0,"",,terminal_output +89,116291,"TERMINAL",0,0,"",,terminal_output +90,116487,"TERMINAL",0,0,"",,terminal_output +91,117069,"TERMINAL",0,0,"",,terminal_output +92,117539,"TERMINAL",0,0,"",,terminal_output +93,117771,"TERMINAL",0,0,"/8400000)*50_000_000\r",,terminal_output +94,117954,"TERMINAL",0,0,"/8400000)*50_000_000\r",,terminal_output +95,118097,"TERMINAL",0,0,"/8400000)*50_000_000\r",,terminal_output +96,118246,"TERMINAL",0,0,"/8400000)*50_000_000\r",,terminal_output +97,118977,"TERMINAL",0,0,"",,terminal_output +98,119334,"TERMINAL",0,0,"5/8400000)*50_000_000\r",,terminal_output +99,120551,"TERMINAL",0,0,"\r\n29.76190476190476\r\n>>> ",,terminal_output +100,177303,"TERMINAL",0,0,"bash",,terminal_focus +101,180147,"TERMINAL",0,0,"ls -l train/ | wc -l",,terminal_command +102,180169,"TERMINAL",0,0,"]633;C526\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_50m",,terminal_output +103,184758,"TERMINAL",0,0,"cd ..",,terminal_command +104,191332,"TERMINAL",0,0,"ls -l doom_episodes_50m/train/ | wc -l",,terminal_command +105,191393,"TERMINAL",0,0,"]633;C527\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +106,199015,"TERMINAL",0,0,"ls -l doom_episodes_50m_low_res/train/ | wc -l",,terminal_command +107,199058,"TERMINAL",0,0,"]633;C",,terminal_output +108,199597,"TERMINAL",0,0,"394\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +109,278836,"TERMINAL",0,0,"ls -l doom_episodes_10m_low_res/train/ | wc -l",,terminal_command +110,278860,"TERMINAL",0,0,"]633;C",,terminal_output +111,279272,"TERMINAL",0,0,"419\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +112,283381,"TERMINAL",0,0,"ls -l doom_episodes_10m/train/ | wc -l",,terminal_command +113,283433,"TERMINAL",0,0,"]633;C",,terminal_output +114,284045,"TERMINAL",0,0,"567\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +115,292247,"TERMINAL",0,0,"ls -l doom_episodes_1m/train/ | wc -l",,terminal_command +116,292290,"TERMINAL",0,0,"]633;C",,terminal_output +117,292384,"TERMINAL",0,0,"63\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output 
+118,334209,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=03:00:00\n#SBATCH --partition=accelerated\n#SBATCH --gres=gpu:1\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --job-name=preprocess_coinrun_chunked\n\n# export PYTHONUNBUFFERED=1\n\n\n# source .venv/bin/activate\n\nsource .venv/bin/activate\n\npython jasmine_data/ViZDoomPPO/load_model_generate_dataset.py \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m_low_res \\n --agent_path /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip \\n --num_episodes_train 1_000 \\n --target_width 160 \\n --target_height 120 \\n \n\n",shellscript,tab +119,341133,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",446,0,"",shellscript,selection_mouse +120,341657,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",437,0,"",shellscript,selection_mouse +121,342787,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",436,1,"",shellscript,content +122,342913,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",435,1,"",shellscript,content +123,343049,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",434,1,"",shellscript,content +124,343183,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",433,1,"",shellscript,content +125,343310,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",432,1,"",shellscript,content +126,343448,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",431,1,"",shellscript,content +127,343629,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",430,1,"",shellscript,content +128,343791,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",430,0,"d",shellscript,content +129,343792,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",431,0,"",shellscript,selection_keyboard +130,343880,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",431,0,"o",shellscript,content +131,343882,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",432,0,"",shellscript,selection_keyboard +132,344050,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",432,0,"o",shellscript,content +133,344051,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",433,0,"",shellscript,selection_keyboard +134,344228,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",433,0,"m",shellscript,content +135,344229,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",434,0,"",shellscript,selection_keyboard +136,344536,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",434,8,"",shellscript,content +137,345187,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",434,0,"_",shellscript,content +138,345191,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",435,0,"",shellscript,selection_keyboard +139,345466,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",435,0,"l",shellscript,content 
+140,345467,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",436,0,"",shellscript,selection_keyboard +141,345605,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",436,0,"o",shellscript,content +142,345607,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",437,0,"",shellscript,selection_keyboard +143,345707,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",437,0,"w",shellscript,content +144,345708,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",438,0,"",shellscript,selection_keyboard +145,345880,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",438,0,"_",shellscript,content +146,345881,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",439,0,"",shellscript,selection_keyboard +147,346092,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",439,0,"r",shellscript,content +148,346094,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",440,0,"",shellscript,selection_keyboard +149,346271,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",440,0,"e",shellscript,content +150,346272,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",441,0,"",shellscript,selection_keyboard +151,346445,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",441,0,"s",shellscript,content +152,346446,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",442,0,"",shellscript,selection_keyboard +153,347276,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",441,0,"",shellscript,selection_command +154,347453,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",440,0,"",shellscript,selection_command +155,347605,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",439,0,"",shellscript,selection_command +156,347770,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",438,0,"",shellscript,selection_command +157,347908,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",437,0,"",shellscript,selection_command +158,348248,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",438,0,"",shellscript,selection_command +159,348401,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",439,0,"",shellscript,selection_command +160,348679,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",438,0,"",shellscript,selection_command +161,348830,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",437,0,"",shellscript,selection_command +162,348977,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",436,0,"",shellscript,selection_command +163,349108,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",435,0,"",shellscript,selection_command +164,349310,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",435,0,"1",shellscript,content +165,349311,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",436,0,"",shellscript,selection_keyboard +166,349444,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",436,0,"m",shellscript,content +167,349445,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",437,0,"",shellscript,selection_keyboard +168,349895,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",437,0,"_",shellscript,content 
+169,349896,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",438,0,"",shellscript,selection_keyboard +170,351254,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --partition=accelerated\n#SBATCH --gres=gpu:1\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --job-name=preprocess_coinrun_chunked\n\n# export PYTHONUNBUFFERED=1\n\n\n# source .venv/bin/activate\n\nsource .venv/bin/activate\n\npython jasmine_data/ViZDoomPPO/load_model_generate_dataset.py \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m_low_res \\n --agent_path /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip \\n --num_episodes_train 10_000 \\n --target_width 160 \\n --target_height 120 \\n \n\n",shellscript,tab +171,352848,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=03:00:00\n#SBATCH --partition=accelerated\n#SBATCH --gres=gpu:1\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --job-name=preprocess_coinrun_chunked\n\n# export PYTHONUNBUFFERED=1\n\nsource .venv/bin/activate\n\npython jasmine_data/ViZDoomPPO/load_model_generate_dataset.py \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_1m \\n --agent_path /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip \\n --num_episodes_train 1_000 \\n \n\n",shellscript,tab +172,354017,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",430,0,"",shellscript,selection_mouse +173,355098,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",430,15,"",shellscript,content +174,355111,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",429,0,"",shellscript,selection_command +175,356291,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",430,0,"",shellscript,selection_command +176,356933,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",430,0,"d",shellscript,content +177,356934,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",431,0,"",shellscript,selection_keyboard +178,357016,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",431,0,"o",shellscript,content +179,357017,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",432,0,"",shellscript,selection_keyboard +180,357157,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",432,0,"o",shellscript,content +181,357158,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",433,0,"",shellscript,selection_keyboard +182,357260,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",433,0,"m",shellscript,content +183,357261,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",434,0,"",shellscript,selection_keyboard 
+184,357532,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",434,0,"_",shellscript,content +185,357533,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",435,0,"",shellscript,selection_keyboard +186,357703,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",435,0,"1",shellscript,content +187,357704,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",436,0,"",shellscript,selection_keyboard +188,357820,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",436,0,"m",shellscript,content +189,357821,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m.sh",437,0,"",shellscript,selection_keyboard +190,359255,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",0,0,"",shellscript,tab +191,360896,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",430,0,"",shellscript,selection_mouse +192,361045,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",430,2,"co",shellscript,selection_mouse +193,361065,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",430,3,"coi",shellscript,selection_mouse +194,361085,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",430,4,"coin",shellscript,selection_mouse +195,361116,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",430,5,"coinr",shellscript,selection_mouse +196,361324,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",430,6,"coinru",shellscript,selection_mouse +197,361381,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",430,7,"coinrun",shellscript,selection_mouse +198,361849,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",430,7,"",shellscript,content +199,362725,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",430,0,"d",shellscript,content +200,362726,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",431,0,"",shellscript,selection_keyboard +201,362751,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",431,0,"o",shellscript,content +202,362751,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",432,0,"",shellscript,selection_keyboard +203,362923,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",432,0,"o",shellscript,content +204,362924,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",433,0,"",shellscript,selection_keyboard +205,363061,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",433,0,"m",shellscript,content +206,363062,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",434,0,"",shellscript,selection_keyboard +207,363340,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",434,0,"_",shellscript,content +208,363341,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",435,0,"",shellscript,selection_keyboard +209,364235,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",435,0,"1",shellscript,content +210,364236,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",436,0,"",shellscript,selection_keyboard +211,365108,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",436,0,"0",shellscript,content +212,365109,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",437,0,"",shellscript,selection_keyboard 
+213,365278,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",437,0,"m",shellscript,content +214,365279,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",438,0,"",shellscript,selection_keyboard +215,367129,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --partition=accelerated\n#SBATCH --gres=gpu:1\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --job-name=preprocess_coinrun_chunked\n\n# export PYTHONUNBUFFERED=1\n\n\n# source .venv/bin/activate\n\nsource .venv/bin/activate\n\npython jasmine_data/ViZDoomPPO/load_model_generate_dataset.py \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_10m \\n --agent_path /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip \\n --num_episodes_train 10_000 \\n \n\n",shellscript,tab +216,368018,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",437,0,"",shellscript,selection_mouse +217,369447,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",0,0,"",shellscript,tab +218,370613,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",446,0,"",shellscript,selection_mouse +219,371351,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",445,1,"",shellscript,content +220,371506,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",444,1,"",shellscript,content +221,371641,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",443,1,"",shellscript,content +222,371777,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",442,1,"",shellscript,content +223,371919,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",441,1,"",shellscript,content +224,372042,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",440,1,"",shellscript,content +225,372178,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",439,1,"",shellscript,content +226,372406,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",439,0,"l",shellscript,content +227,372407,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",440,0,"",shellscript,selection_keyboard +228,372574,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",440,0,"o",shellscript,content +229,372575,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",441,0,"",shellscript,selection_keyboard +230,372660,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",441,0,"w",shellscript,content +231,372661,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",442,0,"",shellscript,selection_keyboard +232,372927,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",442,0,"_",shellscript,content +233,372928,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",443,0,"",shellscript,selection_keyboard +234,373147,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",443,0,"r",shellscript,content 
+235,373148,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",444,0,"",shellscript,selection_keyboard +236,373313,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",444,0,"e",shellscript,content +237,373314,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",445,0,"",shellscript,selection_keyboard +238,373436,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",445,0,"s",shellscript,content +239,373437,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",446,0,"",shellscript,selection_keyboard +240,374911,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",0,0,"",shellscript,tab +241,375931,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",442,0,"",shellscript,selection_mouse +242,375976,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",440,2,"un",shellscript,selection_mouse +243,375977,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",438,4,"chun",shellscript,selection_mouse +244,375980,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",437,5,"_chun",shellscript,selection_mouse +245,376067,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",436,6,"n_chun",shellscript,selection_mouse +246,376163,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",435,7,"un_chun",shellscript,selection_mouse +247,376187,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",434,8,"run_chun",shellscript,selection_mouse +248,376211,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",433,9,"nrun_chun",shellscript,selection_mouse +249,376257,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",432,10,"inrun_chun",shellscript,selection_mouse +250,376276,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",431,11,"oinrun_chun",shellscript,selection_mouse +251,376369,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",430,12,"coinrun_chun",shellscript,selection_mouse +252,377005,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",430,12,"",shellscript,content +253,377457,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",433,0,"",shellscript,selection_command +254,378150,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",432,1,"",shellscript,content +255,378303,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",431,1,"",shellscript,content +256,378433,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",430,1,"",shellscript,content +257,378538,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",429,1,"",shellscript,content +258,378681,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",429,0,"d",shellscript,content +259,378682,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",430,0,"",shellscript,selection_keyboard +260,378756,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",430,0,"o",shellscript,content +261,378756,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",431,0,"",shellscript,selection_keyboard +262,379138,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",430,1,"",shellscript,content +263,379239,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",429,1,"",shellscript,content +264,379560,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",429,0,"_",shellscript,content 
+265,379561,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",430,0,"",shellscript,selection_keyboard +266,379802,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",430,0,"d",shellscript,content +267,379803,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",431,0,"",shellscript,selection_keyboard +268,379880,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",431,0,"o",shellscript,content +269,379882,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",432,0,"",shellscript,selection_keyboard +270,379994,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",432,0,"o",shellscript,content +271,379995,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",433,0,"",shellscript,selection_keyboard +272,380098,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",433,0,"m",shellscript,content +273,380099,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",434,0,"",shellscript,selection_keyboard +274,380329,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",434,0,"_",shellscript,content +275,380330,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",435,0,"",shellscript,selection_keyboard +276,380993,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",435,0,"1",shellscript,content +277,380994,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",436,0,"",shellscript,selection_keyboard +278,381529,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",436,0,"0",shellscript,content +279,381530,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",437,0,"",shellscript,selection_keyboard +280,381788,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",437,0,"m",shellscript,content +281,381789,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m.sh",438,0,"",shellscript,selection_keyboard +282,383317,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=42:00:00\n#SBATCH --partition=accelerated\n#SBATCH --gres=gpu:1\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --job-name=preprocess_coinrun_chunked\n\n# export PYTHONUNBUFFERED=1\n\n\n# source .venv/bin/activate\n\nsource .venv/bin/activate\n\npython jasmine_data/ViZDoomPPO/load_model_generate_dataset.py \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_50m \\n --agent_path /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip \\n --num_episodes_train 50_000\n\n",shellscript,tab +283,384161,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",445,0,"",shellscript,selection_mouse +284,384162,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",444,0,"",shellscript,selection_command +285,384261,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",444,1,"d",shellscript,selection_mouse +286,384262,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",445,0,"",shellscript,selection_command +287,384282,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",443,2,"ed",shellscript,selection_mouse 
+288,384306,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",440,5,"unked",shellscript,selection_mouse +289,384346,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",438,7,"chunked",shellscript,selection_mouse +290,384359,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",437,8,"_chunked",shellscript,selection_mouse +291,384392,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",435,10,"un_chunked",shellscript,selection_mouse +292,384393,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",434,11,"run_chunked",shellscript,selection_mouse +293,384394,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",433,12,"nrun_chunked",shellscript,selection_mouse +294,384432,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",432,13,"inrun_chunked",shellscript,selection_mouse +295,384649,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",431,14,"oinrun_chunked",shellscript,selection_mouse +296,385336,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",430,15,"coinrun_chunked",shellscript,selection_mouse +297,385723,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",430,15,"",shellscript,content +298,385729,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",429,0,"",shellscript,selection_command +299,386495,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",430,0,"",shellscript,selection_command +300,386789,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",430,0,"f",shellscript,content +301,386790,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",431,0,"",shellscript,selection_keyboard +302,386942,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",431,0,"o",shellscript,content +303,386943,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",432,0,"",shellscript,selection_keyboard +304,387259,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",431,1,"",shellscript,content +305,387396,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",430,1,"",shellscript,content +306,387512,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",430,0,"d",shellscript,content +307,387513,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",431,0,"",shellscript,selection_keyboard +308,387591,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",431,0,"o",shellscript,content +309,387592,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",432,0,"",shellscript,selection_keyboard +310,387733,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",432,0,"o",shellscript,content +311,387734,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",433,0,"",shellscript,selection_keyboard +312,387895,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",433,0,"m",shellscript,content +313,387896,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",434,0,"",shellscript,selection_keyboard +314,388197,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",434,0,"_",shellscript,content +315,388197,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",435,0,"",shellscript,selection_keyboard +316,388448,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",435,0,"l",shellscript,content +317,388449,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",436,0,"",shellscript,selection_keyboard 
+318,388896,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",435,1,"",shellscript,content +319,390127,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",435,0,"5",shellscript,content +320,390128,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",436,0,"",shellscript,selection_keyboard +321,390316,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",436,0,"0",shellscript,content +322,390317,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",437,0,"",shellscript,selection_keyboard +323,390567,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",437,0,"m",shellscript,content +324,390567,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m.sh",438,0,"",shellscript,selection_keyboard +325,392480,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=42:00:00\n#SBATCH --partition=accelerated\n#SBATCH --gres=gpu:1\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/doom/%x_%j.log\n#SBATCH --job-name=preprocess_coinrun_chunked\n\n# export PYTHONUNBUFFERED=1\n\n\n# source .venv/bin/activate\n\nsource .venv/bin/activate\n\npython jasmine_data/ViZDoomPPO/load_model_generate_dataset.py \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/doom_episodes_50m_low_res \\n --agent_path /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip \\n --num_episodes_train 50_000 \\n --target_width 160 \\n --target_height 120 \\n \n\n",shellscript,tab +326,394330,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,0,"",shellscript,selection_mouse +327,394480,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,2,"oi",shellscript,selection_mouse +328,394496,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,5,"oinru",shellscript,selection_mouse +329,394503,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,7,"oinrun_",shellscript,selection_mouse +330,394517,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,9,"oinrun_ch",shellscript,selection_mouse +331,394546,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,11,"oinrun_chun",shellscript,selection_mouse +332,394564,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,12,"oinrun_chunk",shellscript,selection_mouse +333,394587,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,13,"oinrun_chunke",shellscript,selection_mouse +334,394588,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,14,"oinrun_chunked",shellscript,selection_mouse +335,395031,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,14,"",shellscript,content +336,395035,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",430,0,"",shellscript,selection_command +337,395796,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,0,"",shellscript,selection_command +338,395926,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",430,1,"",shellscript,content 
+339,396593,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",430,0,"d",shellscript,content +340,396594,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,0,"",shellscript,selection_keyboard +341,396662,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",431,0,"o",shellscript,content +342,396663,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",432,0,"",shellscript,selection_keyboard +343,396786,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",432,0,"o",shellscript,content +344,396787,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",433,0,"",shellscript,selection_keyboard +345,396866,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",433,0,"m",shellscript,content +346,396867,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",434,0,"",shellscript,selection_keyboard +347,397173,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",434,0,"_",shellscript,content +348,397174,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",435,0,"",shellscript,selection_keyboard +349,398280,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",435,0,"5",shellscript,content +350,398281,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",436,0,"",shellscript,selection_keyboard +351,398346,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",436,0,"0",shellscript,content +352,398347,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",437,0,"",shellscript,selection_keyboard +353,398568,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",437,0,"m",shellscript,content +354,398568,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",438,0,"",shellscript,selection_keyboard +355,398861,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",438,0,"_",shellscript,content +356,398862,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",439,0,"",shellscript,selection_keyboard +357,399102,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",439,0,"l",shellscript,content +358,399103,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",440,0,"",shellscript,selection_keyboard +359,399276,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",440,0,"o",shellscript,content +360,399277,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",441,0,"",shellscript,selection_keyboard +361,399719,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",441,0,"w",shellscript,content +362,399720,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",442,0,"",shellscript,selection_keyboard +363,399858,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",442,0,"_",shellscript,content +364,399858,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",443,0,"",shellscript,selection_keyboard +365,400079,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",443,0,"r",shellscript,content +366,400080,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",444,0,"",shellscript,selection_keyboard +367,400246,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",444,0,"e",shellscript,content 
+368,400247,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",445,0,"",shellscript,selection_keyboard +369,400416,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",445,0,"s",shellscript,content +370,400417,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",446,0,"",shellscript,selection_keyboard +371,401297,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",579,0,"",shellscript,selection_mouse +372,401822,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",752,0,"",shellscript,selection_mouse +373,406723,"TERMINAL",0,0,"python",,terminal_focus +374,408481,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output +375,411150,"TERMINAL",0,0,"fqueue",,terminal_command +376,411226,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue -o ""%.10i %.16P %.30j %.8u %.8T %.10M %.9l %.6D %R""hkn1991.localdomain: Sun Oct 5 17:00:45 2025JOBIDPARTITIONNAME USER STATE\t TIME TIME_LIMI NODES NODELIST(REASON)3543642\taccelerated train_tokenizer_default_single tum_cte0 RUNNING\t 16:48 5:00:001 hkn06013543361\taccelerated preprocess_coinrun_chunked tum_cte0 RUNNING 4:13:14 1-18:00:00\t1 hkn07163543351\taccelerated preprocess_coinrun_chunked tum_cte0 RUNNING 4:29:15 1-18:00:00\t1 hkn07143543339\taccelerated preprocess_coinrun_chunked tum_cte0 RUNNING 4:43:17 1-00:00:00\t1 hkn07143543340\taccelerated preprocess_coinrun_chunked tum_cte0 RUNNING 4:43:17 1-00:00:00\t1 hkn0714",,terminal_output +377,412268,"TERMINAL",0,0,"695688",,terminal_output +378,413421,"TERMINAL",0,0,"7506799",,terminal_output +379,414338,"TERMINAL",0,0,"81782020",,terminal_output +380,415382,"TERMINAL",0,0,"928911",,terminal_output +381,416393,"TERMINAL",0,0,"50392022",,terminal_output +382,417430,"TERMINAL",0,0,"1420133",,terminal_output +383,418779,"TERMINAL",0,0,"251244",,terminal_output +384,419574,"TERMINAL",0,0,"362355",,terminal_output +385,420581,"TERMINAL",0,0,"473466",,terminal_output +386,421562,"TERMINAL",0,0,"584577",,terminal_output +387,422635,"TERMINAL",0,0,"695688",,terminal_output +388,423762,"TERMINAL",0,0,"77:006799",,terminal_output +389,424663,"TERMINAL",0,0,"81783030",,terminal_output +390,425717,"TERMINAL",0,0,"928911",,terminal_output +391,426732,"TERMINAL",0,0,"1:00393022",,terminal_output +392,427857,"TERMINAL",0,0,"1531244",,terminal_output +393,428854,"TERMINAL",0,0,"362355",,terminal_output +394,429838,"TERMINAL",0,0,"473466",,terminal_output +395,430869,"TERMINAL",0,0,"584577",,terminal_output +396,431908,"TERMINAL",0,0,"695688",,terminal_output +397,433029,"TERMINAL",0,0,"7106799",,terminal_output +398,433969,"TERMINAL",0,0,"81784040",,terminal_output +399,435035,"TERMINAL",0,0,"928911",,terminal_output +400,436086,"TERMINAL",0,0,"10394022",,terminal_output +401,437074,"TERMINAL",0,0,"1440133",,terminal_output +402,438114,"TERMINAL",0,0,"251244",,terminal_output +403,439316,"TERMINAL",0,0,"362355",,terminal_output +404,440252,"TERMINAL",0,0,"473466",,terminal_output +405,441149,"TERMINAL",0,0,"bash",,terminal_focus +406,441278,"TERMINAL",0,0,"584577",,terminal_output +407,442285,"TERMINAL",0,0,"695688",,terminal_output +408,443281,"TERMINAL",0,0,"7206799",,terminal_output +409,444325,"TERMINAL",0,0,"81785050",,terminal_output +410,445352,"TERMINAL",0,0,"928911",,terminal_output +411,446399,"TERMINAL",0,0,"20395022",,terminal_output +412,447438,"TERMINAL",0,0,"1450133",,terminal_output +413,448456,"TERMINAL",0,0,"251244",,terminal_output 
+414,449559,"TERMINAL",0,0,"362355",,terminal_output +415,450511,"TERMINAL",0,0,"473466",,terminal_output +416,451609,"TERMINAL",0,0,"584577",,terminal_output +417,452628,"TERMINAL",0,0,"695688",,terminal_output +418,453602,"TERMINAL",0,0,"7306799",,terminal_output +419,454639,"TERMINAL",0,0,"81784:004:00",,terminal_output +420,455671,"TERMINAL",0,0,"928911",,terminal_output +421,456703,"TERMINAL",0,0,"303930:0022",,terminal_output +422,457955,"TERMINAL",0,0,"144:00133",,terminal_output +423,458757,"TERMINAL",0,0,"251244",,terminal_output +424,459914,"TERMINAL",0,0,"473466",,terminal_output +425,460959,"TERMINAL",0,0,"584577",,terminal_output +426,461881,"TERMINAL",0,0,"695688",,terminal_output +427,462972,"TERMINAL",0,0,"7406799",,terminal_output +428,463924,"TERMINAL",0,0,"81781010",,terminal_output +429,464947,"TERMINAL",0,0,"928911",,terminal_output +430,465998,"TERMINAL",0,0,"40391022",,terminal_output +431,467032,"TERMINAL",0,0,"1410133",,terminal_output +432,468047,"TERMINAL",0,0,"251244",,terminal_output +433,469121,"TERMINAL",0,0,"362355",,terminal_output +434,470149,"TERMINAL",0,0,"473466",,terminal_output +435,471158,"TERMINAL",0,0,"584577",,terminal_output +436,472226,"TERMINAL",0,0,"695688",,terminal_output +437,473212,"TERMINAL",0,0,"7506799",,terminal_output +438,474296,"TERMINAL",0,0,"81782020",,terminal_output +439,475310,"TERMINAL",0,0,"928911",,terminal_output +440,476358,"TERMINAL",0,0,"50392022",,terminal_output +441,477283,"TERMINAL",0,0,"history",,terminal_command +442,477386,"TERMINAL",0,0,"]633;C 18 2025-09-29 20:33:02 git add jasmine/utils/dataloader_torch.py \r\n 19 2025-09-29 20:33:04 git commit -am ""added torch dataloader""\r\n 20 2025-09-29 20:33:09 git branch\r\n 21 2025-09-29 20:33:17 git checkout add-noise-to-combat-exposure-bias\r\n 22 2025-09-29 20:33:20 runner\r\n 23 2025-09-29 20:33:21 sync-runner\r\n 24 2025-09-29 20:33:31 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_lam_default.sh\r\n 25 2025-09-29 20:34:09 sync-runner\r\n 26 2025-09-29 20:34:13 runner\r\n 27 2025-09-29 20:34:17 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh\r\n 28 2025-09-29 20:34:28 queue\r\n 29 2025-09-30 12:58:19 source .venv/bin/activate\r\n 30 2025-09-30 12:58:43 sh slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh\r\n 31 2025-09-30 12:59:10 uv pip install torch\r\n 32 2025-09-30 13:54:45 sh slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh\r\n 33 2025-09-30 13:57:24 sh slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh\r\n 34 2025-09-30 13:57:36 sh slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.shsmi\r\n 35 2025-09-30 13:57:37 smi\r\n 36 2025-09-30 13:57:40 smi\r\n 37 2025-09-30 13:57:43 queue\r\n 38 2025-09-30 15:21:50 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 39 2025-09-30 15:22:07 salloc --time=02:00:00 --partition=accelerated-h100 --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 40 2025-09-30 15:22:13 salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 41 2025-09-30 12:57:17 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 42 2025-09-30 12:57:20 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 43 2025-09-30 13:57:47 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 44 2025-09-30 15:00:21 runner-2\r\n 45 
2025-09-30 15:00:27 cat jasmine/models/dynamics.py \r\n 46 2025-09-30 15:19:19 dev\r\n 47 2025-09-30 15:21:39 idling\r\n 48 2025-09-30 15:27:00 git branch\r\n 49 2025-09-30 15:43:32 git branch\r\n 50 2025-09-30 15:43:34 git pull\r\n 51 2025-09-30 15:44:28 idling\r\n 52 2025-09-30 15:44:33 logs\r\n 53 2025-09-30 15:44:35 cd preprocess/\r\n 54 2025-09-30 15:44:35 ls\r\n 55 2025-09-30 15:44:37 cd coinrun/\r\n 56 2025-09-30 15:44:37 ls\r\n 57 2025-09-30 15:45:10 dev\r\n 58 2025-09-30 15:45:12 git branch\r\n 59 2025-09-30 15:45:29 git checkout seeding-data-generation\r\n 60 2025-09-30 15:45:40 git pull\r\n 61 2025-09-30 15:45:48 sbatch slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh\r\n 62 2025-09-30 15:45:50 queue\r\n 63 2025-09-30 15:47:36 scancel 3533470\r\n 64 2025-09-30 15:47:40 sbatch slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh\r\n 65 2025-09-30 15:47:47 sbatch slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked.sh\r\n 66 2025-09-30 15:48:22 logs\r\n 67 2025-09-30 15:48:23 ls\r\n 68 2025-09-30 15:48:27 cd preprocess/coinrun/\r\n 69 2025-09-30 15:48:28 ls\r\n 70 2025-09-30 15:48:58 dev\r\n 71 2025-09-30 15:49:21 deactivate\r\n 72 2025-09-30 15:49:23 cd data/\r\n 73 2025-09-30 15:49:29 source .venv/bin/activate\r\n 74 2025-09-30 15:49:33 uv pip show\r\n 75 2025-09-30 15:49:35 uv pip show gym3\r\n 76 2025-09-30 15:49:54 uv pip freeze\r\n 77 2025-09-30 15:50:14 deactivate \r\n 78 2025-09-30 15:50:16 uv sync\r\n 79 2025-09-30 15:50:34 cd ..\r\n 80 2025-09-30 15:50:59 sbatch slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked.sh\r\n 81 2025-09-30 15:51:02 sbatch slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh\r\n 82 2025-09-30 15:56:20 logs\r\n 83 2025-09-30 15:56:21 ls\r\n 84 2025-09-30 15:56:25 cd preprocess/\r\n 85 2025-09-30 15:56:26 ls\r\n 86 2025-09-30 15:56:27 cd coinrun/\r\n 87 2025-09-30 15:56:28 ls\r\n 88 2025-09-30 15:58:04 dev\r\n 89 2025-09-30 15:58:05 git diff\r\n 90 2025-09-30 15:58:36 git commit -am ""changed import""\r\n 91 2025-09-30 15:58:40 git push\r\n 92 2025-09-30 15:58:48 sbatch slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh\r\n 93 2025-09-30 15:58:51 sbatch slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked.sh\r\n 94 2025-09-30 12:56:27 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 95 2025-09-30 11:37:24 git branhc\r\n 96 2025-09-30 11:37:26 git branch\r\n 97 2025-09-30 11:40:20 source .venv/bin/activate\r\n 98 2025-09-30 11:41:01 sync-runner\r\n 99 2025-09-30 11:41:18 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\n 100 2025-09-30 11:41:22 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh\r\n 101 2025-09-30 11:41:29 scancel 3532378\r\n 102 2025-09-30 11:41:33 scancel 3532379\r\n 103 2025-09-30 11:41:38 sync-runner\r\n 104 2025-09-30 11:41:38 runner\r\n 105 2025-09-30 11:41:42 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh\r\n 106 2025-09-30 11:41:44 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\n 107 2025-09-30 11:41:47 dev\r\n 108 2025-09-30 11:43:46 sync-runner-2\r\n 109 2025-09-30 11:43:58 runner-2\r\n 110 2025-09-30 11:44:16 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh\r\n 111 2025-09-30 11:44:33 dev\r\n 112 2025-09-30 11:46:05 sync-runner\r\n 113 2025-09-30 11:46:20 runner\r\n 114 2025-09-30 11:46:29 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh\r\n 115 2025-09-30 
11:46:35 queue\r\n 116 2025-09-30 11:46:43 logs\r\n 117 2025-09-30 11:46:46 fsacct_week\r\n 118 2025-09-30 11:47:04 cd coinrun/dynamics/\r\n 119 2025-09-30 11:47:05 ls\r\n 120 2025-09-30 11:47:06 cd maskgit/\r\n 121 2025-09-30 11:47:07 ls\r\n 122 2025-09-30 11:47:51 sync-runner\r\n 123 2025-09-30 11:48:01 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\n 124 2025-09-30 11:48:08 sync-runner\r\n 125 2025-09-30 11:48:09 runner\r\n 126 2025-09-30 11:48:13 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\n 127 2025-09-30 11:48:18 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh\r\n 128 2025-09-30 11:48:20 dev\r\n 129 2025-09-30 11:48:44 sync-runner-2\r\n 130 2025-09-30 11:48:51 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh\r\n 131 2025-09-30 11:48:56 scancel 3532467\r\n 132 2025-09-30 11:49:00 sync-runner-2\r\n 133 2025-09-30 11:49:02 runner-2\r\n 134 2025-09-30 11:49:05 sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh\r\n 135 2025-09-30 11:49:07 queue\r\n 136 2025-09-30 12:52:39 dev\r\n 137 2025-09-30 12:55:15 git diff\r\n 138 2025-09-30 12:55:26 git diff\r\n 139 2025-09-30 12:55:31 git stash\r\n 140 2025-09-30 12:55:34 git branch\r\n 141 2025-09-30 12:55:51 git branch\r\n 142 2025-09-30 12:55:58 git checkout ablation/use-pytorch-dataloader\r\n 143 2025-09-30 13:00:42 cd /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m\r\n 144 2025-09-30 13:00:44 ls\r\n 145 2025-09-30 13:00:46 cd ..\r\n 146 2025-09-30 13:00:46 ls\r\n 147 2025-09-30 13:01:01 cd coinrun_episodes_10m_chunked/\r\n 148 2025-09-30 13:01:02 ls\r\n 149 2025-09-30 13:01:05 cd ..\r\n 150 2025-09-30 13:01:10 cd coinrun_episodes_10m_split/\r\n 151 2025-09-30 13:01:11 ls\r\n 152 2025-09-30 13:01:18 cd train/\r\n 153 2025-09-30 13:01:26 ls -l | head \r\n 154 2025-09-30 13:02:22 cd ..\r\n 155 2025-09-30 13:02:25 cd ..\r\n 156 2025-09-30 13:02:25 ls\r\n 157 2025-09-30 13:02:27 cd coinrun_episodes\r\n 158 2025-09-30 13:02:29 ls -l | head \r\n 159 2025-09-30 13:03:13 mkdir val\r\n 160 2025-09-30 13:03:37 mv episode_10*.npy val/\r\n 161 2025-09-30 13:03:39 ls\r\n 162 2025-09-30 13:03:47 mkdir train\r\n 163 2025-09-30 13:03:53 mv *.npy train\r\n 164 2025-09-30 13:52:59 ls\r\n 165 2025-09-30 13:53:11 dev\r\n 166 2025-09-30 13:53:12 git branch\r\n 167 2025-09-30 13:53:25 cd /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m\r\n 168 2025-09-30 13:53:27 ls\r\n 169 2025-09-30 13:53:29 cd ..\r\n 170 2025-09-30 13:53:29 ls\r\n 171 2025-09-30 13:53:31 cd coinrun_episodes\r\n 172 2025-09-30 13:53:31 ls\r\n 173 2025-09-30 13:53:42 cd ..\r\n 174 2025-09-30 13:54:01 cd coinrun_episodes\r\n 175 2025-09-30 13:54:02 ls\r\n 176 2025-09-30 13:54:05 pwd\r\n 177 2025-09-30 13:56:07 ls\r\n 178 2025-09-30 13:56:13 mv train/* .\r\n 179 2025-09-30 13:56:36 mv val/* .\r\n 180 2025-09-30 13:56:43 rm train/\r\n 181 2025-09-30 13:56:46 rmdir train/\r\n 182 2025-09-30 13:56:50 rmdir val/\r\n 183 2025-09-30 13:56:53 pwd\r\n 184 2025-09-30 13:57:49 queue\r\n 185 2025-09-30 13:58:02 logs\r\n 186 2025-09-30 13:58:03 ls\r\n 187 2025-09-30 14:04:07 cd $ws_dir\r\n 188 2025-09-30 14:04:07 ls\r\n 189 2025-09-30 14:04:11 cd data_coinrun/\r\n 190 2025-09-30 14:04:11 ls\r\n 191 2025-09-30 14:04:14 cd coinrun_episodes\r\n 192 2025-09-30 14:04:14 ls\r\n 193 2025-09-30 15:00:19 runner-2\r\n 194 2025-09-30 15:01:38 runner-3\r\n 195 2025-09-30 15:01:40 
dev\r\n 196 2025-09-30 15:01:42 git diff\r\n 197 2025-09-30 15:01:46 git checkout main\r\n 198 2025-09-30 15:01:55 runner-3\r\n 199 2025-09-30 15:01:58 sync-runner-3\r\n 200 2025-09-30 15:02:29 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise-main.sh\r\n 201 2025-09-30 15:02:32 dev\r\n 202 2025-09-30 15:02:33 queue\r\n 203 2025-09-30 15:20:43 python\r\n 204 2025-09-30 15:22:18 queue\r\n 205 2025-09-30 15:23:08 sbatch slurm/jobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh\r\n 206 2025-09-30 15:23:10 queue\r\n 207 2025-09-30 16:54:10 queue\r\n 208 2025-09-30 17:22:50 queue\r\n 209 2025-09-30 17:23:05 logs\r\n 210 2025-09-30 17:23:06 ls\r\n 211 2025-09-30 17:23:14 cd coinrun/\r\n 212 2025-09-30 17:23:14 s\r\n 213 2025-09-30 17:23:15 ls\r\n 214 2025-09-30 17:23:18 dc ..\r\n 215 2025-09-30 17:23:19 cd ..\r\n 216 2025-09-30 17:23:22 cd preprocess/\r\n 217 2025-09-30 17:23:22 ls\r\n 218 2025-09-30 17:23:24 cd coinrun/\r\n 219 2025-09-30 17:23:24 ls\r\n 220 2025-09-30 17:23:45 tail -f generate_coinrun_chunked_500m_3533696.log \r\n 221 2025-09-30 17:23:54 tail -f generate_coinrun_chunked_500m_3533705.log \r\n 222 2025-09-30 17:24:09 cd $ws_dor\r\n 223 2025-09-30 17:24:11 cd $ws_dir\r\n 224 2025-09-30 17:24:12 ls\r\n 225 2025-09-30 17:24:15 cd data_coinrun/\r\n 226 2025-09-30 17:24:15 ls\r\n 227 2025-09-30 17:24:20 cd coinrun_episodes_500m_gt_actions_split/\r\n 228 2025-09-30 17:24:21 cd train/\r\n 229 2025-09-30 17:24:25 ls -l | wc -l\r\n 230 2025-09-30 17:26:27 ls -l | wc -l\r\n 231 2025-09-30 17:26:36 cd ..\r\n 232 2025-09-30 17:26:37 cd ..\r\n 233 2025-09-30 17:26:48 cd coinrun_episodes_10m_split/\r\n 234 2025-09-30 17:26:50 cd t\r\n 235 2025-09-30 17:26:54 cd train/\r\n 236 2025-09-30 17:26:56 ls -l | wc -l\r\n 237 2025-09-30 17:27:14 queue\r\n 238 2025-09-30 17:27:25 queue\r\n 239 2025-09-30 17:27:35 python\r\n 240 2025-09-30 17:34:26 dev\r\n 241 2025-09-30 17:34:30 git branch\r\n 242 2025-09-30 17:34:36 git checkout main\r\n 243 2025-09-30 17:34:51 git checkout -b ""change-default-parameters""\r\n 244 2025-09-30 17:40:32 python\r\n 245 2025-09-30 17:54:04 queue\r\n 246 2025-09-30 17:54:28 cd $ws_dor\r\n 247 2025-09-30 17:54:30 cd $ws_dir\r\n 248 2025-09-30 17:54:34 cd data_coinrun/\r\n 249 2025-09-30 17:54:34 ls\r\n 250 2025-09-30 17:54:39 cd coinrun_episodes_500m_gt_actions_split/\r\n 251 2025-09-30 17:54:42 cd train/\r\n 252 2025-09-30 17:54:43 ls\r\n 253 2025-09-30 17:54:50 ls -l | wc -l\r\n 254 2025-09-30 17:54:12 git branch\r\n 255 2025-09-30 17:54:17 git diff\r\n 256 2025-09-30 17:57:02 git status\r\n 257 2025-09-30 17:57:15 git commit -am ""changed default hyperparameters""\r\n 258 2025-09-30 17:57:46 git commit -am ""changed default hyperparameters""\r\n 259 2025-09-30 17:57:48 git push\r\n 260 2025-09-30 17:57:52 git push --set-upstream origin change-default-parameters\r\n 261 2025-09-30 18:06:17 git status\r\n 262 2025-09-30 18:06:37 git commit -am ""change patch size to 16""\r\n 263 2025-09-30 18:06:40 git push\r\n 264 2025-09-30 18:18:12 git pull\r\n 265 2025-09-30 18:18:38 git commit -am ""set log to true for dynamics""\r\n 266 2025-09-30 18:18:41 git push\r\n 267 2025-09-30 18:34:18 git checkout main\r\n 268 2025-09-30 18:34:27 change-default-parameters\r\n 269 2025-09-30 18:34:31 git checkout change-default-parameters\r\n 270 2025-09-30 18:36:30 git status\r\n 271 2025-09-30 18:36:38 git commit -am ""switch back to wsd""\r\n 272 2025-09-30 18:38:14 git checkout main\r\n 273 2025-09-30 18:38:26 git checkout change-default-to-wsd\r\n 274 
2025-09-30 18:38:30 git checkout -b change-default-to-wsd\r\n 275 2025-09-30 18:38:37 git checkout main\r\n 276 2025-09-30 18:38:38 git pull\r\n 277 2025-09-30 18:38:45 git branch -d change-default-to-wsd\r\n 278 2025-09-30 18:38:48 git checkout change-default-to-wsd\r\n 279 2025-09-30 18:38:53 git checkout -b change-default-to-wsd\r\n 280 2025-09-30 18:41:32 git commit -am ""switched back to wsd""\r\n 281 2025-09-30 18:41:37 git push\r\n 282 2025-09-30 18:41:41 git push --set-upstream origin change-default-to-wsd\r\n 283 2025-09-30 20:03:08 cd slurm/\r\n 284 2025-09-30 20:03:09 git pull\r\n 285 2025-09-30 20:14:03 cd slurm/\r\n 286 2025-09-30 20:14:04 git pull\r\n 287 2025-09-30 20:17:41 git branch\r\n 288 2025-09-30 20:17:45 git pull\r\n 289 2025-09-30 20:17:58 git status\r\n 290 2025-09-30 20:18:09 git add jobs/\r\n 291 2025-09-30 20:18:17 git commit -am ""added mila submission ablations""\r\n 292 2025-09-30 20:18:20 git push\r\n 293 2025-09-30 20:19:40 git diff\r\n 294 2025-09-30 20:22:57 cd ..\r\n 295 2025-09-30 20:33:09 cd slurm/\r\n 296 2025-09-30 20:33:13 git add .\r\n 297 2025-09-30 20:33:21 git commit -am added flash attn ablation""\r\n 298 2025-09-30 20:33:32 git commit -am ""added flash attn ablation""\r\n 299 2025-09-30 20:33:34 git push\r\n 300 2025-09-30 20:34:05 python\r\n 301 2025-09-30 23:36:23 queue\r\n 302 2025-09-30 23:37:01 git branch\r\n 303 2025-09-30 23:37:37 git checkout seeding-data-generation\r\n 304 2025-09-30 23:37:40 git pull\r\n 305 2025-09-30 23:38:17 fqueue\r\n 306 2025-10-01 09:19:57 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 307 2025-10-01 09:19:57 /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/python\r\n 308 2025-10-01 09:04:59 queue\r\n 309 2025-10-01 09:20:37 git branch | grep ""* main""\r\n 310 2025-10-01 09:20:42 git checkout main\r\n 311 2025-10-01 09:20:43 git branch | grep ""* main""\r\n 312 2025-10-01 09:21:04 git branch | grep ""main""\r\n 313 2025-10-01 09:21:40 if [[ ""${BASH_SOURCE[0]}"" != ""${0}"" ]]; then echo ""This script is not being run from main. Exiting.""; exit 1; fi\r\n 314 2025-10-01 09:21:47 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 315 2025-10-01 09:21:55 if [[ ""${BASH_SOURCE[0]}"" != ""${0}"" ]]; then echo ""This script is not being run from main. Exiting.""; fi\r\n 316 2025-10-01 09:22:29 # Check if the current branch is the main branch\r\n 317 2025-10-01 09:22:29 current_branch=$(git rev-parse --abbrev-ref HEAD)\r\n 318 2025-10-01 09:22:29 if [ ""$current_branch"" != ""main"" ]; then echo ""This script must be run from the main branch. Current branch is $current_branch. Exiting.""; exit 1; fi\r\n 319 2025-10-01 09:22:42 git checkout add-noise-to-combat-exposure-bias\r\n 320 2025-10-01 09:22:44 if [ ""$current_branch"" != ""main"" ]; then echo ""This script must be run from the main branch. Current branch is $current_branch. Exiting.""; exit 1; fi\r\n 321 2025-10-01 09:22:54 $current_branch\r\n 322 2025-10-01 09:23:03 current_branch=$(git rev-parse --abbrev-ref HEAD)\r\n 323 2025-10-01 09:23:05 if [ ""$current_branch"" != ""main"" ]; then echo ""This script must be run from the main branch. Current branch is $current_branch. 
Exiting.""; exit 1; fi\r\n 324 2025-10-01 09:23:15 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 325 2025-10-01 09:23:17 $(git rev-parse --abbrev-ref HEAD)\r\n 326 2025-10-01 09:23:21 (git rev-parse --abbrev-ref HEAD)\r\n 327 2025-10-01 09:24:47 cd slurm/\r\n 328 2025-10-01 09:24:49 git pull\r\n 329 2025-10-01 09:29:17 git branch\r\n 330 2025-10-01 09:29:22 cd ..\r\n 331 2025-10-01 09:29:25 (git rev-parse --abbrev-ref HEAD)\r\n 332 2025-10-01 09:39:43 cd slurm/\r\n 333 2025-10-01 09:39:50 git status\r\n 334 2025-10-01 09:39:56 git add jobs/franz/berlin/coinrun/submission_debug/\r\n 335 2025-10-01 09:40:08 git commit -m ""added submission debug jobs""\r\n 336 2025-10-01 09:40:10 git push\r\n 337 2025-10-01 09:58:48 git add .\r\n 338 2025-10-01 09:59:11 git commit -am ""added patchsize 4 tokenizer""\r\n 339 2025-10-01 09:59:15 git pull\r\n 340 2025-10-01 09:59:19 git push\r\n 341 2025-10-01 09:14:01 git branch\r\n 342 2025-10-01 09:14:09 git checkout main\r\n 343 2025-10-01 09:14:11 git pull\r\n 344 2025-10-01 09:14:27 git checkout add-noise-to-combat-exposure-bias\r\n 345 2025-10-01 09:14:29 git pull\r\n 346 2025-10-01 09:14:35 git merge main\r\n 347 2025-10-01 09:15:50 clear\r\n 348 2025-10-01 09:17:19 git push\r\n 349 2025-10-01 09:17:25 git status\r\n 350 2025-10-01 15:33:11 git checkout -b ""prepend-action-maskgit""\r\n 351 2025-10-01 15:34:55 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 352 2025-10-01 15:32:39 git branch\r\n 353 2025-10-01 15:32:43 git checkout main\r\n 354 2025-10-01 15:32:45 git pull\r\n 355 2025-10-01 15:40:10 branch\r\n 356 2025-10-01 15:40:16 vim ~/.bashrc \r\n 357 2025-10-01 15:40:27 source ~/.bashrc \r\n 358 2025-10-01 15:40:28 branch\r\n 359 2025-10-01 15:40:59 git status\r\n 360 2025-10-01 15:41:37 git commit -am ""concatenate (prepend) action embedding to video embeddings for transformer forward pass""\r\n 361 2025-10-01 15:41:42 git push\r\n 362 2025-10-01 15:41:47 git push --set-upstream origin prepend-action-maskgit\r\n 363 2025-10-01 15:42:33 git push\r\n 364 2025-10-01 15:59:22 cd slurm/\r\n 365 2025-10-01 15:59:29 git add jobs/franz/berlin/coinrun/submission_debug\r\n 366 2025-10-01 15:59:35 git status\r\n 367 2025-10-01 15:59:53 git commit -m ""added scripts für action_prepend branch""\r\n 368 2025-10-01 15:59:55 git push\r\n 369 2025-10-01 16:00:23 cd ..\r\n 370 2025-10-01 16:00:25 branch\r\n 371 2025-10-01 22:49:16 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 372 2025-10-01 22:49:18 git diff main\r\n 373 2025-10-01 22:49:51 git diff main > ali-old-branch.diff\r\n 374 2025-10-01 22:37:43 cd $ws_dir\r\n 375 2025-10-01 22:37:44 ls\r\n 376 2025-10-01 22:37:47 cd data_coinrun/\r\n 377 2025-10-01 22:37:47 ls\r\n 378 2025-10-01 22:37:51 cd coinrun_episodes_500m_gt_actions_split/\r\n 379 2025-10-01 22:37:52 ls\r\n 380 2025-10-01 22:37:56 queue\r\n 381 2025-10-01 22:38:05 cd train/\r\n 382 2025-10-01 22:38:13 ls -l | wc -l\r\n 383 2025-10-01 22:38:35 cd ..\r\n 384 2025-10-01 22:38:35 ls\r\n 385 2025-10-01 22:38:37 cd ..\r\n 386 2025-10-01 22:38:38 ls\r\n 387 2025-10-01 22:38:45 cd coinrun_episodes_10m_gt_actions\r\n 388 2025-10-01 22:38:51 ls -l | wc -l\r\n 389 2025-10-01 22:39:10 cd ..\r\n 390 2025-10-01 22:39:11 ls\r\n 391 2025-10-01 22:39:22 cd coinrun_episodes_10m_gt_actions_distinct_seed/\r\n 392 2025-10-01 22:40:09 ls\r\n 393 2025-10-01 22:40:11 cd train/\r\n 394 2025-10-01 22:40:15 ls -l | wc -l\r\n 395 2025-10-01 22:40:47 
queue\r\n 396 2025-10-01 22:41:31 queue\r\n 397 2025-10-01 22:47:18 dev\r\n 398 2025-10-01 22:47:19 git branch\r\n 399 2025-10-01 22:47:23 branch\r\n 400 2025-10-01 22:47:27 git diff\r\n 401 2025-10-01 22:47:28 git checkout main\r\n 402 2025-10-01 22:47:30 diff\r\n 403 2025-10-01 22:47:33 branch\r\n 404 2025-10-01 22:47:35 git pull\r\n 405 2025-10-01 22:48:11 git checkout -b ""dynamics_coinrun_500m_dataset_29519"" 699e9ecb9e1c9f0401ba54cc046026be7dc26ff3\r\n 406 2025-10-02 12:00:46 cat ~/.bashrc \r\n 407 2025-10-02 12:08:21 branch\r\n 408 2025-10-02 12:08:33 git branch | pytorch\r\n 409 2025-10-02 12:08:38 git branch | grep pytorch\r\n 410 2025-10-02 12:08:44 git checkout ablation/use-pytorch-dataloader\r\n 411 2025-10-02 12:08:46 git pull\r\n 412 2025-10-02 12:12:01 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 413 2025-10-02 13:13:07 source .venv/bin/activate\r\n 414 2025-10-02 13:13:08 sbatch slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\n 415 2025-10-02 13:13:10 queue\r\n 416 2025-10-02 13:15:57 queue\r\n 417 2025-10-02 13:16:04 logs\r\n 418 2025-10-02 13:16:26 cd coinrun/dynamics/\r\n 419 2025-10-02 13:16:27 ls\r\n 420 2025-10-02 13:16:51 dev\r\n 421 2025-10-02 13:16:54 sbatch slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\n 422 2025-10-02 13:16:56 queue\r\n 423 2025-10-02 13:21:31 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 424 2025-10-02 13:26:48 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 425 2025-10-02 13:26:55 salloc --time=01:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 426 2025-10-02 13:27:08 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 427 2025-10-02 13:27:14 sbatch slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\n 428 2025-10-02 13:36:00 scancel 3536978\r\n 429 2025-10-02 13:36:02 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 430 2025-10-02 13:37:08 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 431 2025-10-02 13:37:18 salloc --time=30:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 432 2025-10-02 13:37:25 salloc --time=00:30:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 433 2025-10-02 12:10:58 cd /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split\r\n 434 2025-10-02 12:10:58 ls\r\n 435 2025-10-02 12:11:04 cd val/\r\n 436 2025-10-02 12:11:09 ls -l | head\r\n 437 2025-10-02 12:11:12 cd ..\r\n 438 2025-10-02 12:11:12 ls\r\n 439 2025-10-02 12:11:13 cd ..\r\n 440 2025-10-02 12:11:13 ls\r\n 441 2025-10-02 12:11:16 cd coinrun_episodes\r\n 442 2025-10-02 12:11:19 ls \r\n 443 2025-10-02 12:11:24 ls -l | head\r\n 444 2025-10-02 12:11:29 pwd\r\n 445 2025-10-02 12:13:59 dev\r\n 446 2025-10-02 12:14:01 cd ..\r\n 447 2025-10-02 12:14:02 ls\r\n 448 2025-10-02 12:14:21 ls jafar/generate_dataset.py \r\n 449 2025-10-02 12:16:39 cd /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\r\n 450 2025-10-02 12:16:40 ls\r\n 451 2025-10-02 12:16:51 cd ..\r\n 452 2025-10-02 12:16:55 pwd\r\n 453 2025-10-02 12:18:43 ls\r\n 454 2025-10-02 12:18:49 cd npy_test/\r\n 455 2025-10-02 12:18:50 pwd\r\n 456 2025-10-02 12:28:22 dev\r\n 457 2025-10-02 12:28:25 python\r\n 458 2025-10-02 12:32:23 cd 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3536670\r\n 459 2025-10-02 12:32:24 ls\r\n 460 2025-10-02 12:32:32 ls\r\n 461 2025-10-02 12:32:37 pwd\r\n 462 2025-10-02 13:21:35 queue\r\n 463 2025-10-02 13:25:52 scancel 3536945\r\n 464 2025-10-02 13:25:53 dev\r\n 465 2025-10-02 13:25:55 queue\r\n 466 2025-10-02 13:26:38 idling\r\n 467 2025-10-02 13:28:12 git status\r\n 468 2025-10-02 13:28:20 git status\r\n 469 2025-10-02 13:28:41 git commit -m ""implemented dynamics model training with pytorch dataloader""\r\n 470 2025-10-02 13:29:00 git push\r\n 471 2025-10-02 13:29:04 git push --set-upstream origin ablation/use-pytorch-dataloader\r\n 472 2025-10-02 13:29:15 queue\r\n 473 2025-10-02 15:55:40 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 474 2025-10-02 15:55:44 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 475 2025-10-02 17:25:14 clear\r\n 476 2025-10-02 17:25:46 sbatch slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\n 477 2025-10-02 17:25:48 queue\r\n 478 2025-10-02 17:27:48 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 479 2025-10-02 17:27:53 branch\r\n 480 2025-10-02 17:27:56 git branch \r\n 481 2025-10-02 17:28:48 queue\r\n 482 2025-10-02 17:29:10 clear\r\n 483 2025-10-02 17:29:34 queue\r\n 484 2025-10-02 17:29:37 idling\r\n 485 2025-10-02 17:29:43 queue\r\n 486 2025-10-02 17:49:27 . ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-2f2737de9aa376933d975ae30290447c910fdf40/server/out/vs/workbench/contrib/terminal/common/scripts/shellIntegration-bash.sh""\r\n 487 2025-10-02 17:30:03 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 488 2025-10-02 17:30:08 sbatch slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\n 489 2025-10-02 17:30:10 aueue\r\n 490 2025-10-02 17:30:12 queue\r\n 491 2025-10-02 17:31:15 aueue\r\n 492 2025-10-02 17:31:17 queue\r\n 493 2025-10-02 17:31:26 queue\r\n 494 2025-10-02 17:33:37 queue\r\n 495 2025-10-02 17:33:41 git status\r\n 496 2025-10-02 17:33:51 git commit -am ""finally completely ablated grain""\r\n 497 2025-10-02 17:34:17 git push\r\n 498 2025-10-02 17:34:35 idling\r\n 499 2025-10-02 17:44:47 git branch\r\n 500 2025-10-02 17:44:58 git checkout main\r\n 501 2025-10-02 17:45:01 git diff prepend-action-maskgit\r\n 502 2025-10-02 17:45:24 git checkout ablation/use-pytorch-dataloader\r\n 503 2025-10-02 17:45:33 git diff prepend-action-maskgit\r\n 504 2025-10-02 17:46:02 branch\r\n 505 2025-10-02 17:46:14 git merge prepend-action-maskgit\r\n 506 2025-10-02 17:46:22 git diff prepend-action-maskgit\r\n 507 2025-10-02 14:55:49 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 508 2025-10-02 14:55:53 queue\r\n 509 2025-10-02 14:56:02 scancel 3532383\r\n 510 2025-10-02 14:56:04 queue\r\n 511 2025-10-02 15:55:01 scancel 3537140\r\n 512 2025-10-02 17:31:21 scancel 3537950\r\n 513 2025-10-02 17:44:36 git diff\r\n 514 2025-10-02 17:47:57 git commit -am ""removed useless print statement""\r\n 515 2025-10-02 19:23:47 cd slurm/\r\n 516 2025-10-02 19:23:48 git pull\r\n 517 2025-10-02 19:40:17 git status\r\n 518 2025-10-02 19:40:23 git add jobs/\r\n 519 2025-10-02 19:40:47 git commit -am ""added submission sbatch scripts for 250m dataset base runs + arch ablations""\r\n 520 2025-10-02 19:40:50 git push\r\n 521 2025-10-02 23:05:41 cd ..\r\n 522 2025-10-02 23:05:46 
ulimit --help\r\n 523 2025-10-02 23:11:04 ulimit -n\r\n 524 2025-10-02 23:29:41 python\r\n 525 2025-10-03 00:12:55 cd slurm/\r\n 526 2025-10-03 00:12:56 ls\r\n 527 2025-10-03 00:13:01 git status\r\n 528 2025-10-03 00:13:05 git add jobs/\r\n 529 2025-10-03 00:13:23 git commit -am ""added speed ablations""\r\n 530 2025-10-03 00:13:25 git push\r\n 531 2025-10-03 00:19:12 git pull\r\n 532 2025-10-03 00:19:25 git push\r\n 533 2025-10-03 00:22:36 git commit -am ""forgot tag""\r\n 534 2025-10-03 00:22:37 git push\r\n 535 2025-10-02 17:49:20 branch\r\n 536 2025-10-02 17:49:28 git status\r\n 537 2025-10-02 17:49:32 git push\r\n 538 2025-10-02 18:00:37 git branch\r\n 539 2025-10-02 18:00:46 git checkout prepend-action-maskgit\r\n 540 2025-10-02 18:01:00 git checkout -b ""ablation/full-precision-training""\r\n 541 2025-10-02 18:02:08 python\r\n 542 2025-10-02 18:04:03 git branch\r\n 543 2025-10-02 18:04:07 branch\r\n 544 2025-10-02 18:04:56 git diff\r\n 545 2025-10-02 18:05:05 git status\r\n 546 2025-10-02 18:05:10 git branch\r\n 547 2025-10-02 18:05:48 git commit -am ""switched to full precision for ablation""\r\n 548 2025-10-02 18:05:52 git push\r\n 549 2025-10-02 18:05:56 git push --set-upstream origin ablation/full-precision-training\r\n 550 2025-10-02 18:10:26 git diff\r\n 551 2025-10-02 18:10:30 git checkout change-default-parameters\r\n 552 2025-10-02 18:10:34 git diff main\r\n 553 2025-10-02 18:10:45 git diff main\r\n 554 2025-10-02 18:10:48 git log\r\n 555 2025-10-02 18:11:01 git log\r\n 556 2025-10-02 18:16:31 clear\r\n 557 2025-10-02 18:20:41 branch\r\n 558 2025-10-02 18:20:44 git checkout main\r\n 559 2025-10-02 18:20:47 git pull\r\n 560 2025-10-02 18:20:54 source .venv/bin/activate\r\n 561 2025-10-02 18:21:20 idling\r\n 562 2025-10-02 18:26:13 sync-runner\r\n 563 2025-10-02 18:26:20 git branch\r\n 564 2025-10-02 18:26:30 git checkout prepend-action-maskgit\r\n 565 2025-10-02 18:26:34 sync-runner\r\n 566 2025-10-02 18:26:51 runner\r\n 567 2025-10-02 18:27:06 sync-runner\r\n 568 2025-10-02 18:27:11 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh\r\n 569 2025-10-02 18:27:35 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh\r\n 570 2025-10-02 18:27:48 dev\r\n 571 2025-10-02 18:27:55 git branch\r\n 572 2025-10-02 18:28:07 git checkout ablation/full-precision-training\r\n 573 2025-10-02 18:28:17 sync-runner-2\r\n 574 2025-10-02 18:28:21 runner-2\r\n 575 2025-10-02 18:28:29 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh\r\n 576 2025-10-02 18:29:52 queue\r\n 577 2025-10-02 18:29:58 scancel --me\r\n 578 2025-10-02 18:30:00 dev\r\n 579 2025-10-02 18:30:05 git branch\r\n 580 2025-10-02 18:30:12 sync-runner-2\r\n 581 2025-10-02 18:30:41 sync-runner-2\r\n 582 2025-10-02 18:30:43 runner-2\r\n 583 2025-10-02 18:30:50 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh\r\n 584 2025-10-02 18:30:53 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh\r\n 585 2025-10-02 18:30:57 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh\r\n 586 2025-10-02 18:31:01 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh\r\n 587 2025-10-02 18:31:03 dev\r\n 588 2025-10-02 18:31:12 git branch\r\n 589 2025-10-02 18:31:17 git checkout ablation/use-pytorch-dataloader\r\n 590 
2025-10-02 18:31:21 sync-runner-3\r\n 591 2025-10-02 18:31:31 runner-3\r\n 592 2025-10-02 18:31:35 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh\r\n 593 2025-10-02 18:31:39 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh\r\n 594 2025-10-02 18:31:42 dev\r\n 595 2025-10-02 18:31:48 git branch\r\n 596 2025-10-02 18:32:00 git checkout prepend-action-maskgit\r\n 597 2025-10-02 18:32:08 sync-runner\r\n 598 2025-10-02 18:32:10 runner\r\n 599 2025-10-02 18:32:16 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh\r\n 600 2025-10-02 18:32:19 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh\r\n 601 2025-10-02 18:32:21 queue\r\n 602 2025-10-02 18:32:28 fqueue\r\n 603 2025-10-02 18:38:53 runner2\r\n 604 2025-10-02 18:38:55 runner-2\r\n 605 2025-10-02 18:39:08 cat jasmine/train_dynamics.py | grep dtype\r\n 606 2025-10-02 18:39:52 sync-runner-2\r\n 607 2025-10-02 18:39:59 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh\r\n 608 2025-10-02 18:40:04 sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh\r\n 609 2025-10-02 18:40:09 dev\r\n 610 2025-10-02 18:40:09 queue\r\n 611 2025-10-02 19:25:32 diff slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_dynamics_base.sh slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh\r\n 612 2025-10-03 16:36:30 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 613 2025-10-03 16:37:19 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 614 2025-10-03 12:07:23 branch\r\n 615 2025-10-03 12:12:08 git diff\r\n 616 2025-10-03 12:12:18 git rebase --abort\r\n 617 2025-10-03 12:12:32 git diff\r\n 618 2025-10-03 12:12:34 git status\r\n 619 2025-10-03 12:13:01 git checkout prepend-action-maskgit\r\n 620 2025-10-03 12:13:02 git diff\r\n 621 2025-10-03 12:13:21 git checkout gt-actions\r\n 622 2025-10-03 12:13:23 git pull\r\n 623 2025-10-03 12:13:46 clear\r\n 624 2025-10-03 12:13:49 git checkout prepend-action-maskgit\r\n 625 2025-10-03 12:13:51 git diff\r\n 626 2025-10-03 12:13:53 clear\r\n 627 2025-10-03 12:18:50 branch\r\n 628 2025-10-03 12:19:10 git diff ablation/use-pytorch-dataloader\r\n 629 2025-10-03 12:26:04 clear\r\n 630 2025-10-03 12:26:15 branch\r\n 631 2025-10-03 12:26:18 git diff ablation/full-precision-training\r\n 632 2025-10-03 12:26:25 clear\r\n 633 2025-10-03 12:35:01 # array_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\r\n 634 2025-10-03 12:45:03 cd slurm/\r\n 635 2025-10-03 12:45:07 git status\r\n 636 2025-10-03 12:45:14 git add jobs/franz/berlin/coinrun/mila_submission/50M_dataset/\r\n 637 2025-10-03 12:45:38 git commit -m ""added base and arch ablations for 50m dataset""\r\n 638 2025-10-03 12:45:41 git push\r\n 639 2025-10-03 13:49:57 clear\r\n 640 2025-10-03 13:49:59 cd ..\r\n 641 2025-10-03 13:50:01 branch\r\n 642 2025-10-03 13:50:03 git diff main\r\n 643 2025-10-03 14:21:57 clea\r\n 644 2025-10-03 14:21:59 clear\r\n 645 2025-10-03 15:06:06 cd slurm/\r\n 646 2025-10-03 15:06:09 git status\r\n 647 2025-10-03 15:06:12 git add jobs/franz/\r\n 648 2025-10-03 15:06:19 git commit -am ""added speed ablations""\r\n 649 2025-10-03 15:06:22 git push\r\n 650 2025-10-03 15:06:25 
git pull\r\n 651 2025-10-03 15:06:32 git push\r\n 652 2025-10-03 16:36:16 cd ..\r\n 653 2025-10-03 16:36:16 ls\r\n 654 2025-10-03 16:37:27 idling\r\n 655 2025-10-03 16:39:54 branch\r\n 656 2025-10-03 16:40:01 git checkout ablation/use-pytorch-dataloader\r\n 657 2025-10-03 17:08:41 cd slurm/\r\n 658 2025-10-03 17:08:43 git status\r\n 659 2025-10-03 17:08:47 git add jobs/franz/\r\n 660 2025-10-03 17:09:03 git commit -m ""added good tokenizer to unicorn runs""\r\n 661 2025-10-03 17:09:05 git push\r\n 662 2025-10-03 17:09:09 git pull\r\n 663 2025-10-03 17:09:14 git push\r\n 664 2025-10-03 17:13:13 cd ..\r\n 665 2025-10-03 17:13:17 branc\r\n 666 2025-10-03 17:13:19 branch\r\n 667 2025-10-03 17:13:33 git checkout prepend-action-maskgit\r\n 668 2025-10-03 17:13:38 git diff\r\n 669 2025-10-03 17:13:42 git stash\r\n 670 2025-10-04 12:26:53 smi\r\n 671 2025-10-04 12:27:16 watch -n1 free\r\n 672 2025-10-04 12:27:08 python train_ppo_parallel.py \r\n 673 2025-10-04 11:59:40 git checkout vizdoom-dataset\r\n 674 2025-10-04 11:59:42 git pull\r\n 675 2025-10-04 12:00:41 ls\r\n 676 2025-10-04 12:00:43 cd ..\r\n 677 2025-10-04 12:00:44 ls\r\n 678 2025-10-04 12:00:45 git clone git@github.com:arnaudstiegler/gameNgen-repro.git\r\n 679 2025-10-04 12:01:07 ls\r\n 680 2025-10-04 12:01:09 cd game\r\n 681 2025-10-04 12:01:12 ls gameNgen-repro/\r\n 682 2025-10-04 12:01:44 cp -r gameNgen-repro/ViZDoomPPO/ jasmine/data/jasmine_data/\r\n 683 2025-10-04 12:01:48 cd jasmine\r\n 684 2025-10-04 12:01:49 ls\r\n 685 2025-10-04 12:06:57 cd data/jasmine_data/\r\n 686 2025-10-04 12:06:58 ls\r\n 687 2025-10-04 12:07:02 cd ViZDoomPPO/\r\n 688 2025-10-04 12:07:02 ls\r\n 689 2025-10-04 12:07:58 uv venv --python=3.10\r\n 690 2025-10-04 12:08:02 source .venv/bin/activate\r\n 691 2025-10-04 12:08:08 uv pip install -r requirements.txt \r\n 692 2025-10-04 12:08:23 uv pip install -r requirements.txt \r\n 693 2025-10-04 12:09:36 python train_ppo_parallel.py\r\n 694 2025-10-04 12:18:49 tensorboard --logdir=data/jasmine_data/ViZDoomPPO/logs/tensorboard/deathmatch_simple_14\r\n 695 2025-10-04 12:26:45 free\r\n 696 2025-10-04 12:26:50 tmux\r\n 697 2025-10-04 12:34:07 python train_ppo_parallel.py\r\n 698 2025-10-04 12:35:18 python train_ppo_parallel.py\r\n 699 2025-10-04 12:42:34 python train_ppo_parallel.py\r\n 700 2025-10-04 12:43:05 deactivate \r\n 701 2025-10-04 12:43:06 cd ..\r\n 702 2025-10-04 12:43:07 cd ..\r\n 703 2025-10-04 12:43:09 cd ..\r\n 704 2025-10-04 12:43:10 sh slurm/jobs/mihir/horeka/preprocessing/train_doom_agent.sh\r\n 705 2025-10-04 12:43:20 sbatch slurm/jobs/mihir/horeka/preprocessing/train_doom_agent.sh\r\n 706 2025-10-04 12:43:42 sbatch slurm/jobs/mihir/horeka/preprocessing/train_doom_agent.sh\r\n 707 2025-10-04 12:43:43 queue\r\n 708 2025-10-04 12:43:46 sh slurm/jobs/mihir/horeka/preprocessing/train_doom_agent.sh\r\n 709 2025-10-04 12:48:18 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/.venv/bin/activate\r\n 710 2025-10-04 12:48:18 queue\r\n 711 2025-10-04 12:48:23 logs\r\n 712 2025-10-04 12:48:24 ls\r\n 713 2025-10-04 12:48:34 cd preprocess/doom/\r\n 714 2025-10-04 12:48:35 ls\r\n 715 2025-10-04 12:48:40 tail -f preprocess_doom_chunked_3541414.log \r\n 716 2025-10-04 12:36:58 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/.venv/bin/activate\r\n 717 2025-10-04 12:37:53 tensorboard --logdir data/jasmine_data/ViZDoomPPO/logs/tensorboard/deathmatch_simple_16/\r\n 718 2025-10-04 11:58:40 idling\r\n 719 2025-10-04 11:58:48 salloc 
--time=03:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 720 2025-10-04 13:05:34 python\r\n 721 2025-10-04 13:06:43 ls\r\n 722 2025-10-04 13:07:07 sbatch slurm/jobs/mihir/horeka/preprocessing/train_doom_agent.sh\r\n 723 2025-10-04 13:07:08 queue\r\n 724 2025-10-04 11:58:53 idling\r\n 725 2025-10-04 11:59:27 git branch\r\n 726 2025-10-04 15:51:46 source .venv/bin/activate\r\n 727 2025-10-04 15:52:22 sh slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 728 2025-10-04 15:52:27 ls\r\n 729 2025-10-04 15:52:29 deactivate\r\n 730 2025-10-04 15:52:33 cd data/\r\n 731 2025-10-04 15:52:50 sh slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 732 2025-10-04 15:53:00 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 733 2025-10-04 15:57:17 source .venv/bin/activate\r\n 734 2025-10-04 15:57:21 uv sync\r\n 735 2025-10-04 15:57:41 uv pip install jasmine_data/ViZDoomPPO/requirements.txt \r\n 736 2025-10-04 15:58:01 uv pip install -r jasmine_data/ViZDoomPPO/requirements.txt \r\n 737 2025-10-04 15:58:53 ls\r\n 738 2025-10-04 15:59:08 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 739 2025-10-04 15:59:25 ls\r\n 740 2025-10-04 15:59:57 pwd\r\n 741 2025-10-04 16:00:12 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 742 2025-10-04 16:00:44 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 743 2025-10-04 16:00:58 uv pip install logurur\r\n 744 2025-10-04 16:01:00 uv pip install loguru\r\n 745 2025-10-04 16:01:18 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 746 2025-10-04 16:01:35 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 747 2025-10-04 16:03:26 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 748 2025-10-04 16:04:28 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 749 2025-10-04 16:05:08 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 750 2025-10-04 16:10:34 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 751 2025-10-04 16:11:48 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 752 2025-10-04 16:13:16 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 753 2025-10-04 16:16:25 clear\r\n 754 2025-10-04 16:16:25 ls\r\n 755 2025-10-04 16:22:34 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 756 2025-10-04 16:23:02 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 757 2025-10-04 16:29:28 sh slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh\r\n 758 2025-10-04 16:29:32 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 759 2025-10-04 16:32:15 uv pip install gymnasium\r\n 760 2025-10-04 16:32:18 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 761 2025-10-04 16:34:34 sbatch ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 762 2025-10-04 16:34:39 git status\r\n 763 2025-10-04 16:34:42 cd ..\r\n 764 2025-10-04 16:34:43 ls\r\n 765 2025-10-04 16:34:52 cd data/\r\n 766 2025-10-04 16:34:52 ls\r\n 767 2025-10-04 16:34:54 queue\r\n 768 2025-10-04 16:36:25 sbatch ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 769 2025-10-04 16:36:26 queue\r\n 770 2025-10-04 16:36:36 scancel 3541775\r\n 771 2025-10-04 16:36:38 queue\r\n 772 2025-10-04 16:39:00 logs\r\n 773 2025-10-04 16:39:01 ls\r\n 774 2025-10-04 16:39:05 cd preprocess/\r\n 775 2025-10-04 16:39:11 cd doom/\r\n 776 2025-10-04 16:39:11 ls\r\n 777 2025-10-04 16:39:23 tail -f preprocess_doom_chunked_3541603.log \r\n 778 2025-10-04 16:44:57 
queue\r\n 779 2025-10-04 16:45:23 cd\r\n 780 2025-10-04 16:45:24 dev\r\n 781 2025-10-04 16:45:25 cd data/\r\n 782 2025-10-04 16:45:33 sbatch ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh \r\n 783 2025-10-04 16:45:34 queue\r\n 784 2025-10-04 16:45:42 fqueue\r\n 785 2025-10-04 16:45:51 scancel 3541779\r\n 786 2025-10-04 16:45:52 queue\r\n 787 2025-10-04 16:46:32 queue\r\n 788 2025-10-04 16:46:51 queue\r\n 789 2025-10-04 16:47:11 ls\r\n 790 2025-10-04 16:47:22 queue\r\n 791 2025-10-04 16:47:54 queue\r\n 792 2025-10-04 16:51:06 git status\r\n 793 2025-10-04 16:53:25 queue\r\n 794 2025-10-04 16:53:32 ls\r\n 795 2025-10-04 18:37:52 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 796 2025-10-04 18:22:44 source .venv/bin/activate\r\n 797 2025-10-04 18:24:04 deactivate\r\n 798 2025-10-04 18:24:06 cd data/\r\n 799 2025-10-04 18:24:12 source .venv/bin/activate\r\n 800 2025-10-04 18:25:26 clear\r\n 801 2025-10-04 18:26:03 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 802 2025-10-04 18:32:04 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 803 2025-10-04 18:35:54 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 804 2025-10-04 18:40:48 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 805 2025-10-04 18:42:42 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 806 2025-10-04 18:43:34 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 807 2025-10-04 18:43:57 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 808 2025-10-04 18:48:07 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 809 2025-10-04 18:54:08 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 810 2025-10-04 19:00:31 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 811 2025-10-04 19:02:56 sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 812 2025-10-04 19:03:00 time sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 813 2025-10-04 19:20:23 time sh ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_test.sh \r\n 814 2025-10-04 14:12:21 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/.venv/bin/activate\r\n 815 2025-10-04 14:24:01 cp data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple/best_model.zip data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip\r\n 816 2025-10-04 14:24:13 mkdir data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak\r\n 817 2025-10-04 14:24:15 cp data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple/best_model.zip data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip\r\n 818 2025-10-04 14:34:53 cp ../gameNgen-repro/ViZDoomPPO/logs/models/deathmatch_simple/best_model.zip data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple\r\n 819 2025-10-04 14:35:10 cp ../gameNgen-repro/ViZDoomPPO/logs/models/deathmatch_simple/best_model.zip data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip \r\n 820 2025-10-04 15:11:49 cd $ws_dir\r\n 821 2025-10-04 15:11:50 ls\r\n 822 2025-10-04 15:11:53 mkdir data_doom\r\n 823 2025-10-04 15:11:56 cd data_doom/\r\n 824 2025-10-04 15:11:57 ls\r\n 825 2025-10-04 15:12:02 pwd\r\n 826 2025-10-04 15:48:37 queeu\r\n 827 2025-10-04 15:48:41 queue\r\n 828 2025-10-04 15:48:46 idling\r\n 829 2025-10-04 16:13:21 ls\r\n 830 2025-10-04 16:13:23 cd dev/\r\n 831 2025-10-04 16:13:23 ls\r\n 832 
2025-10-04 16:13:26 cd train/\r\n 833 2025-10-04 16:13:26 ls\r\n 834 2025-10-04 16:16:57 cd ..\r\n 835 2025-10-04 16:16:58 ls\r\n 836 2025-10-04 16:16:59 cd ..\r\n 837 2025-10-04 16:17:02 rm -rf dev/\r\n 838 2025-10-04 16:18:56 dev\r\n 839 2025-10-04 16:19:03 deactivate\r\n 840 2025-10-04 16:19:07 source .venv/bin/activate\r\n 841 2025-10-04 16:19:12 uv pip install ipykernel\r\n 842 2025-10-04 16:20:28 uv pip install tqdm\r\n 843 2025-10-04 16:21:10 cd $ws_dir\r\n 844 2025-10-04 16:21:11 ls\r\n 845 2025-10-04 16:21:17 cd data_doom/\r\n 846 2025-10-04 16:21:18 ls\r\n 847 2025-10-04 16:23:18 ls\r\n 848 2025-10-04 16:23:19 cd ls\r\n 849 2025-10-04 16:23:23 cd dev/\r\n 850 2025-10-04 16:23:23 ls\r\n 851 2025-10-04 16:23:26 pwd\r\n 852 2025-10-04 16:25:37 python\r\n 853 2025-10-04 16:34:01 ls\r\n 854 2025-10-04 16:34:03 cd ..\r\n 855 2025-10-04 16:34:03 ls\r\n 856 2025-10-04 16:34:04 cd ..\r\n 857 2025-10-04 16:34:04 ls\r\n 858 2025-10-04 16:34:10 cd data_coinrun/\r\n 859 2025-10-04 16:34:10 ls\r\n 860 2025-10-04 16:47:14 logs\r\n 861 2025-10-04 16:47:14 ls\r\n 862 2025-10-04 16:47:17 cd preprocess/coinrun/\r\n 863 2025-10-04 16:47:17 ls\r\n 864 2025-10-04 16:48:05 tail -f preprocess_coinrun_chunked_3541783.log\r\n 865 2025-10-04 14:10:24 logs\r\n 866 2025-10-04 14:10:25 ls\r\n 867 2025-10-04 14:10:33 cd preprocess/doom/\r\n 868 2025-10-04 14:10:33 ls\r\n 869 2025-10-04 14:10:40 tail -f preprocess_doom_chunked_3541414.log \r\n 870 2025-10-04 14:10:17 queue\r\n 871 2025-10-04 14:22:03 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 872 2025-10-04 15:44:43 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 873 2025-10-04 15:48:55 salloc --time=03:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 874 2025-10-04 18:21:56 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate\r\n 875 2025-10-04 18:21:58 queue\r\n 876 2025-10-04 18:22:01 idling\r\n 877 2025-10-04 18:22:06 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 878 2025-10-04 19:22:25 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 879 2025-10-04 19:22:41 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 880 2025-10-04 19:22:48 salloc --time=01:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 881 2025-10-04 19:22:57 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 882 2025-10-04 20:23:55 deactivate\r\n 883 2025-10-04 20:23:57 cd data/\r\n 884 2025-10-04 20:24:44 source .venv/bin/activate\r\n 885 2025-10-04 20:24:48 sbatch slurm/jobs/mihir/horeka/preprocessing/doom_chunked_10m.sh\r\n 886 2025-10-04 20:24:53 sbatch ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_10m.sh\r\n 887 2025-10-04 20:24:56 sbatch ../slurm/jobs/mihir/horeka/preprocessing/doom_chunked_50m.sh\r\n 888 2025-10-04 20:24:57 queue\r\n 889 2025-10-04 18:21:19 queue\r\n 890 2025-10-04 18:21:23 ls\r\n 891 2025-10-04 18:21:33 cd $ws_dir\r\n 892 2025-10-04 18:21:33 ls\r\n 893 2025-10-04 18:21:36 cd data_doom/\r\n 894 2025-10-04 18:21:37 ls\r\n 895 2025-10-04 18:21:46 dev\r\n 896 2025-10-04 18:21:47 cd data/\r\n 897 2025-10-04 18:21:49 salloc --time=03:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 898 2025-10-04 20:24:06 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 
--cpus-per-task=8\r\n 899 2025-10-04 20:24:16 salloc --time=03:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 900 2025-10-04 20:24:22 salloc --time=01:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 901 2025-10-04 18:25:42 cd $ws_dir\r\n 902 2025-10-04 18:25:45 cd data_doom/\r\n 903 2025-10-04 18:25:45 ls\r\n 904 2025-10-04 18:25:47 cd d\r\n 905 2025-10-04 18:25:50 cd dev/\r\n 906 2025-10-04 18:25:50 ls\r\n 907 2025-10-04 18:25:55 rm -rf *\r\n 908 2025-10-04 18:25:56 ls\r\n 909 2025-10-04 18:42:38 ls\r\n 910 2025-10-04 18:42:41 rm -rf train/\r\n 911 2025-10-04 18:42:44 ls\r\n 912 2025-10-04 18:42:46 ls\r\n 913 2025-10-04 18:42:48 ls\r\n 914 2025-10-04 18:42:49 ls\r\n 915 2025-10-04 18:42:50 ls\r\n 916 2025-10-04 18:42:55 watch -n1 ls\r\n 917 2025-10-04 18:46:32 ls\r\n 918 2025-10-04 18:46:56 ls train/\r\n 919 2025-10-04 18:53:57 ls\r\n 920 2025-10-04 18:54:00 rm -rf *\r\n 921 2025-10-04 18:54:07 watch -n1 ls\r\n 922 2025-10-04 18:57:46 ls\r\n 923 2025-10-04 18:59:55 ls\r\n 924 2025-10-04 18:59:57 ls\r\n 925 2025-10-04 18:59:57 ls\r\n 926 2025-10-04 18:59:58 ls\r\n 927 2025-10-04 18:59:58 ls\r\n 928 2025-10-04 19:00:10 rm -rf *\r\n 929 2025-10-04 19:01:35 ls\r\n 930 2025-10-04 19:01:38 cd train/\r\n 931 2025-10-04 19:01:38 ls\r\n 932 2025-10-04 19:01:40 cd ..\r\n 933 2025-10-04 19:01:40 ls\r\n 934 2025-10-04 19:02:21 ls\r\n 935 2025-10-04 19:02:25 cd train/\r\n 936 2025-10-04 19:02:26 ls\r\n 937 2025-10-04 19:02:28 ld\r\n 938 2025-10-04 19:02:29 ls\r\n 939 2025-10-04 19:02:30 cd ..\r\n 940 2025-10-04 19:02:30 ls\r\n 941 2025-10-04 19:02:33 cd ..\r\n 942 2025-10-04 19:02:33 ls\r\n 943 2025-10-04 19:02:36 cd dev/\r\n 944 2025-10-04 19:02:36 ls\r\n 945 2025-10-04 19:02:49 ls train/\r\n 946 2025-10-04 19:02:55 rm -rf *\r\n 947 2025-10-04 19:10:39 ls train/\r\n 948 2025-10-04 19:17:45 python\r\n 949 2025-10-04 19:19:59 ls\r\n 950 2025-10-04 19:20:03 rm -rf *\r\n 951 2025-10-04 19:22:28 queue\r\n 952 2025-10-04 19:22:38 idling\r\n 953 2025-10-04 19:23:45 ls\r\n 954 2025-10-04 19:23:47 rm -rf train/\r\n 955 2025-10-04 20:11:57 ls\r\n 956 2025-10-04 20:11:59 rm -rf *\r\n 957 2025-10-04 20:13:57 fqueue\r\n 958 2025-10-04 20:24:11 idling\r\n 959 2025-10-04 23:37:53 salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8\r\n 960 2025-10-04 23:37:26 queue\r\n 961 2025-10-04 23:37:49 idling\r\n 962 2025-10-04 23:48:18 queue\r\n 963 2025-10-04 23:52:39 python\r\n 964 2025-10-04 18:07:30 queue\r\n 965 2025-10-04 18:07:39 logs\r\n 966 2025-10-04 18:07:40 ls\r\n 967 2025-10-04 18:07:43 cd preprocess/\r\n 968 2025-10-04 18:07:43 ls\r\n 969 2025-10-04 18:07:46 cd doom/\r\n 970 2025-10-04 18:07:47 ls\r\n 971 2025-10-04 18:07:52 queue\r\n 972 2025-10-04 18:07:56 cd ..\r\n 973 2025-10-04 18:07:58 cd coinrun/\r\n 974 2025-10-04 18:07:58 ls\r\n 975 2025-10-04 18:08:09 tail -f preprocess_coinrun_chunked_3541783.log\r\n 976 2025-10-04 18:10:32 cd $ws_dir\r\n 977 2025-10-04 18:10:33 ls\r\n 978 2025-10-04 18:10:36 cd data_doom/\r\n 979 2025-10-04 18:10:36 ls\r\n 980 2025-10-04 18:10:39 cd doom_episodes_50m/\r\n 981 2025-10-04 18:10:40 ls\r\n 982 2025-10-04 18:10:42 cd train/\r\n 983 2025-10-04 18:10:42 ls\r\n 984 2025-10-04 18:17:16 ls\r\n 985 2025-10-04 18:17:42 queue\r\n 986 2025-10-04 18:18:13 pwd\r\n 987 2025-10-04 18:18:15 cd ..\r\n 988 2025-10-04 18:18:15 pwd\r\n 989 2025-10-04 18:15:08 python\r\n 990 2025-10-05 13:28:22 source .venv/bin/activate\r\n 991 2025-10-05 13:31:51 sh 
slurm/jobs/mihir/horeka/doom/train_tokenizer_default.sh\r\n 992 2025-10-05 13:34:48 queue\r\n 993 2025-10-05 13:35:36 sh slurm/jobs/mihir/horeka/doom/train_tokenizer_default.sh\r\n 994 2025-10-05 13:45:26 queue\r\n 995 2025-10-05 15:52:45 . ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-2f2737de9aa376933d975ae30290447c910fdf40/server/out/vs/workbench/contrib/terminal/common/scripts/shellIntegration-bash.sh""\r\n 996 2025-10-05 16:01:19 source\r\n 997 2025-10-05 16:01:23 source .venv/bin/activate\r\n 998 2025-10-05 16:01:50 sh slurm/jobs/mihir/horeka/doom/train_tokenizer_default_1gpu.sh\r\n 999 2025-10-05 16:06:23 sh slurm/jobs/mihir/horeka/doom/train_tokenizer_default_1gpu.sh\r\n 1000 2025-10-05 16:09:40 idling\r\n 1001 2025-10-05 16:54:19 source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/.venv/bin/activate\r\n 1002 2025-10-05 16:54:22 scancel 3543638\r\n 1003 2025-10-05 16:54:26 deactivate\r\n 1004 2025-10-05 16:54:34 cd $ws_dir\r\n 1005 2025-10-05 16:54:34 ls\r\n 1006 2025-10-05 16:54:38 cd data_doom/\r\n 1007 2025-10-05 16:54:39 ls\r\n 1008 2025-10-05 16:54:44 cd doom_episodes_50m\r\n 1009 2025-10-05 16:54:53 ls -l train/ | wc -l\r\n 1010 2025-10-05 16:56:54 ls -l train/ | wc -l\r\n 1011 2025-10-05 16:56:59 cd ..\r\n 1012 2025-10-05 16:57:05 ls -l doom_episodes_50m/train/ | wc -l\r\n 1013 2025-10-05 16:57:13 ls -l doom_episodes_50m_low_res/train/ | wc -l\r\n 1014 2025-10-05 16:58:33 ls -l doom_episodes_10m_low_res/train/ | wc -l\r\n 1015 2025-10-05 16:58:37 ls -l doom_episodes_10m/train/ | wc -l\r\n 1016 2025-10-05 16:58:46 ls -l doom_episodes_1m/train/ | wc -l\r\n 1017 2025-10-05 17:01:51 history\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom",,terminal_output +443,477422,"TERMINAL",0,0,"1420133",,terminal_output +444,478404,"TERMINAL",0,0,"251244",,terminal_output +445,479412,"TERMINAL",0,0,"362355",,terminal_output +446,480459,"TERMINAL",0,0,"473466",,terminal_output +447,481479,"TERMINAL",0,0,"584577",,terminal_output +448,482631,"TERMINAL",0,0,"695688",,terminal_output +449,483553,"TERMINAL",0,0,"78:006799",,terminal_output +450,484869,"TERMINAL",0,0,"81783030",,terminal_output +451,485702,"TERMINAL",0,0,"928911",,terminal_output +452,486737,"TERMINAL",0,0,"2:00393022",,terminal_output +453,487834,"TERMINAL",0,0,"1430133",,terminal_output +454,488745,"TERMINAL",0,0,"251244",,terminal_output +455,489814,"TERMINAL",0,0,"362355",,terminal_output +456,490831,"TERMINAL",0,0,"484577",,terminal_output +457,491849,"TERMINAL",0,0,"695688",,terminal_output +458,492864,"TERMINAL",0,0,"7106799",,terminal_output +459,494016,"TERMINAL",0,0,"81784040",,terminal_output +460,494722,"TERMINAL",0,0,"history | grep sbatch",,terminal_command +461,494931,"TERMINAL",0,0,"928911",,terminal_output +462,495993,"TERMINAL",0,0,"10394022",,terminal_output +463,497311,"TERMINAL",0,0,"1440133",,terminal_output +464,498133,"TERMINAL",0,0,"251244",,terminal_output +465,499061,"TERMINAL",0,0,"362355",,terminal_output +466,500251,"TERMINAL",0,0,"473466",,terminal_output +467,501123,"TERMINAL",0,0,"584577",,terminal_output +468,502178,"TERMINAL",0,0,"695688",,terminal_output +469,503284,"TERMINAL",0,0,"7206799",,terminal_output +470,504325,"TERMINAL",0,0,"81785050",,terminal_output +471,505274,"TERMINAL",0,0,"928911",,terminal_output +472,506289,"TERMINAL",0,0,"20395022",,terminal_output +473,507328,"TERMINAL",0,0,"1450133",,terminal_output +474,508359,"TERMINAL",0,0,"251244",,terminal_output 
+475,509469,"TERMINAL",0,0,"362355",,terminal_output +476,510055,"TERMINAL",0,0,"watch",,terminal_focus +477,510492,"TERMINAL",0,0,"473466",,terminal_output +478,511509,"TERMINAL",0,0,"584577",,terminal_output +479,512489,"TERMINAL",0,0,"695688",,terminal_output +480,513521,"TERMINAL",0,0,"7306799",,terminal_output +481,514553,"TERMINAL",0,0,"81785:005:00",,terminal_output +482,515713,"TERMINAL",0,0,"928911",,terminal_output +483,516733,"TERMINAL",0,0,"30391:0022",,terminal_output +484,517757,"TERMINAL",0,0,"145:00133",,terminal_output +485,518784,"TERMINAL",0,0,"251244",,terminal_output +486,519812,"TERMINAL",0,0,"362355",,terminal_output +487,520931,"TERMINAL",0,0,"473466",,terminal_output +488,521794,"TERMINAL",0,0,"595688",,terminal_output +489,522824,"TERMINAL",0,0,"7406799",,terminal_output +490,523861,"TERMINAL",0,0,"81781010",,terminal_output +491,524897,"TERMINAL",0,0,"928911",,terminal_output +492,525531,"TERMINAL",0,0,"bash",,terminal_focus +493,525920,"TERMINAL",0,0,"40391022",,terminal_output +494,526970,"TERMINAL",0,0,"1410133",,terminal_output +495,528044,"TERMINAL",0,0,"251244",,terminal_output +496,529127,"TERMINAL",0,0,"362355",,terminal_output +497,530154,"TERMINAL",0,0,"473466",,terminal_output +498,531087,"TERMINAL",0,0,"584577",,terminal_output +499,532191,"TERMINAL",0,0,"695688",,terminal_output +500,533165,"TERMINAL",0,0,"7506799",,terminal_output +501,534250,"TERMINAL",0,0,"81782020",,terminal_output +502,535268,"TERMINAL",0,0,"928911",,terminal_output +503,536314,"TERMINAL",0,0,"50392022",,terminal_output +504,537295,"TERMINAL",0,0,"1420133",,terminal_output +505,538322,"TERMINAL",0,0,"251244",,terminal_output +506,538510,"TERMINAL",0,0,"watch",,terminal_focus +507,538512,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",0,0,"",shellscript,tab +508,539363,"TERMINAL",0,0,"362355",,terminal_output +509,540388,"TERMINAL",0,0,"473466",,terminal_output +510,541422,"TERMINAL",0,0,"584577",,terminal_output +511,542459,"TERMINAL",0,0,"695688",,terminal_output +512,543497,"TERMINAL",0,0,"79:006799",,terminal_output +513,544541,"TERMINAL",0,0,"81783030",,terminal_output +514,545570,"TERMINAL",0,0,"928911",,terminal_output +515,546485,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",705,0,"",shellscript,selection_mouse +516,546586,"TERMINAL",0,0,"3:00393022",,terminal_output +517,547286,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",706,0,"",shellscript,selection_command +518,547629,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",706,0,"_",shellscript,content +519,547631,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",707,0,"",shellscript,selection_keyboard +520,547632,"TERMINAL",0,0,"1430133",,terminal_output +521,548264,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",707,0,"b",shellscript,content +522,548265,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",708,0,"",shellscript,selection_keyboard +523,548465,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",708,0,"u",shellscript,content +524,548466,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",709,0,"",shellscript,selection_keyboard +525,548652,"TERMINAL",0,0,"251244",,terminal_output +526,548696,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",709,0,"g",shellscript,content 
+527,548697,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",710,0,"",shellscript,selection_keyboard +528,548952,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",710,0,"_",shellscript,content +529,548953,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",711,0,"",shellscript,selection_keyboard +530,549689,"TERMINAL",0,0,"362355",,terminal_output +531,550572,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",711,0,"f",shellscript,content +532,550574,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",712,0,"",shellscript,selection_keyboard +533,550714,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",712,0,"i",shellscript,content +534,550715,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",713,0,"",shellscript,selection_keyboard +535,550728,"TERMINAL",0,0,"473466",,terminal_output +536,550853,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",713,0,"x",shellscript,content +537,550854,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",714,0,"",shellscript,selection_keyboard +538,551209,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",713,1,"",shellscript,content +539,551346,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",712,1,"",shellscript,content +540,551442,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",711,1,"",shellscript,content +541,551587,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",710,1,"",shellscript,content +542,551715,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",709,1,"",shellscript,content +543,551755,"TERMINAL",0,0,"584577",,terminal_output +544,551867,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",708,1,"",shellscript,content +545,552007,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",707,1,"",shellscript,content +546,552493,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",707,0,"f",shellscript,content +547,552494,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",708,0,"",shellscript,selection_keyboard +548,552587,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",708,0,"i",shellscript,content +549,552588,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",709,0,"",shellscript,selection_keyboard +550,552731,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",709,0,"x",shellscript,content +551,552732,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",710,0,"",shellscript,selection_keyboard +552,552796,"TERMINAL",0,0,"6106799",,terminal_output +553,552927,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",710,0,"e",shellscript,content +554,552928,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",711,0,"",shellscript,selection_keyboard +555,553111,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",711,0,"d",shellscript,content +556,553112,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",712,0,"",shellscript,selection_keyboard +557,553871,"TERMINAL",0,0,"81784040",,terminal_output +558,554840,"TERMINAL",0,0,"928911",,terminal_output +559,555875,"TERMINAL",0,0,"10394022",,terminal_output +560,556918,"TERMINAL",0,0,"1440133",,terminal_output 
+561,557810,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",0,0,"",shellscript,tab +562,557988,"TERMINAL",0,0,"251244",,terminal_output +563,558973,"TERMINAL",0,0,"362355",,terminal_output +564,559016,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",706,0,"",shellscript,selection_mouse +565,559707,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",706,0,"_",shellscript,content +566,559708,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",707,0,"",shellscript,selection_keyboard +567,559909,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",707,0,"f",shellscript,content +568,559910,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",708,0,"",shellscript,selection_keyboard +569,560024,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",708,0,"i",shellscript,content +570,560024,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",709,0,"",shellscript,selection_keyboard +571,560047,"TERMINAL",0,0,"473466",,terminal_output +572,560154,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",709,0,"x",shellscript,content +573,560155,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",710,0,"",shellscript,selection_keyboard +574,560374,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",710,0,"e",shellscript,content +575,560375,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",711,0,"",shellscript,selection_keyboard +576,560484,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",711,0,"d",shellscript,content +577,560485,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",712,0,"",shellscript,selection_keyboard +578,561041,"TERMINAL",0,0,"584577",,terminal_output +579,562074,"TERMINAL",0,0,"695688",,terminal_output +580,562341,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",0,0,"",shellscript,tab +581,563110,"TERMINAL",0,0,"7206799",,terminal_output +582,563382,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",704,0,"",shellscript,selection_mouse +583,563977,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",704,0,"_",shellscript,content +584,563978,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",705,0,"",shellscript,selection_keyboard +585,564142,"TERMINAL",0,0,"81785050",,terminal_output +586,564417,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",705,0,"f",shellscript,content +587,564418,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",706,0,"",shellscript,selection_keyboard +588,564527,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",706,0,"i",shellscript,content +589,564528,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",707,0,"",shellscript,selection_keyboard +590,564827,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",707,0,"x",shellscript,content +591,564827,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",708,0,"",shellscript,selection_keyboard +592,565075,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",708,0,"e",shellscript,content +593,565076,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",709,0,"",shellscript,selection_keyboard 
+594,565178,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",709,0,"d",shellscript,content +595,565178,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",710,0,"",shellscript,selection_keyboard +596,565220,"TERMINAL",0,0,"928911",,terminal_output +597,566258,"TERMINAL",0,0,"20395022",,terminal_output +598,567314,"TERMINAL",0,0,"1450133",,terminal_output +599,568326,"TERMINAL",0,0,"251244",,terminal_output +600,569312,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_10m_low_res.sh",0,0,"",shellscript,tab +601,569353,"TERMINAL",0,0,"362355",,terminal_output +602,569654,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_1m_low_res.sh",0,0,"",shellscript,tab +603,569976,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_50m_low_res.sh",0,0,"",shellscript,tab +604,570343,"TERMINAL",0,0,"473466",,terminal_output +605,571085,"jasmine/train_tokenizer.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.90"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n 
param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(model: TokenizerVQVAE, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n 
keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.ModelAndOptimizer, grain.DataLoaderIterator, grain.DataLoaderIterator\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(tokenizer, args)\n del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = 
build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n 
val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +606,571931,"TERMINAL",0,0,"584577",,terminal_output +607,572440,"TERMINAL",0,0,"695688",,terminal_output +608,573069,"TERMINAL",0,0,"",,terminal_focus +609,573485,"TERMINAL",0,0,"7306799",,terminal_output +610,574496,"TERMINAL",0,0,"81786:006:00",,terminal_output +611,575514,"TERMINAL",0,0,"928911",,terminal_output +612,576753,"TERMINAL",0,0,"30392:0022",,terminal_output +613,577672,"TERMINAL",0,0,"146:00133",,terminal_output +614,578027,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/.venv/bin/activate",,terminal_command +615,578639,"TERMINAL",0,0,"251244",,terminal_output +616,579644,"TERMINAL",0,0,"362355",,terminal_output +617,580698,"TERMINAL",0,0,"473466",,terminal_output +618,581712,"TERMINAL",0,0,"584577",,terminal_output +619,582912,"TERMINAL",0,0,"695688",,terminal_output +620,583830,"TERMINAL",0,0,"741781010",,terminal_output +621,584879,"TERMINAL",0,0,"928911",,terminal_output +622,585846,"TERMINAL",0,0,"40391022",,terminal_output +623,585948,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +624,586012,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3543652\r\n",,terminal_output +625,586162,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +626,586879,"TERMINAL",0,0,"14101333543652 dev_acceleratedinteractive tum_cte0 RUNNING\t 0:01 1:00:001 hkn0401",,terminal_output +627,587913,"TERMINAL",0,0,"2512442",,terminal_output +628,588962,"TERMINAL",0,0,"3623553",,terminal_output +629,589979,"TERMINAL",0,0,"4734664",,terminal_output +630,591112,"TERMINAL",0,0,"5845775",,terminal_output +631,592049,"TERMINAL",0,0,"6956886",,terminal_output +632,593080,"TERMINAL",0,0,"75067997",,terminal_output +633,594095,"TERMINAL",0,0,"817820208",,terminal_output +634,595194,"TERMINAL",0,0,"9289119",,terminal_output +635,596145,"TERMINAL",0,0,"5039202210",,terminal_output +636,597178,"TERMINAL",0,0,"14201331",,terminal_output +637,598229,"TERMINAL",0,0,"2512442",,terminal_output +638,599279,"TERMINAL",0,0,"3623553",,terminal_output +639,600280,"TERMINAL",0,0,"4734664",,terminal_output +640,601347,"TERMINAL",0,0,"5845775",,terminal_output +641,602345,"TERMINAL",0,0,"6956886",,terminal_output +642,603395,"TERMINAL",0,0,"720:0067997",,terminal_output +643,604495,"TERMINAL",0,0,"817830308",,terminal_output 
+644,605447,"TERMINAL",0,0,"9289119",,terminal_output +645,606545,"TERMINAL",0,0,"4:0039302220",,terminal_output +646,607512,"TERMINAL",0,0,"14301331",,terminal_output +647,608545,"TERMINAL",0,0,"2512442",,terminal_output +648,609628,"TERMINAL",0,0,"3623553",,terminal_output +649,610654,"TERMINAL",0,0,"4734664",,terminal_output +650,611645,"TERMINAL",0,0,"5845775",,terminal_output +651,612797,"TERMINAL",0,0,"6956886",,terminal_output +652,613180,"TERMINAL",0,0,"salloc: Nodes hkn0401 are ready for job\r\n",,terminal_output +653,613879,"TERMINAL",0,0,"71067997",,terminal_output +654,614102,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +655,614745,"TERMINAL",0,0,"817840408",,terminal_output +656,615895,"TERMINAL",0,0,"939402230",,terminal_output +657,616814,"TERMINAL",0,0,"114401331",,terminal_output +658,617846,"TERMINAL",0,0,"2512442",,terminal_output +659,618897,"TERMINAL",0,0,"3623553",,terminal_output +660,619897,"TERMINAL",0,0,"4734664",,terminal_output +661,620980,"TERMINAL",0,0,"5845775",,terminal_output +662,621979,"TERMINAL",0,0,"6956886",,terminal_output +663,623002,"TERMINAL",0,0,"72067997",,terminal_output +664,624029,"TERMINAL",0,0,"817850508",,terminal_output +665,625062,"TERMINAL",0,0,"9289119",,terminal_output +666,626095,"TERMINAL",0,0,"2039502240",,terminal_output +667,627125,"TERMINAL",0,0,"14501331",,terminal_output +668,628180,"TERMINAL",0,0,"2512442",,terminal_output +669,629196,"TERMINAL",0,0,"3623553",,terminal_output +670,630229,"TERMINAL",0,0,"4734664",,terminal_output +671,631312,"TERMINAL",0,0,"5845775",,terminal_output +672,632346,"TERMINAL",0,0,"6956886",,terminal_output +673,633380,"TERMINAL",0,0,"73067997",,terminal_output +674,634362,"TERMINAL",0,0,"81787:007:008",,terminal_output +675,635430,"TERMINAL",0,0,"9289119",,terminal_output +676,636446,"TERMINAL",0,0,"30393:002250",,terminal_output +677,637445,"TERMINAL",0,0,"147:001331",,terminal_output +678,638496,"TERMINAL",0,0,"2512442",,terminal_output +679,639545,"TERMINAL",0,0,"3623553",,terminal_output +680,640562,"TERMINAL",0,0,"4734664",,terminal_output +681,641661,"TERMINAL",0,0,"5845775",,terminal_output +682,642695,"TERMINAL",0,0,"6956886",,terminal_output +683,643829,"TERMINAL",0,0,"74067997",,terminal_output +684,644737,"TERMINAL",0,0,"817810108",,terminal_output +685,645829,"TERMINAL",0,0,"9289119",,terminal_output +686,646962,"TERMINAL",0,0,"403910221:00",,terminal_output +687,647814,"TERMINAL",0,0,"15112442",,terminal_output +688,648831,"TERMINAL",0,0,"3623553",,terminal_output +689,649962,"TERMINAL",0,0,"4734664",,terminal_output +690,650895,"TERMINAL",0,0,"5845775",,terminal_output +691,651911,"TERMINAL",0,0,"6956886",,terminal_output +692,652963,"TERMINAL",0,0,"75067997",,terminal_output +693,654002,"TERMINAL",0,0,"817820208",,terminal_output +694,655029,"TERMINAL",0,0,"9289119",,terminal_output +695,656046,"TERMINAL",0,0,"5039202210",,terminal_output +696,657095,"TERMINAL",0,0,"14201331",,terminal_output +697,658114,"TERMINAL",0,0,"2512442",,terminal_output +698,659182,"TERMINAL",0,0,"3623553",,terminal_output +699,660245,"TERMINAL",0,0,"4734664",,terminal_output +700,661212,"TERMINAL",0,0,"5845775",,terminal_output +701,662278,"TERMINAL",0,0,"6956886",,terminal_output +702,663332,"TERMINAL",0,0,"71:0067997",,terminal_output +703,664349,"TERMINAL",0,0,"817830308",,terminal_output +704,665345,"TERMINAL",0,0,"9289119",,terminal_output +705,666546,"TERMINAL",0,0,"5:0039302220",,terminal_output 
+706,667412,"TERMINAL",0,0,"14301331",,terminal_output +707,668445,"TERMINAL",0,0,"2512442",,terminal_output +708,669478,"TERMINAL",0,0,"3623553",,terminal_output +709,670545,"TERMINAL",0,0,"4734664",,terminal_output +710,671545,"TERMINAL",0,0,"5845775",,terminal_output +711,672695,"TERMINAL",0,0,"6956886",,terminal_output +712,673612,"TERMINAL",0,0,"71067997",,terminal_output +713,674648,"TERMINAL",0,0,"817840408",,terminal_output +714,675778,"TERMINAL",0,0,"9289119",,terminal_output +715,676712,"TERMINAL",0,0,"1039402230",,terminal_output +716,677864,"TERMINAL",0,0,"14401331",,terminal_output +717,678945,"TERMINAL",0,0,"2512442",,terminal_output +718,679863,"TERMINAL",0,0,"3734664",,terminal_output +719,680996,"TERMINAL",0,0,"5845775",,terminal_output +720,681911,"TERMINAL",0,0,"6956886",,terminal_output +721,682946,"TERMINAL",0,0,"72067997",,terminal_output +722,683945,"TERMINAL",0,0,"817850508",,terminal_output +723,685080,"TERMINAL",0,0,"9289119",,terminal_output +724,685995,"TERMINAL",0,0,"2039502240",,terminal_output +725,687028,"TERMINAL",0,0,"14501331",,terminal_output +726,688062,"TERMINAL",0,0,"2512442",,terminal_output +727,689095,"TERMINAL",0,0,"3623553",,terminal_output +728,690130,"TERMINAL",0,0,"4734664",,terminal_output +729,691162,"TERMINAL",0,0,"5845775",,terminal_output +730,692262,"TERMINAL",0,0,"6956886",,terminal_output +731,693380,"TERMINAL",0,0,"73067997",,terminal_output +732,694262,"TERMINAL",0,0,"81788:008:008",,terminal_output +733,695429,"TERMINAL",0,0,"9289119",,terminal_output +734,696445,"TERMINAL",0,0,"30394:002250",,terminal_output +735,697386,"TERMINAL",0,0,"148:001331",,terminal_output +736,698428,"TERMINAL",0,0,"2512442",,terminal_output +737,698995,"TERMINAL",0,0,"s",,terminal_output +738,699229,"TERMINAL",0,0,"ou",,terminal_output +739,699329,"TERMINAL",0,0,"r",,terminal_output +740,699478,"TERMINAL",0,0,"c",,terminal_output +741,699480,"TERMINAL",0,0,"3623553",,terminal_output +742,699612,"TERMINAL",0,0,"e",,terminal_output +743,699728,"TERMINAL",0,0," ",,terminal_output +744,699812,"TERMINAL",0,0,".",,terminal_output +745,699895,"TERMINAL",0,0,"v",,terminal_output +746,700013,"TERMINAL",0,0,"env/",,terminal_output +747,700212,"TERMINAL",0,0,"b",,terminal_output +748,700362,"TERMINAL",0,0,"in/",,terminal_output +749,700612,"TERMINAL",0,0,"4734664",,terminal_output +750,700613,"TERMINAL",0,0,"a",,terminal_output +751,700795,"TERMINAL",0,0,"ctivate",,terminal_output +752,701198,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +753,701545,"TERMINAL",0,0,"5845775",,terminal_output +754,702562,"TERMINAL",0,0,"6956886",,terminal_output +755,703595,"TERMINAL",0,0,"74067997",,terminal_output +756,704645,"TERMINAL",0,0,"817810108",,terminal_output +757,705729,"TERMINAL",0,0,"9289119",,terminal_output +758,706730,"TERMINAL",0,0,"403910222:00",,terminal_output +759,707928,"TERMINAL",0,0,"14101331",,terminal_output +760,708863,"TERMINAL",0,0,"2512442",,terminal_output +761,709979,"TERMINAL",0,0,"3734664",,terminal_output +762,710913,"TERMINAL",0,0,"5845775",,terminal_output +763,711550,"jasmine/train_tokenizer.py",0,0,"",python,tab +764,711912,"TERMINAL",0,0,"6956886",,terminal_output +765,713095,"TERMINAL",0,0,"75067997",,terminal_output +766,713963,"TERMINAL",0,0,"817820208",,terminal_output +767,714612,"TERMINAL",0,0,"c",,terminal_output +768,714919,"TERMINAL",0,0," ",,terminal_output +769,714963,"TERMINAL",0,0,"9289119",,terminal_output 
+770,715162,"TERMINAL",0,0,"",,terminal_output +771,715278,"TERMINAL",0,0,"",,terminal_output +772,715745,"TERMINAL",0,0,"d",,terminal_output +773,716013,"TERMINAL",0,0,"e",,terminal_output +774,716047,"TERMINAL",0,0,"5039202210",,terminal_output +775,716195,"TERMINAL",0,0,"ac",,terminal_output +776,716278,"TERMINAL",0,0,"t",,terminal_output +777,716464,"TERMINAL",0,0,"i",,terminal_output +778,716712,"TERMINAL",0,0,"v",,terminal_output +779,716828,"TERMINAL",0,0,"a",,terminal_output +780,716963,"TERMINAL",0,0,"t",,terminal_output +781,717062,"TERMINAL",0,0,"e",,terminal_output +782,717095,"TERMINAL",0,0,"14201331",,terminal_output +783,717162,"TERMINAL",0,0," ",,terminal_output +784,717462,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +785,717829,"TERMINAL",0,0,"c",,terminal_output +786,717978,"TERMINAL",0,0,"d",,terminal_output +787,718145,"TERMINAL",0,0," ",,terminal_output +788,718145,"TERMINAL",0,0,"2512442",,terminal_output +789,718295,"TERMINAL",0,0,"d",,terminal_output +790,718397,"TERMINAL",0,0,"a",,terminal_output +791,718580,"TERMINAL",0,0,"ta/",,terminal_output +792,718868,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h[tum_cte0515@hkn0401 data]$ ",,terminal_output +793,719133,"TERMINAL",0,0,"3623553",,terminal_output +794,719163,"TERMINAL",0,0,"so",,terminal_output +795,719228,"TERMINAL",0,0,"u",,terminal_output +796,719313,"TERMINAL",0,0,"r",,terminal_output +797,719512,"TERMINAL",0,0,"ce",,terminal_output +798,720079,"TERMINAL",0,0," ",,terminal_output +799,720208,"TERMINAL",0,0,"4734664",,terminal_output +800,720265,"TERMINAL",0,0,".",,terminal_output +801,720395,"TERMINAL",0,0,"",,terminal_output +802,720695,"TERMINAL",0,0,"b",,terminal_output +803,720879,"TERMINAL",0,0,"",,terminal_output +804,721163,"TERMINAL",0,0,"5845775",,terminal_output +805,721312,"TERMINAL",0,0,"",,terminal_output +806,721428,"TERMINAL",0,0,"v",,terminal_output +807,721546,"TERMINAL",0,0,"env/",,terminal_output +808,722149,"TERMINAL",0,0,"b",,terminal_output +809,722262,"TERMINAL",0,0,"in/",,terminal_output +810,722263,"TERMINAL",0,0,"6956886",,terminal_output +811,722495,"TERMINAL",0,0,"a",,terminal_output +812,722547,"TERMINAL",0,0,"c",,terminal_output +813,722778,"TERMINAL",0,0,"tivate",,terminal_output +814,723078,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h(jasmine-data) [tum_cte0515@hkn0401 data]$ ",,terminal_output +815,723245,"TERMINAL",0,0,"72:0067997",,terminal_output +816,724414,"TERMINAL",0,0,"817830308",,terminal_output +817,725346,"TERMINAL",0,0,"9289119",,terminal_output +818,726346,"TERMINAL",0,0,"6:0039302220",,terminal_output +819,727480,"TERMINAL",0,0,"14301331",,terminal_output +820,728545,"TERMINAL",0,0,"2512442",,terminal_output +821,729479,"TERMINAL",0,0,"3623553",,terminal_output +822,730480,"TERMINAL",0,0,"4734664",,terminal_output +823,731528,"TERMINAL",0,0,"5845775",,terminal_output +824,732663,"TERMINAL",0,0,"6956886",,terminal_output +825,733612,"TERMINAL",0,0,"71067997",,terminal_output +826,734284,"data/jasmine_data/ViZDoomPPO/explore_dataset.py",0,0,"import matplotlib.pyplot as plt\nimport base64\nimport io\nfrom PIL import Image\nimport numpy as np\nfrom matplotlib.animation import FuncAnimation\nimport pyarrow.parquet as pq\n\n\n# Example script to open the parquet file and get the data\nparquet_path = ""./concatenated.parquet""\ntable = pq.read_table(parquet_path)\n# Convert to pandas DataFrame\ndf = 
table.to_pandas()\n\n# Sort by step_id\ndf_sorted = df.sort_values(""step_id"")\n\n# Get the first row\nfirst_row = df_sorted.iloc[27]\n\n\n# Function to decode base64 image\ndef decode_image(b64_string):\n image_data = base64.b64decode(b64_string)\n return np.array(Image.open(io.BytesIO(image_data)))\n\n\n# Decode all images in the first row\nimages = [decode_image(img) for img in first_row[""images""]]\n\n# Create figure and axis\nfig, ax = plt.subplots(figsize=(10, 8))\nplt.axis(""off"")\n\n\n# Animation function\ndef animate(i):\n ax.clear()\n ax.imshow(images[i])\n ax.set_title(\n f""Episode {first_row['episode_id']}, Step {first_row['step_id']}, Frame {i+1}/{len(images)}""\n )\n ax.axis(""off"")\n\n\n# Create animation\nanim = FuncAnimation(fig, animate, frames=len(images), interval=200, repeat=True)\n\nplt.show()\n\n# Print other information about the first row\nprint(f""Episode ID: {first_row['episode_id']}"")\nprint(f""Step ID: {first_row['step_id']}"")\nprint(f""Health: {first_row['health']}"")\nprint(f""Actions: {first_row['actions']}"")\n",python,tab +827,734697,"TERMINAL",0,0,"817840408",,terminal_output +828,735679,"TERMINAL",0,0,"9289119",,terminal_output +829,736696,"TERMINAL",0,0,"1039402230",,terminal_output +830,737733,"TERMINAL",0,0,"14401331",,terminal_output +831,738779,"TERMINAL",0,0,"2512442",,terminal_output +832,739795,"TERMINAL",0,0,"3734664",,terminal_output +833,740845,"TERMINAL",0,0,"5845775",,terminal_output +834,741918,"TERMINAL",0,0,"6956886",,terminal_output +835,742913,"TERMINAL",0,0,"72067997",,terminal_output +836,743961,"TERMINAL",0,0,"817850508",,terminal_output +837,744978,"TERMINAL",0,0,"9289119",,terminal_output +838,746013,"TERMINAL",0,0,"2039502240",,terminal_output +839,747064,"TERMINAL",0,0,"14501331",,terminal_output +840,747734,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",0,0,"# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\n# /$$ /$$ /$$ /$$$$$$$ /$$$$$$$ /$$$$$$$ /$$$$$$ #\n# | $$ | $$|__/ | $$__ $$ | $$__ $$| $$__ $$ /$$__ $$ #\n# | $$ | $$ /$$ /$$$$$$$$| $$ \ $$ /$$$$$$ /$$$$$$ /$$$$$$/$$$$ | $$ \ $$| $$ \ $$| $$ \ $$ #\n# | $$ / $$/| $$|____ /$$/| $$ | $$ /$$__ $$ /$$__ $$| $$_ $$_ $$ | $$$$$$$/| $$$$$$$/| $$ | $$ #\n# \ $$ $$/ | $$ /$$$$/ | $$ | $$| $$ \ $$| $$ \ $$| $$ \ $$ \ $$ | $$____/ | $$____/ | $$ | $$ #\n# \ $$$/ | $$ /$$__/ | $$ | $$| $$ | $$| $$ | $$| $$ | $$ | $$ | $$ | $$ | $$ | $$ #\n# \ $/ | $$ /$$$$$$$$| $$$$$$$/| $$$$$$/| $$$$$$/| $$ | $$ | $$ | $$ | $$ | $$$$$$/ #\n# \_/ |__/|________/|_______/ \______/ \______/ |__/ |__/ |__/ |__/ |__/ \______/ #\n# #\n# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #\n\n# FORK OF LEANDRO KIELIGER'S DOOM PPO TUTORIAL: https://lkieliger.medium.com/deep-reinforcement-learning-in-practice-by-playing-doom-part-1-getting-started-618c99075c77\n\n# SCRIPT TO RUN PPO AGENT AND GENERATE DATASET FOR DOOM ENVIRONMENT.\n\nfrom dataclasses import dataclass\nimport imageio\nfrom common import envs\nimport torch\nimport json\nfrom vizdoom.vizdoom import GameVariable\nimport os\nfrom PIL import Image\n\nimport numpy as np\nfrom train_ppo_parallel import DoomWithBotsCurriculum, game_instance\nfrom stable_baselines3.common.vec_env import VecTransposeImage, DummyVecEnv\n\nfrom loguru import logger\nimport tyro\nfrom jasmine_data.utils import save_chunks\n\n# To replicate frame_skip in the environment\nACTION_REPEAT = 4\n\n\n@dataclass\nclass Args:\n 
num_episodes_train: int = 1000\n num_episodes_val: int = 100\n num_episodes_test: int = 100\n min_episode_length: int = 100\n max_episode_length: int = 1000\n num_parallel_envs: int = 100\n target_width: int = 320\n target_height: int = 240\n chunk_size: int = 160\n chunks_per_file: int = 100\n agent_path: str = """"\n seed: int = 0\n output_dir: str = ""data/vizdoom_episodes""\n generate_gif: bool = False\n\n\nargs = tyro.cli(Args)\ndevice = torch.device(""cuda"" if torch.cuda.is_available() else ""cpu"")\nlogger.info(f""Using device: {device}"")\n\n\ndef dummy_vec_env_with_bots_curriculum(n_envs=1, **kwargs) -> VecTransposeImage:\n """"""Wraps a Doom game instance in a vectorized environment with shaped rewards and curriculum.""""""\n scenario = kwargs.pop(""scenario"") # Remove 'scenario' from kwargs\n return VecTransposeImage(\n DummyVecEnv(\n [lambda: DoomWithBotsCurriculum(game_instance(scenario), **kwargs)] * n_envs\n )\n )\n\n\n# TODO move to utils\ndef downsample_resolution(img):\n if img.shape[:2] != (args.target_height, args.target_width):\n resample_filter = Image.LANCZOS\n img = Image.fromarray(img)\n img = img.resize(\n (args.target_height, args.target_width), resample=resample_filter\n )\n img = np.array(img)\n return img\n\n\ndef make_gif(agent, eval_env_args):\n """"""Generate a GIF by running the agent in the environment.\n\n Args:\n agent: The trained PPO agent.\n file_path (str): Path to save the generated GIF.\n eval_env_args (dict): Arguments for the evaluation environment.\n num_episodes (int): Number of episodes to run.\n\n Returns:\n list: Collected health values for analysis.\n """"""\n # Set frame_skip to 1 to capture all frames\n eval_env_args[""frame_skip""] = 1\n env = dummy_vec_env_with_bots_curriculum(1, **eval_env_args)\n\n images = []\n actions = []\n health_values = []\n current_action = None\n frame_counter = 0\n\n obs = env.reset()\n\n done = False\n while not done and frame_counter < args.max_episode_length:\n if frame_counter % ACTION_REPEAT == 0:\n current_action, _ = agent.predict(obs)\n\n obs, _, done, _ = env.step(current_action)\n\n # Get the raw screen buffer from the Doom game instance\n screen = env.venv.envs[0].game.get_state().screen_buffer\n\n # Get the current health value\n health = env.venv.envs[0].game.get_game_variable(GameVariable.HEALTH)\n health_values.append(health) # Store the health value\n\n actions.append(current_action)\n images.append(screen)\n\n frame_counter += 1\n\n print(""Health values:"", health_values)\n print(""Number of health values:"", len(health_values))\n print(""Number of actions:"", len(actions))\n print(""Number of images:"", len(images))\n\n # Save only the first 1000 frames to avoid large file size\n imageio.mimsave(args.output_dir, images, fps=20)\n env.close()\n logger.info(f""GIF saved to {args.output_dir}"")\n\n return health_values\n\n\ndef make_array_records_dataset(agent, eval_env_args, num_episodes, split):\n """"""Generate a dataset by running the agent in the environment and saving the data as array record files.\n\n Args:\n agent: The trained PPO agent.\n output_dir (str): Directory to save the array record files.\n eval_env_args (dict): Arguments for the evaluation environment.\n num_episodes (int): Number of episodes to run.\n """"""\n # Set frame_skip to 1 to capture all frames\n eval_env_args[""frame_skip""] = 1\n env = dummy_vec_env_with_bots_curriculum(args.num_parallel_envs, **eval_env_args)\n\n current_action_B = None\n episode_idx = 0\n episode_metadata = []\n obs_chunks = []\n act_chunks 
= []\n file_idx = 0\n output_dir_split = os.path.join(args.output_dir, split)\n os.makedirs(output_dir_split, exist_ok=True)\n\n while episode_idx < num_episodes // args.num_parallel_envs:\n obs = env.reset()\n done = np.array(False)\n frame_counter = 0\n\n observations_seq = []\n actions_seq = []\n health_values_seq = []\n episode_obs_chunks = []\n episode_act_chunks = []\n\n # --- Run episode ---\n while not done.any() and frame_counter < args.max_episode_length:\n screen_BHWC = [\n downsample_resolution(env_i.game.get_state().screen_buffer)\n for env_i in env.venv.envs\n ]\n health_B = [\n env_i.game.get_game_variable(GameVariable.HEALTH)\n for env_i in env.venv.envs\n ]\n if frame_counter % ACTION_REPEAT == 0:\n current_action_B, _ = agent.predict(obs)\n\n obs, _, done, _ = env.step(current_action_B)\n\n observations_seq.extend(screen_BHWC)\n actions_seq.extend(current_action_B)\n health_values_seq.extend(health_B)\n\n while len(observations_seq) >= args.chunk_size:\n episode_obs_chunks.append(observations_seq[: args.chunk_size])\n episode_act_chunks.append(actions_seq[: args.chunk_size])\n observations_seq = observations_seq[args.chunk_size :]\n actions_seq = actions_seq[args.chunk_size :]\n\n frame_counter += 1\n\n # --- Save episode ---\n if frame_counter >= args.min_episode_length:\n if observations_seq:\n if len(observations_seq) < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {len(observations_seq)} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. ""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n\n obs_chunks_data = [\n np.stack(seq).astype(np.uint8) for seq in episode_obs_chunks\n ]\n act_chunks_data = [\n np.stack(act).astype(np.uint8) for act in episode_act_chunks\n ]\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n ep_metadata, file_idx, obs_chunks, act_chunks = save_chunks(\n file_idx, args.chunks_per_file, output_dir_split, obs_chunks, act_chunks\n )\n episode_metadata.extend(ep_metadata)\n\n print(f""Episode {episode_idx} completed, length: {frame_counter}."")\n episode_idx += 1\n else:\n print(f""Episode too short ({frame_counter}), resampling..."")\n env.close()\n return episode_metadata\n\n\ndef main():\n assert (\n args.num_episodes_train % args.num_parallel_envs == 0\n and args.num_episodes_train >= args.num_parallel_envs\n )\n assert (\n args.num_episodes_val % args.num_parallel_envs == 0\n and args.num_episodes_val >= args.num_parallel_envs\n )\n assert (\n args.num_episodes_test % args.num_parallel_envs == 0\n and args.num_episodes_test >= args.num_parallel_envs\n )\n scenario = ""deathmatch_simple""\n\n env_args = {\n ""scenario"": scenario,\n ""frame_skip"": 1,\n ""frame_processor"": envs.default_frame_processor,\n ""n_bots"": 8,\n ""shaping"": True,\n ""initial_level"": 5,\n ""max_level"": 5,\n ""rolling_mean_length"": 10,\n }\n\n eval_env_args = dict(env_args)\n new_env = dummy_vec_env_with_bots_curriculum(1, **env_args)\n agent = envs.load_model(\n args.agent_path,\n new_env,\n )\n\n if args.generate_gif:\n make_gif(agent, eval_env_args)\n return\n\n train_episode_metadata = make_array_records_dataset(\n agent,\n num_episodes=args.num_episodes_train,\n eval_env_args=eval_env_args,\n split=""train"",\n )\n val_episode_metadata = make_array_records_dataset(\n agent,\n num_episodes=args.num_episodes_val,\n eval_env_args=eval_env_args,\n split=""val"",\n )\n 
test_episode_metadata = make_array_records_dataset(\n agent,\n num_episodes=args.num_episodes_test,\n eval_env_args=eval_env_args,\n split=""test"",\n )\n # --- Save metadata ---\n metadata = {\n ""env"": ""coinrun"",\n ""num_actions"": 0, # TODO mihir\n ""num_episodes_train"": args.num_episodes_train,\n ""num_episodes_val"": args.num_episodes_val,\n ""num_episodes_test"": args.num_episodes_test,\n ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]\n ),\n ""avg_episode_len_val"": np.mean(\n [ep[""avg_seq_len""] for ep in val_episode_metadata]\n ),\n ""avg_episode_len_test"": np.mean(\n [ep[""avg_seq_len""] for ep in test_episode_metadata]\n ),\n ""episode_metadata_train"": train_episode_metadata,\n ""episode_metadata_val"": val_episode_metadata,\n ""episode_metadata_test"": test_episode_metadata,\n }\n with open(os.path.join(args.output_dir, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(f""Done generating dataset."")\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab +841,748063,"TERMINAL",0,0,"2512442",,terminal_output +842,749096,"TERMINAL",0,0,"3623553",,terminal_output +843,750161,"TERMINAL",0,0,"4734664",,terminal_output +844,751178,"TERMINAL",0,0,"5845775",,terminal_output +845,752178,"TERMINAL",0,0,"6956886",,terminal_output +846,753213,"TERMINAL",0,0,"73067997",,terminal_output +847,754263,"TERMINAL",0,0,"81789:009:008",,terminal_output +848,755281,"TERMINAL",0,0,"9289119",,terminal_output +849,756328,"TERMINAL",0,0,"30395:002250",,terminal_output +850,757180,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3202,0,"",python,selection_mouse +851,757214,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3201,0,"",python,selection_command +852,757379,"TERMINAL",0,0,"149:001331",,terminal_output +853,758411,"TERMINAL",0,0,"2512442",,terminal_output +854,759428,"TERMINAL",0,0,"3623553",,terminal_output +855,760478,"TERMINAL",0,0,"4734664",,terminal_output +856,761563,"TERMINAL",0,0,"5845775",,terminal_output +857,762528,"TERMINAL",0,0,"6956886",,terminal_output +858,763578,"TERMINAL",0,0,"74067997",,terminal_output +859,764613,"TERMINAL",0,0,"817810108",,terminal_output +860,765628,"TERMINAL",0,0,"9289119",,terminal_output +861,766695,"TERMINAL",0,0,"403910223:00",,terminal_output +862,767230,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3036,0,"",python,selection_command +863,767712,"TERMINAL",0,0,"14101331",,terminal_output +864,768745,"TERMINAL",0,0,"2512442",,terminal_output +865,769845,"TERMINAL",0,0,"3623553",,terminal_output +866,770795,"TERMINAL",0,0,"4845775",,terminal_output +867,772029,"TERMINAL",0,0,"6956886",,terminal_output +868,772864,"TERMINAL",0,0,"75067997",,terminal_output +869,773895,"TERMINAL",0,0,"817820208",,terminal_output +870,774929,"TERMINAL",0,0,"9289119",,terminal_output +871,776045,"TERMINAL",0,0,"5039202210",,terminal_output +872,776996,"TERMINAL",0,0,"14201331",,terminal_output +873,778064,"TERMINAL",0,0,"2512442",,terminal_output +874,779112,"TERMINAL",0,0,"3623553",,terminal_output +875,780229,"TERMINAL",0,0,"4734664",,terminal_output +876,781114,"TERMINAL",0,0,"5845775",,terminal_output +877,781496,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4169,0,"",python,selection_mouse +878,782145,"TERMINAL",0,0,"6956886",,terminal_output +879,782747,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4381,0,"",python,selection_mouse +880,783178,"TERMINAL",0,0,"73:0067997",,terminal_output 
+881,783329,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4331,0,"",python,selection_mouse +882,784214,"TERMINAL",0,0,"817830308",,terminal_output +883,785181,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4380,0,"\n ",python,content +884,785262,"TERMINAL",0,0,"9289119",,terminal_output +885,785720,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4389,0,"s",python,content +886,785722,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4390,0,"",python,selection_keyboard +887,786246,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4390,0,"c",python,content +888,786249,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4391,0,"",python,selection_keyboard +889,786280,"TERMINAL",0,0,"7:0039302220",,terminal_output +890,787314,"TERMINAL",0,0,"14301331",,terminal_output +891,787566,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4391,0,"r",python,content +892,787568,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4392,0,"",python,selection_keyboard +893,787748,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4392,0,"e",python,content +894,787750,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4393,0,"",python,selection_keyboard +895,787879,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4393,0,"e",python,content +896,787881,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4394,0,"",python,selection_keyboard +897,788013,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4394,0,"n",python,content +898,788015,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4395,0,"",python,selection_keyboard +899,788247,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4395,0," ",python,content +900,788249,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4396,0,"",python,selection_keyboard +901,788382,"TERMINAL",0,0,"2512442",,terminal_output +902,788464,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4396,0,"=",python,content +903,788465,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4397,0,"",python,selection_keyboard +904,788603,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4397,0," ",python,content +905,788607,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4398,0,"",python,selection_keyboard +906,789481,"TERMINAL",0,0,"3623553",,terminal_output +907,790496,"TERMINAL",0,0,"4734664",,terminal_output +908,791478,"TERMINAL",0,0,"5845775",,terminal_output +909,791578,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4398,0,"d",python,content +910,791580,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4399,0,"",python,selection_keyboard +911,791696,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4399,0,"o",python,content +912,791697,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4400,0,"",python,selection_keyboard +913,792214,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4400,0," ",python,content +914,792216,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4401,0,"",python,selection_keyboard +915,792462,"TERMINAL",0,0,"6956886",,terminal_output +916,792929,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4400,1,"",python,content +917,793528,"TERMINAL",0,0,"71067997",,terminal_output +918,794262,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4398,2,"downsample_resolution",python,content +919,794561,"TERMINAL",0,0,"817840408",,terminal_output 
+920,794996,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4419,0,"()",python,content +921,794997,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4420,0,"",python,selection_keyboard +922,795562,"TERMINAL",0,0,"9289119",,terminal_output +923,796728,"TERMINAL",0,0,"1039402230",,terminal_output +924,796729,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4420,0,"s",python,content +925,796731,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4421,0,"",python,selection_keyboard +926,796879,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4421,0,"c",python,content +927,796880,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4422,0,"",python,selection_keyboard +928,797098,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4422,0,"r",python,content +929,797099,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4423,0,"",python,selection_keyboard +930,797296,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4423,0,"e",python,content +931,797297,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4424,0,"",python,selection_keyboard +932,797345,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4424,0,"e",python,content +933,797347,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4425,0,"",python,selection_keyboard +934,797479,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4425,0,"n",python,content +935,797481,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4426,0,"",python,selection_keyboard +936,797713,"TERMINAL",0,0,"14401331",,terminal_output +937,798678,"TERMINAL",0,0,"2512442",,terminal_output +938,799729,"TERMINAL",0,0,"3623553",,terminal_output +939,801579,"TERMINAL",0,0,"4845775",,terminal_output +940,802745,"TERMINAL",0,0,"6956886",,terminal_output +941,803645,"TERMINAL",0,0,"72067997",,terminal_output +942,804747,"TERMINAL",0,0,"817850508",,terminal_output +943,805763,"TERMINAL",0,0,"9289119",,terminal_output +944,806846,"TERMINAL",0,0,"2039502240",,terminal_output +945,808046,"TERMINAL",0,0,"15512442",,terminal_output +946,808828,"TERMINAL",0,0,"3623553",,terminal_output +947,809911,"TERMINAL",0,0,"4734664",,terminal_output +948,810861,"TERMINAL",0,0,"5845775",,terminal_output +949,811229,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4331,0,"",python,selection_mouse +950,811978,"TERMINAL",0,0,"6956886",,terminal_output +951,812945,"TERMINAL",0,0,"73067997",,terminal_output +952,813562,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4330,0,"",python,selection_command +953,814011,"TERMINAL",0,0,"817850:0050:008",,terminal_output +954,814679,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4288,0,"",python,selection_mouse +955,814995,"TERMINAL",0,0,"9289119",,terminal_output +956,816028,"TERMINAL",0,0,"30396:002250",,terminal_output +957,817079,"TERMINAL",0,0,"1420:001331",,terminal_output +958,818096,"TERMINAL",0,0,"2512442",,terminal_output +959,819228,"TERMINAL",0,0,"3623553",,terminal_output +960,820278,"TERMINAL",0,0,"4734664",,terminal_output +961,821195,"TERMINAL",0,0,"5845775",,terminal_output +962,822213,"TERMINAL",0,0,"6956886",,terminal_output +963,823445,"TERMINAL",0,0,"74067997",,terminal_output +964,824344,"TERMINAL",0,0,"817810108",,terminal_output +965,825478,"TERMINAL",0,0,"9289119",,terminal_output +966,826346,"TERMINAL",0,0,"403910224:00",,terminal_output +967,827379,"TERMINAL",0,0,"14101331",,terminal_output 
+968,827689,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=10:00:00\n#SBATCH --partition=large\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/coinrun/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/coinrun/%x_%j.log\n#SBATCH --job-name=preprocess_coinrun_chunked\n\nexport PYTHONUNBUFFERED=1\n\n\n# source .venv/bin/activate\n\nsource .venv/bin/activate\n\npython jasmine_data/ViZDoomPPO/load_model_generate_dataset.py \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \\n --agent_path $(pwd)/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip \\n --num_episodes_train 100 \\n --num_episodes_val 100 \\n --num_episodes_test 100 \\n --chunk_size 100 \\n\n",shellscript,tab +969,828414,"TERMINAL",0,0,"2512442",,terminal_output +970,828947,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",705,0,"",shellscript,selection_mouse +971,829496,"TERMINAL",0,0,"3623553",,terminal_output +972,829679,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",792,0,"",shellscript,selection_mouse +973,829681,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",791,0,"",shellscript,selection_command +974,830534,"TERMINAL",0,0,"4734664",,terminal_output +975,831629,"TERMINAL",0,0,"5845775",,terminal_output +976,832662,"TERMINAL",0,0,"6956886",,terminal_output +977,833584,"TERMINAL",0,0,"75067997",,terminal_output +978,834612,"TERMINAL",0,0,"817820208",,terminal_output +979,835728,"TERMINAL",0,0,"9289119",,terminal_output +980,835914,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",762,60,"",shellscript,content +981,835918,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",766,0,"",shellscript,selection_command +982,836446,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",762,53,"",shellscript,content +983,836696,"TERMINAL",0,0,"5039202210",,terminal_output +984,837530,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",762,0," ",shellscript,content +985,837778,"TERMINAL",0,0,"14201331",,terminal_output +986,838129,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",766,0,"-",shellscript,content +987,838131,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",767,0,"",shellscript,selection_keyboard +988,838231,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",767,0,"-",shellscript,content +989,838232,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",768,0,"",shellscript,selection_keyboard +990,838446,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",768,0,"m",shellscript,content +991,838447,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",769,0,"",shellscript,selection_keyboard +992,838545,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",769,0,"a",shellscript,content +993,838546,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",770,0,"",shellscript,selection_keyboard +994,838714,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",770,0,"x",shellscript,content +995,838715,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",771,0,"",shellscript,selection_keyboard +996,838761,"TERMINAL",0,0,"2512442",,terminal_output 
+997,838973,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",771,0,"_",shellscript,content +998,838973,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",772,0,"",shellscript,selection_keyboard +999,839262,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",772,0,"e",shellscript,content +1000,839263,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",773,0,"",shellscript,selection_keyboard +1001,839345,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",773,0,"p",shellscript,content +1002,839346,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",774,0,"",shellscript,selection_keyboard +1003,839495,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",774,0,"i",shellscript,content +1004,839496,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",775,0,"",shellscript,selection_keyboard +1005,839599,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",775,0,"s",shellscript,content +1006,839600,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",776,0,"",shellscript,selection_keyboard +1007,839696,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",776,0,"o",shellscript,content +1008,839697,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",777,0,"",shellscript,selection_keyboard +1009,839812,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",777,0,"d",shellscript,content +1010,839813,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",778,0,"",shellscript,selection_keyboard +1011,839828,"TERMINAL",0,0,"3734664",,terminal_output +1012,839895,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",778,0,"e",shellscript,content +1013,839896,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",779,0,"",shellscript,selection_keyboard +1014,840046,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",779,0,"_",shellscript,content +1015,840047,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",780,0,"",shellscript,selection_keyboard +1016,840345,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",780,0,"e",shellscript,content +1017,840346,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",781,0,"",shellscript,selection_keyboard +1018,840695,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",780,1,"",shellscript,content +1019,840813,"TERMINAL",0,0,"5845775",,terminal_output +1020,840915,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",780,0,"e",shellscript,content +1021,840916,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",781,0,"",shellscript,selection_keyboard +1022,841379,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",780,1,"",shellscript,content +1023,841595,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",780,0,"l",shellscript,content +1024,841596,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",781,0,"",shellscript,selection_keyboard +1025,841734,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",781,0,"e",shellscript,content +1026,841735,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",782,0,"",shellscript,selection_keyboard +1027,841862,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",782,0,"n",shellscript,content +1028,841863,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",783,0,"",shellscript,selection_keyboard 
+1029,841879,"TERMINAL",0,0,"6956886",,terminal_output +1030,841946,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",783,0,"g",shellscript,content +1031,841947,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",784,0,"",shellscript,selection_keyboard +1032,842099,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",784,0,"t",shellscript,content +1033,842100,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",785,0,"",shellscript,selection_keyboard +1034,842157,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",785,0,"h",shellscript,content +1035,842158,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",786,0,"",shellscript,selection_keyboard +1036,842912,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",786,0," ",shellscript,content +1037,842913,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",787,0,"",shellscript,selection_keyboard +1038,842963,"TERMINAL",0,0,"74:0067997",,terminal_output +1039,843098,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",787,0,"=",shellscript,content +1040,843099,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",788,0,"",shellscript,selection_keyboard +1041,843179,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",788,0," ",shellscript,content +1042,843180,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",789,0,"",shellscript,selection_keyboard +1043,843637,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",788,1,"",shellscript,content +1044,843862,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",787,1,"",shellscript,content +1045,843928,"TERMINAL",0,0,"817830308",,terminal_output +1046,844012,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",786,1,"",shellscript,content +1047,844497,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",786,0," ",shellscript,content +1048,844498,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",787,0,"",shellscript,selection_keyboard +1049,844630,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",787,0,"1",shellscript,content +1050,844631,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",788,0,"",shellscript,selection_keyboard +1051,844712,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",788,0,"0",shellscript,content +1052,844713,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",789,0,"",shellscript,selection_keyboard +1053,844845,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",789,0,"0",shellscript,content +1054,844846,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",790,0,"",shellscript,selection_keyboard +1055,844946,"TERMINAL",0,0,"9289119",,terminal_output +1056,845030,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",790,0," ",shellscript,content +1057,845030,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",791,0,"",shellscript,selection_keyboard +1058,845445,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",791,0,"\",shellscript,content +1059,845446,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",792,0,"",shellscript,selection_keyboard +1060,846047,"TERMINAL",0,0,"8:0039302220",,terminal_output +1061,847011,"TERMINAL",0,0,"14301331",,terminal_output +1062,847512,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",792,0,"\n ",shellscript,content 
+1063,848196,"TERMINAL",0,0,"2512442",,terminal_output +1064,848213,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",797,0,"-",shellscript,content +1065,848214,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",798,0,"",shellscript,selection_keyboard +1066,848345,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",798,0,"-",shellscript,content +1067,848346,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",799,0,"",shellscript,selection_keyboard +1068,848729,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",799,0,"m",shellscript,content +1069,848730,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",800,0,"",shellscript,selection_keyboard +1070,848836,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",800,0,"a",shellscript,content +1071,848837,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",801,0,"",shellscript,selection_keyboard +1072,848904,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",801,0,"k",shellscript,content +1073,848905,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",802,0,"",shellscript,selection_keyboard +1074,849094,"TERMINAL",0,0,"3623553",,terminal_output +1075,849146,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",802,0,"e",shellscript,content +1076,849147,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",803,0,"",shellscript,selection_keyboard +1077,849629,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",803,0,"_",shellscript,content +1078,849630,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",804,0,"",shellscript,selection_keyboard +1079,849829,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",804,0,"g",shellscript,content +1080,849830,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",805,0,"",shellscript,selection_keyboard +1081,849946,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",805,0,"i",shellscript,content +1082,849947,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",806,0,"",shellscript,selection_keyboard +1083,850062,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",806,0,"t",shellscript,content +1084,850063,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",807,0,"",shellscript,selection_keyboard +1085,850131,"TERMINAL",0,0,"4734664",,terminal_output +1086,850646,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",806,1,"",shellscript,content +1087,851182,"TERMINAL",0,0,"5845775",,terminal_output +1088,851512,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",806,0,"f",shellscript,content +1089,851513,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",807,0,"",shellscript,selection_keyboard +1090,851678,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",807,0," ",shellscript,content +1091,851679,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",808,0,"",shellscript,selection_keyboard +1092,851846,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",808,0,"\",shellscript,content +1093,851847,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",809,0,"",shellscript,selection_keyboard +1094,851979,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",808,0,"",shellscript,selection_command +1095,852228,"TERMINAL",0,0,"6956886",,terminal_output +1096,853350,"TERMINAL",0,0,"71067997",,terminal_output 
+1097,853513,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",792,0,"",shellscript,selection_mouse +1098,853514,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",791,0,"",shellscript,selection_command +1099,854386,"TERMINAL",0,0,"817840408",,terminal_output +1100,854566,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",0,0,"",python,tab +1101,855297,"TERMINAL",0,0,"9289119",,terminal_output +1102,856345,"TERMINAL",0,0,"1039402230",,terminal_output +1103,856363,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",11318,0,"",python,selection_command +1104,857229,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",0,0,"",python,selection_command +1105,857397,"TERMINAL",0,0,"14401331",,terminal_output +1106,857869,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",114,0,"",python,selection_command +1107,858445,"TERMINAL",0,0,"2512442",,terminal_output +1108,858612,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",0,0,"",python,selection_command +1109,859262,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4428,0,"",python,selection_command +1110,859396,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4428,0,"d",python,content +1111,859397,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4429,0,"",python,selection_keyboard +1112,859562,"TERMINAL",0,0,"3623553",,terminal_output +1113,860279,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4428,1,"",python,content +1114,860481,"TERMINAL",0,0,"4734664",,terminal_output +1115,861545,"TERMINAL",0,0,"5845775",,terminal_output +1116,862315,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4439,1,"G",python,selection_command +1117,862446,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5050,2,"GI",python,selection_command +1118,862528,"TERMINAL",0,0,"6956886",,terminal_output +1119,862701,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5050,3,"GIF",python,selection_command +1120,863262,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",9739,3,"gif",python,selection_command +1121,863579,"TERMINAL",0,0,"72067997",,terminal_output +1122,864611,"TERMINAL",0,0,"817850508",,terminal_output +1123,865628,"TERMINAL",0,0,"9289119",,terminal_output +1124,866715,"TERMINAL",0,0,"2039502240",,terminal_output +1125,867762,"TERMINAL",0,0,"14501331",,terminal_output +1126,868895,"TERMINAL",0,0,"2512442",,terminal_output +1127,869685,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",0,0,"",shellscript,tab +1128,869795,"TERMINAL",0,0,"3623553",,terminal_output +1129,870779,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",808,0,"",shellscript,selection_command +1130,870828,"TERMINAL",0,0,"4845775",,terminal_output +1131,871462,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",807,0,"",shellscript,selection_command +1132,871463,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",806,0,"",shellscript,selection_command +1133,871612,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",805,0,"",shellscript,selection_command +1134,871695,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",804,0,"",shellscript,selection_command +1135,871862,"TERMINAL",0,0,"6956886",,terminal_output +1136,871879,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",803,0,"",shellscript,selection_command 
+1137,871978,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",802,0,"",shellscript,selection_command +1138,872147,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",801,0,"",shellscript,selection_command +1139,872296,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",800,0,"",shellscript,selection_command +1140,872462,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",799,0,"",shellscript,selection_command +1141,872562,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",799,1,"",shellscript,content +1142,872746,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",799,1,"",shellscript,content +1143,872879,"TERMINAL",0,0,"73067997",,terminal_output +1144,872897,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",799,1,"",shellscript,content +1145,873097,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",799,1,"",shellscript,content +1146,873512,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",799,0,"g",shellscript,content +1147,873512,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",800,0,"",shellscript,selection_keyboard +1148,873513,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",800,0,"e",shellscript,content +1149,873513,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",801,0,"",shellscript,selection_keyboard +1150,873712,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",801,0,"n",shellscript,content +1151,873713,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",802,0,"",shellscript,selection_keyboard +1152,873879,"TERMINAL",0,0,"81781:001:008",,terminal_output +1153,873912,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",802,0,"e",shellscript,content +1154,873913,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",803,0,"",shellscript,selection_keyboard +1155,874280,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",803,0,"r",shellscript,content +1156,874281,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",804,0,"",shellscript,selection_keyboard +1157,874514,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",804,0,"a",shellscript,content +1158,874515,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",805,0,"",shellscript,selection_keyboard +1159,874545,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",805,0,"t",shellscript,content +1160,874546,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",806,0,"",shellscript,selection_keyboard +1161,874646,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",806,0,"e",shellscript,content +1162,874647,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",807,0,"",shellscript,selection_keyboard +1163,874912,"TERMINAL",0,0,"9289119",,terminal_output +1164,875030,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",806,0,"",shellscript,selection_command +1165,875962,"TERMINAL",0,0,"30397:002250",,terminal_output +1166,876766,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",0,0,"",python,tab +1167,877013,"TERMINAL",0,0,"141:001331",,terminal_output +1168,877302,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",0,0,"",shellscript,tab +1169,878011,"TERMINAL",0,0,"2512442",,terminal_output +1170,879045,"TERMINAL",0,0,"3623553",,terminal_output +1171,880145,"TERMINAL",0,0,"4734664",,terminal_output +1172,881129,"TERMINAL",0,0,"5845775",,terminal_output 
+1173,881345,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",563,0,"",shellscript,selection_mouse +1174,881862,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",707,0,"",shellscript,selection_mouse +1175,882146,"TERMINAL",0,0,"6956886",,terminal_output +1176,882464,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",598,0,"",shellscript,selection_mouse +1177,883179,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,0,"",shellscript,selection_mouse +1178,883196,"TERMINAL",0,0,"74067997",,terminal_output +1179,883329,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,2,"hk",shellscript,selection_mouse +1180,883330,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",535,57,"omPPO/load_model_generate_dataset.py \\n --output_dir /",shellscript,selection_mouse +1181,883330,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",542,50,"oad_model_generate_dataset.py \\n --output_dir /",shellscript,selection_mouse +1182,883348,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",550,42,"l_generate_dataset.py \\n --output_dir /",shellscript,selection_mouse +1183,883348,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",563,29,"taset.py \\n --output_dir /",shellscript,selection_mouse +1184,883378,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,55,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +1185,883395,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,61,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_d",shellscript,selection_mouse +1186,883431,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,66,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/d",shellscript,selection_mouse +1187,883431,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,68,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev",shellscript,selection_mouse +1188,883445,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",573,19,"\n --output_dir /",shellscript,selection_mouse +1189,883662,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,70,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \",shellscript,selection_mouse +1190,883745,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,69,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev ",shellscript,selection_mouse +1191,883779,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,68,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev",shellscript,selection_mouse +1192,883798,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,67,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/de",shellscript,selection_mouse +1193,883846,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,66,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/d",shellscript,selection_mouse +1194,883895,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,65,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/",shellscript,selection_mouse +1195,883996,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,66,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/d",shellscript,selection_mouse 
+1196,884029,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,67,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/de",shellscript,selection_mouse +1197,884195,"TERMINAL",0,0,"817810108",,terminal_output +1198,884495,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",592,68,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev",shellscript,selection_mouse +1199,885396,"TERMINAL",0,0,"9289119",,terminal_output +1200,885995,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",659,0,"",shellscript,selection_command +1201,886263,"TERMINAL",0,0,"403910225:00",,terminal_output +1202,886695,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",714,0,"",shellscript,selection_mouse +1203,887344,"TERMINAL",0,0,"14101331",,terminal_output +1204,887629,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",700,0,"",shellscript,selection_mouse +1205,888344,"TERMINAL",0,0,"2512442",,terminal_output +1206,889449,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",680,0,"",shellscript,selection_mouse +1207,889496,"TERMINAL",0,0,"3623553",,terminal_output +1208,889662,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",680,1,"$",shellscript,selection_mouse +1209,889695,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",680,3,"$(p",shellscript,selection_mouse +1210,889745,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",680,4,"$(pw",shellscript,selection_mouse +1211,889778,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",680,5,"$(pwd",shellscript,selection_mouse +1212,889879,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",680,6,"$(pwd)",shellscript,selection_mouse +1213,890728,"TERMINAL",0,0,"4734664",,terminal_output +1214,891212,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",501,0,"",shellscript,selection_mouse +1215,891562,"TERMINAL",0,0,"5845775",,terminal_output +1216,891945,"TERMINAL",0,0,"watch",,terminal_focus +1217,892511,"TERMINAL",0,0,"6956886",,terminal_output +1218,893530,"TERMINAL",0,0,"srun",,terminal_focus +1219,893531,"TERMINAL",0,0,"75067997",,terminal_output +1220,894478,"TERMINAL",0,0,"pw",,terminal_output +1221,894545,"TERMINAL",0,0,"d",,terminal_output +1222,894579,"TERMINAL",0,0,"817820208",,terminal_output +1223,894695,"TERMINAL",0,0,"\r\n[?2004l\r/home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h(jasmine-data) [tum_cte0515@hkn0401 data]$ ",,terminal_output +1224,895561,"TERMINAL",0,0,"9289119",,terminal_output +1225,896595,"TERMINAL",0,0,"5039202210",,terminal_output +1226,897628,"TERMINAL",0,0,"14201331",,terminal_output +1227,898680,"TERMINAL",0,0,"2512442",,terminal_output +1228,899696,"TERMINAL",0,0,"3623553",,terminal_output +1229,900761,"TERMINAL",0,0,"4734664",,terminal_output +1230,901744,"TERMINAL",0,0,"5845775",,terminal_output +1231,902895,"TERMINAL",0,0,"65:0067997",,terminal_output +1232,903812,"TERMINAL",0,0,"817830308",,terminal_output +1233,904845,"TERMINAL",0,0,"9289119",,terminal_output +1234,905064,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",0,0,"",shellscript,tab +1235,905879,"TERMINAL",0,0,"9:0039302220",,terminal_output +1236,906730,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,0,"",shellscript,selection_mouse +1237,906763,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,3,"/hk",shellscript,selection_mouse 
+1238,906778,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",537,54,"PPO/load_model_generate_dataset.py \\n --output_dir ",shellscript,selection_mouse +1239,906795,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",574,17," --output_dir ",shellscript,selection_mouse +1240,906796,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,19,"/hkfs/work/workspac",shellscript,selection_mouse +1241,906845,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,24,"/hkfs/work/workspace/scr",shellscript,selection_mouse +1242,906846,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,33,"/hkfs/work/workspace/scratch/tum_",shellscript,selection_mouse +1243,906895,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,38,"/hkfs/work/workspace/scratch/tum_ind36",shellscript,selection_mouse +1244,906896,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,138,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \\n --agent_path $(pwd)/jasmine_data/ViZDoomPPO/logs/models/deathm",shellscript,selection_mouse +1245,906929,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,140,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \\n --agent_path $(pwd)/jasmine_data/ViZDoomPPO/logs/models/deathmat",shellscript,selection_mouse +1246,906930,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,143,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \\n --agent_path $(pwd)/jasmine_data/ViZDoomPPO/logs/models/deathmatch_",shellscript,selection_mouse +1247,906979,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,145,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \\n --agent_path $(pwd)/jasmine_data/ViZDoomPPO/logs/models/deathmatch_si",shellscript,selection_mouse +1248,906980,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,147,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \\n --agent_path $(pwd)/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simp",shellscript,selection_mouse +1249,906980,"TERMINAL",0,0,"14301331",,terminal_output +1250,906995,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,149,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \\n --agent_path $(pwd)/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple",shellscript,selection_mouse +1251,906996,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,151,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \\n --agent_path $(pwd)/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_b",shellscript,selection_mouse +1252,907012,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,152,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \\n --agent_path $(pwd)/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_ba",shellscript,selection_mouse +1253,907064,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,153,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \\n --agent_path $(pwd)/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak",shellscript,selection_mouse +1254,907195,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,154,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev \\n --agent_path 
$(pwd)/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/",shellscript,selection_mouse +1255,907212,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,66,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/",shellscript,selection_mouse +1256,907295,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,67,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/d",shellscript,selection_mouse +1257,907305,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,68,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/de",shellscript,selection_mouse +1258,907395,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,69,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_doom/dev",shellscript,selection_mouse +1259,908031,"TERMINAL",0,0,"2512442",,terminal_output +1260,908045,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,69,"",shellscript,content +1261,908745,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",591,0,"/home/hk-project-p0023960/tum_cte0515/Projects/jasmine",shellscript,content +1262,908962,"TERMINAL",0,0,"3623553",,terminal_output +1263,909878,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",645,0,"/",shellscript,content +1264,909879,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",646,0,"",shellscript,selection_keyboard +1265,910011,"TERMINAL",0,0,"4734664",,terminal_output +1266,911046,"TERMINAL",0,0,"5845775",,terminal_output +1267,912111,"TERMINAL",0,0,"6956886",,terminal_output +1268,913094,"TERMINAL",0,0,"71067997",,terminal_output +1269,914128,"TERMINAL",0,0,"817840408",,terminal_output +1270,915278,"TERMINAL",0,0,"9289119",,terminal_output +1271,916195,"TERMINAL",0,0,"1039402230",,terminal_output +1272,917228,"TERMINAL",0,0,"14401331",,terminal_output +1273,918361,"TERMINAL",0,0,"2512442",,terminal_output +1274,919328,"TERMINAL",0,0,"3623553",,terminal_output +1275,920445,"TERMINAL",0,0,"4734664",,terminal_output +1276,921429,"TERMINAL",0,0,"5845775",,terminal_output +1277,921864,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",557,0,"",shellscript,selection_mouse +1278,922396,"TERMINAL",0,0,"6956886",,terminal_output +1279,923495,"TERMINAL",0,0,"72067997",,terminal_output +1280,924546,"TERMINAL",0,0,"817850508",,terminal_output +1281,925545,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",646,0,"",shellscript,selection_mouse +1282,925546,"TERMINAL",0,0,"9289119",,terminal_output +1283,926412,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",646,0,"g",shellscript,content +1284,926414,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",647,0,"",shellscript,selection_keyboard +1285,926529,"TERMINAL",0,0,"2039502240",,terminal_output +1286,926580,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",647,0,"i",shellscript,content +1287,926581,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",648,0,"",shellscript,selection_keyboard +1288,926697,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",648,0,"f",shellscript,content +1289,926698,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",649,0,"",shellscript,selection_keyboard +1290,926862,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",649,0,"s",shellscript,content +1291,926863,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",650,0,"",shellscript,selection_keyboard 
+1292,927578,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",650,0,"/",shellscript,content +1293,927579,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",651,0,"",shellscript,selection_keyboard +1294,927581,"TERMINAL",0,0,"14501331",,terminal_output +1295,927863,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",650,0,"",shellscript,selection_command +1296,928595,"TERMINAL",0,0,"2512442",,terminal_output +1297,929678,"TERMINAL",0,0,"3623553",,terminal_output +1298,930663,"TERMINAL",0,0,"4734664",,terminal_output +1299,931314,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",0,0,"",shellscript,tab +1300,931715,"TERMINAL",0,0,"5845775",,terminal_output +1301,932745,"TERMINAL",0,0,"6956886",,terminal_output +1302,933778,"TERMINAL",0,0,"73067997",,terminal_output +1303,934514,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",0,0,"",python,tab +1304,934929,"TERMINAL",0,0,"82892:012:019",,terminal_output +1305,935862,"TERMINAL",0,0,"30398:002250",,terminal_output +1306,936861,"TERMINAL",0,0,"142:001331",,terminal_output +1307,936946,"TERMINAL",0,0,"s",,terminal_output +1308,937078,"TERMINAL",0,0,"h",,terminal_output +1309,937137,"TERMINAL",0,0," ",,terminal_output +1310,937762,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",,terminal_output +1311,937895,"TERMINAL",0,0,"2512442",,terminal_output +1312,938930,"TERMINAL",0,0,"3623553",,terminal_output +1313,939297,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",,terminal_output +1314,939480,"TERMINAL",0,0,"",,terminal_output +1315,939628,"TERMINAL",0,0,"",,terminal_output +1316,939830,"TERMINAL",0,0,"",,terminal_output +1317,939967,"TERMINAL",0,0,"",,terminal_output +1318,939967,"TERMINAL",0,0,"4734664",,terminal_output +1319,940080,"TERMINAL",0,0,"",,terminal_output +1320,940245,"TERMINAL",0,0,"",,terminal_output +1321,940429,"TERMINAL",0,0,"",,terminal_output +1322,940595,"TERMINAL",0,0,"",,terminal_output +1323,940962,"TERMINAL",0,0,"",,terminal_output +1324,941011,"TERMINAL",0,0,"5845775",,terminal_output +1325,941745,"TERMINAL",0,0,".slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh\r",,terminal_output +1326,941878,"TERMINAL",0,0,".slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh\r",,terminal_output +1327,942028,"TERMINAL",0,0,"6956886",,terminal_output +1328,942229,"TERMINAL",0,0,"/slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh\r",,terminal_output +1329,942428,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1330,943095,"TERMINAL",0,0,"74067997",,terminal_output +1331,944161,"TERMINAL",0,0,"817810108",,terminal_output +1332,945198,"TERMINAL",0,0,"9289119",,terminal_output +1333,946145,"TERMINAL",0,0,"403910226:00",,terminal_output +1334,946979,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py:596: UserWarning: WARN: plugin: shimmy.registration:register_gymnasium_envs raised Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 594, in load_plugin_envs\r\n fn()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 304, in register_gymnasium_envs\r\n _register_atari_envs()\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 205, in _register_atari_envs\r\n import ale_py\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/__init__.py"", line 66, in \r\n register_v0_v4_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 176, in register_v0_v4_envs\r\n _register_rom_configs(legacy_games, obs_types, versions)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 62, in _register_rom_configs\r\n gymnasium.register(\r\nAttributeError: partially initialized module 'gymnasium' has no attribute 'register' (most likely due to a circular import)\r\n\r\n logger.warn(f""plugin: {plugin.value} raised {traceback.format_exc()}"")\r\n",,terminal_output +1335,947195,"TERMINAL",0,0,"14101331",,terminal_output +1336,948230,"TERMINAL",0,0,"2512442",,terminal_output +1337,949296,"TERMINAL",0,0,"3623553",,terminal_output +1338,949462,"TERMINAL",0,0,"Using device: cuda\r\n",,terminal_output +1339,949695,"TERMINAL",0,0,"2025-10-05 17:09:43.919 | INFO  | __main__::58 - Using device: cuda\r\n",,terminal_output +1340,949946,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object clip_range. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object lr_schedule. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n",,terminal_output +1341,950496,"TERMINAL",0,0,"4734664",,terminal_output +1342,951328,"TERMINAL",0,0,"5845775",,terminal_output +1343,952377,"TERMINAL",0,0,"6956886",,terminal_output +1344,952762,"TERMINAL",0,0,"Model loaded from /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip\r\n",,terminal_output +1345,952846,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. 
Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n",,terminal_output +1346,953428,"TERMINAL",0,0,"Health values: [100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0]\r\nNumber of health values: 100\r\nNumber of actions: 100\r\nNumber of images: 100\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py"", line 316, in \r\n main()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py"", line 268, in main\r\n make_gif(agent, eval_env_args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py"", line 133, in make_gif\r\n imageio.mimsave(args.output_dir, images, fps=20)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/imageio/v2.py"", line 494, in mimwrite\r\n with imopen(uri, ""wI"", **imopen_args) as file:\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/imageio/core/imopen.py"", line 223, in imopen\r\n raise err_type(err_msg)\r\nValueError: ImageIO does not generally support reading folders. Limited support may be available via specific plugins. Specify the plugin explicitly using the `plugin` kwarg, e.g. 
`plugin='DICOM'`\r\n",,terminal_output +1347,953429,"TERMINAL",0,0,"75067997",,terminal_output +1348,954112,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h(jasmine-data) [tum_cte0515@hkn0401 data]$ ",,terminal_output +1349,954481,"TERMINAL",0,0,"817820208",,terminal_output +1350,955469,"TERMINAL",0,0,"9289119",,terminal_output +1351,956511,"TERMINAL",0,0,"5039202210",,terminal_output +1352,957545,"TERMINAL",0,0,"14201331",,terminal_output +1353,958546,"TERMINAL",0,0,"2512442",,terminal_output +1354,959578,"TERMINAL",0,0,"3623553",,terminal_output +1355,960628,"TERMINAL",0,0,"4734664",,terminal_output +1356,961697,"TERMINAL",0,0,"5845775",,terminal_output +1357,962812,"TERMINAL",0,0,"6956886",,terminal_output +1358,963281,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",0,0,"",shellscript,tab +1359,963711,"TERMINAL",0,0,"76:0067997",,terminal_output +1360,964796,"TERMINAL",0,0,"817830308",,terminal_output +1361,966080,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",607,0,"",shellscript,selection_mouse +1362,966845,"TERMINAL",0,0,"9430313321",,terminal_output +1363,967265,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",0,0,"",python,tab +1364,967911,"TERMINAL",0,0,"10:02512442",,terminal_output +1365,968195,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",9798,0,"",python,selection_mouse +1366,968931,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",9765,0,"",python,selection_mouse +1367,968932,"TERMINAL",0,0,"3623553",,terminal_output +1368,969530,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",9757,0,"",python,selection_mouse +1369,969778,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3340,0,"",python,selection_command +1370,969981,"TERMINAL",0,0,"4734664",,terminal_output +1371,971046,"TERMINAL",0,0,"5845775",,terminal_output +1372,972096,"TERMINAL",0,0,"6956886",,terminal_output +1373,973095,"TERMINAL",0,0,"71067997",,terminal_output +1374,974161,"TERMINAL",0,0,"817840408",,terminal_output +1375,975111,"TERMINAL",0,0,"9289119",,terminal_output +1376,976163,"TERMINAL",0,0,"1039402230",,terminal_output +1377,977212,"TERMINAL",0,0,"14401331",,terminal_output +1378,978212,"TERMINAL",0,0,"2512442",,terminal_output +1379,979311,"TERMINAL",0,0,"3623553",,terminal_output +1380,979979,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5031,0,"",python,selection_mouse +1381,979983,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5030,0,"",python,selection_command +1382,980112,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5031,0,"",python,selection_mouse +1383,980114,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5030,0,"",python,selection_command +1384,980278,"TERMINAL",0,0,"4734664",,terminal_output +1385,980679,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4994,0,"",python,selection_mouse +1386,980846,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4988,10,"output_dir",python,selection_mouse +1387,981329,"TERMINAL",0,0,"5845775",,terminal_output +1388,981746,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4985,0,"",python,selection_mouse +1389,981895,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4983,4,"args",python,selection_mouse +1390,982346,"TERMINAL",0,0,"6956886",,terminal_output +1391,983495,"TERMINAL",0,0,"72067997",,terminal_output +1392,984628,"TERMINAL",0,0,"817850508",,terminal_output +1393,985529,"TERMINAL",0,0,"9289119",,terminal_output 
+1394,986479,"TERMINAL",0,0,"2039502240",,terminal_output +1395,987628,"TERMINAL",0,0,"14501331",,terminal_output +1396,988548,"TERMINAL",0,0,"2512442",,terminal_output +1397,989678,"TERMINAL",0,0,"3623553",,terminal_output +1398,990661,"TERMINAL",0,0,"4734664",,terminal_output +1399,991645,"TERMINAL",0,0,"5845775",,terminal_output +1400,992680,"TERMINAL",0,0,"6956886",,terminal_output +1401,993829,"TERMINAL",0,0,"73067997",,terminal_output +1402,994761,"TERMINAL",0,0,"81783:003:008",,terminal_output +1403,995782,"TERMINAL",0,0,"9399:002250",,terminal_output +1404,996812,"TERMINAL",0,0,"3143:001331",,terminal_output +1405,997929,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5083,0,"",python,selection_command +1406,997978,"TERMINAL",0,0,"2512442",,terminal_output +1407,998879,"TERMINAL",0,0,"3623553",,terminal_output +1408,998995,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4982,0,"",python,selection_mouse +1409,999864,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4983,0,"",python,selection_mouse +1410,999945,"TERMINAL",0,0,"4734664",,terminal_output +1411,1000545,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4924,0,"",python,selection_mouse +1412,1000978,"TERMINAL",0,0,"5845775",,terminal_output +1413,1001045,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4962,0,"\n ",python,content +1414,1001961,"TERMINAL",0,0,"6956886",,terminal_output +1415,1002096,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4967,0,"o",python,content +1416,1002097,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4968,0,"",python,selection_keyboard +1417,1002212,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4968,0,"u",python,content +1418,1002213,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4969,0,"",python,selection_keyboard +1419,1002711,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4969,0,"t",python,content +1420,1002712,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4970,0,"",python,selection_keyboard +1421,1003029,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4970,0,"p",python,content +1422,1003030,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4971,0,"",python,selection_keyboard +1423,1003031,"TERMINAL",0,0,"74067997",,terminal_output +1424,1003285,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4971,0,"u",python,content +1425,1003286,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4972,0,"",python,selection_keyboard +1426,1003396,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4972,0,"t",python,content +1427,1003397,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4973,0,"",python,selection_keyboard +1428,1003631,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4973,0,"_",python,content +1429,1003632,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4974,0,"",python,selection_keyboard +1430,1003880,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4974,0,"p",python,content +1431,1003881,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4975,0,"",python,selection_keyboard +1432,1003978,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4975,0,"a",python,content +1433,1003980,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4976,0,"",python,selection_keyboard +1434,1004061,"TERMINAL",0,0,"817810108",,terminal_output 
+1435,1004145,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4976,0,"t",python,content +1436,1004146,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4977,0,"",python,selection_keyboard +1437,1004413,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4977,0,"h",python,content +1438,1004414,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4978,0,"",python,selection_keyboard +1439,1004512,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4978,0," ",python,content +1440,1004513,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4979,0,"",python,selection_keyboard +1441,1004795,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4979,0,"=",python,content +1442,1004796,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4980,0,"",python,selection_keyboard +1443,1004912,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4980,0," ",python,content +1444,1004913,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4981,0,"",python,selection_keyboard +1445,1005129,"TERMINAL",0,0,"9289119",,terminal_output +1446,1005845,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4981,0,"o",python,content +1447,1005846,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4982,0,"",python,selection_keyboard +1448,1005962,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4982,0,"s",python,content +1449,1005963,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4983,0,"",python,selection_keyboard +1450,1006079,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4983,0,".",python,content +1451,1006080,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4984,0,"",python,selection_keyboard +1452,1006129,"TERMINAL",0,0,"403910227:00",,terminal_output +1453,1006312,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4984,0,"p",python,content +1454,1006314,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4985,0,"",python,selection_keyboard +1455,1006378,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4985,0,"a",python,content +1456,1006379,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4986,0,"",python,selection_keyboard +1457,1006562,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4986,0,"t",python,content +1458,1006563,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4987,0,"",python,selection_keyboard +1459,1006611,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4987,0,"h",python,content +1460,1006612,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4988,0,"",python,selection_keyboard +1461,1006813,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4988,0,".",python,content +1462,1006815,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4989,0,"",python,selection_keyboard +1463,1007045,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4989,0,"j",python,content +1464,1007047,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4990,0,"",python,selection_keyboard +1465,1007146,"TERMINAL",0,0,"14101331",,terminal_output +1466,1007428,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4990,0,"o",python,content +1467,1007430,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4991,0,"",python,selection_keyboard +1468,1007713,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4991,0,"i",python,content 
+1469,1007713,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4992,0,"",python,selection_keyboard +1470,1007745,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4992,0,"n",python,content +1471,1007746,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4993,0,"",python,selection_keyboard +1472,1008047,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4989,4,"join",python,content +1473,1008233,"TERMINAL",0,0,"2512442",,terminal_output +1474,1008813,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4993,0,"()",python,content +1475,1008814,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4994,0,"",python,selection_keyboard +1476,1008929,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4994,0,"a",python,content +1477,1008930,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4995,0,"",python,selection_keyboard +1478,1009131,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4995,0,"r",python,content +1479,1009132,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4996,0,"",python,selection_keyboard +1480,1009178,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4996,0,"g",python,content +1481,1009179,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4997,0,"",python,selection_keyboard +1482,1009228,"TERMINAL",0,0,"3623553",,terminal_output +1483,1009312,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4997,0,"s",python,content +1484,1009314,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4998,0,"",python,selection_keyboard +1485,1009412,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4998,0,".",python,content +1486,1009413,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4999,0,"",python,selection_keyboard +1487,1009663,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4999,0,"o",python,content +1488,1009664,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5000,0,"",python,selection_keyboard +1489,1009862,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5000,0,"i",python,content +1490,1009863,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5001,0,"",python,selection_keyboard +1491,1009929,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5001,0,"u",python,content +1492,1009930,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5002,0,"",python,selection_keyboard +1493,1010262,"TERMINAL",0,0,"4734664",,terminal_output +1494,1010345,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",4999,3,"output_dir",python,content +1495,1010864,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5009,0,",",python,content +1496,1010865,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5010,0,"",python,selection_keyboard +1497,1010928,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5010,0," ",python,content +1498,1010929,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5011,0,"",python,selection_keyboard +1499,1011295,"TERMINAL",0,0,"5845775",,terminal_output +1500,1011579,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5011,0,"""""",python,content +1501,1011580,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5012,0,"",python,selection_keyboard +1502,1011896,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5012,0,"o",python,content +1503,1011897,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5013,0,"",python,selection_keyboard 
+1504,1012095,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5013,0,"u",python,content +1505,1012096,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5014,0,"",python,selection_keyboard +1506,1012161,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5014,0,"t",python,content +1507,1012162,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5015,0,"",python,selection_keyboard +1508,1012312,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5015,0,"p",python,content +1509,1012313,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5016,0,"",python,selection_keyboard +1510,1012313,"TERMINAL",0,0,"6956886",,terminal_output +1511,1012529,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5016,0,"u",python,content +1512,1012530,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5017,0,"",python,selection_keyboard +1513,1012629,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5017,0,"t",python,content +1514,1012630,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5018,0,"",python,selection_keyboard +1515,1012828,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5018,0,".",python,content +1516,1012829,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5019,0,"",python,selection_keyboard +1517,1013361,"TERMINAL",0,0,"75067997",,terminal_output +1518,1014295,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5019,0,"f",python,content +1519,1014296,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5020,0,"",python,selection_keyboard +1520,1014361,"TERMINAL",0,0,"817820208",,terminal_output +1521,1014713,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5019,1,"",python,content +1522,1014778,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5019,0,"g",python,content +1523,1014779,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5020,0,"",python,selection_keyboard +1524,1014878,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5020,0,"i",python,content +1525,1014879,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5021,0,"",python,selection_keyboard +1526,1014994,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5021,0,"f",python,content +1527,1014995,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5022,0,"",python,selection_keyboard +1528,1015411,"TERMINAL",0,0,"9289119",,terminal_output +1529,1016412,"TERMINAL",0,0,"5039202210",,terminal_output +1530,1016645,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5055,0,"",python,selection_mouse +1531,1017362,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5046,0,"",python,selection_mouse +1532,1017462,"TERMINAL",0,0,"14201331",,terminal_output +1533,1017512,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5045,4,"args",python,selection_mouse +1534,1017595,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5045,5,"args.",python,selection_mouse +1535,1017595,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5045,15,"args.output_dir",python,selection_mouse +1536,1018095,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5045,15,"",python,content +1537,1018515,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5045,0,"o",python,content +1538,1018515,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5046,0,"",python,selection_keyboard +1539,1018516,"TERMINAL",0,0,"2512442",,terminal_output 
+1540,1018561,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5046,0,"u",python,content +1541,1018562,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5047,0,"",python,selection_keyboard +1542,1018945,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5047,0,"t",python,content +1543,1018946,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5048,0,"",python,selection_keyboard +1544,1019462,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5045,3,"output_path",python,content +1545,1019578,"TERMINAL",0,0,"3623553",,terminal_output +1546,1019997,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",5055,0,"",python,selection_command +1547,1020546,"TERMINAL",0,0,"4734664",,terminal_output +1548,1021528,"TERMINAL",0,0,"sh ../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",,terminal_output +1549,1021578,"TERMINAL",0,0,"5845775",,terminal_output +1550,1022178,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1551,1022611,"TERMINAL",0,0,"6956886",,terminal_output +1552,1023629,"TERMINAL",0,0,"77:0067997",,terminal_output +1553,1023695,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py:596: UserWarning: WARN: plugin: shimmy.registration:register_gymnasium_envs raised Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 594, in load_plugin_envs\r\n fn()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 304, in register_gymnasium_envs\r\n _register_atari_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 205, in _register_atari_envs\r\n import ale_py\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/__init__.py"", line 66, in \r\n register_v0_v4_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 176, in register_v0_v4_envs\r\n _register_rom_configs(legacy_games, obs_types, versions)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 62, in _register_rom_configs\r\n gymnasium.register(\r\nAttributeError: partially initialized module 'gymnasium' has no attribute 'register' (most likely due to a circular import)\r\n\r\n logger.warn(f""plugin: {plugin.value} raised {traceback.format_exc()}"")\r\n",,terminal_output +1554,1024561,"TERMINAL",0,0,"Using device: cuda\r\n",,terminal_output +1555,1024728,"TERMINAL",0,0,"2025-10-05 17:10:58.872 | INFO  | __main__::58 - Using device: cuda\r\n",,terminal_output +1556,1024744,"TERMINAL",0,0,"817830308",,terminal_output +1557,1024897,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. 
Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object clip_range. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object lr_schedule. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n",,terminal_output +1558,1025713,"TERMINAL",0,0,"9289119",,terminal_output +1559,1025797,"TERMINAL",0,0,"Model loaded from /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip\r\n",,terminal_output +1560,1025982,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n",,terminal_output +1561,1026330,"TERMINAL",0,0,"Health values: [100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0]\r\nNumber of health values: 100\r\nNumber of actions: 100\r\nNumber of images: 100\r\n",,terminal_output +1562,1026897,"TERMINAL",0,0,"1:0039302220",,terminal_output +1563,1027678,"TERMINAL",0,0,"2025-10-05 17:11:01.919 | INFO  | __main__:make_gif:136 - GIF saved to /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/gifs/\r\n",,terminal_output +1564,1027795,"TERMINAL",0,0,"14301331",,terminal_output +1565,1028470,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h(jasmine-data) [tum_cte0515@hkn0401 data]$ ",,terminal_output +1566,1028944,"TERMINAL",0,0,"2623553",,terminal_output +1567,1029845,"TERMINAL",0,0,"4734664",,terminal_output +1568,1030995,"TERMINAL",0,0,"5845775",,terminal_output +1569,1031944,"TERMINAL",0,0,"6956886",,terminal_output +1570,1032953,"TERMINAL",0,0,"71067997",,terminal_output +1571,1035465,"TERMINAL",0,0,"8178404089289119",,terminal_output +1572,1036126,"TERMINAL",0,0,"1039402230",,terminal_output +1573,1037946,"TERMINAL",0,0,"14401331",,terminal_output +1574,1038111,"TERMINAL",0,0,"2512442",,terminal_output 
+1575,1040712,"TERMINAL",0,0,"36235534734664",,terminal_output +1576,1041211,"TERMINAL",0,0,"5845775",,terminal_output +1577,1042328,"TERMINAL",0,0,"6956886",,terminal_output +1578,1043396,"TERMINAL",0,0,"72067997",,terminal_output +1579,1044312,"TERMINAL",0,0,"817850508",,terminal_output +1580,1045506,"TERMINAL",0,0,"9289119",,terminal_output +1581,1046378,"TERMINAL",0,0,"2039502240",,terminal_output +1582,1047411,"TERMINAL",0,0,"14501331",,terminal_output +1583,1048478,"TERMINAL",0,0,"2512442",,terminal_output +1584,1049478,"TERMINAL",0,0,"3623553",,terminal_output +1585,1051882,"jasmine/train_tokenizer.py",0,0,"",python,tab +1586,1051945,"TERMINAL",0,0,"47346645845775",,terminal_output +1587,1052595,"TERMINAL",0,0,"6956886",,terminal_output +1588,1054495,"TERMINAL",0,0,"73067997",,terminal_output +1589,1054678,"TERMINAL",0,0,"81784:004:008",,terminal_output +1590,1055182,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",0,0,"",shellscript,tab +1591,1055712,"TERMINAL",0,0,"9289119",,terminal_output +1592,1056174,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",804,0,"",shellscript,selection_mouse +1593,1056175,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",803,0,"",shellscript,selection_command +1594,1056965,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",804,0,"\n ",shellscript,content +1595,1057146,"TERMINAL",0,0,"303940:002250",,terminal_output +1596,1057794,"TERMINAL",0,0,"144:001331",,terminal_output +1597,1058128,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",809,0,"-",shellscript,content +1598,1058129,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",810,0,"",shellscript,selection_keyboard +1599,1058296,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",810,0,"-",shellscript,content +1600,1058296,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",811,0,"",shellscript,selection_keyboard +1601,1058646,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",811,0,"t",shellscript,content +1602,1058646,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",812,0,"",shellscript,selection_keyboard +1603,1058861,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",812,0,"a",shellscript,content +1604,1058862,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",813,0,"",shellscript,selection_keyboard +1605,1058862,"TERMINAL",0,0,"2623553",,terminal_output +1606,1059014,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",813,0,"r",shellscript,content +1607,1059015,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",814,0,"",shellscript,selection_keyboard +1608,1059097,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",814,0,"g",shellscript,content +1609,1059098,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",815,0,"",shellscript,selection_keyboard +1610,1059213,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",815,0,"e",shellscript,content +1611,1059213,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",816,0,"",shellscript,selection_keyboard +1612,1059263,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",816,0,"t",shellscript,content +1613,1059264,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",817,0,"",shellscript,selection_keyboard +1614,1059780,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",817,0,"_",shellscript,content 
+1615,1059780,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",818,0,"",shellscript,selection_keyboard +1616,1060063,"TERMINAL",0,0,"4734664",,terminal_output +1617,1060645,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",818,0,"w",shellscript,content +1618,1060646,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",819,0,"",shellscript,selection_keyboard +1619,1060745,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",819,0,"i",shellscript,content +1620,1060746,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",820,0,"",shellscript,selection_keyboard +1621,1060861,"TERMINAL",0,0,"5845775",,terminal_output +1622,1061280,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",820,0,"d",shellscript,content +1623,1061281,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",821,0,"",shellscript,selection_keyboard +1624,1061584,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",821,0,"t",shellscript,content +1625,1061584,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",822,0,"",shellscript,selection_keyboard +1626,1061678,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",822,0,"h",shellscript,content +1627,1061679,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",823,0,"",shellscript,selection_keyboard +1628,1061878,"TERMINAL",0,0,"6956886",,terminal_output +1629,1062262,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",823,0,"=",shellscript,content +1630,1062263,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",824,0,"",shellscript,selection_keyboard +1631,1062928,"TERMINAL",0,0,"74067997",,terminal_output +1632,1063795,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",823,1,"",shellscript,content +1633,1063945,"TERMINAL",0,0,"817810108",,terminal_output +1634,1064113,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",823,0," ",shellscript,content +1635,1064114,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",824,0,"",shellscript,selection_keyboard +1636,1065013,"TERMINAL",0,0,"9289119",,terminal_output +1637,1066078,"TERMINAL",0,0,"403910228:00",,terminal_output +1638,1066112,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",824,0,"1",shellscript,content +1639,1066113,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",825,0,"",shellscript,selection_keyboard +1640,1066645,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",825,0,"6",shellscript,content +1641,1066646,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",826,0,"",shellscript,selection_keyboard +1642,1066797,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",826,0,"0",shellscript,content +1643,1066798,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",827,0,"",shellscript,selection_keyboard +1644,1067012,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",827,0," ",shellscript,content +1645,1067013,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",828,0,"",shellscript,selection_keyboard +1646,1067078,"TERMINAL",0,0,"14101331",,terminal_output +1647,1068128,"TERMINAL",0,0,"2512442",,terminal_output +1648,1068728,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",828,0,"\",shellscript,content +1649,1068729,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",829,0,"",shellscript,selection_keyboard 
+1650,1068945,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",829,0,"\n ",shellscript,content +1651,1069195,"TERMINAL",0,0,"3623553",,terminal_output +1652,1070086,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",834,0,"-",shellscript,content +1653,1070087,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",835,0,"",shellscript,selection_keyboard +1654,1070264,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",835,0,"-",shellscript,content +1655,1070265,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",836,0,"",shellscript,selection_keyboard +1656,1070265,"TERMINAL",0,0,"4734664",,terminal_output +1657,1070511,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",836,0,"t",shellscript,content +1658,1070512,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",837,0,"",shellscript,selection_keyboard +1659,1070630,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",837,0,"a",shellscript,content +1660,1070630,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",838,0,"",shellscript,selection_keyboard +1661,1070761,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",838,0,"r",shellscript,content +1662,1070762,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",839,0,"",shellscript,selection_keyboard +1663,1070778,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",839,0,"g",shellscript,content +1664,1070779,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",840,0,"",shellscript,selection_keyboard +1665,1070928,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",840,0,"e",shellscript,content +1666,1070929,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",841,0,"",shellscript,selection_keyboard +1667,1071027,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",841,0,"t",shellscript,content +1668,1071028,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",842,0,"",shellscript,selection_keyboard +1669,1071178,"TERMINAL",0,0,"5845775",,terminal_output +1670,1071361,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",842,0,"_",shellscript,content +1671,1071362,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",843,0,"",shellscript,selection_keyboard +1672,1071728,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",843,0,"g",shellscript,content +1673,1071728,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",844,0,"",shellscript,selection_keyboard +1674,1072228,"TERMINAL",0,0,"6956886",,terminal_output +1675,1072362,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",843,1,"",shellscript,content +1676,1072561,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",843,0,"h",shellscript,content +1677,1072562,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",844,0,"",shellscript,selection_keyboard +1678,1072678,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",844,0,"e",shellscript,content +1679,1072678,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",845,0,"",shellscript,selection_keyboard +1680,1072828,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",845,0,"i",shellscript,content +1681,1072829,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",846,0,"",shellscript,selection_keyboard +1682,1073244,"TERMINAL",0,0,"75067997",,terminal_output 
+1683,1073428,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",846,0,"g",shellscript,content +1684,1073429,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",847,0,"",shellscript,selection_keyboard +1685,1073699,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",847,0,"t",shellscript,content +1686,1073700,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",848,0,"",shellscript,selection_keyboard +1687,1073811,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",848,0,"h",shellscript,content +1688,1073812,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",849,0,"",shellscript,selection_keyboard +1689,1074314,"TERMINAL",0,0,"817820208",,terminal_output +1690,1074795,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",848,1,"",shellscript,content +1691,1074951,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",847,1,"",shellscript,content +1692,1075095,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",846,1,"",shellscript,content +1693,1075359,"TERMINAL",0,0,"9289119",,terminal_output +1694,1076362,"TERMINAL",0,0,"5039202210",,terminal_output +1695,1076445,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",846,0,"g",shellscript,content +1696,1076447,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",847,0,"",shellscript,selection_keyboard +1697,1076578,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",847,0,"h",shellscript,content +1698,1076579,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",848,0,"",shellscript,selection_keyboard +1699,1076701,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",848,0,"t",shellscript,content +1700,1076702,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",849,0,"",shellscript,selection_keyboard +1701,1077396,"TERMINAL",0,0,"14201331",,terminal_output +1702,1077878,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",849,0," ",shellscript,content +1703,1077879,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",850,0,"",shellscript,selection_keyboard +1704,1078296,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",850,0,"1",shellscript,content +1705,1078297,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",851,0,"",shellscript,selection_keyboard +1706,1078412,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",851,0,"2",shellscript,content +1707,1078413,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",852,0,"",shellscript,selection_keyboard +1708,1078461,"TERMINAL",0,0,"2512442",,terminal_output +1709,1078513,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",852,0,"0",shellscript,content +1710,1078513,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",853,0,"",shellscript,selection_keyboard +1711,1079529,"TERMINAL",0,0,"3623553",,terminal_output +1712,1080513,"TERMINAL",0,0,"4734664",,terminal_output +1713,1080612,"TERMINAL",0,0,"sh ../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",,terminal_output +1714,1080779,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1715,1081531,"TERMINAL",0,0,"5845775",,terminal_output +1716,1082277,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py:596: UserWarning: WARN: plugin: shimmy.registration:register_gymnasium_envs raised Traceback (most recent call last):\r\n 
File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 594, in load_plugin_envs\r\n fn()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 304, in register_gymnasium_envs\r\n _register_atari_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 205, in _register_atari_envs\r\n import ale_py\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/__init__.py"", line 66, in \r\n register_v0_v4_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 176, in register_v0_v4_envs\r\n _register_rom_configs(legacy_games, obs_types, versions)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 62, in _register_rom_configs\r\n gymnasium.register(\r\nAttributeError: partially initialized module 'gymnasium' has no attribute 'register' (most likely due to a circular import)\r\n\r\n logger.warn(f""plugin: {plugin.value} raised {traceback.format_exc()}"")\r\n",,terminal_output +1717,1082577,"TERMINAL",0,0,"6956886",,terminal_output +1718,1083028,"TERMINAL",0,0,"Using device: cuda\r\n2025-10-05 17:11:57.239 | INFO  | __main__::58 - Using device: cuda\r\n",,terminal_output +1719,1083245,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object clip_range. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object lr_schedule. 
Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n",,terminal_output +1720,1083711,"TERMINAL",0,0,"78:0067997",,terminal_output +1721,1084063,"TERMINAL",0,0,"Model loaded from /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip\r\n",,terminal_output +1722,1084311,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n",,terminal_output +1723,1084628,"TERMINAL",0,0,"817830308",,terminal_output +1724,1084845,"TERMINAL",0,0,"Health values: [100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0]\r\nNumber of health values: 100\r\nNumber of actions: 100\r\nNumber of images: 100\r\n",,terminal_output +1725,1085762,"TERMINAL",0,0,"9289119",,terminal_output +1726,1085945,"TERMINAL",0,0,"2025-10-05 17:12:00.098 | INFO  | __main__:make_gif:136 - GIF saved to /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/gifs/\r\n",,terminal_output +1727,1087095,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h(jasmine-data) [tum_cte0515@hkn0401 data]$ ",,terminal_output +1728,1087095,"TERMINAL",0,0,"2:0039302220",,terminal_output +1729,1087779,"TERMINAL",0,0,"14301331",,terminal_output +1730,1088828,"TERMINAL",0,0,"2512442",,terminal_output +1731,1090378,"TERMINAL",0,0,"3734664",,terminal_output +1732,1090827,"TERMINAL",0,0,"5845775",,terminal_output +1733,1091864,"TERMINAL",0,0,"6956886",,terminal_output +1734,1093045,"TERMINAL",0,0,"71067997",,terminal_output +1735,1093930,"TERMINAL",0,0,"817840408",,terminal_output +1736,1094978,"TERMINAL",0,0,"9289119",,terminal_output +1737,1095978,"TERMINAL",0,0,"1039402230",,terminal_output +1738,1097127,"TERMINAL",0,0,"14401331",,terminal_output +1739,1098128,"TERMINAL",0,0,"2512442",,terminal_output +1740,1098348,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",0,0,"",shellscript,tab +1741,1099079,"TERMINAL",0,0,"3623553",,terminal_output +1742,1100128,"TERMINAL",0,0,"4734664",,terminal_output +1743,1100946,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",819,0,"",shellscript,selection_mouse +1744,1101095,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",811,12,"target_width",shellscript,selection_mouse +1745,1101247,"TERMINAL",0,0,"5845775",,terminal_output 
+1746,1101763,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",846,0,"",shellscript,selection_mouse +1747,1101930,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",836,13,"target_height",shellscript,selection_mouse +1748,1102296,"TERMINAL",0,0,"6956886",,terminal_output +1749,1103220,"TERMINAL",0,0,"72067997",,terminal_output +1750,1104261,"TERMINAL",0,0,"817850508",,terminal_output +1751,1105311,"TERMINAL",0,0,"9289119",,terminal_output +1752,1106420,"TERMINAL",0,0,"2039502240",,terminal_output +1753,1107345,"TERMINAL",0,0,"14501331",,terminal_output +1754,1107730,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",0,0,"",python,tab +1755,1108478,"TERMINAL",0,0,"2512442",,terminal_output +1756,1109428,"TERMINAL",0,0,"3623553",,terminal_output +1757,1110482,"TERMINAL",0,0,"4734664",,terminal_output +1758,1111497,"TERMINAL",0,0,"5845775",,terminal_output +1759,1112612,"TERMINAL",0,0,"6956886",,terminal_output +1760,1113561,"TERMINAL",0,0,"73067997",,terminal_output +1761,1114678,"TERMINAL",0,0,"81785:005:008",,terminal_output +1762,1115611,"TERMINAL",0,0,"9289119",,terminal_output +1763,1116662,"TERMINAL",0,0,"30391:002250",,terminal_output +1764,1117678,"TERMINAL",0,0,"145:001331",,terminal_output +1765,1118712,"TERMINAL",0,0,"2512442",,terminal_output +1766,1119528,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3229,0,"",python,selection_mouse +1767,1119761,"TERMINAL",0,0,"3623553",,terminal_output +1768,1120687,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3237,0,"",python,selection_mouse +1769,1120778,"TERMINAL",0,0,"4845775",,terminal_output +1770,1121329,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3236,0,"",python,selection_mouse +1771,1121830,"TERMINAL",0,0,"6956886",,terminal_output +1772,1122829,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3228,0,"",python,selection_mouse +1773,1122861,"TERMINAL",0,0,"74067997",,terminal_output +1774,1123011,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3228,2,"he",python,selection_mouse +1775,1123045,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3228,3,"hei",python,selection_mouse +1776,1123046,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3228,4,"heig",python,selection_mouse +1777,1123346,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3228,5,"heigh",python,selection_mouse +1778,1123445,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3228,6,"height",python,selection_mouse +1779,1123730,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3228,6,"",python,content +1780,1123895,"TERMINAL",0,0,"817810108",,terminal_output +1781,1124511,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3228,0,"w",python,content +1782,1124512,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3229,0,"",python,selection_keyboard +1783,1124646,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3229,0,"i",python,content +1784,1124646,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3230,0,"",python,selection_keyboard +1785,1124961,"TERMINAL",0,0,"9289119",,terminal_output +1786,1125328,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3221,9,"target_width",python,content +1787,1125961,"TERMINAL",0,0,"403910229:00",,terminal_output +1788,1126830,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3247,0,"",python,selection_mouse 
+1789,1126928,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3247,1,"w",python,selection_mouse +1790,1126961,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3247,2,"wi",python,selection_mouse +1791,1126978,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3247,3,"wid",python,selection_mouse +1792,1127013,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3247,4,"widt",python,selection_mouse +1793,1127014,"TERMINAL",0,0,"14101331",,terminal_output +1794,1127211,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3247,5,"width",python,selection_mouse +1795,1127963,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3247,5,"h",python,content +1796,1127964,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3248,0,"",python,selection_keyboard +1797,1128047,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3248,0,"e",python,content +1798,1128047,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3249,0,"",python,selection_keyboard +1799,1128048,"TERMINAL",0,0,"2512442",,terminal_output +1800,1128712,"data/jasmine_data/ViZDoomPPO/load_model_generate_dataset.py",3240,9,"target_height",python,content +1801,1129112,"TERMINAL",0,0,"3623553",,terminal_output +1802,1130095,"TERMINAL",0,0,"4734664",,terminal_output +1803,1131294,"TERMINAL",0,0,"5845775",,terminal_output +1804,1131361,"TERMINAL",0,0,"sh ../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh\r\n[?2004l\r",,terminal_output +1805,1132145,"TERMINAL",0,0,"6956886",,terminal_output +1806,1132678,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py:596: UserWarning: WARN: plugin: shimmy.registration:register_gymnasium_envs raised Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 594, in load_plugin_envs\r\n fn()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 304, in register_gymnasium_envs\r\n _register_atari_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 205, in _register_atari_envs\r\n import ale_py\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/__init__.py"", line 66, in \r\n register_v0_v4_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 176, in register_v0_v4_envs\r\n _register_rom_configs(legacy_games, obs_types, versions)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 62, in _register_rom_configs\r\n gymnasium.register(\r\nAttributeError: partially initialized module 'gymnasium' has no attribute 'register' (most likely due to a circular import)\r\n\r\n logger.warn(f""plugin: {plugin.value} raised {traceback.format_exc()}"")\r\n",,terminal_output +1807,1133280,"TERMINAL",0,0,"75067997",,terminal_output +1808,1133496,"TERMINAL",0,0,"Using device: cuda\r\n",,terminal_output +1809,1133595,"TERMINAL",0,0,"2025-10-05 17:12:47.800 | INFO  | __main__::58 - Using device: cuda\r\n",,terminal_output 
+1810,1133861,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n",,terminal_output +1811,1133929,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object clip_range. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object lr_schedule. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n",,terminal_output +1812,1134294,"TERMINAL",0,0,"817820208",,terminal_output +1813,1134633,"TERMINAL",0,0,"Model loaded from /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip\r\n",,terminal_output +1814,1134811,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. 
Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n",,terminal_output +1815,1135344,"TERMINAL",0,0,"9289119",,terminal_output +1816,1135345,"TERMINAL",0,0,"Health values: [100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0]\r\nNumber of health values: 100\r\nNumber of actions: 100\r\nNumber of images: 100\r\n",,terminal_output +1817,1136328,"TERMINAL",0,0,"5039202210",,terminal_output +1818,1136328,"TERMINAL",0,0,"2025-10-05 17:12:50.515 | INFO  | __main__:make_gif:136 - GIF saved to /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/gifs/\r\n",,terminal_output +1819,1137179,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h(jasmine-data) [tum_cte0515@hkn0401 data]$ ",,terminal_output +1820,1137345,"TERMINAL",0,0,"14201331",,terminal_output +1821,1138379,"TERMINAL",0,0,"2512442",,terminal_output +1822,1141611,"TERMINAL",0,0,"362355347346645845775",,terminal_output +1823,1142911,"TERMINAL",0,0,"6956886",,terminal_output +1824,1143561,"TERMINAL",0,0,"79:0067997",,terminal_output +1825,1144879,"TERMINAL",0,0,"817830308",,terminal_output +1826,1145578,"TERMINAL",0,0,"9289119",,terminal_output +1827,1146666,"TERMINAL",0,0,"3:0039302220",,terminal_output +1828,1147929,"TERMINAL",0,0,"14301331",,terminal_output +1829,1148681,"TERMINAL",0,0,"2512442",,terminal_output +1830,1149711,"TERMINAL",0,0,"3623553",,terminal_output +1831,1150761,"TERMINAL",0,0,"4734664",,terminal_output +1832,1151777,"TERMINAL",0,0,"5956886",,terminal_output +1833,1152863,"TERMINAL",0,0,"71067997",,terminal_output +1834,1153998,"TERMINAL",0,0,"817840408",,terminal_output +1835,1154895,"TERMINAL",0,0,"9289119",,terminal_output +1836,1155998,"TERMINAL",0,0,"1039402230",,terminal_output +1837,1156995,"TERMINAL",0,0,"14401331",,terminal_output +1838,1157977,"TERMINAL",0,0,"2512442",,terminal_output +1839,1159015,"TERMINAL",0,0,"3623553",,terminal_output +1840,1160062,"TERMINAL",0,0,"4734664",,terminal_output +1841,1161077,"TERMINAL",0,0,"5845775",,terminal_output +1842,1162128,"TERMINAL",0,0,"6956886",,terminal_output +1843,1163161,"TERMINAL",0,0,"72067997",,terminal_output +1844,1164194,"TERMINAL",0,0,"817850508",,terminal_output +1845,1165312,"TERMINAL",0,0,"9289119",,terminal_output +1846,1166294,"TERMINAL",0,0,"2039502240",,terminal_output +1847,1167328,"TERMINAL",0,0,"14501331",,terminal_output +1848,1168329,"TERMINAL",0,0,"2512442",,terminal_output +1849,1169362,"TERMINAL",0,0,"3623553",,terminal_output +1850,1170131,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",0,0,"",shellscript,tab +1851,1170461,"TERMINAL",0,0,"4734664",,terminal_output +1852,1171478,"TERMINAL",0,0,"5845775",,terminal_output +1853,1172496,"TERMINAL",0,0,"6956886",,terminal_output +1854,1172728,"TERMINAL",0,0,"watch",,terminal_focus 
+1855,1173829,"TERMINAL",0,0,"73067997[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",,terminal_output +1856,1178347,"TERMINAL",0,0,"python",,terminal_command +1857,1178411,"TERMINAL",0,0,"]633;C",,terminal_output +1858,1178462,"TERMINAL",0,0,"Python 3.9.18 (main, Sep 4 2025, 00:00:00) \r\n[GCC 11.4.1 20231218 (Red Hat 11.4.1-3)] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n>>> ",,terminal_output +1859,1179194,"TERMINAL",0,0,"3",,terminal_output +1860,1179413,"TERMINAL",0,0,"2",,terminal_output +1861,1179528,"TERMINAL",0,0,"0",,terminal_output +1862,1180044,"TERMINAL",0,0,"*",,terminal_output +1863,1180628,"TERMINAL",0,0,"3",,terminal_output +1864,1180761,"TERMINAL",0,0,"\r\n960\r\n>>> ",,terminal_output +1865,1182097,"TERMINAL",0,0,"320*3",,terminal_output +1866,1182295,"TERMINAL",0,0,"",,terminal_output +1867,1182479,"TERMINAL",0,0,"",,terminal_output +1868,1183161,"TERMINAL",0,0,"/",,terminal_output +1869,1183679,"TERMINAL",0,0,"3",,terminal_output +1870,1183862,"TERMINAL",0,0,"\r\n106.66666666666667\r\n>>> ",,terminal_output +1871,1184378,"TERMINAL",0,0,"320/3",,terminal_output +1872,1184678,"TERMINAL",0,0,"",,terminal_output +1873,1185161,"TERMINAL",0,0,"2",,terminal_output +1874,1185361,"TERMINAL",0,0,"\r\n160.0\r\n>>> ",,terminal_output +1875,1185745,"TERMINAL",0,0,"320/2",,terminal_output +1876,1185961,"TERMINAL",0,0,"",,terminal_output +1877,1186761,"TERMINAL",0,0,"4",,terminal_output +1878,1186862,"TERMINAL",0,0,"\r\n80.0\r\n>>> ",,terminal_output +1879,1187594,"TERMINAL",0,0,"320/4",,terminal_output +1880,1187861,"TERMINAL",0,0,"",,terminal_output +1881,1191162,"TERMINAL",0,0,"",,terminal_output +1882,1191278,"TERMINAL",0,0,"",,terminal_output +1883,1191397,"TERMINAL",0,0,"",,terminal_output +1884,1191578,"TERMINAL",0,0,"",,terminal_output +1885,1191729,"TERMINAL",0,0,"",,terminal_output +1886,1192979,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",827,0,"",shellscript,selection_mouse +1887,1193496,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",826,1,"",shellscript,content +1888,1193628,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",825,1,"",shellscript,content +1889,1193761,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",824,1,"",shellscript,content +1890,1194567,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",824,0,"8",shellscript,content +1891,1194568,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",825,0,"",shellscript,selection_keyboard +1892,1194611,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",825,0,"0",shellscript,content +1893,1194612,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",826,0,"",shellscript,selection_keyboard +1894,1195611,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",828,0,"",shellscript,selection_mouse +1895,1196047,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",851,0,"",shellscript,selection_mouse +1896,1196414,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",852,0,"",shellscript,selection_mouse +1897,1196745,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",851,1,"",shellscript,content +1898,1196846,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",850,1,"",shellscript,content +1899,1197005,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",849,1,"",shellscript,content 
+1900,1198211,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",849,0,"6",shellscript,content +1901,1198212,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",850,0,"",shellscript,selection_keyboard +1902,1198495,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",850,0,"0",shellscript,content +1903,1198496,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",851,0,"",shellscript,selection_keyboard +1904,1201468,"TERMINAL",0,0,"srun",,terminal_focus +1905,1202661,"TERMINAL",0,0,"sh ../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",,terminal_output +1906,1203030,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1907,1204494,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py:596: UserWarning: WARN: plugin: shimmy.registration:register_gymnasium_envs raised Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 594, in load_plugin_envs\r\n fn()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 304, in register_gymnasium_envs\r\n _register_atari_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 205, in _register_atari_envs\r\n import ale_py\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/__init__.py"", line 66, in \r\n register_v0_v4_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 176, in register_v0_v4_envs\r\n _register_rom_configs(legacy_games, obs_types, versions)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 62, in _register_rom_configs\r\n gymnasium.register(\r\nAttributeError: partially initialized module 'gymnasium' has no attribute 'register' (most likely due to a circular import)\r\n\r\n logger.warn(f""plugin: {plugin.value} raised {traceback.format_exc()}"")\r\n",,terminal_output +1908,1205428,"TERMINAL",0,0,"Using device: cuda\r\n2025-10-05 17:13:59.671 | INFO  | __main__::58 - Using device: cuda\r\n",,terminal_output +1909,1205662,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object clip_range. 
Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object lr_schedule. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n",,terminal_output +1910,1206861,"TERMINAL",0,0,"Model loaded from /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip\r\nAL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n",,terminal_output +1911,1207263,"TERMINAL",0,0,"Health values: [100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0]\r\nNumber of health values: 100\r\nNumber of actions: 100\r\nNumber of images: 100\r\n",,terminal_output +1912,1207861,"TERMINAL",0,0,"2025-10-05 17:14:02.066 | INFO  | __main__:make_gif:136 - GIF saved to /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/gifs/\r\n",,terminal_output +1913,1208511,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h(jasmine-data) [tum_cte0515@hkn0401 data]$ ",,terminal_output +1914,1272847,"TERMINAL",0,0,"sh ../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",,terminal_output +1915,1274480,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",0,0,"",shellscript,tab +1916,1275946,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",849,2,"",shellscript,content +1917,1276295,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",849,0,"120",shellscript,content +1918,1276628,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",824,2,"",shellscript,content +1919,1276878,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",824,0,"160",shellscript,content +1920,1279421,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1921,1280961,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py:596: UserWarning: WARN: plugin: shimmy.registration:register_gymnasium_envs raised Traceback (most recent call last):\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 594, in load_plugin_envs\r\n fn()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 304, in register_gymnasium_envs\r\n _register_atari_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 205, in _register_atari_envs\r\n import ale_py\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/__init__.py"", line 66, in \r\n register_v0_v4_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 176, in register_v0_v4_envs\r\n _register_rom_configs(legacy_games, obs_types, versions)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 62, in _register_rom_configs\r\n gymnasium.register(\r\nAttributeError: partially initialized module 'gymnasium' has no attribute 'register' (most likely due to a circular import)\r\n\r\n logger.warn(f""plugin: {plugin.value} raised {traceback.format_exc()}"")\r\n",,terminal_output +1922,1281695,"TERMINAL",0,0,"Using device: cuda\r\n",,terminal_output +1923,1281795,"TERMINAL",0,0,"2025-10-05 17:15:16.008 | INFO  | __main__::58 - Using device: cuda\r\n",,terminal_output +1924,1281978,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object clip_range. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object lr_schedule. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n",,terminal_output +1925,1282927,"TERMINAL",0,0,"Model loaded from /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip\r\n",,terminal_output +1926,1283077,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. 
We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n",,terminal_output +1927,1283594,"TERMINAL",0,0,"Health values: [100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0]\r\nNumber of health values: 100\r\nNumber of actions: 100\r\nNumber of images: 100\r\n",,terminal_output +1928,1284878,"TERMINAL",0,0,"2025-10-05 17:15:19.032 | INFO  | __main__:make_gif:136 - GIF saved to /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/gifs/\r\n",,terminal_output +1929,1285494,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h(jasmine-data) [tum_cte0515@hkn0401 data]$ ",,terminal_output +1930,1306782,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",0,0,"",shellscript,tab +1931,1307962,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",826,0,"",shellscript,selection_command +1932,1308797,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",805,24," --target_width 160 \",shellscript,selection_command +1933,1308995,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",805,48," --target_width 160 \\n --target_height 120",shellscript,selection_command +1934,1309245,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",809,0,"",shellscript,selection_command +1935,1309928,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",834,0,"#",shellscript,content +1936,1309928,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",809,0,"#",shellscript,content +1937,1309929,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",810,0,"",shellscript,selection_keyboard +1938,1310232,"slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",809,0,"",shellscript,selection_command +1939,1312129,"TERMINAL",0,0,"sh ../slurm/jobs/mihir/horeka/preprocessing/doom/doom_chunked_test.sh",,terminal_output +1940,1312287,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1941,1313662,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py:596: UserWarning: WARN: plugin: shimmy.registration:register_gymnasium_envs raised Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/gymnasium/envs/registration.py"", line 594, in load_plugin_envs\r\n fn()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 304, in register_gymnasium_envs\r\n _register_atari_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/shimmy/registration.py"", line 205, in 
_register_atari_envs\r\n import ale_py\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/__init__.py"", line 66, in \r\n register_v0_v4_envs()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 176, in register_v0_v4_envs\r\n _register_rom_configs(legacy_games, obs_types, versions)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/ale_py/registration.py"", line 62, in _register_rom_configs\r\n gymnasium.register(\r\nAttributeError: partially initialized module 'gymnasium' has no attribute 'register' (most likely due to a circular import)\r\n\r\n logger.warn(f""plugin: {plugin.value} raised {traceback.format_exc()}"")\r\n",,terminal_output +1942,1314444,"TERMINAL",0,0,"Using device: cuda\r\n",,terminal_output +1943,1314578,"TERMINAL",0,0,"2025-10-05 17:15:48.753 | INFO  | __main__::58 - Using device: cuda\r\n",,terminal_output +1944,1314746,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object clip_range. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/save_util.py:167: UserWarning: Could not deserialize object lr_schedule. Consider using `custom_objects` argument to replace this object.\r\nException: code expected at most 16 arguments, got 18\r\n warnings.warn(\r\n",,terminal_output +1945,1315595,"TERMINAL",0,0,"Model loaded from /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/jasmine_data/ViZDoomPPO/logs/models/deathmatch_simple_bak/best_model.zip\r\n",,terminal_output +1946,1315795,"TERMINAL",0,0,"AL lib: (WW) alc_initconfig: Failed to initialize backend ""pulse""\r\nBuilt action space of size 18 from buttons [ \r\n ]\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/data/.venv/lib/python3.10/site-packages/stable_baselines3/common/vec_env/patch_gym.py:49: UserWarning: You provided an OpenAI Gym environment. We strongly recommend transitioning to Gymnasium environments. 
Stable-Baselines3 is automatically wrapping your environments in a compatibility layer, which could potentially cause issues.\r\n warnings.warn(\r\n",,terminal_output +1947,1316144,"TERMINAL",0,0,"Health values: [100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0, 100.0]\r\nNumber of health values: 100\r\nNumber of actions: 100\r\nNumber of images: 100\r\n",,terminal_output +1948,1317062,"TERMINAL",0,0,"2025-10-05 17:15:51.232 | INFO  | __main__:make_gif:136 - GIF saved to /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/gifs/\r\n",,terminal_output +1949,1317744,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine/data[?2004h(jasmine-data) [tum_cte0515@hkn0401 data]$ ",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-05e5774f-8d01-4366-931e-9e4275a288a01759222637213-2025_09_30-10.58.14.459/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-05e5774f-8d01-4366-931e-9e4275a288a01759222637213-2025_09_30-10.58.14.459/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..3efb1c1699b65682ee51a63ecbccc695c548e844 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-05e5774f-8d01-4366-931e-9e4275a288a01759222637213-2025_09_30-10.58.14.459/source.csv @@ -0,0 +1,1355 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,4,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_default\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --log \\n --name=coinrun-tokenizer-default-$slurm_job_id \\n --tags tokenizer 
coinrun default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n",shellscript,tab +2,272,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:58:14 AM [info] Activating crowd-code\n10:58:14 AM [info] Recording started\n10:58:14 AM [info] Initializing git provider using file system watchers...\n",Log,tab +3,305,"extension-output-pdoom-org.crowd-code-#1-crowd-code",153,0,"10:58:14 AM [info] Git repository found\n10:58:14 AM [info] Git provider initialized successfully\n",Log,content +4,499,"extension-output-pdoom-org.crowd-code-#1-crowd-code",250,0,"10:58:14 AM [info] Initial git state: [object Object]\n",Log,content +5,964206,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n max_noise_level: float = 0.7\n noise_buckets: int = 10\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = False\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str 
= """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n max_noise_level=args.max_noise_level,\n noise_buckets=args.noise_buckets,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n 
seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer 
= restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore 
checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n batch=inputs,\n seq_len=args.seq_len,\n noise_level=0.0,\n temperature=args.val_temperature,\n sample_argmax=args.val_sample_argmax,\n maskgit_steps=args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = 
val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n 
comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +6,964211,"jasmine/train_dynamics.py",4600,0,"",python,selection_mouse +7,964212,"jasmine/train_dynamics.py",4599,0,"",python,selection_command +8,966630,"jasmine/train_dynamics.py",4848,0,"",python,selection_mouse +9,966646,"jasmine/train_dynamics.py",4847,0,"",python,selection_command +10,982723,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_lam_as_15.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH 
--ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_actionspace_15\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python jasmine/train_lam.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --num_latents=15 \\n --batch_size=120 \\n --log \\n --name=coinrun-lam-default-$slurm_job_id \\n --tags lam coinrun default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \n",shellscript,tab +11,1305212,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +12,1307276,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/train\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3498707\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3498707\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=50 \\n 
--log_checkpoint_interval=2 \\n --dyna_type=maskgit \\n --log \\n --name=coinrun-dyn-dev-$slurm_job_id \\n --tags dyn coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 10 \\n --data_dir $array_records_dir_train \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --val_data_dir $array_records_dir_val \\n --val_interval 2 \\n --val_steps 5\n",shellscript,tab +13,1316627,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +14,1322547,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",0,0,"",shellscript,tab +15,1349424,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",0,0,"",shellscript,tab +16,1410094,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +17,1411018,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +18,1444421,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",0,0,"",shellscript,tab +19,1444422,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2370,0,"",shellscript,selection_mouse 
+20,1444448,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2369,0,"",shellscript,selection_command +21,1444467,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2369,1,"d",shellscript,selection_mouse +22,1444476,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2370,0,"",shellscript,selection_command +23,1444556,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2369,1,"d",shellscript,selection_mouse +24,1444632,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2354,16,"\nwait $child_pid",shellscript,selection_mouse +25,1444684,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2352,18,"!\n\nwait $child_pid",shellscript,selection_mouse +26,1444742,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2351,19,"$!\n\nwait $child_pid",shellscript,selection_mouse +27,1444826,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2340,30,"\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +28,1444879,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2312,58,"ta_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +29,1444880,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2261,109,"tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +30,1444905,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2211,159,"--entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +31,1444906,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2165,205," --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +32,1444921,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2106,264," --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +33,1444976,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2068,302," --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +34,1444976,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2055,315," --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +35,1445029,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2023,347," --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n 
--tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +36,1445075,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2003,367," --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +37,1445567,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2023,347," --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +38,1445643,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2055,315," --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +39,1445663,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2068,302," --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +40,1445745,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2106,264," --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +41,1445754,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2165,205," --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +42,1445775,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2211,159,"--entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +43,1445791,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2238,132,"-project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +44,1445812,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2239,131,"project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +45,1445821,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2262,108,"okenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait 
$child_pid",shellscript,selection_mouse +46,1445849,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2263,107,"kenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +47,1445856,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2315,55,"dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +48,1445876,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2340,30,"\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +49,1445894,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2353,17,"\n\nwait $child_pid",shellscript,selection_mouse +50,1445905,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2354,16,"\nwait $child_pid",shellscript,selection_mouse +51,1445975,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2370,0,"",shellscript,selection_mouse +52,1445976,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2369,0,"",shellscript,selection_command +53,1445977,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2369,1,"d",shellscript,selection_mouse +54,1445978,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2370,0,"",shellscript,selection_command +55,1446039,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2370,1,"\n",shellscript,selection_mouse +56,1446209,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2370,0,"",shellscript,selection_mouse +57,1446225,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2369,0,"",shellscript,selection_command +58,1446226,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2369,1,"d",shellscript,selection_mouse +59,1446239,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2370,0,"",shellscript,selection_command +60,1446240,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2353,17,"\n\nwait $child_pid",shellscript,selection_mouse +61,1446263,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2247,123,"jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +62,1446279,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2116,254,"inrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +63,1446290,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",2034,336,"age_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +64,1446306,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1968,402,"_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait 
$child_pid",shellscript,selection_mouse +65,1446328,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1898,472,"ch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +66,1446339,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1807,563,"store_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +67,1446359,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1730,640,"grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +68,1446378,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1591,779,"izer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +69,1446390,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1441,929,"KPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n 
--log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +70,1446405,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1388,982,"name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +71,1446426,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1285,1085,"ay_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +72,1446445,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1228,1142,"ule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n 
--name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +73,1446457,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1194,1176,"\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +74,1446478,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1164,1206," Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +75,1446490,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1162,1208,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep 
SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+76,1446507,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1157,1213,"fi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+77,1446531,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",897,1473,"\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+78,1446543,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",565,1805,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+79,1446568,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",91,2279,"#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+80,1446582,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",0,2370,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+81,1446904,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",20,2350,"\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+82,1446905,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",68,2302,"SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+83,1446921,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",151,2219,"BATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+84,1446986,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",497,1873,"BATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+85,1446987,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",620,1750,"ueue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+86,1446987,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",637,1733,"echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+87,1446989,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",864,1506,"it 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+88,1447007,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",879,1491,"queue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+89,1447022,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",951,1419,"count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+90,1447042,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1117,1253,"\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+91,1447062,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1159,1211,"\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+92,1447074,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1161,1209,"\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+93,1447102,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1162,1208,"\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+94,1447112,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1193,1177,"\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+95,1447121,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1194,1176,"\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+96,1447154,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1235,1135,"oad devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+97,1447165,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1265,1105,"nv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+98,1447173,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1281,1089,"\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+99,1447448,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1265,1105,"nv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+100,1447449,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1235,1135,"oad devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+101,1447450,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1205,1165,"oad mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+102,1447450,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1193,1177,"\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+103,1447451,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1174,1196,"batch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+104,1447452,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1160,1210,"\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+105,1447496,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1085,1285,"pt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+106,1447497,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",957,1413,"$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+107,1447499,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",897,1473,"\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+108,1447516,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",871,1499,"\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+109,1447526,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",868,1502,"\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+110,1447601,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",740,1630,": trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+111,1447602,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",630,1740," {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+112,1447602,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",564,1806,"\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+113,1447604,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",507,1863,"queue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+114,1447609,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",306,2064,"ror=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+115,1447622,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",161,2209,"es=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+116,1447679,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",134,2236,"pus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+117,1447682,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",102,2268,"artition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse
+118,1447683,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",30,2340,"-nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n
--tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +119,1447690,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",20,2350,"\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +120,1447727,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",0,2370,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol 
requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,selection_mouse +121,1450010,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +122,1453853,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_1_node\n#SBATCH --reservation=llmtum\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/array_records\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_1e-4/3414046""\n\nenv | grep SLURM\n\nsrun python 
jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --image_height=64 \\n --image_width=64 \\n --dyna_type=maskgit \\n --init_lr=0 \\n --max_lr=1e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-1-node-$slurm_job_id \\n --tags coinrun dynamics maskgit 1-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,content +123,1456808,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2198,0,"",shellscript,selection_mouse +124,1456952,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2198,1,"1",shellscript,selection_mouse +125,1457007,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2198,2,"1-",shellscript,selection_mouse +126,1457086,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2198,3,"1-n",shellscript,selection_mouse +127,1457139,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2198,4,"1-no",shellscript,selection_mouse +128,1457157,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2198,5,"1-nod",shellscript,selection_mouse +129,1457423,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2198,6,"1-node",shellscript,selection_mouse +130,1458654,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2198,6,"",shellscript,content +131,1458799,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2198,0,"d",shellscript,content +132,1458800,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2199,0,"",shellscript,selection_keyboard +133,1458976,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2199,0,"e",shellscript,content +134,1458977,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2200,0,"",shellscript,selection_keyboard +135,1459079,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2200,0,"f",shellscript,content +136,1459080,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2201,0,"",shellscript,selection_keyboard +137,1459233,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2201,0,"a",shellscript,content +138,1459234,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2202,0,"",shellscript,selection_keyboard +139,1459414,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2202,0,"u",shellscript,content +140,1459415,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2203,0,"",shellscript,selection_keyboard +141,1459603,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2203,0,"l",shellscript,content +142,1459604,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2204,0,"",shellscript,selection_keyboard +143,1459666,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2204,0,"t",shellscript,content +144,1459667,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2205,0,"",shellscript,selection_keyboard +145,1460614,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2146,0,"",shellscript,selection_command +146,1460878,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2145,0,"",shellscript,selection_command 
+147,1461200,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2141,4,"",shellscript,content +148,1461462,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2140,1,"",shellscript,content +149,1461871,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2139,1,"",shellscript,content +150,1462463,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2139,0,"d",shellscript,content +151,1462464,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2140,0,"",shellscript,selection_keyboard +152,1462569,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2140,0,"e",shellscript,content +153,1462570,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2141,0,"",shellscript,selection_keyboard +154,1462778,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2141,0,"f",shellscript,content +155,1462779,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2142,0,"",shellscript,selection_keyboard +156,1462891,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2142,0,"a",shellscript,content +157,1462892,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2143,0,"",shellscript,selection_keyboard +158,1463051,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2143,0,"u",shellscript,content +159,1463052,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2144,0,"",shellscript,selection_keyboard +160,1463270,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2144,0,"l",shellscript,content +161,1463271,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2145,0,"",shellscript,selection_keyboard +162,1463372,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2145,0,"t",shellscript,content +163,1463373,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2146,0,"",shellscript,selection_keyboard +164,1463920,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2145,0,"",shellscript,selection_command +165,1464370,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2101,0,"",shellscript,selection_command +166,1464582,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2064,0,"",shellscript,selection_command +167,1464894,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2052,0,"",shellscript,selection_command +168,1465112,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2020,0,"",shellscript,selection_command +169,1465288,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2000,0,"",shellscript,selection_command +170,1465641,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2020,0,"",shellscript,selection_command +171,1466044,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2019,0,"",shellscript,selection_command +172,1466344,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2018,0,"",shellscript,selection_command +173,1466526,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2017,0,"",shellscript,selection_command +174,1466674,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2016,0,"",shellscript,selection_command +175,1467029,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2015,0,"",shellscript,selection_command +176,1514218,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1957,0,"",shellscript,selection_mouse 
+177,1514229,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1956,0,"",shellscript,selection_command +178,1516176,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1957,0,"\n ",shellscript,content +179,1516659,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1962,0,"-",shellscript,content +180,1516660,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1963,0,"",shellscript,selection_keyboard +181,1517156,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1963,0,"-",shellscript,content +182,1517157,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1964,0,"",shellscript,selection_keyboard +183,1518837,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1964,0,"p",shellscript,content +184,1518839,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1965,0,"",shellscript,selection_keyboard +185,1518898,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1965,0,"a",shellscript,content +186,1518899,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1966,0,"",shellscript,selection_keyboard +187,1519115,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1966,0,"t",shellscript,content +188,1519116,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1967,0,"",shellscript,selection_keyboard +189,1519306,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1967,0,"c",shellscript,content +190,1519307,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1968,0,"",shellscript,selection_keyboard +191,1519398,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1968,0,"h",shellscript,content +192,1519399,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1969,0,"",shellscript,selection_keyboard +193,1520083,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1969,0,"_",shellscript,content +194,1520084,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1970,0,"",shellscript,selection_keyboard +195,1520709,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1970,0,"s",shellscript,content +196,1520710,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1971,0,"",shellscript,selection_keyboard +197,1520773,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1971,0,"i",shellscript,content +198,1520774,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1972,0,"",shellscript,selection_keyboard +199,1520991,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1972,0,"z",shellscript,content +200,1520992,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1973,0,"",shellscript,selection_keyboard +201,1522968,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1973,0,"e",shellscript,content +202,1522969,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1974,0,"",shellscript,selection_keyboard +203,1523839,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1974,0,"=",shellscript,content +204,1523840,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1975,0,"",shellscript,selection_keyboard +205,1524828,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1975,0,"1",shellscript,content +206,1524829,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1976,0,"",shellscript,selection_keyboard 
+207,1524928,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1976,0,"6",shellscript,content +208,1524928,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1977,0,"",shellscript,selection_keyboard +209,1525518,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1977,0," ",shellscript,content +210,1525519,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1978,0,"",shellscript,selection_keyboard +211,1525973,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1978,0,"\",shellscript,content +212,1525974,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1979,0,"",shellscript,selection_keyboard +213,1526313,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1978,0,"",shellscript,selection_command +214,1537919,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2000,0,"",shellscript,selection_command +215,1538280,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1980,44,"",shellscript,content +216,1538298,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1984,0,"",shellscript,selection_command +217,1539360,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1999,0,"",shellscript,selection_command +218,1540891,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1994,0,"",shellscript,selection_command +219,1541052,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1993,1,"",shellscript,content +220,1541251,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1993,0,"3",shellscript,content +221,1541252,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1994,0,"",shellscript,selection_keyboard +222,1542039,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1993,0,"",shellscript,selection_command +223,1542833,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2013,0,"",shellscript,selection_command +224,1543116,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1993,0,"",shellscript,selection_command +225,1543492,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1994,0,"",shellscript,selection_command +226,1543649,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1995,0,"",shellscript,selection_command +227,1543829,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1996,0,"",shellscript,selection_command +228,1544013,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1997,0,"",shellscript,selection_command +229,1586394,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2234,0,"",shellscript,selection_mouse +230,1586405,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2233,0,"",shellscript,selection_command +231,1587610,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",0,0,"",shellscript,tab +232,1587611,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1194,0,"",shellscript,selection_mouse +233,1588186,"slurm/jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch",1161,0,"",shellscript,selection_mouse +234,1588923,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +235,1588924,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2103,0,"",shellscript,selection_mouse +236,1589488,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1995,0,"",shellscript,selection_mouse 
+237,1589989,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1992,0,"",shellscript,selection_mouse +238,1634296,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2043,0,"",shellscript,selection_mouse +239,1634297,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2042,0,"",shellscript,selection_command +240,1634871,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2043,0,"",shellscript,selection_mouse +241,1634871,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2042,0,"",shellscript,selection_command +242,1675776,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2350,0,"",shellscript,selection_mouse +243,1675788,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2349,0,"",shellscript,selection_command +244,1675835,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2349,1,"d",shellscript,selection_mouse +245,1675836,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2350,0,"",shellscript,selection_command +246,1676244,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2320,0,"",shellscript,selection_mouse +247,1676626,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2350,0,"",shellscript,selection_mouse +248,1676639,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2349,0,"",shellscript,selection_command +249,1677075,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2333,0,"",shellscript,selection_mouse +250,1677076,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2332,0,"",shellscript,selection_command +251,1677494,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2350,0,"",shellscript,selection_mouse +252,1677496,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2349,0,"",shellscript,selection_command +253,1677892,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2320,0,"",shellscript,selection_mouse +254,1679262,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2251,0,"",shellscript,selection_mouse +255,1680370,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2292,0,"",shellscript,selection_mouse +256,1680537,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2290,8,"data_dir",shellscript,selection_mouse +257,1681018,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2246,0,"",shellscript,selection_mouse +258,1681176,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2241,20,"tokenizer_checkpoint",shellscript,selection_mouse +259,1682024,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2222,0,"",shellscript,selection_mouse +260,1682150,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2219,7,"project",shellscript,selection_mouse +261,1682611,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2196,0,"",shellscript,selection_mouse +262,1682958,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2193,6,"entity",shellscript,selection_mouse +263,1683607,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2149,0,"",shellscript,selection_mouse +264,1683944,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2147,4,"tags",shellscript,selection_mouse +265,1684545,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2089,0,"",shellscript,selection_mouse 
+266,1684708,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2087,4,"name",shellscript,selection_mouse +267,1686621,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2052,0,"",shellscript,selection_mouse +268,1686883,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2050,23,"log_checkpoint_interval",shellscript,selection_mouse +269,1689949,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1989,0,"",shellscript,selection_mouse +270,1690001,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1989,1,"_",shellscript,selection_mouse +271,1690998,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1971,0,"",shellscript,selection_mouse +272,1692058,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1942,0,"",shellscript,selection_mouse +273,1692690,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1920,0,"",shellscript,selection_mouse +274,1693396,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1898,0,"",shellscript,selection_mouse +275,1693869,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1863,0,"",shellscript,selection_mouse +276,1701059,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1908,0,"",shellscript,selection_mouse +277,1702399,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1907,1,"",shellscript,content +278,1702504,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1906,1,"",shellscript,content +279,1702746,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1906,0,"1",shellscript,content +280,1702747,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1907,0,"",shellscript,selection_keyboard +281,1702949,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1907,0,"1",shellscript,content +282,1702950,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1908,0,"",shellscript,selection_keyboard +283,1703059,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1908,0,"0",shellscript,content +284,1703060,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1909,0,"",shellscript,selection_keyboard +285,1703604,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1908,0,"",shellscript,selection_command +286,1751144,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1606,0,"",shellscript,selection_mouse +287,1752219,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1605,0,"",shellscript,selection_command +288,1753328,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1605,117,"",shellscript,content +289,1753334,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1604,0,"",shellscript,selection_command +290,1753779,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1605,0,"",shellscript,selection_command +291,1754322,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1605,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,content +292,1772526,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",459,0,"",shellscript,selection_command +293,1774396,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",459,2,"1_",shellscript,selection_mouse 
+294,1774404,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",459,3,"1_n",shellscript,selection_mouse +295,1774422,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",459,4,"1_no",shellscript,selection_mouse +296,1774479,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",459,5,"1_nod",shellscript,selection_mouse +297,1774543,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",459,6,"1_node",shellscript,selection_mouse +298,1775631,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",459,6,"",shellscript,content +299,1775786,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",458,1,"",shellscript,content +300,1806997,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1154,0,"",shellscript,selection_mouse +301,1817729,"jasmine/utils/nn.py",0,0,"import math\nfrom typing import Tuple, Callable, List\n\nfrom flax import nnx\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\ndef _get_spatiotemporal_positional_encoding(d_model: int, max_len: int = 5000):\n """"""\n Creates a function that applies separate sinusoidal positional encodings to the temporal and spatial dimensions.\n """"""\n pe = jnp.zeros((max_len, d_model))\n position = jnp.arange(0, max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(jnp.arange(0, d_model, 2) * (-math.log(10000.0) / d_model))\n pe = pe.at[:, 0::2].set(jnp.sin(position * div_term))\n pe = pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def _encode(x: jax.Array) -> jax.Array:\n """"""\n Args:\n x: The input tensor of shape (Batch, Time, Space, Dimension).\n\n Returns:\n The input tensor with positional encodings added.\n """"""\n assert x.ndim == 4, f""Input must be 4-dimensional, but got shape {x.shape}""\n\n num_timesteps = x.shape[1]\n num_spatial_patches = x.shape[2]\n\n # Temporal positional encoding: (1, T, 1, D)\n temporal_pe = pe[None, :num_timesteps, None, :]\n x = x + temporal_pe\n\n # Spatial positional encoding: (1, 1, S, D)\n spatial_pe = pe[None, None, :num_spatial_patches, :]\n x = x + spatial_pe\n\n return x\n\n return _encode\n\n\nclass STBlock(nnx.Module):\n def __init__(\n self,\n dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n sow_weights: bool,\n sow_activations: bool,\n ):\n self.dim = dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n rngs=rngs,\n decode=False,\n )\n\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n 
dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=False,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x_BTNM: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z_BTNM = self.spatial_norm(x_BTNM)\n z_BTNM = self.spatial_attention(z_BTNM, sow_weights=self.sow_weights)\n x_BTNM = x_BTNM + z_BTNM\n\n # --- Temporal attention ---\n x_BNTM = x_BTNM.swapaxes(1, 2)\n z_BNTM = self.temporal_norm(x_BNTM)\n z_BNTM = self.temporal_attention(z_BNTM, sow_weights=self.sow_weights)\n x_BNTM = x_BNTM + z_BNTM\n x_BTNM = x_BNTM.swapaxes(1, 2)\n\n # --- Feedforward ---\n z_BTNM = self.ffn_norm(x_BTNM)\n z_BTND = self.ffn_dense1(z_BTNM)\n z_BTND = jax.nn.gelu(z_BTND)\n z_BTNM = self.ffn_dense2(z_BTND)\n x_BTNM = x_BTNM + z_BTNM\n if self.sow_activations:\n self.sow(nnx.Intermediate, ""activations"", x_BTNM)\n return x_BTNM\n\n\nclass STTransformer(nnx.Module):\n """"""\n Dimension keys:\n B: batch size\n T: number of frames\n N: number of patches per frame\n I: number of input features\n M: model dimension\n D: FFN dimension\n V: vocabulary size\n """"""\n\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n sow_weights: bool = False,\n sow_activations: bool = False,\n sow_logits: bool = False,\n max_len: int = 5000,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_logits = sow_logits\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n\n self.pos_enc = _get_spatiotemporal_positional_encoding(\n self.model_dim, max_len=max_len\n )\n\n self.blocks = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n sow_weights=self.sow_weights,\n sow_activations=self.sow_activations,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x_BTNI: 
jax.Array) -> jax.Array:\n x_BTNI = self.input_norm1(x_BTNI)\n x_BTNM = self.input_dense(x_BTNI)\n x_BTNM = self.input_norm2(x_BTNM)\n x_BTNM = self.pos_enc(x_BTNM)\n for block in self.blocks:\n x_BTNM = block(x_BTNM)\n\n x_BTNV = self.output_dense(x_BTNM)\n if self.sow_logits:\n self.sow(nnx.Intermediate, ""logits"", x_BTNV)\n return x_BTNV\n\n\nclass TransformerBlock(nnx.Module):\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n sow_weights: bool,\n sow_activations: bool,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.model_dim,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.model_dim,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(\n self, x_BTNM: jax.Array, pos_index: Tuple[jax.Array, jax.Array] | None = None\n ) -> jax.Array:\n # --- Spatial attention ---\n B, T, N, M = x_BTNM.shape\n z_FNM = einops.rearrange(x_BTNM, ""b t n m -> (b t) n m"")\n z_FNM = self.spatial_norm(z_FNM)\n z_FNM = self.spatial_attention(z_FNM, sow_weights=self.sow_weights)\n z_BTNM = einops.rearrange(z_FNM, ""(b t) n m -> b t n m"", t=T)\n x_BTNM = x_BTNM + z_BTNM\n # --- Temporal attention ---\n z_PTM = einops.rearrange(x_BTNM, ""b t n m -> (b n) t m"")\n z_PTM = self.temporal_norm(z_PTM)\n z_PTM = self.temporal_attention(z_PTM, sow_weights=self.sow_weights)\n z_BTNM = einops.rearrange(z_PTM, ""(b n) t m -> b t n m"", n=N)\n x_BTNM = x_BTNM + z_BTNM\n # --- Feedforward ---\n z_BTNM = self.ffn_norm(x_BTNM)\n z_BTND = self.ffn_dense1(z_BTNM)\n z_BTND = jax.nn.gelu(z_BTND)\n z_BTNM = self.ffn_dense2(z_BTND)\n x_BTNM = x_BTNM + z_BTNM\n if self.sow_activations:\n self.sow(nnx.Intermediate, ""activations"", x_BTNM)\n\n return x_BTNM\n\n\nclass Transformer(nnx.Module):\n 
""""""\n Dimension keys:\n B: batch size\n T: number of frames\n N: number of patches per frame\n I: number of input features\n M: model dimension\n D: FFN dimension\n V: vocabulary size\n F: number of frames in batch\n P: number of patch positions in batch\n """"""\n\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n sow_logits: bool = False,\n sow_weights: bool = False,\n sow_activations: bool = False,\n max_len: int = 5000,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_logits = sow_logits\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n\n self.pos_enc = _get_spatiotemporal_positional_encoding(\n self.model_dim, max_len=max_len\n )\n\n self.blocks: List[TransformerBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n TransformerBlock(\n model_dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n sow_weights=self.sow_weights,\n sow_activations=self.sow_activations,\n rngs=rngs,\n )\n )\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self, x_BTNI: jax.Array, pos_index: Tuple[jax.Array, jax.Array] | None = None\n ) -> jax.Array:\n x_BTNI = self.input_norm1(x_BTNI)\n x_BTNM = self.input_dense(x_BTNI)\n x_BTNM = self.input_norm2(x_BTNM)\n x_BTNM = self.pos_enc(x_BTNM)\n for block in self.blocks:\n x_BTNM = block(x_BTNM, pos_index)\n\n x_BTNV = self.output_dense(x_BTNM)\n if self.sow_logits:\n self.sow(nnx.Intermediate, ""logits"", x_BTNV)\n return x_BTNV\n\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n """"""\n Dimension keys:\n D: B * T * N\n K: number of latents\n L: latent dimension\n """"""\n\n def __init__(\n self,\n latent_dim: int,\n num_latents: int,\n dropout: float,\n dtype: jnp.dtype,\n rngs: nnx.Rngs,\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n self.dtype = dtype\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.normal(stddev=1)(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x_DL: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x_DL = 
x_DL.astype(self.dtype)\n codebook = self.codebook.value.astype(self.dtype)\n\n x_DL = normalize(x_DL)\n normalized_codebook_KL = normalize(codebook)\n distance_DK = -jnp.matmul(x_DL, normalized_codebook_KL.T)\n if training:\n distance_DK = self.drop(distance_DK)\n\n # --- Get indices and embeddings ---\n indices_D = jnp.argmin(distance_DK, axis=-1)\n z_DL = codebook[indices_D]\n\n # --- Straight through estimator ---\n z_q_DL = x_DL + jax.lax.stop_gradient(z_DL - x_DL)\n return z_q_DL, z_DL, x_DL, indices_D\n\n def get_codes(self, indices_E: jax.Array) -> jax.Array:\n return self.codebook[indices_E]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses flash attention if enabled.\n\n flax.nnx.MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim),\n but jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim). We reshape to\n ensure compatibility. cuDNN's flash attention additionally requires a sequence length that\n is a multiple of 4. We pad the sequence length to the nearest multiple of 4 and mask\n accordingly. Note that cuDNN requires the mask to be broadcast before calling the attention\n function due to strict shape checking.\n """"""\n\n def attention_fn(\n query_BTHD, key_BSHD, value_BSHD, bias=None, mask_B111=None, **kwargs\n ):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _merge_batch_dims(x):\n return einops.rearrange(x, ""... l h k -> (...) l h k"")\n\n def _pad(x, pad_size):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n original_shape = query_BTHD.shape\n T = query_BTHD.shape[-3]\n S = key_BSHD.shape[-3]\n\n # Pad to nearest multiple of 4\n Q = ((T + 3) // 4) * 4\n pad_size_Q = Q - T\n K = ((S + 3) // 4) * 4\n pad_size_K = K - S\n\n query_BQHD = _pad(_merge_batch_dims(query_BTHD), pad_size_Q)\n key_BKHD = _pad(_merge_batch_dims(key_BSHD), pad_size_K)\n value_BKHD = _pad(_merge_batch_dims(value_BSHD), pad_size_K)\n\n attention_mask = jnp.ones((Q, K), dtype=jnp.bool_)\n attention_mask = attention_mask.at[T:, :].set(False)\n attention_mask = attention_mask.at[:, S:].set(False)\n\n mask_11TS = attention_mask[jnp.newaxis, jnp.newaxis, :, :]\n\n bias_4d = (\n jnp.pad(\n _merge_batch_dims(bias),\n ((0, 0), (0, 0), (0, pad_size_Q), (0, pad_size_K)),\n )\n if bias is not None\n else None\n )\n\n # NOTE: jax.nn.dot_product_attention does not support dropout\n output_4d = jax.nn.dot_product_attention(\n query=query_BQHD,\n key=key_BKHD,\n value=value_BKHD,\n bias=bias_4d,\n mask=mask_11TS,\n implementation=implementation,\n is_causal=is_causal,\n )\n return output_4d[..., :T, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +302,1828002,"jasmine/utils/nn.py",17445,0,"",python,selection_mouse +303,1828154,"jasmine/utils/nn.py",17439,26,"_create_flash_attention_fn",python,selection_mouse +304,1837036,"jasmine/utils/nn.py",17455,0,"",python,selection_mouse +305,1847873,"jasmine/utils/nn.py",2538,26,"_create_flash_attention_fn",python,selection_command +306,1851359,"jasmine/utils/nn.py",2529,0,"",python,selection_mouse +307,1853187,"jasmine/utils/nn.py",2597,0,"",python,selection_mouse +308,1853380,"jasmine/utils/nn.py",2587,19,"use_flash_attention",python,selection_mouse +309,1854365,"jasmine/utils/nn.py",2610,0,"",python,selection_mouse +310,1854512,"jasmine/utils/nn.py",2608,9,"is_causal",python,selection_mouse +311,1854989,"jasmine/utils/nn.py",2623,0,"",python,selection_mouse 
+312,1854996,"jasmine/utils/nn.py",2622,0,"",python,selection_command +313,1857551,"jasmine/utils/nn.py",0,0,"",python,tab +314,1862907,"jasmine/utils/nn.py",18148,0,"",python,selection_mouse +315,1863051,"jasmine/utils/nn.py",18141,12,"attention_fn",python,selection_mouse +316,1865746,"jasmine/utils/nn.py",18253,0,"",python,selection_mouse +317,1865886,"jasmine/utils/nn.py",18248,14,"implementation",python,selection_mouse +318,1866830,"jasmine/utils/nn.py",18276,0,"",python,selection_mouse +319,1867594,"jasmine/utils/nn.py",18306,0,"",python,selection_mouse +320,1867956,"jasmine/utils/nn.py",18306,1,"\n",python,selection_mouse +321,1868544,"jasmine/utils/nn.py",18256,0,"",python,selection_mouse +322,1868710,"jasmine/utils/nn.py",18248,14,"implementation",python,selection_mouse +323,1868896,"jasmine/utils/nn.py",18248,15,"implementation ",python,selection_mouse +324,1868906,"jasmine/utils/nn.py",18248,23,"implementation = ""cudnn",python,selection_mouse +325,1868958,"jasmine/utils/nn.py",18248,27,"implementation = ""cudnn"" if",python,selection_mouse +326,1868960,"jasmine/utils/nn.py",18248,47,"implementation = ""cudnn"" if use_flash_attention",python,selection_mouse +327,1869152,"jasmine/utils/nn.py",18248,48,"implementation = ""cudnn"" if use_flash_attention ",python,selection_mouse +328,1869165,"jasmine/utils/nn.py",18248,52,"implementation = ""cudnn"" if use_flash_attention else",python,selection_mouse +329,1869232,"jasmine/utils/nn.py",18248,53,"implementation = ""cudnn"" if use_flash_attention else ",python,selection_mouse +330,1869292,"jasmine/utils/nn.py",18248,57,"implementation = ""cudnn"" if use_flash_attention else None",python,selection_mouse +331,1869692,"jasmine/utils/nn.py",18304,0,"",python,selection_mouse +332,1870214,"jasmine/utils/nn.py",18270,0,"",python,selection_mouse +333,1871103,"jasmine/utils/nn.py",18213,0,"",python,selection_mouse +334,1871189,"jasmine/utils/nn.py",18208,9,"mask_B111",python,selection_mouse +335,1876386,"jasmine/utils/nn.py",18227,0,"",python,selection_mouse +336,1876521,"jasmine/utils/nn.py",18226,6,"kwargs",python,selection_mouse +337,1877664,"jasmine/utils/nn.py",18283,0,"",python,selection_mouse +338,1878464,"jasmine/utils/nn.py",18306,0,"",python,selection_mouse +339,1879756,"jasmine/utils/nn.py",18255,0,"",python,selection_mouse +340,1880473,"jasmine/utils/nn.py",19730,0,"",python,selection_command +341,1894542,"jasmine/utils/nn.py",19793,0,"",python,selection_mouse +342,1894549,"jasmine/utils/nn.py",19792,0,"",python,selection_command +343,1894690,"jasmine/utils/nn.py",19793,0,"",python,selection_mouse +344,1894702,"jasmine/utils/nn.py",19792,0,"",python,selection_command +345,1895274,"jasmine/utils/nn.py",19752,0,"",python,selection_mouse +346,1895425,"jasmine/utils/nn.py",19745,14,"implementation",python,selection_mouse +347,1900663,"jasmine/utils/nn.py",19563,0,"",python,selection_mouse +348,1900912,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",0,0,"# Copyright 2019 The JAX Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under 
the License.\n\n""""""Shared neural network activations and other functions.""""""\n\nfrom __future__ import annotations\n\nfrom collections.abc import Sequence\nfrom functools import partial\nimport operator\nimport math\nimport numpy as np\nfrom typing import Any, Literal\nimport warnings\n\nfrom jax._src import api\nfrom jax._src import config\nfrom jax._src import core\nfrom jax._src import custom_derivatives\nfrom jax._src import deprecations\nfrom jax._src import dtypes\nfrom jax._src import lax\nfrom jax._src import numpy as jnp\nfrom jax._src import util\nfrom jax._src.core import AxisName\nfrom jax._src.cudnn.fused_attention_stablehlo import (\n dot_product_attention as cudnn_dot_product_attention, MaskType)\nfrom jax._src.cudnn.scaled_matmul_stablehlo import (\n scaled_matmul_wrapper as cudnn_scaled_matmul,\n scaled_dot_general_wrapper as cudnn_scaled_dot_general,\n BlockScaleConfig)\nfrom jax._src.interpreters import batching\nfrom jax._src.interpreters import mlir\nfrom jax._src.numpy import einsum as jnp_einsum\nfrom jax._src.numpy import util as numpy_util\nfrom jax._src.numpy.reductions import _count\nfrom jax._src.numpy.reductions import Axis\nfrom jax._src.sharding_impls import NamedSharding, PartitionSpec as P\nfrom jax._src.typing import Array, ArrayLike, DType, DTypeLike\nfrom jax._src.ops.special import logsumexp as _logsumexp\n\n\n# activations\n@api.jit\ndef identity(x: ArrayLike) -> Array:\n r""""""Identity activation function.\n\n Returns the argument unmodified.\n\n Args:\n x : input array\n\n Returns:\n The argument `x` unmodified.\n\n Examples:\n >>> jax.nn.identity(jax.numpy.array([-2., -1., -0.5, 0, 0.5, 1., 2.]))\n Array([-2. , -1. , -0.5, 0. , 0.5, 1. , 2. ], dtype=float32)\n\n """"""\n numpy_util.check_arraylike(""identity"", x)\n return jnp.asarray(x)\n\n@custom_derivatives.custom_jvp\n@api.jit\ndef relu(x: ArrayLike) -> Array:\n r""""""Rectified linear unit activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{relu}(x) = \max(x, 0)\n\n except under differentiation, we take:\n\n .. math::\n \nabla \mathrm{relu}(0) = 0\n\n For more information see\n `Numerical influence of ReLU’(0) on backpropagation\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n Examples:\n >>> jax.nn.relu(jax.numpy.array([-2., -1., -0.5, 0, 0.5, 1., 2.]))\n Array([0. , 0. , 0. , 0. , 0.5, 1. , 2. ], dtype=float32)\n\n See also:\n :func:`relu6`\n\n """"""\n return jnp.maximum(x, 0)\n# For behavior at 0, see https://dl.acm.org/doi/10.5555/3540261.3540297\nrelu.defjvps(lambda g, ans, x: lax.select(x > 0, g, lax.full_like(g, 0)))\n\n@api.jit\ndef squareplus(x: ArrayLike, b: ArrayLike = 4) -> Array:\n r""""""Squareplus activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{squareplus}(x) = \frac{x + \sqrt{x^2 + b}}{2}\n\n as described in https://arxiv.org/abs/2112.11687.\n\n Args:\n x : input array\n b : smoothness parameter\n """"""\n numpy_util.check_arraylike(""squareplus"", x)\n numpy_util.check_arraylike(""squareplus"", b)\n x = jnp.asarray(x)\n b = jnp.asarray(b)\n y = x + jnp.sqrt(jnp.square(x) + b)\n return y / 2\n\n@api.jit\ndef softplus(x: ArrayLike) -> Array:\n r""""""Softplus activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{softplus}(x) = \log(1 + e^x)\n\n Args:\n x : input array\n """"""\n return jnp.logaddexp(x, 0)\n\n@api.jit\ndef sparse_plus(x: ArrayLike) -> Array:\n r""""""Sparse plus function.\n\n Computes the function:\n\n .. 
math::\n\n \mathrm{sparse\_plus}(x) = \begin{cases}\n 0, & x \leq -1\\\n \frac{1}{4}(x+1)^2, & -1 < x < 1 \\\n x, & 1 \leq x\n \end{cases}\n\n This is the twin function of the softplus activation ensuring a zero output\n for inputs less than -1 and a linear output for inputs greater than 1,\n while remaining smooth, convex, monotonic by an adequate definition between\n -1 and 1.\n\n Args:\n x: input (float)\n """"""\n numpy_util.check_arraylike(""sparse_plus"", x)\n x = jnp.asarray(x)\n return jnp.where(x <= -1.0, 0.0, jnp.where(x >= 1.0, x, (x + 1.0)**2/4))\n\n@api.jit\ndef soft_sign(x: ArrayLike) -> Array:\n r""""""Soft-sign activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{soft\_sign}(x) = \frac{x}{|x| + 1}\n\n Args:\n x : input array\n """"""\n numpy_util.check_arraylike(""soft_sign"", x)\n x_arr = jnp.asarray(x)\n return x_arr / (jnp.abs(x_arr) + 1)\n\n@partial(api.jit, inline=True)\ndef sigmoid(x: ArrayLike) -> Array:\n r""""""Sigmoid activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{sigmoid}(x) = \frac{1}{1 + e^{-x}}\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`log_sigmoid`\n\n """"""\n return lax.logistic(x)\n\n@api.jit\ndef sparse_sigmoid(x: ArrayLike) -> Array:\n r""""""Sparse sigmoid activation function.\n\n Computes the function:\n\n .. math::\n\n \mathrm{sparse\_sigmoid}(x) = \begin{cases}\n 0, & x \leq -1\\\n \frac{1}{2}(x+1), & -1 < x < 1 \\\n 1, & 1 \leq x\n \end{cases}\n\n This is the twin function of the ``sigmoid`` activation ensuring a zero output\n for inputs less than -1, a 1 output for inputs greater than 1, and a linear\n output for inputs between -1 and 1. It is the derivative of ``sparse_plus``.\n\n For more information, see `Learning with Fenchel-Young Losses (section 6.2)\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n return 0.5 * jnp.clip(x + 1.0, 0.0, 2.0)\n\n@api.jit\ndef silu(x: ArrayLike) -> Array:\n r""""""SiLU (aka swish) activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{silu}(x) = x \cdot \mathrm{sigmoid}(x) = \frac{x}{1 + e^{-x}}\n\n :func:`swish` and :func:`silu` are both aliases for the same function.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n numpy_util.check_arraylike(""silu"", x)\n x_arr = jnp.asarray(x)\n return x_arr * sigmoid(x_arr)\n\nswish = silu\n\n@api.jit\ndef mish(x: ArrayLike) -> Array:\n r""""""Mish activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{mish}(x) = x \cdot \mathrm{tanh}(\mathrm{softplus}(x))\n\n For more information, see\n `Mish: A Self Regularized Non-Monotonic Activation Function\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n """"""\n numpy_util.check_arraylike(""mish"", x)\n x_arr = jnp.asarray(x)\n return x_arr * jnp.tanh(softplus(x_arr))\n\n@api.jit\ndef log_sigmoid(x: ArrayLike) -> Array:\n r""""""Log-sigmoid activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{log\_sigmoid}(x) = \log(\mathrm{sigmoid}(x)) = -\log(1 + e^{-x})\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n numpy_util.check_arraylike(""log_sigmoid"", x)\n x_arr = jnp.asarray(x)\n return -softplus(-x_arr)\n\n@api.jit\ndef elu(x: ArrayLike, alpha: ArrayLike = 1.0) -> Array:\n r""""""Exponential linear unit activation function.\n\n Computes the element-wise function:\n\n .. 
math::\n \mathrm{elu}(x) = \begin{cases}\n x, & x > 0\\\n \alpha \left(\exp(x) - 1\right), & x \le 0\n \end{cases}\n\n Args:\n x : input array\n alpha : scalar or array of alpha values (default: 1.0)\n\n Returns:\n An array.\n\n See also:\n :func:`selu`\n """"""\n numpy_util.check_arraylike(""elu"", x)\n x_arr = jnp.asarray(x)\n return jnp.where(x_arr > 0,\n x_arr,\n alpha * jnp.expm1(jnp.where(x_arr > 0, 0., x_arr)))\n\n@api.jit\ndef leaky_relu(x: ArrayLike, negative_slope: ArrayLike = 1e-2) -> Array:\n r""""""Leaky rectified linear unit activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{leaky\_relu}(x) = \begin{cases}\n x, & x \ge 0\\\n \alpha x, & x < 0\n \end{cases}\n\n where :math:`\alpha` = :code:`negative_slope`.\n\n Args:\n x : input array\n negative_slope : array or scalar specifying the negative slope (default: 0.01)\n\n Returns:\n An array.\n\n See also:\n :func:`relu`\n """"""\n numpy_util.check_arraylike(""leaky_relu"", x)\n x_arr = jnp.asarray(x)\n return jnp.where(x_arr >= 0, x_arr, negative_slope * x_arr)\n\n@api.jit\ndef hard_tanh(x: ArrayLike) -> Array:\n r""""""Hard :math:`\mathrm{tanh}` activation function.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{hard\_tanh}(x) = \begin{cases}\n -1, & x < -1\\\n x, & -1 \le x \le 1\\\n 1, & 1 < x\n \end{cases}\n\n Args:\n x : input array\n\n Returns:\n An array.\n """"""\n numpy_util.check_arraylike(""hard_tanh"", x)\n x_arr = jnp.asarray(x)\n return jnp.where(x_arr > 1, 1, jnp.where(x_arr < -1, -1, x_arr))\n\n@api.jit\ndef celu(x: ArrayLike, alpha: ArrayLike = 1.0) -> Array:\n r""""""Continuously-differentiable exponential linear unit activation.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{celu}(x) = \begin{cases}\n x, & x > 0\\\n \alpha \left(\exp(\frac{x}{\alpha}) - 1\right), & x \le 0\n \end{cases}\n\n For more information, see\n `Continuously Differentiable Exponential Linear Units\n `_.\n\n Args:\n x : input array\n alpha : array or scalar (default: 1.0)\n\n Returns:\n An array.\n """"""\n return jnp.maximum(x, 0.0) + alpha * jnp.expm1(jnp.minimum(x, 0.0) / alpha)\n\n@api.jit\ndef selu(x: ArrayLike) -> Array:\n r""""""Scaled exponential linear unit activation.\n\n Computes the element-wise function:\n\n .. math::\n \mathrm{selu}(x) = \lambda \begin{cases}\n x, & x > 0\\\n \alpha e^x - \alpha, & x \le 0\n \end{cases}\n\n where :math:`\lambda = 1.0507009873554804934193349852946` and\n :math:`\alpha = 1.6732632423543772848170429916717`.\n\n For more information, see\n `Self-Normalizing Neural Networks\n `_.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`elu`\n """"""\n alpha = 1.6732632423543772848170429916717\n scale = 1.0507009873554804934193349852946\n return scale * elu(x, alpha)\n\n# TODO(phawkins): this jit was found to change numerics in a test. Debug this.\n# @partial(api.jit, static_argnames=(""approximate"",))\ndef gelu(x: ArrayLike, approximate: bool = True) -> Array:\n r""""""Gaussian error linear unit activation function.\n\n If ``approximate=False``, computes the element-wise function:\n\n .. math::\n \mathrm{gelu}(x) = \frac{x}{2} \left(\mathrm{erfc} \left(\n \frac{-x}{\sqrt{2}} \right) \right)\n\n If ``approximate=True``, uses the approximate formulation of GELU:\n\n .. 
math::\n \mathrm{gelu}(x) = \frac{x}{2} \left(1 + \mathrm{tanh} \left(\n \sqrt{\frac{2}{\pi}} \left(x + 0.044715 x^3 \right) \right) \right)\n\n For more information, see `Gaussian Error Linear Units (GELUs)\n `_, section 2.\n\n Args:\n x: input array\n approximate: whether to use the approximate or exact formulation.\n """"""\n [x_arr] = numpy_util.promote_args_inexact(""gelu"", x)\n\n if approximate:\n sqrt_2_over_pi = np.sqrt(2 / np.pi).astype(x_arr.dtype)\n cdf = 0.5 * (1.0 + jnp.tanh(sqrt_2_over_pi * (x_arr + 0.044715 * (x_arr ** 3))))\n return x_arr * cdf\n else:\n sqrt_half = np.sqrt(0.5).astype(x_arr.dtype)\n return jnp.array(\n 0.5 * x_arr * (lax.erfc(-x_arr * sqrt_half)), dtype=x_arr.dtype\n )\n\n@partial(api.jit, static_argnames=(""axis"",))\ndef glu(x: ArrayLike, axis: int = -1) -> Array:\n r""""""Gated linear unit activation function.\n\n Computes the function:\n\n .. math::\n \mathrm{glu}(x) = x\left[\ldots, 0:\frac{n}{2}, \ldots\right] \cdot\n \mathrm{sigmoid} \left( x\left[\ldots, \frac{n}{2}:n, \ldots\right]\n \right)\n\n where the array is split into two along ``axis``. The size of the ``axis``\n dimension must be divisible by two.\n\n Args:\n x : input array\n axis: the axis along which the split should be computed (default: -1)\n\n Returns:\n An array.\n\n See also:\n :func:`sigmoid`\n """"""\n numpy_util.check_arraylike(""glu"", x)\n x_arr = jnp.asarray(x)\n size = x_arr.shape[axis]\n assert size % 2 == 0, ""axis size must be divisible by 2""\n x1, x2 = jnp.split(x_arr, 2, axis)\n return x1 * sigmoid(x2)\n\n# other functions\n\nlogsumexp = _logsumexp\n\n\n@partial(api.jit, static_argnames=(""axis"", ""keepdims""))\ndef logmeanexp(\n x: ArrayLike,\n axis: Axis = None,\n where: ArrayLike | None = None,\n keepdims: bool = False,\n) -> Array:\n r""""""Log mean exp.\n\n Computes the function:\n\n .. math::\n \text{logmeanexp}(x) = \log \frac{1}{n} \sum_{i=1}^n \exp x_i = \text{logsumexp}(x) - \log n\n\n Args:\n x: Input array.\n axis: Axis or axes along which to reduce.\n where: Elements to include in the reduction. Optional.\n keepdims: Preserve the dimensions of the input.\n Returns:\n An array.\n See also:\n :func:`jax.nn.logsumexp`\n """"""\n lse = _logsumexp(x, axis=axis, where=where, keepdims=keepdims)\n count = _count(x, axis=axis, where=where, keepdims=keepdims, dtype=lse.dtype)\n return lse - jnp.log(count)\n\n\n@partial(api.jit, static_argnames=(""axis"",))\ndef log_softmax(x: ArrayLike,\n axis: Axis = -1,\n where: ArrayLike | None = None) -> Array:\n r""""""Log-Softmax function.\n\n Computes the logarithm of the :code:`softmax` function, which rescales\n elements to the range :math:`[-\infty, 0)`.\n\n .. math ::\n \mathrm{log\_softmax}(x)_i = \log \left( \frac{\exp(x_i)}{\sum_j \exp(x_j)}\n \right)\n\n Args:\n x : input array\n axis: the axis or axes along which the :code:`log_softmax` should be\n computed. Either an integer or a tuple of integers.\n where: Elements to include in the :code:`log_softmax`. 
The output for any\n masked-out element is minus infinity.\n\n Returns:\n An array.\n\n Note:\n If any input values are ``+inf``, the result will be all ``NaN``: this reflects the\n fact that ``inf / inf`` is not well-defined in the context of floating-point math.\n\n See also:\n :func:`softmax`\n """"""\n numpy_util.check_arraylike(""log_softmax"", x)\n x_arr = jnp.asarray(x)\n x_max = jnp.max(x_arr, axis, where=where, initial=-np.inf, keepdims=True)\n x_safe = x_arr if where is None else jnp.where(where, x_arr, -np.inf)\n shifted = x_safe - lax.stop_gradient(x_max)\n shifted_logsumexp = jnp.log(\n jnp.sum(jnp.exp(shifted), axis, where=where, keepdims=True))\n result = shifted - shifted_logsumexp\n if where is not None:\n return jnp.where(where, result, -np.inf)\n return result\n\n\n# TODO(phawkins): this jit was found to change numerics in a test. Debug this.\n# @partial(api.jit, static_argnames=(""axis"",))\ndef softmax(x: ArrayLike,\n axis: Axis = -1,\n where: ArrayLike | None = None) -> Array:\n r""""""Softmax function.\n\n Computes the function which rescales elements to the range :math:`[0, 1]`\n such that the elements along :code:`axis` sum to :math:`1`.\n\n .. math ::\n \mathrm{softmax}(x) = \frac{\exp(x_i)}{\sum_j \exp(x_j)}\n\n Args:\n x : input array\n axis: the axis or axes along which the softmax should be computed. The\n softmax output summed across these dimensions should sum to :math:`1`.\n Either an integer or a tuple of integers.\n where: Elements to include in the :code:`softmax`. The output for any\n masked-out element is zero.\n\n Returns:\n An array.\n\n Note:\n If any input values are ``+inf``, the result will be all ``NaN``: this reflects the\n fact that ``inf / inf`` is not well-defined in the context of floating-point math.\n\n See also:\n :func:`log_softmax`\n """"""\n if config.softmax_custom_jvp.value:\n # mypy is confused by the `functools.partial` application in the definition\n # of `_softmax` and incorrectly concludes that `_softmax` returns\n # `ReturnValue` -- the unsubstituted type parameter of `custom_jvp`.\n return _softmax(x, axis, where)\n else:\n return _softmax_deprecated(x, axis, where)\n\n# TODO(mattjj): replace softmax with _softmax when deprecation flag is removed\n@partial(custom_derivatives.custom_jvp, nondiff_argnums=(1,))\ndef _softmax(\n x: ArrayLike,\n axis: Axis = -1,\n where: ArrayLike | None = None,\n initial: ArrayLike = -np.inf) -> Array:\n x_max = jnp.max(x, axis, where=where, initial=initial, keepdims=True)\n x_safe = x if where is None else jnp.where(where, x, initial)\n unnormalized = jnp.exp(x_safe - x_max)\n result = unnormalized / jnp.sum(unnormalized, axis, where=where, keepdims=True)\n if where is not None:\n result = jnp.where(where, result, 0)\n return result\n\n@_softmax.defjvp\ndef _softmax_jvp(axis, primals, tangents):\n (x, where, initial), (x_dot, _, _) = primals, tangents\n y = _softmax(x, axis, where, initial)\n return y, y * (x_dot - (y * x_dot).sum(axis, where=where, keepdims=True))\n\ndef _softmax_deprecated(\n x: ArrayLike,\n axis: Axis = -1,\n where: ArrayLike | None = None,\n initial: ArrayLike = -np.inf) -> Array:\n x_max = jnp.max(x, axis, where=where, initial=initial, keepdims=True)\n x_safe = x if where is None else jnp.where(where, x, initial)\n unnormalized = jnp.exp(x_safe - lax.stop_gradient(x_max))\n result = unnormalized / jnp.sum(unnormalized, axis, where=where, keepdims=True)\n if where is not None:\n result = jnp.where(where, result, 0)\n return result\n\n\n@partial(api.jit, 
static_argnames=(""axis"",))\ndef standardize(x: ArrayLike,\n axis: Axis = -1,\n mean: ArrayLike | None = None,\n variance: ArrayLike | None = None,\n epsilon: ArrayLike = 1e-5,\n where: ArrayLike | None = None) -> Array:\n r""""""Standardizes input to zero mean and unit variance.\n\n The standardization is given by:\n\n .. math::\n\n x_{std} = \frac{x - \langle x\rangle}{\sqrt{\langle(x - \langle x\rangle)^2\rangle + \epsilon}}\n\n where :math:`\langle x\rangle` indicates the mean of :math:`x`, and :math:`\epsilon` is\n a small correction factor introduced to avoid division by zero.\n\n Args:\n x: input array to be standardized.\n axis: integer or tuple of integers representing the axes along which\n to standardize. Defaults to the last axis (``-1``).\n mean: optionally specify the mean used for standardization. If not specified,\n then ``x.mean(axis, where=where)`` will be used.\n variance: optionally specify the variance used for standardization. If not\n specified, then ``x.var(axis, where=where)`` will be used.\n epsilon: correction factor added to variance to avoid division by zero; defaults\n to ``1E-5``.\n where: optional boolean mask specifying which elements to use when computing\n the mean and variance.\n\n Returns:\n An array of the same shape as ``x`` containing the standardized input.\n """"""\n numpy_util.check_arraylike(""standardize"", x)\n numpy_util.check_arraylike_or_none(""standardize"", mean, variance, where)\n if mean is None:\n mean = jnp.mean(x, axis, keepdims=True, where=where)\n if variance is None:\n # this definition is traditionally seen as less accurate than jnp.var's\n # mean((x - mean(x))**2) but may be faster and even, given typical\n # activation distributions and low-precision arithmetic, more accurate\n # when used in neural network normalization layers\n variance = jnp.mean(\n jnp.square(x), axis, keepdims=True, where=where) - jnp.square(mean)\n return jnp.subtract(x, jnp.asarray(mean)) * lax.rsqrt(jnp.asarray(variance) + epsilon)\n\n# TODO(slebedev): Change the type of `x` to `ArrayLike`.\n@partial(api.jit, static_argnames=(""num_classes"", ""dtype"", ""axis""))\ndef _one_hot(x: Array, num_classes: int, *,\n dtype: DTypeLike, axis: int | AxisName) -> Array:\n num_classes = core.concrete_dim_or_error(\n num_classes,\n ""The error arose in jax.nn.one_hot argument `num_classes`."")\n try:\n output_pos_axis = util.canonicalize_axis(axis, x.ndim + 1) # type: ignore[arg-type]\n except TypeError:\n axis_size = lax.axis_size(axis)\n if num_classes != axis_size:\n raise ValueError(f""Expected num_classes to match the size of axis {axis}, ""\n f""but {num_classes} != {axis_size}"") from None\n axis_idx = lax.axis_index(axis)\n return jnp.asarray(_dot_product_attention_xla == axis_idx, dtype=dtype)\n axis = operator.index(axis) # type: ignore[arg-type]\n lhs = lax.expand_dims(x, (axis,))\n rhs_shape = [1] * x.ndim\n rhs_shape.insert(output_pos_axis, num_classes)\n # TODO(yashkatariya): Maybe expose `out_sharding` on `one_hot` too?\n rhs_sharding = NamedSharding(x.aval.sharding.mesh, P(*[None] * len(rhs_shape))) # pytype: disable=attribute-error\n rhs = lax.broadcasted_iota(x.dtype, rhs_shape, output_pos_axis,\n out_sharding=rhs_sharding)\n return (lhs == rhs).astype(dtype)\n\n# TODO(slebedev): Change the type of `x` to `ArrayLike`.\ndef one_hot(x: Any, num_classes: int, *,\n dtype: Any | None = None, axis: int | AxisName = -1) -> Array:\n """"""One-hot encodes the given indices.\n\n Each index in the input ``x`` is encoded as a vector of zeros of length\n 
``num_classes`` with the element at ``index`` set to one::\n\n >>> jax.nn.one_hot(jnp.array([0, 1, 2]), 3)\n Array([[1., 0., 0.],\n [0., 1., 0.],\n [0., 0., 1.]], dtype=float32)\n\n Indices outside the range [0, num_classes) will be encoded as zeros::\n\n >>> jax.nn.one_hot(jnp.array([-1, 3]), 3)\n Array([[0., 0., 0.],\n [0., 0., 0.]], dtype=float32)\n\n Args:\n x: A tensor of indices.\n num_classes: Number of classes in the one-hot dimension.\n dtype: optional, a float dtype for the returned values (default :obj:`jnp.float_`).\n axis: the axis or axes along which the function should be\n computed.\n """"""\n num_classes = core.concrete_dim_or_error(\n num_classes,\n ""The error arose in jax.nn.one_hot argument `num_classes`."")\n x_arr = jnp.asarray(x)\n if not dtypes.isdtype(x_arr.dtype, ""integral""):\n # Deprecated 2024-12-18\n deprecations.warn(\n 'jax-nn-one-hot-float-input',\n f""jax.nn.one_hot input should be integer-typed; got dtype={x_arr.dtype}"",\n stacklevel=1)\n dtype = dtypes.default_float_dtype() if dtype is None else dtype\n return _one_hot(x_arr, num_classes, dtype=dtype, axis=axis)\n\n\n@custom_derivatives.custom_jvp\n@api.jit\ndef relu6(x: ArrayLike) -> Array:\n r""""""Rectified Linear Unit 6 activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{relu6}(x) = \min(\max(x, 0), 6)\n\n except under differentiation, we take:\n\n .. math::\n \nabla \mathrm{relu}(0) = 0\n\n and\n\n .. math::\n \nabla \mathrm{relu}(6) = 0\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`relu`\n """"""\n return jnp.minimum(jnp.maximum(x, 0), 6.)\nrelu6.defjvps(lambda g, ans, x:\n lax.select((x > 0) & (x < 6), g, lax.full_like(g, 0)))\n\n@api.jit\ndef hard_sigmoid(x: ArrayLike) -> Array:\n r""""""Hard Sigmoid activation function.\n\n Computes the element-wise function\n\n .. math::\n \mathrm{hard\_sigmoid}(x) = \frac{\mathrm{relu6}(x + 3)}{6}\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`relu6`\n """"""\n return relu6(x + 3.) / 6.\n\n@api.jit\ndef hard_silu(x: ArrayLike) -> Array:\n r""""""Hard SiLU (swish) activation function\n\n Computes the element-wise function\n\n .. 
math::\n \mathrm{hard\_silu}(x) = x \cdot \mathrm{hard\_sigmoid}(x)\n\n Both :func:`hard_silu` and :func:`hard_swish` are aliases for the same\n function.\n\n Args:\n x : input array\n\n Returns:\n An array.\n\n See also:\n :func:`hard_sigmoid`\n """"""\n numpy_util.check_arraylike(""hard_silu"", x)\n x_arr = jnp.asarray(x)\n return x_arr * hard_sigmoid(x_arr)\n\nhard_swish = hard_silu\n\ndef _get_large_negative(dtype):\n dtype_max = dtypes.finfo(dtype).max\n return jnp.asarray(-0.7 * dtype_max, dtype=dtype)\n\ndef _get_causal_mask(T, S):\n mask = jnp.tril(jnp.ones((T, S), dtype=bool))\n return mask[None, None, :, :]\n\ndef _get_window_mask(T: int, S: int, local_window_size: tuple[int, int]):\n query_pos = jnp.array(range(T))\n key_pos = jnp.array(range(S))\n left_window, right_window = local_window_size\n left_mask = query_pos[..., None] <= key_pos[..., None, :] + left_window\n right_mask = query_pos[..., None] >= key_pos[..., None, :] - right_window\n return jnp.logical_and(right_mask, left_mask)[None, None, :, :]\n\ndef _get_padding_mask_logits(T, S, q_seqlen, kv_seqlen):\n q_mask = True\n kv_mask = True\n if q_seqlen is not None:\n q_indices = jnp.arange(0, T)[None, :, None]\n q_mask = q_indices < q_seqlen[:, None, None]\n if kv_seqlen is not None:\n kv_indices = jnp.arange(0, S)[None, None, :]\n kv_mask = kv_indices < kv_seqlen[:, None, None]\n mask = jnp.logical_and(q_mask, kv_mask)\n return mask[:, None, :, :]\n\ndef _get_padding_mask_encoded(T, q_seqlen):\n q_indices = jnp.arange(0, T)[None, :]\n mask = q_indices < q_seqlen[:, None]\n return mask[:, :, None, None]\n\ndef _apply_masks(logits, mask, is_causal, q_seqlen, kv_seqlen,\n local_window_size):\n if mask is None and not is_causal and q_seqlen is None and kv_seqlen is None:\n return logits\n\n combined_mask = jnp.ones_like(logits, dtype=bool)\n if mask is not None:\n assert mask.dtype == np.dtype(bool)\n combined_mask = jnp.logical_and(combined_mask, mask)\n\n T, S = logits.shape[2], logits.shape[3]\n\n if is_causal:\n mask = _get_causal_mask(T, S)\n combined_mask = jnp.logical_and(combined_mask, mask)\n\n if local_window_size is not None:\n mask = _get_window_mask(T, S, local_window_size)\n combined_mask = jnp.logical_and(combined_mask, mask)\n\n if q_seqlen is not None or kv_seqlen is not None:\n mask = _get_padding_mask_logits(T, S, q_seqlen, kv_seqlen)\n combined_mask = jnp.logical_and(combined_mask, mask)\n\n large_negative_number = _get_large_negative(logits.dtype)\n padded_logits = jnp.where(combined_mask, logits, large_negative_number)\n return padded_logits\n\ndef _dot_product_attention_core(query, key, value, bias, mask, is_causal,\n scale, q_seqlen, kv_seqlen, local_window_size):\n logits_dtype = jnp.promote_types(query.dtype, np.float32)\n\n # If the query and logits dtypes are different, then the default precision\n # can use inconsistent types in the backwards pass\n # (see https://github.com/jax-ml/jax/issues/24047).\n if query.dtype == dtypes.bfloat16:\n precision = lax.DotAlgorithmPreset.BF16_BF16_F32\n elif query.dtype == np.float16:\n precision = lax.DotAlgorithmPreset.F16_F16_F32\n # TODO(sbodenstein): Implement this fix for all dtypes.\n else:\n precision = None\n\n # Explicit precision will fail on platforms that don't support it. 
For example,\n # some GPUs do not support BF16_BF16_F32, and TPU does not support F16_F16_F32.\n # Use the default precision as a fallback in these cases.\n try:\n logits = jnp_einsum.einsum(\n ""BTNH,BSNH->BNTS"",\n query,\n key,\n precision=precision,\n preferred_element_type=logits_dtype,\n )\n except: # pylint: disable=bare-except\n logits = jnp_einsum.einsum(\n ""BTNH,BSNH->BNTS"",\n query,\n key,\n precision=None,\n preferred_element_type=logits_dtype,\n )\n\n logits *= jnp.array(scale, dtype=logits.dtype)\n\n if bias is not None:\n logits = (logits + bias).astype(logits.dtype)\n\n padded_logits = _apply_masks(logits, mask, is_causal, q_seqlen, kv_seqlen,\n local_window_size)\n\n # Softmax and it is always carried out in fp32.\n padded_logits = padded_logits.astype(np.float32)\n probs = softmax(padded_logits, axis=-1).astype(key.dtype)\n\n encoded = jnp_einsum.einsum('BNTS,BSNH->BTNH', probs, value)\n if q_seqlen is not None:\n mask = _get_padding_mask_encoded(encoded.shape[1], q_seqlen)\n encoded *= mask.astype(encoded.dtype)\n return encoded\n\ndef _dot_product_attention_xla(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None,\n mask: Array | None,\n is_causal: bool,\n scale: float,\n q_seqlen: Array | None,\n kv_seqlen: Array | None,\n local_window_size: tuple[int, int] | None):\n\n B, T, N, H = query.shape\n _, S, K, _ = key.shape\n G = N // K\n\n query = jnp.reshape(query, (B, T, K, G, H))\n def _reshape_to_grouped(t):\n if t is not None:\n tB, tN, tT, tS = t.shape\n if tN == 1:\n t = jnp.broadcast_to(t[:, :, None, :, :], (tB, tN, G, tT, tS))\n else:\n assert tN == N\n t = jnp.reshape(t, (tB, K, G, tT, tS))\n return t\n bias = _reshape_to_grouped(bias)\n mask = _reshape_to_grouped(mask)\n vmapped_fn = api.vmap(\n _dot_product_attention_core,\n in_axes=(3, None, None, 2, 2, None, None, None, None, None),\n out_axes=3,\n )\n encoded = vmapped_fn(query, key, value, bias, mask, is_causal, scale,\n q_seqlen, kv_seqlen, local_window_size)\n encoded = jnp.reshape(encoded, (B, T, N, H))\n return encoded\n\ndef bias_fwd_rule(a, query_head_num):\n return bias_fwd_p.bind(a, query_head_num), a\ndef bias_bwd_rule(query_head_num, res, g):\n a = res\n if a.shape[0] > 1 or a.shape[-3] != query_head_num:\n raise ValueError(""cuDNN only supports bias gradient when the batch size is ""\n f""1 and the head number matches the query, but got ""\n f""B={a.shape[0]}, N={a.shape[-3]}."")\n return (bias_bwd_p.bind(g, a, query_head_num),)\n\n# This function uses two custom primitives, `bias_fwd` and `bias_bwd`, to work\n# around a cuDNN issue where bias gradients are only supported when the batch\n# size is 1 and the number of heads matches the query.\n# TODO(kaixih@nvidia): Remove this workaround once cuDNN resolves the issue.\n@partial(custom_derivatives.custom_vjp, nondiff_argnums=(1,))\ndef check_valid_bias_batch(x, query_head_num):\n output, _ = bias_fwd_rule(x, query_head_num)\n return output\ncheck_valid_bias_batch.defvjp(bias_fwd_rule, bias_bwd_rule)\n\nbias_fwd_p = core.Primitive('bias_fwd')\nbias_fwd_p.multiple_results = False\nbias_bwd_p = core.Primitive('bias_bwd')\nbias_bwd_p.multiple_results = False\n\ndef bias_fwd_impl(a, query_head_num):\n return a\ndef bias_bwd_impl(g, a, query_head_num):\n return g\nbias_fwd_p.def_impl(bias_fwd_impl)\nbias_bwd_p.def_impl(bias_bwd_impl)\n\ndef bias_fwd_abstract_eval(a, query_head_num):\n return core.ShapedArray(a.shape, a.dtype)\ndef bias_bwd_abstract_eval(g, a, query_head_num):\n return core.ShapedArray(g.shape, 
g.dtype)\nbias_fwd_p.def_abstract_eval(bias_fwd_abstract_eval)\nbias_bwd_p.def_abstract_eval(bias_bwd_abstract_eval)\n\ndef bias_fwd_lowering(ctx, a, query_head_num):\n return [a]\ndef bias_bwd_lowering(ctx, g, a, query_head_num):\n return [g]\nmlir.register_lowering(bias_fwd_p, bias_fwd_lowering)\nmlir.register_lowering(bias_bwd_p, bias_bwd_lowering)\n\ndef bias_fwd_batch_rule(batched_args, batch_dims):\n x, query_head_num = batched_args\n a = batch_dims[0]\n output, _ = bias_fwd_rule(x, query_head_num)\n return output, a\ndef bias_bwd_batch_rule(batched_args, batch_dims):\n g, x, query_head_num = batched_args\n b = batch_dims[0]\n *Bs, _, _, _ = x.shape\n B = math.prod(Bs)\n x = jnp.reshape(x, (B,) + x.shape[-3:])\n output, = bias_bwd_rule(query_head_num, x, g)\n return output, b\nbatching.primitive_batchers[bias_fwd_p] = bias_fwd_batch_rule\nbatching.primitive_batchers[bias_bwd_p] = bias_bwd_batch_rule\n\ndef dot_product_attention(\n query: ArrayLike,\n key: ArrayLike,\n value: ArrayLike,\n bias: ArrayLike | None = None,\n mask: ArrayLike | None = None,\n *,\n scale: float | None = None,\n is_causal: bool = False,\n query_seq_lengths: ArrayLike | None = None,\n key_value_seq_lengths: ArrayLike | None = None,\n local_window_size: int | tuple[int, int] | None = None,\n implementation: Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention function.\n\n Computes the attention function on Query, Key, and Value tensors:\n\n .. math::\n\n \mathrm{Attention}(Q, K, V)=\mathrm{softmax}(\frac{QK^T}{\sqrt{d_k}})V\n\n If we define :code:`logits` as the output of :math:`QK^T` and the\n :code:`probs` as the output of :math:`softmax`.\n\n Throughout this function, we utilize the following uppercase letters to\n represent the shape of array::\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head\n K = number of key/value heads\n G = number of groups, which equals to N // K\n\n Args:\n query: query array; shape :code:`(BTNH|TNH)`\n key: key array: shape :code:`(BSKH|SKH)`. When `K` equals `N`, multi-headed\n attention (MHA https://arxiv.org/abs/1706.03762) is performed. Otherwise,\n grouped query attention (GQA https://arxiv.org/abs/2305.13245) is\n performed if `N` is a multiple of `K`, and multi-query attention (MQA\n https://arxiv.org/abs/1911.02150) is performed if `K == 1` (a special case\n of GQA).\n value: value array, should have the same shape as the `key` array.\n bias: optional, bias array to be added to logits; The shape must be 4D and\n be broadcastable to :code:`(BNTS|NTS)`.\n mask: optional, mask array used to filter out logits. It is a boolean mask\n where `True` indicates the element should take part in attention. For an\n additive mask, users should pass it to `bias`. The shape must be 4D and be\n broadcastable to :code:`(BNTS|NTS)`.\n scale: scale for the logits. If None, the scale will be set to 1 divided by\n the square root of query's head dimension (i.e. H).\n is_causal: If true, causal attention will be applied. 
Note, some\n implementations like `xla` will generate a mask tensor and apply it to the\n logits to mask out the non-causal parts of the attention matrix, but other\n implementations like `cudnn` will avoid computing the non-causal regions,\n providing speedups.\n query_seq_lengths: `int32` array of sequence lengths for query; shape\n :code:`(B)`\n key_value_seq_lengths: `int32` array of sequence lengths for key and value;\n shape :code:`(B)`\n local_window_size: Window sizes to make self attention to attend to each\n token's local window. If set, this specifies the (left_window_size,\n right_window_size) for each token. E.g., if local_window_size == (3, 2)\n and the sequence is [0, 1, 2, 3, 4, 5, c, 7, 8, 9], token `c` can attend\n to [3, 4, 5, c, 7, 8]. If a single int is given, it will be interpreted as\n a symmetric window (window_size, window_size).\n implementation: A string to control which implementation backend to use.\n Supported strings are `xla`, `cudnn` (cuDNN flash attention). It defaults\n to `None`, which will automatically select the best available backend.\n Note, `cudnn` supports only a subset of shapes/dtypes, and an exception\n will be thrown if its not supported.\n\n Returns:\n An array of the attention output with the same shape as :code:`query`.\n """"""\n output_shape = jnp.asarray(query).shape\n def _ensure_4d(t):\n t = jnp.asarray(t)\n dims_to_add = 4 - t.ndim\n if dims_to_add > 0:\n return jnp.expand_dims(t, axis=tuple(range(dims_to_add)))\n return t\n\n query_arr = _ensure_4d(query)\n key_arr = _ensure_4d(key)\n value_arr = _ensure_4d(value)\n bias = _ensure_4d(bias) if bias is not None else None\n mask = _ensure_4d(mask) if mask is not None else None\n if query_seq_lengths is not None:\n query_seq_lengths = jnp.asarray(query_seq_lengths)\n if key_value_seq_lengths is not None:\n key_value_seq_lengths = jnp.asarray(key_value_seq_lengths)\n if isinstance(local_window_size, int):\n local_window_size = (local_window_size, local_window_size)\n\n def _check_shape_and_dtype(t: Array | None, shape: Sequence[int],\n dtype: DType | None, name: str) -> None:\n if t is None:\n return\n if t.ndim != len(shape):\n raise ValueError(f""{name} ndim should be {len(shape)}, but got {t.ndim}"")\n if dtype is not None and t.dtype != dtype:\n raise ValueError(f""{name} dtype should be {dtype}, but got {t.dtype}"")\n for i in range(t.ndim):\n if shape[i] != -1 and t.shape[i] != shape[i]:\n raise ValueError(f""{name} shape should be {shape}: but got {t.shape}"")\n\n B, S, K, H = key_arr.shape\n _check_shape_and_dtype(value_arr, [B, S, K, H], key_arr.dtype, 'value')\n _check_shape_and_dtype(query_arr, [B, -1, -1, H], key_arr.dtype, 'query')\n _check_shape_and_dtype(mask, [-1] * 4, np.dtype(bool), 'mask')\n _check_shape_and_dtype(bias, [-1] * 4, None, 'bias')\n _check_shape_and_dtype(query_seq_lengths, [B], np.dtype('int32'),\n 'query_seq_lengths')\n _check_shape_and_dtype(key_value_seq_lengths, [B], np.dtype('int32'),\n 'key_value_seq_lengths')\n if query_arr.shape[-2] % K != 0:\n raise ValueError(f""The number of query heads must be a multiple of ""\n f""key/value heads, but got {query_arr.shape[-2]} vs {K}"")\n\n scale_val = (1.0 / np.sqrt(H)) if scale is None else scale\n\n match implementation:\n case 'xla':\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case 'cudnn':\n if bias is not None:\n bias = 
check_valid_bias_batch(bias, query_arr.shape[-2])\n bias = jnp.asarray(bias)\n use_padding = (\n query_seq_lengths is not None or key_value_seq_lengths is not None\n )\n if use_padding:\n if query_seq_lengths is None:\n T = query_arr.shape[1]\n query_seq_lengths = jnp.full((B,), T, dtype=np.int32)\n if key_value_seq_lengths is None:\n key_value_seq_lengths = jnp.full((B,), S, dtype=np.int32)\n\n mask_type = MaskType.NO_MASK\n if use_padding and is_causal:\n mask_type = MaskType.PADDING_CAUSAL\n elif is_causal:\n mask_type = MaskType.CAUSAL\n elif use_padding:\n mask_type = MaskType.PADDING\n # CuDNN supports only the left window with an exclusive boundary when\n # causal mask is enabled.\n sliding_window = None\n if local_window_size is not None:\n l_window, r_window = local_window_size\n if r_window == 0 or mask_type == MaskType.CAUSAL:\n sliding_window = l_window + 1\n else:\n raise ValueError(f""cuDNN doesn't support right window: {r_window} ""\n ""when causal mask is not used."")\n\n out = cudnn_dot_product_attention(\n query_arr, key_arr, value_arr, bias, mask, query_seq_lengths,\n key_value_seq_lengths, scale=scale_val, mask_type=mask_type,\n sliding_window_length=sliding_window,\n )\n case None:\n # TODO(kaixih@nvidia) Defaults to XLA for now. Will automatically select\n # best backend.\n out = _dot_product_attention_xla(\n query_arr, key_arr, value_arr, bias, mask, is_causal=is_causal,\n scale=scale_val, q_seqlen=query_seq_lengths,\n kv_seqlen=key_value_seq_lengths,\n local_window_size=local_window_size,\n )\n case _:\n raise ValueError(f""Unsupported implementation option: {implementation}"")\n\n return jnp.reshape(out, output_shape)\n\ndef scaled_matmul(\n lhs: Array,\n rhs: Array,\n lhs_scales: Array,\n rhs_scales: Array,\n preferred_element_type: DTypeLike = np.float32,\n) -> Array:\n r""""""Scaled matrix multiplication function.\n\n Performs block-scaled matmul of `a` and `b` using `a_scales` and `b_scales`.\n The last dim is the contracting dim, and block size is inferred.\n\n Mathematically, this operation is equivalent to::\n\n a_block_size = a.shape[-1] // a_scales.shape[-1]\n b_block_size = b.shape[-1] // b_scales.shape[-1]\n a_scaled = a * jnp.repeat(a_scales, a_block_size, axis=-1)\n b_scaled = b * jnp.repeat(b_scales, b_block_size, axis=-1)\n jnp.einsum('BMK,BNK->BMN', a_scaled, b_scaled)\n\n Args:\n lhs (Array): Operand a, shape (B, M, K).\n rhs (Array): Operand b, shape (B, N, K).\n lhs_scales (Array): Shape (B, M, K_a), where `K % K_a == 0`.\n rhs_scales (Array): Shape (B, N, K_b), where `K % K_b == 0`.\n preferred_element_type (DTypeLike, optional): Defaults to `jnp.float32`.\n\n Returns:\n Array of shape (B, M, N).\n\n Notes:\n - We currently do not support user-defined `precision` for customizing the\n compute data type. 
It is fixed to `jnp.float32`.\n - Block size is inferred as `K // K_a` for `a` and `K // K_b` for `b`.\n - To use cuDNN with Nvidia Blackwell GPUs, inputs must match::\n\n # mxfp8\n a, b: jnp.float8_e4m3fn | jnp.float8_e5m2\n a_scales, b_scales: jnp.float8_e8m0fnu\n block_size: 32\n # nvfp4\n a, b: jnp.float4_e2m1fn\n a_scales, b_scales: jnp.float8_e4m3fn\n block_size: 16\n\n Examples:\n\n Basic case:\n\n >>> a = jnp.array([1, 2, 3]).reshape((1, 1, 3))\n >>> b = jnp.array([4, 5, 6]).reshape((1, 1, 3))\n >>> a_scales = jnp.array([0.5]).reshape((1, 1, 1))\n >>> b_scales = jnp.array([0.5]).reshape((1, 1, 1))\n >>> scaled_matmul(a, b, a_scales, b_scales) # doctest: +SKIP\n Array([[[8.]]], dtype=float32)\n\n Using fused cuDNN call on Blackwell GPUs:\n\n >>> dtype = jnp.float8_e4m3fn\n >>> a = jax.random.normal(jax.random.PRNGKey(1), (3, 128, 64), dtype=dtype)\n >>> b = jax.random.normal(jax.random.PRNGKey(2), (3, 128, 64), dtype=dtype)\n >>> a_scales = jnp.ones((3, 128, 4), dtype=jnp.float8_e8m0fnu)\n >>> b_scales = jnp.ones((3, 128, 4), dtype=jnp.float8_e8m0fnu)\n >>> scaled_matmul(a, b, a_scales, b_scales) # doctest: +SKIP\n """"""\n a, b, a_scales, b_scales = lhs, rhs, lhs_scales, rhs_scales\n if not all(x.ndim == 3 for x in (a, b, a_scales, b_scales)):\n raise ValueError(\n ""scaled_matmul requires all inputs to be 3-dimensional arrays""\n )\n\n B_a, M_a, K_a = a.shape\n B_b, N_b, K_b = b.shape\n if K_a != K_b or B_a != B_b:\n raise ValueError(\n ""scaled_matmul requires inputs a and b to have matching batch (B) ""\n f""and contract (K) dimensions, but got shapes {a.shape} and ""\n f""{b.shape}""\n )\n\n B_as, M_as, K_as = a_scales.shape\n B_bs, N_bs, K_bs = b_scales.shape\n if K_as != K_bs or B_as != B_bs:\n raise ValueError(\n ""scaled_matmul requires scales to have matching batch (B) and ""\n f""contract (K) dimensions, but got shapes {a_scales.shape} and ""\n f""{b_scales.shape}""\n )\n\n if M_as != M_a or N_bs != N_b:\n raise ValueError(\n ""scaled_matmul requires scales to match non-contract dimensions of ""\n f""inputs, but got shapes a: {a.shape}, b: {b.shape}, a_scales: ""\n f""{a_scales.shape}, b_scales: {b_scales.shape}""\n )\n\n preferred_element_type = dtypes.check_and_canonicalize_user_dtype(\n preferred_element_type, ""scaled_matmul""\n )\n out = cudnn_scaled_matmul(\n a,\n b,\n a_scales,\n b_scales,\n preferred_element_type=preferred_element_type,\n )\n return out\n\ndef get_scaled_dot_general_config(mode: Literal['nvfp4', 'mxfp8'],\n global_scale: Array | None = None):\n r""""""Get quantization configs for scaled_dot_general.\n\n Create quantization configs for the `jax.nn.scaled_dot_general`.\n\n See Also:\n - :func:`jax.nn.scaled_dot_general`: Scaled dot general function.\n """"""\n\n if mode == 'nvfp4':\n one = jnp.ones((1,), dtype=np.float32)\n return BlockScaleConfig(\n mode='nvfp4',\n block_size=16,\n data_type=dtypes.float4_e2m1fn,\n scale_type=dtypes.float8_e4m3fn,\n global_scale=one if global_scale is None else global_scale,\n infer_only=False\n )\n elif mode == 'mxfp8':\n return BlockScaleConfig(\n mode='mxfp8',\n block_size=32,\n data_type=dtypes.float8_e4m3fn,\n scale_type=dtypes.float8_e8m0fnu,\n global_scale=None,\n infer_only=False\n )\n else:\n raise ValueError(f""Unsupported mode: {mode}"")\n\ndef scaled_dot_general(\n lhs, rhs,\n dimension_numbers,\n preferred_element_type=np.float32,\n configs: list[BlockScaleConfig] | None = None,\n implementation: Literal['cudnn'] | None = None,\n ):\n r""""""Scaled dot general operation.\n\n Performs a generalized dot 
product with block-scaled quantization on the\n lhs and rhs inputs. This operation extends `lax.dot_general` to support\n user-defined scaling configurations.\n\n Essentially, the operation follows::\n\n a, a_scales = quantize(lhs, configs[0])\n b, b_scales = quantize(rhs, configs[1])\n c = jax.nn.scaled_matmul(a, b, a_scales, b_scales)\n\n Args:\n lhs (ArrayLike): Input array.\n rhs (ArrayLike): Input array.\n dimension_numbers (DotDimensionNumbers): A tuple of two tuples specifying\n the contraction and batch dimensions:\n `((lhs_contracting_dims, rhs_contracting_dims), (lhs_batch_dims, rhs_batch_dims))`.\n preferred_element_type (DTypeLike, optional): Output data type of the dot\n product. Defaults to `jnp.float32`. Other valid types include\n `jnp.bfloat16` and `jnp.float16`.\n configs (list of BlockScaleConfig, optional): Scaling configurations for\n lhs, rhs, and gradients. Users can obtain valid configurations via\n `jax.nn.get_scaled_dot_general_config`. Currently, `nvfp4` and `mxfp8`\n are supported. If `None`, falls back to `lax.dot_general`.\n implementation: str\n (Deprecated) Backend selector, now ignored. The system chooses the backend\n automatically. Scheduled for removal in future releases.\n\n Returns:\n Array: The resulting tensor, with batch dimensions first, followed by\n non-contracting/non-batch dimensions of lhs, and then those of rhs.\n\n See Also:\n - :func:`jax.nn.scaled_matmul`: Scaled matmul function.\n - :func:`jax.lax.dot_general`: General dot product operator.\n\n Notes:\n - Unlike `nn.scaled_matmul`, which assumes quantized low-precision\n inputs with explicit scaling factors, this operator takes high-precision\n inputs, applies quantization internally, and handles the backward pass.\n\n Examples:\n\n Creating config for mxfp8:\n\n >>> configs = [jax.nn.get_scaled_dot_general_config('mxfp8')] * 3\n\n Creating config for nvfp4:\n\n >>> global_scale = jnp.array([0.5], jnp.float32)\n >>> configs = [jax.nn.get_scaled_dot_general_config('nvfp4', global_scale)] * 3\n\n Using scaled_dot_general with the configs:\n\n >>> import functools\n >>> scaled_dot_general_fn = functools.partial(jax.nn.scaled_dot_general, configs=configs)\n >>> lhs = jax.random.normal(jax.random.PRNGKey(1), (3, 128, 64))\n >>> rhs = jax.random.normal(jax.random.PRNGKey(2), (3, 128, 64))\n >>> out = scaled_dot_general_fn(lhs, rhs, (((2,), (2,)), ((0,), (0,)))) # doctest: +SKIP\n """"""\n if implementation is not None:\n warnings.warn(""Backend selector, now ignored. The system chooses the ""\n ""backend automatically."", DeprecationWarning)\n\n if configs is None:\n return lax.dot_general(lhs, rhs, dimension_numbers,\n preferred_element_type=preferred_element_type)\n\n out = cudnn_scaled_dot_general(\n lhs, rhs, dimension_numbers,\n preferred_element_type=preferred_element_type,\n configs=configs\n )\n\n return out\n\n@custom_derivatives.custom_jvp\n@api.jit\ndef log1mexp(x: ArrayLike) -> Array:\n r""""""Numerically stable calculation of :math:`\log(1 - \exp(-x))`.\n\n This function is undefined for :math:`x < 0`.\n\n Based on `TensorFlow's implementation `_.\n\n References:\n .. [1] Martin Mächler. 
`Accurately Computing log(1 − exp(−|a|)) Assessed by the Rmpfr package.\n `_.\n """"""\n numpy_util.check_arraylike(""log1mexp"", x)\n x = jnp.asarray(x)\n c = jnp.log(2.0)\n return jnp.where(\n x < c,\n jnp.log(-jnp.expm1(-x)),\n jnp.log1p(-jnp.exp(-x)),\n )\n\nlog1mexp.defjvps(lambda g, ans, x: g / jnp.expm1(x))\n",python,tab +349,1900915,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",31618,0,"",python,selection_command +350,1906246,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32013,0,"",python,selection_mouse +351,1908110,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32027,0,"",python,selection_mouse +352,1908285,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32023,7,"Literal",python,selection_mouse +353,1908613,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32023,83,"Literal['xla', 'cudnn'] | None = None) -> Array:\n r""""""Scaled dot product attention",python,selection_mouse +354,1908861,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32023,13,"Literal['xla'",python,selection_mouse +355,1908880,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32023,14,"Literal['xla',",python,selection_mouse +356,1908923,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32023,15,"Literal['xla', ",python,selection_mouse +357,1908979,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32023,16,"Literal['xla', '",python,selection_mouse +358,1909032,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32023,21,"Literal['xla', 'cudnn",python,selection_mouse +359,1909426,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32023,22,"Literal['xla', 'cudnn'",python,selection_mouse +360,1909992,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32023,23,"Literal['xla', 'cudnn']",python,selection_mouse +361,1910004,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32023,24,"Literal['xla', 'cudnn'] ",python,selection_mouse +362,1910059,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32023,25,"Literal['xla', 'cudnn'] |",python,selection_mouse +363,1910543,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32048,0,"",python,selection_mouse +364,1911051,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32046,0,"",python,selection_mouse +365,1911214,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32046,1," ",python,selection_mouse +366,1911453,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32045,2,"] ",python,selection_mouse +367,1911522,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32044,3,"'] ",python,selection_mouse +368,1911580,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32045,2,"] ",python,selection_mouse +369,1911613,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32046,1," ",python,selection_mouse +370,1912127,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32047,0,"",python,selection_mouse +371,1912850,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",32015,0,"",python,selection_mouse +372,1929814,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",34816,0,"",python,selection_command +373,1936875,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",34858,0,"",python,selection_command +374,1937635,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",37335,0,"",python,selection_command 
+375,1956904,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39145,0,"",python,selection_mouse +376,1957700,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39123,0,"",python,selection_mouse +377,1957842,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39123,1," ",python,selection_mouse +378,1958100,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39123,5," TODO",python,selection_mouse +379,1958119,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39123,84," TODO(kaixih@nvidia) Defaults to XLA for now. Will automatically select\n # best",python,selection_mouse +380,1958178,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39123,92," TODO(kaixih@nvidia) Defaults to XLA for now. Will automatically select\n # best backend",python,selection_mouse +381,1958305,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39123,93," TODO(kaixih@nvidia) Defaults to XLA for now. Will automatically select\n # best backend.",python,selection_mouse +382,1958600,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39216,0,"",python,selection_mouse +383,1958604,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39215,0,"",python,selection_command +384,1959406,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39216,0,"",python,selection_mouse +385,1959408,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39215,0,"",python,selection_command +386,1959553,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39216,0,"",python,selection_mouse +387,1959555,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39215,0,"",python,selection_command +388,1959798,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39215,1,".",python,selection_mouse +389,1959799,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39216,0,"",python,selection_command +390,1961345,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39242,0,"",python,selection_mouse +391,1962078,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39136,0,"",python,selection_mouse +392,1977352,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39713,0,"",python,selection_mouse +393,1977364,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39712,0,"",python,selection_command +394,1977966,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39495,0,"",python,selection_mouse +395,1977976,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",39494,0,"",python,selection_command +396,1989258,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",37394,0,"",python,selection_mouse +397,1994616,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",38884,0,"",python,selection_mouse +398,1997308,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",38890,0,"",python,selection_mouse +399,1997650,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",0,0,"# Copyright 2024 The JAX Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# https://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for 
the specific language governing permissions and\n# limitations under the License.\n\nimport enum\nimport functools\nimport json\nimport math\nfrom typing import TypedDict\n\nfrom jax._src import core\nfrom jax._src import custom_derivatives\nfrom jax._src import dispatch\nfrom jax._src import dtypes\nfrom jax._src import numpy as jnp\nfrom jax._src import xla_bridge\nfrom jax._src.custom_partitioning import custom_partitioning\nfrom jax._src.custom_partitioning_sharding_rule import BATCHING, ArrayMapping, CompoundFactor, SdyShardingRule\nfrom jax._src.interpreters import batching\nfrom jax._src.interpreters import mlir\nfrom jax._src.lax import parallel as lax_parallel\nfrom jax._src.lib import cuda_versions\nfrom jax._src.lib.mlir import ir\nfrom jax._src.lib.mlir.dialects import hlo\nfrom jax._src.sharding_impls import NamedSharding, PartitionSpec\nfrom jax._src.typing import Array\n\nimport numpy as np\n\n\nclass FP8Params(TypedDict):\n amax_dQ: float # Amax of gradient of query\n amax_dK: float # Amax of gradient of key\n amax_dV: float # Amax of gradient of value\n amax_dP: float # Amax of gradient of state\n descale_q: float # Descaling factor of query\n descale_k: float # Descaling factor of key\n descale_v: float # Descaling factor of value\n descale_s: float # Descaling factor of attention score\n scale_s: float # Scale factor for S tensor\n scale_o: float # Scale factor for output\n descale_o: float # Descale factor for output (bwd)\n descale_dO: float # Descale factor for output gradient (bwd)\n descale_dP: float # Descale factor for P gradient tensor (bwd)\n scale_dQ: float # Scale factor for query gradient (bwd)\n scale_dK: float # Scale factor for key gradient (bwd)\n scale_dV: float # Scale factor for value gradient (bwd)\n scale_dP: float # Scale factor for state gradient (bwd)\n\n\nclass AttentionLayout(enum.Enum):\n BTNH = 0\n BNTH = 1\n\n\nclass MaskType(enum.Enum):\n NO_MASK = 0\n PADDING = 1\n CAUSAL = 2\n PADDING_CAUSAL = 3\n ALIBI = 4\n\n\ndef convert_mask_type_to_string(mask_type: MaskType) -> str:\n if mask_type == MaskType.NO_MASK:\n return ""NO_MASK""\n elif mask_type == MaskType.PADDING:\n return ""PADDING""\n elif mask_type == MaskType.CAUSAL:\n return ""CAUSAL""\n elif mask_type == MaskType.PADDING_CAUSAL:\n return ""PADDING_CAUSAL""\n elif mask_type == MaskType.ALIBI:\n return ""ALIBI""\n else:\n raise ValueError(f""Unexpected mask type: {mask_type}"")\n\ndef has_padding(mask_type: MaskType) -> bool:\n return mask_type == MaskType.PADDING or mask_type == MaskType.PADDING_CAUSAL\n\ndef should_export_dbias(bias_shape, query_shape, layout) -> bool:\n b_B, b_N, _, _ = bias_shape\n if layout == AttentionLayout.BNTH.value:\n _, q_N, _, _ = query_shape\n else:\n _, _, q_N, _ = query_shape\n return b_B == 1 and b_N == q_N\n\ndef get_large_negative_number(dtype):\n # temp WAR as cuDNN has a bug for subtraction between two large negative value\n if dtype == np.dtype('bfloat16'):\n return jnp.asarray(-2 << 40, dtype=dtype)\n elif dtype == np.dtype('float16'):\n return jnp.asarray(-2 << 14, dtype=dtype)\n else:\n raise ValueError(""Unsupported dtype for inputs."")\n\ndef _normalize_layout(layout: str) -> AttentionLayout:\n layout_upper = layout.upper()\n if layout_upper in [""BSNH"", ""BNSH"", ""BTNH"", ""BNTH""]:\n return AttentionLayout[layout_upper.replace(""S"", ""T"")]\n else:\n raise ValueError(f""Unsupported qkv_layout: {layout}"")\n\ndef element_type_to_backend_config_type_mapping(dtype):\n _element_type_to_backend_config_type_mapping = {\n ir.BF16Type.get(): 
""BF16"",\n ir.F16Type.get(): ""F16"",\n }\n return _element_type_to_backend_config_type_mapping[dtype]\n\ndef default_layouts(*shapes):\n return [range(len(shape) - 1, -1, -1) for shape in shapes]\n\ndef get_max_seg_per_batch(q_offsets):\n return q_offsets.shape[1] - 1 if len(q_offsets.shape) == 2 else 1\n\ndef check_is_paged_attention(page_table_k):\n return len(page_table_k.shape) == 4\n\ndef create_dot_product_attention_backend_config_base(\n batch, num_heads, seq_q, seq_kv, dtype, fmha_scale, mask_type, layout, is_bwd\n):\n # Q, K, V: query, key, value in shape of BT(S)NH or BNT(S)H\n # P: BMM1 output in shape of BNTS\n # O: BMM2 output in the same shape with Q\n # BMM1: Q @ K -> P\n # BMM2: P @ V -> O\n # BMM1Grad1: dP @ Q -> dK\n # BMM1Grad2: dP @ K -> dQ\n # BMM2Grad1: P @ dO -> dV\n # BMM2Grad2: dO @ V -> dP\n cudnn_fmha_backend_config = {\n ""algorithm"": {\n ""algo_id"": ""0"",\n ""math_type"": ""TENSOR_OP_MATH"",\n ""tuning_knobs"": {""17"": ""1"", ""24"": ""0""},\n ""is_cudnn_frontend"": True,\n ""workspace_size"": ""0"",\n },\n ""fmha_scale"": fmha_scale,\n ""intermediate_tensor_shape"": {\n ""element_type"": element_type_to_backend_config_type_mapping(dtype),\n ""dimensions"": [str(batch), str(num_heads), str(seq_q), str(seq_kv)],\n ""tuple_shapes"": [],\n ""layout"": {\n ""dim_level_types"": [],\n ""dim_unique"": [],\n ""dim_ordered"": [],\n ""minor_to_major"": [""3"", ""2"", ""1"", ""0""],\n ""tiles"": [],\n ""element_size_in_bits"": ""0"",\n ""memory_space"": ""0"",\n ""index_primitive_type"": ""PRIMITIVE_TYPE_INVALID"",\n ""pointer_primitive_type"": ""PRIMITIVE_TYPE_INVALID"",\n ""dynamic_shape_metadata_prefix_bytes"": ""0"",\n },\n ""is_dynamic_dimension"": [False, False, False, False],\n },\n ""is_flash_attention"": True,\n ""mask_type"": convert_mask_type_to_string(mask_type),\n }\n\n # We define the contracting and batch dims in the format of\n # ((lhs_contracting_dims, rhs_contracting_dims), (lhs_batch_dims,\n # rhs_batch_dims)).\n if layout == AttentionLayout.BNTH.value:\n dims = [\n ((3, 3), ((0, 1), (0, 1))), # BMM1: BNTH,BNSH->BNTS\n ((3, 2), ((0, 1), (0, 1))), # BMM2: BNTS,BNSH->BNTH\n ((2, 2), ((0, 1), (0, 1))), # BMM1_grad_1: BNTS,BNTH->BNSH\n ((3, 2), ((0, 1), (0, 1))), # BMM1_grad_2: BNTS,BNSH->BNTH\n ((2, 2), ((0, 1), (0, 1))), # BMM2_grad_1: BNTS,BNTH->BNSH\n ((3, 3), ((0, 1), (0, 1))), # BMM2_grad_2: BNTH,BNSH->BNTS\n ]\n else:\n dims = [\n ((3, 3), ((0, 2), (0, 2))), # BMM1: BTNH,BSNH->BNTS\n ((3, 1), ((0, 1), (0, 2))), # BMM2: BNTS,BSNH->BTNH\n ((2, 1), ((0, 1), (0, 2))), # BMM1_grad_1: BNTS,BTNH->BSNH\n ((3, 1), ((0, 1), (0, 2))), # BMM1_grad_2: BNTS,BSNH->BTNH\n ((2, 1), ((0, 1), (0, 2))), # BMM2_grad_1: BNTS,BTNH->BSNH\n ((3, 3), ((0, 2), (0, 2))), # BMM2_grad_2: BTNH,BSNH->BNTS\n ]\n keys = [\n ""bmm1_dot_dimension_numbers"",\n ""bmm2_dot_dimension_numbers"",\n ""bmm1_grad_gemm1_dot_dimension_numbers"",\n ""bmm1_grad_gemm2_dot_dimension_numbers"",\n ""bmm2_grad_gemm1_dot_dimension_numbers"",\n ""bmm2_grad_gemm2_dot_dimension_numbers"",\n ]\n fwd_dot_number = {}\n bwd_dot_number = {}\n for idx, (key, ((lc, rc), (lb, rb))) in enumerate(zip(keys, dims)):\n dims_to_write = fwd_dot_number if idx < 2 else bwd_dot_number\n dims_to_write[key] = {\n ""lhs_contracting_dimensions"": [str(lc)],\n ""rhs_contracting_dimensions"": [str(rc)],\n ""lhs_batch_dimensions"": [str(i) for i in lb],\n ""rhs_batch_dimensions"": [str(i) for i in rb],\n }\n\n if is_bwd:\n cudnn_fmha_backend_config = {**cudnn_fmha_backend_config, **bwd_dot_number}\n else:\n 
cudnn_fmha_backend_config = {**cudnn_fmha_backend_config, **fwd_dot_number}\n backend_config = {\n ""operation_queue_id"":""0"",\n ""wait_on_operation_queues"":[],\n ""cudnn_fmha_backend_config"": cudnn_fmha_backend_config\n }\n return backend_config\n\ndef create_dot_product_attention_backend_config(\n batch,\n num_heads,\n seq_q,\n seq_kv,\n dtype,\n fmha_scale,\n seed,\n dropout_rate,\n mask_type,\n layout,\n sliding_window_length,\n max_seg_per_batch,\n is_paged_attention,\n is_bwd\n):\n backend_config = create_dot_product_attention_backend_config_base(\n batch, num_heads, seq_q, seq_kv, dtype,\n fmha_scale, mask_type, layout, is_bwd\n )\n if sliding_window_length is None:\n sliding_window_length = 0\n backend_config['cudnn_fmha_backend_config'][""dropout_rate""] = dropout_rate\n backend_config['cudnn_fmha_backend_config'][""seed""] = seed\n backend_config['cudnn_fmha_backend_config'][""sliding_window_length""] = sliding_window_length\n backend_config['cudnn_fmha_backend_config'][""max_seg_per_batch""] = max_seg_per_batch\n backend_config['cudnn_fmha_backend_config'][""is_paged_attention""] = is_paged_attention\n return json.dumps(backend_config)\n\ndef create_dot_product_attention_fp8_backend_config(\n batch, num_heads, seq_q, seq_kv, dtype, fmha_scale, mask_type, layout, is_bwd):\n backend_config = create_dot_product_attention_backend_config_base(\n batch, num_heads, seq_q, seq_kv, dtype, fmha_scale, mask_type, layout, is_bwd)\n return json.dumps(backend_config)\n\n# mapping from (is_bwd, has_dropout, has_bias) to custom call name\n_custom_name_maps = {\n # fMHA forward call targets.\n (False, False, False, False): ""__cudnn$fmhaSoftmax"",\n (False, False, True, False): ""__cudnn$fmhaScaleBiasSoftmax"",\n (False, True, False, False): ""__cudnn$fmhaSoftmaxDropout"",\n (False, True, True, False): ""__cudnn$fmhaScaleBiasSoftmaxDropout"",\n (False, False, False, True): ""__cudnn$fmhaSoftmaxF8"",\n # fMHA backward call targets.\n (True, False, False, False): ""__cudnn$fmhaSoftmaxBackward"",\n (True, False, True, False): ""__cudnn$fmhaScaleBiasSoftmaxBackward"",\n (True, True, False, False): ""__cudnn$fmhaSoftmaxDropoutBackward"",\n (True, True, True, False): ""__cudnn$fmhaScaleBiasSoftmaxDropoutBackward"",\n (True, False, False, True): ""__cudnn$fmhaSoftmaxBackwardF8"",\n}\n\ndef get_custom_call_name(has_bias, has_dropout, is_bwd, is_fp8=False):\n return _custom_name_maps[(is_bwd, has_dropout, has_bias, is_fp8)]\n\nget_fp8_custom_call_name = functools.partial(\n get_custom_call_name, has_bias=False, has_dropout=False, is_fp8=True\n)\n\ndef check_layout(query, key, value, bias, q_seqlen, kv_seqlen,\n q_offsets, kv_offsets, page_table_k, page_table_v, layout):\n def check_eq(a, b, c, msg):\n if not (a == b == c):\n raise ValueError(f""{msg} must be same, got {a}, {b}, {b}"")\n\n q_rank, k_rank, v_rank = len(query.shape), len(key.shape), len(value.shape)\n if q_rank != 4:\n raise ValueError(f""Q must have a rank of 4, got {q_rank}"")\n check_eq(q_rank, k_rank, v_rank, ""QKV rank"")\n\n q_dtype, k_dtype, v_dtype = query.dtype, key.dtype, value.dtype\n if q_dtype not in [np.float16, dtypes.bfloat16, dtypes.float8_e4m3fn, dtypes.float8_e5m2]:\n raise NotImplementedError(f""Q must be fp16/bf16/fp8_e4m3fn/fp8_e5m2, got {q_dtype}"")\n check_eq(q_dtype, k_dtype, v_dtype, ""QKV dtype"")\n\n if layout == AttentionLayout.BNTH:\n qB, qN, qT, qH = query.shape\n kB, kN, kS, kH = key.shape\n vB, vN, vS, vH = value.shape\n else:\n assert layout == AttentionLayout.BTNH\n qB, qT, qN, qH = query.shape\n kB, 
kS, kN, kH = key.shape\n vB, vS, vN, vH = value.shape\n\n if page_table_k is not None and page_table_v is not None:\n k_blocks, k_block_size = kB, kS\n v_blocks, v_block_size = vB, vS\n kB, _, k_blocks_per_batch, _ = page_table_k.shape\n vB, _, v_blocks_per_batch, _ = page_table_v.shape\n kS = k_blocks_per_batch * k_block_size\n vS = v_blocks_per_batch * v_block_size\n if kB * k_blocks_per_batch != k_blocks:\n raise ValueError(\n f""Key and page_table_k must have same number of blocks, ""\n f""got {k_blocks} vs {kB * k_blocks_per_batch}"")\n if vB * v_blocks_per_batch != v_blocks:\n raise ValueError(\n f""Value and page_table_v must have same number of blocks, ""\n f""got {v_blocks} vs {vB * v_blocks_per_batch}"")\n\n check_eq(qB, kB, vB, ""QKV batch"")\n if qH != kH:\n raise ValueError(f""QK must have same head dim, got {qH} vs {kH}"")\n if kN != vN:\n raise ValueError(f""KV must have same number of heads, got {kN} vs {vN}"")\n if kS != vS:\n raise ValueError(f""KV must have same seq length, got {kS} vs {vS}"")\n\n # check bias\n if bias is not None:\n _, _, bT, bS = bias.shape\n if bT != qT or bS != vS:\n raise ValueError(\n f""Bias must have same seq length as QKV, got {bT} and {bS}"")\n\n # check q_seqlen/kv_seqlen/q_offsets/kv_offsets\n expected_rank = 2 if q_offsets is not None else 1\n def check_seqlen_offsets(tensor, name):\n if tensor is not None:\n dtype = tensor.dtype\n rank = len(tensor.shape)\n if dtype != np.dtype('int32'):\n raise ValueError(f""{name} must have int32 datatype, got {dtype}"")\n if rank != expected_rank:\n raise ValueError(f""{name} must have a rank of {expected_rank}, got {rank}"")\n b = tensor.shape[0]\n if b != qB:\n raise ValueError(f""{name} must have same batch as Q, got {b}"")\n\n check_seqlen_offsets(q_seqlen, ""q_seqlen"")\n check_seqlen_offsets(kv_seqlen, ""kv_seqlen"")\n check_seqlen_offsets(q_offsets, ""q_offsets"")\n check_seqlen_offsets(kv_offsets, ""kv_offsets"")\n\n\ndef check_is_flash_attention(\n query, key, value, layout: int, cudnn_version, has_bias, is_training,\n is_packed=False, is_paged_attention=False, is_fp8=False):\n # Extract sequence length (T) and head dim (H) based on layout\n if layout == AttentionLayout.BNTH.value:\n _, _, T, qH = query.shape\n _, _, S, vH = value.shape\n else:\n _, T, _, qH = query.shape\n _, S, _, vH = value.shape\n\n if is_cuda_compute_capability_equal(""10.3"") and cudnn_version < 91100:\n # cudnn support compute_cap 10.3 on cudnn 9.11+\n raise NotImplementedError(\n ""Compute capability 10.3 requires cuDNN version >= 9.11."")\n\n # Flash attention conditions\n if is_fp8:\n # FP8 specific conditions\n if not ((is_training and qH == 128 and T % 128 == 0 and S % 128 == 0) or\n (not is_training and qH <= 256 and qH % 16 == 0)):\n raise NotImplementedError(\n f""Unsupported sequence length Q {T}, KV {S} and head dim {qH} for FP8.""\n )\n else:\n # bf16/fp16 attention conditions\n # Check the head dim.\n is_on_hopper = is_cuda_compute_capability_equal(""9.0"")\n H_max = 256 if cudnn_version >= 90500 and is_on_hopper else 128\n # check if multi-head latent attention is needed\n is_mla = qH != vH\n if not (qH <= H_max and qH % 8 == 0):\n raise NotImplementedError(\n f""The head dim must be <= {H_max} and a multiple of 8, ""\n f""but got {qH}.""\n )\n\n # Check patterns with bias, seqlen should be divisible by 2\n if (is_training and has_bias and (T % 2 != 0 or S % 2 != 0)):\n raise NotImplementedError(\n f""Unsupported sequence length Q {T}, KV {S}.""\n )\n\n if is_packed and (cudnn_version < 90600 or not 
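# --- Editorial worked example (illustrative numbers): for paged K/V with\n # batch B=2, block_size=16 and S=40 real tokens, Ceil(40/16)=3 blocks per\n # batch, so the key/value stores hold B*3=6 blocks and the kernel sees an\n # effective kv length of 3*16=48; the 8 trailing positions are padding\n # masked out via kv_seqlen:\n # import math\n # blocks_per_batch = math.ceil(40 / 16) # 3\n # num_blocks = 2 * blocks_per_batch # 6, leading dim of key/value\n # effective_kv_len = blocks_per_batch * 16 # 48\n 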
check_compute_capability(""9.0"")):\n raise NotImplementedError(\n ""Packed layout requires cudnn version >= 9.6 and at least hopper arch."")\n if is_paged_attention and cudnn_version < 90500:\n raise NotImplementedError(""Page attention requires cudnn version >= 9.5."")\n if is_mla and (cudnn_version < 91000 or not check_compute_capability(""9.0"")):\n raise NotImplementedError(\n ""mla requires cudnn version >= 9.10 and at least hopper arch."")\n\ndef check_cudnn_version():\n # check if cuDNN is installed\n if cuda_versions is None:\n raise RuntimeError(""cuDNN is not detected."")\n return cuda_versions.cudnn_get_version()\n\ndef check_compute_capability(capability):\n if not 'cuda' in xla_bridge.get_backend().platform_version:\n return False\n d, *_ = xla_bridge.local_devices(backend=""gpu"")\n target = tuple(int(x) for x in capability.split("".""))\n current = tuple(int(x) for x in d.compute_capability.split("".""))\n return current >= target\n\ndef is_cuda_compute_capability_equal(capability):\n if not 'cuda' in xla_bridge.get_backend().platform_version:\n return False\n d, *_ = xla_bridge.local_devices(backend=""gpu"")\n target = tuple(int(x) for x in capability.split("".""))\n current = tuple(int(x) for x in d.compute_capability.split("".""))\n return current == target\n\ndef _dot_product_attention_fwd(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v,\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, cudnn_version, return_residual):\n # check if flash attention is supported for this attention pattern\n check_is_flash_attention(\n query, key, value, layout, cudnn_version, bias is not None, False,\n get_max_seg_per_batch(q_offsets) > 1, check_is_paged_attention(page_table_k))\n outputs = _dot_product_attention_fwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=False or return_residual)\n if return_residual:\n return tuple(outputs)\n else:\n return outputs[0]\n\ndef _dot_product_attention_fwd_rule(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, cudnn_version,\n return_residual):\n # check if flash attention is supported for this attention pattern\n check_is_flash_attention(\n query, key, value, layout, cudnn_version, bias is not None, True,\n get_max_seg_per_batch(q_offsets) > 1)\n outputs = _dot_product_attention_fwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=True)\n res = (query, key, value, bias, q_seqlen, kv_seqlen, q_offsets,\n kv_offsets, page_table_k, page_table_v, outputs[1], outputs[0])\n if return_residual:\n return tuple(outputs), res\n else:\n return outputs[0], res\n\ndef _dot_product_attention_bwd_rule(\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, is_training, return_residual, res, grad_output):\n (query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, 
fwd_output) = res\n if return_residual:\n grad_output = grad_output[0]\n grads = _dot_product_attention_bwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale=scale, seed=seed, dropout_rate=dropout_rate, variadic_args=variadic_args,\n mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length\n )\n grads = (*grads,) + (None,) * (10 - len(grads))\n return grads\n\ndef _fix_seqlen_offsets(q_seqlen, kv_seqlen, q_offsets, kv_offsets, query, key):\n # fix seqlen and offsets to what cuDNN expects in sequence packing.\n # cuDNN expects seqlen to have shape [S] where S is the total number of segments\n # while the SDPA API accepts seqlen with shape [B, M] where B is the batch and M\n # is the maximum number of segments of one batch. B x M is larger than S and seqlen\n # is filled with -1 for padded regions. Therefore, we need to shift all non-negative\n # values to the left side to form a correct seqlen. Similar layout is required for\n # offsets tensors.\n # cuDNN expects offsets to have an offset for each segment starting from the first segment\n # while the SDPA API accepts offsets to have an offset for each segment starting from the\n # current batch, therefore we need to calculate the cumulative offset of each segment\n # starting from the first segment.\n def _shift_to_left(x, fill_value):\n # shift any non-negative value to the left\n # [[1, 3, -1, -1], [2, 3, 4, -1]]\n # -> [[1, 3, 2, 3], [4, -1, -1, -1]]\n x_shape = x.shape\n x = x.flatten()\n size = x.size\n indices = jnp.nonzero(x >= 0, size=size, fill_value=size)[0]\n y = jnp.take(x, indices, fill_value=fill_value)\n return jnp.reshape(y, x_shape)\n\n def _cu_offset(offsets, max_seq):\n # calculate cumulative offset by batch\n # [[1, 3, 5, 7], [4, 5, -1, -1]], max_seq = 8\n # -> [[1, 3, 5, 7], [12, 13, -1, -1]]\n batch = offsets.shape[0]\n offsets = jnp.where(\n offsets >= 0,\n offsets + (jnp.arange(batch, dtype=offsets.dtype) * max_seq)[..., np.newaxis],\n offsets,\n )\n return offsets\n\n if get_max_seg_per_batch(q_offsets) > 1:\n B, T, N, H = query.shape\n _, S, _, _ = key.shape\n\n q_seqlen = _shift_to_left(q_seqlen, 0)\n kv_seqlen = _shift_to_left(kv_seqlen, 0)\n\n q_offsets = _cu_offset(q_offsets, T)\n kv_offsets = _cu_offset(kv_offsets, S)\n q_offsets = _shift_to_left(q_offsets, B * T)\n kv_offsets = _shift_to_left(kv_offsets, B * S)\n\n # multiply by stride_per_token to get correct offsets\n # do it here because real stride changes after sharding\n q_offsets = q_offsets * N * H\n kv_offsets = kv_offsets * N * H\n\n return q_seqlen, kv_seqlen, q_offsets, kv_offsets\n\ndef _dot_product_attention_fwd_impl(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, is_training):\n # args: {Q, K, V, mask*, bias*}\n q_seqlen, kv_seqlen, q_offsets, kv_offsets = \\n _fix_seqlen_offsets(q_seqlen, kv_seqlen, q_offsets, kv_offsets, query, key)\n outputs = _dot_product_attention_fwd_p.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=is_training)\n return outputs\n\ndef _dot_product_attention_bwd_impl(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n 
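# --- Editorial, runnable illustration of the packing helpers above,\n # reproducing the examples from their comments:\n # import jax.numpy as jnp\n # x = jnp.array([[1, 3, -1, -1], [2, 3, 4, -1]])\n # idx = jnp.nonzero(x.flatten() >= 0, size=x.size, fill_value=x.size)[0]\n # jnp.take(x.flatten(), idx, fill_value=-1).reshape(x.shape)\n # # -> [[1, 3, 2, 3], [4, -1, -1, -1]] (_shift_to_left)\n # offs = jnp.array([[1, 3, 5, 7], [4, 5, -1, -1]])\n # offs + (jnp.arange(2) * 8)[:, None] # applied only where offs >= 0\n # # -> [[1, 3, 5, 7], [12, 13, -1, -1]] (_cu_offset with max_seq=8)\n 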
page_table_k, page_table_v, activation, fwd_output, grad_output, scale,\n seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length):\n q_seqlen, kv_seqlen, q_offsets, kv_offsets = \\n _fix_seqlen_offsets(q_seqlen, kv_seqlen, q_offsets, kv_offsets, query, key)\n grads = _dot_product_attention_bwd_p.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale=scale, seed=seed,\n dropout_rate=dropout_rate, variadic_args=variadic_args,\n mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length)\n return grads\n\ndef _dot_product_attention_fwd_abstract(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, *, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, is_training):\n if layout == AttentionLayout.BNTH.value:\n B, N, T, _ = query.shape\n _, _, S, H = value.shape\n output_shape = (B, N, T, H)\n else:\n B, T, N, _ = query.shape\n _, S, _, H = value.shape\n output_shape = (B, T, N, H)\n\n max_seg_per_batch = get_max_seg_per_batch(q_offsets)\n softmax_stat_shape = (B * max_seg_per_batch, N, T)\n\n if is_training:\n return (\n core.ShapedArray(output_shape, query.dtype), # output\n core.ShapedArray(softmax_stat_shape, np.float32), # softmax_stat\n )\n else:\n return (\n core.ShapedArray(output_shape, query.dtype), # output\n )\n\ndef _dot_product_attention_bwd_abstract(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output, *,\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length):\n _, has_dbias = variadic_args\n if has_dbias:\n # cuDNN supports bias for this case\n return (\n core.ShapedArray(\n query.shape, query.dtype\n ), # grad query\n core.ShapedArray(\n key.shape, key.dtype\n ), # grad key\n core.ShapedArray(\n value.shape, value.dtype\n ), # grad value\n core.ShapedArray(\n bias.shape, bias.dtype\n ), # grad bias\n )\n else:\n return (\n core.ShapedArray(\n query.shape, query.dtype\n ), # grad query\n core.ShapedArray(\n key.shape, key.dtype\n ), # grad key\n core.ShapedArray(\n value.shape, value.dtype\n ), # grad value\n )\n\ndef _dot_product_attention_fwd_cuda_lowering(\n ctx, query, key, value, bias, q_seqlen, kv_seqlen, q_offsets,\n kv_offsets, page_table_k, page_table_v, scale, seed, dropout_rate,\n variadic_args, mask_type, layout, sliding_window_length, is_training):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n value_type = ir.RankedTensorType(value.type)\n value_shape = value_type.shape\n\n if layout == AttentionLayout.BNTH.value:\n B, N, T, qk_H = query_shape\n _, _, S, v_H = value_shape\n output_layout = (3, 2, 1, 0)\n output_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, N, qk_H = query_shape\n _, S, _, v_H = value_shape\n output_layout = (3, 1, 2, 0)\n output_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n max_seg_per_batch = get_max_seg_per_batch(ir.RankedTensorType(q_offsets.type))\n is_paged_attention = check_is_paged_attention(ir.RankedTensorType(page_table_k.type))\n\n output_shape = (B, N, T, v_H)\n softmax_stat_shape = (B * max_seg_per_batch, N, T)\n workspace_shape = (0,)\n workspace_type = ir.IntegerType.get_unsigned(8)\n\n has_bias, _ = variadic_args\n backend_config = create_dot_product_attention_backend_config(\n B, N, T, S, query_type.element_type, scale, seed, 
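# --- Editorial shape sketch for the abstract evals above (illustrative\n # numbers): BTNH inputs with B=2, T=128, N=8, H=64 and 4 packed segments\n # per batch produce, in training mode,\n # output: (2, 128, 8, 64) # same layout as the query\n # softmax_stat: (2 * 4, 8, 128) # (B * max_seg_per_batch, N, T), float32\n 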
dropout_rate,\n mask_type, layout, sliding_window_length, max_seg_per_batch,\n is_paged_attention, is_bwd=False)\n # {Q, K, V, bias*, q_seqlen*, kv_seqlen*, q_offsets*, kv_offsets*}}\n # {output, activation*, workspace}\n has_dropout = dropout_rate > 0\n operands = [query, key, value]\n if has_bias:\n operands.append(bias)\n if has_padding(mask_type) or max_seg_per_batch > 1 or is_paged_attention:\n operands.append(q_seqlen)\n operands.append(kv_seqlen)\n if max_seg_per_batch > 1:\n operands.append(q_offsets)\n operands.append(kv_offsets)\n if is_paged_attention:\n operands.append(page_table_k)\n operands.append(page_table_v)\n\n custom_call_name = get_custom_call_name(has_bias, has_dropout, False)\n\n if is_training:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n ir.RankedTensorType.get(softmax_stat_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(workspace_shape, workspace_type),\n ]\n result_layouts = [output_layout] + default_layouts(softmax_stat_shape, workspace_shape)\n else:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n ir.RankedTensorType.get(workspace_shape, workspace_type)\n ]\n result_layouts = [output_layout] + default_layouts(workspace_shape)\n # create custom call here\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=default_layouts(\n *[ir.RankedTensorType(operand.type).shape for operand in operands]),\n result_layouts=result_layouts,\n )\n # drop workspace memory\n # output should be (B, T, N, H) instead of (B, N, T, H)\n if is_training:\n return [hlo.transpose(out.results[0], output_transpose_perm), out.results[1]]\n else:\n return [hlo.transpose(out.results[0], output_transpose_perm)]\n\ndef _dot_product_attention_bwd_cuda_lowering(\n ctx, query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n key_type = ir.RankedTensorType(key.type)\n value_type = ir.RankedTensorType(value.type)\n value_shape = value_type.shape\n\n if layout == AttentionLayout.BNTH.value:\n B, q_N, T, qk_H = query_shape\n _, v_N, S, v_H = value_shape\n grad_layout = (3, 2, 1, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, q_N, qk_H = query_shape\n _, S, v_N, v_H = value_shape\n grad_layout = (3, 1, 2, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n workspace_shape = (0,)\n workspace_type = ir.IntegerType.get_unsigned(8)\n\n grad_query_shape = (B, q_N, T, qk_H)\n grad_key_shape = (B, v_N, S, qk_H)\n grad_value_shape = (B, v_N, S, v_H)\n\n has_bias, has_dbias = variadic_args\n max_seg_per_batch = get_max_seg_per_batch(ir.RankedTensorType(q_offsets.type))\n backend_config = create_dot_product_attention_backend_config(\n B, q_N, T, S, query_type.element_type, scale, seed, dropout_rate,\n mask_type, layout, sliding_window_length, max_seg_per_batch,\n False, is_bwd=True)\n # {Q, K, V, activation, dO, bias*, O, q_seqlen*, kv_seqlen*,\n # q_offsets*, kv_offsets*}\n # {dQ, dK, dV, dbias*, workspace}\n has_dropout = dropout_rate > 0\n # create operands\n operands = [query, key, value, activation, grad_output]\n if has_bias:\n # flash attention requires bias in the bwd for remat\n operands.append(bias)\n operands.append(fwd_output)\n if 
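# --- Editorial note on the output transpose above: the cuDNN kernel emits\n # BNTH, so a BTNH-layout caller gets its result permuted back, e.g.\n # import jax.numpy as jnp\n # o = jnp.zeros((2, 8, 128, 64)) # (B, N, T, H) from the kernel\n # jnp.transpose(o, (0, 2, 1, 3)).shape # (2, 128, 8, 64) = BTNH\n 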
has_padding(mask_type) or max_seg_per_batch > 1:\n operands.append(q_seqlen)\n operands.append(kv_seqlen)\n if max_seg_per_batch > 1:\n operands.append(q_offsets)\n operands.append(kv_offsets)\n # get custom call name\n custom_call_name = get_custom_call_name(has_bias, has_dropout, True)\n\n # create output types and layouts\n # grad_query, grad_key, grad_value\n result_types = [\n ir.RankedTensorType.get(grad_query_shape, query_type.element_type),\n ir.RankedTensorType.get(grad_key_shape, key_type.element_type),\n ir.RankedTensorType.get(grad_value_shape, value_type.element_type),\n ]\n result_layouts = [grad_layout, grad_layout, grad_layout]\n bias_type = ir.RankedTensorType(bias.type)\n bias_shape = bias_type.shape\n if has_dbias:\n # cuDNN supports bias for this case\n result_types.append(\n ir.RankedTensorType.get(bias_shape, bias_type.element_type))\n result_layouts = result_layouts + default_layouts(bias_shape)\n # workspace\n result_types.append(ir.RankedTensorType.get(workspace_shape, workspace_type))\n result_layouts = result_layouts + default_layouts(workspace_shape)\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=default_layouts(\n *[ir.RankedTensorType(operand.type).shape for operand in operands]),\n result_layouts=result_layouts,\n )\n dqkv = (hlo.transpose(out.results[0], grad_transpose_perm),\n hlo.transpose(out.results[1], grad_transpose_perm),\n hlo.transpose(out.results[2], grad_transpose_perm))\n # Only keep dQ, dK, dV and dBias here\n if has_dbias:\n return dqkv + (out.results[3],)\n else:\n return dqkv\n\n# batcher\ndef _check_valid_batch_dims(bdims):\n for dim in bdims:\n if dim not in [0, None]:\n raise NotImplementedError(\n f""Currently only support batch_dim in [0, None], but got {dim=}"")\n\ndef _dot_product_attention_fwd_batcher(\n batched_args, batch_dims, *, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, is_training):\n _check_valid_batch_dims(batch_dims)\n query, key, value, bias, q_seqlen, kv_seqlen, \\n q_offsets, kv_offsets, page_table_k, page_table_v = batched_args\n query_bdim = batch_dims[0]\n if is_training:\n out_bdims = query_bdim, query_bdim\n else:\n out_bdims = (query_bdim,)\n\n if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B = math.prod(Bs)\n has_bias, _ = variadic_args\n original_shape = query.shape\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n if has_bias and batch_dims[3] is not None:\n bias = jnp.reshape(bias, (B, N, T, S))\n if has_padding(mask_type):\n q_seqlen = jnp.reshape(q_seqlen, (B, ))\n kv_seqlen = jnp.reshape(kv_seqlen, (B, ))\n\n outputs = _dot_product_attention_fwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length, is_training=is_training)\n\n # reshape to original shape\n output = outputs[0]\n output = jnp.reshape(output, original_shape)\n if is_training:\n activation = outputs[1]\n activation = jnp.reshape(activation, (*Bs, N, T))\n return (output, activation), out_bdims\n else:\n return (output,), out_bdims\n\ndef 
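# --- Editorial sketch of the batcher's flattening above: extra vmap batch\n # dims collapse into one leading dim before binding the 4D primitive,\n # then outputs are reshaped back:\n # import math; import jax.numpy as jnp\n # Bs, T, N, H = (3, 5), 16, 4, 32\n # q = jnp.zeros((*Bs, T, N, H)) # rank-5 input produced by vmap\n # q4 = jnp.reshape(q, (math.prod(Bs), T, N, H)) # what the kernel sees\n # out = jnp.reshape(q4, (*Bs, T, N, H)) # restore the leading dims\n 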
_dot_product_attention_bwd_batcher(\n batched_args, batch_dims, *, scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length):\n _check_valid_batch_dims(batch_dims)\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets, \\n page_table_k, page_table_v, activation, fwd_output, grad_output = batched_args\n query_bdim = batch_dims[0]\n out_bdims = query_bdim, query_bdim, query_bdim\n\n if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B = math.prod(Bs)\n has_bias, has_dbias = variadic_args\n # Reset the has_dbias if the combined batch size is not 1, because cuDNN only\n # supports dbias with a single batch. In this case, an all-zero dbias will be\n # appended instead.\n if B > 1:\n variadic_args = (has_bias, False)\n original_query_shape = query.shape\n original_key_shape = key.shape\n original_value_shape = value.shape\n original_bias_shape = bias.shape if has_bias else None\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n if has_bias and batch_dims[3] is not None:\n bias = jnp.reshape(bias, (B, N, T, S))\n if has_padding(mask_type):\n q_seqlen = jnp.reshape(q_seqlen, (B, ))\n kv_seqlen = jnp.reshape(kv_seqlen, (B, ))\n\n activation = jnp.reshape(activation, (B, N, T))\n fwd_output = jnp.reshape(fwd_output, (B,) + query.shape[-3:])\n grad_output = jnp.reshape(grad_output, (B,) + query.shape[-3:])\n\n grads = _dot_product_attention_bwd_p_wrapper.bind(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n page_table_k, page_table_v, activation, fwd_output, grad_output,\n scale=scale, seed=seed, dropout_rate=dropout_rate, variadic_args=variadic_args,\n mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length,\n )\n\n # reshape to original shape\n grads[0] = jnp.reshape(grads[0], original_query_shape)\n grads[1] = jnp.reshape(grads[1], original_key_shape)\n grads[2] = jnp.reshape(grads[2], original_value_shape)\n if has_dbias:\n assert has_bias\n if variadic_args[1]:\n grads[3] = jnp.reshape(grads[3], original_bias_shape)\n else:\n grads.append(jnp.zeros(original_bias_shape, bias.dtype))\n out_bdims += (batch_dims[3],)\n return grads, out_bdims\n\n# custom partitioning\ndef _get_padded_spec(arg_info):\n spec = None if arg_info.sharding is None else arg_info.sharding.spec\n ndim = arg_info.ndim\n if spec is None:\n return (None,) * ndim\n assert len(spec) <= ndim\n return spec + (None,) * (ndim - len(spec))\n\ndef _check_qkv_bias_mask_spec(\n query_spec, key_spec, value_spec, bias_spec, layout):\n # check qkv spec\n if not query_spec == key_spec == value_spec:\n raise ValueError(""Query, key and value should have same sharding."")\n if layout == AttentionLayout.BNTH.value:\n *batch_spec, num_head_spec, q_seq_spec, head_spec = query_spec\n else:\n *batch_spec, q_seq_spec, num_head_spec, head_spec = query_spec\n if q_seq_spec is not None:\n raise ValueError(""Sharding on sequence dim is not allowed."")\n if head_spec is not None:\n raise ValueError(""Sharding on head dim is not allowed."")\n # check bias spec\n if bias_spec:\n *bias_batch_spec, bias_num_head_spec, bias_q_seq_spec, bias_kv_seq_spec = bias_spec\n if any(bias_batch_spec) and bias_batch_spec != batch_spec or \\n bias_num_head_spec is not None and bias_num_head_spec != num_head_spec:\n raise ValueError(\n ""Query 
and bias should have same sharding on batch and num_head dim."")\n if bias_q_seq_spec is not None or bias_kv_seq_spec is not None:\n raise ValueError(""Sharding on bias sequence dim is not allowed."")\n\n\n# fwd custom partition\ndef _infer_fwd_output_sharding(mesh, arg_shapes, variadic_args, is_training, layout):\n # only sharding on batch and num_head dim is allowed\n # (*batch, q_seq, num_head, head)\n query_spec = _get_padded_spec(arg_shapes[0])\n # (*batch, kv_seq, num_head, head)\n key_spec = _get_padded_spec(arg_shapes[1])\n value_spec = _get_padded_spec(arg_shapes[2])\n has_bias, _ = variadic_args\n bias_spec = _get_padded_spec(arg_shapes[3]) if has_bias else None\n\n _check_qkv_bias_mask_spec(\n query_spec, key_spec, value_spec, bias_spec, layout)\n # keep out sharding same as query sharding since they have same shape\n out_sharding = NamedSharding(mesh, PartitionSpec(*query_spec))\n if is_training:\n # activation sharding\n if layout == AttentionLayout.BNTH.value:\n *batch_spec, num_head_spec, q_seq_spec, _ = query_spec\n else:\n *batch_spec, q_seq_spec, num_head_spec, _ = query_spec\n activation_sharding = NamedSharding(\n mesh, PartitionSpec(*batch_spec, num_head_spec, q_seq_spec, None))\n return [out_sharding, activation_sharding]\n return [out_sharding]\n\ndef _fwd_shardy_rule(value_types, result_types, layout, is_training, is_fp8):\n num_args = len(value_types)\n # We only need the query and value sharding, so use placeholders for the remaining args.\n input_sharding = [ArrayMapping(f'{BATCHING}{n}') for n in range(num_args)]\n if layout == AttentionLayout.BNTH.value:\n input_sharding[0] = ArrayMapping('batch', 'nhead', 'qseq', 'head')\n else:\n input_sharding[0] = ArrayMapping('batch', 'qseq', 'nhead', 'head')\n input_sharding[2] += ('v',)\n\n # The major dimensions are sharded like the query, the minor like the value.\n output_sharding = (ArrayMapping(*input_sharding[0][:-1], 'v'),)\n if is_fp8:\n # `amax` is a scalar.\n amax = ArrayMapping(f'{BATCHING}{num_args}')\n output_sharding += (amax, amax)\n factor_sizes = {}\n if is_training:\n # Activation sharding.\n if result_types[-1].shape[0] == value_types[0].shape[0]:\n output_sharding += (ArrayMapping('batch', 'nhead', 'qseq'),)\n else:\n factor_sizes['n'] = result_types[-1].shape[0] // value_types[0].shape[0]\n output_sharding += (ArrayMapping(CompoundFactor('batch', 'n'), 'nhead', 'qseq'),)\n return SdyShardingRule(tuple(input_sharding), output_sharding, **factor_sizes)\n\n_dot_product_attention_fwd_lower = custom_partitioning(\n _dot_product_attention_fwd_impl, static_argnums=(10, 11, 12, 13, 14, 15, 16, 17))\n\ndef _dot_product_attention_fwd_infer_sharding_from_operands(\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length,\n is_training, mesh, arg_shapes, result_shape):\n return _infer_fwd_output_sharding(mesh, arg_shapes, variadic_args, is_training, layout)\n\ndef _dot_product_attention_fwd_shardy_rule(\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length,\n is_training, mesh, value_types, result_types):\n return _fwd_shardy_rule(value_types, result_types, layout, is_training, is_fp8=False)\n\ndef _dot_product_attention_fwd_partition(\n scale, seed, dropout_rate, variadic_args, mask_type, layout, sliding_window_length,\n is_training, mesh, arg_shapes, result_shape):\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n out_shardings = _infer_fwd_output_sharding(\n mesh, arg_shapes, variadic_args, is_training, layout)\n impl 
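# --- Editorial example of a sharding the checks above accept: batch and\n # num_head dims may be sharded; sequence and per-head dims must be\n # replicated (mesh axis names 'data' and 'model' are assumed):\n # from jax.sharding import PartitionSpec\n # ok = PartitionSpec('data', None, 'model', None) # (B, T, N, H)\n # bad = PartitionSpec('data', 'seq', None, None) # seq sharding rejected\n 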
= functools.partial(\n _dot_product_attention_fwd_impl,\n scale=scale,\n seed=seed,\n dropout_rate=dropout_rate,\n variadic_args=variadic_args,\n mask_type=mask_type,\n layout=layout,\n sliding_window_length=sliding_window_length,\n is_training=is_training,\n )\n return mesh, impl, out_shardings, arg_shardings\n\n# bwd custom partition\ndef _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args):\n # (*batch, q_seq, num_head, head)\n query_spec = _get_padded_spec(arg_shapes[0])\n # (*batch, kv_seq, num_head, head)\n key_spec = _get_padded_spec(arg_shapes[1])\n value_spec = _get_padded_spec(arg_shapes[2])\n has_bias, has_dbias = variadic_args\n bias_spec = _get_padded_spec(arg_shapes[3]) if has_bias else None\n _check_qkv_bias_mask_spec(\n query_spec, key_spec, value_spec, bias_spec, layout)\n # keep grad query sharding same as query sharding\n grad_query_sharding = NamedSharding(mesh, PartitionSpec(*query_spec))\n grad_key_sharding = NamedSharding(mesh, PartitionSpec(*key_spec))\n grad_value_sharding = NamedSharding(mesh, PartitionSpec(*key_spec))\n out_shardings = [grad_query_sharding, grad_key_sharding, grad_value_sharding]\n if has_dbias:\n grad_bias_sharding = NamedSharding(mesh, PartitionSpec(*bias_spec))\n out_shardings = out_shardings + [grad_bias_sharding]\n return out_shardings\n\ndef _bwd_shardy_rule(num_args, has_dbias, is_fp8):\n input_sharding = tuple(ArrayMapping(f'{BATCHING}{n}') for n in range(num_args))\n\n if has_dbias:\n output_sharding = input_sharding[0:4]\n else:\n output_sharding = input_sharding[0:3]\n if is_fp8:\n amax = ArrayMapping(f'{BATCHING}{num_args}')\n output_sharding += (amax, amax, amax, amax)\n return SdyShardingRule(input_sharding, output_sharding)\n\n_dot_product_attention_bwd_lower = custom_partitioning(\n _dot_product_attention_bwd_impl, static_argnums=(13, 14, 15, 16, 17, 18, 19)\n)\n\ndef _dot_product_attention_bwd_infer_sharding_from_operands(\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, mesh, arg_shapes, result_shape):\n return _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args)\n\ndef _dot_product_attention_bwd_shardy_rule(\n scale, seed, dropout_rate, variadic_args,\n mask_type, layout, sliding_window_length, mesh, value_types, result_types):\n _, has_dbias = variadic_args\n return _bwd_shardy_rule(len(value_types), has_dbias, is_fp8=False)\n\ndef _dot_product_attention_bwd_partition(\n scale, seed, dropout_rate, variadic_args, mask_type, layout,\n sliding_window_length, mesh, arg_shapes, result_shape):\n out_shardings = _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args)\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n def sharded_impl(*args):\n impl = functools.partial(\n _dot_product_attention_bwd_impl,\n scale=scale,\n seed=seed,\n dropout_rate=dropout_rate,\n variadic_args=variadic_args,\n mask_type=mask_type,\n layout=layout,\n sliding_window_length=sliding_window_length,\n )\n grads = impl(*args)\n _, has_dbias = variadic_args\n if has_dbias:\n query_spec = arg_shardings[0].spec\n batch_spec = query_spec[0]\n local_dbias = grads[3]\n global_dbias = lax_parallel.psum(local_dbias, batch_spec)\n grads = grads[:3] + [global_dbias]\n return grads\n return mesh, sharded_impl, out_shardings, arg_shardings\n\n# Create dot_product_attention_fwd_p for forward operation.\n_dot_product_attention_fwd_p = core.Primitive(""dot_product_attention_fwd"")\n_dot_product_attention_fwd_p.multiple_results = 
True\n_dot_product_attention_fwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_fwd_p)\n)\n_dot_product_attention_fwd_p.def_abstract_eval(\n _dot_product_attention_fwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_fwd_p,\n _dot_product_attention_fwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_fwd_p_wrapper = core.Primitive(\n ""dot_product_attention_fwd_wrapper""\n)\n_dot_product_attention_fwd_p_wrapper.multiple_results = True\n_dot_product_attention_fwd_p_wrapper.def_impl(_dot_product_attention_fwd_impl)\n_dot_product_attention_fwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_fwd_abstract\n)\n\n# Create dot_product_attention_bwd_p for backward operation.\n_dot_product_attention_bwd_p = core.Primitive(""dot_product_attention_bwd"")\n_dot_product_attention_bwd_p.multiple_results = True\n_dot_product_attention_bwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_bwd_p)\n)\n_dot_product_attention_bwd_p.def_abstract_eval(\n _dot_product_attention_bwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_bwd_p,\n _dot_product_attention_bwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_bwd_p_wrapper = core.Primitive(\n ""dot_product_attention_bwd_wrapper""\n)\n_dot_product_attention_bwd_p_wrapper.multiple_results = True\n_dot_product_attention_bwd_p_wrapper.def_impl(_dot_product_attention_bwd_impl)\n_dot_product_attention_bwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_bwd_abstract\n)\n\nbatching.primitive_batchers[\n _dot_product_attention_fwd_p_wrapper\n] = _dot_product_attention_fwd_batcher\nbatching.primitive_batchers[\n _dot_product_attention_bwd_p_wrapper\n] = _dot_product_attention_bwd_batcher\n\n_dot_product_attention_fwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_fwd_infer_sharding_from_operands,\n partition=_dot_product_attention_fwd_partition,\n sharding_rule=_dot_product_attention_fwd_shardy_rule)\n\nmlir.register_lowering(_dot_product_attention_fwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_fwd_lower, multiple_results=True))\n\n_dot_product_attention_bwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_bwd_infer_sharding_from_operands,\n partition=_dot_product_attention_bwd_partition,\n sharding_rule=_dot_product_attention_bwd_shardy_rule)\n\nmlir.register_lowering(_dot_product_attention_bwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_bwd_lower, multiple_results=True))\n\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fwd_p_wrapper\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_bwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_bwd_p_wrapper\n)\n\n@functools.partial(custom_derivatives.custom_vjp, nondiff_argnums=(10, 11, 12, 13, 14, 15, 16, 17, 18))\ndef _dot_product_attention(query: Array,\n key: Array,\n value: Array,\n bias: Array,\n q_seqlen: Array,\n kv_seqlen: Array,\n q_offsets: Array,\n kv_offsets: Array,\n page_table_k: Array,\n page_table_v: Array,\n scale: float,\n seed: int,\n dropout_rate: float,\n variadic_args: tuple[bool, ...],\n mask_type: bool,\n layout: int,\n sliding_window_length: int | None,\n cudnn_version: int,\n return_residual: bool):\n output = _dot_product_attention_fwd(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n 
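# --- Editorial note on the registration pattern above: the inner primitive\n # carries the CUDA lowering, while the *_wrapper primitive is lowered via\n # mlir.lower_fun(custom_partitioning(...)) so SPMD partitioning can rewrite\n # shardings before the custom call is emitted. A minimal sketch with an\n # assumed name (core here is jax._src.core, as imported by this module):\n # demo_p = core.Primitive('demo'); demo_p.multiple_results = True\n # demo_p.def_impl(lambda x: (x,)); demo_p.def_abstract_eval(lambda x: (x,))\n 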
page_table_k, page_table_v, scale=scale, seed=seed, dropout_rate=dropout_rate,\n variadic_args=variadic_args, mask_type=mask_type, layout=layout,\n sliding_window_length=sliding_window_length,\n cudnn_version=cudnn_version, return_residual=return_residual)\n return output\n\n_dot_product_attention.defvjp(\n _dot_product_attention_fwd_rule, _dot_product_attention_bwd_rule\n)\n\nfp8_params_keys = [\n 'amax_dQ', 'amax_dK', 'amax_dV', 'amax_dP', # place holder for bwd output\n 'descale_q', 'descale_k', 'descale_v', 'descale_s',\n 'scale_s', 'scale_o', 'descale_o', 'descale_dO',\n 'descale_dP', 'scale_dQ', 'scale_dK', 'scale_dV',\n 'scale_dP'\n]\n\nfp8_params_keys_fwd = [\n 'descale_q', 'descale_k', 'descale_v', 'descale_s', 'scale_s', 'scale_o'\n]\nfp8_params_keys_bwd = [\n 'descale_q', 'descale_k', 'descale_v', 'descale_o', 'descale_dO', 'descale_s',\n 'descale_dP', 'scale_s', 'scale_dQ', 'scale_dK', 'scale_dV', 'scale_dP',\n]\nparams_from_keys = lambda params, keys: [params[key] for key in keys]\n\ndef check_fp8_params(params):\n # Check if all required keys are present\n missing_keys = set(fp8_params_keys) - set(params)\n if missing_keys:\n raise ValueError(f""The following keys are missing from fp8_params: {', '.join(missing_keys)}"")\n\ncheck_is_flash_attention_fp8 = functools.partial(\n check_is_flash_attention,\n has_bias=False,\n is_fp8=True\n)\n\ndef _dot_product_attention_fp8_fwd(\n query, key, value,\n fp8_params_fwd,\n scale, use_causal_mask, layout, cudnn_version):\n check_is_flash_attention_fp8(\n query, key, value, layout, cudnn_version, is_training=False)\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o = fp8_params_fwd\n outputs = _dot_product_attention_fp8_fwd_p_wrapper.bind(\n query, key, value,\n descale_q, descale_k, descale_v, descale_s,\n scale_s, scale_o,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout, is_training=False)\n return outputs\n\ndef _dot_product_attention_fp8_fwd_rule(\n query, key, value,\n fp8_params,\n scale, use_causal_mask, layout, cudnn_version):\n check_is_flash_attention_fp8(\n query, key, value, layout, cudnn_version, is_training=True)\n\n outputs = _dot_product_attention_fp8_fwd_p_wrapper.bind(\n query, key, value, *params_from_keys(fp8_params, fp8_params_keys_fwd),\n scale=scale, use_causal_mask=use_causal_mask, layout=layout, is_training=True)\n res = (query, key, value, outputs[3], outputs[0], params_from_keys(fp8_params, fp8_params_keys_bwd))\n return (outputs[0], outputs[1], outputs[2]), res\n\ndef _dot_product_attention_fp8_bwd_rule(\n scale, use_causal_mask, layout, cudnn_version, res, g):\n (query, key, value, activation, fwd_output, aux_params) = res\n grad_output = g[0]\n grads = _dot_product_attention_fp8_bwd_p_wrapper.bind(\n query,\n key,\n value,\n fwd_output,\n grad_output,\n activation,\n *aux_params,\n scale=scale,\n use_causal_mask=use_causal_mask,\n layout=layout,\n )\n\n fp8_params_grads = dict.fromkeys(fp8_params_keys)\n keys_to_grad_indices = ['amax_dQ', 'amax_dK', 'amax_dV', 'amax_dP']\n # grads structure: (dQ, dK, dV, amax_dq, amax_dk, amax_dv, amax_dp)\n for i, key in enumerate(keys_to_grad_indices, start=3):\n fp8_params_grads[key] = grads[i]\n\n return (grads[0], grads[1], grads[2], fp8_params_grads)\n\ndef _dot_product_attention_fp8_fwd_impl(\n query, key, value,\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale, use_causal_mask, layout, is_training):\n outputs = _dot_product_attention_fp8_fwd_p.bind(\n query,\n key,\n value,\n descale_q,\n descale_k,\n descale_v,\n 
descale_s,\n scale_s,\n scale_o,\n scale=scale,\n use_causal_mask=use_causal_mask,\n layout=layout,\n is_training=is_training,\n )\n return outputs\n\ndef _dot_product_attention_fp8_bwd_impl(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale, use_causal_mask, layout):\n grads = _dot_product_attention_fp8_bwd_p.bind(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout)\n return grads\n\n\ndef _dot_product_attention_fp8_fwd_abstract(\n query, key, value,\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale, use_causal_mask, layout, is_training):\n if layout == AttentionLayout.BNTH.value:\n B, N, T, _ = query.shape\n _, _, S, _ = key.shape\n else:\n B, T, N, _ = query.shape\n _, S, _, _ = key.shape\n output_shape = query.shape\n softmax_stat_shape = (B, N, T)\n\n # output, amax_s, amax_o[, softmax_stat]\n if is_training:\n return (\n core.ShapedArray(output_shape, query.dtype),\n core.ShapedArray((1,1,1,1), np.float32),\n core.ShapedArray((1,1,1,1), np.float32),\n core.ShapedArray(softmax_stat_shape, np.float32),\n )\n else:\n return (\n core.ShapedArray(output_shape, query.dtype),\n core.ShapedArray((1,1,1,1), np.float32),\n core.ShapedArray((1,1,1,1), np.float32),\n )\n\ndef _dot_product_attention_fp8_bwd_abstract(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale, use_causal_mask, layout):\n amax_shape = (1,1,1,1)\n return (\n core.ShapedArray(query.shape, query.dtype),\n core.ShapedArray(key.shape, key.dtype),\n core.ShapedArray(value.shape, value.dtype),\n core.ShapedArray(amax_shape, np.float32),\n core.ShapedArray(amax_shape, np.float32),\n core.ShapedArray(amax_shape, np.float32),\n core.ShapedArray(amax_shape, np.float32),\n )\n\ndef _dot_product_attention_fp8_fwd_cuda_lowering(\n ctx, query, key, value,\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale, use_causal_mask, layout, is_training):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n key_type = ir.RankedTensorType(key.type)\n key_shape = key_type.shape\n\n if layout == AttentionLayout.BNTH.value:\n B, N, T, H = query_shape\n _, _, S, _ = key_shape\n output_layout = (3, 2, 1, 0)\n output_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, N, H = query_shape\n _, S, _, _ = key_shape\n output_layout = (3, 1, 2, 0)\n output_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n output_shape = (B, N, T, H)\n softmax_stat_shape = (B, N, T)\n workspace_shape = (0,)\n amax_shape = (1,1,1,1)\n workspace_type = ir.IntegerType.get_unsigned(8)\n mask_type = MaskType.CAUSAL if use_causal_mask else MaskType.NO_MASK\n backend_config = create_dot_product_attention_fp8_backend_config(\n B, N, T, S, ir.BF16Type.get(), # query_type.element_type,\n scale, mask_type, layout, is_bwd=False,\n )\n\n operands = [query, key, value, descale_q, descale_k, descale_v, descale_s, scale_s, scale_o]\n custom_call_name = get_fp8_custom_call_name(is_bwd=False)\n\n if is_training:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n 
ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get(softmax_stat_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(workspace_shape, workspace_type),\n ]\n result_layouts = [output_layout] + default_layouts(amax_shape, amax_shape, softmax_stat_shape, workspace_shape)\n else:\n result_types = [\n ir.RankedTensorType.get(output_shape, query_type.element_type),\n ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get((1,1,1,1), ir.F32Type.get()),\n ir.RankedTensorType.get(workspace_shape, workspace_type)\n ]\n result_layouts = [output_layout] + default_layouts(amax_shape, amax_shape, workspace_shape)\n\n operand_shapes = [ir.RankedTensorType(operand.type).shape for operand in operands[:3]]\n operand_shapes += [[1, 1, 1, 1]] * 6\n operand_layouts = default_layouts(*operand_shapes)\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=operand_layouts,\n result_layouts=result_layouts,\n )\n\n if is_training:\n return [hlo.transpose(out.results[0], output_transpose_perm), out.results[1], out.results[2], out.results[3]]\n else:\n return [hlo.transpose(out.results[0], output_transpose_perm), out.results[1], out.results[2]]\n\n\n\ndef _dot_product_attention_fp8_bwd_cuda_lowering(\n ctx, query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s,\n descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP, scale,\n use_causal_mask, layout):\n query_type = ir.RankedTensorType(query.type)\n query_shape = query_type.shape\n key_type = ir.RankedTensorType(key.type)\n key_shape = key_type.shape\n value_type = ir.RankedTensorType(value.type)\n\n if layout == AttentionLayout.BNTH.value:\n B, q_N, T, H = query_shape\n _, k_N, S, _ = key_shape\n grad_layout = (3, 2, 1, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 1, 2, 3))\n else:\n B, T, q_N, H = query_shape\n _, S, k_N, _ = key_shape\n grad_layout = (3, 1, 2, 0)\n grad_transpose_perm = mlir.dense_int_array((0, 2, 1, 3))\n\n workspace_shape = (0,)\n workspace_type = ir.IntegerType.get_unsigned(8)\n amax_shape = (1,1,1,1)\n\n grad_query_shape = (B, q_N, T, H)\n grad_key_shape = (B, k_N, S, H)\n grad_value_shape = (B, k_N, S, H)\n mask_type = MaskType.CAUSAL if use_causal_mask else MaskType.NO_MASK\n\n backend_config = create_dot_product_attention_fp8_backend_config(\n B, q_N, T, S, ir.BF16Type.get(),\n scale, mask_type, layout, is_bwd=True,\n )\n\n operands = [\n query,\n key,\n value,\n fwd_output,\n grad_output,\n activation,\n descale_q,\n descale_k,\n descale_v,\n descale_o,\n descale_dO,\n descale_s,\n descale_dP,\n scale_s,\n scale_dQ,\n scale_dK,\n scale_dV,\n scale_dP,\n ]\n\n custom_call_name = get_fp8_custom_call_name(is_bwd=True)\n\n result_types = [\n ir.RankedTensorType.get(grad_query_shape, query_type.element_type),\n ir.RankedTensorType.get(grad_key_shape, key_type.element_type),\n ir.RankedTensorType.get(grad_value_shape, value_type.element_type),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ir.RankedTensorType.get(amax_shape, ir.F32Type.get()),\n ]\n result_layouts = [grad_layout, grad_layout, grad_layout] + default_layouts(amax_shape, amax_shape, amax_shape, amax_shape)\n\n result_types.append(ir.RankedTensorType.get(workspace_shape, 
workspace_type))\n result_layouts = result_layouts + default_layouts(workspace_shape)\n out = mlir.custom_call(\n custom_call_name,\n result_types=result_types,\n operands=operands,\n backend_config=backend_config,\n operand_layouts=default_layouts(\n *[ir.RankedTensorType(operand.type).shape for operand in operands]),\n result_layouts=result_layouts,\n )\n dqkv_amaxs = (hlo.transpose(out.results[0], grad_transpose_perm),\n hlo.transpose(out.results[1], grad_transpose_perm),\n hlo.transpose(out.results[2], grad_transpose_perm),\n out.results[3], out.results[4], out.results[5], out.results[6])\n # Only keep dQ, dK, dV, amax_dQ, amax_dK, amax_dV, amax_dP here\n return dqkv_amaxs\n\ndef _dot_product_attention_fp8_fwd_batcher(\n batched_args, batch_dims, *, scale, use_causal_mask, layout, is_training):\n _check_valid_batch_dims(batch_dims)\n query, key, value,\\n descale_q, descale_k, descale_v, descale_s, scale_s, scale_o, = batched_args\n query_bdim = batch_dims[0]\n if is_training:\n out_bdims = query_bdim, query_bdim\n else:\n out_bdims = (query_bdim,)\n\n if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B = math.prod(Bs)\n\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n\n outputs = _dot_product_attention_fp8_fwd_p_wrapper.bind(\n query, key, value, descale_q, descale_k, descale_v, descale_s, scale_s, scale_o,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout, is_training=is_training)\n\n # reshape to original shape\n output, amax_s, amax_o = outputs[0], outputs[1], outputs[2]\n output = jnp.reshape(output, query.shape)\n if is_training:\n activation = outputs[3]\n activation = jnp.reshape(activation, (*Bs, N, T))\n return (output, amax_s, amax_o, activation), out_bdims\n else:\n return (output, amax_s, amax_o), out_bdims\n\ndef _dot_product_attention_fp8_bwd_batcher(\n batched_args, batch_dims, *, scale, use_causal_mask, layout):\n _check_valid_batch_dims(batch_dims)\n query, key, value, fwd_output, grad_output, activation,\\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s, descale_dP,\\n scale_s, scale_dQ, scale_dK, scale_dV, scale_dP = batched_args\n query_bdim = batch_dims[0]\n out_bdims = query_bdim, query_bdim, query_bdim\n\n if layout == AttentionLayout.BNTH.value:\n *Bs, N, T, _ = query.shape\n *_, _, S, _ = key.shape\n else:\n *Bs, T, N, _ = query.shape\n *_, S, _, _ = key.shape\n B = math.prod(Bs)\n\n # reshape to 4D shape\n query = jnp.reshape(query, (B,) + query.shape[-3:])\n key = jnp.reshape(key, (B,) + key.shape[-3:])\n value = jnp.reshape(value, (B,) + key.shape[-3:])\n\n activation = jnp.reshape(activation, (B, N, T))\n fwd_output = jnp.reshape(fwd_output, (B,) + query.shape[-3:])\n grad_output = jnp.reshape(grad_output, (B,) + query.shape[-3:])\n\n grads = _dot_product_attention_fp8_bwd_p_wrapper.bind(\n query, key, value, fwd_output, grad_output, activation,\n descale_q, descale_k, descale_v, descale_o, descale_dO, descale_s, descale_dP, scale_s, scale_dQ, scale_dK, scale_dV, scale_dP,\n scale=scale, use_causal_mask=use_causal_mask, layout=layout,\n )\n\n grad_query, grad_key, grad_value = grads[:3]\n # reshape to original shape\n grad_query = jnp.reshape(grad_query, query.shape)\n grad_key = jnp.reshape(grad_key, key.shape)\n grad_value = jnp.reshape(grad_value, value.shape)\n\n return grads, 
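# --- Editorial sketch of the FP8 convention behind the (1,1,1,1) amax\n # outputs above: each tensor carries a per-tensor scale, 'descale' restores\n # magnitudes, and amax feeds the next step's scale (values illustrative):\n # import jax.numpy as jnp\n # x = jnp.array([0.5, -2.0, 1.5], jnp.float32)\n # amax = jnp.max(jnp.abs(x)) # tracked as shape (1,1,1,1) above\n # scale = 448.0 / amax # e4m3 max magnitude is 448\n # x8 = (x * scale).astype(jnp.float8_e4m3fn)\n # x_back = x8.astype(jnp.float32) / scale # 'descale' on the way out\n 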
out_bdims\n\ndef _infer_fp8_fwd_output_sharding(mesh, arg_shapes, is_training, layout):\n # Prepare variadic_args for the original function\n has_bias = False # Adjust as needed\n variadic_args = (has_bias, None) # Dummy value, adjust as necessary\n\n # Call the original function with the required parameters\n output_sharding = _infer_fwd_output_sharding(mesh, arg_shapes, variadic_args, is_training, layout)\n amax_sharding = NamedSharding(mesh, PartitionSpec())\n if is_training:\n out_sharding, activation_sharding = output_sharding[0], output_sharding[1]\n return [out_sharding, amax_sharding, amax_sharding, activation_sharding]\n return output_sharding + [amax_sharding, amax_sharding]\n\n_dot_product_attention_fp8_fwd_lower = custom_partitioning(\n _dot_product_attention_fp8_fwd_impl, static_argnums=(9, 10, 11, 12))\n\ndef _dot_product_attention_fp8_fwd_infer_sharding_from_operands(\n scale, use_causal_mask, layout, is_training,\n mesh, arg_shapes, result_shape):\n return _infer_fp8_fwd_output_sharding(mesh, arg_shapes, is_training, layout)\n\ndef _dot_product_attention_fp8_fwd_partition(\n scale, use_causal_mask, layout, is_training,\n mesh, arg_shapes, result_shape):\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n out_shardings = _infer_fp8_fwd_output_sharding(\n mesh, arg_shapes, is_training, layout)\n impl = functools.partial(\n _dot_product_attention_fp8_fwd_impl, scale=scale, use_causal_mask=use_causal_mask,\n layout=layout, is_training=is_training)\n return mesh, impl, out_shardings, arg_shardings\n\ndef _dot_product_attention_fp8_fwd_shardy_rule(\n scale, use_causal_mask, layout, is_training,\n mesh, value_types, result_types):\n return _fwd_shardy_rule(value_types, result_types, layout, is_training, is_fp8=True)\n\ndef _infer_fp8_bwd_output_sharding(mesh, arg_shapes, layout):\n # Prepare variadic_args for the original function\n has_bias = False # Adjust as needed\n has_dbias = False # Adjust as needed\n variadic_args = (has_bias, has_dbias) # Dummy value, adjust as necessary\n\n # Call the original function with the required parameters\n output_shardings = _infer_bwd_output_sharding(mesh, arg_shapes, layout, variadic_args)\n\n # Prepare amax_sharding\n amax_sharding = NamedSharding(mesh, PartitionSpec()) # Use a default spec or adjust as needed\n\n # Append amax_sharding for each output sharding\n out_shardings_with_amax = output_shardings + [amax_sharding] * 4\n\n return out_shardings_with_amax\n\n_dot_product_attention_fp8_bwd_lower = custom_partitioning(\n _dot_product_attention_fp8_bwd_impl, static_argnums=(18,19,20)\n)\n\ndef _dot_product_attention_fp8_bwd_infer_sharding_from_operands(\n scale, use_causal_mask, layout, mesh,\n arg_shapes, result_shape):\n return _infer_fp8_bwd_output_sharding(mesh, arg_shapes, layout)\n\ndef _dot_product_attention_fp8_bwd_shardy_rule(\n scale, use_causal_mask, layout, mesh, value_types, result_types):\n return _bwd_shardy_rule(len(value_types), has_dbias=False, is_fp8=True)\n\ndef _dot_product_attention_fp8_bwd_partition(\n scale, use_causal_mask, layout, mesh,\n arg_shapes, result_shape):\n out_shardings = _infer_fp8_bwd_output_sharding(mesh, arg_shapes, layout)\n # args sharding\n arg_shardings = tuple(arg_i.sharding for arg_i in arg_shapes)\n impl = functools.partial(\n _dot_product_attention_fp8_bwd_impl, scale=scale,\n use_causal_mask=use_causal_mask, layout=layout\n )\n return mesh, impl, out_shardings, arg_shardings\n\n# Create dot_product_attention_fp8_fwd_p for forward 
operation.\n_dot_product_attention_fp8_fwd_p = core.Primitive(""dot_product_attention_fp8_fwd"")\n_dot_product_attention_fp8_fwd_p.multiple_results = True\n_dot_product_attention_fp8_fwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_fp8_fwd_p)\n)\n_dot_product_attention_fp8_fwd_p.def_abstract_eval(\n _dot_product_attention_fp8_fwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_fp8_fwd_p,\n _dot_product_attention_fp8_fwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_fp8_fwd_p_wrapper = core.Primitive(\n ""dot_product_attention_fp8_fwd_wrapper""\n)\n_dot_product_attention_fp8_fwd_p_wrapper.multiple_results = True\n_dot_product_attention_fp8_fwd_p_wrapper.def_impl(_dot_product_attention_fp8_fwd_impl)\n_dot_product_attention_fp8_fwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_fp8_fwd_abstract\n)\n\n# Create dot_product_attention_bwd_p for backward operation.\n_dot_product_attention_fp8_bwd_p = core.Primitive(""dot_product_attention_fp8_bwd"")\n_dot_product_attention_fp8_bwd_p.multiple_results = True\n_dot_product_attention_fp8_bwd_p.def_impl(\n functools.partial(dispatch.apply_primitive, _dot_product_attention_fp8_bwd_p)\n)\n_dot_product_attention_fp8_bwd_p.def_abstract_eval(\n _dot_product_attention_fp8_bwd_abstract\n)\n\nmlir.register_lowering(\n _dot_product_attention_fp8_bwd_p,\n _dot_product_attention_fp8_bwd_cuda_lowering,\n platform=""cuda"",\n)\n\n_dot_product_attention_fp8_bwd_p_wrapper = core.Primitive(\n ""dot_product_attention_fp8_bwd_wrapper""\n)\n_dot_product_attention_fp8_bwd_p_wrapper.multiple_results = True\n_dot_product_attention_fp8_bwd_p_wrapper.def_impl(_dot_product_attention_fp8_bwd_impl)\n_dot_product_attention_fp8_bwd_p_wrapper.def_abstract_eval(\n _dot_product_attention_fp8_bwd_abstract\n)\n\nbatching.primitive_batchers[\n _dot_product_attention_fp8_fwd_p_wrapper\n] = _dot_product_attention_fp8_fwd_batcher\nbatching.primitive_batchers[\n _dot_product_attention_fp8_bwd_p_wrapper\n] = _dot_product_attention_fp8_bwd_batcher\n\n_dot_product_attention_fp8_fwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_fp8_fwd_infer_sharding_from_operands,\n partition=_dot_product_attention_fp8_fwd_partition,\n sharding_rule=_dot_product_attention_fp8_fwd_shardy_rule)\n\nmlir.register_lowering(_dot_product_attention_fp8_fwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_fp8_fwd_lower, multiple_results=True))\n\n_dot_product_attention_fp8_bwd_lower.def_partition(\n infer_sharding_from_operands=_dot_product_attention_fp8_bwd_infer_sharding_from_operands,\n partition=_dot_product_attention_fp8_bwd_partition,\n sharding_rule=_dot_product_attention_fp8_bwd_shardy_rule)\n\nmlir.register_lowering(_dot_product_attention_fp8_bwd_p_wrapper,\n mlir.lower_fun(_dot_product_attention_fp8_bwd_lower, multiple_results=True))\n\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fp8_fwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fp8_fwd_p_wrapper\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fp8_bwd_p\n)\ndispatch.prim_requires_devices_during_lowering.add(\n _dot_product_attention_fp8_bwd_p_wrapper\n)\n\n@functools.partial(custom_derivatives.custom_vjp, nondiff_argnums=(4, 5, 6, 7))\ndef _dot_product_attention_fp8(query: Array,\n key: Array,\n value: Array,\n fp8_params: dict[str, Array],\n scale: float,\n use_causal_mask: bool,\n layout: int,\n cudnn_version: int):\n output, amax_s, amax_o = 
_dot_product_attention_fp8_fwd(\n query, key, value, params_from_keys(fp8_params, fp8_params_keys_fwd),\n scale, use_causal_mask, layout, cudnn_version\n )\n return output, amax_s, amax_o\n\n_dot_product_attention_fp8.defvjp(_dot_product_attention_fp8_fwd_rule, _dot_product_attention_fp8_bwd_rule)\n\ndef combine_bias_and_mask(bias, mask, dtype):\n if bias is not None:\n # reshape bias to have 4D shape\n bias = bias.reshape((1,) * (4 - len(bias.shape)) + bias.shape)\n\n if mask is not None:\n if mask.dtype == np.dtype('bool'):\n large_negative_number = get_large_negative_number(dtype)\n mask = jnp.where(mask, jnp.asarray(0, dtype), large_negative_number)\n # reshape mask to have 4D shape\n mask = mask.reshape((1,) * (4 - len(mask.shape)) + mask.shape) # type: ignore[union-attr]\n\n # combine bias and mask\n if bias is None:\n bias = mask\n else:\n if mask is not None:\n # should be broadcast to same shape\n bias = bias + mask\n return bias\n\n# User interface\ndef paged_attention(\n query: Array,\n key: Array,\n value: Array,\n q_seqlen: Array,\n kv_seqlen: Array,\n page_table_k: Array,\n page_table_v: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes paged attention described in https://arxiv.org/pdf/2309.06180.\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n Args:\n query: Queries for attention calculation with a shape of BTNH or BNTH.\n key: Keys for attention calculation with a shape of\n [num_blocks, block_size, N, H] or [num_blocks, N, block_size, H] where\n num_blocks = B * Ceil(S / block_size).\n value: Values to be used in attention with a shape of\n [num_blocks, block_size, N, H] or [num_blocks, N, block_size, H] where\n num_blocks = B * Ceil(S / block_size).\n q_seqlen: Non padded sequence length of query with a shape of B.\n kv_seqlen: Non padded sequence length of key and value with a shape of B.\n page_table_k: page table for key of shape [B, 1, num_blocks_per_batch, 1]\n where num_blocks_per_batch = Ceil(S / block_size).\n page_table_v: page table for value of shape [B, 1, num_blocks_per_batch, 1]\n where num_blocks_per_batch = Ceil(S / block_size).\n bias: Bias to be added to logits with a shape of BNTS.\n mask: Mask used to filter out logits with a shape of BNTS.\n scale: Scale for the query.\n qkv_layout: Layout string, with supported formats being BTNH, BNTH, BSNH,\n BNSH.\n sliding_window_length: Window size to make attention only attend to each\n token's left local window (pos - sliding_window_length, pos] where `pos`\n is the index of each token. E.g., if sliding_window_length == 3 and the\n sequence is [0, 1, 2, 3, c, 4, 5], token `c` can attend to [4, 5, c].\n use_fp8: Whether to use FP8 attention mechanism.\n return_residual: Whether to return the logsumexp tensor of shape BTN\n or BNT to users. See section 3.1.1 in the FlashAttention-2 paper:\n https://arxiv.org/pdf/2307.08691 to find the definition of logsumexp.\n Returns:\n output: the same shape as the query.\n residual: the logsumexp tensor if return_residual=True. 
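# --- Editorial illustration of combine_bias_and_mask above: a boolean mask\n # becomes an additive bias of large negative values, broadcast to 4D\n # (the stand-in for get_large_negative_number below is an assumption):\n # import numpy as np; import jax.numpy as jnp\n # mask = jnp.array([[True, False], [True, True]]) # (T, S)\n # neg = jnp.asarray(np.finfo(np.float32).min / 2, jnp.float32)\n # bias = jnp.where(mask, jnp.asarray(0, jnp.float32), neg)\n # bias = bias.reshape((1, 1) + bias.shape) # -> (1, 1, T, S)\n 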
(non fp8)\n """"""\n cudnn_version = check_cudnn_version()\n layout = _normalize_layout(qkv_layout)\n if use_fp8:\n raise ValueError(""Paged attention doesn't support fp8 for now."")\n if has_padding(mask_type) and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to generate padding mask."")\n if sliding_window_length is not None and sliding_window_length <= 0:\n raise ValueError(\n f""Require sliding_window_length > 0, got {sliding_window_length}."")\n\n bias = combine_bias_and_mask(bias, mask, query.dtype)\n # check if input shapes and data types are compatible\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, None, None,\n page_table_k, page_table_v, layout)\n has_bias = bias is not None\n has_dbias = has_bias and \\n should_export_dbias(bias.shape, query.shape, layout) # type: ignore[union-attr]\n variadic_args = (has_bias, has_dbias)\n\n _not_used = jnp.zeros(0, dtype=query.dtype)\n if bias is None:\n bias = _not_used\n\n output = _dot_product_attention(\n query, key, value, bias, q_seqlen, kv_seqlen, _not_used, _not_used,\n page_table_k, page_table_v, scale, seed, dropout_rate, variadic_args,\n mask_type, layout.value, sliding_window_length, cudnn_version,\n return_residual)\n return output\n\n\ndef dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n q_seqlen: Array | None = None,\n kv_seqlen: Array | None = None,\n q_offsets: Array | None = None,\n kv_offsets: Array | None = None,\n fp8_params: FP8Params | None = None,\n *,\n scale: float = 1.0,\n mask_type: MaskType = MaskType.NO_MASK,\n seed: int = 42,\n dropout_rate: float = 0.,\n qkv_layout: str = ""BTNH"",\n sliding_window_length: int | None = None,\n use_fp8: bool = False,\n return_residual: bool = False\n):\n """"""Computes dot-product attention given query (Q), key (K), and value (V).\n\n This function serves as the core operation for applying attention\n mechanisms as described in the paper [https://arxiv.org/abs/1706.03762].\n Initially, it determines the attention weights by processing Q and K,\n subsequently combining the outcomes using V. Throughout this function, we\n utilize the following uppercase letters to represent specific dimensions of\n the arrays:\n\n B = batch size\n S = length of the key/value (source)\n T = length of the query (target)\n N = number of attention heads\n H = dimensions of each attention head.\n\n The supported layouts for Q, K, V are either BT(S)NH or BNT(S)H, and they must\n adhere to the same layout. The output layout remains consistent with Q,\n defaulting to BT(S)NH.\n\n Args:\n query: Queries for attention calculation with a shape of BTNH or BNTH.\n key: Keys for attention calculation with a shape of BSNH or BNSH.\n value: Values to be used in attention with a shape of BSNH or BNSH.\n bias: Bias to be added to logits with a shape of BNTS.\n mask: Mask used to filter out logits with a shape of BNTS.\n q_seqlen: Non padded sequence length of query with a shape of B.\n If q_offsets is set, q_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. For batches that have fewer segments\n than the maximum, fill the padded entries with -1.\n kv_seqlen: Non padded sequence length of key and value with a shape of B.\n If kv_offsets is set, kv_seqlen should have shape [B,M] where M is the\n maximum number of segments per batch. 
For batches that have fewer segments\n than the maximum, fill the padded entries with -1.\n q_offsets: offset of each segment packed in query with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batches that have\n fewer segments than the maximum, fill the padded entries with -1.\n E.g., if 2 batches have 3 and 2 segments respectively, each segment has\n size 1, q_offsets = [[0,1,2,-1], [0,1,-1,-1]]. q_seqlen should be set\n to indicate the size of each segment.\n kv_offsets: offset of each segment packed in key with a shape of [B,M+1]\n where M is the maximum number of segments per batch. For batches that have\n fewer segments than the maximum, fill the padded entries with -1.\n E.g., if 2 batches have 3 and 2 segments respectively, each segment has\n size 1, kv_offsets = [[0,1,2,-1], [0,1,-1,-1]]. kv_seqlen should be set\n to indicate the size of each segment.\n scale: Scale for the query.\n dropout_rate: Dropout rate.\n qkv_layout: Layout string, with supported formats being BTNH, BNTH, BSNH,\n BNSH.\n sliding_window_length: Window size to make attention only attend to each\n token's left local window (pos - sliding_window_length, pos] where `pos`\n is the index of each token. E.g., if sliding_window_length == 3 and the\n sequence is [0, 1, 2, 3, c, 4, 5], token `c` can attend to [4, 5, c].\n use_fp8: Whether to use FP8 attention mechanism.\n return_residual: Whether to return the logsumexp tensor of shape BTN\n or BNT to users. See section 3.1.1 in the FlashAttention-2 paper:\n https://arxiv.org/pdf/2307.08691 to find the definition of logsumexp.\n Returns:\n output: the same shape as the query.\n residual: the logsumexp tensor if return_residual=True. (non fp8)\n amax_s: amax of state. (fp8 only)\n amax_o: amax of output. 
(fp8 only)\n """"""\n # TODO(b/380898464): Check the compute capability, e.g., require GPU device,\n # in the kernel implementation (c++) code.\n cudnn_version = check_cudnn_version()\n layout = _normalize_layout(qkv_layout)\n\n if use_fp8:\n if fp8_params is None:\n raise ValueError(""fp8_params should not be None."")\n if mask_type not in (MaskType.NO_MASK, MaskType.CAUSAL):\n raise ValueError(""Only NO_MASK or CAUSAL masks are supported for fp8."")\n if not all(x is None for x in [bias, mask, q_seqlen, kv_seqlen]):\n raise ValueError(\n f""Expected 'None' for bias, mask, q_seqlen, and kv_seqlen, ""\n f""but got: bias={bias}, mask={mask}, q_seqlen={q_seqlen}, kv_seqlen={kv_seqlen}""\n )\n check_fp8_params(fp8_params)\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n output, amax_s, amax_o = _dot_product_attention_fp8(\n query, key, value, fp8_params,\n scale, mask_type == MaskType.CAUSAL, layout.value, cudnn_version\n )\n return output, amax_s, amax_o\n else:\n if has_padding(mask_type) and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to generate padding mask"")\n if sliding_window_length is not None and sliding_window_length <= 0:\n raise ValueError(\n f""Require sliding_window_length > 0, got {sliding_window_length}"")\n if q_offsets is not None and (q_seqlen is None or kv_seqlen is None):\n raise ValueError(""Require q_seqlen and kv_seqlen to use packed layout"")\n\n bias = combine_bias_and_mask(bias, mask, query.dtype)\n # check if input shapes and data types are compatible\n check_layout(query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n None, None, layout)\n has_bias = bias is not None\n has_dbias = has_bias and \\n should_export_dbias(bias.shape, query.shape, layout) # type: ignore[union-attr]\n variadic_args = (has_bias, has_dbias)\n\n _not_used = jnp.zeros(0, dtype=query.dtype)\n if bias is None:\n bias = _not_used\n if q_seqlen is None:\n q_seqlen = _not_used\n if kv_seqlen is None:\n kv_seqlen = _not_used\n if q_offsets is None:\n q_offsets = _not_used\n if kv_offsets is None:\n kv_offsets = _not_used\n\n output = _dot_product_attention(\n query, key, value, bias, q_seqlen, kv_seqlen, q_offsets, kv_offsets,\n _not_used, _not_used, scale, seed, dropout_rate, variadic_args,\n mask_type, layout.value, sliding_window_length, cudnn_version,\n return_residual)\n return output\n",python,tab
+400,1997656,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",75192,0,"",python,selection_command
+401,2002071,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",75968,0,"",python,selection_mouse
+402,2002225,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",75965,5,"https",python,selection_mouse
+403,2003052,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",76087,0,"",python,selection_mouse
+404,2005101,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",75969,0,"",python,selection_mouse
+405,2005881,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",75974,0,"",python,selection_mouse
+406,2024587,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",81274,0,"",python,selection_mouse
+407,2026407,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",81625,0,"",python,selection_mouse 
+408,2027569,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",47381,0,"",python,selection_command +409,2032157,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",48274,0,"",python,selection_mouse +410,2032376,".venv/lib64/python3.12/site-packages/jax/_src/cudnn/fused_attention_stablehlo.py",16439,0,"",python,selection_command +411,2212468,".venv/lib64/python3.12/site-packages/jax/_src/nn/functions.py",0,0,"",python,tab +412,2213678,"jasmine/utils/nn.py",0,0,"",python,tab +413,2214748,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +414,2214749,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1152,0,"",shellscript,selection_mouse +415,2226748,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1974,0,"",shellscript,selection_mouse +416,2228300,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1994,0,"",shellscript,selection_mouse +417,2300188,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1993,0,"",shellscript,selection_command +418,2309939,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2314,0,"",shellscript,selection_mouse +419,2309968,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2313,0,"",shellscript,selection_command +420,2310986,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2278,0,"",shellscript,selection_mouse +421,2310987,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2277,0,"",shellscript,selection_command +422,2313079,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2278,0,"\n ",shellscript,content +423,2313388,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2283,0,"-",shellscript,content +424,2313389,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2284,0,"",shellscript,selection_keyboard +425,2313471,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2284,0,"-",shellscript,content +426,2313472,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2285,0,"",shellscript,selection_keyboard +427,2314016,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2285,0,"e",shellscript,content +428,2314017,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2286,0,"",shellscript,selection_keyboard +429,2314753,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2286,0,"v",shellscript,content +430,2314754,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2287,0,"",shellscript,selection_keyboard +431,2314921,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2287,0,"a",shellscript,content +432,2314922,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2288,0,"",shellscript,selection_keyboard +433,2315034,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2288,0,"l",shellscript,content +434,2315035,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2289,0,"",shellscript,selection_keyboard +435,2315273,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2289,0,"-",shellscript,content +436,2315274,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2290,0,"",shellscript,selection_keyboard +437,2315543,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2290,0,"f",shellscript,content 
+438,2315545,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2291,0,"",shellscript,selection_keyboard +439,2315718,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2291,0,"u",shellscript,content +440,2315719,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2292,0,"",shellscript,selection_keyboard +441,2315895,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2292,0,"l",shellscript,content +442,2315896,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2293,0,"",shellscript,selection_keyboard +443,2316028,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2293,0,"l",shellscript,content +444,2316029,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2294,0,"",shellscript,selection_keyboard +445,2316578,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2294,0,"-",shellscript,content +446,2316579,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2295,0,"",shellscript,selection_keyboard +447,2316947,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2295,0,"f",shellscript,content +448,2316948,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2296,0,"",shellscript,selection_keyboard +449,2317085,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2296,0,"r",shellscript,content +450,2317086,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2297,0,"",shellscript,selection_keyboard +451,2317265,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2297,0,"a",shellscript,content +452,2317266,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2298,0,"",shellscript,selection_keyboard +453,2317318,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2298,0,"m",shellscript,content +454,2317319,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2299,0,"",shellscript,selection_keyboard +455,2317455,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2299,0,"e",shellscript,content +456,2317456,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2300,0,"",shellscript,selection_keyboard +457,2317637,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2300,0," ",shellscript,content +458,2317638,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2301,0,"",shellscript,selection_keyboard +459,2317825,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2301,0,"}",shellscript,content +460,2317826,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2302,0,"",shellscript,selection_keyboard +461,2318503,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2301,1,"",shellscript,content +462,2319205,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2301,0,"\",shellscript,content +463,2319206,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2302,0,"",shellscript,selection_keyboard +464,2320647,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2301,0,"",shellscript,selection_command +465,2322033,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2288,0,"",shellscript,selection_mouse +466,2322198,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2285,4,"eval",shellscript,selection_mouse +467,2323853,"jasmine/train_dynamics.py",0,0,"",python,tab +468,2327805,"jasmine/train_dynamics.py",3965,0,"",python,selection_mouse 
+469,2350093,"TERMINAL",0,0,"git branhc",,terminal_command +470,2352541,"TERMINAL",0,0,"git branch",,terminal_command +471,2352592,"TERMINAL",0,0,"]633;C[?1h=\r ablation/use-pytorch-dataloader\r\n action-mapper\r\n* add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n:",,terminal_output +472,2353966,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +473,2357029,"jasmine/train_dynamics.py",2764,0,"",python,selection_mouse +474,2357139,"jasmine/train_dynamics.py",2757,15,"eval_full_frame",python,selection_mouse +475,2359679,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +476,2360972,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2300,0,"",shellscript,selection_mouse +477,2361932,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2295,5,"",shellscript,content +478,2362354,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2294,1,"",shellscript,content +479,2362420,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2290,4,"",shellscript,content +480,2362571,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2289,1,"",shellscript,content +481,2362872,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2285,4,"",shellscript,content +482,2363067,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2285,0,"eval_full_frame",shellscript,content +483,2378229,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",0,0,"",shellscript,tab +484,2379572,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1547,0,"",shellscript,selection_mouse +485,2379581,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1546,0,"",shellscript,selection_command +486,2379654,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1546,1,"\",shellscript,selection_mouse +487,2379662,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1547,0,"",shellscript,selection_command +488,2379731,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1546,1,"\",shellscript,selection_mouse +489,2379732,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1540,7,"r_val \",shellscript,selection_mouse +490,2379742,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1533,14,"ords_dir_val \",shellscript,selection_mouse +491,2379764,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1484,63,"records_dir_train \\n --val_data_dir $array_records_dir_val \",shellscript,selection_mouse +492,2379792,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1475,72,"r $array_records_dir_train \\n --val_data_dir $array_records_dir_val \",shellscript,selection_mouse +493,2379792,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1450,97,"ect jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \",shellscript,selection_mouse +494,2379814,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1447,100,"roject jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir 
$array_records_dir_val \",shellscript,selection_mouse +495,2379841,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1445,102,"-project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \",shellscript,selection_mouse +496,2379855,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1442,105," --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \",shellscript,selection_mouse +497,2379879,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1440,107," --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \",shellscript,selection_mouse +498,2380249,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1462,85," --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \",shellscript,selection_mouse +499,2381033,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1462,0,"",shellscript,selection_command +500,2382113,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +501,2382114,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2302,0,"",shellscript,selection_mouse +502,2382964,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2301,0,"",shellscript,selection_command +503,2383101,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2302,0," --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \",shellscript,content +504,2383113,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2302,0,"",shellscript,selection_command +505,2384647,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2302,0,"\n ",shellscript,content +506,2385297,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2307,1,"",shellscript,content +507,2385449,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2307,1,"",shellscript,content +508,2385542,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2307,1,"",shellscript,content +509,2385709,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2307,1,"",shellscript,content +510,2386023,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2306,0,"",shellscript,selection_command +511,2386900,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2348,0,"",shellscript,selection_command +512,2387015,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2392,0,"",shellscript,selection_command +513,2388276,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2389,36,"",shellscript,content +514,2388901,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2345,0,"",shellscript,selection_command +515,2389779,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2388,0,"",shellscript,selection_command +516,2390232,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2387,1,"",shellscript,content +517,2391507,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2387,0,"&",shellscript,content +518,2391508,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2388,0,"",shellscript,selection_keyboard +519,2392041,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2387,0,"",shellscript,selection_command 
+520,2393180,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2207,0,"",shellscript,selection_mouse +521,2393187,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2206,0,"",shellscript,selection_command +522,2393789,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2380,0,"",shellscript,selection_mouse +523,2394884,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",0,0,"",shellscript,tab +524,2394885,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1548,0,"",shellscript,selection_mouse +525,2395361,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1537,0,"",shellscript,selection_mouse +526,2396511,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",790,0,"",shellscript,selection_mouse +527,2398939,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",653,0,"",shellscript,selection_command +528,2400768,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +529,2400769,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1294,0,"",shellscript,selection_mouse +530,2401535,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1375,0,"\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val",shellscript,content +531,2401543,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1376,0,"",shellscript,selection_command +532,2402052,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1275,0,"",shellscript,selection_command +533,2402440,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1275,101,"",shellscript,content +534,2404378,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1429,0,"",shellscript,selection_mouse +535,2404484,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1412,21,"array_records_dir_val",shellscript,selection_mouse +536,2413732,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",0,0,"",shellscript,tab +537,2413733,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1547,0,"",shellscript,selection_mouse +538,2413745,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1546,0,"",shellscript,selection_command +539,2415300,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1547,0,"",shellscript,selection_command +540,2415430,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1546,1,"",shellscript,content +541,2416432,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1546,0,"&",shellscript,content +542,2416433,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1547,0,"",shellscript,selection_keyboard +543,2417175,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1546,0,"",shellscript,selection_command +544,2417835,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +545,2417836,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2572,0,"",shellscript,selection_mouse +546,2418207,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2588,0,"",shellscript,selection_mouse 
+547,2418208,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2587,0,"",shellscript,selection_command +548,2418900,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2573,15,"wait $child_pid",shellscript,selection_command +549,2419095,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2572,16,"\nwait $child_pid",shellscript,selection_command +550,2419223,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2559,29,"child_pid=$!\n\nwait $child_pid",shellscript,selection_command +551,2419902,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2559,0,"",shellscript,selection_command +552,2420925,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",0,0,"",shellscript,tab +553,2420926,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1548,0,"",shellscript,selection_mouse +554,2421678,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1548,0,"\nchild_pid=$!\n\nwait $child_pid",shellscript,content +555,2421690,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1549,0,"",shellscript,selection_command +556,2426556,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_lam_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_default\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_lam.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --max_lr=5e-6 \\n --log \\n --name=coinrun-lam-default-$slurm_job_id \\n --tags lam coinrun default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \n",shellscript,tab +557,2430392,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",0,0,"",shellscript,tab +558,2464710,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +559,2464711,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1154,0,"",shellscript,selection_mouse +560,2464767,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1153,1,"\n",shellscript,selection_mouse +561,2464792,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1152,2,"\n\n",shellscript,selection_mouse +562,2464804,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1110,44,"\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse 
+563,2464827,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1032,122," [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +564,2464854,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1029,125,"\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +565,2464862,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",936,218,"restart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +566,2464878,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",891,263,"# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +567,2464895,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",890,264,"\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +568,2464929,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",865,289,"trap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +569,2464963,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",864,290,"\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +570,2465015,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",862,292,"}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +571,2465015,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",851,303," exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +572,2465068,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",816,338," scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol 
show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +573,2465125,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",766,388," # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +574,2465189,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",719,435," # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +575,2465252,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",626,528," echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +576,2465341,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",610,544,"requeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +577,2465485,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",558,596,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,selection_mouse +578,2470697,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",0,0,"",shellscript,tab 
+579,2470697,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",515,0,"",shellscript,selection_mouse +580,2471958,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",515,0,"\n",shellscript,content +581,2472192,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",516,0,"\n",shellscript,content +582,2472295,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",517,0,"\n",shellscript,content +583,2472516,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",517,0,"",shellscript,selection_command +584,2472963,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",516,0,"",shellscript,selection_command +585,2473324,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",516,0,"# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n",shellscript,content +586,2474805,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1119,0,"",shellscript,selection_mouse +587,2475192,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1114,0,"",shellscript,selection_mouse +588,2475856,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1113,1,"",shellscript,content +589,2475969,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1112,1,"",shellscript,content +590,2495654,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",529,0,"",shellscript,selection_mouse +591,2496284,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",515,0,"",shellscript,selection_mouse +592,2497200,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",987,0,"",shellscript,selection_mouse +593,2497787,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",784,0,"",shellscript,selection_mouse +594,2498340,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",822,0,"",shellscript,selection_mouse +595,2507364,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1040,0,"",shellscript,selection_mouse +596,2511564,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +597,2511565,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",557,0,"",shellscript,selection_mouse +598,2512081,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",537,0,"",shellscript,selection_mouse +599,2513003,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",482,0,"",shellscript,selection_mouse +600,2513931,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",459,29,"",shellscript,content +601,2526055,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +602,2534920,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,0,"",shellscript,selection_mouse 
+603,2535057,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,2,"to",shellscript,selection_mouse +604,2535075,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,3,"tok",shellscript,selection_mouse +605,2535090,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,5,"token",shellscript,selection_mouse +606,2535145,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,6,"tokeni",shellscript,selection_mouse +607,2535145,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,7,"tokeniz",shellscript,selection_mouse +608,2535374,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,8,"tokenize",shellscript,selection_mouse +609,2535556,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,9,"tokenizer",shellscript,selection_mouse +610,2536344,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,9,"",shellscript,content +611,2536743,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,0,"d",shellscript,content +612,2536744,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1816,0,"",shellscript,selection_keyboard +613,2537014,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1816,0,"y",shellscript,content +614,2537015,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1817,0,"",shellscript,selection_keyboard +615,2537131,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1817,0,"n",shellscript,content +616,2537132,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1818,0,"",shellscript,selection_keyboard +617,2538765,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",0,0,"",shellscript,tab +618,2538766,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1635,0,"",shellscript,selection_mouse +619,2546981,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1112,0,"\n",shellscript,content +620,2548212,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",1112,1,"",shellscript,content +621,2549296,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +622,2549297,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1851,0,"",shellscript,selection_mouse +623,2550513,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,3,"",shellscript,content +624,2551109,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1815,0,"tokenizer",shellscript,content +625,2552177,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1645,0,"",shellscript,selection_mouse +626,2554256,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1717,0,"",shellscript,selection_mouse +627,2554901,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1659,0,"",shellscript,selection_mouse +628,2555380,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1666,0,"",shellscript,selection_mouse +629,2567594,"TERMINAL",0,0,"sync-runner",,terminal_command +630,2567652,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +631,2570249,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/default_runs/\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh\r\n\r\nsent 42,053 bytes received 282 bytes 16,934.00 bytes/sec\r\ntotal size is 28,214,089 speedup 
is 666.45\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +632,2584260,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",,terminal_command +633,2584270,"TERMINAL",0,0,"]633;CSubmitted batch job 3532378\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +634,2588323,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",,terminal_command +635,2588326,"TERMINAL",0,0,"]633;CSubmitted batch job 3532379\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +636,2595469,"TERMINAL",0,0,"scancel 3532378",,terminal_command +637,2599426,"TERMINAL",0,0,"scancel 3532379",,terminal_command +638,2603628,"TERMINAL",0,0,"sync-runner",,terminal_command +639,2603677,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +640,2603807,"TERMINAL",0,0,"\r\nsent 37,224 bytes received 235 bytes 74,918.00 bytes/sec\r\ntotal size is 28,214,089 speedup is 753.20\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +641,2604583,"TERMINAL",0,0,"runner",,terminal_command +642,2607917,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh",,terminal_command +643,2607921,"TERMINAL",0,0,"]633;CSubmitted batch job 3532383\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine_jobs",,terminal_output +644,2610432,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",,terminal_command +645,2610434,"TERMINAL",0,0,"]633;CSubmitted batch job 3532384\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine_jobs",,terminal_output +646,2612837,"TERMINAL",0,0,"dev",,terminal_command +647,2614822,"jasmine/train_dynamics.py",0,0,"",python,tab +648,2619668,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +649,2621004,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default copy.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +650,2628807,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n 
--patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +651,2631500,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +652,2631500,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2258,0,"",shellscript,selection_mouse +653,2632762,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2257,0,"",shellscript,selection_command +654,2632896,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2258,0,"",shellscript,selection_command +655,2633568,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2216,59," --name=coinrun-dynamics-maskgit-default-$slurm_job_id \",shellscript,selection_command +656,2634022,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2216,105," --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \",shellscript,selection_command +657,2635261,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2318,0,"",shellscript,selection_command +658,2636017,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2318,1,"t",shellscript,selection_command +659,2637538,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2258,1,"t",shellscript,selection_command +660,2638006,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2258,0,"",shellscript,selection_command +661,2638545,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2317,1,"",shellscript,content +662,2638546,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2257,1,"",shellscript,content +663,2638690,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2315,1,"",shellscript,content +664,2638691,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2256,1,"",shellscript,content +665,2638829,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2313,1,"",shellscript,content +666,2638829,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2255,1,"",shellscript,content +667,2638974,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2311,1,"",shellscript,content +668,2638974,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2254,1,"",shellscript,content +669,2639104,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2309,1,"",shellscript,content +670,2639105,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2253,1,"",shellscript,content +671,2639259,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2307,1,"",shellscript,content +672,2639259,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2252,1,"",shellscript,content +673,2639539,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2306,1,"",shellscript,content +674,2639540,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2252,1,"",shellscript,content +675,2640516,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2305,0,"s",shellscript,content 
+676,2640516,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2252,0,"s",shellscript,content +677,2640517,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2253,0,"",shellscript,selection_keyboard +678,2640678,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2307,0,"q",shellscript,content +679,2640679,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2253,0,"q",shellscript,content +680,2640679,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2254,0,"",shellscript,selection_keyboard +681,2640935,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2309,0,"r",shellscript,content +682,2640936,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2254,0,"r",shellscript,content +683,2640936,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2255,0,"",shellscript,selection_keyboard +684,2641101,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2311,0,"t",shellscript,content +685,2641101,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2255,0,"t",shellscript,content +686,2641102,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2256,0,"",shellscript,selection_keyboard +687,2641229,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2313,0,"-",shellscript,content +688,2641230,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2256,0,"-",shellscript,content +689,2641230,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2257,0,"",shellscript,selection_keyboard +690,2641467,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2315,0,"a",shellscript,content +691,2641467,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2257,0,"a",shellscript,content +692,2641469,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2258,0,"",shellscript,selection_keyboard +693,2641651,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2317,0,"b",shellscript,content +694,2641651,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2258,0,"b",shellscript,content +695,2641652,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2259,0,"",shellscript,selection_keyboard +696,2641894,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2319,0,"l",shellscript,content +697,2641895,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2259,0,"l",shellscript,content +698,2641895,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2260,0,"",shellscript,selection_keyboard +699,2641958,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2321,0,"a",shellscript,content +700,2641959,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2260,0,"a",shellscript,content +701,2641959,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2261,0,"",shellscript,selection_keyboard +702,2642157,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2323,0,"t",shellscript,content +703,2642158,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2261,0,"t",shellscript,content +704,2642159,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2262,0,"",shellscript,selection_keyboard +705,2642217,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2325,0,"i",shellscript,content 
+706,2642217,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2262,0,"i",shellscript,content +707,2642226,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2263,0,"",shellscript,selection_keyboard +708,2642288,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2327,0,"o",shellscript,content +709,2642288,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2263,0,"o",shellscript,content +710,2642289,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2264,0,"",shellscript,selection_keyboard +711,2642434,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2329,0,"n",shellscript,content +712,2642434,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2264,0,"n",shellscript,content +713,2642435,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2265,0,"",shellscript,selection_keyboard +714,2642985,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2264,0,"",shellscript,selection_command +715,2646417,"jasmine/train_dynamics.py",0,0,"",python,tab +716,2647603,"jasmine/genie.py",0,0,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n max_noise_level: float,\n noise_buckets: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_actions = num_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n 
patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.use_gt_actions:\n self.action_embed = nnx.Embed(\n self.num_actions, self.latent_action_dim, rngs=rngs\n )\n self.lam = None\n else:\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_embed = None\n if self.dyna_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n latent_actions_BTm11L = None\n action_embeddings_BTm11L = None\n if self.use_gt_actions:\n assert self.action_embed is not None\n action_indices_E = None\n action_embeddings_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n action_embeddings_BTm11L = action_embeddings_BT1L[:, :-1]\n else:\n assert self.lam is not None\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=(\n action_embeddings_BTm11L\n if self.use_gt_actions\n else latent_actions_BTm11L\n ),\n )\n outputs[""rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs)\n outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: 
Dict[str, jax.Array],\n seq_len: int,\n noise_level: float = 0.0,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n assert (\n noise_level <= self.max_noise_level\n ), ""Noise level must not be greater than max_noise_level.""\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, noise_level, maskgit_steps, temperature, sample_argmax\n )\n elif self.dyna_type == ""causal"":\n return self.sample_causal(\n batch, seq_len, noise_level, temperature, sample_argmax\n )\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n noise_level: float,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, self.num_patch_latents)\n )\n noise_level = jnp.array(noise_level)\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n 
num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n\n rng, _rng_noise_augmentation = jax.random.split(rng)\n noise_level_B = jnp.tile(noise_level, B)\n _, noise_level_embed_BS1M = dynamics_maskgit.apply_noise_augmentation(\n vid_embed_BSNM, _rng_noise_augmentation, noise_level_B\n )\n\n vid_embed_BSNp2M = jnp.concatenate(\n [act_embed_BS1M, noise_level_embed_BS1M, vid_embed_BSNM], axis=2\n )\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNp2V = (\n dynamics_maskgit.transformer(vid_embed_BSNp2M) / step_temp\n )\n final_logits_BSNV = final_logits_BSNp2V[:, :, 2:]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = 
current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n noise_level: float,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n noise_level = jnp.array(noise_level)\n\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the 
submodule inside scan body to prevent trace context mismatches\n dynamics_causal = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n\n rng, _rng_noise_augmentation = jax.random.split(rng)\n noise_level_B = jnp.tile(noise_level, B)\n _, noise_level_embed_BS1M = dynamics_causal.apply_noise_augmentation(\n vid_embed_BSNM, _rng_noise_augmentation, noise_level_B\n )\n\n vid_embed_BSNp2M = jnp.concatenate(\n [act_embed_BS1M, noise_level_embed_BS1M, vid_embed_BSNM], axis=2\n )\n final_logits_BTNp2V = (\n dynamics_causal.transformer(vid_embed_BSNp2M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp2V[:, step_t, step_n + 1, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, step_n].set(final_logits_BV)\n\n new_carry = (rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n assert self.lam is not None\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = 
self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.ModelAndOptimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.ModelAndOptimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.ModelAndOptimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.ModelAndOptimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary 
computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # Reinitialize the optimizer states\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +717,2650375,"jasmine/train_dynamics.py",0,0,"",python,tab +718,2651121,"jasmine/train_dynamics.py",2849,0,"",python,selection_mouse +719,2651121,"jasmine/train_dynamics.py",2848,0,"",python,selection_command +720,2652101,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +721,2652592,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2540,0,"",shellscript,selection_mouse +722,2652598,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2539,0,"",shellscript,selection_command +723,2658458,"jasmine/train_dynamics.py",0,0,"",python,tab +724,2661521,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +725,2661990,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2215,0,"",shellscript,selection_mouse +726,2661991,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2214,0,"",shellscript,selection_command +727,2674043,"jasmine/models/dynamics.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer, Transformer\n\n\nclass DynamicsMaskGIT(nnx.Module):\n """"""\n MaskGIT dynamics model\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n L: latent dimension\n V: vocabulary size (number of latents)\n """"""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n max_noise_level: float,\n noise_buckets: int,\n mask_limit: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.noise_level_embed = nnx.Embed(\n self.noise_buckets, self.model_dim, rngs=rngs\n )\n\n def 
apply_noise_augmentation(self, vid_embed_BTNM, rng, noise_level_B=None):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n if noise_level_B is None:\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl,\n shape=(B,),\n minval=0.0,\n maxval=self.max_noise_level,\n dtype=self.dtype,\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n\n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n B = vid_embed_BTNM.shape[0]\n rng, _rng_prob, *_rngs_mask = jax.random.split(batch[""rng""], B + 2)\n mask_prob = jax.random.uniform(_rng_prob, shape=(B,), minval=self.mask_limit)\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n\n # --- Apply noise augmentation ---\n vid_embed_BTNM, noise_level_embed_BT1M = self.apply_noise_augmentation(\n vid_embed_BTNM, rng\n )\n\n # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp2M = jnp.concatenate(\n [padded_act_embed_BT1M, noise_level_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp2V = self.transformer(vid_embed_BTNp2M)\n logits_BTNV = logits_BTNp2V[:, :, 2:]\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n max_noise_level: float,\n noise_buckets: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n 
use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.noise_level_embed = nnx.Embed(\n self.noise_buckets, self.model_dim, rngs=rngs\n )\n\n def apply_noise_augmentation(self, vid_embed_BTNM, rng):\n B, T, N, M = vid_embed_BTNM.shape\n rng, _rng_noise_lvl, _rng_noise = jax.random.split(rng, 3)\n noise_level_B = jax.random.uniform(\n _rng_noise_lvl,\n shape=(B,),\n minval=0.0,\n maxval=self.max_noise_level,\n dtype=self.dtype,\n )\n noise_BTNM = jax.random.normal(_rng_noise, shape=(B, T, N, M), dtype=self.dtype)\n noise_bucket_idx_B = jnp.floor(\n (noise_level_B * self.noise_buckets) / self.max_noise_level\n ).astype(jnp.int32)\n\n # Clip noise_bucket_idx_B to ensure it stays within valid range to prevent NaNs\n noise_bucket_idx_B = jnp.clip(noise_bucket_idx_B, 0, self.noise_buckets - 1)\n\n noise_bucket_idx_B11 = noise_bucket_idx_B.reshape(B, 1, 1)\n noise_level_embed_B11M = self.noise_level_embed(noise_bucket_idx_B11)\n noise_level_embed_BT1M = jnp.tile(noise_level_embed_B11M, (1, T, 1, 1))\n noise_level_B111 = noise_level_B.reshape(B, 1, 1, 1)\n\n noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )\n\n return noise_augmented_vid_embed_BTNM, noise_level_embed_BT1M\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNM, noise_level_embed_BT1M = self.apply_noise_augmentation(\n vid_embed_BTNM, batch[""rng""]\n )\n vid_embed_BTNp2M = jnp.concatenate(\n [padded_act_embed_BT1M, noise_level_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp2V = self.transformer(vid_embed_BTNp2M)\n logits_BTNV = logits_BTNp2V[:, :, 1:-1]\n return logits_BTNV, jnp.ones_like(video_tokens_BTN)\n",python,tab +728,2683427,"jasmine/models/dynamics.py",3553,0,"",python,selection_mouse +729,2683958,"jasmine/models/dynamics.py",3552,0,"",python,selection_mouse +730,2683971,"jasmine/models/dynamics.py",3551,0,"",python,selection_command +731,2684668,"jasmine/models/dynamics.py",3543,9," )",python,selection_command +732,2684862,"jasmine/models/dynamics.py",3489,63," + jnp.sqrt(noise_level_B111) * noise_BTNM\n )",python,selection_command +733,2685010,"jasmine/models/dynamics.py",3429,123," jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )",python,selection_command +734,2685138,"jasmine/models/dynamics.py",3386,166," noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )",python,selection_command +735,2685426,"jasmine/models/dynamics.py",3394,0,"",python,selection_command +736,2686074,"jasmine/models/dynamics.py",3551,0,"#",python,content +737,2686074,"jasmine/models/dynamics.py",3501,0,"#",python,content +738,2686075,"jasmine/models/dynamics.py",3441,0,"#",python,content +739,2686075,"jasmine/models/dynamics.py",3394,0,"#",python,content +740,2686077,"jasmine/models/dynamics.py",3395,0,"",python,selection_keyboard 
+741,2686118,"jasmine/models/dynamics.py",3555,0," ",python,content +742,2686118,"jasmine/models/dynamics.py",3504,0," ",python,content +743,2686118,"jasmine/models/dynamics.py",3443,0," ",python,content +744,2686118,"jasmine/models/dynamics.py",3395,0," ",python,content +745,2686119,"jasmine/models/dynamics.py",3396,0,"",python,selection_keyboard +746,2686387,"jasmine/models/dynamics.py",3395,0,"",python,selection_command +747,2686693,"jasmine/models/dynamics.py",3440,0,"",python,selection_command +748,2687025,"jasmine/models/dynamics.py",3502,0,"",python,selection_command +749,2687144,"jasmine/models/dynamics.py",3558,0,"",python,selection_command +750,2687779,"jasmine/models/dynamics.py",3549,11," # )",python,selection_command +751,2687964,"jasmine/models/dynamics.py",3493,67," # + jnp.sqrt(noise_level_B111) * noise_BTNM\n # )",python,selection_command +752,2688089,"jasmine/models/dynamics.py",3431,129," # jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n # + jnp.sqrt(noise_level_B111) * noise_BTNM\n # )",python,selection_command +753,2688231,"jasmine/models/dynamics.py",3386,174," # noise_augmented_vid_embed_BTNM = (\n # jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n # + jnp.sqrt(noise_level_B111) * noise_BTNM\n # )",python,selection_command +754,2688365,"jasmine/models/dynamics.py",3386,0,"",python,selection_command +755,2688476,"jasmine/models/dynamics.py",3431,0,"",python,selection_command +756,2688684,"jasmine/models/dynamics.py",3493,0,"",python,selection_command +757,2688778,"jasmine/models/dynamics.py",3549,0,"",python,selection_command +758,2689103,"jasmine/models/dynamics.py",3560,0,"\n # noise_augmented_vid_embed_BTNM = (\n # jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n # + jnp.sqrt(noise_level_B111) * noise_BTNM\n # )",python,content +759,2689104,"jasmine/models/dynamics.py",3569,0,"",python,selection_command +760,2690358,"jasmine/models/dynamics.py",3561,44," # noise_augmented_vid_embed_BTNM = (",python,selection_command +761,2690536,"jasmine/models/dynamics.py",3561,106," # noise_augmented_vid_embed_BTNM = (\n # jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM",python,selection_command +762,2690666,"jasmine/models/dynamics.py",3561,162," # noise_augmented_vid_embed_BTNM = (\n # jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n # + jnp.sqrt(noise_level_B111) * noise_BTNM",python,selection_command +763,2690813,"jasmine/models/dynamics.py",3561,174," # noise_augmented_vid_embed_BTNM = (\n # jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n # + jnp.sqrt(noise_level_B111) * noise_BTNM\n # )",python,selection_command +764,2691033,"jasmine/models/dynamics.py",3569,0,"",python,selection_command +765,2691417,"jasmine/models/dynamics.py",3732,1,"",python,content +766,2691417,"jasmine/models/dynamics.py",3680,1,"",python,content +767,2691418,"jasmine/models/dynamics.py",3618,1,"",python,content +768,2691418,"jasmine/models/dynamics.py",3569,1,"",python,content +769,2691581,"jasmine/models/dynamics.py",3729,1,"",python,content +770,2691581,"jasmine/models/dynamics.py",3678,1,"",python,content +771,2691581,"jasmine/models/dynamics.py",3617,1,"",python,content +772,2691581,"jasmine/models/dynamics.py",3569,1,"",python,content +773,2691856,"jasmine/models/dynamics.py",3568,0,"",python,selection_command +774,2692046,"jasmine/models/dynamics.py",3611,0,"",python,selection_command +775,2692270,"jasmine/models/dynamics.py",3612,0,"",python,selection_command +776,2692769,"jasmine/models/dynamics.py",3613,0,"",python,selection_command 
+777,2692797,"jasmine/models/dynamics.py",3614,0,"",python,selection_command +778,2692861,"jasmine/models/dynamics.py",3615,0,"",python,selection_command +779,2692861,"jasmine/models/dynamics.py",3616,0,"",python,selection_command +780,2692977,"jasmine/models/dynamics.py",3617,0,"",python,selection_command +781,2693336,"jasmine/models/dynamics.py",3616,0,"",python,selection_command +782,2693553,"jasmine/models/dynamics.py",3616,3,"",python,content +783,2694028,"jasmine/models/dynamics.py",3616,1,"",python,content +784,2694188,"jasmine/models/dynamics.py",3616,1,"",python,content +785,2694362,"jasmine/models/dynamics.py",3616,1,"",python,content +786,2694553,"jasmine/models/dynamics.py",3616,1,"",python,content +787,2694716,"jasmine/models/dynamics.py",3616,1,"",python,content +788,2695103,"jasmine/models/dynamics.py",3616,1,"",python,content +789,2697137,"jasmine/models/dynamics.py",3616,1,"",python,content +790,2697362,"jasmine/models/dynamics.py",3616,1,"",python,content +791,2697553,"jasmine/models/dynamics.py",3616,1,"",python,content +792,2698052,"jasmine/models/dynamics.py",3616,1,"",python,content +793,2698089,"jasmine/models/dynamics.py",3616,1,"",python,content +794,2698145,"jasmine/models/dynamics.py",3616,1,"",python,content +795,2698169,"jasmine/models/dynamics.py",3616,1,"",python,content +796,2698229,"jasmine/models/dynamics.py",3616,1,"",python,content +797,2698230,"jasmine/models/dynamics.py",3616,1,"",python,content +798,2698281,"jasmine/models/dynamics.py",3616,1,"",python,content +799,2698282,"jasmine/models/dynamics.py",3616,1,"",python,content +800,2698298,"jasmine/models/dynamics.py",3616,1,"",python,content +801,2698326,"jasmine/models/dynamics.py",3616,1,"",python,content +802,2698393,"jasmine/models/dynamics.py",3616,1,"",python,content +803,2698395,"jasmine/models/dynamics.py",3616,1,"",python,content +804,2698551,"jasmine/models/dynamics.py",3616,1,"",python,content +805,2698552,"jasmine/models/dynamics.py",3616,1,"",python,content +806,2698553,"jasmine/models/dynamics.py",3616,1,"",python,content +807,2698554,"jasmine/models/dynamics.py",3616,1,"",python,content +808,2698555,"jasmine/models/dynamics.py",3616,1,"",python,content +809,2698721,"jasmine/models/dynamics.py",3616,1,"",python,content +810,2698908,"jasmine/models/dynamics.py",3616,1,"",python,content +811,2699145,"jasmine/models/dynamics.py",3616,1,"",python,content +812,2699570,"jasmine/models/dynamics.py",3616,1,"",python,content +813,2700106,"jasmine/models/dynamics.py",3643,0,"",python,selection_command +814,2700561,"jasmine/models/dynamics.py",3643,1,"",python,content +815,2700709,"jasmine/models/dynamics.py",3643,1,"",python,content +816,2701207,"jasmine/models/dynamics.py",3643,1,"",python,content +817,2701268,"jasmine/models/dynamics.py",3643,1,"",python,content +818,2701269,"jasmine/models/dynamics.py",3643,1,"",python,content +819,2701311,"jasmine/models/dynamics.py",3643,1,"",python,content +820,2701323,"jasmine/models/dynamics.py",3643,1,"",python,content +821,2701362,"jasmine/models/dynamics.py",3643,1,"",python,content +822,2701415,"jasmine/models/dynamics.py",3643,1,"",python,content +823,2701589,"jasmine/models/dynamics.py",3643,1,"",python,content +824,2702110,"jasmine/models/dynamics.py",3644,0,"",python,selection_command +825,2702604,"jasmine/models/dynamics.py",3643,0,"",python,selection_command +826,2702779,"jasmine/models/dynamics.py",3643,1,"",python,content +827,2702880,"jasmine/models/dynamics.py",3644,0,"",python,selection_command 
+828,2703375,"jasmine/models/dynamics.py",3645,0,"",python,selection_command +829,2703430,"jasmine/models/dynamics.py",3646,0,"",python,selection_command +830,2703443,"jasmine/models/dynamics.py",3647,0,"",python,selection_command +831,2703502,"jasmine/models/dynamics.py",3648,0,"",python,selection_command +832,2703503,"jasmine/models/dynamics.py",3649,0,"",python,selection_command +833,2703570,"jasmine/models/dynamics.py",3650,0,"",python,selection_command +834,2703571,"jasmine/models/dynamics.py",3651,0,"",python,selection_command +835,2703599,"jasmine/models/dynamics.py",3652,0,"",python,selection_command +836,2703655,"jasmine/models/dynamics.py",3653,0,"",python,selection_command +837,2703656,"jasmine/models/dynamics.py",3654,0,"",python,selection_command +838,2703712,"jasmine/models/dynamics.py",3655,0,"",python,selection_command +839,2703713,"jasmine/models/dynamics.py",3656,0,"",python,selection_command +840,2703769,"jasmine/models/dynamics.py",3657,0,"",python,selection_command +841,2703769,"jasmine/models/dynamics.py",3658,0,"",python,selection_command +842,2703964,"jasmine/models/dynamics.py",3659,0,"",python,selection_command +843,2704269,"jasmine/models/dynamics.py",3659,1,"",python,content +844,2706723,"jasmine/models/dynamics.py",3639,4,"",python,content +845,2707021,"jasmine/models/dynamics.py",3635,4,"",python,content +846,2707215,"jasmine/models/dynamics.py",3631,4,"",python,content +847,2707563,"jasmine/models/dynamics.py",3630,1,"",python,content +848,2708109,"jasmine/models/dynamics.py",3630,0," ",python,content +849,2708110,"jasmine/models/dynamics.py",3631,0,"",python,selection_keyboard +850,2708557,"jasmine/models/dynamics.py",3631,0,"+",python,content +851,2708558,"jasmine/models/dynamics.py",3632,0,"",python,selection_keyboard +852,2708641,"jasmine/models/dynamics.py",3632,0," ",python,content +853,2708642,"jasmine/models/dynamics.py",3633,0,"",python,selection_keyboard +854,2709095,"jasmine/models/dynamics.py",3632,0,"",python,selection_command +855,2711186,"jasmine/models/dynamics.py",3671,0,"",python,selection_command +856,2711350,"jasmine/models/dynamics.py",3673,0,"",python,selection_command +857,2711720,"jasmine/models/dynamics.py",3671,0,"",python,selection_command +858,2712074,"jasmine/models/dynamics.py",3663,9," )",python,selection_command +859,2712267,"jasmine/models/dynamics.py",3604,68," vid_embed_BTNM + noise_level_B111 * noise_BTNM\n )",python,selection_command +860,2712399,"jasmine/models/dynamics.py",3561,111," noise_augmented_vid_embed_BTNM = (\n vid_embed_BTNM + noise_level_B111 * noise_BTNM\n )",python,selection_command +861,2712519,"jasmine/models/dynamics.py",3561,0,"",python,selection_command +862,2712682,"jasmine/models/dynamics.py",3604,0,"",python,selection_command +863,2713639,"jasmine/models/dynamics.py",3605,0,"",python,selection_command +864,2713768,"jasmine/models/dynamics.py",3562,0,"",python,selection_command +865,2713968,"jasmine/models/dynamics.py",3563,0,"",python,selection_command +866,2714470,"jasmine/models/dynamics.py",3564,0,"",python,selection_command +867,2714528,"jasmine/models/dynamics.py",3565,0,"",python,selection_command +868,2714529,"jasmine/models/dynamics.py",3566,0,"",python,selection_command +869,2714593,"jasmine/models/dynamics.py",3567,0,"",python,selection_command +870,2714600,"jasmine/models/dynamics.py",3568,0,"",python,selection_command +871,2714610,"jasmine/models/dynamics.py",3569,0,"",python,selection_command +872,2714665,"jasmine/models/dynamics.py",3570,0,"",python,selection_command 
+873,2714672,"jasmine/models/dynamics.py",3571,0,"",python,selection_command +874,2714708,"jasmine/models/dynamics.py",3572,0,"",python,selection_command +875,2714767,"jasmine/models/dynamics.py",3573,0,"",python,selection_command +876,2714768,"jasmine/models/dynamics.py",3574,0,"",python,selection_command +877,2714796,"jasmine/models/dynamics.py",3575,0,"",python,selection_command +878,2715373,"jasmine/models/dynamics.py",3689,0,"",python,selection_command +879,2715598,"jasmine/models/dynamics.py",8169,0,"",python,selection_command +880,2716246,"jasmine/models/dynamics.py",8344,0,"",python,selection_command +881,2717550,"jasmine/models/dynamics.py",8328,0,"",python,selection_command +882,2717727,"jasmine/models/dynamics.py",8326,0,"",python,selection_command +883,2717988,"jasmine/models/dynamics.py",8327,0,"\n noise_augmented_vid_embed_BTNM = (\n vid_embed_BTNM + noise_level_B111 * noise_BTNM\n )",python,content +884,2717990,"jasmine/models/dynamics.py",8336,0,"",python,selection_command +885,2718432,"jasmine/models/dynamics.py",8326,0,"",python,selection_command +886,2719050,"jasmine/models/dynamics.py",8318,9," )",python,selection_command +887,2719220,"jasmine/models/dynamics.py",8264,63," + jnp.sqrt(noise_level_B111) * noise_BTNM\n )",python,selection_command +888,2719355,"jasmine/models/dynamics.py",8204,123," jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )",python,selection_command +889,2719506,"jasmine/models/dynamics.py",8161,166," noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )",python,selection_command +890,2719691,"jasmine/models/dynamics.py",8169,0,"",python,selection_command +891,2720341,"jasmine/models/dynamics.py",8326,0,"#",python,content +892,2720341,"jasmine/models/dynamics.py",8276,0,"#",python,content +893,2720342,"jasmine/models/dynamics.py",8216,0,"#",python,content +894,2720342,"jasmine/models/dynamics.py",8169,0,"#",python,content +895,2720343,"jasmine/models/dynamics.py",8170,0,"",python,selection_keyboard +896,2720357,"jasmine/models/dynamics.py",8330,0," ",python,content +897,2720357,"jasmine/models/dynamics.py",8279,0," ",python,content +898,2720357,"jasmine/models/dynamics.py",8218,0," ",python,content +899,2720357,"jasmine/models/dynamics.py",8170,0," ",python,content +900,2720358,"jasmine/models/dynamics.py",8171,0,"",python,selection_keyboard +901,2720618,"jasmine/models/dynamics.py",8170,0,"",python,selection_command +902,2721674,"jasmine/models/dynamics.py",8447,0,"",python,selection_mouse +903,2721691,"jasmine/models/dynamics.py",8446,0,"",python,selection_command +904,2722307,"jasmine/models/dynamics.py",8473,0,"",python,selection_mouse +905,2723621,"jasmine/models/dynamics.py",8335,0,"",python,selection_mouse +906,2723622,"jasmine/models/dynamics.py",8334,0,"",python,selection_command +907,2724473,"jasmine/models/dynamics.py",8335,0,"\n ",python,content +908,2724575,"jasmine/models/dynamics.py",8336,8,"",python,content +909,2725394,"jasmine/models/dynamics.py",8448,0,"",python,selection_mouse +910,2725405,"jasmine/models/dynamics.py",8447,0,"",python,selection_command +911,2726190,"jasmine/models/dynamics.py",8400,0,"",python,selection_mouse +912,2726940,"jasmine/models/dynamics.py",8448,0,"",python,selection_mouse +913,2726953,"jasmine/models/dynamics.py",8447,0,"",python,selection_command +914,2727712,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab 
+915,2727712,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2520,0,"",shellscript,selection_mouse +916,2732208,"TERMINAL",0,0,"sync-runner-2",,terminal_command +917,2732268,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +918,2739746,"TERMINAL",0,0,"./\r\npyproject.toml\r\n",,terminal_output +919,2741748,"TERMINAL",0,0,"jasmine/\r\njasmine/genie.py\r\njasmine/sample.py\r\njasmine/train_dynamics.py\r\njasmine/models/\r\njasmine/models/dynamics.py\r\njasmine/utils/\r\nslurm/dev/alfred/berlin/\r\nslurm/dev/alfred/berlin/patch_size_lr/\r\nslurm/dev/alfred/berlin/patch_size_lr/spawner.sh\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_base.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_batch_size_248.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_batch_size_248_lr_0.00005.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_batch_size_248_lr_0.00009.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_batch_size_248_lr_0.0005.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_batch_size_248_lr_0.001.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_batch_size_248_lr_0.01.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_batch_size_248_lr_0.01_full_prec.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_full_prec_lr_0.005_warumup_30k.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_full_prec_lr_0.005_warumup_30k_batch_248.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_full_prec_lr_0.01_batch_248.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_full_prec_lr_0.01_batch_248_warmup_30k.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_full_prec_lr_0.01_warumup_30k.sbatch\r\nslurm/dev/alfred/berlin/patch_size_lr/tokenizer_patch_16_lr_0.01.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_full_prec.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_full_prec_batch_96.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_full_prec_from_40k.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_full_prec_from_80k_nan_invest_requeue_every2h.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_full_prec_from_80k_nan_invest_requeue_every_10h.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_20k.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_40k.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_60k.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_full_prec_from_40k.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/batch_scaling/\r\nslurm/dev/alfred/berlin/test_exposure_bias/batch_scaling/dynamics_batch_scaling.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/batch_scaling/dynamics_batch_scaling_batch_96.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/batch_scaling/spawner.sh\r\nslurm/dev/alfred/berlin/test_exposure_bias_climber/\r\nslurm/dev/alfred/berlin/test_exposure_bias_climber/dynamics.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias_climber/dynamics_batch_96.sbatch\r\nslurm/dev/alfred/berlin/topology_restore_fix/\r\nslurm/dev/alfred/berlin/topology_restore_fix/train_tokenizer_overfit_1_gpu.sbatch\r\nslurm/dev/alfred/berlin/topology_restore_fix/train_tokenizer_overfit_1_t
o_1_gpu.sbatch\r\nslurm/dev/alfred/berlin/topology_restore_fix/train_tokenizer_overfit_1_to_2_gpu.sbatch\r\nslurm/dev/alfred/berlin/with_validation/\r\nslurm/dev/alfred/berlin/with_validation/dynamics_baseline_debug_new_jasmine_folder.sbatch\r\nslurm/dev/alfred/berlin/with_validation/dynamics_test_top_k.sbatch\r\nslurm/dev/alfred/berlin/with_validation/lam_baseline_debug.sbatch\r\nslurm/dev/alfred/berlin/with_validation/tokenizer_baseline_debug.sbatch\r\nslurm/jobs/alfred/berlin/\r\nslurm/jobs/alfred/berlin/coinrun/\r\nslurm/jobs/alfred/berlin/coinrun/1m_steps/\r\nslurm/jobs/alfred/berlin/coinrun/1m_steps/dynamics.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/batch_size_scaling/\r\nslurm/jobs/alfred/berlin/coinrun/batch_size_scaling/dynamics_batch_size_scaling.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/batch_size_scaling/dynamics_batch_size_scaling_248.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/batch_size_scaling/spawner.sh\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_data/\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_data/generate_data_500k.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_data/generate_data_500mio.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/coinrun_data/generate_data_500mio_starpilot.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/lr_scaling/\r\nslurm/jobs/alfred/berlin/coinrun/lr_scaling/dynamics_lr_scaling.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/lr_scaling/spawner.sh\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/dynamics.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/lam.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/lam_wsd.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/tokenizer.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/dynamics.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/dynamics_debug.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/lam.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/tokenizer.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/tokenizer_patch_16.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/patch_size_lr/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/patch_size_lr/spawner.sh\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/patch_size_lr/tokenizer_patch_16.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/patch_size_lr/tokenizer_patch_16_base.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/patch_size_lr/tokenizer_patch_16_debug.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/patch_size_lr/tokenizer_patch_16_debug_ckpt_half.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/patch_size_lr/tokenizer_patch_16_full_prec_lr_0.01.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/patch_size_lr/tokenizer_patch_16_lr_0.01.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset_climber/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset_climber/dynamics.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset_climber/lam.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset_climber/tokenizer.sbatch\r\nslurm/jobs/al
fred/berlin/coinrun/overfitting_test/500m_dataset_climber/tokenizer_patch_16.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/\r\nslurm/jobs/alfred/berlin/coinrun/w_val/dynamics_baseline.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/dynamics_baseline_batch_100.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/dynamics_baseline_batch_100_full_frame.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/dynamics_baseline_batch_200.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/dynamics_baseline_debug.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/dynamics_baseline_eval.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/lam_baseline_ffn_2k_n_blocks_4.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/sample_og.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/sample_test.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/sample_test_200k.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/sample_train.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/tokenizer_baseline_10k_wsd.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/w_val/tokenizer_baseline_500mio_wsd.sbatch\r\nslurm/jobs/alfred/berlin/procgen/\r\nslurm/jobs/alfred/berlin/procgen/generate_data_500mio_climber.sbatch\r\nslurm/jobs/franz/berlin/\r\nslurm/jobs/franz/berlin/atari/\r\nslurm/jobs/franz/berlin/atari/atari_breakout_data_gen.sh\r\nslurm/jobs/franz/berlin/minatar/\r\nslurm/jobs/franz/berlin/minatar/minatar_breakout_data_gen.sh\r\nslurm/jobs/mihir/horeka/\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/causal/\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu._sh\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_50k.sh\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_50k_no_noise_aug.sh\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_gt_actions._sh\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_gt_actions_50k.sh\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_gt_actions_smaller_lr_50k._sh\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_no_noise_aug._sh\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/causal/train_dyn_single_gpu_smaller_lr_50k._sh\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu.sh\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/mixed_prec/\r\nslurm/jobs/mihir/horeka/breakout/noise_schedule_runs/mixed_prec/train_dyn_single_gpu_50k.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_lam_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh\r\nslurm/jobs/mihir/horeka/minecraft/\r\nslurm/jobs/mihir/horeka/minecraft/default_runs/\r\nslurm/jobs/mihir/horeka/minecraft/default_runs/train_lam_8_nodes.sbatch\r\nslurm/jobs/mihir/horeka/minecraft/default_runs/train_tokenizer_8_nodes.sbatch\r\nslurm/jobs/mihir/horeka/preprocessing/\r\nslurm/jobs/mihir/horeka/preprocessing/doom_chunked.sh\r\n",,terminal_output +920,2742288,"TERMINAL",0,0,"\r\nsent 316,114 bytes received 2,377 bytes 30,332.48 bytes/sec\r\ntotal size is 28,216,901 speedup is 88.60\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output 
+921,2744585,"TERMINAL",0,0,"runner-2",,terminal_command +922,2750067,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",0,0,"",shellscript,tab +923,2762231,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",,terminal_command +924,2762293,"TERMINAL",0,0,"]633;CSubmitted batch job 3532455\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine_jobs_2",,terminal_output +925,2779376,"TERMINAL",0,0,"dev",,terminal_command +926,2784204,"jasmine/models/dynamics.py",0,0,"",python,tab +927,2785751,"jasmine/models/dynamics.py",8439,9," )",python,selection_command +928,2785890,"jasmine/models/dynamics.py",8380,68," vid_embed_BTNM + noise_level_B111 * noise_BTNM\n )",python,selection_command +929,2786045,"jasmine/models/dynamics.py",8337,111," noise_augmented_vid_embed_BTNM = (\n vid_embed_BTNM + noise_level_B111 * noise_BTNM\n )",python,selection_command +930,2786239,"jasmine/models/dynamics.py",8345,0,"",python,selection_command +931,2786868,"jasmine/models/dynamics.py",8447,0,"#",python,content +932,2786868,"jasmine/models/dynamics.py",8392,0,"#",python,content +933,2786869,"jasmine/models/dynamics.py",8345,0,"#",python,content +934,2786870,"jasmine/models/dynamics.py",8346,0,"",python,selection_keyboard +935,2786938,"jasmine/models/dynamics.py",8450,0," ",python,content +936,2786938,"jasmine/models/dynamics.py",8394,0," ",python,content +937,2786939,"jasmine/models/dynamics.py",8346,0," ",python,content +938,2786939,"jasmine/models/dynamics.py",8347,0,"",python,selection_keyboard +939,2787234,"jasmine/models/dynamics.py",8346,0,"",python,selection_command +940,2787429,"jasmine/models/dynamics.py",8336,0,"",python,selection_command +941,2787561,"jasmine/models/dynamics.py",8333,0,"",python,selection_command +942,2788246,"jasmine/models/dynamics.py",8324,11," # )",python,selection_command +943,2788467,"jasmine/models/dynamics.py",8268,67," # + jnp.sqrt(noise_level_B111) * noise_BTNM\n # )",python,selection_command +944,2788579,"jasmine/models/dynamics.py",8206,129," # jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n # + jnp.sqrt(noise_level_B111) * noise_BTNM\n # )",python,selection_command +945,2788933,"jasmine/models/dynamics.py",8161,174," # noise_augmented_vid_embed_BTNM = (\n # jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n # + jnp.sqrt(noise_level_B111) * noise_BTNM\n # )",python,selection_command +946,2789214,"jasmine/models/dynamics.py",8169,0,"",python,selection_command +947,2790171,"jasmine/models/dynamics.py",8332,1,"",python,content +948,2790172,"jasmine/models/dynamics.py",8280,1,"",python,content +949,2790172,"jasmine/models/dynamics.py",8218,1,"",python,content +950,2790172,"jasmine/models/dynamics.py",8169,1,"",python,content +951,2790338,"jasmine/models/dynamics.py",8329,1,"",python,content +952,2790338,"jasmine/models/dynamics.py",8278,1,"",python,content +953,2790338,"jasmine/models/dynamics.py",8217,1,"",python,content +954,2790338,"jasmine/models/dynamics.py",8169,1,"",python,content +955,2790547,"jasmine/models/dynamics.py",8326,0," ",python,content +956,2790548,"jasmine/models/dynamics.py",8276,0," ",python,content +957,2790548,"jasmine/models/dynamics.py",8216,0," ",python,content +958,2790548,"jasmine/models/dynamics.py",8169,0," ",python,content +959,2790548,"jasmine/models/dynamics.py",8170,0,"",python,selection_keyboard +960,2791765,"jasmine/models/dynamics.py",8329,1,"",python,content +961,2791765,"jasmine/models/dynamics.py",8278,1,"",python,content 
+962,2791765,"jasmine/models/dynamics.py",8217,1,"",python,content +963,2791765,"jasmine/models/dynamics.py",8169,1,"",python,content +964,2791998,"jasmine/models/dynamics.py",8168,0,"",python,selection_command +965,2792478,"jasmine/models/dynamics.py",9406,0,"",python,selection_keyboard +966,2793376,"jasmine/models/dynamics.py",7372,0,"",python,selection_keyboard +967,2795262,"jasmine/models/dynamics.py",8181,0,"",python,selection_mouse +968,2796096,"jasmine/models/dynamics.py",8339,0,"",python,selection_command +969,2796401,"jasmine/models/dynamics.py",8463,0,"",python,selection_command +970,2796651,"jasmine/models/dynamics.py",3396,0,"",python,selection_command +971,2797235,"jasmine/models/dynamics.py",3569,0,"",python,selection_command +972,2798699,"jasmine/models/dynamics.py",3561,42," noise_augmented_vid_embed_BTNM = (",python,selection_command +973,2798910,"jasmine/models/dynamics.py",3561,101," noise_augmented_vid_embed_BTNM = (\n vid_embed_BTNM + noise_level_B111 * noise_BTNM",python,selection_command +974,2799239,"jasmine/models/dynamics.py",3561,111," noise_augmented_vid_embed_BTNM = (\n vid_embed_BTNM + noise_level_B111 * noise_BTNM\n )",python,selection_command +975,2799418,"jasmine/models/dynamics.py",3569,0,"",python,selection_command +976,2800158,"jasmine/models/dynamics.py",3671,0,"#",python,content +977,2800158,"jasmine/models/dynamics.py",3616,0,"#",python,content +978,2800158,"jasmine/models/dynamics.py",3569,0,"#",python,content +979,2800159,"jasmine/models/dynamics.py",3570,0,"",python,selection_keyboard +980,2800218,"jasmine/models/dynamics.py",3674,0," ",python,content +981,2800219,"jasmine/models/dynamics.py",3618,0," ",python,content +982,2800219,"jasmine/models/dynamics.py",3570,0," ",python,content +983,2800219,"jasmine/models/dynamics.py",3571,0,"",python,selection_keyboard +984,2800499,"jasmine/models/dynamics.py",3570,0,"",python,selection_command +985,2800634,"jasmine/models/dynamics.py",3558,0,"",python,selection_command +986,2801329,"jasmine/models/dynamics.py",3549,11," # )",python,selection_command +987,2801491,"jasmine/models/dynamics.py",3493,67," # + jnp.sqrt(noise_level_B111) * noise_BTNM\n # )",python,selection_command +988,2801628,"jasmine/models/dynamics.py",3431,129," # jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n # + jnp.sqrt(noise_level_B111) * noise_BTNM\n # )",python,selection_command +989,2801773,"jasmine/models/dynamics.py",3386,174," # noise_augmented_vid_embed_BTNM = (\n # jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n # + jnp.sqrt(noise_level_B111) * noise_BTNM\n # )",python,selection_command +990,2801988,"jasmine/models/dynamics.py",3394,0,"",python,selection_command +991,2802444,"jasmine/models/dynamics.py",3557,1,"",python,content +992,2802444,"jasmine/models/dynamics.py",3505,1,"",python,content +993,2802444,"jasmine/models/dynamics.py",3443,1,"",python,content +994,2802445,"jasmine/models/dynamics.py",3394,1,"",python,content +995,2802644,"jasmine/models/dynamics.py",3554,1,"",python,content +996,2802644,"jasmine/models/dynamics.py",3503,1,"",python,content +997,2802644,"jasmine/models/dynamics.py",3442,1,"",python,content +998,2802644,"jasmine/models/dynamics.py",3394,1,"",python,content +999,2802932,"jasmine/models/dynamics.py",3393,0,"",python,selection_command +1000,2804170,"jasmine/models/dynamics.py",3552,0,"",python,selection_mouse +1001,2804171,"jasmine/models/dynamics.py",3551,0,"",python,selection_command +1002,2804863,"jasmine/models/dynamics.py",3552,0,"\n ",python,content 
+1003,2804947,"jasmine/models/dynamics.py",3553,8,"",python,content +1004,2806075,"jasmine/models/dynamics.py",3584,0,"",python,selection_mouse +1005,2806645,"jasmine/models/dynamics.py",3671,0,"",python,selection_mouse +1006,2806647,"jasmine/models/dynamics.py",3670,0,"",python,selection_command +1007,2807199,"jasmine/models/dynamics.py",3633,0,"",python,selection_mouse +1008,2816276,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +1009,2817482,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default copy.sh",0,0,"",shellscript,tab +1010,2824142,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-sqrt-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit sqrt-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1011,2827348,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2134,0,"",shellscript,selection_mouse +1012,2827361,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2133,0,"",shellscript,selection_command 
+1013,2828141,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2134,0,"\n ",shellscript,content +1014,2828458,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2139,0,"-",shellscript,content +1015,2828459,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2140,0,"",shellscript,selection_keyboard +1016,2828568,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2140,0,"-",shellscript,content +1017,2828568,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2141,0,"",shellscript,selection_keyboard +1018,2828796,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2141,0,"m",shellscript,content +1019,2828797,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2142,0,"",shellscript,selection_keyboard +1020,2828902,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2142,0,"a",shellscript,content +1021,2828903,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2143,0,"",shellscript,selection_keyboard +1022,2829115,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2143,0,"x",shellscript,content +1023,2829116,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2144,0,"",shellscript,selection_keyboard +1024,2829309,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2144,0,"_",shellscript,content +1025,2829309,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2145,0,"",shellscript,selection_keyboard +1026,2831851,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2145,0,"n",shellscript,content +1027,2831852,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2146,0,"",shellscript,selection_keyboard +1028,2832119,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2146,0,"o",shellscript,content +1029,2832119,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2147,0,"",shellscript,selection_keyboard +1030,2832120,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2147,0,"i",shellscript,content +1031,2832120,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2148,0,"",shellscript,selection_keyboard +1032,2832344,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2148,0,"s",shellscript,content +1033,2832344,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2149,0,"",shellscript,selection_keyboard +1034,2832481,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2149,0,"e",shellscript,content +1035,2832482,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2150,0,"",shellscript,selection_keyboard +1036,2833203,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2150,0,"_",shellscript,content +1037,2833204,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2151,0,"",shellscript,selection_keyboard +1038,2834278,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2151,0,"l",shellscript,content +1039,2834278,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2152,0,"",shellscript,selection_keyboard +1040,2834379,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2152,0,"e",shellscript,content 
+1041,2834379,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2153,0,"",shellscript,selection_keyboard +1042,2834545,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2153,0,"v",shellscript,content +1043,2834546,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2154,0,"",shellscript,selection_keyboard +1044,2834640,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2154,0,"e",shellscript,content +1045,2834640,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2155,0,"",shellscript,selection_keyboard +1046,2834723,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2155,0,"l",shellscript,content +1047,2834724,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2156,0,"",shellscript,selection_keyboard +1048,2835702,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2156,0,"=",shellscript,content +1049,2835703,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2157,0,"",shellscript,selection_keyboard +1050,2836494,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2157,0,"0",shellscript,content +1051,2836495,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2158,0,"",shellscript,selection_keyboard +1052,2837244,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2158,0," ",shellscript,content +1053,2837244,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2159,0,"",shellscript,selection_keyboard +1054,2837551,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2159,0,"\",shellscript,content +1055,2837552,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2160,0,"",shellscript,selection_keyboard +1056,2837734,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2159,0,"",shellscript,selection_command +1057,2838643,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2154,0,"",shellscript,selection_mouse +1058,2838781,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2141,15,"max_noise_level",shellscript,selection_mouse +1059,2839851,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +1060,2839851,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2381,0,"",shellscript,selection_mouse +1061,2839859,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2380,0,"",shellscript,selection_command +1062,2840211,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2540,0,"",shellscript,selection_mouse +1063,2840216,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2539,0,"",shellscript,selection_command +1064,2840234,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2539,1,"&",shellscript,selection_mouse +1065,2840244,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2540,0,"",shellscript,selection_command +1066,2840594,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2555,0,"",shellscript,selection_mouse +1067,2841091,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2541,0,"",shellscript,selection_mouse +1068,2842184,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",0,0,"",shellscript,tab 
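The keystrokes above add a `--max_noise_level=0 \` flag to train_dyn_default-no-noise.sh. A minimal sketch of how such a flag could gate the augmentation, assuming it bounds a uniformly sampled per-batch noise level (the parameter name is from the recording; the sampling logic is an assumption):

    import jax.random as random

    def sample_noise_level_B111(rng, batch_size: int, max_noise_level: float):
        # Sketch only: with max_noise_level=0, minval == maxval and every
        # draw is 0.0, i.e. the noise augmentation is disabled entirely.
        return random.uniform(
            rng, (batch_size, 1, 1, 1), minval=0.0, maxval=max_noise_level
        )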
+1069,2842185,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2327,0,"",shellscript,selection_mouse +1070,2843021,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2352,0,"",shellscript,selection_mouse +1071,2847238,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2279,0,"",shellscript,selection_mouse +1072,2848142,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,0,"",shellscript,selection_command +1073,2848431,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"s",shellscript,selection_command +1074,2848666,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"s",shellscript,selection_command +1075,2849680,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,0,"",shellscript,selection_command +1076,2850577,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2344,1,"",shellscript,content +1077,2850578,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1078,2851063,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2343,1,"",shellscript,content +1079,2851063,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1080,2851118,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2342,1,"",shellscript,content +1081,2851118,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1082,2851118,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2341,1,"",shellscript,content +1083,2851119,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1084,2851174,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2340,1,"",shellscript,content +1085,2851175,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1086,2851175,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2339,1,"",shellscript,content +1087,2851175,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1088,2851292,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2338,1,"",shellscript,content +1089,2851292,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1090,2851348,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2337,1,"",shellscript,content +1091,2851350,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1092,2851487,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2336,1,"",shellscript,content +1093,2851487,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1094,2851641,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2335,1,"",shellscript,content +1095,2851642,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1096,2851781,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2334,1,"",shellscript,content 
+1097,2851782,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1098,2851918,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2333,1,"",shellscript,content +1099,2851919,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1100,2852294,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2332,1,"",shellscript,content +1101,2852294,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,1,"",shellscript,content +1102,2853049,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2331,0,"n",shellscript,content +1103,2853050,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2278,0,"n",shellscript,content +1104,2853050,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2279,0,"",shellscript,selection_keyboard +1105,2853314,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2333,0,"o",shellscript,content +1106,2853315,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2279,0,"o",shellscript,content +1107,2853315,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2280,0,"",shellscript,selection_keyboard +1108,2853885,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2335,0,"-",shellscript,content +1109,2853886,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2280,0,"-",shellscript,content +1110,2853886,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2281,0,"",shellscript,selection_keyboard +1111,2854215,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2337,0,"n",shellscript,content +1112,2854215,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2281,0,"n",shellscript,content +1113,2854215,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2282,0,"",shellscript,selection_keyboard +1114,2854417,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2339,0,"o",shellscript,content +1115,2854417,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2282,0,"o",shellscript,content +1116,2854418,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2283,0,"",shellscript,selection_keyboard +1117,2854480,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2341,0,"i",shellscript,content +1118,2854480,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2283,0,"i",shellscript,content +1119,2854481,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2284,0,"",shellscript,selection_keyboard +1120,2854538,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2343,0,"s",shellscript,content +1121,2854538,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2284,0,"s",shellscript,content +1122,2854539,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2285,0,"",shellscript,selection_keyboard +1123,2854705,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2345,0,"e",shellscript,content +1124,2854705,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2285,0,"e",shellscript,content 
+1125,2854706,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2286,0,"",shellscript,selection_keyboard +1126,2855008,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",2285,0,"",shellscript,selection_command +1127,2855950,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +1128,2855951,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2430,0,"",shellscript,selection_mouse +1129,2855952,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",2429,0,"",shellscript,selection_command +1130,2871412,"TERMINAL",0,0,"sync-runner",,terminal_command +1131,2871462,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +1132,2872765,"TERMINAL",0,0,"jasmine/models/dynamics.py\r\n",,terminal_output +1133,2872878,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/default_runs/\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\n",,terminal_output +1134,2872953,"TERMINAL",0,0,"\r\nsent 54,586 bytes received 324 bytes 21,964.00 bytes/sec\r\ntotal size is 28,219,485 speedup is 513.92\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1135,2884501,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh^C",,terminal_command +1136,2885668,"TERMINAL",0,0,"runner",,terminal_command +1137,2895581,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",,terminal_command +1138,2901004,"TERMINAL",0,0,"queue",,terminal_command +1139,2901055,"TERMINAL",0,0,"]633;C",,terminal_output +1140,2901132,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Tue Sep 30 11:46:35 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3532464 accelerat train_dy tum_cte0 PD\t0:00\t 1 (None)3532383 accelerat train_to tum_cte0 R\t4:48\t 1 hkn08073529698 accelerat train_to tum_cte0 R 15:10:42\t 1 hkn0429",,terminal_output +1141,2902260,"TERMINAL",0,0,"693",,terminal_output +1142,2903189,"TERMINAL",0,0,"7504",,terminal_output +1143,2904318,"TERMINAL",0,0,"815",,terminal_output +1144,2905273,"TERMINAL",0,0,"926",,terminal_output +1145,2906300,"TERMINAL",0,0,"4037",,terminal_output +1146,2907336,"TERMINAL",0,0,"148",,terminal_output +1147,2907957,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine_jobs",,terminal_output +1148,2909505,"TERMINAL",0,0,"logs",,terminal_command +1149,2912073,"TERMINAL",0,0,"fsacct_week",,terminal_command +1150,2912115,"TERMINAL",0,0,"]633;C",,terminal_output +1151,2912153,"TERMINAL",0,0," JobID JobName Partition All State Elapsed Timelimit \r\n--------------- ------------------------------ ---------------- --- ------------ ---------- ---------- \r\n 3510986 preprocess_breakout_chunked large 16 COMPLETED 08:37:35 10:00:00 \r\n 3511158 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 01:35:37 2-00:00:00 \r\n 3511159 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 01:21:46 2-00:00:00 \r\n 3512582 preprocess_breakout_chunked large 16 COMPLETED 00:13:05 15:00:00 \r\n 3512651 preprocess_breakout_chunked large 16 COMPLETED 02:39:18 15:00:00 \r\n 3519480 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:33:34 06:00:00 \r\n 3519481 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 00:15:05 06:00:00 \r\n 3519482 
train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 00:15:19 06:00:00 \r\n 3519483 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:31:59 06:00:00 \r\n 3519529 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:00:39 06:00:00 \r\n 3519530 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:02:38 06:00:00 \r\n 3519697 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:43:11 06:00:00 \r\n 3519698 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:16:16 06:00:00 \r\n 3519699 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:00:49 2-00:00:00 \r\n 3519700 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:14:55 06:00:00 \r\n 3519701 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:23:08 2-00:00:00 \r\n 3519702 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:44:14 06:00:00 \r\n 3520773 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:45:03 06:00:00 \r\n 3520774 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:12:37 06:00:00 \r\n 3520775 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:00:41 2-00:00:00 \r\n 3520776 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:10:49 06:00:00 \r\n 3520777 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:23:19 2-00:00:00 \r\n 3520778 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:44:00 06:00:00 \r\n 3520794 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:16:03 2-00:00:00 \r\n 3527001 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 03:13:01 06:00:00 \r\n 3527002 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:42:18 06:00:00 \r\n 3527003 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:19:39 2-00:00:00 \r\n 3527004 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:40:40 06:00:00 \r\n 3527005 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:28:30 2-00:00:00 \r\n 3527006 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 03:16:07 06:00:00 \r\n 3527025 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:45:34 06:00:00 \r\n 3527026 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:18:17 06:00:00 \r\n 3527027 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:16:10 2-00:00:00 \r\n 3527028 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:17:17 06:00:00 \r\n 3527029 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:23:26 2-00:00:00 \r\n 3527030 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:47:48 06:00:00 \r\n 3527671 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:44:13 06:00:00 \r\n 3527672 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:13:30 06:00:00 \r\n 3527673 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:15:51 06:00:00 \r\n 3527674 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:09:56 06:00:00 \r\n 3527675 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:23:08 06:00:00 \r\n 3527676 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:42:07 06:00:00 \r\n 3527891 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:43:00 2-00:00:00 \r\n 3527904 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:44:10 2-00:00:00 \r\n 3527905 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:42:41 06:00:00 \r\n 3527920 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:44:06 06:00:00 \r\n 3527927 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 04:18:29 06:00:00 \r\n 3528099 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:46:19 06:00:00 \r\n 3528100 
train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:07:50 06:00:00 \r\n 3528101 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:15:39 06:00:00 \r\n 3528102 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:06:52 06:00:00 \r\n 3528104 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:20:44 2-00:00:00 \r\n 3528105 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:44:26 06:00:00 \r\n 3528127 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:01:01 2-00:00:00 \r\n 3528128 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:01:05 2-00:00:00 \r\n 3528129 train_dyn_default_gt_actions_+ accelerated 6 FAILED 00:01:18 2-00:00:00 \r\n 3528130 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:01:16 2-00:00:00 \r\n 3528131 train_dyn_default_gt_actions_+ accelerated 6 FAILED 00:01:25 2-00:00:00 \r\n 3528132 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:01:15 2-00:00:00 \r\n 3528133 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:01:15 2-00:00:00 \r\n 3528134 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:01:08 2-00:00:00 \r\n 3528250 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:00:40 2-00:00:00 \r\n 3528251 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:00:40 2-00:00:00 \r\n 3528252 train_dyn_default_gt_actions_+ accelerated 6 FAILED 00:00:40 2-00:00:00 \r\n 3528253 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:14:45 2-00:00:00 \r\n 3528254 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 01:54:41 2-00:00:00 \r\n 3528255 train_dyn_default_breakout_lo+ accelerated 6 FAILED 00:06:02 2-00:00:00 \r\n 3528256 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:19:54 2-00:00:00 \r\n 3528257 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:33:41 2-00:00:00 \r\n 3528755 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 00:19:46 2-00:00:00 \r\n 3528764 train_dyn_default_gt_actions_+ accelerated 6 COMPLETED 02:05:28 2-00:00:00 \r\n 3528765 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:36:14 2-00:00:00 \r\n 3528766 train_dyn_default_breakout_lo+ accelerated 6 COMPLETED 02:35:05 2-00:00:00 \r\n 3528908 train_tokenizer_default accelerated 24 FAILED 00:02:33 2-00:00:00 \r\n 3528910 train_lam_default accelerated 24 FAILED 00:02:33 2-00:00:00 \r\n 3528936 train_tokenizer_default accelerated 24 FAILED 00:02:33 2-00:00:00 \r\n 3528955 train_tokenizer_default accelerated 6 COMPLETED 14:42:01 2-00:00:00 \r\n 3529697 train_lam_default accelerated 6 COMPLETED 08:38:21 2-00:00:00 \r\n 3529698 train_tokenizer_default accelerated 6 RUNNING 15:10:53 2-00:00:00 \r\n 3532383 train_tokenizer_default accelerated 6 RUNNING 00:04:59 2-00:00:00 \r\n 3532384 train_dynamics_maskgit accelerated 24 FAILED 00:00:45 2-00:00:00 \r\n 3532455 train_dynamics_maskgit accelerated 24 FAILED 00:00:56 2-00:00:00 \r\n 3532464 train_dynamics_maskgit accelerated 0 PENDING 00:00:00 2-00:00:00 \r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir",,terminal_output +1152,2925659,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",580,0,"",shellscript,selection_command +1153,2930579,"TERMINAL",0,0,"cd coinrun/dynamics/",,terminal_command +1154,2930803,"TERMINAL",0,0,"ls",,terminal_command +1155,2930813,"TERMINAL",0,0,"]633;Ccausal maskgit\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics",,terminal_output +1156,2932520,"TERMINAL",0,0,"cd maskgit/",,terminal_command 
+1157,2932791,"TERMINAL",0,0,"ls",,terminal_command +1158,2932842,"TERMINAL",0,0,"]633;C",,terminal_output +1159,2932960,"TERMINAL",0,0,"train_dynamics_maskgit_1_node_3415111.log train_dynamics_maskgit_1_node_3415713.log train_dynamics_maskgit_3532378.log train_dynamics_maskgit_3532455.log\r\ntrain_dynamics_maskgit_1_node_3415691.log train_dynamics_maskgit_1_node_3422008.log train_dynamics_maskgit_3532384.log\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit",,terminal_output +1160,2935524,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid/var/spool/slurmd/job3532455/slurm_script: line 42: .venv/bin/activate: No such file or 
directory\nSLURM_JOB_USER=tum_cte0515\nSLURM_TASKS_PER_NODE=4\nSLURM_JOB_UID=999226\nSLURM_TASK_PID=640435\nSLURM_JOB_GPUS=0,1,2,3\nSLURM_LOCALID=0\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine_jobs_2\nSLURMD_NODENAME=hkn0730\nSLURM_JOB_START_TIME=1759225487\nSLURM_CLUSTER_NAME=hk\nSLURM_JOB_END_TIME=1759398287\nSLURM_CPUS_ON_NODE=24\nSLURM_JOB_CPUS_PER_NODE=24\nSLURM_GPUS_ON_NODE=4\nSLURM_GTIDS=0\nSLURM_JOB_PARTITION=accelerated\nSLURM_TRES_PER_TASK=cpu=5\nSLURM_OOM_KILL_STEP=0\nSLURM_JOB_NUM_NODES=1\nSLURM_JOBID=3532455\nSLURM_JOB_QOS=normal\nSLURM_PROCID=0\nSLURM_CPUS_PER_TASK=5\nSLURM_NTASKS=4\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e9.hkn0730\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\nSLURM_SCRIPT_CONTEXT=prolog_task\nSLURM_NODELIST=hkn0730\nSLURM_JOB_ACCOUNT=hk-project-p0023960\nSLURM_PRIO_PROCESS=0\nSLURM_NPROCS=4\nSLURM_NNODES=1\nSLURM_SUBMIT_HOST=hkn1990.localdomain\nSLURM_JOB_ID=3532455\nSLURM_NODEID=0\nSLURM_CONF=/etc/slurm/slurm.conf\nSLURM_JOB_NAME=train_dynamics_maskgit\nSLURM_NTASKS_PER_NODE=4\nSLURM_JOB_GID=502226\nSLURM_JOB_NODELIST=hkn0730\nGpuFreq=control_disabled\nRunning on 4 devices.\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine_jobs_2/jasmine/train_dynamics.py"", line 790, in \n main(args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine_jobs_2/jasmine/train_dynamics.py"", line 386, in main\n raise ValueError(\nValueError: Global batch size 110 must be divisible by number of devices 4.\nRunning on 4 devices.\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine_jobs_2/jasmine/train_dynamics.py"", line 790, in \n main(args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine_jobs_2/jasmine/train_dynamics.py"", line 386, in main\n raise ValueError(\nValueError: Global batch size 110 must be divisible by number of devices 4.\nRunning on 4 devices.\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine_jobs_2/jasmine/train_dynamics.py"", line 790, in \n main(args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine_jobs_2/jasmine/train_dynamics.py"", line 386, in main\n raise ValueError(\nValueError: Global batch size 110 must be divisible by number of devices 4.\nRunning on 4 devices.\nTraceback (most recent call last):\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine_jobs_2/jasmine/train_dynamics.py"", line 790, in \n main(args)\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine_jobs_2/jasmine/train_dynamics.py"", line 386, in main\n raise ValueError(\nValueError: Global batch size 110 must be divisible by number of devices 4.\nW0930 11:45:41.948738 640775 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.170:63143: Failed to connect to remote host: Connection refused\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.170:63143: Failed to connect to remote host: Connection refused""}\nW0930 11:45:41.948687 640777 pjrt_client.cc:1469] WatchJobStateAsync 
failed for task goo.gle/debugproto job_name: ""jax_worker"" task_id: 1: CANCELLED: CANCELLED\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\n:UNKNOWN:Error received from peer {grpc_message:""CANCELLED"", grpc_status:1} [type.googleapis.com/tensorflow.DerivedStatus='']\nW0930 11:45:41.948734 640778 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugstr job_name: ""jax_worker"" task_id: 2: UNAVAILABLE: failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.170:63143: Failed to connect to remote host: Connection refused\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.170:63143: Failed to connect to remote host: Connection refused""}\nW0930 11:45:41.948996 640776 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugonly job_name: ""jax_worker"" task_id: 3: UNAVAILABLE: failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.170:63143: Failed to connect to remote host: Connection refused\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\n:UNKNOWN:Error received from peer {grpc_message:""failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.170:63143: Failed to connect to remote host: Connection refused"", grpc_status:14}\nsrun: error: hkn0730: tasks 1-2: Exited with exit code 1\nsrun: error: hkn0730: task 3: Exited with exit code 1\nsrun: error: hkn0730: task 0: Exited with exit code 1\n\n============================= JOB FEEDBACK =============================\n\nJob ID: 3532455\nCluster: hk\nUser/Group: tum_cte0515/hk-project-p0023960\nAccount: hk-project-p0023960\nState: FAILED (exit code 1)\nPartition: accelerated\nNodes: 1\nCores per node: 24\nNodelist: hkn0730\nCPU Utilized: 00:00:19\nCPU Efficiency: 1.41% of 00:22:24 core-walltime\nJob Wall-clock time: 00:00:56\nStarttime: Tue Sep 30 11:44:47 2025\nEndtime: Tue Sep 30 11:45:43 2025\nMemory Utilized: 1.24 GB\nMemory Efficiency: 0.26% of 485.84 GB (485.84 GB/node)\nThe task which had the largest memory consumption differs by 103.88% from the average task max memory consumption\nEnergy Consumed: 89458 Joule / 24.8494444444444 Watthours\nAverage node power draw: 1597.46428571429 Watt\n",log,tab +1161,2937095,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",1278,0,"",log,selection_mouse +1162,2937671,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",8680,0,"",log,selection_command +1163,2944686,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4572,0,"",log,selection_mouse +1164,2944836,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4568,5,"batch",log,selection_mouse +1165,2945015,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4568,6,"batch ",log,selection_mouse 
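The error selected above comes from a device-divisibility check in train_dynamics.py; the edits that follow drop the job to one task and one GPU per node instead of changing the batch size. A minimal sketch of such a check, assuming the split uses jax.device_count():

    import jax

    def per_device_batch_size(global_batch_size: int) -> int:
        # Raises the kind of error captured in the log above when the
        # global batch (110) does not divide across the devices (4).
        num_devices = jax.device_count()
        if global_batch_size % num_devices != 0:
            raise ValueError(
                f"Global batch size {global_batch_size} must be divisible "
                f"by number of devices {num_devices}."
            )
        return global_batch_size // num_devices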
+1166,2945035,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4568,10,"batch size",log,selection_mouse +1167,2945052,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4568,14,"batch size 110",log,selection_mouse +1168,2945107,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4568,22,"batch size 110 must be",log,selection_mouse +1169,2945108,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4568,32,"batch size 110 must be divisible",log,selection_mouse +1170,2945116,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4548,25,"\nValueError: Global batch",log,selection_mouse +1171,2945378,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4548,0,"",log,selection_mouse +1172,2945382,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4547,0,"",log,selection_command +1173,2945516,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4548,0,"",log,selection_mouse +1174,2945517,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/train_dynamics_maskgit_3532455.log",4547,0,"",log,selection_command +1175,2948894,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +1176,2948895,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",169,0,"",shellscript,selection_mouse +1177,2948901,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",168,0,"",shellscript,selection_command +1178,2949393,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",169,0,"",shellscript,selection_command +1179,2949716,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",168,1,"",shellscript,content +1180,2950042,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",168,0,"1",shellscript,content +1181,2950042,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",169,0,"",shellscript,selection_keyboard +1182,2950465,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",143,0,"",shellscript,selection_command +1183,2951152,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",111,0,"",shellscript,selection_command +1184,2951284,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",87,0,"",shellscript,selection_command +1185,2951518,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",59,0,"",shellscript,selection_command +1186,2952300,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",65,1,"",shellscript,content +1187,2952412,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",65,0,"1",shellscript,content +1188,2952412,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",66,0,"",shellscript,selection_keyboard +1189,2952913,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",65,0,"",shellscript,selection_command 
+1190,2955361,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"",shellscript,tab +1191,2959313,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",0,0,"",shellscript,tab +1192,2960816,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",66,0,"",shellscript,selection_mouse +1193,2960829,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",65,0,"",shellscript,selection_command +1194,2961057,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",66,0,"",shellscript,selection_command +1195,2961190,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",65,1,"",shellscript,content +1196,2961389,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",65,0,"1",shellscript,content +1197,2961389,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",66,0,"",shellscript,selection_keyboard +1198,2961696,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",90,0,"",shellscript,selection_command +1199,2961853,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",114,0,"",shellscript,selection_command +1200,2961996,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",146,0,"",shellscript,selection_command +1201,2962146,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",169,0,"",shellscript,selection_command +1202,2962468,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",168,1,"",shellscript,content +1203,2962572,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",168,0,"1",shellscript,content +1204,2962573,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",169,0,"",shellscript,selection_keyboard +1205,2964239,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",0,0,"",shellscript,tab +1206,2966316,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",66,0,"",shellscript,selection_mouse +1207,2966316,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",65,0,"",shellscript,selection_command +1208,2966722,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",66,0,"",shellscript,selection_command +1209,2966890,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",65,1,"",shellscript,content +1210,2966989,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",65,0,"1",shellscript,content +1211,2966990,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",66,0,"",shellscript,selection_keyboard +1212,2967339,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",90,0,"",shellscript,selection_command +1213,2967543,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",114,0,"",shellscript,selection_command +1214,2967608,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",146,0,"",shellscript,selection_command +1215,2967745,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",169,0,"",shellscript,selection_command +1216,2967999,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",168,1,"",shellscript,content +1217,2968071,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",168,0,"1",shellscript,content 
+1218,2968072,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",169,0,"",shellscript,selection_keyboard +1219,2977034,"TERMINAL",0,0,"sync-runner",,terminal_command +1220,2977085,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +1221,2977195,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\n",,terminal_output +1222,2977208,"TERMINAL",0,0,"\r\nsent 45,131 bytes received 298 bytes 90,858.00 bytes/sec\r\ntotal size is 28,219,485 speedup is 621.18\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit",,terminal_output +1223,2987369,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",,terminal_command +1224,2994556,"TERMINAL",0,0,"sync-runner",,terminal_command +1225,2994598,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +1226,2994690,"TERMINAL",0,0,"\r\nsent 37,279 bytes received 235 bytes 25,009.33 bytes/sec\r\ntotal size is 28,219,485 speedup is 752.24\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit",,terminal_output +1227,2995447,"TERMINAL",0,0,"runner",,terminal_command +1228,2999384,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",,terminal_command +1229,2999422,"TERMINAL",0,0,"]633;C",,terminal_output +1230,2999435,"TERMINAL",0,0,"Submitted batch job 3532465\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine_jobs",,terminal_output +1231,3004409,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh",,terminal_command +1232,3006247,"TERMINAL",0,0,"dev",,terminal_command +1233,3007821,"jasmine/train_dynamics.py",0,0,"",python,tab +1234,3008814,"jasmine/models/dynamics.py",0,0,"",python,tab +1235,3010461,"jasmine/models/dynamics.py",3670,0,"",python,selection_command +1236,3011139,"jasmine/models/dynamics.py",3660,11," # )",python,selection_command +1237,3011352,"jasmine/models/dynamics.py",3599,72," # vid_embed_BTNM + noise_level_B111 * noise_BTNM\n # )",python,selection_command +1238,3011482,"jasmine/models/dynamics.py",3554,117," # noise_augmented_vid_embed_BTNM = (\n # vid_embed_BTNM + noise_level_B111 * noise_BTNM\n # )",python,selection_command +1239,3011793,"jasmine/models/dynamics.py",3562,0,"",python,selection_command +1240,3012204,"jasmine/models/dynamics.py",3668,1,"",python,content +1241,3012205,"jasmine/models/dynamics.py",3611,1,"",python,content +1242,3012205,"jasmine/models/dynamics.py",3562,1,"",python,content +1243,3012368,"jasmine/models/dynamics.py",3666,1,"",python,content +1244,3012369,"jasmine/models/dynamics.py",3610,1,"",python,content +1245,3012369,"jasmine/models/dynamics.py",3562,1,"",python,content +1246,3012687,"jasmine/models/dynamics.py",3561,0,"",python,selection_command +1247,3012779,"jasmine/models/dynamics.py",3553,0,"",python,selection_command +1248,3012895,"jasmine/models/dynamics.py",3550,0,"",python,selection_command +1249,3013486,"jasmine/models/dynamics.py",3543,9," )",python,selection_command +1250,3013689,"jasmine/models/dynamics.py",3489,63," + jnp.sqrt(noise_level_B111) * noise_BTNM\n )",python,selection_command +1251,3013815,"jasmine/models/dynamics.py",3429,123," jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + 
jnp.sqrt(noise_level_B111) * noise_BTNM\n )",python,selection_command +1252,3013942,"jasmine/models/dynamics.py",3386,166," noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )",python,selection_command +1253,3014179,"jasmine/models/dynamics.py",3394,0,"",python,selection_command +1254,3014574,"jasmine/models/dynamics.py",3551,1,"",python,content +1255,3014574,"jasmine/models/dynamics.py",3501,1,"",python,content +1256,3014574,"jasmine/models/dynamics.py",3441,1,"",python,content +1257,3014574,"jasmine/models/dynamics.py",3394,1,"",python,content +1258,3015737,"jasmine/models/dynamics.py",3548,0,")",python,content +1259,3015738,"jasmine/models/dynamics.py",3499,0,"+",python,content +1260,3015738,"jasmine/models/dynamics.py",3440,0,"j",python,content +1261,3015738,"jasmine/models/dynamics.py",3394,0,"n",python,content +1262,3016901,"jasmine/models/dynamics.py",3551,0,"#",python,content +1263,3016902,"jasmine/models/dynamics.py",3501,0,"#",python,content +1264,3016902,"jasmine/models/dynamics.py",3441,0,"#",python,content +1265,3016902,"jasmine/models/dynamics.py",3394,0,"#",python,content +1266,3016903,"jasmine/models/dynamics.py",3395,0,"",python,selection_keyboard +1267,3017868,"jasmine/models/dynamics.py",3394,0,"",python,selection_command +1268,3019496,"jasmine/models/dynamics.py",3395,0,"",python,selection_command +1269,3020036,"jasmine/models/dynamics.py",3566,0,"",python,selection_command +1270,3020345,"jasmine/models/dynamics.py",3686,0,"",python,selection_command +1271,3020621,"jasmine/models/dynamics.py",8166,0,"",python,selection_command +1272,3022001,"jasmine/models/dynamics.py",8158,42," noise_augmented_vid_embed_BTNM = (",python,selection_command +1273,3022192,"jasmine/models/dynamics.py",8158,102," noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM",python,selection_command +1274,3022319,"jasmine/models/dynamics.py",8158,156," noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM",python,selection_command +1275,3022457,"jasmine/models/dynamics.py",8158,166," noise_augmented_vid_embed_BTNM = (\n jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM\n + jnp.sqrt(noise_level_B111) * noise_BTNM\n )",python,selection_command +1276,3022678,"jasmine/models/dynamics.py",8166,0,"",python,selection_command +1277,3023432,"jasmine/models/dynamics.py",8323,0,"#",python,content +1278,3023432,"jasmine/models/dynamics.py",8273,0,"#",python,content +1279,3023432,"jasmine/models/dynamics.py",8213,0,"#",python,content +1280,3023433,"jasmine/models/dynamics.py",8166,0,"#",python,content +1281,3023433,"jasmine/models/dynamics.py",8167,0,"",python,selection_keyboard +1282,3023488,"jasmine/models/dynamics.py",8327,0," ",python,content +1283,3023489,"jasmine/models/dynamics.py",8276,0," ",python,content +1284,3023489,"jasmine/models/dynamics.py",8215,0," ",python,content +1285,3023489,"jasmine/models/dynamics.py",8167,0," ",python,content +1286,3023490,"jasmine/models/dynamics.py",8168,0,"",python,selection_keyboard +1287,3023819,"jasmine/models/dynamics.py",8167,0,"",python,selection_command +1288,3023949,"jasmine/models/dynamics.py",8212,0,"",python,selection_command +1289,3024113,"jasmine/models/dynamics.py",8274,0,"",python,selection_command +1290,3024278,"jasmine/models/dynamics.py",8330,0,"",python,selection_command +1291,3024440,"jasmine/models/dynamics.py",8333,0,"",python,selection_command 
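The edits above restore the square-root (variance-preserving) form of the noise augmentation in jasmine/models/dynamics.py and comment the additive form back out. Both variants, reconstructed from the recorded selections (names and shapes as they appear in the file):

    import jax.numpy as jnp

    def augment_sqrt(vid_embed_BTNM, noise_BTNM, noise_level_B111):
        # Variance-preserving schedule: the form being uncommented here.
        return (
            jnp.sqrt(1 - noise_level_B111) * vid_embed_BTNM
            + jnp.sqrt(noise_level_B111) * noise_BTNM
        )

    def augment_linear(vid_embed_BTNM, noise_BTNM, noise_level_B111):
        # Additive variant: the form being commented back out.
        return vid_embed_BTNM + noise_level_B111 * noise_BTNM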
+1292,3024574,"jasmine/models/dynamics.py",8343,0,"",python,selection_command +1293,3025151,"jasmine/models/dynamics.py",8334,44," # noise_augmented_vid_embed_BTNM = (",python,selection_command +1294,3025356,"jasmine/models/dynamics.py",8334,105," # noise_augmented_vid_embed_BTNM = (\n # vid_embed_BTNM + noise_level_B111 * noise_BTNM",python,selection_command +1295,3025525,"jasmine/models/dynamics.py",8334,117," # noise_augmented_vid_embed_BTNM = (\n # vid_embed_BTNM + noise_level_B111 * noise_BTNM\n # )",python,selection_command +1296,3025818,"jasmine/models/dynamics.py",8342,0,"",python,selection_command +1297,3026197,"jasmine/models/dynamics.py",8448,1,"",python,content +1298,3026197,"jasmine/models/dynamics.py",8391,1,"",python,content +1299,3026197,"jasmine/models/dynamics.py",8342,1,"",python,content +1300,3026352,"jasmine/models/dynamics.py",8446,1,"",python,content +1301,3026352,"jasmine/models/dynamics.py",8390,1,"",python,content +1302,3026353,"jasmine/models/dynamics.py",8342,1,"",python,content +1303,3026525,"jasmine/models/dynamics.py",8341,0,"",python,selection_command +1304,3029870,"TERMINAL",0,0,"sync-runner-2",,terminal_command +1305,3029922,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +1306,3030251,"TERMINAL",0,0,"jasmine/models/dynamics.py\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-no-noise.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\n\r\nsent 54,586 bytes received 324 bytes 109,820.00 bytes/sec\r\ntotal size is 28,219,485 speedup is 513.92\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1307,3036611,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",,terminal_command +1308,3036644,"TERMINAL",0,0,"]633;CSubmitted batch job 3532467\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1309,3042519,"TERMINAL",0,0,"scancel 3532467",,terminal_command +1310,3046522,"TERMINAL",0,0,"sync-runner-2",,terminal_command +1311,3046586,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +1312,3046657,"TERMINAL",0,0,"\r\nsent 37,279 bytes received 235 bytes 75,028.00 bytes/sec\r\ntotal size is 28,219,485 speedup is 752.24\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +1313,3048431,"TERMINAL",0,0,"runner-2",,terminal_command +1314,3050661,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default-alpha-abl.sh",,terminal_command +1315,3050667,"TERMINAL",0,0,"]633;CSubmitted batch job 3532468\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine_jobs_2",,terminal_output +1316,3053207,"TERMINAL",0,0,"queue",,terminal_command +1317,3053293,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Tue Sep 30 11:49:07 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3532468 accelerat train_dy tum_cte0 PD\t0:00\t 1 (None)3532465 accelerat train_dy tum_cte0 R\t0:20\t 1 hkn05073532466 accelerat train_dy tum_cte0 R\t0:20\t 1 hkn05073532383 accelerat train_to tum_cte0 R\t7:20\t 1 hkn08073529698 accelerat train_to tum_cte0 R 15:13:14\t 1 hkn0429",,terminal_output +1318,3054323,"TERMINAL",0,0,"81115",,terminal_output +1319,3055352,"TERMINAL",0,0,"92226",,terminal_output +1320,3056474,"TERMINAL",0,0,"103337",,terminal_output +1321,3057498,"TERMINAL",0,0,"14448",,terminal_output +1322,3058520,"TERMINAL",0,0,"25559",,terminal_output 
+1323,3059510,"TERMINAL",0,0,"366620",,terminal_output +1324,3060549,"TERMINAL",0,0,"47771",,terminal_output +1325,3061586,"TERMINAL",0,0,"58882",,terminal_output +1326,3062842,"TERMINAL",0,0,"63030304",,terminal_output +1327,3063664,"TERMINAL",0,0,"81115",,terminal_output +1328,3064713,"TERMINAL",0,0,"92226",,terminal_output +1329,3065791,"TERMINAL",0,0,"203337",,terminal_output +1330,3066794,"TERMINAL",0,0,"14448",,terminal_output +1331,3067850,"TERMINAL",0,0,"25559",,terminal_output +1332,3068888,"TERMINAL",0,0,"366630",,terminal_output +1333,3069989,"TERMINAL",0,0,"47771",,terminal_output +1334,3071013,"TERMINAL",0,0,"58882",,terminal_output +1335,3072018,"TERMINAL",0,0,"69993",,terminal_output +1336,3073063,"TERMINAL",0,0,"74040404",,terminal_output +1337,3074188,"TERMINAL",0,0,"81115",,terminal_output +1338,3075153,"TERMINAL",0,0,"92226",,terminal_output +1339,3076215,"TERMINAL",0,0,"30 Rhkn08073337",,terminal_output +1340,3077262,"TERMINAL",0,0,"114448",,terminal_output +1341,3078385,"TERMINAL",0,0,"225559",,terminal_output +1342,3079421,"TERMINAL",0,0,"3366640",,terminal_output +1343,3080434,"TERMINAL",0,0,"447771",,terminal_output +1344,3081428,"TERMINAL",0,0,"558882",,terminal_output +1345,3082466,"TERMINAL",0,0,"669993",,terminal_output +1346,3083511,"TERMINAL",0,0,"775050504",,terminal_output +1347,3084555,"TERMINAL",0,0,"881115",,terminal_output +1348,3085618,"TERMINAL",0,0,"9103337",,terminal_output +1349,3086662,"TERMINAL",0,0,"4114448",,terminal_output +1350,3087807,"TERMINAL",0,0,"225559",,terminal_output +1351,3088739,"TERMINAL",0,0,"3366650",,terminal_output +1352,3089801,"TERMINAL",0,0,"447771",,terminal_output +1353,3090879,"TERMINAL",0,0,"558882",,terminal_output +1354,3091588,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine_jobs_2",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-066da5ef-4ba0-4726-8184-5dc4ef5e75811751465994612-2025_07_02-16.20.42.177/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-066da5ef-4ba0-4726-8184-5dc4ef5e75811751465994612-2025_07_02-16.20.42.177/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..4c84451a1428d218f94309b3ba680aa84fc22a06 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-066da5ef-4ba0-4726-8184-5dc4ef5e75811751465994612-2025_07_02-16.20.42.177/source.csv @@ -0,0 +1,358 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,241,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"4:20:42 PM [info] Activating crowd-code\n4:20:42 PM [info] Recording started\n4:20:42 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,825,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"4:20:42 PM [info] Git repository found\n4:20:42 PM [info] Git provider initialized successfully\n",Log,content +4,1121,"extension-output-pdoom-org.crowd-code-#1-crowd-code",245,0,"4:20:43 PM [info] Initial git state: [object Object]\n",Log,content +5,3223,"TERMINAL",0,0,"idling",,terminal_command +6,3312,"TERMINAL",0,0,"]633;E;2025-07-02 16:20:45 idling;26cd839c-476e-4913-967a-1422bf7b3816]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1993.localdomain: Wed Jul 2 16:20:45 2025Partition dev_cpuonly:\t 9 nodes idle\rPartition cpuonly: 69 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes 
idle\rPartition large:\t 6 nodes idle",,terminal_output +7,3863,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +8,3899,"TERMINAL",0,0,"]633;E;2025-07-02 16:20:45 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;14446405-24c7-42a5-a6ff-f36b2c7c204e]633;C",,terminal_output +9,3923,"TERMINAL",0,0,"]0;tum_cte0515@hkn1993:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +10,4353,"TERMINAL",0,0,"6101",,terminal_output +11,5398,"TERMINAL",0,0,"7\t ",,terminal_output +12,5843,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +13,11230,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +14,15010,"TERMINAL",0,0,"queue",,terminal_command +15,15052,"TERMINAL",0,0,"]633;E;2025-07-02 16:20:57 queue;26cd839c-476e-4913-967a-1422bf7b3816]633;C",,terminal_output +16,15145,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Wed Jul 2 16:20:57 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3311671 accelerat train_to tum_cte0 R 1:56:41\t 1 hkn0717",,terminal_output +17,16168,"TERMINAL",0,0,"82",,terminal_output +18,17176,"TERMINAL",0,0,"93",,terminal_output +19,18229,"TERMINAL",0,0,"1:004",,terminal_output +20,19280,"TERMINAL",0,0,"15",,terminal_output +21,20325,"TERMINAL",0,0,"26",,terminal_output +22,21375,"TERMINAL",0,0,"37",,terminal_output +23,22381,"TERMINAL",0,0,"48",,terminal_output +24,23415,"TERMINAL",0,0,"59",,terminal_output +25,24458,"TERMINAL",0,0,"650",,terminal_output +26,25500,"TERMINAL",0,0,"71",,terminal_output +27,26554,"TERMINAL",0,0,"82",,terminal_output +28,27638,"TERMINAL",0,0,"93",,terminal_output +29,28771,"TERMINAL",0,0,"104",,terminal_output +30,29687,"TERMINAL",0,0,"15",,terminal_output +31,30801,"TERMINAL",0,0,"26",,terminal_output +32,31799,"TERMINAL",0,0,"37",,terminal_output +33,32842,"TERMINAL",0,0,"48",,terminal_output +34,33915,"TERMINAL",0,0,"59",,terminal_output +35,34938,"TERMINAL",0,0,"67:01",,terminal_output +36,35991,"TERMINAL",0,0,"82",,terminal_output +37,37044,"TERMINAL",0,0,"93",,terminal_output +38,38084,"TERMINAL",0,0,"204",,terminal_output +39,39136,"TERMINAL",0,0,"15",,terminal_output +40,40175,"TERMINAL",0,0,"26",,terminal_output +41,41228,"TERMINAL",0,0,"37",,terminal_output +42,42316,"TERMINAL",0,0,"48",,terminal_output +43,43362,"TERMINAL",0,0,"59",,terminal_output +44,44399,"TERMINAL",0,0,"610",,terminal_output +45,45439,"TERMINAL",0,0,"71",,terminal_output +46,46484,"TERMINAL",0,0,"82",,terminal_output +47,47524,"TERMINAL",0,0,"93",,terminal_output +48,48558,"TERMINAL",0,0,"304",,terminal_output +49,49609,"TERMINAL",0,0,"15",,terminal_output +50,50628,"TERMINAL",0,0,"26",,terminal_output +51,51687,"TERMINAL",0,0,"37",,terminal_output +52,52733,"TERMINAL",0,0,"48",,terminal_output +53,53771,"TERMINAL",0,0,"59",,terminal_output +54,54813,"TERMINAL",0,0,"620",,terminal_output 
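The queue command above is a watch wrapper around squeue --me (visible in the captured frame header "Every 1.0s: squeue --me"), and idling similarly wraps sinfo_t_idle. As an illustrative Python equivalent of what the watch loop is used for here, one could poll until a job leaves the pending state; this helper is not part of the recording:

import subprocess
import time

def wait_until_running(job_id: str, interval: float = 1.0) -> None:
    """Block until the given Slurm job reports state RUNNING."""
    while True:
        state = subprocess.run(
            ["squeue", "--noheader", "-j", job_id, "-o", "%T"],
            capture_output=True, text=True,
        ).stdout.strip()
        if state == "RUNNING":
            return
        time.sleep(interval)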
+55,55873,"TERMINAL",0,0,"71",,terminal_output +56,56914,"TERMINAL",0,0,"83",,terminal_output +57,57963,"TERMINAL",0,0,"404",,terminal_output +58,59001,"TERMINAL",0,0,"15",,terminal_output +59,60059,"TERMINAL",0,0,"26",,terminal_output +60,61088,"TERMINAL",0,0,"37",,terminal_output +61,62175,"TERMINAL",0,0,"48",,terminal_output +62,63190,"TERMINAL",0,0,"59",,terminal_output +63,64223,"TERMINAL",0,0,"630",,terminal_output +64,65267,"TERMINAL",0,0,"71",,terminal_output +65,66304,"TERMINAL",0,0,"82",,terminal_output +66,67359,"TERMINAL",0,0,"93",,terminal_output +67,68400,"TERMINAL",0,0,"504",,terminal_output +68,69439,"TERMINAL",0,0,"15",,terminal_output +69,70458,"TERMINAL",0,0,"26",,terminal_output +70,71507,"TERMINAL",0,0,"37",,terminal_output +71,72552,"TERMINAL",0,0,"48",,terminal_output +72,73601,"TERMINAL",0,0,"59",,terminal_output +73,74642,"TERMINAL",0,0,"640",,terminal_output +74,75776,"TERMINAL",0,0,"71",,terminal_output +75,76720,"TERMINAL",0,0,"82",,terminal_output +76,77825,"TERMINAL",0,0,"93",,terminal_output +77,78850,"TERMINAL",0,0,"2:004",,terminal_output +78,79867,"TERMINAL",0,0,"15",,terminal_output +79,80916,"TERMINAL",0,0,"27",,terminal_output +80,82024,"TERMINAL",0,0,"48",,terminal_output +81,83048,"TERMINAL",0,0,"59",,terminal_output +82,84062,"TERMINAL",0,0,"650",,terminal_output +83,85199,"TERMINAL",0,0,"71",,terminal_output +84,86186,"TERMINAL",0,0,"82",,terminal_output +85,87220,"TERMINAL",0,0,"93",,terminal_output +86,88273,"TERMINAL",0,0,"104",,terminal_output +87,89320,"TERMINAL",0,0,"15",,terminal_output +88,90408,"TERMINAL",0,0,"26",,terminal_output +89,91444,"TERMINAL",0,0,"37",,terminal_output +90,92572,"TERMINAL",0,0,"48",,terminal_output +91,93550,"TERMINAL",0,0,"59",,terminal_output +92,94557,"TERMINAL",0,0,"68:00",,terminal_output +93,95645,"TERMINAL",0,0,"71",,terminal_output +94,96668,"TERMINAL",0,0,"82",,terminal_output +95,97691,"TERMINAL",0,0,"93",,terminal_output +96,98818,"TERMINAL",0,0,"204",,terminal_output +97,99755,"TERMINAL",0,0,"15",,terminal_output +98,100869,"TERMINAL",0,0,"26",,terminal_output +99,101827,"TERMINAL",0,0,"37",,terminal_output +100,102871,"TERMINAL",0,0,"48",,terminal_output +101,103910,"TERMINAL",0,0,"510",,terminal_output +102,104953,"TERMINAL",0,0,"71",,terminal_output +103,106086,"TERMINAL",0,0,"82",,terminal_output +104,107056,"TERMINAL",0,0,"93",,terminal_output +105,108157,"TERMINAL",0,0,"304",,terminal_output +106,109213,"TERMINAL",0,0,"15",,terminal_output +107,110257,"TERMINAL",0,0,"26",,terminal_output +108,111288,"TERMINAL",0,0,"37",,terminal_output +109,112341,"TERMINAL",0,0,"48",,terminal_output +110,113392,"TERMINAL",0,0,"59",,terminal_output +111,114483,"TERMINAL",0,0,"620",,terminal_output +112,115480,"TERMINAL",0,0,"71",,terminal_output +113,116502,"TERMINAL",0,0,"82",,terminal_output +114,117589,"TERMINAL",0,0,"93",,terminal_output +115,118589,"TERMINAL",0,0,"404",,terminal_output +116,119706,"TERMINAL",0,0,"15",,terminal_output +117,120737,"TERMINAL",0,0,"26",,terminal_output +118,121857,"TERMINAL",0,0,"37",,terminal_output +119,122882,"TERMINAL",0,0,"48",,terminal_output +120,123879,"TERMINAL",0,0,"59",,terminal_output +121,125032,"TERMINAL",0,0,"631",,terminal_output +122,126058,"TERMINAL",0,0,"82",,terminal_output +123,127078,"TERMINAL",0,0,"93",,terminal_output +124,128105,"TERMINAL",0,0,"504",,terminal_output +125,129144,"TERMINAL",0,0,"15",,terminal_output +126,130159,"TERMINAL",0,0,"26",,terminal_output +127,131201,"TERMINAL",0,0,"37",,terminal_output 
+128,132261,"TERMINAL",0,0,"48",,terminal_output +129,133300,"TERMINAL",0,0,"59",,terminal_output +130,134348,"TERMINAL",0,0,"640",,terminal_output +131,135480,"TERMINAL",0,0,"71",,terminal_output +132,136516,"TERMINAL",0,0,"82",,terminal_output +133,137528,"TERMINAL",0,0,"93",,terminal_output +134,138545,"TERMINAL",0,0,"3:004",,terminal_output +135,139569,"TERMINAL",0,0,"15",,terminal_output +136,140617,"TERMINAL",0,0,"26",,terminal_output +137,141723,"TERMINAL",0,0,"37",,terminal_output +138,142750,"TERMINAL",0,0,"48",,terminal_output +139,143875,"TERMINAL",0,0,"59",,terminal_output +140,144920,"TERMINAL",0,0,"650",,terminal_output +141,145925,"TERMINAL",0,0,"71",,terminal_output +142,146945,"TERMINAL",0,0,"82",,terminal_output +143,147970,"TERMINAL",0,0,"94",,terminal_output +144,149098,"TERMINAL",0,0,"115",,terminal_output +145,150048,"TERMINAL",0,0,"26",,terminal_output +146,151144,"TERMINAL",0,0,"37",,terminal_output +147,152130,"TERMINAL",0,0,"48",,terminal_output +148,153197,"TERMINAL",0,0,"59",,terminal_output +149,154233,"TERMINAL",0,0,"69:00",,terminal_output +150,155287,"TERMINAL",0,0,"71",,terminal_output +151,156368,"TERMINAL",0,0,"82",,terminal_output +152,157391,"TERMINAL",0,0,"93",,terminal_output +153,158444,"TERMINAL",0,0,"204",,terminal_output +154,159544,"TERMINAL",0,0,"15",,terminal_output +155,160568,"TERMINAL",0,0,"26",,terminal_output +156,161586,"TERMINAL",0,0,"37",,terminal_output +157,162636,"TERMINAL",0,0,"48",,terminal_output +158,163687,"TERMINAL",0,0,"59",,terminal_output +159,164735,"TERMINAL",0,0,"610",,terminal_output +160,165787,"TERMINAL",0,0,"71",,terminal_output +161,166805,"TERMINAL",0,0,"82",,terminal_output +162,167938,"TERMINAL",0,0,"93",,terminal_output +163,168870,"TERMINAL",0,0,"304",,terminal_output +164,169988,"TERMINAL",0,0,"16",,terminal_output +165,171008,"TERMINAL",0,0,"37",,terminal_output +166,172736,"TERMINAL",0,0,"48",,terminal_output +167,173818,"TERMINAL",0,0,"59",,terminal_output +168,174823,"TERMINAL",0,0,"620",,terminal_output +169,175879,"TERMINAL",0,0,"71",,terminal_output +170,176929,"TERMINAL",0,0,"83",,terminal_output +171,177980,"TERMINAL",0,0,"404",,terminal_output +172,179020,"TERMINAL",0,0,"15",,terminal_output +173,180047,"TERMINAL",0,0,"26",,terminal_output +174,181098,"TERMINAL",0,0,"37",,terminal_output +175,182144,"TERMINAL",0,0,"48",,terminal_output +176,183199,"TERMINAL",0,0,"59",,terminal_output +177,184252,"TERMINAL",0,0,"630",,terminal_output +178,185293,"TERMINAL",0,0,"71",,terminal_output +179,186346,"TERMINAL",0,0,"82",,terminal_output +180,187386,"TERMINAL",0,0,"93",,terminal_output +181,188438,"TERMINAL",0,0,"504",,terminal_output +182,189489,"TERMINAL",0,0,"15",,terminal_output +183,190534,"TERMINAL",0,0,"26",,terminal_output +184,191589,"TERMINAL",0,0,"37",,terminal_output +185,192642,"TERMINAL",0,0,"48",,terminal_output +186,193686,"TERMINAL",0,0,"59",,terminal_output +187,194730,"TERMINAL",0,0,"640",,terminal_output +188,195783,"TERMINAL",0,0,"71",,terminal_output +189,196832,"TERMINAL",0,0,"82",,terminal_output +190,197945,"TERMINAL",0,0,"93",,terminal_output +191,198909,"TERMINAL",0,0,"4:005",,terminal_output +192,199955,"TERMINAL",0,0,"26",,terminal_output +193,201013,"TERMINAL",0,0,"37",,terminal_output +194,202045,"TERMINAL",0,0,"48",,terminal_output +195,203085,"TERMINAL",0,0,"59",,terminal_output +196,204132,"TERMINAL",0,0,"650",,terminal_output +197,205180,"TERMINAL",0,0,"71",,terminal_output +198,206228,"TERMINAL",0,0,"82",,terminal_output +199,207263,"TERMINAL",0,0,"93",,terminal_output 
+200,208305,"TERMINAL",0,0,"104",,terminal_output +201,209343,"TERMINAL",0,0,"15",,terminal_output +202,210433,"TERMINAL",0,0,"26",,terminal_output +203,211401,"TERMINAL",0,0,"37",,terminal_output +204,212440,"TERMINAL",0,0,"48",,terminal_output +205,213533,"TERMINAL",0,0,"59",,terminal_output +206,214534,"TERMINAL",0,0,"62:00:00",,terminal_output +207,215556,"TERMINAL",0,0,"71",,terminal_output +208,216683,"TERMINAL",0,0,"82",,terminal_output +209,217704,"TERMINAL",0,0,"93",,terminal_output +210,218676,"TERMINAL",0,0,"204",,terminal_output +211,219713,"TERMINAL",0,0,"15",,terminal_output +212,220882,"TERMINAL",0,0,"26",,terminal_output +213,221902,"TERMINAL",0,0,"37",,terminal_output +214,222853,"TERMINAL",0,0,"48",,terminal_output +215,223913,"TERMINAL",0,0,"510",,terminal_output +216,224993,"TERMINAL",0,0,"71",,terminal_output +217,225981,"TERMINAL",0,0,"82",,terminal_output +218,227095,"TERMINAL",0,0,"93",,terminal_output +219,228148,"TERMINAL",0,0,"304",,terminal_output +220,229183,"TERMINAL",0,0,"15",,terminal_output +221,230174,"TERMINAL",0,0,"26",,terminal_output +222,231210,"TERMINAL",0,0,"37",,terminal_output +223,232274,"TERMINAL",0,0,"48",,terminal_output +224,233322,"TERMINAL",0,0,"59",,terminal_output +225,234362,"TERMINAL",0,0,"620",,terminal_output +226,235404,"TERMINAL",0,0,"71",,terminal_output +227,236454,"TERMINAL",0,0,"82",,terminal_output +228,237503,"TERMINAL",0,0,"93",,terminal_output +229,238542,"TERMINAL",0,0,"404",,terminal_output +230,239598,"TERMINAL",0,0,"15",,terminal_output +231,240645,"TERMINAL",0,0,"26",,terminal_output +232,241692,"TERMINAL",0,0,"37",,terminal_output +233,242731,"TERMINAL",0,0,"48",,terminal_output +234,243808,"TERMINAL",0,0,"59",,terminal_output +235,244792,"TERMINAL",0,0,"630",,terminal_output +236,245835,"TERMINAL",0,0,"71",,terminal_output +237,246903,"TERMINAL",0,0,"83",,terminal_output +238,248016,"TERMINAL",0,0,"504",,terminal_output +239,249001,"TERMINAL",0,0,"15",,terminal_output +240,250082,"TERMINAL",0,0,"26",,terminal_output +241,251096,"TERMINAL",0,0,"37",,terminal_output +242,252230,"TERMINAL",0,0,"48",,terminal_output +243,253213,"TERMINAL",0,0,"59",,terminal_output +244,254265,"TERMINAL",0,0,"640",,terminal_output +245,255317,"TERMINAL",0,0,"71",,terminal_output +246,256445,"TERMINAL",0,0,"82",,terminal_output +247,257415,"TERMINAL",0,0,"93",,terminal_output +248,258467,"TERMINAL",0,0,"5:004",,terminal_output +249,259491,"TERMINAL",0,0,"15",,terminal_output +250,260541,"TERMINAL",0,0,"26",,terminal_output +251,261597,"TERMINAL",0,0,"37",,terminal_output +252,262644,"TERMINAL",0,0,"48",,terminal_output +253,263683,"TERMINAL",0,0,"59",,terminal_output +254,265423,"TERMINAL",0,0,"651",,terminal_output +255,266549,"TERMINAL",0,0,"82",,terminal_output +256,267573,"TERMINAL",0,0,"93",,terminal_output +257,268536,"TERMINAL",0,0,"104",,terminal_output +258,269659,"TERMINAL",0,0,"15",,terminal_output +259,270603,"TERMINAL",0,0,"26",,terminal_output +260,271673,"TERMINAL",0,0,"37",,terminal_output +261,272686,"TERMINAL",0,0,"48",,terminal_output +262,273739,"TERMINAL",0,0,"59",,terminal_output +263,274843,"TERMINAL",0,0,"61:00",,terminal_output +264,275869,"TERMINAL",0,0,"71",,terminal_output +265,276857,"TERMINAL",0,0,"82",,terminal_output +266,277915,"TERMINAL",0,0,"93",,terminal_output +267,278940,"TERMINAL",0,0,"205",,terminal_output +268,280068,"TERMINAL",0,0,"26",,terminal_output +269,281093,"TERMINAL",0,0,"37",,terminal_output +270,282114,"TERMINAL",0,0,"48",,terminal_output +271,283129,"TERMINAL",0,0,"59",,terminal_output 
+272,284264,"TERMINAL",0,0,"610",,terminal_output +273,285295,"TERMINAL",0,0,"71",,terminal_output +274,286261,"TERMINAL",0,0,"82",,terminal_output +275,287311,"TERMINAL",0,0,"93",,terminal_output +276,288349,"TERMINAL",0,0,"304",,terminal_output +277,289487,"TERMINAL",0,0,"15",,terminal_output +278,290438,"TERMINAL",0,0,"26",,terminal_output +279,291535,"TERMINAL",0,0,"37",,terminal_output +280,292563,"TERMINAL",0,0,"48",,terminal_output +281,293569,"TERMINAL",0,0,"59",,terminal_output +282,294644,"TERMINAL",0,0,"620",,terminal_output +283,295655,"TERMINAL",0,0,"71",,terminal_output +284,296695,"TERMINAL",0,0,"82",,terminal_output +285,297741,"TERMINAL",0,0,"93",,terminal_output +286,298794,"TERMINAL",0,0,"404",,terminal_output +287,299838,"TERMINAL",0,0,"15",,terminal_output +288,300882,"TERMINAL",0,0,"26",,terminal_output +289,301983,"TERMINAL",0,0,"38",,terminal_output +290,302951,"TERMINAL",0,0,"59",,terminal_output +291,303999,"TERMINAL",0,0,"630",,terminal_output +292,305051,"TERMINAL",0,0,"71",,terminal_output +293,306178,"TERMINAL",0,0,"82",,terminal_output +294,307202,"TERMINAL",0,0,"93",,terminal_output +295,308176,"TERMINAL",0,0,"504",,terminal_output +296,309221,"TERMINAL",0,0,"15",,terminal_output +297,310276,"TERMINAL",0,0,"26",,terminal_output +298,311314,"TERMINAL",0,0,"37",,terminal_output +299,312428,"TERMINAL",0,0,"48",,terminal_output +300,313448,"TERMINAL",0,0,"59",,terminal_output +301,314473,"TERMINAL",0,0,"640",,terminal_output +302,315495,"TERMINAL",0,0,"71",,terminal_output +303,316521,"TERMINAL",0,0,"82",,terminal_output +304,317570,"TERMINAL",0,0,"93",,terminal_output +305,318624,"TERMINAL",0,0,"6:004",,terminal_output +306,319697,"TERMINAL",0,0,"15",,terminal_output +307,320678,"TERMINAL",0,0,"26",,terminal_output +308,321736,"TERMINAL",0,0,"37",,terminal_output +309,322777,"TERMINAL",0,0,"48",,terminal_output +310,323894,"TERMINAL",0,0,"59",,terminal_output +311,324922,"TERMINAL",0,0,"650",,terminal_output +312,325942,"TERMINAL",0,0,"72",,terminal_output +313,326965,"TERMINAL",0,0,"93",,terminal_output +314,327991,"TERMINAL",0,0,"104",,terminal_output +315,329116,"TERMINAL",0,0,"15",,terminal_output +316,330243,"TERMINAL",0,0,"26",,terminal_output +317,331133,"TERMINAL",0,0,"37",,terminal_output +318,332188,"TERMINAL",0,0,"48",,terminal_output +319,333240,"TERMINAL",0,0,"59",,terminal_output +320,334286,"TERMINAL",0,0,"62:00",,terminal_output +321,334771,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +322,4560228,"TERMINAL",0,0,"queue",,terminal_command +323,4560272,"TERMINAL",0,0,"]633;E;2025-07-02 17:36:42 queue;26cd839c-476e-4913-967a-1422bf7b3816]633;C",,terminal_output +324,4560343,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Wed Jul 2 17:36:42 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3311671 accelerat train_to tum_cte0 R 3:12:26\t 1 hkn0717",,terminal_output +325,4561383,"TERMINAL",0,0,"37",,terminal_output +326,4561489,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +327,4562886,"TERMINAL",0,0,"idling",,terminal_command +328,4562983,"TERMINAL",0,0,"]633;E;2025-07-02 17:36:44 idling;26cd839c-476e-4913-967a-1422bf7b3816]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1993.localdomain: Wed Jul 2 17:36:45 2025Partition dev_cpuonly: 12 nodes idle\rPartition cpuonly:\t 4 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes 
idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +329,4564005,"TERMINAL",0,0,"6\t ",,terminal_output +330,4565051,"TERMINAL",0,0,"7\t ",,terminal_output +331,4566099,"TERMINAL",0,0,"8\t ",,terminal_output +332,4566873,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +333,4569190,"TERMINAL",0,0,"",,terminal_focus +334,4575053,"scripts_horeka/overfit_batch_tiny/sample.sh",0,0,"#!/usr/bin/env bash\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\n# Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}\n\n# Example: If you want to use a specific checkpoint, set it here\n# CHECKPOINT_PATH=$ws_dir/checkpoints/3299272/dynamics-tiny-overfit-big-lr-3299272_50000/\n# Or use the latest in the directory\n# CHECKPOINT_PATH=$(ls -d $CHECKPOINT_DIR/*/ | sort | tail -n 1)\nCHECKPOINT_PATH=$CHECKPOINT_DIR/genie_1751067601_200000/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/../checkpoints/3307618/genie_1751322003_15500/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3307619/genie_1751322003_200000/\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_200000/\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1000 \\n --num_latent_actions=6 \\n --seq_len=16 \\n --start_frame=0\n\n# python sample.py \\n # --checkpoint ""$CHECKPOINT_PATH"" \\n # --data_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/coinrun_episodes\n",shellscript,tab +335,4582430,"TERMINAL",0,0,"bash",,terminal_focus +336,4584712,"TERMINAL",0,0,"bash",,terminal_focus +337,4584714,"scripts_horeka/overfit_batch_tiny/sample.sh",0,0,"",shellscript,tab +338,4585497,"scripts_horeka/modelsize_scaling/dynamics/model_sizes.md",0,0,"# Genie 1 - Model Sizes and their configs\n\n## Tokenizer model: sizes\n\ndefault: \n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| default | 512 | 8 | 8 | 32 | 1024 | ~38M |\n\n### scaling up \n#### (not tested yet - TODO @mihir)\n\n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. 
Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| L1 | 768 | 12 | 12 | 64 | 2048 | ~80M |\n| L2 | 1024 | 12 | 16 | 128 | 2048 | ~140M |\n| L3 | 1152 | 16 | 16 | 128 | 4096 | ~200M |\n| L4 | 896 | 16 | 14 | 96 | 4096 | ~120M |\n| L5 | 1536 | 12 | 24 | 256 | 2048 | ~190M |\n\n\n### tiny models\n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| S1 | 128 | 2 | 2 | 8 | 128 | ~0.6M |\n| S2 | 192 | 2 | 3 | 16 | 128 | ~1.3M |\n| S3 | 256 | 3 | 4 | 16 | 256 | ~3.6M |\n| S4 | 320 | 4 | 5 | 24 | 256 | ~7.4M |\n| S5 | 384 | 4 | 6 | 32 | 512 | ~10M |\n\n\n## Latent Action model: sizes\ndefault: \n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| default | 512 | 8 | 8 | 32 | 6 | ~39M |\n\n### scaling up \n#### (not tested yet - TODO @mihir)\n\n| Name | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|--------------|-----------|------------|-----------|------------|-------------|-------------|\n| XL | 1024 | 12 | 16 | 64 | 12 | ~200M |\n| L | 896 | 12 | 14 | 48 | 8 | ~150M |\n| M+ | 768 | 10 | 12 | 48 | 8 | ~100M |\n| M | 640 | 10 | 10 | 32 | 8 | ~70M |\n| Base+ | 512 | 12 | 8 | 32 | 8 | ~55M |\n\n\n### tiny models\n| Name | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|--------------|-----------|------------|-----------|------------|-------------|-------------|\n| XS | 128 | 2 | 2 | 8 | 4 | ~0.9M |\n| S | 160 | 2 | 2 | 8 | 4 | ~1.3M |\n| S+ | 192 | 3 | 3 | 8 | 4 | ~2.4M |\n| M- | 256 | 4 | 4 | 16 | 6 | ~5.4M |\n| M | 320 | 6 | 4 | 16 | 6 | ~12M |\n\n\n## Dynamics model: sizes \n\n| Config | dyna_dim | dyna_num_blocks | dyna_num_heads | Approx. Params |\n|--------|----------|-----------------|---------------|----------------|\n| 1 | 512 | 12 | 8 | ~36M |\n| 2 | 768 | 16 | 12 | ~110M |\n| 3 | 1024 | 16 | 16 | ~180M |\n| 4 | 1024 | 24 | 16 | ~270M |\n| 5 | 1536 | 24 | 24 | ~500M |\n\n\n### tiny models\n| Config | dyna_dim | dyna_num_blocks | dyna_num_heads | Approx. 
Params |\n|--------|----------|-----------------|---------------|----------------|\n| A | 128 | 2 | 4 | ~1.5M |\n| B | 256 | 2 | 4 | ~3.5M |\n| C | 256 | 4 | 4 | ~6M |\n| D | 384 | 4 | 6 | ~12M |\n| E | 512 | 4 | 8 | ~18M |",markdown,tab +339,4873342,"TERMINAL",0,0,"bash",,terminal_focus +340,5485002,"TERMINAL",0,0,"salloc --time=00:30:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G",,terminal_command +341,5485037,"TERMINAL",0,0,"]633;E;2025-07-02 17:52:07 salloc --time=00:30:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G;26cd839c-476e-4913-967a-1422bf7b3816]633;Csalloc: Pending job allocation 3312853\r\nsalloc: job 3312853 queued and waiting for resources\r\n",,terminal_output +342,5486062,"TERMINAL",0,0,"^Csalloc: Job allocation 3312853 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1",,terminal_output +343,5488299,"TERMINAL",0,0,"salloc --time=00:30:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G^C",,terminal_command +344,5488347,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;26cd839c-476e-4913-967a-1422bf7b3816]633;C]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D",,terminal_output +345,5507657,"TERMINAL",0,0,"salloc --time=01:30:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G",,terminal_command +346,5507713,"TERMINAL",0,0,"]633;E;2025-07-02 17:52:29 salloc --time=01:30:00 --partition=dev_accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G;26cd839c-476e-4913-967a-1422bf7b3816]633;Csalloc: error: Job submit/allocate failed: Requested time limit is invalid (missing or exceeds some limit)\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;1",,terminal_output +347,5522205,"TERMINAL",0,0,"salloc --time=01:30:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=50G",,terminal_command +348,5522257,"TERMINAL",0,0,"]633;E;2025-07-02 17:52:44 salloc --time=01:30:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=50G;26cd839c-476e-4913-967a-1422bf7b3816]633;Csalloc: Pending job allocation 3312854\r\nsalloc: job 3312854 queued and waiting for resources\r\n",,terminal_output +349,5528628,"TERMINAL",0,0,"bash",,terminal_focus +350,5529561,"TERMINAL",0,0,"queue",,terminal_command +351,5529625,"TERMINAL",0,0,"\r\n[?2004l\r]633;E;2025-07-02 17:52:51 queue;0598f850-442d-4019-9770-f648eaf5abbd]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Wed Jul 2 17:52:51 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3312854 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)3311671 accelerat train_to tum_cte0 R 3:28:35\t 1 hkn0717",,terminal_output +352,5530673,"TERMINAL",0,0,"26",,terminal_output +353,5531169,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar[?2004h",,terminal_output +354,5532122,"TERMINAL",0,0,"idling",,terminal_command +355,5532190,"TERMINAL",0,0,"]633;E;2025-07-02 17:52:54 idling;0598f850-442d-4019-9770-f648eaf5abbd]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1993.localdomain: Wed Jul 2 17:52:54 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 115 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes 
idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +356,5533235,"TERMINAL",0,0,"5\t ",,terminal_output +357,5533929,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +358,5771719,"TERMINAL",0,0,"salloc",,terminal_focus diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-0f5513f7-8bc9-4c5d-856d-79d92f75113d1751284706913-2025_06_30-14.24.04.501/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-0f5513f7-8bc9-4c5d-856d-79d92f75113d1751284706913-2025_06_30-14.24.04.501/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..a40dc3428ab18faf2ac593f4874abb864a1f555d --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-0f5513f7-8bc9-4c5d-856d-79d92f75113d1751284706913-2025_06_30-14.24.04.501/source.csv @@ -0,0 +1,25244 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,4393,"TERMINAL",0,0,"idle",,terminal_command +3,4425,"TERMINAL",0,0,"]633;E;2025-06-30 14:24:08 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 10 nodes idle\r\nPartition cpuonly : 118 nodes idle\r\nPartition dev_accelerated : 2 nodes idle\r\nPartition accelerated : 0 nodes idle\r\nPartition dev_accelerated-h100 : 1 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +4,6679,"TERMINAL",0,0,"idling",,terminal_command +5,6758,"TERMINAL",0,0,"]633;E;2025-06-30 14:24:11 idling;ead59344-49db-4336-9336-47fae706e637]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 14:24:11 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 118 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +6,7792,"TERMINAL",0,0,"2\t ",,terminal_output +7,8825,"TERMINAL",0,0,"3\t ",,terminal_output +8,9878,"TERMINAL",0,0,"4\t ",,terminal_output +9,10897,"TERMINAL",0,0,"5\t ",,terminal_output +10,11936,"TERMINAL",0,0,"6\t ",,terminal_output +11,12974,"TERMINAL",0,0,"7\t ",,terminal_output +12,14017,"TERMINAL",0,0,"8\t ",,terminal_output +13,15103,"TERMINAL",0,0,"9\t ",,terminal_output +14,16107,"TERMINAL",0,0,"20\t ",,terminal_output +15,16439,"TERMINAL",0,0,"salloc",,terminal_focus +16,16944,"TERMINAL",0,0,"^Csalloc: Job allocation 3306177 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;1",,terminal_output +17,17158,"TERMINAL",0,0,"1\t ",,terminal_output +18,18194,"TERMINAL",0,0,"2\t ",,terminal_output +19,19229,"TERMINAL",0,0,"3\t ",,terminal_output +20,20334,"TERMINAL",0,0,"4\t ",,terminal_output +21,21329,"TERMINAL",0,0,"5\t ",,terminal_output +22,22360,"TERMINAL",0,0,"6\t ",,terminal_output +23,23410,"TERMINAL",0,0,"7\t ",,terminal_output +24,24464,"TERMINAL",0,0,"8\t ",,terminal_output +25,25174,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev-accelerated-h100 --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G",,terminal_command +26,25206,"TERMINAL",0,0,"]633;E;2025-06-30 14:24:29 salloc --time=01:00:00 --partition=dev-accelerated-h100 --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 
--mem=200G;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;Csalloc: error: invalid partition specified: dev-accelerated-h100\r\nsalloc: error: Job submit/allocate failed: Invalid partition name specified\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;1",,terminal_output +27,25477,"TERMINAL",0,0,"9\t ",,terminal_output +28,26516,"TERMINAL",0,0,"30\t ",,terminal_output +29,27564,"TERMINAL",0,0,"1\t ",,terminal_output +30,28599,"TERMINAL",0,0,"2\t ",,terminal_output +31,29640,"TERMINAL",0,0,"4\t ",,terminal_output +32,30679,"TERMINAL",0,0,"5\t ",,terminal_output +33,31724,"TERMINAL",0,0,"6\t ",,terminal_output +34,32766,"TERMINAL",0,0,"7\t ",,terminal_output +35,33797,"TERMINAL",0,0,"8\t ",,terminal_output +36,34081,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G",,terminal_command +37,34093,"TERMINAL",0,0,"]633;E;2025-06-30 14:24:38 salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;Csalloc: error: Job submit/allocate failed: Node count specification invalid\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;1",,terminal_output +38,34837,"TERMINAL",0,0,"9\t ",,terminal_output +39,35878,"TERMINAL",0,0,"40\t ",,terminal_output +40,36942,"TERMINAL",0,0,"1\t ",,terminal_output +41,37981,"TERMINAL",0,0,"2\t ",,terminal_output +42,38988,"TERMINAL",0,0,"3\t ",,terminal_output +43,39944,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G",,terminal_command +44,39997,"TERMINAL",0,0,"]633;E;2025-06-30 14:24:44 salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;Csalloc: Granted job allocation 3306183\r\n",,terminal_output +45,40021,"TERMINAL",0,0,"40",,terminal_output +46,40126,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +47,41062,"TERMINAL",0,0,"5\t ",,terminal_output +48,42102,"TERMINAL",0,0,"6\t ",,terminal_output +49,42165,"TERMINAL",0,0,"watch",,terminal_focus +50,43151,"TERMINAL",0,0,"7\t ",,terminal_output +51,43620,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +52,45559,"TERMINAL",0,0,"queue",,terminal_command +53,45609,"TERMINAL",0,0,"]633;E;2025-06-30 14:24:49 queue;ead59344-49db-4336-9336-47fae706e637]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Jun 30 14:24:49 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3306183 dev_accel interact tum_cte0 R\t0:06\t 1 hkn0901",,terminal_output +54,46663,"TERMINAL",0,0,"517",,terminal_output +55,47705,"TERMINAL",0,0,"28",,terminal_output +56,48766,"TERMINAL",0,0,"39",,terminal_output +57,49793,"TERMINAL",0,0,"410",,terminal_output +58,50840,"TERMINAL",0,0,"51",,terminal_output +59,51892,"TERMINAL",0,0,"62",,terminal_output +60,52943,"TERMINAL",0,0,"73",,terminal_output +61,53980,"TERMINAL",0,0,"84",,terminal_output +62,55030,"TERMINAL",0,0,"95",,terminal_output +63,56097,"TERMINAL",0,0,"5:006",,terminal_output +64,57121,"TERMINAL",0,0,"17",,terminal_output +65,58175,"TERMINAL",0,0,"28",,terminal_output +66,59214,"TERMINAL",0,0,"39",,terminal_output +67,60286,"TERMINAL",0,0,"420",,terminal_output +68,61304,"TERMINAL",0,0,"51",,terminal_output 
+69,62356,"TERMINAL",0,0,"62",,terminal_output +70,63402,"TERMINAL",0,0,"73",,terminal_output +71,64454,"TERMINAL",0,0,"84",,terminal_output +72,65501,"TERMINAL",0,0,"95",,terminal_output +73,66544,"TERMINAL",0,0,"106",,terminal_output +74,67608,"TERMINAL",0,0,"18",,terminal_output +75,67995,"TERMINAL",0,0,"salloc: Prolog hung on node hkn0901\r\n",,terminal_output +76,68670,"TERMINAL",0,0,"39",,terminal_output +77,69706,"TERMINAL",0,0,"430",,terminal_output +78,70754,"TERMINAL",0,0,"51",,terminal_output +79,71799,"TERMINAL",0,0,"62",,terminal_output +80,72849,"TERMINAL",0,0,"73",,terminal_output +81,73901,"TERMINAL",0,0,"84",,terminal_output +82,74943,"TERMINAL",0,0,"95",,terminal_output +83,75994,"TERMINAL",0,0,"206",,terminal_output +84,77042,"TERMINAL",0,0,"17",,terminal_output +85,78099,"TERMINAL",0,0,"28",,terminal_output +86,78672,"TERMINAL",0,0,"salloc: Nodes hkn0901 are ready for job\r\n",,terminal_output +87,79188,"TERMINAL",0,0,"39",,terminal_output +88,79740,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h[tum_cte0515@hkn0901 jafar]$ ",,terminal_output +89,80246,"TERMINAL",0,0,"440",,terminal_output +90,81275,"TERMINAL",0,0,"51",,terminal_output +91,82306,"TERMINAL",0,0,"62",,terminal_output +92,83425,"TERMINAL",0,0,"73",,terminal_output +93,84395,"TERMINAL",0,0,"84",,terminal_output +94,85471,"TERMINAL",0,0,"95",,terminal_output +95,86476,"TERMINAL",0,0,"306",,terminal_output +96,87569,"TERMINAL",0,0,"17",,terminal_output +97,88648,"TERMINAL",0,0,"29",,terminal_output +98,89637,"TERMINAL",0,0,"450",,terminal_output +99,90674,"TERMINAL",0,0,"51",,terminal_output +100,91826,"TERMINAL",0,0,"62",,terminal_output +101,92850,"TERMINAL",0,0,"73",,terminal_output +102,93869,"TERMINAL",0,0,"84",,terminal_output +103,94896,"TERMINAL",0,0,"95",,terminal_output +104,95932,"TERMINAL",0,0,"406",,terminal_output +105,97463,"TERMINAL",0,0,"17",,terminal_output +106,98019,"TERMINAL",0,0,"28",,terminal_output +107,98617,"TERMINAL",0,0,"srun",,terminal_focus +108,99091,"TERMINAL",0,0,"39",,terminal_output +109,99668,"TERMINAL",0,0,"s",,terminal_output +110,99799,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +111,99892,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +112,100005,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +113,100156,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +114,100156,"TERMINAL",0,0,"41:00",,terminal_output +115,100324,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +116,100481,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +117,100660,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +118,100768,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +119,100955,"TERMINAL",0,0,"env/",,terminal_output +120,101153,"TERMINAL",0,0,"51",,terminal_output +121,101424,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +122,101521,"TERMINAL",0,0,"in/",,terminal_output +123,102234,"TERMINAL",0,0,"62",,terminal_output +124,102268,"TERMINAL",0,0,"[?25la[?25h[?25lc[?25h",,terminal_output +125,102468,"TERMINAL",0,0,"tivate",,terminal_output +126,102809,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +127,103073,"TERMINAL",0,0,"l",,terminal_output +128,103168,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +129,103297,"TERMINAL",0,0,"\r\n[?2004l\rdata frame-knoms.png genie.py logs __pycache__ requirements-franz.txt scripts_cremers train_dynamics.py utils\r\ndata_tfrecord_duplicated frame.png gifs models README.md requirements.txt scripts_horeka train_lam.py wandb\r\ndata_tfrecords generate_dataset.py LICENSE 
overfit_dir read_tf_record.py sample.py slurm train_tokenizer.py\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +130,103297,"TERMINAL",0,0,"73",,terminal_output +131,104317,"TERMINAL",0,0,"84",,terminal_output +132,105443,"TERMINAL",0,0,"95",,terminal_output +133,106466,"TERMINAL",0,0,"506",,terminal_output +134,107474,"TERMINAL",0,0,"17",,terminal_output +135,108515,"TERMINAL",0,0,"28",,terminal_output +136,109261,"TERMINAL",0,0,"[?25lgi[?25h[?25li[?25h",,terminal_output +137,109421,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +138,109488,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +139,109553,"TERMINAL",0,0,"39",,terminal_output +140,109648,"TERMINAL",0,0,"[?25lb[?25h[?25lr[?25h",,terminal_output +141,109884,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +142,109959,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +143,110138,"TERMINAL",0,0,"[?25lc[?25h[?25lh[?25h",,terminal_output +144,110364,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r add-wandb-name-and-tags\r\n convert-to-jax-array-in-iter\r\n dont-let-tf-see-gpu\r\n* main\r\n preprocess_video\r\n tmp\r\n",,terminal_output +145,110482,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +146,110593,"TERMINAL",0,0,"411",,terminal_output +147,111638,"TERMINAL",0,0,"62",,terminal_output +148,112321,"TERMINAL",0,0,"[?25lsh[?25h[?25lh[?25h",,terminal_output +149,112492,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +150,112692,"TERMINAL",0,0,"73",,terminal_output +151,113735,"TERMINAL",0,0,"84",,terminal_output +152,114014,"TERMINAL",0,0,"[?25ls[?25h[?25lh[?25h",,terminal_output +153,114227,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +154,114292,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +155,114477,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +156,114696,"TERMINAL",0,0,"ripts_",,terminal_output +157,114807,"TERMINAL",0,0,"95",,terminal_output +158,115387,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +159,115453,"TERMINAL",0,0,"oreka/",,terminal_output +160,115840,"TERMINAL",0,0,"6:006",,terminal_output +161,116911,"TERMINAL",0,0,"17",,terminal_output +162,117156,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +163,117450,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +164,117569,"TERMINAL",0,0,"ain_",,terminal_output +165,117939,"TERMINAL",0,0,"28",,terminal_output +166,118068,"TERMINAL",0,0,"",,terminal_output +167,118620,"TERMINAL",0,0,"[?25lt[?25hokenizer.sh ",,terminal_output +168,119092,"TERMINAL",0,0,"39",,terminal_output +169,120038,"TERMINAL",0,0,"420",,terminal_output +170,121087,"TERMINAL",0,0,"51",,terminal_output +171,122140,"TERMINAL",0,0,"62",,terminal_output +172,123191,"TERMINAL",0,0,"73",,terminal_output +173,123338,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +174,124231,"TERMINAL",0,0,"84",,terminal_output +175,124372,"TERMINAL",0,0,"[?25lsm[?25h[?25lm[?25h",,terminal_output +176,124560,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +177,124810,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +178,125308,"TERMINAL",0,0,"95",,terminal_output +179,125369,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: nvidia-smihkn0901.localdomain: Mon Jun 30 14:26:09 2025Mon Jun 30 14:26:09 2025\r+-----------------------------------------------------------------------------------------+\r| NVIDIA-SMI 570.133.20Driver Version: 570.133.20 CUDA Version: 12.8 
|\r|-----------------------------------------+------------------------+----------------------+\r| GPU NamePersistence-M | Bus-IdDisp.A | Volatile Uncorr. ECC |\r| Fan Temp PerfPwr:Usage/Cap |Memory-Usage | GPU-Util Compute M. |\r|||MIG M. |\r|=========================================+========================+======================|\r| 0 NVIDIA H100On | 00000000:06:00.0 Off |0 |\r| N/A 43C P069W / 415W |\t 27MiB / 95830MiB |\t 0%\t Default |\r|||Disabled |\r+-----------------------------------------+------------------------+----------------------+\r| 1 NVIDIA H100On | 00000000:26:00.0 Off |0 |\r| N/A 42C P069W / 415W |\t 27MiB / 95830MiB |\t 0%\t Default |\r|||Disabled |\r+-----------------------------------------+------------------------+----------------------+\r| 2 NVIDIA H100On | 00000000:A6:00.0 Off |0 |\r| N/A 43C P071W / 415W |\t 27MiB / 95830MiB |\t 0%\t Default |\r|||Disabled |\r+-----------------------------------------+------------------------+----------------------+\r| 3 NVIDIA H100On | 00000000:C6:00.0 Off |0 |\r| N/A 43C P067W / 415W |\t 27MiB / 95830MiB |\t 0%\t Default |\r|||Disabled |\r+-----------------------------------------+------------------------+----------------------+\r+-----------------------------------------------------------------------------------------+\r| Processes:|\r| GPU GI CIPID Type Process nameGPU Memory |\r|ID IDUsage\t |\r|=========================================================================================|\r| 0 N/A N/A3199G /usr/libexec/Xorg17MiB |\r| 1 N/A N/A3199G /usr/libexec/Xorg17MiB |\r| 2 N/A N/A3199G /usr/libexec/Xorg17MiB |\r| 3 N/A N/A3199G /usr/libexec/Xorg17MiB |\r+-----------------------------------------------------------------------------------------+",,terminal_output +180,126437,"TERMINAL",0,0,"106",,terminal_output +181,126929,"TERMINAL",0,0,"1010",,terminal_output +182,127460,"TERMINAL",0,0,"17",,terminal_output +183,128416,"TERMINAL",0,0,"28",,terminal_output +184,128528,"TERMINAL",0,0,"22",,terminal_output +185,129036,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +186,129333,"TERMINAL",0,0,"smi",,terminal_output +187,129464,"TERMINAL",0,0,"39",,terminal_output +188,129618,"TERMINAL",0,0,"git branch",,terminal_output +189,130511,"TERMINAL",0,0,"430",,terminal_output +190,130563,"TERMINAL",0,0,"ls",,terminal_output +191,130983,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +192,131558,"TERMINAL",0,0,"51",,terminal_output +193,131915,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +194,132602,"TERMINAL",0,0,"63",,terminal_output +195,133188,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +196,133436,"TERMINAL",0,0,"mi",,terminal_output +197,133587,"TERMINAL",0,0,"git branch",,terminal_output +198,133652,"TERMINAL",0,0,"84",,terminal_output +199,133786,"TERMINAL",0,0,"ls",,terminal_output +200,134487,"TERMINAL",0,0,"git branch",,terminal_output +201,134731,"TERMINAL",0,0,"smi",,terminal_output +202,134731,"TERMINAL",0,0,"95",,terminal_output +203,134886,"TERMINAL",0,0,"ource .venv/bin/activate",,terminal_output +204,135034,"TERMINAL",0,0,"",,terminal_output +205,135161,"TERMINAL",0,0,"",,terminal_output +206,135517,"TERMINAL",0,0,"s",,terminal_output +207,135770,"TERMINAL",0,0,"206",,terminal_output +208,135845,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +209,135911,"TERMINAL",0,0,"[?25l [?25h",,terminal_output 
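At this point the allocation on hkn0901 is granted, the venv is activated, and the watch nvidia-smi frame above confirms four idle H100s. A JAX-side sanity check before launching the training script would be, as an illustrative sketch (not part of the recording):

import jax

print(jax.devices())             # expect four CUDA devices on the H100 node
print(jax.local_device_count())  # 4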
+210,136078,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +211,136131,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +212,136357,"TERMINAL",0,0,"ripts_",,terminal_output +213,136619,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +214,136715,"TERMINAL",0,0,"oreka/",,terminal_output +215,136825,"TERMINAL",0,0,"17",,terminal_output +216,137321,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +217,137606,"TERMINAL",0,0,"rain_",,terminal_output +218,137907,"TERMINAL",0,0,"28",,terminal_output +219,138197,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +220,138614,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +221,138824,"TERMINAL",0,0,"okenizer.sh ",,terminal_output +222,138889,"TERMINAL",0,0,"39",,terminal_output +223,139763,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +224,139942,"TERMINAL",0,0,"440",,terminal_output +225,139966,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1787497\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1751286284\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751289884\r\nSLURM_PMI2_SRUN_PORT=43699\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306183\r\nSLURM_PTY_PORT=38037\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=4\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=204800\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=43913\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=4\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306183\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=43913\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +226,140218,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +227,141187,"TERMINAL",0,0,"51",,terminal_output +228,142041,"TERMINAL",0,0,"62",,terminal_output +229,142989,"scripts_horeka/train_tokenizer.sh",0,0,"#!/usr/bin/env bash\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=""debug""\nslurm_job_id=""0000""\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=16 \\n --min_lr=4.24e-4 \\n --max_lr=4.24e-4 \\n --log_image_interval=100 \\n --log \\n --name=test-wandb-tags-$slurm_job_id \\n --tags test tokenizer debug \\n --entity instant-uv \\n --project jafar \\n --data_dir 
$tf_records_dir",shellscript,tab +230,143118,"TERMINAL",0,0,"73",,terminal_output +231,144135,"TERMINAL",0,0,"84",,terminal_output +232,145182,"TERMINAL",0,0,"95",,terminal_output +233,146238,"TERMINAL",0,0,"306",,terminal_output +234,147323,"TERMINAL",0,0,"17",,terminal_output +235,148450,"TERMINAL",0,0,"28",,terminal_output +236,149427,"TERMINAL",0,0,"39",,terminal_output +237,150430,"TERMINAL",0,0,"450",,terminal_output +238,151520,"TERMINAL",0,0,"51",,terminal_output +239,152564,"TERMINAL",0,0,"62",,terminal_output +240,153580,"TERMINAL",0,0,"73",,terminal_output +241,154699,"TERMINAL",0,0,"95",,terminal_output +242,155719,"TERMINAL",0,0,"406",,terminal_output +243,156724,"TERMINAL",0,0,"17",,terminal_output +244,157870,"TERMINAL",0,0,"28",,terminal_output +245,158894,"TERMINAL",0,0,"39",,terminal_output +246,159896,"TERMINAL",0,0,"42:00",,terminal_output +247,161051,"TERMINAL",0,0,"51",,terminal_output +248,162109,"TERMINAL",0,0,"62",,terminal_output +249,163110,"TERMINAL",0,0,"73",,terminal_output +250,163275,"TERMINAL",0,0,"2025-06-30 14:26:47.702710: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 14:26:47.702704: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 14:26:47.702707: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 14:26:47.702702: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n",,terminal_output +251,163385,"TERMINAL",0,0,"WARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751286407.789834 1787835 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751286407.789836 1787836 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751286407.789833 1787837 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751286407.789831 1787838 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751286407.803345 1787835 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751286407.803340 1787836 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751286407.803338 1787838 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been 
registered\r\nE0000 00:00:1751286407.803385 1787837 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n",,terminal_output +252,163706,"TERMINAL",0,0,"W0000 00:00:1751286408.125169 1787835 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125226 1787835 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125230 1787835 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125232 1787835 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125164 1787836 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125218 1787836 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125220 1787836 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125223 1787836 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125161 1787837 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125231 1787837 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125234 1787837 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125237 1787837 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125154 1787838 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125218 1787838 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125220 1787838 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286408.125223 1787838 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +253,164084,"TERMINAL",0,0,"84",,terminal_output +254,165125,"TERMINAL",0,0,"95",,terminal_output +255,166178,"TERMINAL",0,0,"506",,terminal_output +256,167295,"TERMINAL",0,0,"17",,terminal_output +257,168276,"TERMINAL",0,0,"28",,terminal_output +258,169320,"TERMINAL",0,0,"39",,terminal_output +259,170465,"TERMINAL",0,0,"410",,terminal_output +260,171495,"TERMINAL",0,0,"51",,terminal_output +261,172465,"TERMINAL",0,0,"62",,terminal_output +262,173536,"TERMINAL",0,0,"73",,terminal_output +263,174572,"TERMINAL",0,0,"84",,terminal_output +264,175617,"TERMINAL",0,0,"96",,terminal_output +265,176711,"TERMINAL",0,0,"7:017",,terminal_output +266,177708,"TERMINAL",0,0,"28",,terminal_output +267,178757,"TERMINAL",0,0,"39",,terminal_output +268,180605,"TERMINAL",0,0,"420",,terminal_output +269,181580,"TERMINAL",0,0,"51",,terminal_output +270,182616,"TERMINAL",0,0,"63",,terminal_output +271,183776,"TERMINAL",0,0,"84",,terminal_output +272,184804,"TERMINAL",0,0,"95",,terminal_output +273,185768,"TERMINAL",0,0,"106",,terminal_output +274,186818,"TERMINAL",0,0,"17",,terminal_output +275,187874,"TERMINAL",0,0,"28",,terminal_output +276,188920,"TERMINAL",0,0,"39",,terminal_output +277,189986,"TERMINAL",0,0,"430",,terminal_output +278,191039,"TERMINAL",0,0,"51",,terminal_output +279,192076,"TERMINAL",0,0,"62",,terminal_output +280,193147,"TERMINAL",0,0,"73",,terminal_output +281,194193,"TERMINAL",0,0,"84",,terminal_output +282,194945,"TERMINAL",0,0,"W0000 00:00:1751286439.371874 1787836 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751286439.371877 1787837 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751286439.371936 1787835 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751286439.371907 1787838 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +283,195217,"TERMINAL",0,0,"95",,terminal_output +284,196273,"TERMINAL",0,0,"206",,terminal_output +285,197466,"TERMINAL",0,0,"17",,terminal_output +286,198420,"TERMINAL",0,0,"28",,terminal_output +287,199108,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +288,199448,"TERMINAL",0,0,"39",,terminal_output +289,199762,"TERMINAL",0,0,"2025-06-30 14:27:24.104224: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 14:27:24.106628: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 14:27:24.139953: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +290,200180,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_142723-78k2arm7\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run test-wandb-tags-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/78k2arm7\r\n",,terminal_output +291,200473,"TERMINAL",0,0,"440",,terminal_output +292,201597,"TERMINAL",0,0,"51",,terminal_output +293,202570,"TERMINAL",0,0,"62",,terminal_output +294,202634,"TERMINAL",0,0,"2025-06-30 14:27:27.048214: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +295,203645,"TERMINAL",0,0,"74",,terminal_output +296,204649,"TERMINAL",0,0,"95",,terminal_output +297,205796,"TERMINAL",0,0,"306",,terminal_output +298,206821,"TERMINAL",0,0,"17",,terminal_output +299,207800,"TERMINAL",0,0,"28",,terminal_output +300,208868,"TERMINAL",0,0,"39",,terminal_output +301,209996,"TERMINAL",0,0,"450",,terminal_output +302,211020,"TERMINAL",0,0,"51",,terminal_output +303,212044,"TERMINAL",0,0,"62",,terminal_output +304,213036,"TERMINAL",0,0,"73",,terminal_output +305,213542,"TERMINAL",0,0,"2025-06-30 14:27:37.918961: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +306,213899,"TERMINAL",0,0,"2025-06-30 14:27:38.323602: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +307,214096,"TERMINAL",0,0,"84",,terminal_output +308,215135,"TERMINAL",0,0,"95",,terminal_output +309,216184,"TERMINAL",0,0,"406",,terminal_output +310,216336,"TERMINAL",0,0,"2025-06-30 14:27:40.761205: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +311,217233,"TERMINAL",0,0,"17",,terminal_output +312,217360,"TERMINAL",0,0,"2025-06-30 14:27:41.787495: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +313,218275,"TERMINAL",0,0,"28",,terminal_output +314,219416,"TERMINAL",0,0,"39",,terminal_output +315,220370,"TERMINAL",0,0,"43:00",,terminal_output +316,221419,"TERMINAL",0,0,"51",,terminal_output +317,222470,"TERMINAL",0,0,"62",,terminal_output +318,223519,"TERMINAL",0,0,"73",,terminal_output +319,224643,"TERMINAL",0,0,"84",,terminal_output +320,225673,"TERMINAL",0,0,"96",,terminal_output +321,226692,"TERMINAL",0,0,"517",,terminal_output +322,227712,"TERMINAL",0,0,"28",,terminal_output +323,228762,"TERMINAL",0,0,"39",,terminal_output +324,229812,"TERMINAL",0,0,"410",,terminal_output +325,230864,"TERMINAL",0,0,"51",,terminal_output +326,231917,"TERMINAL",0,0,"62",,terminal_output +327,233039,"TERMINAL",0,0,"73",,terminal_output +328,234058,"TERMINAL",0,0,"84",,terminal_output +329,235070,"TERMINAL",0,0,"95",,terminal_output +330,236115,"TERMINAL",0,0,"8:006",,terminal_output +331,237159,"TERMINAL",0,0,"17",,terminal_output +332,238218,"TERMINAL",0,0,"28",,terminal_output +333,239278,"TERMINAL",0,0,"39",,terminal_output +334,240407,"TERMINAL",0,0,"420",,terminal_output +335,241370,"TERMINAL",0,0,"51",,terminal_output +336,242450,"TERMINAL",0,0,"62",,terminal_output +337,243462,"TERMINAL",0,0,"73",,terminal_output +338,244509,"TERMINAL",0,0,"84",,terminal_output +339,245628,"TERMINAL",0,0,"95",,terminal_output +340,245750,"TERMINAL",0,0,"2025-06-30 14:28:10.176057: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 14:28:10.176181: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +341,246603,"TERMINAL",0,0,"107",,terminal_output +342,247646,"TERMINAL",0,0,"28",,terminal_output +343,248042,"TERMINAL",0,0,"2025-06-30 14:28:12.446462: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +344,248811,"TERMINAL",0,0,"39",,terminal_output +345,249215,"TERMINAL",0,0,"2025-06-30 14:28:13.639008: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +346,249745,"TERMINAL",0,0,"430",,terminal_output +347,250857,"TERMINAL",0,0,"51",,terminal_output +348,251833,"TERMINAL",0,0,"62",,terminal_output +349,252899,"TERMINAL",0,0,"73",,terminal_output +350,254535,"TERMINAL",0,0,"84",,terminal_output +351,255546,"TERMINAL",0,0,"95",,terminal_output +352,256598,"TERMINAL",0,0,"207",,terminal_output +353,257648,"TERMINAL",0,0,"28",,terminal_output +354,258691,"TERMINAL",0,0,"39",,terminal_output +355,259762,"TERMINAL",0,0,"440",,terminal_output +356,260779,"TERMINAL",0,0,"51",,terminal_output +357,261910,"TERMINAL",0,0,"62",,terminal_output +358,262882,"TERMINAL",0,0,"73",,terminal_output +359,263931,"TERMINAL",0,0,"84",,terminal_output +360,265120,"TERMINAL",0,0,"95",,terminal_output +361,266024,"TERMINAL",0,0,"306",,terminal_output +362,267131,"TERMINAL",0,0,"17",,terminal_output +363,268110,"TERMINAL",0,0,"28",,terminal_output +364,269193,"TERMINAL",0,0,"39",,terminal_output +365,270292,"TERMINAL",0,0,"450",,terminal_output +366,271265,"TERMINAL",0,0,"51",,terminal_output +367,272311,"TERMINAL",0,0,"62",,terminal_output +368,273359,"TERMINAL",0,0,"73",,terminal_output +369,274385,"TERMINAL",0,0,"84",,terminal_output +370,275428,"TERMINAL",0,0,"95",,terminal_output +371,276468,"TERMINAL",0,0,"406",,terminal_output +372,277507,"TERMINAL",0,0,"17",,terminal_output +373,278547,"TERMINAL",0,0,"28",,terminal_output +374,279630,"TERMINAL",0,0,"34:00",,terminal_output +375,280639,"TERMINAL",0,0,"51",,terminal_output +376,281675,"TERMINAL",0,0,"62",,terminal_output +377,282800,"TERMINAL",0,0,"73",,terminal_output +378,283824,"TERMINAL",0,0,"84",,terminal_output +379,284824,"TERMINAL",0,0,"95",,terminal_output +380,285870,"TERMINAL",0,0,"506",,terminal_output +381,286997,"TERMINAL",0,0,"17",,terminal_output +382,287970,"TERMINAL",0,0,"28",,terminal_output +383,289022,"TERMINAL",0,0,"39",,terminal_output +384,290064,"TERMINAL",0,0,"410",,terminal_output +385,291112,"TERMINAL",0,0,"51",,terminal_output +386,292157,"TERMINAL",0,0,"62",,terminal_output +387,293197,"TERMINAL",0,0,"73",,terminal_output +388,294235,"TERMINAL",0,0,"84",,terminal_output +389,295280,"TERMINAL",0,0,"95",,terminal_output +390,296327,"TERMINAL",0,0,"9:006",,terminal_output +391,297460,"TERMINAL",0,0,"17",,terminal_output +392,298468,"TERMINAL",0,0,"28",,terminal_output +393,299467,"TERMINAL",0,0,"39",,terminal_output +394,300509,"TERMINAL",0,0,"420",,terminal_output +395,301555,"TERMINAL",0,0,"51",,terminal_output +396,302607,"TERMINAL",0,0,"63",,terminal_output +397,303694,"TERMINAL",0,0,"84",,terminal_output +398,304716,"TERMINAL",0,0,"95",,terminal_output +399,305747,"TERMINAL",0,0,"106",,terminal_output +400,306866,"TERMINAL",0,0,"17",,terminal_output +401,307891,"TERMINAL",0,0,"28",,terminal_output +402,308874,"TERMINAL",0,0,"39",,terminal_output +403,309938,"TERMINAL",0,0,"430",,terminal_output +404,311065,"TERMINAL",0,0,"51",,terminal_output +405,312002,"TERMINAL",0,0,"62",,terminal_output 
+406,313112,"TERMINAL",0,0,"73",,terminal_output +407,314087,"TERMINAL",0,0,"84",,terminal_output +408,315161,"TERMINAL",0,0,"95",,terminal_output +409,316185,"TERMINAL",0,0,"206",,terminal_output +410,317221,"TERMINAL",0,0,"17",,terminal_output +411,318272,"TERMINAL",0,0,"28",,terminal_output +412,319324,"TERMINAL",0,0,"39",,terminal_output +413,320381,"TERMINAL",0,0,"440",,terminal_output +414,321514,"TERMINAL",0,0,"51",,terminal_output +415,322471,"TERMINAL",0,0,"62",,terminal_output +416,323556,"TERMINAL",0,0,"73",,terminal_output +417,324571,"TERMINAL",0,0,"84",,terminal_output +418,325710,"TERMINAL",0,0,"96",,terminal_output +419,326735,"TERMINAL",0,0,"317",,terminal_output +420,327755,"TERMINAL",0,0,"28",,terminal_output +421,328735,"TERMINAL",0,0,"39",,terminal_output +422,329777,"TERMINAL",0,0,"450",,terminal_output +423,330815,"TERMINAL",0,0,"51",,terminal_output +424,331856,"TERMINAL",0,0,"62",,terminal_output +425,332977,"TERMINAL",0,0,"73",,terminal_output +426,333938,"TERMINAL",0,0,"84",,terminal_output +427,335157,"TERMINAL",0,0,"95",,terminal_output +428,336152,"TERMINAL",0,0,"406",,terminal_output +429,337174,"TERMINAL",0,0,"17",,terminal_output +430,338171,"TERMINAL",0,0,"28",,terminal_output +431,339214,"TERMINAL",0,0,"39",,terminal_output +432,340253,"TERMINAL",0,0,"45:00",,terminal_output +433,341379,"TERMINAL",0,0,"51",,terminal_output +434,342353,"TERMINAL",0,0,"62",,terminal_output +435,343399,"TERMINAL",0,0,"73",,terminal_output +436,344549,"TERMINAL",0,0,"84",,terminal_output +437,345478,"TERMINAL",0,0,"95",,terminal_output +438,346519,"TERMINAL",0,0,"506",,terminal_output +439,347568,"TERMINAL",0,0,"17",,terminal_output +440,348603,"TERMINAL",0,0,"29",,terminal_output +441,349672,"TERMINAL",0,0,"410",,terminal_output +442,350692,"TERMINAL",0,0,"51",,terminal_output +443,351755,"TERMINAL",0,0,"62",,terminal_output +444,352776,"TERMINAL",0,0,"73",,terminal_output +445,353806,"TERMINAL",0,0,"84",,terminal_output +446,354862,"TERMINAL",0,0,"95",,terminal_output +447,355908,"TERMINAL",0,0,"30:006",,terminal_output +448,356941,"TERMINAL",0,0,"17",,terminal_output +449,357985,"TERMINAL",0,0,"28",,terminal_output +450,359097,"TERMINAL",0,0,"39",,terminal_output +451,360074,"TERMINAL",0,0,"420",,terminal_output +452,361125,"TERMINAL",0,0,"51",,terminal_output +453,362173,"TERMINAL",0,0,"62",,terminal_output +454,363213,"TERMINAL",0,0,"73",,terminal_output +455,364260,"TERMINAL",0,0,"84",,terminal_output +456,365296,"TERMINAL",0,0,"95",,terminal_output +457,366342,"TERMINAL",0,0,"106",,terminal_output +458,367461,"TERMINAL",0,0,"17",,terminal_output +459,368512,"TERMINAL",0,0,"28",,terminal_output +460,369536,"TERMINAL",0,0,"39",,terminal_output +461,370523,"TERMINAL",0,0,"430",,terminal_output +462,371568,"TERMINAL",0,0,"51",,terminal_output +463,372611,"TERMINAL",0,0,"63",,terminal_output +464,373657,"TERMINAL",0,0,"84",,terminal_output +465,374757,"TERMINAL",0,0,"95",,terminal_output +466,375790,"TERMINAL",0,0,"206",,terminal_output +467,376790,"TERMINAL",0,0,"17",,terminal_output +468,377939,"TERMINAL",0,0,"28",,terminal_output +469,378882,"TERMINAL",0,0,"39",,terminal_output +470,379992,"TERMINAL",0,0,"440",,terminal_output +471,381007,"TERMINAL",0,0,"51",,terminal_output +472,382030,"TERMINAL",0,0,"62",,terminal_output +473,383155,"TERMINAL",0,0,"73",,terminal_output +474,384118,"TERMINAL",0,0,"84",,terminal_output +475,385156,"TERMINAL",0,0,"95",,terminal_output +476,386197,"TERMINAL",0,0,"306",,terminal_output +477,387233,"TERMINAL",0,0,"17",,terminal_output 
+478,388280,"TERMINAL",0,0,"28",,terminal_output +479,389327,"TERMINAL",0,0,"39",,terminal_output +480,390423,"TERMINAL",0,0,"450",,terminal_output +481,391447,"TERMINAL",0,0,"51",,terminal_output +482,392466,"TERMINAL",0,0,"62",,terminal_output +483,393511,"TERMINAL",0,0,"73",,terminal_output +484,394624,"TERMINAL",0,0,"84",,terminal_output +485,395607,"TERMINAL",0,0,"96",,terminal_output +486,396654,"TERMINAL",0,0,"417",,terminal_output +487,397705,"TERMINAL",0,0,"28",,terminal_output +488,398825,"TERMINAL",0,0,"39",,terminal_output +489,399823,"TERMINAL",0,0,"46:00",,terminal_output +490,400868,"TERMINAL",0,0,"51",,terminal_output +491,401914,"TERMINAL",0,0,"62",,terminal_output +492,402965,"TERMINAL",0,0,"73",,terminal_output +493,404045,"TERMINAL",0,0,"84",,terminal_output +494,405071,"TERMINAL",0,0,"95",,terminal_output +495,406195,"TERMINAL",0,0,"506",,terminal_output +496,407221,"TERMINAL",0,0,"17",,terminal_output +497,408202,"TERMINAL",0,0,"28",,terminal_output +498,409258,"TERMINAL",0,0,"39",,terminal_output +499,410301,"TERMINAL",0,0,"410",,terminal_output +500,411419,"TERMINAL",0,0,"51",,terminal_output +501,412402,"TERMINAL",0,0,"62",,terminal_output +502,413467,"TERMINAL",0,0,"73",,terminal_output +503,414594,"TERMINAL",0,0,"84",,terminal_output +504,415618,"TERMINAL",0,0,"95",,terminal_output +505,416578,"TERMINAL",0,0,"1:007",,terminal_output +506,417622,"TERMINAL",0,0,"28",,terminal_output +507,418685,"TERMINAL",0,0,"39",,terminal_output +508,419813,"TERMINAL",0,0,"420",,terminal_output +509,420842,"TERMINAL",0,0,"51",,terminal_output +510,421816,"TERMINAL",0,0,"62",,terminal_output +511,422866,"TERMINAL",0,0,"73",,terminal_output +512,423915,"TERMINAL",0,0,"84",,terminal_output +513,425036,"TERMINAL",0,0,"95",,terminal_output +514,426011,"TERMINAL",0,0,"106",,terminal_output +515,427081,"TERMINAL",0,0,"17",,terminal_output +516,428208,"TERMINAL",0,0,"28",,terminal_output +517,429235,"TERMINAL",0,0,"39",,terminal_output +518,430198,"TERMINAL",0,0,"430",,terminal_output +519,431240,"TERMINAL",0,0,"51",,terminal_output +520,432285,"TERMINAL",0,0,"62",,terminal_output +521,433430,"TERMINAL",0,0,"73",,terminal_output +522,434382,"TERMINAL",0,0,"84",,terminal_output +523,435431,"TERMINAL",0,0,"95",,terminal_output +524,436472,"TERMINAL",0,0,"206",,terminal_output +525,437519,"TERMINAL",0,0,"17",,terminal_output +526,438654,"TERMINAL",0,0,"28",,terminal_output +527,439615,"TERMINAL",0,0,"340",,terminal_output +528,440702,"TERMINAL",0,0,"51",,terminal_output +529,441728,"TERMINAL",0,0,"62",,terminal_output +530,442854,"TERMINAL",0,0,"73",,terminal_output +531,443798,"TERMINAL",0,0,"84",,terminal_output +532,444902,"TERMINAL",0,0,"95",,terminal_output +533,445926,"TERMINAL",0,0,"306",,terminal_output +534,446934,"TERMINAL",0,0,"17",,terminal_output +535,447974,"TERMINAL",0,0,"28",,terminal_output +536,449101,"TERMINAL",0,0,"39",,terminal_output +537,450124,"TERMINAL",0,0,"450",,terminal_output +538,451152,"TERMINAL",0,0,"51",,terminal_output +539,452172,"TERMINAL",0,0,"62",,terminal_output +540,453205,"TERMINAL",0,0,"73",,terminal_output +541,454258,"TERMINAL",0,0,"84",,terminal_output +542,455304,"TERMINAL",0,0,"95",,terminal_output +543,456347,"TERMINAL",0,0,"406",,terminal_output +544,457396,"TERMINAL",0,0,"17",,terminal_output +545,458422,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3306183.0 tasks 0-3: running\r\n",,terminal_output +546,458446,"TERMINAL",0,0,"28",,terminal_output +547,458639,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to 
StepId=3306183.0\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3306183.0 ON hkn0901 CANCELLED AT 2025-06-30T14:31:43 ***\r\n",,terminal_output +548,458798,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3306183.0\r\nsrun: job abort in progress\r\n",,terminal_output +549,459546,"TERMINAL",0,0,"39",,terminal_output +550,459900,"TERMINAL",0,0,"^C",,terminal_output +551,460138,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +552,460539,"TERMINAL",0,0,"47:00",,terminal_output +553,461014,"TERMINAL",0,0,"g",,terminal_output +554,461117,"TERMINAL",0,0,"[?25li[?25h[?25lt[?25h",,terminal_output +555,461233,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +556,461396,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +557,461449,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +558,461581,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +559,461597,"TERMINAL",0,0,"52",,terminal_output +560,462320,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +561,462387,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +562,462577,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +563,462643,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +564,462655,"TERMINAL",0,0,"73",,terminal_output +565,462782,"TERMINAL",0,0,"[?25lc[?25h[?25lh[?25h",,terminal_output +566,463288,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r add-wandb-name-and-tags\r\n convert-to-jax-array-in-iter\r\n dont-let-tf-see-gpu\r\n* main\r\n preprocess_video\r\n tmp\r\n",,terminal_output +567,463362,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +568,463744,"TERMINAL",0,0,"84",,terminal_output +569,464157,"TERMINAL",0,0,"g",,terminal_output +570,464240,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +571,464355,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +572,464422,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +573,464637,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +574,464706,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +575,464772,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +576,464773,"TERMINAL",0,0,"95",,terminal_output +577,464904,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +578,464971,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +579,465113,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +580,465241,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +581,465306,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +582,465372,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +583,465794,"TERMINAL",0,0,"506",,terminal_output +584,466828,"TERMINAL",0,0,"17",,terminal_output +585,467941,"TERMINAL",0,0,"28",,terminal_output +586,468355,"TERMINAL",0,0,"convert-to-jax-array-in-iter",,terminal_output +587,468809,"TERMINAL",0,0,"convert-to-jax-array-in-iter\r\n[?2004l\r",,terminal_output +588,468877,"TERMINAL",0,0,"M\tutils/dataloader.py\r\n",,terminal_output +589,468938,"TERMINAL",0,0,"39",,terminal_output +590,468954,"TERMINAL",0,0,"Switched to branch 'convert-to-jax-array-in-iter'\r\nYour branch is up to date with 'origin/convert-to-jax-array-in-iter'.\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +591,469997,"TERMINAL",0,0,"git checkout convert-to-jax-array-in-iter",,terminal_output +592,469998,"TERMINAL",0,0,"410",,terminal_output +593,470122,"TERMINAL",0,0,"branch",,terminal_output +594,470560,"TERMINAL",0,0,"sh scripts_horeka/train_tokenizer.sh ",,terminal_output 
+595,471123,"TERMINAL",0,0,"51",,terminal_output +596,471249,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +597,471399,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1787497\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0901\r\nSLURM_JOB_START_TIME=1751286284\r\nSLURM_STEP_NODELIST=hkn0901\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751289884\r\nSLURM_PMI2_SRUN_PORT=43699\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated-h100\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306183\r\nSLURM_PTY_PORT=38037\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=4\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi4.hkibbi4e1.hkn0901\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=204800\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0901\r\nSLURM_SRUN_COMM_PORT=43913\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=4\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306183\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0901\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=43913\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0901\r\n",,terminal_output +598,471653,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +599,472090,"TERMINAL",0,0,"62",,terminal_output +600,472319,"",0,0,"Switched from branch 'main' to 'convert-to-jax-array-in-iter'",,git_branch_checkout +601,473183,"TERMINAL",0,0,"73",,terminal_output +602,474199,"TERMINAL",0,0,"84",,terminal_output +603,475232,"TERMINAL",0,0,"95",,terminal_output +604,476286,"TERMINAL",0,0,"2:006",,terminal_output +605,476465,"TERMINAL",0,0,"2025-06-30 14:32:00.805672: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 14:32:00.806390: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751286720.815780 1791094 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751286720.816051 1791092 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751286720.819226 1791092 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751286720.819788 1791094 
cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n2025-06-30 14:32:00.824994: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nW0000 00:00:1751286720.831098 1791092 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.831118 1791092 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.831120 1791092 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.831122 1791092 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.831571 1791094 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.831589 1791094 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.831592 1791094 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.831594 1791094 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n2025-06-30 14:32:00.831824: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751286720.835206 1791093 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751286720.839231 1791093 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751286720.842056 1791091 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751286720.845232 1791091 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751286720.852471 1791093 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.852493 1791093 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.852496 1791093 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.852498 1791093 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.856345 1791091 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.856368 1791091 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.856370 1791091 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751286720.856372 1791091 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +606,477334,"TERMINAL",0,0,"17",,terminal_output +607,478490,"TERMINAL",0,0,"28",,terminal_output +608,479430,"TERMINAL",0,0,"39",,terminal_output +609,480470,"TERMINAL",0,0,"420",,terminal_output +610,481520,"TERMINAL",0,0,"51",,terminal_output +611,482573,"TERMINAL",0,0,"62",,terminal_output +612,483612,"TERMINAL",0,0,"74",,terminal_output +613,484737,"TERMINAL",0,0,"95",,terminal_output +614,485694,"TERMINAL",0,0,"106",,terminal_output +615,486584,"TERMINAL",0,0,"W0000 00:00:1751286730.905975 1791094 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751286730.906040 1791092 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751286730.907488 1791091 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751286730.941897 1791093 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +616,486742,"TERMINAL",0,0,"17",,terminal_output +617,487788,"TERMINAL",0,0,"28",,terminal_output +618,488831,"TERMINAL",0,0,"39",,terminal_output +619,489558,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +620,489865,"TERMINAL",0,0,"430",,terminal_output +621,490085,"TERMINAL",0,0,"2025-06-30 14:32:14.508415: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +622,490152,"TERMINAL",0,0,"2025-06-30 14:32:14.571375: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +623,490210,"TERMINAL",0,0,"2025-06-30 14:32:14.599131: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +624,490398,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_143213-kpnty7co\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run test-wandb-tags-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/kpnty7co\r\n",,terminal_output +625,490906,"TERMINAL",0,0,"51",,terminal_output +626,491880,"TERMINAL",0,0,"2025-06-30 14:32:16.304673: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +627,491945,"TERMINAL",0,0,"62",,terminal_output +628,493034,"TERMINAL",0,0,"73",,terminal_output +629,494056,"TERMINAL",0,0,"84",,terminal_output +630,495129,"TERMINAL",0,0,"95",,terminal_output +631,496206,"TERMINAL",0,0,"206",,terminal_output +632,497232,"TERMINAL",0,0,"17",,terminal_output +633,498225,"TERMINAL",0,0,"28",,terminal_output +634,499277,"TERMINAL",0,0,"39",,terminal_output +635,500314,"TERMINAL",0,0,"440",,terminal_output +636,501356,"TERMINAL",0,0,"51",,terminal_output +637,502398,"TERMINAL",0,0,"62",,terminal_output +638,503474,"TERMINAL",0,0,"73",,terminal_output +639,504093,"TERMINAL",0,0,"2025-06-30 14:32:28.452795: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +640,504203,"TERMINAL",0,0,"2025-06-30 14:32:28.589438: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +641,504497,"TERMINAL",0,0,"84",,terminal_output +642,505494,"TERMINAL",0,0,"2025-06-30 14:32:29.919117: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +643,505558,"TERMINAL",0,0,"95",,terminal_output +644,506649,"TERMINAL",0,0,"306",,terminal_output +645,506701,"TERMINAL",0,0,"2025-06-30 14:32:31.124170: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +646,507613,"TERMINAL",0,0,"18",,terminal_output +647,508659,"TERMINAL",0,0,"39",,terminal_output +648,509721,"TERMINAL",0,0,"450",,terminal_output +649,510747,"TERMINAL",0,0,"51",,terminal_output +650,511871,"TERMINAL",0,0,"62",,terminal_output +651,512897,"TERMINAL",0,0,"73",,terminal_output +652,513886,"TERMINAL",0,0,"84",,terminal_output +653,514943,"TERMINAL",0,0,"95",,terminal_output +654,515960,"TERMINAL",0,0,"406",,terminal_output +655,517006,"TERMINAL",0,0,"17",,terminal_output +656,518121,"TERMINAL",0,0,"28",,terminal_output +657,519142,"TERMINAL",0,0,"39",,terminal_output +658,520151,"TERMINAL",0,0,"48:00",,terminal_output +659,521202,"TERMINAL",0,0,"51",,terminal_output +660,522248,"TERMINAL",0,0,"62",,terminal_output +661,523292,"TERMINAL",0,0,"73",,terminal_output +662,524346,"TERMINAL",0,0,"84",,terminal_output +663,525393,"TERMINAL",0,0,"95",,terminal_output +664,526514,"TERMINAL",0,0,"506",,terminal_output +665,527505,"TERMINAL",0,0,"17",,terminal_output +666,528562,"TERMINAL",0,0,"28",,terminal_output +667,529731,"TERMINAL",0,0,"310",,terminal_output +668,530712,"TERMINAL",0,0,"51",,terminal_output +669,531685,"TERMINAL",0,0,"62",,terminal_output +670,532761,"TERMINAL",0,0,"73",,terminal_output +671,533785,"TERMINAL",0,0,"84",,terminal_output +672,534706,"TERMINAL",0,0,"2025-06-30 14:32:59.078165: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 14:32:59.078302: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +673,534816,"TERMINAL",0,0,"95",,terminal_output +674,535938,"TERMINAL",0,0,"3:006",,terminal_output +675,536840,"TERMINAL",0,0,"2025-06-30 14:33:01.265178: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +676,536901,"TERMINAL",0,0,"17",,terminal_output +677,537934,"TERMINAL",0,0,"28",,terminal_output +678,538980,"TERMINAL",0,0,"39",,terminal_output +679,539749,"TERMINAL",0,0,"2025-06-30 14:33:04.172835: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +680,540023,"TERMINAL",0,0,"420",,terminal_output +681,541070,"TERMINAL",0,0,"51",,terminal_output +682,542118,"TERMINAL",0,0,"62",,terminal_output +683,543172,"TERMINAL",0,0,"73",,terminal_output +684,544215,"TERMINAL",0,0,"84",,terminal_output +685,545263,"TERMINAL",0,0,"95",,terminal_output +686,546309,"TERMINAL",0,0,"106",,terminal_output +687,547356,"TERMINAL",0,0,"17",,terminal_output +688,548401,"TERMINAL",0,0,"28",,terminal_output +689,549444,"TERMINAL",0,0,"39",,terminal_output +690,550587,"TERMINAL",0,0,"430",,terminal_output +691,551603,"TERMINAL",0,0,"51",,terminal_output +692,552579,"TERMINAL",0,0,"63",,terminal_output +693,553627,"TERMINAL",0,0,"84",,terminal_output +694,554703,"TERMINAL",0,0,"95",,terminal_output +695,556008,"TERMINAL",0,0,"206",,terminal_output +696,556765,"TERMINAL",0,0,"17",,terminal_output +697,557815,"TERMINAL",0,0,"28",,terminal_output +698,558866,"TERMINAL",0,0,"39",,terminal_output +699,559913,"TERMINAL",0,0,"440",,terminal_output +700,560975,"TERMINAL",0,0,"51",,terminal_output +701,562049,"TERMINAL",0,0,"62",,terminal_output +702,563045,"TERMINAL",0,0,"73",,terminal_output +703,564088,"TERMINAL",0,0,"84",,terminal_output +704,565224,"TERMINAL",0,0,"95",,terminal_output +705,566173,"TERMINAL",0,0,"306",,terminal_output +706,567221,"TERMINAL",0,0,"17",,terminal_output +707,568257,"TERMINAL",0,0,"28",,terminal_output +708,569294,"TERMINAL",0,0,"39",,terminal_output +709,570334,"TERMINAL",0,0,"450",,terminal_output +710,571373,"TERMINAL",0,0,"51",,terminal_output +711,572450,"TERMINAL",0,0,"62",,terminal_output +712,573457,"TERMINAL",0,0,"73",,terminal_output +713,574540,"TERMINAL",0,0,"84",,terminal_output +714,575545,"TERMINAL",0,0,"95",,terminal_output +715,576589,"TERMINAL",0,0,"407",,terminal_output +716,577636,"TERMINAL",0,0,"28",,terminal_output +717,578682,"TERMINAL",0,0,"39",,terminal_output +718,579728,"TERMINAL",0,0,"49:00",,terminal_output +719,580774,"TERMINAL",0,0,"51",,terminal_output +720,581859,"TERMINAL",0,0,"62",,terminal_output +721,582877,"TERMINAL",0,0,"73",,terminal_output +722,583962,"TERMINAL",0,0,"84",,terminal_output +723,584987,"TERMINAL",0,0,"95",,terminal_output +724,586004,"TERMINAL",0,0,"506",,terminal_output +725,587067,"TERMINAL",0,0,"17",,terminal_output +726,588100,"TERMINAL",0,0,"28",,terminal_output +727,589139,"TERMINAL",0,0,"39",,terminal_output +728,590181,"TERMINAL",0,0,"410",,terminal_output +729,591250,"TERMINAL",0,0,"51",,terminal_output +730,592281,"TERMINAL",0,0,"62",,terminal_output +731,593316,"TERMINAL",0,0,"73",,terminal_output +732,594365,"TERMINAL",0,0,"84",,terminal_output +733,595414,"TERMINAL",0,0,"95",,terminal_output +734,596555,"TERMINAL",0,0,"4:006",,terminal_output +735,597487,"TERMINAL",0,0,"17",,terminal_output +736,598607,"TERMINAL",0,0,"28",,terminal_output +737,599571,"TERMINAL",0,0,"39",,terminal_output +738,600656,"TERMINAL",0,0,"421",,terminal_output +739,601652,"TERMINAL",0,0,"62",,terminal_output +740,602692,"TERMINAL",0,0,"73",,terminal_output +741,603728,"TERMINAL",0,0,"84",,terminal_output +742,604004,"TERMINAL",0,0,"watch",,terminal_focus +743,604852,"TERMINAL",0,0,"95",,terminal_output +744,605504,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +745,685148,"TERMINAL",0,0,"srun",,terminal_focus 
+746,685984,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3306183.1 tasks 0-3: running\r\n",,terminal_output +747,686198,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3306183.1\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3306183.1 ON hkn0901 CANCELLED AT 2025-06-30T14:35:30 ***\r\n",,terminal_output +748,686423,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3306183.1\r\nsrun: job abort in progress\r\n",,terminal_output +749,687140,"TERMINAL",0,0,"^C",,terminal_output +750,687386,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3306183.1\r\nsrun: job abort in progress\r\n",,terminal_output +751,687675,"TERMINAL",0,0,"]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +752,687887,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0901:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0901 jafar]$ ",,terminal_output +753,689216,"TERMINAL",0,0,"bash",,terminal_focus +754,689716,"TERMINAL",0,0,"srun",,terminal_focus +755,690319,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\nsrun: error: hkn0901: task 0: Exited with exit code 137\r\nsalloc: Relinquishing job allocation 3306183\r\nsalloc: Job allocation 3306183 has been revoked.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;137",,terminal_output +756,694079,"TERMINAL",0,0,"bash",,terminal_focus +757,695362,"TERMINAL",0,0,"idling",,terminal_command +758,695419,"TERMINAL",0,0,"]633;E;2025-06-30 14:35:39 idling;ead59344-49db-4336-9336-47fae706e637]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 14:35:39 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 120 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +759,696459,"TERMINAL",0,0,"40\t ",,terminal_output +760,697499,"TERMINAL",0,0,"1\t ",,terminal_output +761,697931,"TERMINAL",0,0,"bash",,terminal_focus +762,698576,"TERMINAL",0,0,"2\t ",,terminal_output +763,699576,"TERMINAL",0,0,"3\t ",,terminal_output +764,700616,"TERMINAL",0,0,"4\t ",,terminal_output +765,701676,"TERMINAL",0,0,"6\t ",,terminal_output +766,702688,"TERMINAL",0,0,"7\t ",,terminal_output +767,703746,"TERMINAL",0,0,"8\t ",,terminal_output +768,704804,"TERMINAL",0,0,"9\t ",,terminal_output +769,705808,"TERMINAL",0,0,"50\t ",,terminal_output +770,706886,"TERMINAL",0,0,"1\t ",,terminal_output +771,707898,"TERMINAL",0,0,"2\t ",,terminal_output +772,708945,"TERMINAL",0,0,"3\t ",,terminal_output +773,709701,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=4 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G",,terminal_command +774,709738,"TERMINAL",0,0,"]633;E;2025-06-30 14:35:54 salloc --time=01:00:00 --partition=accelerated --nodes=4 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;Csalloc: Pending job allocation 3306206\r\nsalloc: job 3306206 queued and waiting for resources\r\n",,terminal_output +775,709975,"TERMINAL",0,0,"4\t ",,terminal_output +776,710896,"TERMINAL",0,0,"watch",,terminal_focus +777,711303,"TERMINAL",0,0,"519",,terminal_output +778,712376,"TERMINAL",0,0,"6\t ",,terminal_output +779,713366,"TERMINAL",0,0,"7\t ",,terminal_output +780,714400,"TERMINAL",0,0,"8\t ",,terminal_output +781,715438,"TERMINAL",0,0,"9\t 
",,terminal_output +782,716477,"TERMINAL",0,0,"6:00\t ",,terminal_output +783,717519,"TERMINAL",0,0,"1\t ",,terminal_output +784,718556,"TERMINAL",0,0,"2\t ",,terminal_output +785,719591,"TERMINAL",0,0,"3\t ",,terminal_output +786,720635,"TERMINAL",0,0,"5\t ",,terminal_output +787,721670,"TERMINAL",0,0,"6\t ",,terminal_output +788,722714,"TERMINAL",0,0,"7\t ",,terminal_output +789,723841,"TERMINAL",0,0,"8\t ",,terminal_output +790,724866,"TERMINAL",0,0,"9\t ",,terminal_output +791,725856,"TERMINAL",0,0,"10\t ",,terminal_output +792,726914,"TERMINAL",0,0,"1\t ",,terminal_output +793,727938,"TERMINAL",0,0,"2\t ",,terminal_output +794,728921,"TERMINAL",0,0,"3\t ",,terminal_output +795,729959,"TERMINAL",0,0,"4\t ",,terminal_output +796,730996,"TERMINAL",0,0,"5\t ",,terminal_output +797,732138,"TERMINAL",0,0,"61",,terminal_output +798,733162,"TERMINAL",0,0,"7\t ",,terminal_output +799,734187,"TERMINAL",0,0,"8\t ",,terminal_output +800,735150,"TERMINAL",0,0,"9\t ",,terminal_output +801,736189,"TERMINAL",0,0,"20\t ",,terminal_output +802,737226,"TERMINAL",0,0,"1\t ",,terminal_output +803,738293,"TERMINAL",0,0,"2\t ",,terminal_output +804,739302,"TERMINAL",0,0,"3\t ",,terminal_output +805,740334,"TERMINAL",0,0,"4\t ",,terminal_output +806,741372,"TERMINAL",0,0,"5\t ",,terminal_output +807,742452,"TERMINAL",0,0,"6\t ",,terminal_output +808,743456,"TERMINAL",0,0,"70",,terminal_output +809,744492,"TERMINAL",0,0,"8\t ",,terminal_output +810,745551,"TERMINAL",0,0,"9\t ",,terminal_output +811,746578,"TERMINAL",0,0,"30\t ",,terminal_output +812,747608,"TERMINAL",0,0,"2\t ",,terminal_output +813,748727,"TERMINAL",0,0,"3\t ",,terminal_output +814,749759,"TERMINAL",0,0,"4\t ",,terminal_output +815,750775,"TERMINAL",0,0,"5\t ",,terminal_output +816,751754,"TERMINAL",0,0,"6\t ",,terminal_output +817,752822,"TERMINAL",0,0,"7\t ",,terminal_output +818,753851,"TERMINAL",0,0,"8\t ",,terminal_output +819,754876,"TERMINAL",0,0,"9\t ",,terminal_output +820,755901,"TERMINAL",0,0,"40\t ",,terminal_output +821,756939,"TERMINAL",0,0,"1\t ",,terminal_output +822,757980,"TERMINAL",0,0,"2\t ",,terminal_output +823,759021,"TERMINAL",0,0,"3\t ",,terminal_output +824,760068,"TERMINAL",0,0,"4\t ",,terminal_output +825,761098,"TERMINAL",0,0,"5\t ",,terminal_output +826,762240,"TERMINAL",0,0,"6\t ",,terminal_output +827,763180,"TERMINAL",0,0,"7\t ",,terminal_output +828,764309,"TERMINAL",0,0,"8\t ",,terminal_output +829,765316,"TERMINAL",0,0,"9\t ",,terminal_output +830,766346,"TERMINAL",0,0,"50\t ",,terminal_output +831,767333,"TERMINAL",0,0,"1\t ",,terminal_output +832,768412,"TERMINAL",0,0,"2\t ",,terminal_output +833,769404,"TERMINAL",0,0,"3\t ",,terminal_output +834,770462,"TERMINAL",0,0,"4\t ",,terminal_output +835,771560,"TERMINAL",0,0,"5\t ",,terminal_output +836,772561,"TERMINAL",0,0,"6\t ",,terminal_output +837,773815,"TERMINAL",0,0,"7\t ",,terminal_output +838,774638,"TERMINAL",0,0,"8\t ",,terminal_output +839,775656,"TERMINAL",0,0,"7:00\t ",,terminal_output +840,776785,"TERMINAL",0,0,"1\t ",,terminal_output +841,777781,"TERMINAL",0,0,"2\t ",,terminal_output +842,778812,"TERMINAL",0,0,"3\t ",,terminal_output +843,779857,"TERMINAL",0,0,"4\t ",,terminal_output +844,780865,"TERMINAL",0,0,"5\t ",,terminal_output +845,781906,"TERMINAL",0,0,"6\t ",,terminal_output +846,783031,"TERMINAL",0,0,"7\t ",,terminal_output +847,784054,"TERMINAL",0,0,"8\t ",,terminal_output +848,785075,"TERMINAL",0,0,"9\t ",,terminal_output +849,786101,"TERMINAL",0,0,"10\t ",,terminal_output +850,787227,"TERMINAL",0,0,"1\t ",,terminal_output 
+851,788255,"TERMINAL",0,0,"2\t ",,terminal_output +852,789277,"TERMINAL",0,0,"3\t ",,terminal_output +853,790253,"TERMINAL",0,0,"4\t ",,terminal_output +854,791323,"TERMINAL",0,0,"5\t ",,terminal_output +855,792354,"TERMINAL",0,0,"6\t ",,terminal_output +856,793407,"TERMINAL",0,0,"7\t ",,terminal_output +857,794463,"TERMINAL",0,0,"8\t ",,terminal_output +858,795461,"TERMINAL",0,0,"9\t ",,terminal_output +859,796513,"TERMINAL",0,0,"20\t ",,terminal_output +860,797544,"TERMINAL",0,0,"1\t ",,terminal_output +861,798573,"TERMINAL",0,0,"2\t ",,terminal_output +862,799927,"TERMINAL",0,0,"3\t ",,terminal_output +863,800776,"TERMINAL",0,0,"5\t ",,terminal_output +864,801774,"TERMINAL",0,0,"6\t ",,terminal_output +865,802795,"TERMINAL",0,0,"7\t ",,terminal_output +866,803881,"TERMINAL",0,0,"8\t ",,terminal_output +867,804852,"TERMINAL",0,0,"9\t ",,terminal_output +868,805844,"TERMINAL",0,0,"30\t ",,terminal_output +869,806893,"TERMINAL",0,0,"1\t ",,terminal_output +870,808015,"TERMINAL",0,0,"2\t ",,terminal_output +871,809039,"TERMINAL",0,0,"3\t ",,terminal_output +872,809995,"TERMINAL",0,0,"4\t ",,terminal_output +873,811089,"TERMINAL",0,0,"5\t ",,terminal_output +874,812113,"TERMINAL",0,0,"6\t ",,terminal_output +875,813112,"TERMINAL",0,0,"7\t ",,terminal_output +876,814149,"TERMINAL",0,0,"8\t ",,terminal_output +877,815187,"TERMINAL",0,0,"9\t ",,terminal_output +878,816312,"TERMINAL",0,0,"40\t ",,terminal_output +879,817350,"TERMINAL",0,0,"1\t ",,terminal_output +880,818304,"TERMINAL",0,0,"2\t ",,terminal_output +881,819348,"TERMINAL",0,0,"3\t ",,terminal_output +882,820391,"TERMINAL",0,0,"4\t ",,terminal_output +883,821427,"TERMINAL",0,0,"5\t ",,terminal_output +884,822467,"TERMINAL",0,0,"6\t ",,terminal_output +885,823503,"TERMINAL",0,0,"7\t ",,terminal_output +886,824546,"TERMINAL",0,0,"8\t ",,terminal_output +887,825630,"TERMINAL",0,0,"9\t ",,terminal_output +888,826653,"TERMINAL",0,0,"51\t ",,terminal_output +889,827666,"TERMINAL",0,0,"2\t ",,terminal_output +890,828804,"TERMINAL",0,0,"3\t ",,terminal_output +891,829828,"TERMINAL",0,0,"4\t ",,terminal_output +892,830859,"TERMINAL",0,0,"5\t ",,terminal_output +893,831828,"TERMINAL",0,0,"6\t ",,terminal_output +894,832900,"TERMINAL",0,0,"7\t ",,terminal_output +895,833896,"TERMINAL",0,0,"8\t ",,terminal_output +896,834948,"TERMINAL",0,0,"9\t ",,terminal_output +897,835989,"TERMINAL",0,0,"8:001",,terminal_output +898,837046,"TERMINAL",0,0,"1\t ",,terminal_output +899,838067,"TERMINAL",0,0,"2\t ",,terminal_output +900,839109,"TERMINAL",0,0,"3\t ",,terminal_output +901,840175,"TERMINAL",0,0,"4\t ",,terminal_output +902,841197,"TERMINAL",0,0,"5\t ",,terminal_output +903,842225,"TERMINAL",0,0,"6\t ",,terminal_output +904,843271,"TERMINAL",0,0,"7\t ",,terminal_output +905,844313,"TERMINAL",0,0,"8\t ",,terminal_output +906,845393,"TERMINAL",0,0,"9\t ",,terminal_output +907,846418,"TERMINAL",0,0,"100",,terminal_output +908,847450,"TERMINAL",0,0,"1\t ",,terminal_output +909,848459,"TERMINAL",0,0,"2\t ",,terminal_output +910,849497,"TERMINAL",0,0,"3\t ",,terminal_output +911,850614,"TERMINAL",0,0,"4\t ",,terminal_output +912,851572,"TERMINAL",0,0,"5\t ",,terminal_output +913,852609,"TERMINAL",0,0,"6\t ",,terminal_output +914,853651,"TERMINAL",0,0,"8\t ",,terminal_output +915,854697,"TERMINAL",0,0,"9\t ",,terminal_output +916,855837,"TERMINAL",0,0,"20\t ",,terminal_output +917,856781,"TERMINAL",0,0,"1\t ",,terminal_output +918,857818,"TERMINAL",0,0,"2\t ",,terminal_output +919,858912,"TERMINAL",0,0,"3\t ",,terminal_output 
+920,859906,"TERMINAL",0,0,"4\t ",,terminal_output +921,860946,"TERMINAL",0,0,"5\t ",,terminal_output +922,862084,"TERMINAL",0,0,"6\t ",,terminal_output +923,863032,"TERMINAL",0,0,"7\t ",,terminal_output +924,864133,"TERMINAL",0,0,"8\t ",,terminal_output +925,865158,"TERMINAL",0,0,"9\t ",,terminal_output +926,866185,"TERMINAL",0,0,"30\t ",,terminal_output +927,867178,"TERMINAL",0,0,"1\t ",,terminal_output +928,868230,"TERMINAL",0,0,"2\t ",,terminal_output +929,869263,"TERMINAL",0,0,"3\t ",,terminal_output +930,870391,"TERMINAL",0,0,"4\t ",,terminal_output +931,871408,"TERMINAL",0,0,"5\t ",,terminal_output +932,872429,"TERMINAL",0,0,"6\t ",,terminal_output +933,873452,"TERMINAL",0,0,"7\t ",,terminal_output +934,874479,"TERMINAL",0,0,"8\t ",,terminal_output +935,875514,"TERMINAL",0,0,"9\t ",,terminal_output +936,876560,"TERMINAL",0,0,"40\t ",,terminal_output +937,877691,"TERMINAL",0,0,"1\t ",,terminal_output +938,878673,"TERMINAL",0,0,"3\t ",,terminal_output +939,879697,"TERMINAL",0,0,"4\t ",,terminal_output +940,880720,"TERMINAL",0,0,"5\t ",,terminal_output +941,881758,"TERMINAL",0,0,"6\t ",,terminal_output +942,882874,"TERMINAL",0,0,"7\t ",,terminal_output +943,883844,"TERMINAL",0,0,"8\t ",,terminal_output +944,884921,"TERMINAL",0,0,"9\t ",,terminal_output +945,885946,"TERMINAL",0,0,"50\t ",,terminal_output +946,886959,"TERMINAL",0,0,"1\t ",,terminal_output +947,888097,"TERMINAL",0,0,"2\t ",,terminal_output +948,889115,"TERMINAL",0,0,"3\t ",,terminal_output +949,890084,"TERMINAL",0,0,"4\t ",,terminal_output +950,891114,"TERMINAL",0,0,"5\t ",,terminal_output +951,892152,"TERMINAL",0,0,"6\t ",,terminal_output +952,893215,"TERMINAL",0,0,"7\t ",,terminal_output +953,894250,"TERMINAL",0,0,"8\t ",,terminal_output +954,895380,"TERMINAL",0,0,"9\t ",,terminal_output +955,896399,"TERMINAL",0,0,"9:00\t ",,terminal_output +956,897449,"TERMINAL",0,0,"1\t ",,terminal_output +957,898438,"TERMINAL",0,0,"2\t ",,terminal_output +958,899460,"TERMINAL",0,0,"3\t ",,terminal_output +959,900482,"TERMINAL",0,0,"4\t ",,terminal_output +960,901525,"TERMINAL",0,0,"5\t ",,terminal_output +961,902585,"TERMINAL",0,0,"6\t ",,terminal_output +962,903615,"TERMINAL",0,0,"7\t ",,terminal_output +963,904655,"TERMINAL",0,0,"9\t ",,terminal_output +964,905691,"TERMINAL",0,0,"10\t ",,terminal_output +965,906724,"TERMINAL",0,0,"1\t ",,terminal_output +966,907860,"TERMINAL",0,0,"2\t ",,terminal_output +967,908878,"TERMINAL",0,0,"3\t ",,terminal_output +968,909847,"TERMINAL",0,0,"4\t ",,terminal_output +969,910927,"TERMINAL",0,0,"5\t ",,terminal_output +970,911953,"TERMINAL",0,0,"6\t ",,terminal_output +971,912977,"TERMINAL",0,0,"7\t ",,terminal_output +972,914014,"TERMINAL",0,0,"8\t ",,terminal_output +973,915075,"TERMINAL",0,0,"9\t ",,terminal_output +974,916099,"TERMINAL",0,0,"20\t ",,terminal_output +975,917140,"TERMINAL",0,0,"1\t ",,terminal_output +976,918179,"TERMINAL",0,0,"2\t ",,terminal_output +977,919243,"TERMINAL",0,0,"31",,terminal_output +978,920351,"TERMINAL",0,0,"4\t ",,terminal_output +979,921390,"TERMINAL",0,0,"5\t ",,terminal_output +980,922354,"TERMINAL",0,0,"6\t ",,terminal_output +981,923399,"TERMINAL",0,0,"7\t ",,terminal_output +982,924413,"TERMINAL",0,0,"8\t ",,terminal_output +983,925455,"TERMINAL",0,0,"9\t ",,terminal_output +984,926491,"TERMINAL",0,0,"30\t ",,terminal_output +985,927537,"TERMINAL",0,0,"1\t ",,terminal_output +986,928645,"TERMINAL",0,0,"2\t ",,terminal_output +987,929613,"TERMINAL",0,0,"30",,terminal_output +988,930693,"TERMINAL",0,0,"5\t ",,terminal_output 
+989,931719,"TERMINAL",0,0,"6\t ",,terminal_output +990,932843,"TERMINAL",0,0,"7\t ",,terminal_output +991,933787,"TERMINAL",0,0,"8\t ",,terminal_output +992,934891,"TERMINAL",0,0,"9\t ",,terminal_output +993,935915,"TERMINAL",0,0,"40\t ",,terminal_output +994,936941,"TERMINAL",0,0,"1\t ",,terminal_output +995,937964,"TERMINAL",0,0,"2\t ",,terminal_output +996,939090,"TERMINAL",0,0,"3\t ",,terminal_output +997,940029,"TERMINAL",0,0,"4\t ",,terminal_output +998,941067,"TERMINAL",0,0,"5\t ",,terminal_output +999,942166,"TERMINAL",0,0,"6\t ",,terminal_output +1000,943151,"TERMINAL",0,0,"7\t ",,terminal_output +1001,944194,"TERMINAL",0,0,"8\t ",,terminal_output +1002,945236,"TERMINAL",0,0,"9\t ",,terminal_output +1003,946465,"TERMINAL",0,0,"50\t ",,terminal_output +1004,947400,"TERMINAL",0,0,"1\t ",,terminal_output +1005,948358,"TERMINAL",0,0,"2\t ",,terminal_output +1006,949404,"TERMINAL",0,0,"3\t ",,terminal_output +1007,950459,"TERMINAL",0,0,"4\t ",,terminal_output +1008,951487,"TERMINAL",0,0,"5\t ",,terminal_output +1009,952525,"TERMINAL",0,0,"6\t ",,terminal_output +1010,953566,"TERMINAL",0,0,"7\t ",,terminal_output +1011,954604,"TERMINAL",0,0,"8\t ",,terminal_output +1012,955680,"TERMINAL",0,0,"40:00\t ",,terminal_output +1013,956702,"TERMINAL",0,0,"1\t ",,terminal_output +1014,957728,"TERMINAL",0,0,"2\t ",,terminal_output +1015,958763,"TERMINAL",0,0,"3\t ",,terminal_output +1016,959802,"TERMINAL",0,0,"4\t ",,terminal_output +1017,960842,"TERMINAL",0,0,"5\t ",,terminal_output +1018,961883,"TERMINAL",0,0,"6\t ",,terminal_output +1019,962931,"TERMINAL",0,0,"7\t ",,terminal_output +1020,963971,"TERMINAL",0,0,"8\t ",,terminal_output +1021,965006,"TERMINAL",0,0,"9\t ",,terminal_output +1022,966045,"TERMINAL",0,0,"10\t ",,terminal_output +1023,967086,"TERMINAL",0,0,"1\t ",,terminal_output +1024,968129,"TERMINAL",0,0,"2\t ",,terminal_output +1025,969167,"TERMINAL",0,0,"3\t ",,terminal_output +1026,970203,"TERMINAL",0,0,"4\t ",,terminal_output +1027,971239,"TERMINAL",0,0,"5\t ",,terminal_output +1028,972282,"TERMINAL",0,0,"6\t ",,terminal_output +1029,973412,"TERMINAL",0,0,"7\t ",,terminal_output +1030,974365,"TERMINAL",0,0,"8\t ",,terminal_output +1031,975456,"TERMINAL",0,0,"9\t ",,terminal_output +1032,976437,"TERMINAL",0,0,"20\t ",,terminal_output +1033,977483,"TERMINAL",0,0,"1\t ",,terminal_output +1034,978523,"TERMINAL",0,0,"2\t ",,terminal_output +1035,979568,"TERMINAL",0,0,"3\t ",,terminal_output +1036,980622,"TERMINAL",0,0,"4\t ",,terminal_output +1037,981652,"TERMINAL",0,0,"6\t ",,terminal_output +1038,982685,"TERMINAL",0,0,"7\t ",,terminal_output +1039,983726,"TERMINAL",0,0,"8\t ",,terminal_output +1040,984760,"TERMINAL",0,0,"9\t ",,terminal_output +1041,985887,"TERMINAL",0,0,"30\t ",,terminal_output +1042,986835,"TERMINAL",0,0,"1\t ",,terminal_output +1043,987871,"TERMINAL",0,0,"2\t ",,terminal_output +1044,988958,"TERMINAL",0,0,"3\t ",,terminal_output +1045,989981,"TERMINAL",0,0,"4\t ",,terminal_output +1046,990990,"TERMINAL",0,0,"5\t ",,terminal_output +1047,992134,"TERMINAL",0,0,"6\t ",,terminal_output +1048,993149,"TERMINAL",0,0,"7\t ",,terminal_output +1049,994101,"TERMINAL",0,0,"8\t ",,terminal_output +1050,995203,"TERMINAL",0,0,"9\t ",,terminal_output +1051,996228,"TERMINAL",0,0,"40\t ",,terminal_output +1052,997215,"TERMINAL",0,0,"1\t ",,terminal_output +1053,998277,"TERMINAL",0,0,"2\t ",,terminal_output +1054,999301,"TERMINAL",0,0,"3\t ",,terminal_output +1055,1000346,"TERMINAL",0,0,"4\t ",,terminal_output +1056,1001396,"TERMINAL",0,0,"5\t ",,terminal_output 
+1057,1002477,"TERMINAL",0,0,"6\t ",,terminal_output +1058,1003499,"TERMINAL",0,0,"7\t ",,terminal_output +1059,1004523,"TERMINAL",0,0,"8\t ",,terminal_output +1060,1005555,"TERMINAL",0,0,"9\t ",,terminal_output +1061,1006598,"TERMINAL",0,0,"50\t ",,terminal_output +1062,1007643,"TERMINAL",0,0,"2\t ",,terminal_output +1063,1008681,"TERMINAL",0,0,"3\t ",,terminal_output +1064,1009748,"TERMINAL",0,0,"4\t ",,terminal_output +1065,1010773,"TERMINAL",0,0,"5\t ",,terminal_output +1066,1011851,"TERMINAL",0,0,"6\t ",,terminal_output +1067,1012922,"TERMINAL",0,0,"7\t ",,terminal_output +1068,1013946,"TERMINAL",0,0,"8\t ",,terminal_output +1069,1014942,"TERMINAL",0,0,"9\t ",,terminal_output +1070,1015981,"TERMINAL",0,0,"1:00\t ",,terminal_output +1071,1017029,"TERMINAL",0,0,"1\t ",,terminal_output +1072,1018145,"TERMINAL",0,0,"2\t ",,terminal_output +1073,1019093,"TERMINAL",0,0,"3\t ",,terminal_output +1074,1020131,"TERMINAL",0,0,"4\t ",,terminal_output +1075,1021215,"TERMINAL",0,0,"5\t ",,terminal_output +1076,1022240,"TERMINAL",0,0,"6\t ",,terminal_output +1077,1023262,"TERMINAL",0,0,"7\t ",,terminal_output +1078,1024303,"TERMINAL",0,0,"8\t ",,terminal_output +1079,1025434,"TERMINAL",0,0,"9\t ",,terminal_output +1080,1026389,"TERMINAL",0,0,"10\t ",,terminal_output +1081,1027473,"TERMINAL",0,0,"1\t ",,terminal_output +1082,1028486,"TERMINAL",0,0,"2\t ",,terminal_output +1083,1029513,"TERMINAL",0,0,"3\t ",,terminal_output +1084,1030553,"TERMINAL",0,0,"4\t ",,terminal_output +1085,1031597,"TERMINAL",0,0,"5\t ",,terminal_output +1086,1032644,"TERMINAL",0,0,"7\t ",,terminal_output +1087,1033709,"TERMINAL",0,0,"8\t ",,terminal_output +1088,1034835,"TERMINAL",0,0,"9\t ",,terminal_output +1089,1035859,"TERMINAL",0,0,"20\t ",,terminal_output +1090,1036859,"TERMINAL",0,0,"1\t ",,terminal_output +1091,1037908,"TERMINAL",0,0,"2\t ",,terminal_output +1092,1038932,"TERMINAL",0,0,"3\t ",,terminal_output +1093,1040058,"TERMINAL",0,0,"4\t ",,terminal_output +1094,1041082,"TERMINAL",0,0,"5\t ",,terminal_output +1095,1042105,"TERMINAL",0,0,"6\t ",,terminal_output +1096,1043087,"TERMINAL",0,0,"7\t ",,terminal_output +1097,1044155,"TERMINAL",0,0,"8\t ",,terminal_output +1098,1045278,"TERMINAL",0,0,"9\t ",,terminal_output +1099,1046307,"TERMINAL",0,0,"30\t ",,terminal_output +1100,1047255,"TERMINAL",0,0,"1\t ",,terminal_output +1101,1048301,"TERMINAL",0,0,"2\t ",,terminal_output +1102,1049351,"TERMINAL",0,0,"3\t ",,terminal_output +1103,1050396,"TERMINAL",0,0,"4\t ",,terminal_output +1104,1051543,"TERMINAL",0,0,"5\t ",,terminal_output +1105,1052469,"TERMINAL",0,0,"6\t ",,terminal_output +1106,1053513,"TERMINAL",0,0,"7\t ",,terminal_output +1107,1054600,"TERMINAL",0,0,"8\t ",,terminal_output +1108,1055604,"TERMINAL",0,0,"9\t ",,terminal_output +1109,1056751,"TERMINAL",0,0,"41\t ",,terminal_output +1110,1057684,"TERMINAL",0,0,"2\t ",,terminal_output +1111,1058798,"TERMINAL",0,0,"3\t ",,terminal_output +1112,1059768,"TERMINAL",0,0,"4\t ",,terminal_output +1113,1060843,"TERMINAL",0,0,"5\t ",,terminal_output +1114,1061869,"TERMINAL",0,0,"6\t ",,terminal_output +1115,1062872,"TERMINAL",0,0,"7\t ",,terminal_output +1116,1064018,"TERMINAL",0,0,"8\t ",,terminal_output +1117,1064966,"TERMINAL",0,0,"9\t ",,terminal_output +1118,1066012,"TERMINAL",0,0,"50\t ",,terminal_output +1119,1067302,"TERMINAL",0,0,"1\t ",,terminal_output +1120,1068323,"TERMINAL",0,0,"2\t ",,terminal_output +1121,1069337,"TERMINAL",0,0,"3\t ",,terminal_output +1122,1070472,"TERMINAL",0,0,"4\t ",,terminal_output +1123,1071422,"TERMINAL",0,0,"5\t 
",,terminal_output +1124,1072481,"TERMINAL",0,0,"6\t ",,terminal_output +1125,1073507,"TERMINAL",0,0,"7\t ",,terminal_output +1126,1074546,"TERMINAL",0,0,"8\t ",,terminal_output +1127,1075586,"TERMINAL",0,0,"9\t ",,terminal_output +1128,1076623,"TERMINAL",0,0,"2:01\t ",,terminal_output +1129,1077674,"TERMINAL",0,0,"2\t ",,terminal_output +1130,1078712,"TERMINAL",0,0,"3\t ",,terminal_output +1131,1079790,"TERMINAL",0,0,"4\t ",,terminal_output +1132,1080811,"TERMINAL",0,0,"5\t ",,terminal_output +1133,1081847,"TERMINAL",0,0,"6\t ",,terminal_output +1134,1082891,"TERMINAL",0,0,"7\t ",,terminal_output +1135,1083934,"TERMINAL",0,0,"8\t ",,terminal_output +1136,1085048,"TERMINAL",0,0,"9\t ",,terminal_output +1137,1086036,"TERMINAL",0,0,"10\t ",,terminal_output +1138,1087160,"TERMINAL",0,0,"1\t ",,terminal_output +1139,1088226,"TERMINAL",0,0,"2\t ",,terminal_output +1140,1089160,"TERMINAL",0,0,"3\t ",,terminal_output +1141,1090213,"TERMINAL",0,0,"4\t ",,terminal_output +1142,1091260,"TERMINAL",0,0,"5\t ",,terminal_output +1143,1092386,"TERMINAL",0,0,"6\t ",,terminal_output +1144,1093334,"TERMINAL",0,0,"7\t ",,terminal_output +1145,1094377,"TERMINAL",0,0,"8\t ",,terminal_output +1146,1095469,"TERMINAL",0,0,"9\t ",,terminal_output +1147,1096482,"TERMINAL",0,0,"20\t ",,terminal_output +1148,1097543,"TERMINAL",0,0,"1\t ",,terminal_output +1149,1098545,"TERMINAL",0,0,"220",,terminal_output +1150,1099586,"TERMINAL",0,0,"3\t ",,terminal_output +1151,1100634,"TERMINAL",0,0,"5\t ",,terminal_output +1152,1101703,"TERMINAL",0,0,"6\t ",,terminal_output +1153,1102725,"TERMINAL",0,0,"7\t ",,terminal_output +1154,1103759,"TERMINAL",0,0,"8\t ",,terminal_output +1155,1104800,"TERMINAL",0,0,"9\t ",,terminal_output +1156,1105857,"TERMINAL",0,0,"30\t ",,terminal_output +1157,1106924,"TERMINAL",0,0,"1\t ",,terminal_output +1158,1107951,"TERMINAL",0,0,"2\t ",,terminal_output +1159,1109075,"TERMINAL",0,0,"3\t ",,terminal_output +1160,1110023,"TERMINAL",0,0,"4\t ",,terminal_output +1161,1111068,"TERMINAL",0,0,"5\t ",,terminal_output +1162,1112148,"TERMINAL",0,0,"6\t ",,terminal_output +1163,1113174,"TERMINAL",0,0,"7\t ",,terminal_output +1164,1114204,"TERMINAL",0,0,"8\t ",,terminal_output +1165,1115240,"TERMINAL",0,0,"9\t ",,terminal_output +1166,1116279,"TERMINAL",0,0,"40\t ",,terminal_output +1167,1117790,"TERMINAL",0,0,"11",,terminal_output +1168,1118824,"TERMINAL",0,0,"3\t ",,terminal_output +1169,1119873,"TERMINAL",0,0,"4\t ",,terminal_output +1170,1120910,"TERMINAL",0,0,"5\t ",,terminal_output +1171,1121949,"TERMINAL",0,0,"6\t ",,terminal_output +1172,1123105,"TERMINAL",0,0,"7\t ",,terminal_output +1173,1124043,"TERMINAL",0,0,"8\t ",,terminal_output +1174,1125084,"TERMINAL",0,0,"9\t ",,terminal_output +1175,1126123,"TERMINAL",0,0,"50\t ",,terminal_output +1176,1127202,"TERMINAL",0,0,"1\t ",,terminal_output +1177,1128214,"TERMINAL",0,0,"2\t ",,terminal_output +1178,1129271,"TERMINAL",0,0,"3\t ",,terminal_output +1179,1130301,"TERMINAL",0,0,"4\t ",,terminal_output +1180,1131339,"TERMINAL",0,0,"5\t ",,terminal_output +1181,1132452,"TERMINAL",0,0,"6\t ",,terminal_output +1182,1133522,"TERMINAL",0,0,"7\t ",,terminal_output +1183,1134489,"TERMINAL",0,0,"8\t ",,terminal_output +1184,1135500,"TERMINAL",0,0,"9\t ",,terminal_output +1185,1136541,"TERMINAL",0,0,"3:00\t ",,terminal_output +1186,1137601,"TERMINAL",0,0,"1\t ",,terminal_output +1187,1138624,"TERMINAL",0,0,"3\t ",,terminal_output +1188,1139665,"TERMINAL",0,0,"4\t ",,terminal_output +1189,1140709,"TERMINAL",0,0,"5\t ",,terminal_output 
+1190,1141755,"TERMINAL",0,0,"6\t ",,terminal_output +1191,1142869,"TERMINAL",0,0,"7\t ",,terminal_output +1192,1143830,"TERMINAL",0,0,"8\t ",,terminal_output +1193,1144920,"TERMINAL",0,0,"9\t ",,terminal_output +1194,1145943,"TERMINAL",0,0,"10\t ",,terminal_output +1195,1146965,"TERMINAL",0,0,"1\t ",,terminal_output +1196,1147996,"TERMINAL",0,0,"2\t ",,terminal_output +1197,1149040,"TERMINAL",0,0,"3\t ",,terminal_output +1198,1150077,"TERMINAL",0,0,"4\t ",,terminal_output +1199,1151125,"TERMINAL",0,0,"5\t ",,terminal_output +1200,1152161,"TERMINAL",0,0,"6\t ",,terminal_output +1201,1153212,"TERMINAL",0,0,"7\t ",,terminal_output +1202,1154263,"TERMINAL",0,0,"8\t ",,terminal_output +1203,1155290,"TERMINAL",0,0,"9\t ",,terminal_output +1204,1156325,"TERMINAL",0,0,"20\t ",,terminal_output +1205,1157366,"TERMINAL",0,0,"1\t ",,terminal_output +1206,1158412,"TERMINAL",0,0,"2\t ",,terminal_output +1207,1159452,"TERMINAL",0,0,"3\t ",,terminal_output +1208,1160509,"TERMINAL",0,0,"4\t ",,terminal_output +1209,1161532,"TERMINAL",0,0,"5\t ",,terminal_output +1210,1162581,"TERMINAL",0,0,"6\t ",,terminal_output +1211,1163633,"TERMINAL",0,0,"8\t ",,terminal_output +1212,1164667,"TERMINAL",0,0,"9\t ",,terminal_output +1213,1165702,"TERMINAL",0,0,"30\t ",,terminal_output +1214,1166740,"TERMINAL",0,0,"1\t ",,terminal_output +1215,1167776,"TERMINAL",0,0,"2\t ",,terminal_output +1216,1168820,"TERMINAL",0,0,"3\t ",,terminal_output +1217,1169859,"TERMINAL",0,0,"4\t ",,terminal_output +1218,1170902,"TERMINAL",0,0,"5\t ",,terminal_output +1219,1171942,"TERMINAL",0,0,"6\t ",,terminal_output +1220,1172996,"TERMINAL",0,0,"7\t ",,terminal_output +1221,1174103,"TERMINAL",0,0,"8\t ",,terminal_output +1222,1175064,"TERMINAL",0,0,"9\t ",,terminal_output +1223,1176097,"TERMINAL",0,0,"40\t ",,terminal_output +1224,1177173,"TERMINAL",0,0,"1\t ",,terminal_output +1225,1178181,"TERMINAL",0,0,"2\t ",,terminal_output +1226,1179219,"TERMINAL",0,0,"3\t ",,terminal_output +1227,1180263,"TERMINAL",0,0,"4\t ",,terminal_output +1228,1181371,"TERMINAL",0,0,"5\t ",,terminal_output +1229,1182355,"TERMINAL",0,0,"6\t ",,terminal_output +1230,1183421,"TERMINAL",0,0,"7\t ",,terminal_output +1231,1184442,"TERMINAL",0,0,"8\t ",,terminal_output +1232,1185472,"TERMINAL",0,0,"9\t ",,terminal_output +1233,1186517,"TERMINAL",0,0,"50\t ",,terminal_output +1234,1187557,"TERMINAL",0,0,"1\t ",,terminal_output +1235,1188597,"TERMINAL",0,0,"2\t ",,terminal_output +1236,1189635,"TERMINAL",0,0,"4\t ",,terminal_output +1237,1190680,"TERMINAL",0,0,"5\t ",,terminal_output +1238,1191715,"TERMINAL",0,0,"6\t ",,terminal_output +1239,1192756,"TERMINAL",0,0,"7\t ",,terminal_output +1240,1193790,"TERMINAL",0,0,"8\t ",,terminal_output +1241,1194830,"TERMINAL",0,0,"9\t ",,terminal_output +1242,1195866,"TERMINAL",0,0,"4:00\t ",,terminal_output +1243,1196902,"TERMINAL",0,0,"1\t ",,terminal_output +1244,1197958,"TERMINAL",0,0,"2\t ",,terminal_output +1245,1198984,"TERMINAL",0,0,"3\t ",,terminal_output +1246,1200109,"TERMINAL",0,0,"4\t ",,terminal_output +1247,1201137,"TERMINAL",0,0,"5\t ",,terminal_output +1248,1202113,"TERMINAL",0,0,"6\t ",,terminal_output +1249,1203182,"TERMINAL",0,0,"7\t ",,terminal_output +1250,1204208,"TERMINAL",0,0,"8\t ",,terminal_output +1251,1205334,"TERMINAL",0,0,"9\t ",,terminal_output +1252,1206270,"TERMINAL",0,0,"10\t ",,terminal_output +1253,1207308,"TERMINAL",0,0,"1\t ",,terminal_output +1254,1208345,"TERMINAL",0,0,"2\t ",,terminal_output +1255,1209383,"TERMINAL",0,0,"3\t ",,terminal_output +1256,1210455,"TERMINAL",0,0,"4\t 
",,terminal_output +1257,1211484,"TERMINAL",0,0,"5\t ",,terminal_output +1258,1212511,"TERMINAL",0,0,"6\t ",,terminal_output +1259,1213541,"TERMINAL",0,0,"7\t ",,terminal_output +1260,1214582,"TERMINAL",0,0,"8\t ",,terminal_output +1261,1215616,"TERMINAL",0,0,"20\t ",,terminal_output +1262,1216662,"TERMINAL",0,0,"1\t ",,terminal_output +1263,1217722,"TERMINAL",0,0,"2\t ",,terminal_output +1264,1218852,"TERMINAL",0,0,"3\t ",,terminal_output +1265,1219792,"TERMINAL",0,0,"4\t ",,terminal_output +1266,1220898,"TERMINAL",0,0,"5\t ",,terminal_output +1267,1221874,"TERMINAL",0,0,"6\t ",,terminal_output +1268,1222949,"TERMINAL",0,0,"7\t ",,terminal_output +1269,1223957,"TERMINAL",0,0,"8\t ",,terminal_output +1270,1225095,"TERMINAL",0,0,"9\t ",,terminal_output +1271,1226220,"TERMINAL",0,0,"30\t ",,terminal_output +1272,1227151,"TERMINAL",0,0,"1\t ",,terminal_output +1273,1228169,"TERMINAL",0,0,"2\t ",,terminal_output +1274,1229148,"TERMINAL",0,0,"3\t ",,terminal_output +1275,1230216,"TERMINAL",0,0,"4\t ",,terminal_output +1276,1231224,"TERMINAL",0,0,"5\t ",,terminal_output +1277,1232264,"TERMINAL",0,0,"6\t ",,terminal_output +1278,1233302,"TERMINAL",0,0,"7\t ",,terminal_output +1279,1234338,"TERMINAL",0,0,"8\t ",,terminal_output +1280,1235378,"TERMINAL",0,0,"9\t ",,terminal_output +1281,1236433,"TERMINAL",0,0,"40\t ",,terminal_output +1282,1237465,"TERMINAL",0,0,"1\t ",,terminal_output +1283,1238498,"TERMINAL",0,0,"2\t ",,terminal_output +1284,1239541,"TERMINAL",0,0,"3\t ",,terminal_output +1285,1240581,"TERMINAL",0,0,"4\t ",,terminal_output +1286,1241629,"TERMINAL",0,0,"6\t ",,terminal_output +1287,1242670,"TERMINAL",0,0,"7\t ",,terminal_output +1288,1243716,"TERMINAL",0,0,"8\t ",,terminal_output +1289,1244751,"TERMINAL",0,0,"9\t ",,terminal_output +1290,1245795,"TERMINAL",0,0,"50\t ",,terminal_output +1291,1246910,"TERMINAL",0,0,"1\t ",,terminal_output +1292,1247931,"TERMINAL",0,0,"2\t ",,terminal_output +1293,1248916,"TERMINAL",0,0,"3\t ",,terminal_output +1294,1249962,"TERMINAL",0,0,"4\t ",,terminal_output +1295,1251009,"TERMINAL",0,0,"5\t ",,terminal_output +1296,1252130,"TERMINAL",0,0,"6\t ",,terminal_output +1297,1253156,"TERMINAL",0,0,"7\t ",,terminal_output +1298,1254186,"TERMINAL",0,0,"8\t ",,terminal_output +1299,1255171,"TERMINAL",0,0,"9\t ",,terminal_output +1300,1256210,"TERMINAL",0,0,"5:00\t ",,terminal_output +1301,1257351,"TERMINAL",0,0,"1\t ",,terminal_output +1302,1258296,"TERMINAL",0,0,"2\t ",,terminal_output +1303,1259336,"TERMINAL",0,0,"3\t ",,terminal_output +1304,1260380,"TERMINAL",0,0,"4\t ",,terminal_output +1305,1261450,"TERMINAL",0,0,"5\t ",,terminal_output +1306,1262459,"TERMINAL",0,0,"6\t ",,terminal_output +1307,1263499,"TERMINAL",0,0,"7\t ",,terminal_output +1308,1264636,"TERMINAL",0,0,"8\t ",,terminal_output +1309,1265582,"TERMINAL",0,0,"9\t ",,terminal_output +1310,1266624,"TERMINAL",0,0,"11\t ",,terminal_output +1311,1267662,"TERMINAL",0,0,"2\t ",,terminal_output +1312,1268708,"TERMINAL",0,0,"3\t ",,terminal_output +1313,1269752,"TERMINAL",0,0,"4\t ",,terminal_output +1314,1270870,"TERMINAL",0,0,"5\t ",,terminal_output +1315,1271894,"TERMINAL",0,0,"6\t ",,terminal_output +1316,1272918,"TERMINAL",0,0,"7\t ",,terminal_output +1317,1273941,"TERMINAL",0,0,"8\t ",,terminal_output +1318,1274966,"TERMINAL",0,0,"9\t ",,terminal_output +1319,1276094,"TERMINAL",0,0,"20\t ",,terminal_output +1320,1277117,"TERMINAL",0,0,"1\t ",,terminal_output +1321,1278139,"TERMINAL",0,0,"2\t ",,terminal_output +1322,1279121,"TERMINAL",0,0,"3\t ",,terminal_output 
+1323,1280185,"TERMINAL",0,0,"4\t ",,terminal_output +1324,1281208,"TERMINAL",0,0,"5\t ",,terminal_output +1325,1282252,"TERMINAL",0,0,"6\t ",,terminal_output +1326,1283290,"TERMINAL",0,0,"7\t ",,terminal_output +1327,1284335,"TERMINAL",0,0,"8\t ",,terminal_output +1328,1285374,"TERMINAL",0,0,"9\t ",,terminal_output +1329,1286417,"TERMINAL",0,0,"30\t ",,terminal_output +1330,1287465,"TERMINAL",0,0,"1\t ",,terminal_output +1331,1288497,"TERMINAL",0,0,"2\t ",,terminal_output +1332,1289541,"TERMINAL",0,0,"3\t ",,terminal_output +1333,1290581,"TERMINAL",0,0,"4\t ",,terminal_output +1334,1291622,"TERMINAL",0,0,"6\t ",,terminal_output +1335,1292662,"TERMINAL",0,0,"7\t ",,terminal_output +1336,1293704,"TERMINAL",0,0,"8\t ",,terminal_output +1337,1294745,"TERMINAL",0,0,"9\t ",,terminal_output +1338,1295791,"TERMINAL",0,0,"40\t ",,terminal_output +1339,1296832,"TERMINAL",0,0,"1\t ",,terminal_output +1340,1297904,"TERMINAL",0,0,"2\t ",,terminal_output +1341,1299036,"TERMINAL",0,0,"3\t ",,terminal_output +1342,1299951,"TERMINAL",0,0,"4\t ",,terminal_output +1343,1301080,"TERMINAL",0,0,"5\t ",,terminal_output +1344,1302029,"TERMINAL",0,0,"6\t ",,terminal_output +1345,1303076,"TERMINAL",0,0,"7\t ",,terminal_output +1346,1304149,"TERMINAL",0,0,"8\t ",,terminal_output +1347,1305279,"TERMINAL",0,0,"9\t ",,terminal_output +1348,1306269,"TERMINAL",0,0,"50\t ",,terminal_output +1349,1307235,"TERMINAL",0,0,"1\t ",,terminal_output +1350,1308280,"TERMINAL",0,0,"2\t ",,terminal_output +1351,1309317,"TERMINAL",0,0,"3\t ",,terminal_output +1352,1310360,"TERMINAL",0,0,"4\t ",,terminal_output +1353,1311401,"TERMINAL",0,0,"5\t ",,terminal_output +1354,1312463,"TERMINAL",0,0,"6\t ",,terminal_output +1355,1313484,"TERMINAL",0,0,"7\t ",,terminal_output +1356,1314596,"TERMINAL",0,0,"8\t ",,terminal_output +1357,1315561,"TERMINAL",0,0,"9\t ",,terminal_output +1358,1316599,"TERMINAL",0,0,"6:00\t ",,terminal_output +1359,1317643,"TERMINAL",0,0,"2\t ",,terminal_output +1360,1318680,"TERMINAL",0,0,"3\t ",,terminal_output +1361,1319720,"TERMINAL",0,0,"4\t ",,terminal_output +1362,1320841,"TERMINAL",0,0,"5\t ",,terminal_output +1363,1321879,"TERMINAL",0,0,"6\t ",,terminal_output +1364,1322840,"TERMINAL",0,0,"7\t ",,terminal_output +1365,1323878,"TERMINAL",0,0,"8\t ",,terminal_output +1366,1324938,"TERMINAL",0,0,"9\t ",,terminal_output +1367,1325949,"TERMINAL",0,0,"10\t ",,terminal_output +1368,1326988,"TERMINAL",0,0,"1\t ",,terminal_output +1369,1328032,"TERMINAL",0,0,"2\t ",,terminal_output +1370,1329069,"TERMINAL",0,0,"3\t ",,terminal_output +1371,1330109,"TERMINAL",0,0,"4\t ",,terminal_output +1372,1331146,"TERMINAL",0,0,"5\t ",,terminal_output +1373,1332209,"TERMINAL",0,0,"6\t ",,terminal_output +1374,1333228,"TERMINAL",0,0,"7\t ",,terminal_output +1375,1334300,"TERMINAL",0,0,"8\t ",,terminal_output +1376,1335298,"TERMINAL",0,0,"9\t ",,terminal_output +1377,1336341,"TERMINAL",0,0,"20\t ",,terminal_output +1378,1337377,"TERMINAL",0,0,"1\t ",,terminal_output +1379,1338417,"TERMINAL",0,0,"2\t ",,terminal_output +1380,1339456,"TERMINAL",0,0,"3\t ",,terminal_output +1381,1340507,"TERMINAL",0,0,"4\t ",,terminal_output +1382,1341569,"TERMINAL",0,0,"5\t ",,terminal_output +1383,1342581,"TERMINAL",0,0,"6\t ",,terminal_output +1384,1343618,"TERMINAL",0,0,"8\t ",,terminal_output +1385,1344660,"TERMINAL",0,0,"9\t ",,terminal_output +1386,1345705,"TERMINAL",0,0,"30\t ",,terminal_output +1387,1346750,"TERMINAL",0,0,"1\t ",,terminal_output +1388,1347784,"TERMINAL",0,0,"2\t ",,terminal_output +1389,1348822,"TERMINAL",0,0,"3\t 
",,terminal_output +1390,1349948,"TERMINAL",0,0,"4\t ",,terminal_output +1391,1351034,"TERMINAL",0,0,"5\t ",,terminal_output +1392,1352027,"TERMINAL",0,0,"6\t ",,terminal_output +1393,1353072,"TERMINAL",0,0,"7\t ",,terminal_output +1394,1354111,"TERMINAL",0,0,"8\t ",,terminal_output +1395,1355152,"TERMINAL",0,0,"9\t ",,terminal_output +1396,1356276,"TERMINAL",0,0,"40\t ",,terminal_output +1397,1357225,"TERMINAL",0,0,"1\t ",,terminal_output +1398,1358263,"TERMINAL",0,0,"2\t ",,terminal_output +1399,1359345,"TERMINAL",0,0,"3\t ",,terminal_output +1400,1360342,"TERMINAL",0,0,"4\t ",,terminal_output +1401,1361386,"TERMINAL",0,0,"5\t ",,terminal_output +1402,1362457,"TERMINAL",0,0,"6\t ",,terminal_output +1403,1363467,"TERMINAL",0,0,"7\t ",,terminal_output +1404,1364580,"TERMINAL",0,0,"8\t ",,terminal_output +1405,1365542,"TERMINAL",0,0,"90",,terminal_output +1406,1366601,"TERMINAL",0,0,"50\t ",,terminal_output +1407,1367618,"TERMINAL",0,0,"2\t ",,terminal_output +1408,1368659,"TERMINAL",0,0,"3\t ",,terminal_output +1409,1369704,"TERMINAL",0,0,"4\t ",,terminal_output +1410,1370754,"TERMINAL",0,0,"5\t ",,terminal_output +1411,1371783,"TERMINAL",0,0,"6\t ",,terminal_output +1412,1372838,"TERMINAL",0,0,"7\t ",,terminal_output +1413,1373863,"TERMINAL",0,0,"8\t ",,terminal_output +1414,1374909,"TERMINAL",0,0,"9\t ",,terminal_output +1415,1375938,"TERMINAL",0,0,"7:00\t ",,terminal_output +1416,1376965,"TERMINAL",0,0,"1\t ",,terminal_output +1417,1378008,"TERMINAL",0,0,"2\t ",,terminal_output +1418,1379043,"TERMINAL",0,0,"3\t ",,terminal_output +1419,1380132,"TERMINAL",0,0,"4\t ",,terminal_output +1420,1381157,"TERMINAL",0,0,"5\t ",,terminal_output +1421,1382144,"TERMINAL",0,0,"6\t ",,terminal_output +1422,1383205,"TERMINAL",0,0,"7\t ",,terminal_output +1423,1384225,"TERMINAL",0,0,"8\t ",,terminal_output +1424,1385259,"TERMINAL",0,0,"9\t ",,terminal_output +1425,1386316,"TERMINAL",0,0,"10\t ",,terminal_output +1426,1387464,"TERMINAL",0,0,"1\t ",,terminal_output +1427,1388393,"TERMINAL",0,0,"2\t ",,terminal_output +1428,1389430,"TERMINAL",0,0,"3\t ",,terminal_output +1429,1390476,"TERMINAL",0,0,"4\t ",,terminal_output +1430,1391602,"TERMINAL",0,0,"5\t ",,terminal_output +1431,1392551,"TERMINAL",0,0,"6\t ",,terminal_output +1432,1393605,"TERMINAL",0,0,"7\t ",,terminal_output +1433,1394626,"TERMINAL",0,0,"9\t ",,terminal_output +1434,1395663,"TERMINAL",0,0,"20\t ",,terminal_output +1435,1396705,"TERMINAL",0,0,"1\t ",,terminal_output +1436,1397843,"TERMINAL",0,0,"2\t ",,terminal_output +1437,1398788,"TERMINAL",0,0,"3\t ",,terminal_output +1438,1399823,"TERMINAL",0,0,"4\t ",,terminal_output +1439,1400859,"TERMINAL",0,0,"5\t ",,terminal_output +1440,1401902,"TERMINAL",0,0,"6\t ",,terminal_output +1441,1402965,"TERMINAL",0,0,"7\t ",,terminal_output +1442,1403985,"TERMINAL",0,0,"8\t ",,terminal_output +1443,1405028,"TERMINAL",0,0,"9\t ",,terminal_output +1444,1406071,"TERMINAL",0,0,"30\t ",,terminal_output +1445,1407165,"TERMINAL",0,0,"1\t ",,terminal_output +1446,1408151,"TERMINAL",0,0,"2\t ",,terminal_output +1447,1409198,"TERMINAL",0,0,"3\t ",,terminal_output +1448,1410245,"TERMINAL",0,0,"4\t ",,terminal_output +1449,1411281,"TERMINAL",0,0,"5\t ",,terminal_output +1450,1412308,"TERMINAL",0,0,"6\t ",,terminal_output +1451,1413346,"TERMINAL",0,0,"7\t ",,terminal_output +1452,1414382,"TERMINAL",0,0,"8\t ",,terminal_output +1453,1415424,"TERMINAL",0,0,"9\t ",,terminal_output +1454,1416464,"TERMINAL",0,0,"40\t ",,terminal_output +1455,1417508,"TERMINAL",0,0,"1\t ",,terminal_output 
+1456,1418636,"TERMINAL",0,0,"2\t ",,terminal_output +1457,1419618,"TERMINAL",0,0,"31",,terminal_output +1458,1420609,"TERMINAL",0,0,"4\t ",,terminal_output +1459,1421636,"TERMINAL",0,0,"6\t ",,terminal_output +1460,1422672,"TERMINAL",0,0,"7\t ",,terminal_output +1461,1423718,"TERMINAL",0,0,"8\t ",,terminal_output +1462,1424759,"TERMINAL",0,0,"9\t ",,terminal_output +1463,1425801,"TERMINAL",0,0,"50\t ",,terminal_output +1464,1426926,"TERMINAL",0,0,"1\t ",,terminal_output +1465,1427951,"TERMINAL",0,0,"2\t ",,terminal_output +1466,1428979,"TERMINAL",0,0,"3\t ",,terminal_output +1467,1429955,"TERMINAL",0,0,"4\t ",,terminal_output +1468,1431029,"TERMINAL",0,0,"50",,terminal_output +1469,1432370,"TERMINAL",0,0,"6 89",,terminal_output +1470,1433406,"TERMINAL",0,0,"7\t ",,terminal_output +1471,1434564,"TERMINAL",0,0,"8\t ",,terminal_output +1472,1435513,"TERMINAL",0,0,"9\t ",,terminal_output +1473,1436612,"TERMINAL",0,0,"8:00\t ",,terminal_output +1474,1437607,"TERMINAL",0,0,"1\t ",,terminal_output +1475,1438705,"TERMINAL",0,0,"3\t ",,terminal_output +1476,1439663,"TERMINAL",0,0,"4\t ",,terminal_output +1477,1440698,"TERMINAL",0,0,"5\t ",,terminal_output +1478,1441739,"TERMINAL",0,0,"6\t ",,terminal_output +1479,1442800,"TERMINAL",0,0,"7\t ",,terminal_output +1480,1443928,"TERMINAL",0,0,"8\t ",,terminal_output +1481,1444864,"TERMINAL",0,0,"9\t ",,terminal_output +1482,1445975,"TERMINAL",0,0,"10\t ",,terminal_output +1483,1446945,"TERMINAL",0,0,"1\t ",,terminal_output +1484,1448022,"TERMINAL",0,0,"2\t ",,terminal_output +1485,1449024,"TERMINAL",0,0,"3\t ",,terminal_output +1486,1450056,"TERMINAL",0,0,"4\t ",,terminal_output +1487,1451107,"TERMINAL",0,0,"5\t ",,terminal_output +1488,1455716,"TERMINAL",0,0,"6\t 7\t 8\t 9\t ",,terminal_output +1489,1456277,"TERMINAL",0,0,"20\t ",,terminal_output +1490,1457313,"TERMINAL",0,0,"1\t ",,terminal_output +1491,1458468,"TERMINAL",0,0,"2\t ",,terminal_output +1492,1459386,"TERMINAL",0,0,"3\t ",,terminal_output +1493,1460416,"TERMINAL",0,0,"4\t ",,terminal_output +1494,1461457,"TERMINAL",0,0,"5\t ",,terminal_output +1495,1462495,"TERMINAL",0,0,"6\t ",,terminal_output +1496,1463537,"TERMINAL",0,0,"7\t ",,terminal_output +1497,1464572,"TERMINAL",0,0,"8\t ",,terminal_output +1498,1465650,"TERMINAL",0,0,"9\t ",,terminal_output +1499,1466645,"TERMINAL",0,0,"31\t ",,terminal_output +1500,1467683,"TERMINAL",0,0,"2\t ",,terminal_output +1501,1468726,"TERMINAL",0,0,"3\t ",,terminal_output +1502,1469774,"TERMINAL",0,0,"4\t ",,terminal_output +1503,1470816,"TERMINAL",0,0,"5\t ",,terminal_output +1504,1471884,"TERMINAL",0,0,"6\t ",,terminal_output +1505,1472895,"TERMINAL",0,0,"71",,terminal_output +1506,1474034,"TERMINAL",0,0,"8\t ",,terminal_output +1507,1474973,"TERMINAL",0,0,"9\t ",,terminal_output +1508,1476018,"TERMINAL",0,0,"40\t ",,terminal_output +1509,1477050,"TERMINAL",0,0,"1\t ",,terminal_output +1510,1478094,"TERMINAL",0,0,"2\t ",,terminal_output +1511,1479123,"TERMINAL",0,0,"3\t ",,terminal_output +1512,1480166,"TERMINAL",0,0,"4\t ",,terminal_output +1513,1481210,"TERMINAL",0,0,"5\t ",,terminal_output +1514,1482247,"TERMINAL",0,0,"6\t ",,terminal_output +1515,1483291,"TERMINAL",0,0,"7\t ",,terminal_output +1516,1484337,"TERMINAL",0,0,"8\t ",,terminal_output +1517,1485373,"TERMINAL",0,0,"9\t ",,terminal_output +1518,1486414,"TERMINAL",0,0,"50\t ",,terminal_output +1519,1487462,"TERMINAL",0,0,"1\t ",,terminal_output +1520,1488484,"TERMINAL",0,0,"2\t ",,terminal_output +1521,1489521,"TERMINAL",0,0,"3\t ",,terminal_output +1522,1490560,"TERMINAL",0,0,"4\t 
",,terminal_output +1523,1491605,"TERMINAL",0,0,"5\t ",,terminal_output +1524,1492642,"TERMINAL",0,0,"7\t ",,terminal_output +1525,1493696,"TERMINAL",0,0,"8\t ",,terminal_output +1526,1494732,"TERMINAL",0,0,"9\t ",,terminal_output +1527,1495773,"TERMINAL",0,0,"9:00\t ",,terminal_output +1528,1496868,"TERMINAL",0,0,"1\t ",,terminal_output +1529,1497895,"TERMINAL",0,0,"2\t ",,terminal_output +1530,1498921,"TERMINAL",0,0,"3\t ",,terminal_output +1531,1499929,"TERMINAL",0,0,"4\t ",,terminal_output +1532,1501074,"TERMINAL",0,0,"5\t ",,terminal_output +1533,1502026,"TERMINAL",0,0,"6\t ",,terminal_output +1534,1503042,"TERMINAL",0,0,"7\t ",,terminal_output +1535,1504141,"TERMINAL",0,0,"8\t ",,terminal_output +1536,1505204,"TERMINAL",0,0,"9\t ",,terminal_output +1537,1506158,"TERMINAL",0,0,"10\t ",,terminal_output +1538,1507215,"TERMINAL",0,0,"1\t ",,terminal_output +1539,1508243,"TERMINAL",0,0,"2\t ",,terminal_output +1540,1509299,"TERMINAL",0,0,"3\t ",,terminal_output +1541,1510385,"TERMINAL",0,0,"4\t ",,terminal_output +1542,1511409,"TERMINAL",0,0,"5\t ",,terminal_output +1543,1512449,"TERMINAL",0,0,"6\t ",,terminal_output +1544,1513430,"TERMINAL",0,0,"7\t ",,terminal_output +1545,1514587,"TERMINAL",0,0,"8\t ",,terminal_output +1546,1515525,"TERMINAL",0,0,"9\t ",,terminal_output +1547,1516561,"TERMINAL",0,0,"20\t ",,terminal_output +1548,1517610,"TERMINAL",0,0,"1\t ",,terminal_output +1549,1518679,"TERMINAL",0,0,"3\t ",,terminal_output +1550,1519674,"TERMINAL",0,0,"4\t ",,terminal_output +1551,1520730,"TERMINAL",0,0,"5\t ",,terminal_output +1552,1521757,"TERMINAL",0,0,"6\t ",,terminal_output +1553,1522799,"TERMINAL",0,0,"7\t ",,terminal_output +1554,1523848,"TERMINAL",0,0,"8\t ",,terminal_output +1555,1524927,"TERMINAL",0,0,"9\t ",,terminal_output +1556,1525953,"TERMINAL",0,0,"30\t ",,terminal_output +1557,1526977,"TERMINAL",0,0,"1\t ",,terminal_output +1558,1528012,"TERMINAL",0,0,"2\t ",,terminal_output +1559,1529110,"TERMINAL",0,0,"3\t ",,terminal_output +1560,1530110,"TERMINAL",0,0,"4\t ",,terminal_output +1561,1531150,"TERMINAL",0,0,"5\t ",,terminal_output +1562,1532194,"TERMINAL",0,0,"6\t ",,terminal_output +1563,1533116,"TERMINAL",0,0,"salloc",,terminal_focus +1564,1533220,"TERMINAL",0,0,"7\t ",,terminal_output +1565,1534307,"TERMINAL",0,0,"8\t ",,terminal_output +1566,1535129,"TERMINAL",0,0,"^Csalloc: Job allocation 3306206 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;1",,terminal_output +1567,1535297,"TERMINAL",0,0,"9\t ",,terminal_output +1568,1536343,"TERMINAL",0,0,"40\t ",,terminal_output +1569,1537449,"TERMINAL",0,0,"1\t ",,terminal_output +1570,1538423,"TERMINAL",0,0,"2\t ",,terminal_output +1571,1539472,"TERMINAL",0,0,"3\t ",,terminal_output +1572,1539915,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G",,terminal_command +1573,1539965,"TERMINAL",0,0,"]633;E;2025-06-30 14:49:44 salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;Csalloc: Pending job allocation 3306221\r\nsalloc: job 3306221 queued and waiting for resources\r\n",,terminal_output +1574,1540508,"TERMINAL",0,0,"4\t ",,terminal_output +1575,1541418,"TERMINAL",0,0,"watch",,terminal_focus +1576,1541553,"TERMINAL",0,0,"5\t ",,terminal_output +1577,1542604,"TERMINAL",0,0,"6\t ",,terminal_output 
+1578,1542889,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +1579,1543796,"TERMINAL",0,0,"idling",,terminal_command +1580,1543882,"TERMINAL",0,0,"]633;E;2025-06-30 14:49:48 idling;ead59344-49db-4336-9336-47fae706e637]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 14:49:48 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 89 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +1581,1544756,"TERMINAL",0,0,"salloc",,terminal_focus +1582,1544915,"TERMINAL",0,0,"9\t ",,terminal_output +1583,1545822,"TERMINAL",0,0,"watch",,terminal_focus +1584,1545958,"TERMINAL",0,0,"50\t ",,terminal_output +1585,1547002,"TERMINAL",0,0,"1\t ",,terminal_output +1586,1547330,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +1587,1548483,"TERMINAL",0,0,"idling",,terminal_command +1588,1548533,"TERMINAL",0,0,"]633;E;2025-06-30 14:49:52 idling;ead59344-49db-4336-9336-47fae706e637]633;C",,terminal_output +1589,1548592,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 14:49:52 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 89 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +1590,1549594,"TERMINAL",0,0,"3\t ",,terminal_output +1591,1550100,"TERMINAL",0,0,"salloc",,terminal_focus +1592,1550671,"TERMINAL",0,0,"5\t ",,terminal_output +1593,1551763,"TERMINAL",0,0,"6\t ",,terminal_output +1594,1552864,"TERMINAL",0,0,"740",,terminal_output +1595,1553905,"TERMINAL",0,0,"8\t ",,terminal_output +1596,1554941,"TERMINAL",0,0,"9\t ",,terminal_output +1597,1555989,"TERMINAL",0,0,"50:00\t ",,terminal_output +1598,1557081,"TERMINAL",0,0,"1\t ",,terminal_output +1599,1558114,"TERMINAL",0,0,"2\t ",,terminal_output +1600,1559118,"TERMINAL",0,0,"3\t ",,terminal_output +1601,1560162,"TERMINAL",0,0,"4\t ",,terminal_output +1602,1561205,"TERMINAL",0,0,"5\t ",,terminal_output +1603,1562250,"TERMINAL",0,0,"6\t ",,terminal_output +1604,1563329,"TERMINAL",0,0,"7\t ",,terminal_output +1605,1564327,"TERMINAL",0,0,"8\t ",,terminal_output +1606,1565364,"TERMINAL",0,0,"9\t ",,terminal_output +1607,1566503,"TERMINAL",0,0,"10\t ",,terminal_output +1608,1567462,"TERMINAL",0,0,"1\t ",,terminal_output +1609,1568488,"TERMINAL",0,0,"2\t ",,terminal_output +1610,1569571,"TERMINAL",0,0,"3\t ",,terminal_output +1611,1570573,"TERMINAL",0,0,"4\t ",,terminal_output +1612,1571724,"TERMINAL",0,0,"6\t ",,terminal_output +1613,1572675,"TERMINAL",0,0,"7\t ",,terminal_output +1614,1573773,"TERMINAL",0,0,"8\t ",,terminal_output +1615,1574742,"TERMINAL",0,0,"9\t ",,terminal_output +1616,1575789,"TERMINAL",0,0,"20\t ",,terminal_output +1617,1576841,"TERMINAL",0,0,"1\t ",,terminal_output +1618,1577870,"TERMINAL",0,0,"2\t ",,terminal_output +1619,1578911,"TERMINAL",0,0,"3\t ",,terminal_output +1620,1579948,"TERMINAL",0,0,"4\t ",,terminal_output +1621,1581043,"TERMINAL",0,0,"5\t ",,terminal_output +1622,1582038,"TERMINAL",0,0,"6\t ",,terminal_output +1623,1583079,"TERMINAL",0,0,"7\t ",,terminal_output 
+1624,1584117,"TERMINAL",0,0,"8\t ",,terminal_output +1625,1585227,"TERMINAL",0,0,"9\t ",,terminal_output +1626,1586196,"TERMINAL",0,0,"30\t ",,terminal_output +1627,1587235,"TERMINAL",0,0,"1\t ",,terminal_output +1628,1588312,"TERMINAL",0,0,"2\t ",,terminal_output +1629,1589322,"TERMINAL",0,0,"3\t ",,terminal_output +1630,1590360,"TERMINAL",0,0,"4\t ",,terminal_output +1631,1591400,"TERMINAL",0,0,"5\t ",,terminal_output +1632,1592461,"TERMINAL",0,0,"6\t ",,terminal_output +1633,1593535,"TERMINAL",0,0,"7\t ",,terminal_output +1634,1594563,"TERMINAL",0,0,"8\t ",,terminal_output +1635,1595561,"TERMINAL",0,0,"9\t ",,terminal_output +1636,1596615,"TERMINAL",0,0,"40\t ",,terminal_output +1637,1597637,"TERMINAL",0,0,"2\t ",,terminal_output +1638,1598759,"TERMINAL",0,0,"3\t ",,terminal_output +1639,1599720,"TERMINAL",0,0,"4\t ",,terminal_output +1640,1600776,"TERMINAL",0,0,"5\t ",,terminal_output +1641,1601801,"TERMINAL",0,0,"6\t ",,terminal_output +1642,1602837,"TERMINAL",0,0,"7\t ",,terminal_output +1643,1603877,"TERMINAL",0,0,"8\t ",,terminal_output +1644,1604918,"TERMINAL",0,0,"9\t ",,terminal_output +1645,1605955,"TERMINAL",0,0,"50\t ",,terminal_output +1646,1607051,"TERMINAL",0,0,"1\t ",,terminal_output +1647,1608030,"TERMINAL",0,0,"2\t ",,terminal_output +1648,1609100,"TERMINAL",0,0,"3\t ",,terminal_output +1649,1610111,"TERMINAL",0,0,"4\t ",,terminal_output +1650,1611253,"TERMINAL",0,0,"5\t ",,terminal_output +1651,1612190,"TERMINAL",0,0,"6\t ",,terminal_output +1652,1613307,"TERMINAL",0,0,"7\t ",,terminal_output +1653,1614309,"TERMINAL",0,0,"8\t ",,terminal_output +1654,1615308,"TERMINAL",0,0,"9\t ",,terminal_output +1655,1616373,"TERMINAL",0,0,"1:00\t ",,terminal_output +1656,1617387,"TERMINAL",0,0,"1\t ",,terminal_output +1657,1618428,"TERMINAL",0,0,"2\t ",,terminal_output +1658,1619547,"TERMINAL",0,0,"3\t ",,terminal_output +1659,1620569,"TERMINAL",0,0,"4\t ",,terminal_output +1660,1621593,"TERMINAL",0,0,"5\t ",,terminal_output +1661,1622618,"TERMINAL",0,0,"6\t ",,terminal_output +1662,1623645,"TERMINAL",0,0,"8\t ",,terminal_output +1663,1624668,"TERMINAL",0,0,"9\t ",,terminal_output +1664,1625714,"TERMINAL",0,0,"10\t ",,terminal_output +1665,1626756,"TERMINAL",0,0,"1\t ",,terminal_output +1666,1627798,"TERMINAL",0,0,"2\t ",,terminal_output +1667,1628834,"TERMINAL",0,0,"3\t ",,terminal_output +1668,1629879,"TERMINAL",0,0,"4\t ",,terminal_output +1669,1630926,"TERMINAL",0,0,"5\t ",,terminal_output +1670,1632038,"TERMINAL",0,0,"6\t ",,terminal_output +1671,1633061,"TERMINAL",0,0,"7\t ",,terminal_output +1672,1634040,"TERMINAL",0,0,"8\t ",,terminal_output +1673,1635075,"TERMINAL",0,0,"9\t ",,terminal_output +1674,1636116,"TERMINAL",0,0,"20\t ",,terminal_output +1675,1637155,"TERMINAL",0,0,"1\t ",,terminal_output +1676,1638190,"TERMINAL",0,0,"2\t ",,terminal_output +1677,1639226,"TERMINAL",0,0,"3\t ",,terminal_output +1678,1640263,"TERMINAL",0,0,"4\t ",,terminal_output +1679,1641303,"TERMINAL",0,0,"5\t ",,terminal_output +1680,1642380,"TERMINAL",0,0,"6\t ",,terminal_output +1681,1643385,"TERMINAL",0,0,"7\t ",,terminal_output +1682,1644531,"TERMINAL",0,0,"8\t ",,terminal_output +1683,1645563,"TERMINAL",0,0,"9\t ",,terminal_output +1684,1646577,"TERMINAL",0,0,"30\t ",,terminal_output +1685,1647543,"TERMINAL",0,0,"1\t ",,terminal_output +1686,1648584,"TERMINAL",0,0,"2\t ",,terminal_output +1687,1649618,"TERMINAL",0,0,"4\t ",,terminal_output +1688,1650675,"TERMINAL",0,0,"5\t ",,terminal_output +1689,1651820,"TERMINAL",0,0,"6\t ",,terminal_output +1690,1652737,"TERMINAL",0,0,"7\t 
",,terminal_output +1691,1653773,"TERMINAL",0,0,"8\t ",,terminal_output +1692,1654816,"TERMINAL",0,0,"9\t ",,terminal_output +1693,1655855,"TERMINAL",0,0,"40\t ",,terminal_output +1694,1656888,"TERMINAL",0,0,"1\t ",,terminal_output +1695,1657931,"TERMINAL",0,0,"2\t ",,terminal_output +1696,1658975,"TERMINAL",0,0,"3\t ",,terminal_output +1697,1660011,"TERMINAL",0,0,"4\t ",,terminal_output +1698,1661056,"TERMINAL",0,0,"5\t ",,terminal_output +1699,1662091,"TERMINAL",0,0,"6\t ",,terminal_output +1700,1663168,"TERMINAL",0,0,"7\t ",,terminal_output +1701,1664177,"TERMINAL",0,0,"8\t ",,terminal_output +1702,1665317,"TERMINAL",0,0,"9\t ",,terminal_output +1703,1666261,"TERMINAL",0,0,"50\t ",,terminal_output +1704,1667379,"TERMINAL",0,0,"1\t ",,terminal_output +1705,1668392,"TERMINAL",0,0,"2\t ",,terminal_output +1706,1669397,"TERMINAL",0,0,"3\t ",,terminal_output +1707,1670436,"TERMINAL",0,0,"4\t ",,terminal_output +1708,1671668,"TERMINAL",0,0,"5\t ",,terminal_output +1709,1672522,"TERMINAL",0,0,"6\t ",,terminal_output +1710,1673615,"TERMINAL",0,0,"70",,terminal_output +1711,1674603,"TERMINAL",0,0,"8\t ",,terminal_output +1712,1675768,"TERMINAL",0,0,"2:00\t ",,terminal_output +1713,1676724,"TERMINAL",0,0,"1\t ",,terminal_output +1714,1677812,"TERMINAL",0,0,"2\t ",,terminal_output +1715,1678777,"TERMINAL",0,0,"3\t ",,terminal_output +1716,1679861,"TERMINAL",0,0,"4\t ",,terminal_output +1717,1680865,"TERMINAL",0,0,"5\t ",,terminal_output +1718,1681904,"TERMINAL",0,0,"61",,terminal_output +1719,1682953,"TERMINAL",0,0,"7\t ",,terminal_output +1720,1684057,"TERMINAL",0,0,"8\t ",,terminal_output +1721,1685081,"TERMINAL",0,0,"9\t ",,terminal_output +1722,1686106,"TERMINAL",0,0,"101",,terminal_output +1723,1687121,"TERMINAL",0,0,"1\t ",,terminal_output +1724,1688259,"TERMINAL",0,0,"2\t ",,terminal_output +1725,1689202,"TERMINAL",0,0,"3\t ",,terminal_output +1726,1690243,"TERMINAL",0,0,"4\t ",,terminal_output +1727,1691332,"TERMINAL",0,0,"5\t ",,terminal_output +1728,1692357,"TERMINAL",0,0,"6\t ",,terminal_output +1729,1693377,"TERMINAL",0,0,"70",,terminal_output +1730,1694413,"TERMINAL",0,0,"8\t ",,terminal_output +1731,1695464,"TERMINAL",0,0,"9\t ",,terminal_output +1732,1696560,"TERMINAL",0,0,"200",,terminal_output +1733,1697545,"TERMINAL",0,0,"1\t ",,terminal_output +1734,1698602,"TERMINAL",0,0,"2\t ",,terminal_output +1735,1699728,"TERMINAL",0,0,"4\t ",,terminal_output +1736,1700678,"TERMINAL",0,0,"5\t ",,terminal_output +1737,1701773,"TERMINAL",0,0,"6\t ",,terminal_output +1738,1702796,"TERMINAL",0,0,"7\t ",,terminal_output +1739,1703808,"TERMINAL",0,0,"8\t ",,terminal_output +1740,1704849,"TERMINAL",0,0,"9\t ",,terminal_output +1741,1705888,"TERMINAL",0,0,"30\t ",,terminal_output +1742,1706928,"TERMINAL",0,0,"1\t ",,terminal_output +1743,1708021,"TERMINAL",0,0,"2\t ",,terminal_output +1744,1709024,"TERMINAL",0,0,"3\t ",,terminal_output +1745,1710069,"TERMINAL",0,0,"4\t ",,terminal_output +1746,1711095,"TERMINAL",0,0,"5\t ",,terminal_output +1747,1712139,"TERMINAL",0,0,"6\t ",,terminal_output +1748,1713177,"TERMINAL",0,0,"7\t ",,terminal_output +1749,1714269,"TERMINAL",0,0,"8\t ",,terminal_output +1750,1715261,"TERMINAL",0,0,"9\t ",,terminal_output +1751,1716314,"TERMINAL",0,0,"40\t ",,terminal_output +1752,1717449,"TERMINAL",0,0,"1\t ",,terminal_output +1753,1718393,"TERMINAL",0,0,"2\t ",,terminal_output +1754,1719438,"TERMINAL",0,0,"3\t ",,terminal_output +1755,1720494,"TERMINAL",0,0,"4\t ",,terminal_output +1756,1721537,"TERMINAL",0,0,"5\t ",,terminal_output +1757,1722560,"TERMINAL",0,0,"6\t 
",,terminal_output +1758,1723655,"TERMINAL",0,0,"7\t ",,terminal_output +1759,1724711,"TERMINAL",0,0,"9\t ",,terminal_output +1760,1725691,"TERMINAL",0,0,"50\t ",,terminal_output +1761,1726780,"TERMINAL",0,0,"1\t ",,terminal_output +1762,1727788,"TERMINAL",0,0,"2\t ",,terminal_output +1763,1728810,"TERMINAL",0,0,"3\t ",,terminal_output +1764,1729855,"TERMINAL",0,0,"4\t ",,terminal_output +1765,1730891,"TERMINAL",0,0,"5\t ",,terminal_output +1766,1731930,"TERMINAL",0,0,"6\t ",,terminal_output +1767,1732975,"TERMINAL",0,0,"7\t ",,terminal_output +1768,1734019,"TERMINAL",0,0,"8\t ",,terminal_output +1769,1735060,"TERMINAL",0,0,"9\t ",,terminal_output +1770,1736181,"TERMINAL",0,0,"3:001",,terminal_output +1771,1737141,"TERMINAL",0,0,"1\t ",,terminal_output +1772,1738287,"TERMINAL",0,0,"2\t ",,terminal_output +1773,1739310,"TERMINAL",0,0,"3\t ",,terminal_output +1774,1740270,"TERMINAL",0,0,"4\t ",,terminal_output +1775,1741301,"TERMINAL",0,0,"5\t ",,terminal_output +1776,1742388,"TERMINAL",0,0,"6\t ",,terminal_output +1777,1743382,"TERMINAL",0,0,"7\t ",,terminal_output +1778,1744470,"TERMINAL",0,0,"8\t ",,terminal_output +1779,1745465,"TERMINAL",0,0,"9\t ",,terminal_output +1780,1746503,"TERMINAL",0,0,"10\t ",,terminal_output +1781,1747545,"TERMINAL",0,0,"1\t ",,terminal_output +1782,1748588,"TERMINAL",0,0,"2\t ",,terminal_output +1783,1749632,"TERMINAL",0,0,"4\t ",,terminal_output +1784,1750672,"TERMINAL",0,0,"5\t ",,terminal_output +1785,1751710,"TERMINAL",0,0,"6\t ",,terminal_output +1786,1752755,"TERMINAL",0,0,"7\t ",,terminal_output +1787,1753796,"TERMINAL",0,0,"8\t ",,terminal_output +1788,1754931,"TERMINAL",0,0,"9\t ",,terminal_output +1789,1755892,"TERMINAL",0,0,"20\t ",,terminal_output +1790,1756967,"TERMINAL",0,0,"1\t ",,terminal_output +1791,1757991,"TERMINAL",0,0,"2\t ",,terminal_output +1792,1759117,"TERMINAL",0,0,"3\t ",,terminal_output +1793,1760141,"TERMINAL",0,0,"4\t ",,terminal_output +1794,1761097,"TERMINAL",0,0,"5\t ",,terminal_output +1795,1762141,"TERMINAL",0,0,"6\t ",,terminal_output +1796,1763216,"TERMINAL",0,0,"7\t ",,terminal_output +1797,1764229,"TERMINAL",0,0,"8\t ",,terminal_output +1798,1765275,"TERMINAL",0,0,"9\t ",,terminal_output +1799,1766389,"TERMINAL",0,0,"30\t ",,terminal_output +1800,1767362,"TERMINAL",0,0,"1\t ",,terminal_output +1801,1768434,"TERMINAL",0,0,"2\t ",,terminal_output +1802,1769461,"TERMINAL",0,0,"3\t ",,terminal_output +1803,1770493,"TERMINAL",0,0,"4\t ",,terminal_output +1804,1771542,"TERMINAL",0,0,"5\t ",,terminal_output +1805,1772624,"TERMINAL",0,0,"6\t ",,terminal_output +1806,1773630,"TERMINAL",0,0,"8\t ",,terminal_output +1807,1774676,"TERMINAL",0,0,"9\t ",,terminal_output +1808,1775808,"TERMINAL",0,0,"40\t ",,terminal_output +1809,1776759,"TERMINAL",0,0,"1\t ",,terminal_output +1810,1777865,"TERMINAL",0,0,"2\t ",,terminal_output +1811,1778849,"TERMINAL",0,0,"3\t ",,terminal_output +1812,1779893,"TERMINAL",0,0,"4\t ",,terminal_output +1813,1780930,"TERMINAL",0,0,"5\t ",,terminal_output +1814,1781979,"TERMINAL",0,0,"6\t ",,terminal_output +1815,1783014,"TERMINAL",0,0,"7\t ",,terminal_output +1816,1784068,"TERMINAL",0,0,"8\t ",,terminal_output +1817,1785090,"TERMINAL",0,0,"9\t ",,terminal_output +1818,1786151,"TERMINAL",0,0,"50\t ",,terminal_output +1819,1787277,"TERMINAL",0,0,"1\t ",,terminal_output +1820,1788219,"TERMINAL",0,0,"2\t ",,terminal_output +1821,1789297,"TERMINAL",0,0,"3\t ",,terminal_output +1822,1790303,"TERMINAL",0,0,"4\t ",,terminal_output +1823,1791348,"TERMINAL",0,0,"5\t ",,terminal_output 
+1824,1792400,"TERMINAL",0,0,"6\t ",,terminal_output +1825,1793438,"TERMINAL",0,0,"7\t ",,terminal_output +1826,1794479,"TERMINAL",0,0,"8\t ",,terminal_output +1827,1795566,"TERMINAL",0,0,"9\t ",,terminal_output +1828,1796570,"TERMINAL",0,0,"4:00\t ",,terminal_output +1829,1797633,"TERMINAL",0,0,"1\t ",,terminal_output +1830,1798651,"TERMINAL",0,0,"3\t ",,terminal_output +1831,1799771,"TERMINAL",0,0,"4\t ",,terminal_output +1832,1800794,"TERMINAL",0,0,"5\t ",,terminal_output +1833,1801781,"TERMINAL",0,0,"6\t ",,terminal_output +1834,1802821,"TERMINAL",0,0,"7\t ",,terminal_output +1835,1803871,"TERMINAL",0,0,"8\t ",,terminal_output +1836,1804895,"TERMINAL",0,0,"9\t ",,terminal_output +1837,1805942,"TERMINAL",0,0,"10\t ",,terminal_output +1838,1807052,"TERMINAL",0,0,"1\t ",,terminal_output +1839,1808069,"TERMINAL",0,0,"2\t ",,terminal_output +1840,1809098,"TERMINAL",0,0,"3\t ",,terminal_output +1841,1810114,"TERMINAL",0,0,"4\t ",,terminal_output +1842,1811239,"TERMINAL",0,0,"5\t ",,terminal_output +1843,1812194,"TERMINAL",0,0,"6\t ",,terminal_output +1844,1813302,"TERMINAL",0,0,"7\t ",,terminal_output +1845,1813493,"TERMINAL",0,0,"a",,terminal_output +1846,1814318,"TERMINAL",0,0,"8\t ",,terminal_output +1847,1815328,"TERMINAL",0,0,"9\t ",,terminal_output +1848,1816358,"TERMINAL",0,0,"20\t ",,terminal_output +1849,1817462,"TERMINAL",0,0,"1\t ",,terminal_output +1850,1818440,"TERMINAL",0,0,"2\t ",,terminal_output +1851,1819485,"TERMINAL",0,0,"3\t ",,terminal_output +1852,1820557,"TERMINAL",0,0,"4\t ",,terminal_output +1853,1821560,"TERMINAL",0,0,"5\t ",,terminal_output +1854,1822629,"TERMINAL",0,0,"60",,terminal_output +1855,1823642,"TERMINAL",0,0,"8\t ",,terminal_output +1856,1824681,"TERMINAL",0,0,"9\t ",,terminal_output +1857,1825726,"TERMINAL",0,0,"30\t ",,terminal_output +1858,1826826,"TERMINAL",0,0,"1\t ",,terminal_output +1859,1827811,"TERMINAL",0,0,"2\t ",,terminal_output +1860,1828861,"TERMINAL",0,0,"3\t ",,terminal_output +1861,1829916,"TERMINAL",0,0,"4\t ",,terminal_output +1862,1830946,"TERMINAL",0,0,"5\t ",,terminal_output +1863,1832028,"TERMINAL",0,0,"6\t ",,terminal_output +1864,1833372,"TERMINAL",0,0,"7 9",,terminal_output +1865,1834411,"TERMINAL",0,0,"8\t ",,terminal_output +1866,1835514,"TERMINAL",0,0,"9\t ",,terminal_output +1867,1836536,"TERMINAL",0,0,"40\t ",,terminal_output +1868,1837533,"TERMINAL",0,0,"1\t ",,terminal_output +1869,1838590,"TERMINAL",0,0,"2\t ",,terminal_output +1870,1839636,"TERMINAL",0,0,"4\t ",,terminal_output +1871,1840666,"TERMINAL",0,0,"5\t ",,terminal_output +1872,1841728,"TERMINAL",0,0,"6\t ",,terminal_output +1873,1842740,"TERMINAL",0,0,"7\t ",,terminal_output +1874,1843819,"TERMINAL",0,0,"8\t ",,terminal_output +1875,1844830,"TERMINAL",0,0,"91",,terminal_output +1876,1845867,"TERMINAL",0,0,"50\t ",,terminal_output +1877,1846906,"TERMINAL",0,0,"1\t ",,terminal_output +1878,1847948,"TERMINAL",0,0,"2\t ",,terminal_output +1879,1848982,"TERMINAL",0,0,"3\t ",,terminal_output +1880,1850052,"TERMINAL",0,0,"4\t ",,terminal_output +1881,1851078,"TERMINAL",0,0,"5\t ",,terminal_output +1882,1852182,"TERMINAL",0,0,"6\t ",,terminal_output +1883,1853139,"TERMINAL",0,0,"7\t ",,terminal_output +1884,1854261,"TERMINAL",0,0,"8\t ",,terminal_output +1885,1855277,"TERMINAL",0,0,"9\t ",,terminal_output +1886,1856310,"TERMINAL",0,0,"5:00\t ",,terminal_output +1887,1857320,"TERMINAL",0,0,"1\t ",,terminal_output +1888,1858340,"TERMINAL",0,0,"2\t ",,terminal_output +1889,1859426,"TERMINAL",0,0,"3\t ",,terminal_output +1890,1860498,"TERMINAL",0,0,"4\t ",,terminal_output 
+1891,1861458,"TERMINAL",0,0,"5\t ",,terminal_output +1892,1862504,"TERMINAL",0,0,"6\t ",,terminal_output +1893,1863566,"TERMINAL",0,0,"7\t ",,terminal_output +1894,1864589,"TERMINAL",0,0,"8\t ",,terminal_output +1895,1865628,"TERMINAL",0,0,"10\t ",,terminal_output +1896,1866688,"TERMINAL",0,0,"1\t ",,terminal_output +1897,1867715,"TERMINAL",0,0,"2\t ",,terminal_output +1898,1868758,"TERMINAL",0,0,"3\t ",,terminal_output +1899,1869812,"TERMINAL",0,0,"4\t ",,terminal_output +1900,1870841,"TERMINAL",0,0,"5\t ",,terminal_output +1901,1871886,"TERMINAL",0,0,"6\t ",,terminal_output +1902,1872926,"TERMINAL",0,0,"7\t ",,terminal_output +1903,1873968,"TERMINAL",0,0,"8\t ",,terminal_output +1904,1875038,"TERMINAL",0,0,"9\t ",,terminal_output +1905,1876162,"TERMINAL",0,0,"20\t ",,terminal_output +1906,1877186,"TERMINAL",0,0,"1\t ",,terminal_output +1907,1878134,"TERMINAL",0,0,"2\t ",,terminal_output +1908,1879237,"TERMINAL",0,0,"3\t ",,terminal_output +1909,1880260,"TERMINAL",0,0,"4\t ",,terminal_output +1910,1881252,"TERMINAL",0,0,"5\t ",,terminal_output +1911,1882306,"TERMINAL",0,0,"6\t ",,terminal_output +1912,1883333,"TERMINAL",0,0,"7\t ",,terminal_output +1913,1884376,"TERMINAL",0,0,"8\t ",,terminal_output +1914,1885480,"TERMINAL",0,0,"9\t ",,terminal_output +1915,1886508,"TERMINAL",0,0,"30\t ",,terminal_output +1916,1887500,"TERMINAL",0,0,"1\t ",,terminal_output +1917,1888555,"TERMINAL",0,0,"2\t ",,terminal_output +1918,1889678,"TERMINAL",0,0,"3\t ",,terminal_output +1919,1890707,"TERMINAL",0,0,"5\t ",,terminal_output +1920,1891665,"TERMINAL",0,0,"6\t ",,terminal_output +1921,1892701,"TERMINAL",0,0,"7\t ",,terminal_output +1922,1893744,"TERMINAL",0,0,"8\t ",,terminal_output +1923,1894787,"TERMINAL",0,0,"9\t ",,terminal_output +1924,1895946,"TERMINAL",0,0,"40\t ",,terminal_output +1925,1896867,"TERMINAL",0,0,"10",,terminal_output +1926,1897911,"TERMINAL",0,0,"2\t ",,terminal_output +1927,1898954,"TERMINAL",0,0,"3\t ",,terminal_output +1928,1899996,"TERMINAL",0,0,"4\t ",,terminal_output +1929,1901036,"TERMINAL",0,0,"5\t ",,terminal_output +1930,1902077,"TERMINAL",0,0,"6\t ",,terminal_output +1931,1903121,"TERMINAL",0,0,"7\t ",,terminal_output +1932,1904158,"TERMINAL",0,0,"8\t ",,terminal_output +1933,1905244,"TERMINAL",0,0,"9\t ",,terminal_output +1934,1906230,"TERMINAL",0,0,"50\t ",,terminal_output +1935,1907295,"TERMINAL",0,0,"1\t ",,terminal_output +1936,1908321,"TERMINAL",0,0,"2\t ",,terminal_output +1937,1909351,"TERMINAL",0,0,"3\t ",,terminal_output +1938,1910469,"TERMINAL",0,0,"4\t ",,terminal_output +1939,1911417,"TERMINAL",0,0,"5\t ",,terminal_output +1940,1912477,"TERMINAL",0,0,"6\t ",,terminal_output +1941,1913504,"TERMINAL",0,0,"758",,terminal_output +1942,1914549,"TERMINAL",0,0,"8\t ",,terminal_output +1943,1915694,"TERMINAL",0,0,"9\t ",,terminal_output +1944,1916717,"TERMINAL",0,0,"6:01\t ",,terminal_output +1945,1917664,"TERMINAL",0,0,"2\t ",,terminal_output +1946,1918699,"TERMINAL",0,0,"3\t ",,terminal_output +1947,1919735,"TERMINAL",0,0,"4\t ",,terminal_output +1948,1920772,"TERMINAL",0,0,"5\t ",,terminal_output +1949,1921849,"TERMINAL",0,0,"6\t ",,terminal_output +1950,1922860,"TERMINAL",0,0,"7\t ",,terminal_output +1951,1923890,"TERMINAL",0,0,"8\t ",,terminal_output +1952,1924934,"TERMINAL",0,0,"9\t ",,terminal_output +1953,1925977,"TERMINAL",0,0,"10\t ",,terminal_output +1954,1927014,"TERMINAL",0,0,"1\t ",,terminal_output +1955,1928112,"TERMINAL",0,0,"2\t ",,terminal_output +1956,1929081,"TERMINAL",0,0,"3\t ",,terminal_output +1957,1930124,"TERMINAL",0,0,"4\t 
",,terminal_output +1958,1931246,"TERMINAL",0,0,"5\t ",,terminal_output +1959,1932284,"TERMINAL",0,0,"6\t ",,terminal_output +1960,1933239,"TERMINAL",0,0,"7\t ",,terminal_output +1961,1934315,"TERMINAL",0,0,"8\t ",,terminal_output +1962,1935312,"TERMINAL",0,0,"9\t ",,terminal_output +1963,1936373,"TERMINAL",0,0,"20\t ",,terminal_output +1964,1937460,"TERMINAL",0,0,"1\t ",,terminal_output +1965,1938428,"TERMINAL",0,0,"2\t ",,terminal_output +1966,1939556,"TERMINAL",0,0,"3\t ",,terminal_output +1967,1940574,"TERMINAL",0,0,"4\t ",,terminal_output +1968,1941595,"TERMINAL",0,0,"5\t ",,terminal_output +1969,1942586,"TERMINAL",0,0,"6\t ",,terminal_output +1970,1943651,"TERMINAL",0,0,"8\t ",,terminal_output +1971,1944665,"TERMINAL",0,0,"9\t ",,terminal_output +1972,1945798,"TERMINAL",0,0,"30\t ",,terminal_output +1973,1946820,"TERMINAL",0,0,"1\t ",,terminal_output +1974,1947847,"TERMINAL",0,0,"2\t ",,terminal_output +1975,1948868,"TERMINAL",0,0,"3\t ",,terminal_output +1976,1949893,"TERMINAL",0,0,"4\t ",,terminal_output +1977,1950917,"TERMINAL",0,0,"51",,terminal_output +1978,1951941,"TERMINAL",0,0,"6\t ",,terminal_output +1979,1952969,"TERMINAL",0,0,"7\t ",,terminal_output +1980,1954007,"TERMINAL",0,0,"8\t ",,terminal_output +1981,1955038,"TERMINAL",0,0,"9\t ",,terminal_output +1982,1956141,"TERMINAL",0,0,"40\t ",,terminal_output +1983,1957118,"TERMINAL",0,0,"1\t ",,terminal_output +1984,1958157,"TERMINAL",0,0,"2\t ",,terminal_output +1985,1959203,"TERMINAL",0,0,"3\t ",,terminal_output +1986,1960342,"TERMINAL",0,0,"4\t ",,terminal_output +1987,1961270,"TERMINAL",0,0,"50",,terminal_output +1988,1962305,"TERMINAL",0,0,"6\t ",,terminal_output +1989,1963412,"TERMINAL",0,0,"7\t ",,terminal_output +1990,1964377,"TERMINAL",0,0,"8\t ",,terminal_output +1991,1965421,"TERMINAL",0,0,"9\t ",,terminal_output +1992,1966484,"TERMINAL",0,0,"50\t ",,terminal_output +1993,1967503,"TERMINAL",0,0,"1\t ",,terminal_output +1994,1968631,"TERMINAL",0,0,"2\t ",,terminal_output +1995,1969656,"TERMINAL",0,0,"3\t ",,terminal_output +1996,1970683,"TERMINAL",0,0,"5\t ",,terminal_output +1997,1971704,"TERMINAL",0,0,"6\t ",,terminal_output +1998,1972724,"TERMINAL",0,0,"7\t ",,terminal_output +1999,1973865,"TERMINAL",0,0,"8\t ",,terminal_output +2000,1974808,"TERMINAL",0,0,"9\t ",,terminal_output +2001,1975876,"TERMINAL",0,0,"7:00\t ",,terminal_output +2002,1976918,"TERMINAL",0,0,"1\t ",,terminal_output +2003,1977934,"TERMINAL",0,0,"2\t ",,terminal_output +2004,1978988,"TERMINAL",0,0,"3\t ",,terminal_output +2005,1980015,"TERMINAL",0,0,"4\t ",,terminal_output +2006,1981061,"TERMINAL",0,0,"5\t ",,terminal_output +2007,1982093,"TERMINAL",0,0,"6\t ",,terminal_output +2008,1983191,"TERMINAL",0,0,"7\t ",,terminal_output +2009,1984184,"TERMINAL",0,0,"8\t ",,terminal_output +2010,1985224,"TERMINAL",0,0,"9\t ",,terminal_output +2011,1986264,"TERMINAL",0,0,"10\t ",,terminal_output +2012,1987305,"TERMINAL",0,0,"1\t ",,terminal_output +2013,1988393,"TERMINAL",0,0,"2\t ",,terminal_output +2014,1989388,"TERMINAL",0,0,"3\t ",,terminal_output +2015,1990441,"TERMINAL",0,0,"4\t ",,terminal_output +2016,1991471,"TERMINAL",0,0,"5\t ",,terminal_output +2017,1992511,"TERMINAL",0,0,"6\t ",,terminal_output +2018,1993623,"TERMINAL",0,0,"7\t ",,terminal_output +2019,1994594,"TERMINAL",0,0,"8\t ",,terminal_output +2020,1995668,"TERMINAL",0,0,"20\t ",,terminal_output +2021,1996691,"TERMINAL",0,0,"1\t ",,terminal_output +2022,1997714,"TERMINAL",0,0,"2\t ",,terminal_output +2023,1998842,"TERMINAL",0,0,"3\t ",,terminal_output 
+2024,1999888,"TERMINAL",0,0,"4\t ",,terminal_output +2025,2000834,"TERMINAL",0,0,"5\t ",,terminal_output +2026,2001929,"TERMINAL",0,0,"680",,terminal_output +2027,2002934,"TERMINAL",0,0,"7\t ",,terminal_output +2028,2003955,"TERMINAL",0,0,"8\t ",,terminal_output +2029,2004996,"TERMINAL",0,0,"9\t ",,terminal_output +2030,2006122,"TERMINAL",0,0,"30\t ",,terminal_output +2031,2007087,"TERMINAL",0,0,"1\t ",,terminal_output +2032,2008158,"TERMINAL",0,0,"2\t ",,terminal_output +2033,2009169,"TERMINAL",0,0,"3\t ",,terminal_output +2034,2010281,"TERMINAL",0,0,"4\t ",,terminal_output +2035,2011310,"TERMINAL",0,0,"5\t ",,terminal_output +2036,2012357,"TERMINAL",0,0,"6\t ",,terminal_output +2037,2013384,"TERMINAL",0,0,"7\t ",,terminal_output +2038,2014375,"TERMINAL",0,0,"8\t ",,terminal_output +2039,2015417,"TERMINAL",0,0,"9\t ",,terminal_output +2040,2016460,"TERMINAL",0,0,"40\t ",,terminal_output +2041,2017498,"TERMINAL",0,0,"1\t ",,terminal_output +2042,2018540,"TERMINAL",0,0,"2\t ",,terminal_output +2043,2019578,"TERMINAL",0,0,"3\t ",,terminal_output +2044,2020651,"TERMINAL",0,0,"5\t ",,terminal_output +2045,2021906,"TERMINAL",0,0,"6\t ",,terminal_output +2046,2022917,"TERMINAL",0,0,"7\t ",,terminal_output +2047,2023954,"TERMINAL",0,0,"8\t ",,terminal_output +2048,2024999,"TERMINAL",0,0,"91",,terminal_output +2049,2026034,"TERMINAL",0,0,"50\t ",,terminal_output +2050,2027080,"TERMINAL",0,0,"1\t ",,terminal_output +2051,2028227,"TERMINAL",0,0,"2\t ",,terminal_output +2052,2029254,"TERMINAL",0,0,"3\t ",,terminal_output +2053,2030274,"TERMINAL",0,0,"4\t ",,terminal_output +2054,2031298,"TERMINAL",0,0,"5\t ",,terminal_output +2055,2032271,"TERMINAL",0,0,"6\t ",,terminal_output +2056,2033861,"TERMINAL",0,0,"7\t ",,terminal_output +2057,2034904,"TERMINAL",0,0,"9\t ",,terminal_output +2058,2035939,"TERMINAL",0,0,"8:00\t ",,terminal_output +2059,2036980,"TERMINAL",0,0,"1\t ",,terminal_output +2060,2038016,"TERMINAL",0,0,"2\t ",,terminal_output +2061,2039060,"TERMINAL",0,0,"3\t ",,terminal_output +2062,2040113,"TERMINAL",0,0,"4\t ",,terminal_output +2063,2041236,"TERMINAL",0,0,"5\t ",,terminal_output +2064,2042183,"TERMINAL",0,0,"6\t ",,terminal_output +2065,2043226,"TERMINAL",0,0,"7\t ",,terminal_output +2066,2044307,"TERMINAL",0,0,"8\t ",,terminal_output +2067,2045291,"TERMINAL",0,0,"9\t ",,terminal_output +2068,2046330,"TERMINAL",0,0,"10\t ",,terminal_output +2069,2047372,"TERMINAL",0,0,"1\t ",,terminal_output +2070,2048411,"TERMINAL",0,0,"2\t ",,terminal_output +2071,2049460,"TERMINAL",0,0,"3\t ",,terminal_output +2072,2050509,"TERMINAL",0,0,"4\t ",,terminal_output +2073,2051578,"TERMINAL",0,0,"5\t ",,terminal_output +2074,2052581,"TERMINAL",0,0,"6\t ",,terminal_output +2075,2053618,"TERMINAL",0,0,"8\t ",,terminal_output +2076,2054756,"TERMINAL",0,0,"9\t ",,terminal_output +2077,2055697,"TERMINAL",0,0,"20\t ",,terminal_output +2078,2056744,"TERMINAL",0,0,"1\t ",,terminal_output +2079,2057822,"TERMINAL",0,0,"20",,terminal_output +2080,2058824,"TERMINAL",0,0,"3\t ",,terminal_output +2081,2059872,"TERMINAL",0,0,"4\t ",,terminal_output +2082,2060996,"TERMINAL",0,0,"5\t ",,terminal_output +2083,2061944,"TERMINAL",0,0,"6\t ",,terminal_output +2084,2062983,"TERMINAL",0,0,"7\t ",,terminal_output +2085,2064036,"TERMINAL",0,0,"8\t ",,terminal_output +2086,2065073,"TERMINAL",0,0,"9\t ",,terminal_output +2087,2066130,"TERMINAL",0,0,"30\t ",,terminal_output +2088,2067150,"TERMINAL",0,0,"1\t ",,terminal_output +2089,2068187,"TERMINAL",0,0,"2\t ",,terminal_output +2090,2069294,"TERMINAL",0,0,"3\t ",,terminal_output 
+2091,2070316,"TERMINAL",0,0,"4\t ",,terminal_output +2092,2071344,"TERMINAL",0,0,"5\t ",,terminal_output +2093,2072361,"TERMINAL",0,0,"6\t ",,terminal_output +2094,2073385,"TERMINAL",0,0,"7\t ",,terminal_output +2095,2074415,"TERMINAL",0,0,"8\t ",,terminal_output +2096,2075536,"TERMINAL",0,0,"9\t ",,terminal_output +2097,2076561,"TERMINAL",0,0,"40\t ",,terminal_output +2098,2077526,"TERMINAL",0,0,"1\t ",,terminal_output +2099,2078607,"TERMINAL",0,0,"2\t ",,terminal_output +2100,2079605,"TERMINAL",0,0,"3\t ",,terminal_output +2101,2080640,"TERMINAL",0,0,"5\t ",,terminal_output +2102,2081683,"TERMINAL",0,0,"6\t ",,terminal_output +2103,2082714,"TERMINAL",0,0,"7\t ",,terminal_output +2104,2083833,"TERMINAL",0,0,"8\t ",,terminal_output +2105,2084857,"TERMINAL",0,0,"9\t ",,terminal_output +2106,2085854,"TERMINAL",0,0,"50\t ",,terminal_output +2107,2086928,"TERMINAL",0,0,"1\t ",,terminal_output +2108,2087922,"TERMINAL",0,0,"2\t ",,terminal_output +2109,2088966,"TERMINAL",0,0,"3\t ",,terminal_output +2110,2090003,"TERMINAL",0,0,"4\t ",,terminal_output +2111,2091045,"TERMINAL",0,0,"5\t ",,terminal_output +2112,2092087,"TERMINAL",0,0,"6\t ",,terminal_output +2113,2093152,"TERMINAL",0,0,"7\t ",,terminal_output +2114,2094169,"TERMINAL",0,0,"8\t ",,terminal_output +2115,2095249,"TERMINAL",0,0,"9\t ",,terminal_output +2116,2096276,"TERMINAL",0,0,"9:00\t ",,terminal_output +2117,2097291,"TERMINAL",0,0,"1\t ",,terminal_output +2118,2098374,"TERMINAL",0,0,"2\t ",,terminal_output +2119,2099377,"TERMINAL",0,0,"3\t ",,terminal_output +2120,2100424,"TERMINAL",0,0,"4\t ",,terminal_output +2121,2101465,"TERMINAL",0,0,"5\t ",,terminal_output +2122,2102508,"TERMINAL",0,0,"6\t ",,terminal_output +2123,2103596,"TERMINAL",0,0,"7\t ",,terminal_output +2124,2104587,"TERMINAL",0,0,"8\t ",,terminal_output +2125,2105644,"TERMINAL",0,0,"10\t ",,terminal_output +2126,2106671,"TERMINAL",0,0,"1\t ",,terminal_output +2127,2107714,"TERMINAL",0,0,"2\t ",,terminal_output +2128,2108754,"TERMINAL",0,0,"3\t ",,terminal_output +2129,2109845,"TERMINAL",0,0,"4\t ",,terminal_output +2130,2110834,"TERMINAL",0,0,"5\t ",,terminal_output +2131,2111891,"TERMINAL",0,0,"6\t ",,terminal_output +2132,2112929,"TERMINAL",0,0,"7\t ",,terminal_output +2133,2113956,"TERMINAL",0,0,"8\t ",,terminal_output +2134,2114990,"TERMINAL",0,0,"9\t ",,terminal_output +2135,2116031,"TERMINAL",0,0,"20\t ",,terminal_output +2136,2117070,"TERMINAL",0,0,"1\t ",,terminal_output +2137,2118195,"TERMINAL",0,0,"2\t ",,terminal_output +2138,2119151,"TERMINAL",0,0,"3\t ",,terminal_output +2139,2120186,"TERMINAL",0,0,"4\t ",,terminal_output +2140,2121310,"TERMINAL",0,0,"5\t ",,terminal_output +2141,2122333,"TERMINAL",0,0,"6\t ",,terminal_output +2142,2123330,"TERMINAL",0,0,"7\t ",,terminal_output +2143,2124345,"TERMINAL",0,0,"8\t ",,terminal_output +2144,2125407,"TERMINAL",0,0,"9\t ",,terminal_output +2145,2126431,"TERMINAL",0,0,"30\t ",,terminal_output +2146,2127466,"TERMINAL",0,0,"1\t ",,terminal_output +2147,2128506,"TERMINAL",0,0,"2\t ",,terminal_output +2148,2129555,"TERMINAL",0,0,"3\t ",,terminal_output +2149,2130597,"TERMINAL",0,0,"4\t ",,terminal_output +2150,2131639,"TERMINAL",0,0,"6\t ",,terminal_output +2151,2132719,"TERMINAL",0,0,"7\t ",,terminal_output +2152,2133805,"TERMINAL",0,0,"8\t ",,terminal_output +2153,2135125,"TERMINAL",0,0,"9\t ",,terminal_output +2154,2136173,"TERMINAL",0,0,"40\t ",,terminal_output +2155,2137288,"TERMINAL",0,0,"1\t ",,terminal_output +2156,2138309,"TERMINAL",0,0,"2\t ",,terminal_output +2157,2139326,"TERMINAL",0,0,"3\t 
",,terminal_output +2158,2140327,"TERMINAL",0,0,"4\t ",,terminal_output +2159,2141357,"TERMINAL",0,0,"5\t ",,terminal_output +2160,2142395,"TERMINAL",0,0,"6\t ",,terminal_output +2161,2143435,"TERMINAL",0,0,"7\t ",,terminal_output +2162,2144478,"TERMINAL",0,0,"8\t ",,terminal_output +2163,2145519,"TERMINAL",0,0,"9\t ",,terminal_output +2164,2146604,"TERMINAL",0,0,"50\t ",,terminal_output +2165,2147637,"TERMINAL",0,0,"1\t ",,terminal_output +2166,2148643,"TERMINAL",0,0,"3\t ",,terminal_output +2167,2149684,"TERMINAL",0,0,"4\t ",,terminal_output +2168,2150737,"TERMINAL",0,0,"5\t ",,terminal_output +2169,2151773,"TERMINAL",0,0,"6\t ",,terminal_output +2170,2152849,"TERMINAL",0,0,"7\t ",,terminal_output +2171,2153968,"TERMINAL",0,0,"864",,terminal_output +2172,2155008,"TERMINAL",0,0,"9\t ",,terminal_output +2173,2156048,"TERMINAL",0,0,"5:00:00\t ",,terminal_output +2174,2157156,"TERMINAL",0,0,"1\t ",,terminal_output +2175,2158177,"TERMINAL",0,0,"2\t ",,terminal_output +2176,2159200,"TERMINAL",0,0,"3\t ",,terminal_output +2177,2160228,"TERMINAL",0,0,"4\t ",,terminal_output +2178,2161252,"TERMINAL",0,0,"5\t ",,terminal_output +2179,2162376,"TERMINAL",0,0,"6\t ",,terminal_output +2180,2163389,"TERMINAL",0,0,"7\t ",,terminal_output +2181,2164386,"TERMINAL",0,0,"8\t ",,terminal_output +2182,2165452,"TERMINAL",0,0,"9\t ",,terminal_output +2183,2166450,"TERMINAL",0,0,"10\t ",,terminal_output +2184,2167488,"TERMINAL",0,0,"1\t ",,terminal_output +2185,2168621,"TERMINAL",0,0,"2\t ",,terminal_output +2186,2169567,"TERMINAL",0,0,"3\t ",,terminal_output +2187,2170605,"TERMINAL",0,0,"4\t ",,terminal_output +2188,2171651,"TERMINAL",0,0,"6\t ",,terminal_output +2189,2172692,"TERMINAL",0,0,"7\t ",,terminal_output +2190,2173733,"TERMINAL",0,0,"8\t ",,terminal_output +2191,2174774,"TERMINAL",0,0,"9\t ",,terminal_output +2192,2175894,"TERMINAL",0,0,"20\t ",,terminal_output +2193,2176848,"TERMINAL",0,0,"1\t ",,terminal_output +2194,2177953,"TERMINAL",0,0,"2\t ",,terminal_output +2195,2178934,"TERMINAL",0,0,"3\t ",,terminal_output +2196,2179989,"TERMINAL",0,0,"4\t ",,terminal_output +2197,2181011,"TERMINAL",0,0,"5\t ",,terminal_output +2198,2182054,"TERMINAL",0,0,"6\t ",,terminal_output +2199,2183090,"TERMINAL",0,0,"7\t ",,terminal_output +2200,2184188,"TERMINAL",0,0,"8\t ",,terminal_output +2201,2185211,"TERMINAL",0,0,"9\t ",,terminal_output +2202,2186235,"TERMINAL",0,0,"30\t ",,terminal_output +2203,2187252,"TERMINAL",0,0,"1\t ",,terminal_output +2204,2188383,"TERMINAL",0,0,"2\t ",,terminal_output +2205,2189330,"TERMINAL",0,0,"3\t ",,terminal_output +2206,2190429,"TERMINAL",0,0,"4\t ",,terminal_output +2207,2191415,"TERMINAL",0,0,"5\t ",,terminal_output +2208,2192451,"TERMINAL",0,0,"6\t ",,terminal_output +2209,2193491,"TERMINAL",0,0,"7\t ",,terminal_output +2210,2194631,"TERMINAL",0,0,"8\t ",,terminal_output +2211,2195655,"TERMINAL",0,0,"9\t ",,terminal_output +2212,2196604,"TERMINAL",0,0,"40\t ",,terminal_output +2213,2197639,"TERMINAL",0,0,"2\t ",,terminal_output +2214,2198683,"TERMINAL",0,0,"3\t ",,terminal_output +2215,2199721,"TERMINAL",0,0,"4\t ",,terminal_output +2216,2200765,"TERMINAL",0,0,"51",,terminal_output +2217,2201802,"TERMINAL",0,0,"6\t ",,terminal_output +2218,2202836,"TERMINAL",0,0,"7\t ",,terminal_output +2219,2203966,"TERMINAL",0,0,"8\t ",,terminal_output +2220,2204909,"TERMINAL",0,0,"9\t ",,terminal_output +2221,2206001,"TERMINAL",0,0,"50\t ",,terminal_output +2222,2206986,"TERMINAL",0,0,"1\t ",,terminal_output +2223,2208028,"TERMINAL",0,0,"2\t ",,terminal_output 
+2224,2209073,"TERMINAL",0,0,"3\t ",,terminal_output +2225,2210115,"TERMINAL",0,0,"4\t ",,terminal_output +2226,2211153,"TERMINAL",0,0,"50",,terminal_output +2227,2212194,"TERMINAL",0,0,"6\t ",,terminal_output +2228,2213230,"TERMINAL",0,0,"7\t ",,terminal_output +2229,2214268,"TERMINAL",0,0,"8\t ",,terminal_output +2230,2215417,"TERMINAL",0,0,"9\t ",,terminal_output +2231,2216358,"TERMINAL",0,0,"1:00\t ",,terminal_output +2232,2217462,"TERMINAL",0,0,"1\t ",,terminal_output +2233,2218429,"TERMINAL",0,0,"2\t ",,terminal_output +2234,2219514,"TERMINAL",0,0,"3\t ",,terminal_output +2235,2220537,"TERMINAL",0,0,"4\t ",,terminal_output +2236,2221560,"TERMINAL",0,0,"5\t ",,terminal_output +2237,2222688,"TERMINAL",0,0,"6\t ",,terminal_output +2238,2223632,"TERMINAL",0,0,"8\t ",,terminal_output +2239,2224670,"TERMINAL",0,0,"9\t ",,terminal_output +2240,2225710,"TERMINAL",0,0,"10\t ",,terminal_output +2241,2226751,"TERMINAL",0,0,"1\t ",,terminal_output +2242,2227810,"TERMINAL",0,0,"2\t ",,terminal_output +2243,2228826,"TERMINAL",0,0,"3\t ",,terminal_output +2244,2229875,"TERMINAL",0,0,"4\t ",,terminal_output +2245,2230983,"TERMINAL",0,0,"5\t ",,terminal_output +2246,2232009,"TERMINAL",0,0,"6\t ",,terminal_output +2247,2232990,"TERMINAL",0,0,"7\t ",,terminal_output +2248,2234031,"TERMINAL",0,0,"8\t ",,terminal_output +2249,2235080,"TERMINAL",0,0,"9\t ",,terminal_output +2250,2236108,"TERMINAL",0,0,"20\t ",,terminal_output +2251,2237147,"TERMINAL",0,0,"1\t ",,terminal_output +2252,2238191,"TERMINAL",0,0,"2\t ",,terminal_output +2253,2239231,"TERMINAL",0,0,"3\t ",,terminal_output +2254,2240301,"TERMINAL",0,0,"4\t ",,terminal_output +2255,2241352,"TERMINAL",0,0,"5\t ",,terminal_output +2256,2242351,"TERMINAL",0,0,"6\t ",,terminal_output +2257,2243476,"TERMINAL",0,0,"7\t ",,terminal_output +2258,2244433,"TERMINAL",0,0,"8\t ",,terminal_output +2259,2245487,"TERMINAL",0,0,"9\t ",,terminal_output +2260,2246520,"TERMINAL",0,0,"30\t ",,terminal_output +2261,2247559,"TERMINAL",0,0,"1\t ",,terminal_output +2262,2248701,"TERMINAL",0,0,"2\t ",,terminal_output +2263,2249645,"TERMINAL",0,0,"4\t ",,terminal_output +2264,2250687,"TERMINAL",0,0,"5\t ",,terminal_output +2265,2251771,"TERMINAL",0,0,"6\t ",,terminal_output +2266,2252774,"TERMINAL",0,0,"7\t ",,terminal_output +2267,2253810,"TERMINAL",0,0,"8\t ",,terminal_output +2268,2254853,"TERMINAL",0,0,"91",,terminal_output +2269,2255889,"TERMINAL",0,0,"403",,terminal_output +2270,2256994,"TERMINAL",0,0,"1\t ",,terminal_output +2271,2258018,"TERMINAL",0,0,"2\t ",,terminal_output +2272,2259010,"TERMINAL",0,0,"3\t ",,terminal_output +2273,2260066,"TERMINAL",0,0,"4\t ",,terminal_output +2274,2261101,"TERMINAL",0,0,"5\t ",,terminal_output +2275,2262139,"TERMINAL",0,0,"6\t ",,terminal_output +2276,2263176,"TERMINAL",0,0,"7\t ",,terminal_output +2277,2264263,"TERMINAL",0,0,"8\t ",,terminal_output +2278,2265287,"TERMINAL",0,0,"9\t ",,terminal_output +2279,2266304,"TERMINAL",0,0,"50\t ",,terminal_output +2280,2267341,"TERMINAL",0,0,"1\t ",,terminal_output +2281,2268386,"TERMINAL",0,0,"2\t ",,terminal_output +2282,2269432,"TERMINAL",0,0,"3\t ",,terminal_output +2283,2270508,"TERMINAL",0,0,"4\t ",,terminal_output +2284,2271509,"TERMINAL",0,0,"5\t ",,terminal_output +2285,2272551,"TERMINAL",0,0,"6\t ",,terminal_output +2286,2273684,"TERMINAL",0,0,"7\t ",,terminal_output +2287,2274019,"TERMINAL",0,0,"salloc: job 3306221 has been allocated resources\r\nsalloc: Granted job allocation 3306221\r\n",,terminal_output +2288,2274130,"TERMINAL",0,0,"salloc: Waiting for resource 
configuration\r\n",,terminal_output +2289,2274631,"TERMINAL",0,0,"92",,terminal_output +2290,2275677,"TERMINAL",0,0,"2:00\t ",,terminal_output +2291,2276712,"TERMINAL",0,0,"1\t ",,terminal_output +2292,2277755,"TERMINAL",0,0,"2\t ",,terminal_output +2293,2278802,"TERMINAL",0,0,"3\t ",,terminal_output +2294,2279839,"TERMINAL",0,0,"4\t ",,terminal_output +2295,2280877,"TERMINAL",0,0,"5\t ",,terminal_output +2296,2281916,"TERMINAL",0,0,"6\t ",,terminal_output +2297,2283023,"TERMINAL",0,0,"7\t ",,terminal_output +2298,2284047,"TERMINAL",0,0,"8\t ",,terminal_output +2299,2285052,"TERMINAL",0,0,"9\t ",,terminal_output +2300,2286081,"TERMINAL",0,0,"10\t ",,terminal_output +2301,2287125,"TERMINAL",0,0,"1\t ",,terminal_output +2302,2288165,"TERMINAL",0,0,"2\t ",,terminal_output +2303,2289209,"TERMINAL",0,0,"3\t ",,terminal_output +2304,2290242,"TERMINAL",0,0,"4\t ",,terminal_output +2305,2291295,"TERMINAL",0,0,"5\t ",,terminal_output +2306,2292449,"TERMINAL",0,0,"6\t ",,terminal_output +2307,2293372,"TERMINAL",0,0,"7\t ",,terminal_output +2308,2294416,"TERMINAL",0,0,"8\t ",,terminal_output +2309,2295477,"TERMINAL",0,0,"9\t ",,terminal_output +2310,2296509,"TERMINAL",0,0,"20\t ",,terminal_output +2311,2297554,"TERMINAL",0,0,"1\t ",,terminal_output +2312,2298672,"TERMINAL",0,0,"2\t ",,terminal_output +2313,2299635,"TERMINAL",0,0,"4\t ",,terminal_output +2314,2300681,"TERMINAL",0,0,"5\t ",,terminal_output +2315,2301166,"TERMINAL",0,0,"salloc: Nodes hkn0719 are ready for job\r\n",,terminal_output +2316,2301724,"TERMINAL",0,0,"6\t ",,terminal_output +2317,2302764,"TERMINAL",0,0,"7\t ",,terminal_output +2318,2303815,"TERMINAL",0,0,"]0;tum_cte0515@hkn0719:~/Projects/jafar[?2004h[tum_cte0515@hkn0719 jafar]$ ",,terminal_output +2319,2303841,"TERMINAL",0,0,"8\t ",,terminal_output +2320,2304922,"TERMINAL",0,0,"9\t ",,terminal_output +2321,2305941,"TERMINAL",0,0,"30\t ",,terminal_output +2322,2306934,"TERMINAL",0,0,"1\t ",,terminal_output +2323,2307987,"TERMINAL",0,0,"2\t ",,terminal_output +2324,2309132,"TERMINAL",0,0,"3\t ",,terminal_output +2325,2310057,"TERMINAL",0,0,"4\t ",,terminal_output +2326,2311108,"TERMINAL",0,0,"5\t ",,terminal_output +2327,2312147,"TERMINAL",0,0,"6\t ",,terminal_output +2328,2313182,"TERMINAL",0,0,"7\t ",,terminal_output +2329,2314229,"TERMINAL",0,0,"8\t ",,terminal_output +2330,2315274,"TERMINAL",0,0,"9\t ",,terminal_output +2331,2316310,"TERMINAL",0,0,"40\t ",,terminal_output +2332,2317448,"TERMINAL",0,0,"1\t ",,terminal_output +2333,2318434,"TERMINAL",0,0,"2\t ",,terminal_output +2334,2319429,"TERMINAL",0,0,"3\t ",,terminal_output +2335,2320472,"TERMINAL",0,0,"4\t ",,terminal_output +2336,2321607,"TERMINAL",0,0,"580",,terminal_output +2337,2322564,"TERMINAL",0,0,"6\t ",,terminal_output +2338,2323625,"TERMINAL",0,0,"7\t ",,terminal_output +2339,2324679,"TERMINAL",0,0,"9\t ",,terminal_output +2340,2325704,"TERMINAL",0,0,"50\t ",,terminal_output +2341,2326742,"TERMINAL",0,0,"1\t ",,terminal_output +2342,2327781,"TERMINAL",0,0,"2\t ",,terminal_output +2343,2328878,"TERMINAL",0,0,"3\t ",,terminal_output +2344,2329857,"TERMINAL",0,0,"4\t ",,terminal_output +2345,2330926,"TERMINAL",0,0,"5\t ",,terminal_output +2346,2331941,"TERMINAL",0,0,"6\t ",,terminal_output +2347,2332987,"TERMINAL",0,0,"7\t ",,terminal_output +2348,2334048,"TERMINAL",0,0,"8\t ",,terminal_output +2349,2335070,"TERMINAL",0,0,"9\t ",,terminal_output +2350,2336116,"TERMINAL",0,0,"3:00\t ",,terminal_output +2351,2337161,"TERMINAL",0,0,"1\t ",,terminal_output +2352,2338202,"TERMINAL",0,0,"2\t ",,terminal_output 
+2353,2339239,"TERMINAL",0,0,"3\t ",,terminal_output +2354,2340283,"TERMINAL",0,0,"4\t ",,terminal_output +2355,2341355,"TERMINAL",0,0,"5\t ",,terminal_output +2356,2342387,"TERMINAL",0,0,"6\t ",,terminal_output +2357,2343432,"TERMINAL",0,0,"7\t ",,terminal_output +2358,2344547,"TERMINAL",0,0,"8\t ",,terminal_output +2359,2345568,"TERMINAL",0,0,"9\t ",,terminal_output +2360,2346593,"TERMINAL",0,0,"1010",,terminal_output +2361,2347577,"TERMINAL",0,0,"1\t ",,terminal_output +2362,2348614,"TERMINAL",0,0,"3\t ",,terminal_output +2363,2349650,"TERMINAL",0,0,"4\t ",,terminal_output +2364,2350705,"TERMINAL",0,0,"5\t ",,terminal_output +2365,2351741,"TERMINAL",0,0,"6\t ",,terminal_output +2366,2352839,"TERMINAL",0,0,"7\t ",,terminal_output +2367,2353827,"TERMINAL",0,0,"8\t ",,terminal_output +2368,2354895,"TERMINAL",0,0,"9\t ",,terminal_output +2369,2355920,"TERMINAL",0,0,"20\t ",,terminal_output +2370,2357040,"TERMINAL",0,0,"1\t ",,terminal_output +2371,2358065,"TERMINAL",0,0,"2\t ",,terminal_output +2372,2359090,"TERMINAL",0,0,"3\t ",,terminal_output +2373,2360084,"TERMINAL",0,0,"4\t ",,terminal_output +2374,2361121,"TERMINAL",0,0,"5\t ",,terminal_output +2375,2362171,"TERMINAL",0,0,"6\t ",,terminal_output +2376,2363205,"TERMINAL",0,0,"7\t ",,terminal_output +2377,2364246,"TERMINAL",0,0,"8\t ",,terminal_output +2378,2365334,"TERMINAL",0,0,"9\t ",,terminal_output +2379,2366347,"TERMINAL",0,0,"30\t ",,terminal_output +2380,2367360,"TERMINAL",0,0,"1\t ",,terminal_output +2381,2368404,"TERMINAL",0,0,"2\t ",,terminal_output +2382,2369440,"TERMINAL",0,0,"3\t ",,terminal_output +2383,2370486,"TERMINAL",0,0,"4\t ",,terminal_output +2384,2371581,"TERMINAL",0,0,"5\t ",,terminal_output +2385,2372571,"TERMINAL",0,0,"6\t ",,terminal_output +2386,2373631,"TERMINAL",0,0,"7\t ",,terminal_output +2387,2374756,"TERMINAL",0,0,"9\t ",,terminal_output +2388,2375699,"TERMINAL",0,0,"40\t ",,terminal_output +2389,2376807,"TERMINAL",0,0,"1\t ",,terminal_output +2390,2377767,"TERMINAL",0,0,"2\t ",,terminal_output +2391,2378814,"TERMINAL",0,0,"3\t ",,terminal_output +2392,2379850,"TERMINAL",0,0,"4\t ",,terminal_output +2393,2380899,"TERMINAL",0,0,"5\t ",,terminal_output +2394,2382024,"TERMINAL",0,0,"6\t ",,terminal_output +2395,2383065,"TERMINAL",0,0,"7\t ",,terminal_output +2396,2384019,"TERMINAL",0,0,"8\t ",,terminal_output +2397,2385060,"TERMINAL",0,0,"9\t ",,terminal_output +2398,2386122,"TERMINAL",0,0,"50\t ",,terminal_output +2399,2387159,"TERMINAL",0,0,"1\t ",,terminal_output +2400,2388195,"TERMINAL",0,0,"2\t ",,terminal_output +2401,2389246,"TERMINAL",0,0,"3\t ",,terminal_output +2402,2390320,"TERMINAL",0,0,"4\t ",,terminal_output +2403,2391328,"TERMINAL",0,0,"5\t ",,terminal_output +2404,2392358,"TERMINAL",0,0,"6\t ",,terminal_output +2405,2393395,"TERMINAL",0,0,"7\t ",,terminal_output +2406,2394437,"TERMINAL",0,0,"864",,terminal_output +2407,2395475,"TERMINAL",0,0,"9\t ",,terminal_output +2408,2396518,"TERMINAL",0,0,"4:00\t ",,terminal_output +2409,2397565,"TERMINAL",0,0,"1\t ",,terminal_output +2410,2398601,"TERMINAL",0,0,"2\t ",,terminal_output +2411,2399652,"TERMINAL",0,0,"4\t ",,terminal_output +2412,2400689,"TERMINAL",0,0,"5\t ",,terminal_output +2413,2401733,"TERMINAL",0,0,"6\t ",,terminal_output +2414,2402767,"TERMINAL",0,0,"7\t ",,terminal_output +2415,2403940,"TERMINAL",0,0,"8\t ",,terminal_output +2416,2404859,"TERMINAL",0,0,"9\t ",,terminal_output +2417,2405928,"TERMINAL",0,0,"10\t ",,terminal_output +2418,2406945,"TERMINAL",0,0,"1\t ",,terminal_output +2419,2407985,"TERMINAL",0,0,"2\t 
",,terminal_output +2420,2409059,"TERMINAL",0,0,"3\t ",,terminal_output +2421,2410082,"TERMINAL",0,0,"4\t ",,terminal_output +2422,2411108,"TERMINAL",0,0,"5\t ",,terminal_output +2423,2412147,"TERMINAL",0,0,"6\t ",,terminal_output +2424,2413188,"TERMINAL",0,0,"7\t ",,terminal_output +2425,2414234,"TERMINAL",0,0,"8\t ",,terminal_output +2426,2415276,"TERMINAL",0,0,"9\t ",,terminal_output +2427,2416431,"TERMINAL",0,0,"20\t ",,terminal_output +2428,2417459,"TERMINAL",0,0,"1\t ",,terminal_output +2429,2418402,"TERMINAL",0,0,"2\t ",,terminal_output +2430,2419505,"TERMINAL",0,0,"3\t ",,terminal_output +2431,2420489,"TERMINAL",0,0,"4\t ",,terminal_output +2432,2421530,"TERMINAL",0,0,"5\t ",,terminal_output +2433,2422573,"TERMINAL",0,0,"6\t ",,terminal_output +2434,2423613,"TERMINAL",0,0,"7\t ",,terminal_output +2435,2424651,"TERMINAL",0,0,"9\t ",,terminal_output +2436,2425690,"TERMINAL",0,0,"30\t ",,terminal_output +2437,2426772,"TERMINAL",0,0,"1\t ",,terminal_output +2438,2427765,"TERMINAL",0,0,"2\t ",,terminal_output +2439,2428821,"TERMINAL",0,0,"3\t ",,terminal_output +2440,2429947,"TERMINAL",0,0,"4\t ",,terminal_output +2441,2430882,"TERMINAL",0,0,"5\t ",,terminal_output +2442,2431921,"TERMINAL",0,0,"6\t ",,terminal_output +2443,2433020,"TERMINAL",0,0,"7\t ",,terminal_output +2444,2434044,"TERMINAL",0,0,"8\t ",,terminal_output +2445,2435078,"TERMINAL",0,0,"9\t ",,terminal_output +2446,2436094,"TERMINAL",0,0,"40\t ",,terminal_output +2447,2437122,"TERMINAL",0,0,"1\t ",,terminal_output +2448,2438171,"TERMINAL",0,0,"2\t ",,terminal_output +2449,2439213,"TERMINAL",0,0,"3\t ",,terminal_output +2450,2440291,"TERMINAL",0,0,"4\t ",,terminal_output +2451,2441317,"TERMINAL",0,0,"5\t ",,terminal_output +2452,2442342,"TERMINAL",0,0,"6\t ",,terminal_output +2453,2443467,"TERMINAL",0,0,"7\t ",,terminal_output +2454,2444400,"TERMINAL",0,0,"8\t ",,terminal_output +2455,2445514,"TERMINAL",0,0,"9\t ",,terminal_output +2456,2446478,"TERMINAL",0,0,"50\t ",,terminal_output +2457,2447519,"TERMINAL",0,0,"1\t ",,terminal_output +2458,2448561,"TERMINAL",0,0,"2\t ",,terminal_output +2459,2449598,"TERMINAL",0,0,"3\t ",,terminal_output +2460,2450675,"TERMINAL",0,0,"5\t ",,terminal_output +2461,2451679,"TERMINAL",0,0,"6\t ",,terminal_output +2462,2452718,"TERMINAL",0,0,"7\t ",,terminal_output +2463,2453810,"TERMINAL",0,0,"8\t ",,terminal_output +2464,2454803,"TERMINAL",0,0,"9\t ",,terminal_output +2465,2455843,"TERMINAL",0,0,"5:00\t ",,terminal_output +2466,2456983,"TERMINAL",0,0,"1\t ",,terminal_output +2467,2457930,"TERMINAL",0,0,"2\t ",,terminal_output +2468,2459028,"TERMINAL",0,0,"3\t ",,terminal_output +2469,2460064,"TERMINAL",0,0,"4\t ",,terminal_output +2470,2461049,"TERMINAL",0,0,"5\t ",,terminal_output +2471,2462125,"TERMINAL",0,0,"6\t ",,terminal_output +2472,2463128,"TERMINAL",0,0,"7\t ",,terminal_output +2473,2464179,"TERMINAL",0,0,"8\t ",,terminal_output +2474,2465210,"TERMINAL",0,0,"9\t ",,terminal_output +2475,2466250,"TERMINAL",0,0,"10\t ",,terminal_output +2476,2467293,"TERMINAL",0,0,"1\t ",,terminal_output +2477,2468351,"TERMINAL",0,0,"2\t ",,terminal_output +2478,2469374,"TERMINAL",0,0,"3\t ",,terminal_output +2479,2470414,"TERMINAL",0,0,"4\t ",,terminal_output +2480,2471456,"TERMINAL",0,0,"5\t ",,terminal_output +2481,2472493,"TERMINAL",0,0,"6\t ",,terminal_output +2482,2473527,"TERMINAL",0,0,"7\t ",,terminal_output +2483,2474593,"TERMINAL",0,0,"8\t ",,terminal_output +2484,2475625,"TERMINAL",0,0,"9\t ",,terminal_output +2485,2476651,"TERMINAL",0,0,"21\t ",,terminal_output 
+2486,2477718,"TERMINAL",0,0,"2\t ",,terminal_output +2487,2478746,"TERMINAL",0,0,"3\t ",,terminal_output +2488,2479817,"TERMINAL",0,0,"4\t ",,terminal_output +2489,2480833,"TERMINAL",0,0,"5\t ",,terminal_output +2490,2481967,"TERMINAL",0,0,"6\t ",,terminal_output +2491,2482994,"TERMINAL",0,0,"7\t ",,terminal_output +2492,2483950,"TERMINAL",0,0,"8\t ",,terminal_output +2493,2484993,"TERMINAL",0,0,"9\t ",,terminal_output +2494,2486032,"TERMINAL",0,0,"30\t ",,terminal_output +2495,2487115,"TERMINAL",0,0,"1\t ",,terminal_output +2496,2488118,"TERMINAL",0,0,"23",,terminal_output +2497,2489174,"TERMINAL",0,0,"3\t ",,terminal_output +2498,2490254,"TERMINAL",0,0,"4\t ",,terminal_output +2499,2491251,"TERMINAL",0,0,"5\t ",,terminal_output +2500,2492297,"TERMINAL",0,0,"6\t ",,terminal_output +2501,2493436,"TERMINAL",0,0,"7\t ",,terminal_output +2502,2494384,"TERMINAL",0,0,"8\t ",,terminal_output +2503,2495425,"TERMINAL",0,0,"9\t ",,terminal_output +2504,2496511,"TERMINAL",0,0,"40\t ",,terminal_output +2505,2497501,"TERMINAL",0,0,"1\t ",,terminal_output +2506,2498558,"TERMINAL",0,0,"2\t ",,terminal_output +2507,2499575,"TERMINAL",0,0,"3\t ",,terminal_output +2508,2500622,"TERMINAL",0,0,"5\t ",,terminal_output +2509,2501731,"TERMINAL",0,0,"6\t ",,terminal_output +2510,2502692,"TERMINAL",0,0,"7\t ",,terminal_output +2511,2503779,"TERMINAL",0,0,"8\t ",,terminal_output +2512,2504802,"TERMINAL",0,0,"9\t ",,terminal_output +2513,2505814,"TERMINAL",0,0,"50\t ",,terminal_output +2514,2506953,"TERMINAL",0,0,"1\t ",,terminal_output +2515,2507977,"TERMINAL",0,0,"2\t ",,terminal_output +2516,2508945,"TERMINAL",0,0,"3\t ",,terminal_output +2517,2509989,"TERMINAL",0,0,"4\t ",,terminal_output +2518,2511029,"TERMINAL",0,0,"5\t ",,terminal_output +2519,2512073,"TERMINAL",0,0,"6\t ",,terminal_output +2520,2513202,"TERMINAL",0,0,"7\t ",,terminal_output +2521,2514188,"TERMINAL",0,0,"8\t ",,terminal_output +2522,2515227,"TERMINAL",0,0,"9\t ",,terminal_output +2523,2516271,"TERMINAL",0,0,"6:00\t ",,terminal_output +2524,2517402,"TERMINAL",0,0,"1\t ",,terminal_output +2525,2518420,"TERMINAL",0,0,"2\t ",,terminal_output +2526,2519398,"TERMINAL",0,0,"3\t ",,terminal_output +2527,2520443,"TERMINAL",0,0,"4\t ",,terminal_output +2528,2521479,"TERMINAL",0,0,"5\t ",,terminal_output +2529,2522517,"TERMINAL",0,0,"6\t ",,terminal_output +2530,2523557,"TERMINAL",0,0,"7\t ",,terminal_output +2531,2524671,"TERMINAL",0,0,"8\t ",,terminal_output +2532,2525693,"TERMINAL",0,0,"10\t ",,terminal_output +2533,2526715,"TERMINAL",0,0,"1\t ",,terminal_output +2534,2527710,"TERMINAL",0,0,"2\t ",,terminal_output +2535,2528752,"TERMINAL",0,0,"3\t ",,terminal_output +2536,2529790,"TERMINAL",0,0,"4\t ",,terminal_output +2537,2530914,"TERMINAL",0,0,"5\t ",,terminal_output +2538,2531939,"TERMINAL",0,0,"6\t ",,terminal_output +2539,2532965,"TERMINAL",0,0,"7\t ",,terminal_output +2540,2533958,"TERMINAL",0,0,"8\t ",,terminal_output +2541,2535012,"TERMINAL",0,0,"9\t ",,terminal_output +2542,2536139,"TERMINAL",0,0,"20\t ",,terminal_output +2543,2537087,"TERMINAL",0,0,"1\t ",,terminal_output +2544,2538131,"TERMINAL",0,0,"2\t ",,terminal_output +2545,2539209,"TERMINAL",0,0,"3\t ",,terminal_output +2546,2540236,"TERMINAL",0,0,"4\t ",,terminal_output +2547,2541256,"TERMINAL",0,0,"5\t ",,terminal_output +2548,2542297,"TERMINAL",0,0,"6\t ",,terminal_output +2549,2543339,"TERMINAL",0,0,"7\t ",,terminal_output +2550,2544379,"TERMINAL",0,0,"8\t ",,terminal_output +2551,2545455,"TERMINAL",0,0,"9\t ",,terminal_output +2552,2546478,"TERMINAL",0,0,"30\t 
",,terminal_output +2553,2547493,"TERMINAL",0,0,"1\t ",,terminal_output +2554,2548634,"TERMINAL",0,0,"2\t ",,terminal_output +2555,2549590,"TERMINAL",0,0,"3\t ",,terminal_output +2556,2550679,"TERMINAL",0,0,"5\t ",,terminal_output +2557,2551663,"TERMINAL",0,0,"6\t ",,terminal_output +2558,2552700,"TERMINAL",0,0,"7\t ",,terminal_output +2559,2553751,"TERMINAL",0,0,"8\t ",,terminal_output +2560,2554779,"TERMINAL",0,0,"9\t ",,terminal_output +2561,2555901,"TERMINAL",0,0,"40\t ",,terminal_output +2562,2556925,"TERMINAL",0,0,"1\t ",,terminal_output +2563,2557907,"TERMINAL",0,0,"2\t ",,terminal_output +2564,2558950,"TERMINAL",0,0,"3\t ",,terminal_output +2565,2559989,"TERMINAL",0,0,"4\t ",,terminal_output +2566,2561028,"TERMINAL",0,0,"5\t ",,terminal_output +2567,2562069,"TERMINAL",0,0,"6\t ",,terminal_output +2568,2563171,"TERMINAL",0,0,"7\t ",,terminal_output +2569,2564194,"TERMINAL",0,0,"8\t ",,terminal_output +2570,2565191,"TERMINAL",0,0,"9\t ",,terminal_output +2571,2566230,"TERMINAL",0,0,"50\t ",,terminal_output +2572,2567275,"TERMINAL",0,0,"1\t ",,terminal_output +2573,2568394,"TERMINAL",0,0,"2\t ",,terminal_output +2574,2569358,"TERMINAL",0,0,"3\t ",,terminal_output +2575,2570443,"TERMINAL",0,0,"4\t ",,terminal_output +2576,2571466,"TERMINAL",0,0,"5\t ",,terminal_output +2577,2572479,"TERMINAL",0,0,"6\t ",,terminal_output +2578,2573514,"TERMINAL",0,0,"7\t ",,terminal_output +2579,2574569,"TERMINAL",0,0,"8\t ",,terminal_output +2580,2575600,"TERMINAL",0,0,"9\t ",,terminal_output +2581,2576638,"TERMINAL",0,0,"7:01\t ",,terminal_output +2582,2577675,"TERMINAL",0,0,"2\t ",,terminal_output +2583,2578738,"TERMINAL",0,0,"3\t ",,terminal_output +2584,2579859,"TERMINAL",0,0,"4\t ",,terminal_output +2585,2580886,"TERMINAL",0,0,"5\t ",,terminal_output +2586,2581853,"TERMINAL",0,0,"6\t ",,terminal_output +2587,2582876,"TERMINAL",0,0,"7\t ",,terminal_output +2588,2583957,"TERMINAL",0,0,"8\t ",,terminal_output +2589,2584984,"TERMINAL",0,0,"9\t ",,terminal_output +2590,2586008,"TERMINAL",0,0,"10\t ",,terminal_output +2591,2587036,"TERMINAL",0,0,"1\t ",,terminal_output +2592,2588073,"TERMINAL",0,0,"2\t ",,terminal_output +2593,2589111,"TERMINAL",0,0,"3\t ",,terminal_output +2594,2590154,"TERMINAL",0,0,"4\t ",,terminal_output +2595,2591236,"TERMINAL",0,0,"5\t ",,terminal_output +2596,2592256,"TERMINAL",0,0,"6\t ",,terminal_output +2597,2593279,"TERMINAL",0,0,"7\t ",,terminal_output +2598,2594339,"TERMINAL",0,0,"8\t ",,terminal_output +2599,2595360,"TERMINAL",0,0,"9\t ",,terminal_output +2600,2596406,"TERMINAL",0,0,"20\t ",,terminal_output +2601,2597461,"TERMINAL",0,0,"1\t ",,terminal_output +2602,2598603,"TERMINAL",0,0,"2\t ",,terminal_output +2603,2599625,"TERMINAL",0,0,"3\t ",,terminal_output +2604,2600579,"TERMINAL",0,0,"4\t ",,terminal_output +2605,2601620,"TERMINAL",0,0,"6\t ",,terminal_output +2606,2602659,"TERMINAL",0,0,"7\t ",,terminal_output +2607,2603697,"TERMINAL",0,0,"8\t ",,terminal_output +2608,2604748,"TERMINAL",0,0,"9\t ",,terminal_output +2609,2605780,"TERMINAL",0,0,"30\t ",,terminal_output +2610,2606822,"TERMINAL",0,0,"1\t ",,terminal_output +2611,2607920,"TERMINAL",0,0,"2\t ",,terminal_output +2612,2608901,"TERMINAL",0,0,"3\t ",,terminal_output +2613,2609942,"TERMINAL",0,0,"4\t ",,terminal_output +2614,2610987,"TERMINAL",0,0,"5\t ",,terminal_output +2615,2612026,"TERMINAL",0,0,"6\t ",,terminal_output +2616,2613065,"TERMINAL",0,0,"7\t ",,terminal_output +2617,2614111,"TERMINAL",0,0,"8\t ",,terminal_output +2618,2615151,"TERMINAL",0,0,"9\t ",,terminal_output 
+2619,2616195,"TERMINAL",0,0,"40\t ",,terminal_output +2620,2617237,"TERMINAL",0,0,"1\t ",,terminal_output +2621,2618469,"TERMINAL",0,0,"2\t ",,terminal_output +2622,2619341,"TERMINAL",0,0,"3\t ",,terminal_output +2623,2620364,"TERMINAL",0,0,"4\t ",,terminal_output +2624,2621438,"TERMINAL",0,0,"5\t ",,terminal_output +2625,2622464,"TERMINAL",0,0,"6\t ",,terminal_output +2626,2623487,"TERMINAL",0,0,"7\t ",,terminal_output +2627,2624603,"TERMINAL",0,0,"8\t ",,terminal_output +2628,2625640,"TERMINAL",0,0,"9\t ",,terminal_output +2629,2626596,"TERMINAL",0,0,"50\t ",,terminal_output +2630,2627656,"TERMINAL",0,0,"2\t ",,terminal_output +2631,2628709,"TERMINAL",0,0,"3\t ",,terminal_output +2632,2629735,"TERMINAL",0,0,"4\t ",,terminal_output +2633,2630760,"TERMINAL",0,0,"5\t ",,terminal_output +2634,2631884,"TERMINAL",0,0,"6\t ",,terminal_output +2635,2632846,"TERMINAL",0,0,"7\t ",,terminal_output +2636,2633932,"TERMINAL",0,0,"8\t ",,terminal_output +2637,2634934,"TERMINAL",0,0,"9\t ",,terminal_output +2638,2635976,"TERMINAL",0,0,"8:00\t ",,terminal_output +2639,2637105,"TERMINAL",0,0,"1\t ",,terminal_output +2640,2638131,"TERMINAL",0,0,"2\t ",,terminal_output +2641,2639154,"TERMINAL",0,0,"3\t ",,terminal_output +2642,2640144,"TERMINAL",0,0,"4\t ",,terminal_output +2643,2641214,"TERMINAL",0,0,"5\t ",,terminal_output +2644,2642235,"TERMINAL",0,0,"6\t ",,terminal_output +2645,2643289,"TERMINAL",0,0,"7\t ",,terminal_output +2646,2644384,"TERMINAL",0,0,"8\t ",,terminal_output +2647,2645409,"TERMINAL",0,0,"9\t ",,terminal_output +2648,2646396,"TERMINAL",0,0,"10\t ",,terminal_output +2649,2647461,"TERMINAL",0,0,"1\t ",,terminal_output +2650,2648486,"TERMINAL",0,0,"2\t ",,terminal_output +2651,2649596,"TERMINAL",0,0,"3\t ",,terminal_output +2652,2650563,"TERMINAL",0,0,"4\t ",,terminal_output +2653,2651601,"TERMINAL",0,0,"5\t ",,terminal_output +2654,2652674,"TERMINAL",0,0,"7\t ",,terminal_output +2655,2653798,"TERMINAL",0,0,"8\t ",,terminal_output +2656,2654736,"TERMINAL",0,0,"9\t ",,terminal_output +2657,2655790,"TERMINAL",0,0,"20\t ",,terminal_output +2658,2656859,"TERMINAL",0,0,"1\t ",,terminal_output +2659,2657895,"TERMINAL",0,0,"2\t ",,terminal_output +2660,2658915,"TERMINAL",0,0,"3\t ",,terminal_output +2661,2659963,"TERMINAL",0,0,"4\t ",,terminal_output +2662,2661068,"TERMINAL",0,0,"5\t ",,terminal_output +2663,2662091,"TERMINAL",0,0,"6\t ",,terminal_output +2664,2663086,"TERMINAL",0,0,"7\t ",,terminal_output +2665,2664149,"TERMINAL",0,0,"8 9",,terminal_output +2666,2665270,"TERMINAL",0,0,"9\t ",,terminal_output +2667,2666292,"TERMINAL",0,0,"30\t ",,terminal_output +2668,2667253,"TERMINAL",0,0,"1\t ",,terminal_output +2669,2668300,"TERMINAL",0,0,"2\t ",,terminal_output +2670,2669345,"TERMINAL",0,0,"3\t ",,terminal_output +2671,2670391,"TERMINAL",0,0,"4\t ",,terminal_output +2672,2671425,"TERMINAL",0,0,"5\t ",,terminal_output +2673,2672459,"TERMINAL",0,0,"6\t ",,terminal_output +2674,2673496,"TERMINAL",0,0,"7\t ",,terminal_output +2675,2674537,"TERMINAL",0,0,"8\t ",,terminal_output +2676,2675611,"TERMINAL",0,0,"9\t ",,terminal_output +2677,2676631,"TERMINAL",0,0,"41\t ",,terminal_output +2678,2677699,"TERMINAL",0,0,"2\t ",,terminal_output +2679,2678782,"TERMINAL",0,0,"3\t ",,terminal_output +2680,2679739,"TERMINAL",0,0,"4\t ",,terminal_output +2681,2680830,"TERMINAL",0,0,"5\t ",,terminal_output +2682,2681817,"TERMINAL",0,0,"6\t ",,terminal_output +2683,2682885,"TERMINAL",0,0,"7\t ",,terminal_output +2684,2683902,"TERMINAL",0,0,"8\t ",,terminal_output +2685,2685030,"TERMINAL",0,0,"9\t 
",,terminal_output +2686,2685980,"TERMINAL",0,0,"50\t ",,terminal_output +2687,2687076,"TERMINAL",0,0,"1\t ",,terminal_output +2688,2688105,"TERMINAL",0,0,"2\t ",,terminal_output +2689,2689127,"TERMINAL",0,0,"3\t ",,terminal_output +2690,2690146,"TERMINAL",0,0,"4\t ",,terminal_output +2691,2691274,"TERMINAL",0,0,"5\t ",,terminal_output +2692,2692232,"TERMINAL",0,0,"6\t ",,terminal_output +2693,2693376,"TERMINAL",0,0,"7\t ",,terminal_output +2694,2694319,"TERMINAL",0,0,"8\t ",,terminal_output +2695,2695342,"TERMINAL",0,0,"9\t ",,terminal_output +2696,2696377,"TERMINAL",0,0,"9:00\t ",,terminal_output +2697,2697460,"TERMINAL",0,0,"1\t ",,terminal_output +2698,2698546,"TERMINAL",0,0,"2\t ",,terminal_output +2699,2699568,"TERMINAL",0,0,"3\t ",,terminal_output +2700,2700551,"TERMINAL",0,0,"4\t ",,terminal_output +2701,2701618,"TERMINAL",0,0,"5\t ",,terminal_output +2702,2702687,"TERMINAL",0,0,"7\t ",,terminal_output +2703,2703688,"TERMINAL",0,0,"8\t ",,terminal_output +2704,2704719,"TERMINAL",0,0,"9\t ",,terminal_output +2705,2705758,"TERMINAL",0,0,"10\t ",,terminal_output +2706,2706794,"TERMINAL",0,0,"1\t ",,terminal_output +2707,2707866,"TERMINAL",0,0,"2\t ",,terminal_output +2708,2708876,"TERMINAL",0,0,"3\t ",,terminal_output +2709,2710007,"TERMINAL",0,0,"4\t ",,terminal_output +2710,2711039,"TERMINAL",0,0,"5\t ",,terminal_output +2711,2712000,"TERMINAL",0,0,"6\t ",,terminal_output +2712,2713088,"TERMINAL",0,0,"7\t ",,terminal_output +2713,2714085,"TERMINAL",0,0,"8\t ",,terminal_output +2714,2715136,"TERMINAL",0,0,"9\t ",,terminal_output +2715,2716175,"TERMINAL",0,0,"20\t ",,terminal_output +2716,2717239,"TERMINAL",0,0,"1\t ",,terminal_output +2717,2718258,"TERMINAL",0,0,"210",,terminal_output +2718,2719331,"TERMINAL",0,0,"3\t ",,terminal_output +2719,2720336,"TERMINAL",0,0,"4\t ",,terminal_output +2720,2721383,"TERMINAL",0,0,"5\t ",,terminal_output +2721,2722461,"TERMINAL",0,0,"6\t ",,terminal_output +2722,2723469,"TERMINAL",0,0,"7\t ",,terminal_output +2723,2724510,"TERMINAL",0,0,"8\t ",,terminal_output +2724,2725579,"TERMINAL",0,0,"91",,terminal_output +2725,2726605,"TERMINAL",0,0,"30\t ",,terminal_output +2726,2727673,"TERMINAL",0,0,"2\t ",,terminal_output +2727,2728670,"TERMINAL",0,0,"3\t ",,terminal_output +2728,2729779,"TERMINAL",0,0,"4\t ",,terminal_output +2729,2730788,"TERMINAL",0,0,"5\t ",,terminal_output +2730,2731825,"TERMINAL",0,0,"6\t ",,terminal_output +2731,2732837,"TERMINAL",0,0,"7\t ",,terminal_output +2732,2733975,"TERMINAL",0,0,"8\t ",,terminal_output +2733,2735002,"TERMINAL",0,0,"9\t ",,terminal_output +2734,2735964,"TERMINAL",0,0,"40\t ",,terminal_output +2735,2737050,"TERMINAL",0,0,"1\t ",,terminal_output +2736,2738050,"TERMINAL",0,0,"20",,terminal_output +2737,2739096,"TERMINAL",0,0,"3\t ",,terminal_output +2738,2740222,"TERMINAL",0,0,"4\t ",,terminal_output +2739,2741245,"TERMINAL",0,0,"5\t ",,terminal_output +2740,2742249,"TERMINAL",0,0,"6\t ",,terminal_output +2741,2743258,"TERMINAL",0,0,"7\t ",,terminal_output +2742,2744321,"TERMINAL",0,0,"8\t ",,terminal_output +2743,2745342,"TERMINAL",0,0,"9\t ",,terminal_output +2744,2746380,"TERMINAL",0,0,"50\t ",,terminal_output +2745,2747452,"TERMINAL",0,0,"1\t ",,terminal_output +2746,2748464,"TERMINAL",0,0,"2\t ",,terminal_output +2747,2749499,"TERMINAL",0,0,"3\t ",,terminal_output +2748,2750539,"TERMINAL",0,0,"4\t ",,terminal_output +2749,2751576,"TERMINAL",0,0,"5\t ",,terminal_output +2750,2752617,"TERMINAL",0,0,"7\t ",,terminal_output +2751,2753746,"TERMINAL",0,0,"8\t ",,terminal_output +2752,2755106,"TERMINAL",0,0,"9\t 
",,terminal_output +2753,2756145,"TERMINAL",0,0,"10:00\t ",,terminal_output +2754,2757184,"TERMINAL",0,0,"1\t ",,terminal_output +2755,2758246,"TERMINAL",0,0,"2\t ",,terminal_output +2756,2759271,"TERMINAL",0,0,"3\t ",,terminal_output +2757,2760306,"TERMINAL",0,0,"4\t ",,terminal_output +2758,2761353,"TERMINAL",0,0,"5\t ",,terminal_output +2759,2762381,"TERMINAL",0,0,"6\t ",,terminal_output +2760,2763424,"TERMINAL",0,0,"7\t ",,terminal_output +2761,2764462,"TERMINAL",0,0,"8\t ",,terminal_output +2762,2765518,"TERMINAL",0,0,"9\t ",,terminal_output +2763,2766542,"TERMINAL",0,0,"10\t ",,terminal_output +2764,2767575,"TERMINAL",0,0,"1\t ",,terminal_output +2765,2768617,"TERMINAL",0,0,"3\t ",,terminal_output +2766,2769670,"TERMINAL",0,0,"4\t ",,terminal_output +2767,2770704,"TERMINAL",0,0,"5\t ",,terminal_output +2768,2771746,"TERMINAL",0,0,"6\t ",,terminal_output +2769,2772787,"TERMINAL",0,0,"7\t ",,terminal_output +2770,2773913,"TERMINAL",0,0,"8\t ",,terminal_output +2771,2774937,"TERMINAL",0,0,"9\t ",,terminal_output +2772,2775963,"TERMINAL",0,0,"20\t ",,terminal_output +2773,2776988,"TERMINAL",0,0,"1\t ",,terminal_output +2774,2778010,"TERMINAL",0,0,"2\t ",,terminal_output +2775,2779240,"TERMINAL",0,0,"3\t ",,terminal_output +2776,2780161,"TERMINAL",0,0,"4\t ",,terminal_output +2777,2781183,"TERMINAL",0,0,"5\t ",,terminal_output +2778,2782207,"TERMINAL",0,0,"6\t ",,terminal_output +2779,2783177,"TERMINAL",0,0,"7\t ",,terminal_output +2780,2784219,"TERMINAL",0,0,"8\t ",,terminal_output +2781,2785279,"TERMINAL",0,0,"9\t ",,terminal_output +2782,2786306,"TERMINAL",0,0,"30\t ",,terminal_output +2783,2787347,"TERMINAL",0,0,"1\t ",,terminal_output +2784,2788386,"TERMINAL",0,0,"2\t ",,terminal_output +2785,2789432,"TERMINAL",0,0,"3\t ",,terminal_output +2786,2790469,"TERMINAL",0,0,"4\t ",,terminal_output +2787,2791511,"TERMINAL",0,0,"5\t ",,terminal_output +2788,2792551,"TERMINAL",0,0,"6\t ",,terminal_output +2789,2793682,"TERMINAL",0,0,"7\t ",,terminal_output +2790,2794705,"TERMINAL",0,0,"9\t ",,terminal_output +2791,2795676,"TERMINAL",0,0,"40\t ",,terminal_output +2792,2796751,"TERMINAL",0,0,"1\t ",,terminal_output +2793,2797758,"TERMINAL",0,0,"2\t ",,terminal_output +2794,2798798,"TERMINAL",0,0,"3\t ",,terminal_output +2795,2799839,"TERMINAL",0,0,"4\t ",,terminal_output +2796,2800882,"TERMINAL",0,0,"5\t ",,terminal_output +2797,2801922,"TERMINAL",0,0,"6\t ",,terminal_output +2798,2803031,"TERMINAL",0,0,"7\t ",,terminal_output +2799,2804018,"TERMINAL",0,0,"8\t ",,terminal_output +2800,2805069,"TERMINAL",0,0,"9\t ",,terminal_output +2801,2806091,"TERMINAL",0,0,"50\t ",,terminal_output +2802,2807194,"TERMINAL",0,0,"1\t ",,terminal_output +2803,2808177,"TERMINAL",0,0,"2\t ",,terminal_output +2804,2809214,"TERMINAL",0,0,"3\t ",,terminal_output +2805,2810275,"TERMINAL",0,0,"4\t ",,terminal_output +2806,2811291,"TERMINAL",0,0,"5\t ",,terminal_output +2807,2812331,"TERMINAL",0,0,"6\t ",,terminal_output +2808,2813367,"TERMINAL",0,0,"7\t ",,terminal_output +2809,2814409,"TERMINAL",0,0,"8\t ",,terminal_output +2810,2815449,"TERMINAL",0,0,"9\t ",,terminal_output +2811,2816516,"TERMINAL",0,0,"1:001",,terminal_output +2812,2817542,"TERMINAL",0,0,"1\t ",,terminal_output +2813,2818664,"TERMINAL",0,0,"22",,terminal_output +2814,2819688,"TERMINAL",0,0,"4\t ",,terminal_output +2815,2820665,"TERMINAL",0,0,"5\t ",,terminal_output +2816,2821712,"TERMINAL",0,0,"6\t ",,terminal_output +2817,2822762,"TERMINAL",0,0,"7\t ",,terminal_output +2818,2823887,"TERMINAL",0,0,"8\t ",,terminal_output 
+2819,2824877,"TERMINAL",0,0,"9\t ",,terminal_output +2820,2825935,"TERMINAL",0,0,"10\t ",,terminal_output +2821,2826959,"TERMINAL",0,0,"1\t ",,terminal_output +2822,2827983,"TERMINAL",0,0,"2\t ",,terminal_output +2823,2829008,"TERMINAL",0,0,"3\t ",,terminal_output +2824,2830040,"TERMINAL",0,0,"4\t ",,terminal_output +2825,2831084,"TERMINAL",0,0,"5\t ",,terminal_output +2826,2832181,"TERMINAL",0,0,"6\t ",,terminal_output +2827,2833164,"TERMINAL",0,0,"7\t ",,terminal_output +2828,2834228,"TERMINAL",0,0,"8\t ",,terminal_output +2829,2835355,"TERMINAL",0,0,"9\t ",,terminal_output +2830,2836291,"TERMINAL",0,0,"20\t ",,terminal_output +2831,2837330,"TERMINAL",0,0,"1\t ",,terminal_output +2832,2838364,"TERMINAL",0,0,"2\t ",,terminal_output +2833,2839405,"TERMINAL",0,0,"3\t ",,terminal_output +2834,2840449,"TERMINAL",0,0,"4\t ",,terminal_output +2835,2841487,"TERMINAL",0,0,"5\t ",,terminal_output +2836,2842527,"TERMINAL",0,0,"6\t ",,terminal_output +2837,2843567,"TERMINAL",0,0,"7\t ",,terminal_output +2838,2844606,"TERMINAL",0,0,"8\t ",,terminal_output +2839,2845647,"TERMINAL",0,0,"30\t ",,terminal_output +2840,2846687,"TERMINAL",0,0,"1\t ",,terminal_output +2841,2847727,"TERMINAL",0,0,"2\t ",,terminal_output +2842,2848764,"TERMINAL",0,0,"3\t ",,terminal_output +2843,2849800,"TERMINAL",0,0,"4\t ",,terminal_output +2844,2850843,"TERMINAL",0,0,"5\t ",,terminal_output +2845,2851889,"TERMINAL",0,0,"6\t ",,terminal_output +2846,2852968,"TERMINAL",0,0,"7\t ",,terminal_output +2847,2853993,"TERMINAL",0,0,"8\t ",,terminal_output +2848,2855016,"TERMINAL",0,0,"9\t ",,terminal_output +2849,2855066,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\nsalloc: Relinquishing job allocation 3306221\r\nsalloc: Job allocation 3306221 has been revoked.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +2850,2856149,"TERMINAL",0,0,"404",,terminal_output +2851,2857093,"TERMINAL",0,0,"1\t ",,terminal_output +2852,2858137,"TERMINAL",0,0,"2\t ",,terminal_output +2853,2859176,"TERMINAL",0,0,"3\t ",,terminal_output +2854,2859227,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G",,terminal_command +2855,2859281,"TERMINAL",0,0,"]633;E;2025-06-30 15:11:43 salloc --time=01:00:00 --partition=accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;Csalloc: Pending job allocation 3306282\r\nsalloc: job 3306282 queued and waiting for resources\r\n",,terminal_output +2856,2860251,"TERMINAL",0,0,"4\t ",,terminal_output +2857,2861062,"TERMINAL",0,0,"watch",,terminal_focus +2858,2861256,"TERMINAL",0,0,"5\t ",,terminal_output +2859,2861463,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +2860,2862456,"TERMINAL",0,0,"queue",,terminal_command +2861,2862506,"TERMINAL",0,0,"]633;E;2025-06-30 15:11:46 queue;ead59344-49db-4336-9336-47fae706e637]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Jun 30 15:11:46 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3306221 accelerat interact tum_cte0 CG\t9:41\t 1 hkn07193306282 accelerat interact tum_cte0 PD\t0:00\t 2 (Priority)",,terminal_output +2862,2863562,"TERMINAL",0,0,"7\t ",,terminal_output +2863,2864647,"TERMINAL",0,0,"8\t ",,terminal_output +2864,2865639,"TERMINAL",0,0,"50\t ",,terminal_output +2865,2866690,"TERMINAL",0,0,"\r1\t ",,terminal_output +2866,2867755,"TERMINAL",0,0,"2\t ",,terminal_output 
+2867,2868773,"TERMINAL",0,0,"3\t ",,terminal_output +2868,2869826,"TERMINAL",0,0,"4\t ",,terminal_output +2869,2870869,"TERMINAL",0,0,"5\t ",,terminal_output +2870,2871916,"TERMINAL",0,0,"6\t ",,terminal_output +2871,2872661,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +2872,2873759,"TERMINAL",0,0,"idling",,terminal_command +2873,2873829,"TERMINAL",0,0,"]633;E;2025-06-30 15:11:58 idling;ead59344-49db-4336-9336-47fae706e637]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 15:11:58 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 96 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +2874,2874868,"TERMINAL",0,0,"9\t ",,terminal_output +2875,2875910,"TERMINAL",0,0,"2:00\t ",,terminal_output +2876,2876947,"TERMINAL",0,0,"1\t ",,terminal_output +2877,2878058,"TERMINAL",0,0,"2\t ",,terminal_output +2878,2879083,"TERMINAL",0,0,"3\t ",,terminal_output +2879,2880105,"TERMINAL",0,0,"4\t ",,terminal_output +2880,2881129,"TERMINAL",0,0,"5\t ",,terminal_output +2881,2882152,"TERMINAL",0,0,"6\t ",,terminal_output +2882,2883192,"TERMINAL",0,0,"7\t ",,terminal_output +2883,2884236,"TERMINAL",0,0,"8\t ",,terminal_output +2884,2885323,"TERMINAL",0,0,"9\t ",,terminal_output +2885,2886317,"TERMINAL",0,0,"10\t ",,terminal_output +2886,2887365,"TERMINAL",0,0,"1\t ",,terminal_output +2887,2888399,"TERMINAL",0,0,"2\t ",,terminal_output +2888,2889526,"TERMINAL",0,0,"3\t ",,terminal_output +2889,2890486,"TERMINAL",0,0,"4\t ",,terminal_output +2890,2891527,"TERMINAL",0,0,"5\t ",,terminal_output +2891,2892570,"TERMINAL",0,0,"6\t ",,terminal_output +2892,2893613,"TERMINAL",0,0,"7\t ",,terminal_output +2893,2894649,"TERMINAL",0,0,"9\t ",,terminal_output +2894,2895772,"TERMINAL",0,0,"20\t ",,terminal_output +2895,2896565,"TERMINAL",0,0,"salloc",,terminal_focus +2896,2896731,"TERMINAL",0,0,"1\t ",,terminal_output +2897,2897771,"TERMINAL",0,0,"2\t ",,terminal_output +2898,2898356,"TERMINAL",0,0,"^Csalloc: Job allocation 3306282 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;1",,terminal_output +2899,2898818,"TERMINAL",0,0,"3\t ",,terminal_output +2900,2899868,"TERMINAL",0,0,"4\t ",,terminal_output +2901,2900899,"TERMINAL",0,0,"5\t ",,terminal_output +2902,2901947,"TERMINAL",0,0,"6\t ",,terminal_output +2903,2902797,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G",,terminal_command +2904,2902835,"TERMINAL",0,0,"]633;E;2025-06-30 15:12:27 salloc --time=01:00:00 --partition=dev_accelerated --nodes=2 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;C",,terminal_output +2905,2902900,"TERMINAL",0,0,"salloc: Granted job allocation 3306283\r\n",,terminal_output +2906,2903008,"TERMINAL",0,0,"70",,terminal_output +2907,2903022,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +2908,2904065,"TERMINAL",0,0,"8\t ",,terminal_output +2909,2904529,"TERMINAL",0,0,"watch",,terminal_focus +2910,2905060,"TERMINAL",0,0,"9\t ",,terminal_output +2911,2906103,"TERMINAL",0,0,"30\t ",,terminal_output +2912,2907242,"TERMINAL",0,0,"1\t ",,terminal_output +2913,2908180,"TERMINAL",0,0,"2\t 
",,terminal_output +2914,2909289,"TERMINAL",0,0,"3\t ",,terminal_output +2915,2910326,"TERMINAL",0,0,"4\t ",,terminal_output +2916,2910960,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +2917,2912014,"TERMINAL",0,0,"queue",,terminal_command +2918,2912059,"TERMINAL",0,0,"]633;E;2025-06-30 15:12:36 queue;ead59344-49db-4336-9336-47fae706e637]633;C",,terminal_output +2919,2912124,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Jun 30 15:12:36 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3306283 dev_accel interact tum_cte0 R\t0:09\t 2 hkn[0402-0403]",,terminal_output +2920,2913134,"TERMINAL",0,0,"710",,terminal_output +2921,2913177,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +2922,2914151,"TERMINAL",0,0,"salloc",,terminal_focus +2923,2930038,"TERMINAL",0,0,"salloc: Nodes hkn[0402-0403] are ready for job\r\n",,terminal_output +2924,2930836,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h[tum_cte0515@hkn0402 jafar]$ ",,terminal_output +2925,2948669,"TERMINAL",0,0,"[?25lso[?25h[?25lo[?25h",,terminal_output +2926,2948874,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +2927,2949077,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +2928,2949283,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +2929,2949371,"TERMINAL",0,0,"[?25le[?25h[?25l [?25h",,terminal_output +2930,2949542,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +2931,2949692,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +2932,2949897,"TERMINAL",0,0,"env/",,terminal_output +2933,2950101,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +2934,2950306,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2935,2950483,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +2936,2950621,"TERMINAL",0,0,"/",,terminal_output +2937,2950950,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +2938,2951251,"TERMINAL",0,0,"ctivate",,terminal_output +2939,2951756,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +2940,2955378,"TERMINAL",0,0,"g",,terminal_output +2941,2955668,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2942,2955771,"TERMINAL",0,0,"[?25l [?25h[?25lt[?25h",,terminal_output +2943,2955914,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2944,2956747,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2945,2956814,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2946,2957279,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +2947,2957485,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +2948,2957674,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +2949,2957726,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +2950,2957799,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +2951,2957851,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +2952,2958060,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +2953,2958200,"TERMINAL",0,0," add-wandb-name-and-tags\r\n* convert-to-jax-array-in-iter\r\n dont-let-tf-see-gpu\r\n main\r\n preprocess_video\r\n tmp\r\n\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +2954,2959528,"TERMINAL",0,0,"g",,terminal_output +2955,2959709,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2956,2960033,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2957,2960150,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2958,2960424,"TERMINAL",0,0,"[?25lc[?25h[?25lh[?25h",,terminal_output 
+2959,2960576,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +2960,2960686,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +2961,2960932,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +2962,2961123,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +2963,2961259,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +2964,2961330,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +2965,2961395,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +2966,2961561,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +2967,2961631,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +2968,2961713,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +2969,2961799,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +2970,2962037,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2971,2962256,"TERMINAL",0,0,"M\tutils/dataloader.py\r\n",,terminal_output +2972,2962378,"TERMINAL",0,0,"Switched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +2973,2962507,"",0,0,"Switched from branch 'convert-to-jax-array-in-iter' to 'main'",,git_branch_checkout +2974,2964471,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +2975,2964693,"TERMINAL",0,0,"s': source .venv/bin/activate\r",,terminal_output +2976,2964807,"TERMINAL",0,0,"[?25lsh': sh scripts_horeka/train_tokenizer.sh [?25h",,terminal_output +2977,2967349,"TERMINAL",0,0,"\r[13@jafar) [tum_cte0515@hkn0402 jafar]$ sh scripts_horeka/train_tokenizer.sh\r\n[?2004l\r",,terminal_output +2978,2967512,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3224824\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1751289147\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751292747\r\nSLURM_PMI2_SRUN_PORT=36863\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306283\r\nSLURM_PTY_PORT=36049\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=204800\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn[0402-0403]\r\nSLURM_SRUN_COMM_PORT=46875\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306283\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=46875\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0402-0403]\r\n",,terminal_output +2979,2967633,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +2980,2969955,"scripts_horeka/train_tokenizer.sh",0,0,"#!/usr/bin/env bash\n\nmodule unload mpi/openmpi/5.0\nmodule unload 
devel/cuda/12.4\nsource .venv/bin/activate\n\n# define the workspace root before deriving paths from it\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\n\njob_name=""debug""\nslurm_job_id=""0000""\n\n# braces keep the underscore out of the first variable name\nCHECKPOINT_DIR=$ws_dir/checkpoints/${job_name}_${slurm_job_id}\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=16 \\n --min_lr=4.24e-4 \\n --max_lr=4.24e-4 \\n --log_image_interval=100 \\n --log \\n --name=test-wandb-tags-$slurm_job_id \\n --tags test tokenizer debug \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir",shellscript,tab 
one has already been registered\r\n2025-06-30 15:13:57.423374: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:13:57.423303: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:13:57.423308: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289237.453463 4159778 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289237.453574 4159780 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289237.453418 4159781 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289237.453674 4159779 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751289237.464475 4159780 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751289237.464347 4159778 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751289237.464853 4159779 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751289237.465079 4159781 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n",,terminal_output +2983,2993252,"TERMINAL",0,0,"W0000 00:00:1751289237.673060 3225047 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673090 3225047 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673093 3225047 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673095 3225047 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673066 3225048 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673095 3225048 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673098 3225048 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673100 3225048 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673066 3225049 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673099 3225049 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673101 3225049 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673103 3225049 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673064 3225050 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673093 3225050 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673095 3225050 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673096 3225050 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673416 4159778 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673465 4159778 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673468 4159778 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673470 4159778 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673419 4159779 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673447 4159779 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673450 4159779 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673451 4159779 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673411 4159780 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673453 4159780 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673455 4159780 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673457 4159780 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673412 4159781 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673444 4159781 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673446 4159781 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289237.673447 4159781 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +2984,3027053,"TERMINAL",0,0,"W0000 00:00:1751289271.409576 3225047 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289271.409578 3225049 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289271.409675 3225050 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289271.409685 3225048 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289271.409298 4159778 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289271.409318 4159780 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289271.409299 4159781 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289271.409413 4159779 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +2985,3030737,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +2986,3031280,"TERMINAL",0,0,"2025-06-30 15:14:35.694791: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:14:35.694795: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:14:35.696543: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:14:35.702575: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2987,3031344,"TERMINAL",0,0,"2025-06-30 15:14:35.736109: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2988,3031533,"TERMINAL",0,0,"2025-06-30 15:14:35.957449: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:14:35.962673: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2989,3031603,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_151435-6jn2q4bj\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run test-wandb-tags-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/6jn2q4bj\r\n",,terminal_output +2990,3034221,"TERMINAL",0,0,"2025-06-30 15:14:38.546636: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2991,3044921,"TERMINAL",0,0,"2025-06-30 15:14:49.347585: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2992,3044959,"scripts_horeka/train_tokenizer.sh",530,0,"",shellscript,selection_mouse +2993,3044974,"scripts_horeka/train_tokenizer.sh",529,0,"",shellscript,selection_command +2994,3045060,"TERMINAL",0,0,"2025-06-30 15:14:49.420852: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:14:49.479868: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2995,3045318,"TERMINAL",0,0,"2025-06-30 15:14:49.743379: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2996,3047016,"TERMINAL",0,0,"2025-06-30 15:14:51.438861: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2997,3047278,"TERMINAL",0,0,"2025-06-30 15:14:51.698980: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2998,3047339,"TERMINAL",0,0,"2025-06-30 15:14:51.746829: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +2999,3047448,"TERMINAL",0,0,"2025-06-30 15:14:51.820966: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3000,3047638,"TERMINAL",0,0,"2025-06-30 15:14:52.057347: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3001,3048044,"TERMINAL",0,0,"2025-06-30 15:14:52.384023: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3002,3048350,"TERMINAL",0,0,"2025-06-30 15:14:52.753523: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3003,3049066,"TERMINAL",0,0,"2025-06-30 15:14:53.400946: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3004,3049654,"TERMINAL",0,0,"2025-06-30 15:14:54.078654: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3005,3050654,"TERMINAL",0,0,"2025-06-30 15:14:55.081298: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3006,3051009,"TERMINAL",0,0,"2025-06-30 15:14:55.394165: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3007,3051533,"TERMINAL",0,0,"2025-06-30 15:14:55.955315: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3008,3091357,"TERMINAL",0,0,"2025-06-30 15:15:35.692252: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3009,3092893,"TERMINAL",0,0,"2025-06-30 15:15:37.286679: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:15:37.286833: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3010,3093921,"TERMINAL",0,0,"2025-06-30 15:15:38.241743: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3011,3100166,"TERMINAL",0,0,"2025-06-30 15:15:44.514724: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3012,3244242,"TERMINAL",0,0,"bash",,terminal_focus +3013,3247248,"TERMINAL",0,0,"queue",,terminal_command +3014,3247299,"TERMINAL",0,0,"]633;E;2025-06-30 15:18:11 queue;ead59344-49db-4336-9336-47fae706e637]633;C",,terminal_output +3015,3247418,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Jun 30 15:18:11 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3306283 dev_accel interact tum_cte0 R\t5:44\t 2 hkn[0402-0403]",,terminal_output +3016,3248443,"TERMINAL",0,0,"25",,terminal_output +3017,3249467,"TERMINAL",0,0,"36",,terminal_output +3018,3250632,"TERMINAL",0,0,"47",,terminal_output +3019,3251628,"TERMINAL",0,0,"59",,terminal_output +3020,3252629,"TERMINAL",0,0,"750",,terminal_output +3021,3253453,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +3022,3272470,"TERMINAL",0,0,"srun --overlap --jobid=3254710 /bin/bash",,terminal_command +3023,3272491,"TERMINAL",0,0,"]633;E;2025-06-30 15:18:36 srun --overlap --jobid=3254710 /bin/bash;ead59344-49db-4336-9336-47fae706e637]633;Csrun: error: Unable to confirm allocation for job 3254710: Invalid job id specified\r\nsrun: Check SLURM_JOB_ID environment variable. 
Expired or invalid job 3254710\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;1",,terminal_output +3024,3278707,"TERMINAL",0,0,"srun --overlap 3254710 /bin/bash",,terminal_command +3025,3278718,"TERMINAL",0,0,"]633;E;2025-06-30 15:18:43 srun --overlap 3254710 /bin/bash;ead59344-49db-4336-9336-47fae706e637]633;Csrun: error: Unable to allocate resources: Time limit specification required, but not provided\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;1",,terminal_output +3026,3284215,"TERMINAL",0,0,"srun --overlap --job_id3254710 /bin/bash",,terminal_command +3027,3288772,"TERMINAL",0,0,"srun --overlap --job_id=3254710 /bin/bash",,terminal_command +3028,3288794,"TERMINAL",0,0,"]633;E;2025-06-30 15:18:53 srun --overlap --job_id=3254710 /bin/bash;ead59344-49db-4336-9336-47fae706e637]633;Csrun: unrecognized option '--job_id=3254710'\r\nTry ""srun --help"" for more information\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;255",,terminal_output +3029,3292690,"TERMINAL",0,0,"srun --help",,terminal_command +3030,3292706,"TERMINAL",0,0,"]633;E;2025-06-30 15:18:57 srun --help;ead59344-49db-4336-9336-47fae706e637]633;CUsage: srun [OPTIONS(0)... [executable(0) [args(0)...]]] [ : [OPTIONS(N)...]] executable(N) [args(N)...]\r\n\r\nParallel run options:\r\n -A, --account=name charge job to specified account\r\n --acctg-freq== accounting and profiling sampling\r\n intervals. Supported datatypes:\r\n task= energy=\r\n network= filesystem=\r\n --bb= burst buffer specifications\r\n --bbf= burst buffer specification file\r\n --bcast= Copy executable file to compute nodes\r\n --bcast-exclude= Shared object directory paths to exclude\r\n -b, --begin=time defer job until HH:MM MM/DD/YY\r\n -c, --cpus-per-task=ncpus number of cpus required per task\r\n --comment=name arbitrary comment\r\n --compress[=library] data compression library used with --bcast\r\n --container Path to OCI container bundle\r\n --container-id OCI container ID\r\n --cpu-freq=min[-max[:gov]] requested cpu frequency (and governor)\r\n -d, --dependency=type:jobid[:time] defer job until condition on jobid is satisfied\r\n --deadline=time remove the job if no ending possible before\r\n this deadline (start > (deadline - time[-min]))\r\n --delay-boot=mins delay boot for desired node features\r\n -D, --chdir=path change remote current working directory\r\n --export=env_vars|NONE environment variables passed to launcher with\r\n optional values or NONE (pass no variables)\r\n -e, --error=err location of stderr redirection\r\n --epilog=program run ""program"" after launching job step\r\n -E, --preserve-env env vars for node and task counts override\r\n command-line flags\r\n --gres=list required generic resources\r\n --gres-flags=opts flags related to GRES management\r\n -H, --hold submit job in held state\r\n -i, --input=in location of stdin redirection\r\n -I, --immediate[=secs] exit if resources not available in ""secs""\r\n --jobid=id run under already allocated job\r\n -J, --job-name=jobname name of job\r\n -k, --no-kill do not kill job on node failure\r\n -K, --kill-on-bad-exit kill the job if any task terminates with a\r\n non-zero exit code\r\n -l, --label prepend task number to lines of stdout/err\r\n -L, --licenses=names required license, comma separated\r\n -M, --clusters=names Comma separated list of clusters to issue\r\n commands to. 
Default is current cluster.\r\n Name of 'all' will submit to run on all clusters.\r\n NOTE: SlurmDBD must up.\r\n -m, --distribution=type distribution method for processes to nodes\r\n (type = block|cyclic|arbitrary)\r\n --mail-type=type notify on state change: BEGIN, END, FAIL or ALL\r\n --mail-user=user who to send email notification for job state\r\n changes\r\n --mcs-label=mcs mcs label if mcs plugin mcs/group is used\r\n --mpi=type type of MPI being used\r\n --multi-prog if set the program name specified is the\r\n configuration specification for multiple programs\r\n -n, --ntasks=ntasks number of tasks to run\r\n --nice[=value] decrease scheduling priority by value\r\n --ntasks-per-node=n number of tasks to invoke on each node\r\n -N, --nodes=N number of nodes on which to run (N = min[-max])\r\n --oom-kill-step[=0|1] set the OOMKillStep behaviour\r\n -o, --output=out location of stdout redirection\r\n -O, --overcommit overcommit resources\r\n --overlap Allow other steps to overlap this step\r\n --het-group=value hetjob component allocation(s) in which to launch\r\n application\r\n -p, --partition=partition partition requested\r\n --power=flags power management options\r\n --priority=value set the priority of the job to value\r\n --prolog=program run ""program"" before launching job step\r\n --profile=value enable acct_gather_profile for detailed data\r\n value is all or none or any combination of\r\n energy, lustre, network or task\r\n --propagate[=rlimits] propagate all [or specific list of] rlimits\r\n --pty[=fd] run task zero in pseudo terminal [or in requested terminal given by fd]\r\n --quit-on-interrupt quit on single Ctrl-C\r\n -q, --qos=qos quality of service\r\n -Q, --quiet quiet mode (suppress informational messages)\r\n --reboot reboot block before starting job\r\n -r, --relative=n run job step relative to node n of allocation\r\n -s, --oversubscribe over-subscribe resources with other jobs\r\n -S, --core-spec=cores count of reserved cores\r\n --send-libs[=yes|no] autodetect and broadcast shared objects\r\n --signal=[R:]num[@time] send signal when time limit within time seconds\r\n --slurmd-debug=level slurmd debug level\r\n --spread-job spread job across as many nodes as possible\r\n --switches=max-switches{@max-time-to-wait}\r\n Optimum switches and max time to wait for optimum\r\n --task-epilog=program run ""program"" after launching task\r\n --task-prolog=program run ""program"" before launching task\r\n --thread-spec=threads count of reserved threads\r\n -T, --threads=threads set srun launch fanout\r\n -t, --time=minutes time limit\r\n --time-min=minutes minimum time limit (if distinct)\r\n --tres-bind=... 
task to tres binding options\r\n --tres-per-task=list list of tres required per task\r\n -u, --unbuffered do not line-buffer stdout/err\r\n --use-min-nodes if a range of node counts is given, prefer the\r\n smaller count\r\n -v, --verbose verbose mode (multiple -v's increase verbosity)\r\n -W, --wait=sec seconds to wait after first task exits\r\n before killing job\r\n --wckey=wckey wckey to run job under\r\n -X, --disable-status Disable Ctrl-C status feature\r\n\r\nConstraint options:\r\n --cluster-constraint=list specify a list of cluster-constraints\r\n --contiguous demand a contiguous range of nodes\r\n -C, --constraint=list specify a list of constraints\r\n --mem=MB minimum amount of real memory\r\n --mincpus=n minimum number of logical processors (threads)\r\n per node\r\n --reservation=name allocate resources from named reservation\r\n --tmp=MB minimum amount of temporary disk\r\n -w, --nodelist=hosts... request a specific list of hosts\r\n -x, --exclude=hosts... exclude a specific list of hosts\r\n -Z, --no-allocate don't allocate nodes (must supply -w)\r\n\r\nConsumable resources related options:\r\n --exact use only the resources requested for the step\r\n (by default, all non-gres resources on each node\r\n in the allocation will be used in the step)\r\n --exclusive[=user] for job allocation, this allocates nodes in\r\n in exclusive mode\r\n for job steps, this is equivalent to --exact\r\n --exclusive[=mcs] allocate nodes in exclusive mode when\r\n cpu consumable resource is enabled\r\n and mcs plugin is enabled (--exact implied)\r\n or don't share CPUs for job steps\r\n --mem-per-cpu=MB maximum amount of real memory per allocated\r\n cpu required by the job.\r\n --mem >= --mem-per-cpu if --mem is specified.\r\n --resv-ports reserve communication ports\r\n\r\nAffinity/Multi-core options: (when the task/affinity plugin is enabled)\r\n For the following 4 options, you are\r\n specifying the minimum resources available for\r\n the node(s) allocated to the job.\r\n --sockets-per-node=S number of sockets per node to allocate\r\n --cores-per-socket=C number of cores per socket to allocate\r\n --threads-per-core=T number of threads per core to allocate\r\n -B, --extra-node-info=S[:C[:T]] combine request of sockets per node,\r\n cores per socket and threads per core.\r\n Specify an asterisk (*) as a placeholder,\r\n a minimum value, or a min-max range.\r\n\r\n --ntasks-per-core=n number of tasks to invoke on each core\r\n --ntasks-per-socket=n number of tasks to invoke on each socket\r\n --cpu-bind= Bind tasks to CPUs\r\n (see ""--cpu-bind=help"" for options)\r\n --hint= Bind tasks according to application hints\r\n (see ""--hint=help"" for options)\r\n --mem-bind= Bind memory to locality domains (ldom)\r\n (see ""--mem-bind=help"" for options)\r\n\r\nOptions provided by plugins:\r\n\r\n --container-image=[USER@][REGISTRY#]IMAGE[:TAG]|PATH\r\n [pyxis] the image to use for the container\r\n filesystem. Can be either a docker image given as\r\n an enroot URI, or a path to a squashfs file on the\r\n remote host filesystem.\r\n\r\n --container-mounts=SRC:DST[:FLAGS][,SRC:DST...]\r\n [pyxis] bind mount[s] inside the container. Mount\r\n flags are separated with ""+"", e.g. ""ro+rprivate""\r\n\r\n --container-workdir=PATH\r\n [pyxis] working directory inside the container\r\n --container-name=NAME [pyxis] name to use for saving and loading the\r\n container on the host. Unnamed containers are\r\n removed after the slurm task is complete; named\r\n containers are not. 
If a container with this name\r\n already exists, the existing container is used and\r\n the import is skipped.\r\n --container-save=PATH [pyxis] Save the container state to a squashfs\r\n file on the remote host filesystem.\r\n --container-mount-home [pyxis] bind mount the user's home directory.\r\n System-level enroot settings might cause this\r\n directory to be already-mounted.\r\n\r\n --no-container-mount-home\r\n [pyxis] do not bind mount the user's home\r\n directory\r\n --container-remap-root [pyxis] ask to be remapped to root inside the\r\n container. Does not grant elevated system\r\n permissions, despite appearances.\r\n\r\n --no-container-remap-root\r\n [pyxis] do not remap to root inside the container\r\n --container-entrypoint [pyxis] execute the entrypoint from the container\r\n image\r\n\r\n --no-container-entrypoint\r\n [pyxis] do not execute the entrypoint from the\r\n container image\r\n\r\n --container-entrypoint-log\r\n [pyxis] print the output of the entrypoint script\r\n --container-writable [pyxis] make the container filesystem writable\r\n --container-readonly [pyxis] make the container filesystem read-only\r\n\r\n --container-env=NAME[,NAME...]\r\n [pyxis] names of environment variables to override\r\n with the host environment and set at the\r\n entrypoint. By default, all exported host\r\n environment variables are set in the container\r\n after the entrypoint is run, but their existing\r\n values in the image take precedence; the variables\r\n specified with this flag are preserved from the\r\n host and set before the entrypoint runs\r\n\r\nGPU scheduling options:\r\n --cpus-per-gpu=n number of CPUs required per allocated GPU\r\n -G, --gpus=n count of GPUs required for the job\r\n --gpu-bind=... task to gpu binding options\r\n --gpu-freq=... frequency and voltage of GPUs\r\n --gpus-per-node=n number of GPUs required per allocated node\r\n --gpus-per-socket=n number of GPUs required per allocated socket\r\n --gpus-per-task=n number of GPUs required per spawned task\r\n --mem-per-gpu=n real memory required per allocated GPU\r\n\r\nHelp options:\r\n -h, --help show this help message\r\n --usage display brief usage message\r\n\r\nOther options:\r\n -V, --version output version information and exit\r\n\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +3031,3295070,"TERMINAL",0,0,"^C",,terminal_command +3032,3295087,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;ead59344-49db-4336-9336-47fae706e637]633;C]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D",,terminal_output +3033,3298593,"TERMINAL",0,0,"srun --overlap --help",,terminal_command +3034,3330166,"TERMINAL",0,0,"srun --overlap --jobid=3254710 /bin/bash",,terminal_command +3035,3330180,"TERMINAL",0,0,"]633;E;2025-06-30 15:19:34 srun --overlap --jobid=3254710 /bin/bash;ead59344-49db-4336-9336-47fae706e637]633;Csrun: error: Unable to confirm allocation for job 3254710: Invalid job id specified\r\nsrun: Check SLURM_JOB_ID environment variable. 
Expired or invalid job 3254710\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;1",,terminal_output +3036,3336157,"TERMINAL",0,0,"queue",,terminal_command +3037,3336239,"TERMINAL",0,0,"]633;E;2025-06-30 15:19:40 queue;ead59344-49db-4336-9336-47fae706e637]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Jun 30 15:19:40 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3306283 dev_accel interact tum_cte0 R\t7:13\t 2 hkn[0402-0403]",,terminal_output +3038,3337325,"TERMINAL",0,0,"14",,terminal_output +3039,3338349,"TERMINAL",0,0,"25",,terminal_output +3040,3339374,"TERMINAL",0,0,"36",,terminal_output +3041,3340421,"TERMINAL",0,0,"47",,terminal_output +3042,3341523,"TERMINAL",0,0,"58",,terminal_output +3043,3342520,"TERMINAL",0,0,"69",,terminal_output +3044,3343016,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +3045,3348356,"TERMINAL",0,0,"srun --overlap --jobid=3306283 /bin/bash",,terminal_command +3046,3348407,"TERMINAL",0,0,"]633;E;2025-06-30 15:19:52 srun --overlap --jobid=3306283 /bin/bash;ead59344-49db-4336-9336-47fae706e637]633;C",,terminal_output +3047,3348601,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +3048,3372523,"TERMINAL",0,0,"\r\n",,terminal_output +3049,3373337,"TERMINAL",0,0,"q",,terminal_output +3050,3373402,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3051,3373529,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3052,3373603,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3053,3373718,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3054,3373784,"TERMINAL",0,0,"\r\n/bin/bash: line 2: queue: command not found\r\n/bin/bash: line 2: queue: command not found\r\n",,terminal_output +3055,3377329,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3306283.1 tasks 0-1: running\r\n",,terminal_output +3056,3379500,"TERMINAL",0,0,"l",,terminal_output +3057,3379741,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3058,3379946,"TERMINAL",0,0,"\r\ndata\r\ndata_tfrecord_duplicated\r\ndata_tfrecords\r\nframe-knoms.png\r\nframe.png\r\ngenerate_dataset.py\r\ngenie.py\r\ngifs\r\nLICENSE\r\nlogs\r\nmodels\r\noverfit_dir\r\n__pycache__\r\nREADME.md\r\nread_tf_record.py\r\nrequirements-franz.txt\r\nrequirements.txt\r\nsample.py\r\nscripts_cremers\r\nscripts_horeka\r\nslurm\r\ntrain_dynamics.py\r\ntrain_lam.py\r\ntrain_tokenizer.py\r\nutils\r\nwandb\r\ndata\r\ndata_tfrecord_duplicated\r\ndata_tfrecords\r\nframe-knoms.png\r\nframe.png\r\ngenerate_dataset.py\r\ngenie.py\r\ngifs\r\nLICENSE\r\nlogs\r\nmodels\r\noverfit_dir\r\n__pycache__\r\nREADME.md\r\nread_tf_record.py\r\nrequirements-franz.txt\r\nrequirements.txt\r\nsample.py\r\nscripts_cremers\r\nscripts_horeka\r\nslurm\r\ntrain_dynamics.py\r\ntrain_lam.py\r\ntrain_tokenizer.py\r\nutils\r\nwandb\r\n",,terminal_output +3059,3383901,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3306283.1 tasks 0-1: running\r\n",,terminal_output +3060,3384070,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3306283.1\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;130",,terminal_output +3061,3386004,"TERMINAL",0,0,"srun --overlap --jobid=3306283 /bin/bash",,terminal_command +3062,3386046,"TERMINAL",0,0,"]633;E;2025-06-30 15:20:30 
srun --overlap --jobid=3306283 /bin/bash;ead59344-49db-4336-9336-47fae706e637]633;C",,terminal_output +3063,3386199,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +3064,3401658,"TERMINAL",0,0,"srun",,terminal_focus +3065,3402715,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3306283.0 tasks 0-7: running\r\n",,terminal_output +3066,3402892,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3306283.0\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3306283.0 ON hkn0402 CANCELLED AT 2025-06-30T15:20:47 ***\r\n",,terminal_output +3067,3403997,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3068,3404190,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3069,3405010,"TERMINAL",0,0,"sh scripts_horeka/train_tokenizer.sh ",,terminal_output +3070,3405394,"TERMINAL",0,0,"",,terminal_output +3071,3405966,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +3072,3406041,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3073,3406111,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3074,3406178,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3075,3406436,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3076,3406498,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +3077,3406565,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3078,3406698,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3079,3406818,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +3080,3406927,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3081,3407073,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3082,3407218,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3083,3407335,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3084,3407474,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +3085,3407660,"TERMINAL",0,0,"[?25la[?25h[?25li[?25h[?25ln[?25h",,terminal_output +3086,3407893,"TERMINAL",0,0,"\r\n[?2004l\rM\tutils/dataloader.py\r\nAlready on 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3087,3410421,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +3088,3410531,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3089,3410596,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +3090,3411647,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3091,3411848,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +3092,3412259,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +3093,3412851,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3094,3412904,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +3095,3413120,"TERMINAL",0,0,"[?25lc[?25h[?25lh[?25h",,terminal_output +3096,3413349,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r add-wandb-name-and-tags\r\n convert-to-jax-array-in-iter\r\n dont-let-tf-see-gpu\r\n* main\r\n preprocess_video\r\n tmp\r\n\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3097,3416334,"TERMINAL",0,0,"g",,terminal_output +3098,3416409,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3099,3416461,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3100,3416640,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3101,3416705,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3102,3416848,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +3103,3416917,"TERMINAL",0,0,"[?25le[?25h",,terminal_output 
+3104,3417044,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3105,3417112,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +3106,3417248,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3107,3417472,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3108,3417948,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3109,3418086,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3110,3418151,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3111,3418665,"TERMINAL",0,0,"convert-to-jax-array-in-iter",,terminal_output +3112,3418955,"TERMINAL",0,0,"convert-to-jax-array-in-iter\r\n[?2004l\r",,terminal_output +3113,3419070,"TERMINAL",0,0,"M\tutils/dataloader.py\r\nSwitched to branch 'convert-to-jax-array-in-iter'\r\nYour branch is up to date with 'origin/convert-to-jax-array-in-iter'.\r\n]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3114,3420711,"TERMINAL",0,0,"git checkout convert-to-jax-array-in-iter",,terminal_output +3115,3420937,"TERMINAL",0,0,"branch",,terminal_output +3116,3421519,"TERMINAL",0,0,"checkout main",,terminal_output +3117,3421983,"TERMINAL",0,0,"sh scripts_horeka/train_tokenizer.sh ",,terminal_output +3118,3422471,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3119,3422503,"",0,0,"Switched from branch 'main' to 'convert-to-jax-array-in-iter'",,git_branch_checkout +3120,3422633,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3224824\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1751289147\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751292747\r\nSLURM_PMI2_SRUN_PORT=36863\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306283\r\nSLURM_PTY_PORT=36049\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=204800\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn[0402-0403]\r\nSLURM_SRUN_COMM_PORT=46875\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306283\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=46875\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0402-0403]\r\n",,terminal_output +3121,3422756,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +3122,3430212,"TERMINAL",0,0,"2025-06-30 15:21:14.534052: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:21:14.534665: E 
external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:21:14.534934: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:21:14.535277: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289674.547133 3228020 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289674.547707 3228023 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289674.548036 3228021 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289674.548443 3228022 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751289674.551828 3228020 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751289674.551845 3228023 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751289674.552400 3228021 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751289674.552658 3228022 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751289674.565334 3228023 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565350 3228023 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565352 3228023 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565354 3228023 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565671 3228020 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565690 3228020 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565692 3228020 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565694 3228020 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565548 3228021 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565564 3228021 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565566 3228021 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565568 3228021 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565700 3228022 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565714 3228022 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565716 3228022 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289674.565717 3228022 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +3123,3431438,"TERMINAL",0,0,"2025-06-30 15:21:15.803432: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:21:15.803495: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:21:15.804223: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:21:15.804209: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289675.816577 4162465 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289675.817088 4162464 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289675.817018 4162466 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751289675.817553 4162463 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751289675.821208 4162464 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751289675.821252 4162465 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751289675.821476 4162466 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751289675.821914 4162463 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751289675.834555 4162464 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.834572 4162464 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.834574 4162464 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.834575 4162464 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835054 4162463 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835074 4162463 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835076 4162463 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835077 4162463 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835053 4162465 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835073 4162465 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835075 4162465 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835076 4162465 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835296 4162466 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835312 4162466 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835314 4162466 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751289675.835315 4162466 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +3124,3442472,"TERMINAL",0,0,"W0000 00:00:1751289686.815888 4162463 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289686.816035 4162465 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289686.816051 4162466 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289686.816227 4162464 gpu_device.cc:2341] Cannot dlopen some GPU libraries. 
Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289686.817312 3228020 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289686.817765 3228021 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289686.817804 3228023 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751289686.818051 3228022 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +3125,3445196,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +3126,3445784,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_152129-ajf1kd0q\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run test-wandb-tags-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/ajf1kd0q\r\n",,terminal_output +3127,3445917,"TERMINAL",0,0,"2025-06-30 15:21:30.296969: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:21:30.307261: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:21:30.307837: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:21:30.317157: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:21:30.323035: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:21:30.331736: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:21:30.342808: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3128,3447726,"TERMINAL",0,0,"2025-06-30 15:21:32.065566: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3129,3459317,"TERMINAL",0,0,"2025-06-30 15:21:43.744652: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3130,3459486,"TERMINAL",0,0,"2025-06-30 15:21:43.910187: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3131,3459549,"TERMINAL",0,0,"2025-06-30 15:21:43.931855: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:21:43.974831: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3132,3461395,"TERMINAL",0,0,"2025-06-30 15:21:45.822734: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3133,3461635,"TERMINAL",0,0,"2025-06-30 15:21:46.063270: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3134,3461726,"TERMINAL",0,0,"2025-06-30 15:21:46.155072: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3135,3461834,"TERMINAL",0,0,"2025-06-30 15:21:46.248772: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3136,3461912,"TERMINAL",0,0,"2025-06-30 15:21:46.319261: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:21:46.341883: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3137,3462167,"TERMINAL",0,0,"2025-06-30 15:21:46.595538: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3138,3463228,"TERMINAL",0,0,"2025-06-30 15:21:47.654879: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3139,3464097,"TERMINAL",0,0,"2025-06-30 15:21:48.460106: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3140,3464318,"TERMINAL",0,0,"2025-06-30 15:21:48.745781: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3141,3465032,"TERMINAL",0,0,"2025-06-30 15:21:49.459604: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3142,3465738,"TERMINAL",0,0,"2025-06-30 15:21:50.165847: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3143,3505408,"TERMINAL",0,0,"2025-06-30 15:22:29.833791: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3144,3507413,"TERMINAL",0,0,"2025-06-30 15:22:31.781602: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:22:31.781745: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3145,3508440,"TERMINAL",0,0,"2025-06-30 15:22:32.842265: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3146,3514077,"TERMINAL",0,0,"2025-06-30 15:22:38.474185: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3147,3715112,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3306283.3 tasks 0-7: running\r\n",,terminal_output +3148,3715326,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3306283.3\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3306283.3 ON hkn0402 CANCELLED AT 2025-06-30T15:25:59 ***\r\n",,terminal_output +3149,3715837,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3306283.3\r\nsrun: job abort in progress\r\n",,terminal_output +3150,3716303,"TERMINAL",0,0,"srun",,terminal_focus +3151,3716362,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3152,3716660,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3306283.2 tasks 0-1: running\r\n",,terminal_output +3153,3717502,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3306283.2\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;130",,terminal_output +3154,3719533,"TERMINAL",0,0,"srun",,terminal_focus +3155,3721371,"TERMINAL",0,0,"[?25lgi[?25h[?25li[?25h",,terminal_output +3156,3721538,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +3157,3722553,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +3158,3722661,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +3159,3722842,"TERMINAL",0,0,"[?25la[?25h[?25ln[?25h",,terminal_output +3160,3722996,"TERMINAL",0,0,"[?25lc[?25h[?25lh[?25h",,terminal_output +3161,3723226,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r add-wandb-name-and-tags\r\n* convert-to-jax-array-in-iter\r\n dont-let-tf-see-gpu\r\n main\r\n preprocess_video\r\n tmp\r\n\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3162,3730179,"TERMINAL",0,0,"g",,terminal_output +3163,3730244,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3164,3730421,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +3165,3730593,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3166,3730698,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +3167,3730822,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3168,3730933,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3169,3731142,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3170,3731659,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +3171,3731712,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3172,3731876,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3173,3732008,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3174,3732122,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3175,3732630,"TERMINAL",0,0,"revised-dataloader",,terminal_output +3176,3732945,"TERMINAL",0,0,"revised-dataloader\r\n[?2004l\rerror: Your local changes to the following files would be overwritten by checkout:\r\n\tutils/dataloader.py\r\nPlease commit your changes or stash them before you switch branches.\r\nAborting\r\n]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3177,3734726,"TERMINAL",0,0,"g",,terminal_output +3178,3734836,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3179,3734943,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3180,3735009,"TERMINAL",0,0,"[?25l [?25h",,terminal_output 
+3181,3735271,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3182,3735488,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3183,3735657,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3184,3735749,"TERMINAL",0,0,"[?25ls[?25h[?25lh[?25h",,terminal_output +3185,3735984,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3186,3736051,"TERMINAL",0,0,"Saved working directory and index state WIP on convert-to-jax-array-in-iter: 4de42f3 feat: convert data to jax array as part of iter\r\n",,terminal_output +3187,3736116,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3188,3736486,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +3189,3736540,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3190,3736644,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +3191,3736849,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3192,3736954,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +3193,3737019,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3194,3737175,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +3195,3737398,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3196,3737963,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +3197,3738072,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3198,3738250,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3199,3738361,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3200,3738426,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3201,3739021,"TERMINAL",0,0,"revised-dataloader",,terminal_output +3202,3739345,"TERMINAL",0,0,"revised-dataloader\r\n[?2004l\r",,terminal_output +3203,3739420,"TERMINAL",0,0,"branch 'revised-dataloader' set up to track 'origin/revised-dataloader'.\r\nSwitched to a new branch 'revised-dataloader'\r\n]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3204,3740667,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +3205,3740731,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3206,3740795,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3207,3740963,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3208,3741128,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3209,3741849,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3210,3741965,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3211,3742087,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3212,3742160,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +3213,3742274,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +3214,3742543,"",0,0,"Switched from branch 'convert-to-jax-array-in-iter' to 'revised-dataloader'",,git_branch_checkout +3215,3742566,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +3216,3742778,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +3217,3742845,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +3218,3742997,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +3219,3743193,"TERMINAL",0,0,"\r\n[?2004l\rAuto-merging utils/dataloader.py\r\nCONFLICT (content): Merge conflict in utils/dataloader.py\r\n",,terminal_output +3220,3743326,"TERMINAL",0,0,"On branch revised-dataloader\r\nYour branch is up to date with 'origin/revised-dataloader'.\r\n\r\nUnmerged paths:\r\n (use ""git restore --staged ..."" to unstage)\r\n (use ""git add ..."" to mark resolution)\r\n\tboth modified: utils/dataloader.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdata_tfrecord_duplicated/\r\n\tdata_tfrecords/\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm/\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit 
-a"")\r\nThe stash entry is kept in case you need it again.\r\n]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3221,3747781,"utils/dataloader.py",0,0,"import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntf.config.experimental.set_visible_devices([], ""GPU"")\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\n<<<<<<< Updated upstream\ndef _create_processed_dataset_from_file(file_path, image_h, image_w, image_c, seq_len, num_parallel_calls):\n """"""Creates a fully processed dataset from a single TFRecord file.""""""\n dataset = tf.data.TFRecordDataset([file_path])\n \n=======\ndef get_dataloader(\n tfrecord_paths: list[str], # List of TFRecord file paths\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 10,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), ""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n 
per_process_batch_size = global_batch_size // num_processes\n\n dataset = tf.data.TFRecordDataset(\n tfrecord_paths, num_parallel_reads=tf.data.AUTOTUNE\n )\n\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n\n # (f.srambical) NOTE: For TFRecords, it's often good to have a large shuffle buffer.\n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n>>>>>>> Stashed changes\n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n \n return dataset\n\n\ndef get_dataloader(\n tfrecord_paths: list[str],\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 1000,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n cycle_length: int = 4,\n block_length: int = 1,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), f""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n def dataset_fn(file_path):\n return _create_processed_dataset_from_file(\n file_path, image_h, image_w, image_c, seq_len, num_parallel_calls\n )\n \n dataset = tf.data.Dataset.from_tensor_slices(tfrecord_paths)\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n \n dataset = dataset.interleave(\n dataset_fn,\n cycle_length=cycle_length,\n block_length=block_length,\n num_parallel_calls=num_parallel_calls,\n deterministic=False\n )\n \n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n",python,tab +3222,3761224,"utils/dataloader.py",2498,1528,"def _create_processed_dataset_from_file(file_path, image_h, image_w, image_c, seq_len, num_parallel_calls):\n """"""Creates a fully processed dataset from a single TFRecord file.""""""\n dataset = tf.data.TFRecordDataset([file_path])\n \n",python,content +3223,3764728,"utils/dataloader.py",2952,0,"",python,selection_mouse +3224,3768441,"utils/dataloader.py",0,0,"",python,tab +3225,3770540,"utils/dataloader.py",3393,0,"",python,selection_mouse +3226,3771487,"utils/dataloader.py",3393,1,"",python,content +3227,3771605,"utils/dataloader.py",3393,1,"",python,content +3228,3778402,"TERMINAL",0,0,"git stash pop",,terminal_output +3229,3779141,"TERMINAL",0,0,"checkout revised-dataloader",,terminal_output +3230,3779631,"TERMINAL",0,0,"stash",,terminal_output +3231,3779984,"TERMINAL",0,0,"checkout revised-dataloader",,terminal_output +3232,3780441,"TERMINAL",0,0,"branch",,terminal_output +3233,3780793,"TERMINAL",0,0,"sh scripts_horeka/train_tokenizer.sh ",,terminal_output 
+3234,3781698,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +3235,3781829,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4(x2)\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3224824\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1751289147\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751292747\r\nSLURM_PMI2_SRUN_PORT=36863\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24(x2)\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=2\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306283\r\nSLURM_PTY_PORT=36049\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=204800\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn[0402-0403]\r\nSLURM_SRUN_COMM_PORT=46875\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=8\r\nSLURM_NNODES=2\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306283\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=46875\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn[0402-0403]\r\n",,terminal_output +3236,3781963,"TERMINAL",0,0,"GpuFreq=control_disabled\r\nGpuFreq=control_disabled\r\n",,terminal_output +3237,3785343,"TERMINAL",0,0,"2025-06-30 15:27:09.729846: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:27:09.729896: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:27:09.729826: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:27:09.731084: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751290029.742676 3230763 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751290029.742786 3230765 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751290029.742994 3230764 cuda_dnn.cc:8579] Unable to 
register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751290029.744315 3230762 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751290029.747054 3230763 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751290029.747389 3230765 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751290029.747480 3230764 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751290029.748463 3230762 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751290029.761210 3230763 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761225 3230763 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761227 3230763 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761229 3230763 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761194 3230765 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761210 3230765 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761212 3230765 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761214 3230765 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761307 3230764 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761322 3230762 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761339 3230762 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761341 3230762 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761342 3230762 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761322 3230764 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761324 3230764 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290029.761325 3230764 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +3238,3789657,"TERMINAL",0,0,"2025-06-30 15:27:14.051291: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:27:14.051593: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:27:14.051714: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n2025-06-30 15:27:14.051821: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751290034.064373 4164973 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751290034.064916 4164972 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751290034.064832 4164975 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751290034.065189 4164974 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751290034.068552 4164973 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751290034.069171 4164975 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751290034.069327 4164972 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nE0000 00:00:1751290034.069663 4164974 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751290034.081997 4164973 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082013 4164973 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082015 4164973 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082017 4164973 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.081999 4164975 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082014 4164975 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082016 4164975 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082017 4164975 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082605 4164972 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082623 4164972 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082625 4164972 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082626 4164972 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082634 4164974 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082649 4164974 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082651 4164974 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751290034.082652 4164974 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +3239,3791549,"TERMINAL",0,0,"W0000 00:00:1751290035.972724 3230765 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751290035.972925 3230763 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751290035.973745 3230762 gpu_device.cc:2341] Cannot dlopen some GPU libraries. 
Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751290035.973745 3230764 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +3240,3800698,"TERMINAL",0,0,"W0000 00:00:1751290045.122096 4164972 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751290045.122112 4164973 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751290045.121991 4164974 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\nW0000 00:00:1751290045.122268 4164975 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +3241,3802531,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +3242,3803255,"TERMINAL",0,0,"2025-06-30 15:27:27.596352: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:27:27.598889: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:27:27.608339: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:27:27.624407: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:27:27.624406: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:27:27.631761: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:27:27.653555: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3243,3803351,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_152726-pnktbeq0\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run test-wandb-tags-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/pnktbeq0\r\n",,terminal_output +3244,3804948,"TERMINAL",0,0,"2025-06-30 15:27:29.375451: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3245,3816549,"TERMINAL",0,0,"2025-06-30 15:27:40.977835: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3246,3816665,"TERMINAL",0,0,"2025-06-30 15:27:41.061209: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:27:41.078454: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3247,3816823,"TERMINAL",0,0,"2025-06-30 15:27:41.249641: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3248,3818422,"TERMINAL",0,0,"2025-06-30 15:27:42.848792: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3249,3818913,"TERMINAL",0,0,"2025-06-30 15:27:43.339325: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3250,3818979,"TERMINAL",0,0,"2025-06-30 15:27:43.373398: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:27:43.389027: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:27:43.396465: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3251,3819215,"TERMINAL",0,0,"2025-06-30 15:27:43.637777: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3252,3819378,"TERMINAL",0,0,"2025-06-30 15:27:43.806314: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3253,3820455,"TERMINAL",0,0,"2025-06-30 15:27:44.842698: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3254,3821050,"TERMINAL",0,0,"2025-06-30 15:27:45.475805: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3255,3821583,"TERMINAL",0,0,"2025-06-30 15:27:46.002188: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3256,3822039,"TERMINAL",0,0,"2025-06-30 15:27:46.464616: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3257,3822959,"TERMINAL",0,0,"2025-06-30 15:27:47.384852: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3258,3850355,"TERMINAL",0,0,"2025-06-30 15:28:14.706040: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3259,3850765,"TERMINAL",0,0,"2025-06-30 15:28:15.130141: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3260,3851279,"TERMINAL",0,0,"2025-06-30 15:28:15.601909: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 15:28:15.602059: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3261,3851588,"TERMINAL",0,0,"2025-06-30 15:28:15.924648: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3262,3968615,"TERMINAL",0,0,"Running on 8 devices.\r\nStarting training from step 0...\r\nStep 0, loss: 0.3371405005455017, step time: 52139.82701301575ms\r\nStep 1, loss: 0.255323588848114, step time: 47.81985282897949ms\r\nStep 2, loss: 0.12879155576229095, step time: 61.59520149230957ms\r\nStep 3, loss: 0.09622794389724731, step time: 84.48195457458496ms\r\nStep 4, loss: 0.15179984271526337, step time: 97.51224517822266ms\r\nStep 5, loss: 0.14292140305042267, step time: 209.13100242614746ms\r\nStep 6, loss: 0.18607014417648315, step time: 96.62294387817383ms\r\nStep 7, loss: 0.21479065716266632, step time: 159.5466136932373ms\r\nStep 8, loss: 0.18476882576942444, step time: 117.87128448486328ms\r\nStep 9, loss: 0.16492873430252075, step time: 343.88136863708496ms\r\nStep 10, loss: 0.2142234593629837, step time: 220.68309783935547ms\r\nStep 11, loss: 0.17373782396316528, step time: 109.36689376831055ms\r\nStep 12, loss: 0.17216065526008606, step time: 110.0618839263916ms\r\nStep 13, loss: 0.1352560818195343, step time: 147.9642391204834ms\r\nStep 14, loss: 0.14637072384357452, step time: 100.60834884643555ms\r\nStep 15, loss: 0.15783251821994781, step time: 42.69099235534668ms\r\nStep 16, loss: 0.19798177480697632, step time: 174.78632926940918ms\r\nStep 17, loss: 0.15507982671260834, step time: 86.75384521484375ms\r\nStep 18, loss: 0.19018206000328064, step time: 129.84538078308105ms\r\nStep 19, loss: 0.13229650259017944, step time: 48.677682876586914ms\r\nStep 20, loss: 0.12048780173063278, step time: 108.00862312316895ms\r\nStep 21, loss: 0.11940276622772217, step time: 219.53654289245605ms\r\nStep 22, loss: 0.10998138040304184, step time: 204.60033416748047ms\r\nStep 23, loss: 0.12868253886699677, step time: 103.91712188720703ms\r\nStep 24, loss: 0.14274726808071136, step time: 116.1189079284668ms\r\nStep 25, loss: 0.1409907042980194, step time: 90.91734886169434ms\r\nStep 26, loss: 0.1367851197719574, step time: 153.09810638427734ms\r\nStep 27, loss: 0.09431852400302887, step time: 107.03516006469727ms\r\nStep 28, loss: 0.13750433921813965, step time: 141.66903495788574ms\r\nStep 29, loss: 0.12246876209974289, step time: 242.22636222839355ms\r\nStep 30, loss: 0.11990787833929062, step time: 170.72057723999023ms\r\nStep 31, loss: 0.08659865707159042, step time: 139.7573947906494ms\r\nStep 32, loss: 0.13262033462524414, step time: 160.9647274017334ms\r\nStep 33, loss: 0.13781970739364624, step time: 219.4993495941162ms\r\nStep 34, loss: 0.1245904490351677, step time: 121.89292907714844ms\r\nStep 35, loss: 0.11198143661022186, step time: 140.98834991455078ms\r\nStep 36, loss: 0.17048567533493042, step time: 198.81391525268555ms\r\nStep 37, loss: 0.14530757069587708, step time: 165.22908210754395ms\r\nStep 38, loss: 0.13687710464000702, step time: 180.82523345947266ms\r\nStep 39, loss: 0.12694677710533142, step time: 166.2120819091797ms\r\nStep 40, loss: 0.16414347290992737, step time: 103.4543514251709ms\r\nStep 41, loss: 0.15038971602916718, step time: 179.7804832458496ms\r\nStep 42, loss: 0.17023690044879913, step time: 145.14470100402832ms\r\nStep 43, loss: 0.1883412003517151, step time: 121.77181243896484ms\r\nStep 44, loss: 0.18213486671447754, step time: 43.02382469177246ms\r\nStep 45, loss: 0.17754821479320526, step time: 37.915945053100586ms\r\nStep 46, loss: 0.17418275773525238, step time: 27.410030364990234ms\r\nStep 
47, loss: 0.17311707139015198, step time: 52.95896530151367ms\r\nStep 48, loss: 0.19261178374290466, step time: 130.01084327697754ms\r\nStep 49, loss: 0.18420304358005524, step time: 229.5689582824707ms\r\nStep 50, loss: 0.18866658210754395, step time: 116.12510681152344ms\r\nStep 51, loss: 0.15617214143276215, step time: 68.31169128417969ms\r\nStep 52, loss: 0.16529572010040283, step time: 43.17116737365723ms\r\nStep 53, loss: 0.20970384776592255, step time: 42.508840560913086ms\r\nStep 54, loss: 0.15166610479354858, step time: 46.39720916748047ms\r\nStep 55, loss: 0.1617882400751114, step time: 56.947946548461914ms\r\nStep 56, loss: 0.17322023212909698, step time: 58.90774726867676ms\r\nStep 57, loss: 0.16511975228786469, step time: 62.703847885131836ms\r\nStep 58, loss: 0.2124534249305725, step time: 63.16995620727539ms\r\nStep 59, loss: 0.18958066403865814, step time: 100.42166709899902ms\r\nStep 60, loss: 0.1716698259115219, step time: 107.93256759643555ms\r\nStep 61, loss: 0.1855076253414154, step time: 55.18460273742676ms\r\nStep 62, loss: 0.18269070982933044, step time: 130.17988204956055ms\r\nStep 63, loss: 0.20123043656349182, step time: 89.96415138244629ms\r\nStep 64, loss: 0.20838962495326996, step time: 203.32908630371094ms\r\nStep 65, loss: 0.22178855538368225, step time: 136.21973991394043ms\r\nStep 66, loss: 0.22029900550842285, step time: 272.1233367919922ms\r\nStep 67, loss: 0.197868213057518, step time: 152.38380432128906ms\r\nStep 68, loss: 0.19733645021915436, step time: 114.36915397644043ms\r\nStep 69, loss: 0.1776614487171173, step time: 148.12898635864258ms\r\nStep 70, loss: 0.2160605639219284, step time: 215.61598777770996ms\r\nStep 71, loss: 0.1945113092660904, step time: 98.33717346191406ms\r\nStep 72, loss: 0.18405216932296753, step time: 86.43007278442383ms\r\nStep 73, loss: 0.20694969594478607, step time: 38.07687759399414ms\r\nStep 74, loss: 0.21449634432792664, step time: 133.42761993408203ms\r\nStep 75, loss: 0.20241209864616394, step time: 141.71743392944336ms\r\nStep 76, loss: 0.21559882164001465, step time: 129.3647289276123ms\r\nStep 77, loss: 0.2075277715921402, step time: 118.80254745483398ms\r\nStep 78, loss: 0.2306802123785019, step time: 101.62019729614258ms\r\nStep 79, loss: 0.1760329306125641, step time: 100.09145736694336ms\r\nStep 80, loss: 0.22264865040779114, step time: 201.24053955078125ms\r\nStep 81, loss: 0.22304664552211761, step time: 250.60153007507324ms\r\nStep 82, loss: 0.21281281113624573, step time: 157.1352481842041ms\r\nStep 83, loss: 0.18681474030017853, step time: 152.99725532531738ms\r\nStep 84, loss: 0.20908211171627045, step time: 223.88315200805664ms\r\nStep 85, loss: 0.20652326941490173, step time: 150.38537979125977ms\r\nStep 86, loss: 0.2098509967327118, step time: 120.66459655761719ms\r\nStep 87, loss: 0.1820390224456787, step time: 146.08335494995117ms\r\nStep 88, loss: 0.20359483361244202, step time: 141.04557037353516ms\r\nStep 89, loss: 0.1932521015405655, step time: 246.57225608825684ms\r\nRunning on 8 devices.\r\nStarting training from step 0...\r\nStep 0, loss: 0.3371405005455017, step time: 52447.545766830444ms\r\nStep 1, loss: 0.255323588848114, step time: 25.48360824584961ms\r\nStep 2, loss: 0.12879155576229095, step time: 34.99627113342285ms\r\nStep 3, loss: 0.09622794389724731, step time: 90.63339233398438ms\r\nStep 4, loss: 0.15179984271526337, step time: 46.065568923950195ms\r\nStep 5, loss: 0.14292140305042267, step time: 184.6940517425537ms\r\nStep 6, loss: 0.18607014417648315, step time: 
110.32962799072266ms\r\nStep 7, loss: 0.21479065716266632, step time: 204.6797275543213ms\r\nStep 8, loss: 0.18476882576942444, step time: 203.24182510375977ms\r\nStep 9, loss: 0.16492873430252075, step time: 143.13387870788574ms\r\nStep 10, loss: 0.2142234593629837, step time: 117.99263954162598ms\r\nStep 11, loss: 0.17373782396316528, step time: 104.46619987487793ms\r\nStep 12, loss: 0.17216065526008606, step time: 109.30585861206055ms\r\nStep 13, loss: 0.1352560818195343, step time: 179.5804500579834ms\r\nStep 90, loss: 0.19430938363075256, step time: 168.93506050109863ms\r\nStep 91, loss: 0.19726090133190155, step time: 191.80035591125488ms\r\nStep 92, loss: 0.2031475305557251, step time: 169.3570613861084ms\r\nStep 93, loss: 0.1898179054260254, step time: 172.11341857910156ms\r\nStep 94, loss: 0.21448193490505219, step time: 103.27744483947754ms\r\nStep 95, loss: 0.22105661034584045, step time: 66.99061393737793ms\r\nStep 96, loss: 0.2251318246126175, step time: 120.41687965393066ms\r\nStep 97, loss: 0.20732171833515167, step time: 25.656461715698242ms\r\nStep 98, loss: 0.19954998791217804, step time: 23.991107940673828ms\r\nStep 99, loss: 0.2192666381597519, step time: 30.553102493286133ms\r\nStep 100, loss: 0.21951265633106232, step time: 36.59200668334961ms\r\nStep 101, loss: 0.19409632682800293, step time: 38.62357139587402ms\r\nStep 102, loss: 0.21452262997627258, step time: 126.1434555053711ms\r\nStep 103, loss: 0.22496581077575684, step time: 434.3066215515137ms\r\nStep 104, loss: 0.1928805261850357, step time: 37.691354751586914ms\r\nStep 14, loss: 0.14637072384357452, step time: 106.6596508026123ms\r\nStep 15, loss: 0.15783251821994781, step time: 190.9768581390381ms\r\nStep 16, loss: 0.19798177480697632, step time: 328.3100128173828ms\r\nStep 17, loss: 0.15507982671260834, step time: 143.21279525756836ms\r\nStep 18, loss: 0.19018206000328064, step time: 85.63542366027832ms\r\nStep 19, loss: 0.13229650259017944, step time: 34.99794006347656ms\r\nStep 20, loss: 0.12048780173063278, step time: 107.73658752441406ms\r\nStep 21, loss: 0.11940276622772217, step time: 169.08740997314453ms\r\nStep 22, loss: 0.10998138040304184, step time: 179.20279502868652ms\r\nStep 23, loss: 0.12868253886699677, step time: 131.6549777984619ms\r\nStep 24, loss: 0.14274726808071136, step time: 126.42574310302734ms\r\nStep 25, loss: 0.1409907042980194, step time: 136.75475120544434ms\r\nStep 26, loss: 0.1367851197719574, step time: 245.76473236083984ms\r\nStep 27, loss: 0.09431852400302887, step time: 138.28063011169434ms\r\nStep 28, loss: 0.13750433921813965, step time: 119.68803405761719ms\r\nStep 105, loss: 0.20959798991680145, step time: 79.92076873779297ms\r\nStep 106, loss: 0.20392221212387085, step time: 49.286842346191406ms\r\nStep 107, loss: 0.22778305411338806, step time: 58.80546569824219ms\r\nStep 108, loss: 0.21731412410736084, step time: 75.41918754577637ms\r\nStep 109, loss: 0.21080194413661957, step time: 27.925968170166016ms\r\nStep 110, loss: 0.18422192335128784, step time: 33.85472297668457ms\r\nStep 111, loss: 0.1793976128101349, step time: 66.1935806274414ms\r\nStep 112, loss: 0.20595386624336243, step time: 110.13174057006836ms\r\nStep 113, loss: 0.2135477364063263, step time: 84.25164222717285ms\r\nStep 114, loss: 0.20374689996242523, step time: 66.57123565673828ms\r\nStep 115, loss: 0.1921147257089615, step time: 87.73446083068848ms\r\nStep 116, loss: 0.23678253591060638, step time: 120.45598030090332ms\r\nStep 117, loss: 0.21021409332752228, step time: 
86.1818790435791ms\r\nStep 118, loss: 0.20275235176086426, step time: 57.23309516906738ms\r\nStep 119, loss: 0.20498108863830566, step time: 85.12616157531738ms\r\nStep 29, loss: 0.12246876209974289, step time: 219.49291229248047ms\r\nStep 30, loss: 0.11990787833929062, step time: 135.56623458862305ms\r\nStep 31, loss: 0.08659865707159042, step time: 102.26726531982422ms\r\nStep 32, loss: 0.13262033462524414, step time: 154.69741821289062ms\r\nStep 33, loss: 0.13781970739364624, step time: 121.45662307739258ms\r\nStep 34, loss: 0.1245904490351677, step time: 235.823392868042ms\r\nStep 35, loss: 0.11198143661022186, step time: 99.58958625793457ms\r\nStep 36, loss: 0.17048567533493042, step time: 179.3217658996582ms\r\nStep 37, loss: 0.14530757069587708, step time: 178.91860008239746ms\r\nStep 38, loss: 0.13687710464000702, step time: 167.62375831604004ms\r\nStep 39, loss: 0.12694677710533142, step time: 338.15503120422363ms\r\nStep 40, loss: 0.16414347290992737, step time: 115.43059349060059ms\r\nStep 41, loss: 0.15038971602916718, step time: 74.50246810913086ms\r\nStep 42, loss: 0.17023690044879913, step time: 44.60573196411133ms\r\nStep 43, loss: 0.1883412003517151, step time: 22.885560989379883ms\r\nStep 44, loss: 0.18213486671447754, step time: 22.010326385498047ms\r\nStep 45, loss: 0.17754821479320526, step time: 22.675275802612305ms\r\nStep 46, loss: 0.17418275773525238, step time: 53.90787124633789ms\r\nStep 47, loss: 0.17311707139015198, step time: 175.4474639892578ms\r\nStep 48, loss: 0.19261178374290466, step time: 197.91054725646973ms\r\nStep 49, loss: 0.18420304358005524, step time: 48.31647872924805ms\r\nStep 50, loss: 0.18866658210754395, step time: 38.335561752319336ms\r\nStep 51, loss: 0.15617214143276215, step time: 74.28741455078125ms\r\nStep 52, loss: 0.16529572010040283, step time: 25.314807891845703ms\r\nStep 53, loss: 0.20970384776592255, step time: 69.29492950439453ms\r\nStep 54, loss: 0.15166610479354858, step time: 51.932573318481445ms\r\nStep 55, loss: 0.1617882400751114, step time: 100.80170631408691ms\r\nStep 56, loss: 0.17322023212909698, step time: 81.05707168579102ms\r\nStep 57, loss: 0.16511975228786469, step time: 225.17848014831543ms\r\nStep 58, loss: 0.2124534249305725, step time: 203.57394218444824ms\r\nStep 59, loss: 0.18958066403865814, step time: 170.18938064575195ms\r\nStep 60, loss: 0.1716698259115219, step time: 57.111263275146484ms\r\nStep 61, loss: 0.1855076253414154, step time: 55.11617660522461ms\r\nStep 62, loss: 0.18269070982933044, step time: 125.96368789672852ms\r\nStep 63, loss: 0.20123043656349182, step time: 84.08665657043457ms\r\nStep 64, loss: 0.20838962495326996, step time: 106.58884048461914ms\r\nStep 65, loss: 0.22178855538368225, step time: 60.610294342041016ms\r\nStep 66, loss: 0.22029900550842285, step time: 83.98747444152832ms\r\nStep 67, loss: 0.197868213057518, step time: 75.36077499389648ms\r\nStep 68, loss: 0.19733645021915436, step time: 77.31151580810547ms\r\nStep 69, loss: 0.1776614487171173, step time: 130.7971477508545ms\r\nStep 70, loss: 0.2160605639219284, step time: 221.14276885986328ms\r\nStep 71, loss: 0.1945113092660904, step time: 74.10383224487305ms\r\nStep 72, loss: 0.18405216932296753, step time: 94.58255767822266ms\r\nStep 73, loss: 0.20694969594478607, step time: 73.24719429016113ms\r\nStep 74, loss: 0.21449634432792664, step time: 142.68875122070312ms\r\nStep 75, loss: 0.20241209864616394, step time: 183.81571769714355ms\r\nStep 76, loss: 0.21559882164001465, step time: 104.33101654052734ms\r\nStep 77, loss: 
0.2075277715921402, step time: 153.31006050109863ms\r\nStep 78, loss: 0.2306802123785019, step time: 116.51754379272461ms\r\nStep 79, loss: 0.1760329306125641, step time: 156.07404708862305ms\r\nStep 80, loss: 0.22264865040779114, step time: 107.1174144744873ms\r\nStep 81, loss: 0.22304664552211761, step time: 111.19914054870605ms\r\nStep 82, loss: 0.21281281113624573, step time: 105.7577133178711ms\r\nStep 83, loss: 0.18681474030017853, step time: 219.5744514465332ms\r\nStep 84, loss: 0.20908211171627045, step time: 110.01372337341309ms\r\nStep 85, loss: 0.20652326941490173, step time: 216.13669395446777ms\r\nStep 86, loss: 0.2098509967327118, step time: 121.15120887756348ms\r\nStep 87, loss: 0.1820390224456787, step time: 52.39129066467285ms\r\nStep 88, loss: 0.20359483361244202, step time: 75.91009140014648ms\r\nStep 89, loss: 0.1932521015405655, step time: 156.07643127441406ms\r\nStep 90, loss: 0.19430938363075256, step time: 91.49360656738281ms\r\nStep 91, loss: 0.19726090133190155, step time: 24.221181869506836ms\r\nStep 92, loss: 0.2031475305557251, step time: 23.354053497314453ms\r\nStep 93, loss: 0.1898179054260254, step time: 20.757198333740234ms\r\nStep 94, loss: 0.21448193490505219, step time: 22.760391235351562ms\r\nStep 95, loss: 0.22105661034584045, step time: 27.89449691772461ms\r\nStep 96, loss: 0.2251318246126175, step time: 36.38315200805664ms\r\nStep 97, loss: 0.20732171833515167, step time: 119.09365653991699ms\r\nStep 98, loss: 0.19954998791217804, step time: 79.55288887023926ms\r\nStep 99, loss: 0.2192666381597519, step time: 49.70431327819824ms\r\nStep 100, loss: 0.21951265633106232, step time: 55.20796775817871ms\r\nStep 101, loss: 0.19409632682800293, step time: 66.25175476074219ms\r\nStep 102, loss: 0.21452262997627258, step time: 37.41097450256348ms\r\nStep 103, loss: 0.22496581077575684, step time: 30.015945434570312ms\r\nStep 104, loss: 0.1928805261850357, step time: 41.611671447753906ms\r\nStep 105, loss: 0.20959798991680145, step time: 63.26127052307129ms\r\nStep 106, loss: 0.20392221212387085, step time: 230.3459644317627ms\r\nStep 107, loss: 0.22778305411338806, step time: 263.72694969177246ms\r\nStep 108, loss: 0.21731412410736084, step time: 69.22626495361328ms\r\nStep 109, loss: 0.21080194413661957, step time: 36.978960037231445ms\r\nStep 110, loss: 0.18422192335128784, step time: 79.04624938964844ms\r\nStep 111, loss: 0.1793976128101349, step time: 70.56975364685059ms\r\nStep 112, loss: 0.20595386624336243, step time: 63.028573989868164ms\r\nStep 113, loss: 0.2135477364063263, step time: 52.57129669189453ms\r\nStep 114, loss: 0.20374689996242523, step time: 41.957855224609375ms\r\nStep 115, loss: 0.1921147257089615, step time: 64.38517570495605ms\r\nStep 116, loss: 0.23678253591060638, step time: 77.04639434814453ms\r\nStep 117, loss: 0.21021409332752228, step time: 124.71771240234375ms\r\nStep 118, loss: 0.20275235176086426, step time: 91.31431579589844ms\r\nStep 119, loss: 0.20498108863830566, step time: 132.33518600463867ms\r\n",,terminal_output +3263,3969167,"TERMINAL",0,0,"Running on 8 devices.\r\nStarting training from step 0...\r\nStep 0, loss: 0.3371405005455017, step time: 52987.52403259277ms\r\nStep 1, loss: 0.255323588848114, step time: 31.145095825195312ms\r\nStep 2, loss: 0.12879155576229095, step time: 32.95731544494629ms\r\nStep 3, loss: 0.09622794389724731, step time: 31.511306762695312ms\r\nStep 4, loss: 0.15179984271526337, step time: 32.32145309448242ms\r\nStep 5, loss: 0.14292140305042267, step time: 49.330711364746094ms\r\nStep 6, 
loss: 0.18607014417648315, step time: 59.84830856323242ms\r\nStep 7, loss: 0.21479065716266632, step time: 30.894994735717773ms\r\nStep 8, loss: 0.18476882576942444, step time: 61.54775619506836ms\r\nStep 9, loss: 0.16492873430252075, step time: 72.10445404052734ms\r\nStep 10, loss: 0.2142234593629837, step time: 34.76214408874512ms\r\nStep 11, loss: 0.17373782396316528, step time: 51.09429359436035ms\r\nStep 12, loss: 0.17216065526008606, step time: 108.2603931427002ms\r\nStep 13, loss: 0.1352560818195343, step time: 51.02133750915527ms\r\nStep 14, loss: 0.14637072384357452, step time: 62.50405311584473ms\r\nStep 15, loss: 0.15783251821994781, step time: 93.03426742553711ms\r\nStep 16, loss: 0.19798177480697632, step time: 67.7955150604248ms\r\nStep 17, loss: 0.15507982671260834, step time: 35.30287742614746ms\r\nStep 18, loss: 0.19018206000328064, step time: 35.787343978881836ms\r\nStep 19, loss: 0.13229650259017944, step time: 38.625240325927734ms\r\nStep 20, loss: 0.12048780173063278, step time: 82.97443389892578ms\r\nStep 21, loss: 0.11940276622772217, step time: 33.050537109375ms\r\nStep 22, loss: 0.10998138040304184, step time: 33.36930274963379ms\r\nStep 23, loss: 0.12868253886699677, step time: 49.52239990234375ms\r\nStep 24, loss: 0.14274726808071136, step time: 48.39444160461426ms\r\nStep 25, loss: 0.1409907042980194, step time: 41.84436798095703ms\r\nStep 26, loss: 0.1367851197719574, step time: 37.32132911682129ms\r\nStep 27, loss: 0.09431852400302887, step time: 44.07167434692383ms\r\nStep 28, loss: 0.13750433921813965, step time: 51.26690864562988ms\r\nStep 29, loss: 0.12246876209974289, step time: 43.08724403381348ms\r\nStep 30, loss: 0.11990787833929062, step time: 28.35679054260254ms\r\nStep 31, loss: 0.08659865707159042, step time: 40.69805145263672ms\r\nStep 32, loss: 0.13262033462524414, step time: 53.17521095275879ms\r\nStep 33, loss: 0.13781970739364624, step time: 41.26453399658203ms\r\nStep 34, loss: 0.1245904490351677, step time: 86.6396427154541ms\r\nStep 35, loss: 0.11198143661022186, step time: 24.94525909423828ms\r\nStep 36, loss: 0.17048567533493042, step time: 52.31618881225586ms\r\nStep 37, loss: 0.14530757069587708, step time: 25.62427520751953ms\r\nStep 38, loss: 0.13687710464000702, step time: 42.244911193847656ms\r\nStep 39, loss: 0.12694677710533142, step time: 46.486616134643555ms\r\nStep 40, loss: 0.16414347290992737, step time: 23.966073989868164ms\r\nStep 41, loss: 0.15038971602916718, step time: 24.186134338378906ms\r\nStep 42, loss: 0.17023690044879913, step time: 24.42145347595215ms\r\nStep 43, loss: 0.1883412003517151, step time: 25.70319175720215ms\r\nStep 44, loss: 0.18213486671447754, step time: 25.649070739746094ms\r\nStep 45, loss: 0.17754821479320526, step time: 23.45108985900879ms\r\nStep 46, loss: 0.17418275773525238, step time: 17.928600311279297ms\r\nStep 47, loss: 0.17311707139015198, step time: 19.068002700805664ms\r\nStep 48, loss: 0.19261178374290466, step time: 81.33292198181152ms\r\nStep 49, loss: 0.18420304358005524, step time: 55.17721176147461ms\r\nStep 50, loss: 0.18866658210754395, step time: 27.36830711364746ms\r\nStep 51, loss: 0.15617214143276215, step time: 48.19297790527344ms\r\nStep 52, loss: 0.16529572010040283, step time: 37.18233108520508ms\r\nStep 53, loss: 0.20970384776592255, step time: 23.591995239257812ms\r\nStep 54, loss: 0.15166610479354858, step time: 28.211116790771484ms\r\nStep 55, loss: 0.1617882400751114, step time: 63.970088958740234ms\r\nStep 56, loss: 0.17322023212909698, step time: 
30.161380767822266ms\r\nStep 57, loss: 0.16511975228786469, step time: 33.12349319458008ms\r\nStep 58, loss: 0.2124534249305725, step time: 27.853012084960938ms\r\nStep 59, loss: 0.18958066403865814, step time: 57.02495574951172ms\r\nStep 60, loss: 0.1716698259115219, step time: 62.77656555175781ms\r\nStep 61, loss: 0.1855076253414154, step time: 55.7248592376709ms\r\nStep 62, loss: 0.18269070982933044, step time: 39.26229476928711ms\r\nStep 63, loss: 0.20123043656349182, step time: 56.5640926361084ms\r\nStep 64, loss: 0.20838962495326996, step time: 57.64913558959961ms\r\nStep 65, loss: 0.22178855538368225, step time: 96.63176536560059ms\r\nStep 66, loss: 0.22029900550842285, step time: 70.18709182739258ms\r\nStep 67, loss: 0.197868213057518, step time: 52.19912528991699ms\r\nStep 68, loss: 0.19733645021915436, step time: 28.180837631225586ms\r\nStep 69, loss: 0.1776614487171173, step time: 27.786731719970703ms\r\nStep 70, loss: 0.2160605639219284, step time: 28.770923614501953ms\r\nStep 71, loss: 0.1945113092660904, step time: 77.5144100189209ms\r\nStep 72, loss: 0.18405216932296753, step time: 57.13701248168945ms\r\nStep 73, loss: 0.20694969594478607, step time: 26.140689849853516ms\r\nStep 74, loss: 0.21449634432792664, step time: 43.03240776062012ms\r\nStep 75, loss: 0.20241209864616394, step time: 40.78078269958496ms\r\nStep 76, loss: 0.21559882164001465, step time: 43.14303398132324ms\r\nStep 77, loss: 0.2075277715921402, step time: 40.46630859375ms\r\nStep 78, loss: 0.2306802123785019, step time: 60.31942367553711ms\r\nStep 79, loss: 0.1760329306125641, step time: 52.08444595336914ms\r\nStep 80, loss: 0.22264865040779114, step time: 38.459062576293945ms\r\nStep 81, loss: 0.22304664552211761, step time: 49.44944381713867ms\r\nStep 82, loss: 0.21281281113624573, step time: 29.625415802001953ms\r\nStep 83, loss: 0.18681474030017853, step time: 48.71416091918945ms\r\nStep 84, loss: 0.20908211171627045, step time: 34.47103500366211ms\r\nStep 85, loss: 0.20652326941490173, step time: 68.5725212097168ms\r\nStep 86, loss: 0.2098509967327118, step time: 51.88417434692383ms\r\nStep 87, loss: 0.1820390224456787, step time: 26.69811248779297ms\r\nStep 88, loss: 0.20359483361244202, step time: 37.05954551696777ms\r\nStep 89, loss: 0.1932521015405655, step time: 47.72639274597168ms\r\nStep 90, loss: 0.19430938363075256, step time: 27.075767517089844ms\r\nStep 91, loss: 0.19726090133190155, step time: 26.625394821166992ms\r\nStep 92, loss: 0.2031475305557251, step time: 24.466753005981445ms\r\nStep 93, loss: 0.1898179054260254, step time: 26.21603012084961ms\r\nStep 94, loss: 0.21448193490505219, step time: 25.0399112701416ms\r\nStep 95, loss: 0.22105661034584045, step time: 25.723695755004883ms\r\nStep 96, loss: 0.2251318246126175, step time: 26.05462074279785ms\r\nStep 97, loss: 0.20732171833515167, step time: 23.234128952026367ms\r\nStep 98, loss: 0.19954998791217804, step time: 22.111892700195312ms\r\nStep 99, loss: 0.2192666381597519, step time: 130.91397285461426ms\r\nStep 100, loss: 0.21951265633106232, step time: 96.55332565307617ms\r\nStep 101, loss: 0.19409632682800293, step time: 110.01276969909668ms\r\nStep 102, loss: 0.21452262997627258, step time: 65.32001495361328ms\r\nStep 103, loss: 0.22496581077575684, step time: 55.50646781921387ms\r\nStep 104, loss: 0.1928805261850357, step time: 27.434110641479492ms\r\nStep 105, loss: 0.20959798991680145, step time: 27.40025520324707ms\r\nStep 106, loss: 0.20392221212387085, step time: 43.24150085449219ms\r\nStep 107, loss: 
0.22778305411338806, step time: 35.178422927856445ms\r\nStep 108, loss: 0.21731412410736084, step time: 24.027585983276367ms\r\nStep 109, loss: 0.21080194413661957, step time: 45.557260513305664ms\r\nStep 110, loss: 0.18422192335128784, step time: 39.60776329040527ms\r\nStep 111, loss: 0.1793976128101349, step time: 84.11049842834473ms\r\nStep 112, loss: 0.20595386624336243, step time: 43.58696937561035ms\r\nStep 113, loss: 0.2135477364063263, step time: 79.11133766174316ms\r\nStep 114, loss: 0.20374689996242523, step time: 84.24544334411621ms\r\nStep 115, loss: 0.1921147257089615, step time: 47.47509956359863ms\r\nStep 116, loss: 0.23678253591060638, step time: 93.17350387573242ms\r\nStep 117, loss: 0.21021409332752228, step time: 50.65774917602539ms\r\nStep 118, loss: 0.20275235176086426, step time: 145.09844779968262ms\r\nStep 119, loss: 0.20498108863830566, step time: 113.57688903808594ms\r\nStep 120, loss: 0.22381655871868134, step time: 26.49974822998047ms\r\nRunning on 8 devices.\r\nStarting training from step 0...\r\nStep 0, loss: 0.3371405005455017, step time: 52486.11497879028ms\r\nStep 1, loss: 0.255323588848114, step time: 29.511213302612305ms\r\nStep 2, loss: 0.12879155576229095, step time: 46.71955108642578ms\r\nStep 3, loss: 0.09622794389724731, step time: 48.471927642822266ms\r\nStep 4, loss: 0.15179984271526337, step time: 110.98742485046387ms\r\nStep 5, loss: 0.14292140305042267, step time: 54.83722686767578ms\r\nStep 6, loss: 0.18607014417648315, step time: 111.40131950378418ms\r\nStep 7, loss: 0.21479065716266632, step time: 65.0339126586914ms\r\nStep 8, loss: 0.18476882576942444, step time: 124.67741966247559ms\r\nStep 9, loss: 0.16492873430252075, step time: 91.71342849731445ms\r\nStep 10, loss: 0.2142234593629837, step time: 87.38970756530762ms\r\nStep 11, loss: 0.17373782396316528, step time: 155.75933456420898ms\r\nStep 12, loss: 0.17216065526008606, step time: 106.87947273254395ms\r\nStep 13, loss: 0.1352560818195343, step time: 168.09773445129395ms\r\nStep 14, loss: 0.14637072384357452, step time: 118.47639083862305ms\r\nStep 15, loss: 0.15783251821994781, step time: 104.48718070983887ms\r\nStep 16, loss: 0.19798177480697632, step time: 44.32797431945801ms\r\nStep 17, loss: 0.15507982671260834, step time: 147.444486618042ms\r\nStep 18, loss: 0.19018206000328064, step time: 181.35356903076172ms\r\nStep 19, loss: 0.13229650259017944, step time: 89.93291854858398ms\r\nStep 20, loss: 0.12048780173063278, step time: 80.86585998535156ms\r\nStep 21, loss: 0.11940276622772217, step time: 117.2480583190918ms\r\nStep 22, loss: 0.10998138040304184, step time: 109.65371131896973ms\r\nStep 23, loss: 0.12868253886699677, step time: 72.59225845336914ms\r\nStep 24, loss: 0.14274726808071136, step time: 56.931495666503906ms\r\nStep 25, loss: 0.1409907042980194, step time: 94.78759765625ms\r\nStep 26, loss: 0.1367851197719574, step time: 47.49345779418945ms\r\nStep 27, loss: 0.09431852400302887, step time: 106.98509216308594ms\r\nStep 28, loss: 0.13750433921813965, step time: 134.40299034118652ms\r\nStep 29, loss: 0.12246876209974289, step time: 56.92577362060547ms\r\nStep 30, loss: 0.11990787833929062, step time: 78.97233963012695ms\r\nStep 31, loss: 0.08659865707159042, step time: 68.76945495605469ms\r\nStep 32, loss: 0.13262033462524414, step time: 69.80729103088379ms\r\nStep 33, loss: 0.13781970739364624, step time: 106.43911361694336ms\r\nStep 34, loss: 0.1245904490351677, step time: 114.55130577087402ms\r\nStep 35, loss: 0.11198143661022186, step time: 
137.21704483032227ms\r\nStep 36, loss: 0.17048567533493042, step time: 76.94602012634277ms\r\nStep 37, loss: 0.14530757069587708, step time: 119.45962905883789ms\r\nStep 38, loss: 0.13687710464000702, step time: 144.3159580230713ms\r\nStep 39, loss: 0.12694677710533142, step time: 149.50823783874512ms\r\nStep 40, loss: 0.16414347290992737, step time: 83.92524719238281ms\r\nStep 41, loss: 0.15038971602916718, step time: 99.62296485900879ms\r\nStep 42, loss: 0.17023690044879913, step time: 60.926198959350586ms\r\nStep 43, loss: 0.1883412003517151, step time: 48.546791076660156ms\r\nStep 44, loss: 0.18213486671447754, step time: 25.73871612548828ms\r\nStep 45, loss: 0.17754821479320526, step time: 22.52197265625ms\r\nStep 46, loss: 0.17418275773525238, step time: 20.9808349609375ms\r\nStep 47, loss: 0.17311707139015198, step time: 22.443532943725586ms\r\nStep 48, loss: 0.19261178374290466, step time: 146.82960510253906ms\r\nStep 49, loss: 0.18420304358005524, step time: 91.06087684631348ms\r\nStep 50, loss: 0.18866658210754395, step time: 155.26413917541504ms\r\nStep 51, loss: 0.15617214143276215, step time: 75.85716247558594ms\r\nStep 52, loss: 0.16529572010040283, step time: 29.742717742919922ms\r\nStep 53, loss: 0.20970384776592255, step time: 37.98031806945801ms\r\nStep 54, loss: 0.15166610479354858, step time: 55.812835693359375ms\r\nStep 55, loss: 0.1617882400751114, step time: 30.52830696105957ms\r\nStep 56, loss: 0.17322023212909698, step time: 46.00238800048828ms\r\nStep 57, loss: 0.16511975228786469, step time: 104.79378700256348ms\r\nStep 58, loss: 0.2124534249305725, step time: 61.91682815551758ms\r\nRunning on 8 devices.\r\nStarting training from step 0...\r\nStep 0, loss: 0.3371405005455017, step time: 53446.75970077515ms\r\nStep 1, loss: 0.255323588848114, step time: 27.270793914794922ms\r\nStep 2, loss: 0.12879155576229095, step time: 35.7203483581543ms\r\nStep 3, loss: 0.09622794389724731, step time: 32.355546951293945ms\r\nStep 4, loss: 0.15179984271526337, step time: 47.574520111083984ms\r\nStep 5, loss: 0.14292140305042267, step time: 58.563947677612305ms\r\nStep 6, loss: 0.18607014417648315, step time: 35.15195846557617ms\r\nStep 7, loss: 0.21479065716266632, step time: 61.13553047180176ms\r\nStep 8, loss: 0.18476882576942444, step time: 82.89289474487305ms\r\nStep 9, loss: 0.16492873430252075, step time: 36.823272705078125ms\r\nStep 10, loss: 0.2142234593629837, step time: 31.13079071044922ms\r\nStep 11, loss: 0.17373782396316528, step time: 76.45463943481445ms\r\nStep 12, loss: 0.17216065526008606, step time: 48.49886894226074ms\r\nStep 13, loss: 0.1352560818195343, step time: 44.623613357543945ms\r\nStep 59, loss: 0.18958066403865814, step time: 45.124053955078125ms\r\nStep 14, loss: 0.14637072384357452, step time: 98.74391555786133ms\r\nStep 15, loss: 0.15783251821994781, step time: 29.308080673217773ms\r\nStep 16, loss: 0.19798177480697632, step time: 27.72212028503418ms\r\nStep 17, loss: 0.15507982671260834, step time: 30.016183853149414ms\r\nStep 18, loss: 0.19018206000328064, step time: 29.89935874938965ms\r\nStep 19, loss: 0.13229650259017944, step time: 53.179025650024414ms\r\nStep 20, loss: 0.12048780173063278, step time: 53.85565757751465ms\r\nStep 21, loss: 0.11940276622772217, step time: 44.763803482055664ms\r\nStep 22, loss: 0.10998138040304184, step time: 44.8002815246582ms\r\nStep 23, loss: 0.12868253886699677, step time: 60.92667579650879ms\r\nStep 24, loss: 0.14274726808071136, step time: 30.434846878051758ms\r\nStep 25, loss: 0.1409907042980194, step 
time: 72.36123085021973ms\r\nStep 26, loss: 0.1367851197719574, step time: 61.814308166503906ms\r\nStep 27, loss: 0.09431852400302887, step time: 32.057762145996094ms\r\nStep 28, loss: 0.13750433921813965, step time: 46.552419662475586ms\r\nStep 60, loss: 0.1716698259115219, step time: 151.99565887451172ms\r\nStep 61, loss: 0.1855076253414154, step time: 95.38817405700684ms\r\nStep 62, loss: 0.18269070982933044, step time: 97.64695167541504ms\r\nStep 63, loss: 0.20123043656349182, step time: 92.57864952087402ms\r\nStep 64, loss: 0.20838962495326996, step time: 117.89727210998535ms\r\nStep 65, loss: 0.22178855538368225, step time: 134.31620597839355ms\r\nStep 66, loss: 0.22029900550842285, step time: 66.73431396484375ms\r\nStep 67, loss: 0.197868213057518, step time: 95.8242416381836ms\r\nStep 68, loss: 0.19733645021915436, step time: 169.48509216308594ms\r\nStep 69, loss: 0.1776614487171173, step time: 85.1280689239502ms\r\nStep 70, loss: 0.2160605639219284, step time: 182.9051971435547ms\r\nStep 71, loss: 0.1945113092660904, step time: 88.80162239074707ms\r\nStep 72, loss: 0.18405216932296753, step time: 129.6243667602539ms\r\nStep 73, loss: 0.20694969594478607, step time: 168.20573806762695ms\r\nStep 74, loss: 0.21449634432792664, step time: 110.48173904418945ms\r\nStep 29, loss: 0.12246876209974289, step time: 33.77485275268555ms\r\nStep 30, loss: 0.11990787833929062, step time: 30.11488914489746ms\r\nStep 31, loss: 0.08659865707159042, step time: 57.933807373046875ms\r\nStep 32, loss: 0.13262033462524414, step time: 39.82424736022949ms\r\nStep 33, loss: 0.13781970739364624, step time: 32.25231170654297ms\r\nStep 34, loss: 0.1245904490351677, step time: 41.30411148071289ms\r\nStep 35, loss: 0.11198143661022186, step time: 56.98561668395996ms\r\nStep 36, loss: 0.17048567533493042, step time: 29.387950897216797ms\r\nStep 37, loss: 0.14530757069587708, step time: 57.52253532409668ms\r\nStep 38, loss: 0.13687710464000702, step time: 55.04870414733887ms\r\nStep 39, loss: 0.12694677710533142, step time: 28.310298919677734ms\r\nStep 40, loss: 0.16414347290992737, step time: 42.243003845214844ms\r\nStep 41, loss: 0.15038971602916718, step time: 30.52973747253418ms\r\nStep 42, loss: 0.17023690044879913, step time: 24.669885635375977ms\r\nStep 43, loss: 0.1883412003517151, step time: 22.27306365966797ms\r\nStep 75, loss: 0.20241209864616394, step time: 108.37841033935547ms\r\nStep 76, loss: 0.21559882164001465, step time: 169.77167129516602ms\r\nStep 77, loss: 0.2075277715921402, step time: 88.54079246520996ms\r\nStep 78, loss: 0.2306802123785019, step time: 208.53829383850098ms\r\nStep 79, loss: 0.1760329306125641, step time: 171.30303382873535ms\r\nStep 80, loss: 0.22264865040779114, step time: 159.7769260406494ms\r\nStep 81, loss: 0.22304664552211761, step time: 151.3078212738037ms\r\nStep 82, loss: 0.21281281113624573, step time: 124.91488456726074ms\r\nStep 83, loss: 0.18681474030017853, step time: 121.73700332641602ms\r\nStep 84, loss: 0.20908211171627045, step time: 100.11434555053711ms\r\nStep 85, loss: 0.20652326941490173, step time: 93.99771690368652ms\r\nStep 86, loss: 0.2098509967327118, step time: 99.37095642089844ms\r\nStep 87, loss: 0.1820390224456787, step time: 76.2331485748291ms\r\nStep 88, loss: 0.20359483361244202, step time: 110.80360412597656ms\r\nStep 89, loss: 0.1932521015405655, step time: 90.55256843566895ms\r\nStep 44, loss: 0.18213486671447754, step time: 19.946813583374023ms\r\nStep 45, loss: 0.17754821479320526, step time: 23.01478385925293ms\r\nStep 46, loss: 
0.17418275773525238, step time: 25.055646896362305ms\r\nStep 47, loss: 0.17311707139015198, step time: 28.235435485839844ms\r\nStep 48, loss: 0.19261178374290466, step time: 47.31941223144531ms\r\nStep 49, loss: 0.18420304358005524, step time: 94.0089225769043ms\r\nStep 50, loss: 0.18866658210754395, step time: 27.437210083007812ms\r\nStep 51, loss: 0.15617214143276215, step time: 27.796268463134766ms\r\nStep 52, loss: 0.16529572010040283, step time: 27.734041213989258ms\r\nStep 53, loss: 0.20970384776592255, step time: 26.01027488708496ms\r\nStep 54, loss: 0.15166610479354858, step time: 27.759552001953125ms\r\nStep 55, loss: 0.1617882400751114, step time: 46.20075225830078ms\r\nStep 56, loss: 0.17322023212909698, step time: 61.66338920593262ms\r\nStep 57, loss: 0.16511975228786469, step time: 31.507492065429688ms\r\nStep 58, loss: 0.2124534249305725, step time: 30.807018280029297ms\r\nStep 90, loss: 0.19430938363075256, step time: 123.0776309967041ms\r\nStep 91, loss: 0.19726090133190155, step time: 67.51704216003418ms\r\nStep 92, loss: 0.2031475305557251, step time: 77.6979923248291ms\r\nStep 93, loss: 0.1898179054260254, step time: 86.41386032104492ms\r\nStep 94, loss: 0.21448193490505219, step time: 92.47612953186035ms\r\nStep 95, loss: 0.22105661034584045, step time: 79.52618598937988ms\r\nStep 96, loss: 0.2251318246126175, step time: 39.95394706726074ms\r\nStep 97, loss: 0.20732171833515167, step time: 23.77605438232422ms\r\nStep 98, loss: 0.19954998791217804, step time: 24.87778663635254ms\r\nStep 99, loss: 0.2192666381597519, step time: 25.765657424926758ms\r\nStep 100, loss: 0.21951265633106232, step time: 28.111696243286133ms\r\nStep 101, loss: 0.19409632682800293, step time: 28.4574031829834ms\r\nStep 102, loss: 0.21452262997627258, step time: 61.02633476257324ms\r\nStep 103, loss: 0.22496581077575684, step time: 103.3937931060791ms\r\nStep 104, loss: 0.1928805261850357, step time: 38.521528244018555ms\r\nStep 59, loss: 0.18958066403865814, step time: 59.74078178405762ms\r\nStep 105, loss: 0.20959798991680145, step time: 71.41327857971191ms\r\nStep 106, loss: 0.20392221212387085, step time: 58.904409408569336ms\r\nStep 107, loss: 0.22778305411338806, step time: 42.974233627319336ms\r\nStep 108, loss: 0.21731412410736084, step time: 35.10236740112305ms\r\nStep 109, loss: 0.21080194413661957, step time: 26.32617950439453ms\r\nStep 110, loss: 0.18422192335128784, step time: 41.135311126708984ms\r\nStep 111, loss: 0.1793976128101349, step time: 67.11244583129883ms\r\nStep 112, loss: 0.20595386624336243, step time: 126.61552429199219ms\r\nStep 113, loss: 0.2135477364063263, step time: 153.8553237915039ms\r\nStep 114, loss: 0.20374689996242523, step time: 148.18167686462402ms\r\nStep 115, loss: 0.1921147257089615, step time: 215.1029109954834ms\r\nStep 116, loss: 0.23678253591060638, step time: 142.02618598937988ms\r\nStep 117, loss: 0.21021409332752228, step time: 108.50763320922852ms\r\nStep 118, loss: 0.20275235176086426, step time: 105.28278350830078ms\r\nStep 60, loss: 0.1716698259115219, step time: 52.64878273010254ms\r\nStep 61, loss: 0.1855076253414154, step time: 28.910160064697266ms\r\nStep 62, loss: 0.18269070982933044, step time: 39.945363998413086ms\r\nStep 63, loss: 0.20123043656349182, step time: 69.3967342376709ms\r\nStep 64, loss: 0.20838962495326996, step time: 56.19478225708008ms\r\nStep 65, loss: 0.22178855538368225, step time: 35.32266616821289ms\r\nStep 66, loss: 0.22029900550842285, step time: 30.625581741333008ms\r\nStep 67, loss: 0.197868213057518, step time: 
62.53457069396973ms\r\nStep 68, loss: 0.19733645021915436, step time: 54.038047790527344ms\r\nStep 69, loss: 0.1776614487171173, step time: 32.067298889160156ms\r\nStep 70, loss: 0.2160605639219284, step time: 49.46088790893555ms\r\nStep 71, loss: 0.1945113092660904, step time: 72.10230827331543ms\r\nStep 72, loss: 0.18405216932296753, step time: 50.24862289428711ms\r\nStep 73, loss: 0.20694969594478607, step time: 27.761459350585938ms\r\nStep 74, loss: 0.21449634432792664, step time: 30.109167098999023ms\r\nStep 119, loss: 0.20498108863830566, step time: 95.69001197814941ms\r\nStep 75, loss: 0.20241209864616394, step time: 30.867815017700195ms\r\nStep 76, loss: 0.21559882164001465, step time: 57.38687515258789ms\r\nStep 77, loss: 0.2075277715921402, step time: 31.755685806274414ms\r\nStep 78, loss: 0.2306802123785019, step time: 46.32425308227539ms\r\nStep 79, loss: 0.1760329306125641, step time: 69.32258605957031ms\r\nStep 80, loss: 0.22264865040779114, step time: 53.30657958984375ms\r\nStep 81, loss: 0.22304664552211761, step time: 59.549570083618164ms\r\nStep 82, loss: 0.21281281113624573, step time: 49.6823787689209ms\r\nStep 83, loss: 0.18681474030017853, step time: 92.23079681396484ms\r\nStep 84, loss: 0.20908211171627045, step time: 28.78546714782715ms\r\nStep 85, loss: 0.20652326941490173, step time: 56.301116943359375ms\r\nStep 86, loss: 0.2098509967327118, step time: 47.95026779174805ms\r\nStep 87, loss: 0.1820390224456787, step time: 29.094696044921875ms\r\nStep 88, loss: 0.20359483361244202, step time: 30.136823654174805ms\r\nStep 89, loss: 0.1932521015405655, step time: 55.89604377746582ms\r\nRunning on 8 devices.\r\nStarting training from step 0...\r\nStep 0, loss: 0.3371405005455017, step time: 52516.294956207275ms\r\nStep 1, loss: 0.255323588848114, step time: 29.498577117919922ms\r\nStep 2, loss: 0.12879155576229095, step time: 31.583309173583984ms\r\nStep 3, loss: 0.09622794389724731, step time: 45.11404037475586ms\r\nStep 4, loss: 0.15179984271526337, step time: 99.70951080322266ms\r\nStep 5, loss: 0.14292140305042267, step time: 64.8488998413086ms\r\nStep 6, loss: 0.18607014417648315, step time: 58.94970893859863ms\r\nStep 7, loss: 0.21479065716266632, step time: 35.742998123168945ms\r\nStep 8, loss: 0.18476882576942444, step time: 127.95448303222656ms\r\nStep 9, loss: 0.16492873430252075, step time: 53.32756042480469ms\r\nStep 10, loss: 0.2142234593629837, step time: 33.32924842834473ms\r\nStep 11, loss: 0.17373782396316528, step time: 52.05059051513672ms\r\nStep 12, loss: 0.17216065526008606, step time: 108.40392112731934ms\r\nStep 13, loss: 0.1352560818195343, step time: 31.023263931274414ms\r\nStep 90, loss: 0.19430938363075256, step time: 46.047210693359375ms\r\nStep 91, loss: 0.19726090133190155, step time: 60.45246124267578ms\r\nStep 92, loss: 0.2031475305557251, step time: 51.12648010253906ms\r\nStep 93, loss: 0.1898179054260254, step time: 24.185895919799805ms\r\nStep 94, loss: 0.21448193490505219, step time: 38.77067565917969ms\r\nStep 95, loss: 0.22105661034584045, step time: 27.019739151000977ms\r\nStep 96, loss: 0.2251318246126175, step time: 25.499820709228516ms\r\nStep 97, loss: 0.20732171833515167, step time: 26.53670310974121ms\r\nStep 98, loss: 0.19954998791217804, step time: 26.14140510559082ms\r\nStep 99, loss: 0.2192666381597519, step time: 27.37903594970703ms\r\nStep 100, loss: 0.21951265633106232, step time: 31.137943267822266ms\r\nStep 101, loss: 0.19409632682800293, step time: 38.524627685546875ms\r\nStep 102, loss: 0.21452262997627258, step 
time: 219.29049491882324ms\r\nStep 103, loss: 0.22496581077575684, step time: 57.12771415710449ms\r\nStep 104, loss: 0.1928805261850357, step time: 28.2437801361084ms\r\nStep 14, loss: 0.14637072384357452, step time: 33.185482025146484ms\r\nStep 15, loss: 0.15783251821994781, step time: 87.22734451293945ms\r\nStep 16, loss: 0.19798177480697632, step time: 65.84405899047852ms\r\nStep 17, loss: 0.15507982671260834, step time: 38.477182388305664ms\r\nStep 18, loss: 0.19018206000328064, step time: 37.54854202270508ms\r\nStep 19, loss: 0.13229650259017944, step time: 58.676719665527344ms\r\nStep 20, loss: 0.12048780173063278, step time: 41.80002212524414ms\r\nStep 21, loss: 0.11940276622772217, step time: 56.860923767089844ms\r\nStep 22, loss: 0.10998138040304184, step time: 51.37276649475098ms\r\nStep 23, loss: 0.12868253886699677, step time: 58.492422103881836ms\r\nStep 24, loss: 0.14274726808071136, step time: 44.79551315307617ms\r\nStep 25, loss: 0.1409907042980194, step time: 32.26947784423828ms\r\nStep 26, loss: 0.1367851197719574, step time: 45.91035842895508ms\r\nStep 27, loss: 0.09431852400302887, step time: 55.589914321899414ms\r\nStep 28, loss: 0.13750433921813965, step time: 53.08675765991211ms\r\nStep 105, loss: 0.20959798991680145, step time: 30.185222625732422ms\r\nStep 106, loss: 0.20392221212387085, step time: 51.29194259643555ms\r\nStep 107, loss: 0.22778305411338806, step time: 103.09338569641113ms\r\nStep 108, loss: 0.21731412410736084, step time: 34.5921516418457ms\r\nStep 109, loss: 0.21080194413661957, step time: 26.36885643005371ms\r\nStep 110, loss: 0.18422192335128784, step time: 41.11146926879883ms\r\nStep 111, loss: 0.1793976128101349, step time: 96.13347053527832ms\r\nStep 112, loss: 0.20595386624336243, step time: 43.77627372741699ms\r\nStep 113, loss: 0.2135477364063263, step time: 52.904367446899414ms\r\nStep 114, loss: 0.20374689996242523, step time: 41.5043830871582ms\r\nStep 115, loss: 0.1921147257089615, step time: 44.57426071166992ms\r\nStep 116, loss: 0.23678253591060638, step time: 42.534589767456055ms\r\nStep 117, loss: 0.21021409332752228, step time: 27.748823165893555ms\r\nStep 118, loss: 0.20275235176086426, step time: 31.653404235839844ms\r\nStep 119, loss: 0.20498108863830566, step time: 46.738386154174805ms\r\nStep 29, loss: 0.12246876209974289, step time: 47.69754409790039ms\r\nStep 30, loss: 0.11990787833929062, step time: 44.10958290100098ms\r\nStep 31, loss: 0.08659865707159042, step time: 45.11404037475586ms\r\nStep 32, loss: 0.13262033462524414, step time: 54.20732498168945ms\r\nStep 33, loss: 0.13781970739364624, step time: 50.16040802001953ms\r\nStep 34, loss: 0.1245904490351677, step time: 28.53107452392578ms\r\nStep 35, loss: 0.11198143661022186, step time: 70.61004638671875ms\r\nStep 36, loss: 0.17048567533493042, step time: 90.74187278747559ms\r\nStep 37, loss: 0.14530757069587708, step time: 28.873443603515625ms\r\nStep 38, loss: 0.13687710464000702, step time: 29.035568237304688ms\r\nStep 39, loss: 0.12694677710533142, step time: 28.57804298400879ms\r\nStep 40, loss: 0.16414347290992737, step time: 44.81649398803711ms\r\nStep 41, loss: 0.15038971602916718, step time: 30.112266540527344ms\r\nStep 42, loss: 0.17023690044879913, step time: 26.7486572265625ms\r\nStep 43, loss: 0.1883412003517151, step time: 42.345523834228516ms\r\nStep 120, loss: 0.22381655871868134, step time: 52.500247955322266ms\r\nStep 44, loss: 0.18213486671447754, step time: 25.56586265563965ms\r\nStep 45, loss: 0.17754821479320526, step time: 
23.17214012145996ms\r\nStep 46, loss: 0.17418275773525238, step time: 21.36683464050293ms\r\nStep 47, loss: 0.17311707139015198, step time: 23.41938018798828ms\r\nStep 48, loss: 0.19261178374290466, step time: 29.96659278869629ms\r\nStep 49, loss: 0.18420304358005524, step time: 28.51390838623047ms\r\nStep 50, loss: 0.18866658210754395, step time: 40.764570236206055ms\r\nStep 51, loss: 0.15617214143276215, step time: 129.53901290893555ms\r\nStep 52, loss: 0.16529572010040283, step time: 47.348737716674805ms\r\nStep 53, loss: 0.20970384776592255, step time: 26.288509368896484ms\r\nStep 54, loss: 0.15166610479354858, step time: 56.012630462646484ms\r\nStep 55, loss: 0.1617882400751114, step time: 52.8714656829834ms\r\nStep 56, loss: 0.17322023212909698, step time: 79.35929298400879ms\r\nStep 57, loss: 0.16511975228786469, step time: 50.61173439025879ms\r\nStep 58, loss: 0.2124534249305725, step time: 51.169395446777344ms\r\nRunning on 8 devices.\r\nStarting training from step 0...\r\nStep 0, loss: 0.3371405005455017, step time: 53028.28502655029ms\r\nStep 1, loss: 0.255323588848114, step time: 27.246952056884766ms\r\nStep 2, loss: 0.12879155576229095, step time: 28.832435607910156ms\r\nStep 3, loss: 0.09622794389724731, step time: 61.43498420715332ms\r\nStep 4, loss: 0.15179984271526337, step time: 45.19295692443848ms\r\nStep 5, loss: 0.14292140305042267, step time: 50.03094673156738ms\r\nStep 6, loss: 0.18607014417648315, step time: 47.057151794433594ms\r\nStep 7, loss: 0.21479065716266632, step time: 43.98465156555176ms\r\nStep 8, loss: 0.18476882576942444, step time: 29.95443344116211ms\r\nStep 9, loss: 0.16492873430252075, step time: 62.13831901550293ms\r\nStep 10, loss: 0.2142234593629837, step time: 101.44853591918945ms\r\nStep 11, loss: 0.17373782396316528, step time: 58.85481834411621ms\r\nStep 12, loss: 0.17216065526008606, step time: 43.360233306884766ms\r\nStep 13, loss: 0.1352560818195343, step time: 27.4808406829834ms\r\nStep 59, loss: 0.18958066403865814, step time: 48.624515533447266ms\r\nStep 14, loss: 0.14637072384357452, step time: 55.686235427856445ms\r\nStep 15, loss: 0.15783251821994781, step time: 51.31268501281738ms\r\nStep 16, loss: 0.19798177480697632, step time: 52.910804748535156ms\r\nStep 17, loss: 0.15507982671260834, step time: 54.8861026763916ms\r\nStep 18, loss: 0.19018206000328064, step time: 51.04231834411621ms\r\nStep 19, loss: 0.13229650259017944, step time: 26.72266960144043ms\r\nStep 20, loss: 0.12048780173063278, step time: 26.228904724121094ms\r\nStep 21, loss: 0.11940276622772217, step time: 49.18265342712402ms\r\nStep 22, loss: 0.10998138040304184, step time: 26.80063247680664ms\r\nStep 23, loss: 0.12868253886699677, step time: 37.66131401062012ms\r\nStep 24, loss: 0.14274726808071136, step time: 34.58046913146973ms\r\nStep 25, loss: 0.1409907042980194, step time: 55.090904235839844ms\r\nStep 26, loss: 0.1367851197719574, step time: 33.236026763916016ms\r\nStep 27, loss: 0.09431852400302887, step time: 29.396533966064453ms\r\nStep 28, loss: 0.13750433921813965, step time: 61.36512756347656ms\r\nStep 120, loss: 0.22381655871868134, step time: 138.24748992919922ms\r\nStep 29, loss: 0.12246876209974289, step time: 87.53752708435059ms\r\nStep 30, loss: 0.11990787833929062, step time: 54.12030220031738ms\r\nStep 31, loss: 0.08659865707159042, step time: 32.156944274902344ms\r\nStep 32, loss: 0.13262033462524414, step time: 34.70110893249512ms\r\nStep 33, loss: 0.13781970739364624, step time: 45.1359748840332ms\r\nStep 34, loss: 0.1245904490351677, step 
time: 27.759075164794922ms\r\nStep 35, loss: 0.11198143661022186, step time: 37.909746170043945ms\r\nStep 36, loss: 0.17048567533493042, step time: 38.22970390319824ms\r\nStep 37, loss: 0.14530757069587708, step time: 28.814315795898438ms\r\nStep 38, loss: 0.13687710464000702, step time: 48.04420471191406ms\r\nStep 39, loss: 0.12694677710533142, step time: 31.571388244628906ms\r\nStep 40, loss: 0.16414347290992737, step time: 29.062271118164062ms\r\nStep 41, loss: 0.15038971602916718, step time: 31.668424606323242ms\r\nStep 42, loss: 0.17023690044879913, step time: 29.73031997680664ms\r\nStep 43, loss: 0.1883412003517151, step time: 27.546405792236328ms\r\nStep 60, loss: 0.1716698259115219, step time: 36.88502311706543ms\r\nStep 61, loss: 0.1855076253414154, step time: 29.333829879760742ms\r\nStep 62, loss: 0.18269070982933044, step time: 29.8459529876709ms\r\nStep 63, loss: 0.20123043656349182, step time: 35.55798530578613ms\r\nStep 64, loss: 0.20838962495326996, step time: 49.7889518737793ms\r\nStep 65, loss: 0.22178855538368225, step time: 54.06761169433594ms\r\nStep 66, loss: 0.22029900550842285, step time: 30.269145965576172ms\r\nStep 67, loss: 0.197868213057518, step time: 32.23156929016113ms\r\nStep 68, loss: 0.19733645021915436, step time: 51.85723304748535ms\r\nStep 69, loss: 0.1776614487171173, step time: 54.344892501831055ms\r\nStep 70, loss: 0.2160605639219284, step time: 31.499624252319336ms\r\nStep 71, loss: 0.1945113092660904, step time: 30.701637268066406ms\r\nStep 72, loss: 0.18405216932296753, step time: 41.37825965881348ms\r\nStep 73, loss: 0.20694969594478607, step time: 31.8450927734375ms\r\nStep 74, loss: 0.21449634432792664, step time: 29.878854751586914ms\r\nStep 44, loss: 0.18213486671447754, step time: 26.820659637451172ms\r\nStep 45, loss: 0.17754821479320526, step time: 35.34126281738281ms\r\nStep 46, loss: 0.17418275773525238, step time: 120.29433250427246ms\r\nStep 47, loss: 0.17311707139015198, step time: 97.16343879699707ms\r\nStep 48, loss: 0.19261178374290466, step time: 52.21271514892578ms\r\nStep 49, loss: 0.18420304358005524, step time: 26.07560157775879ms\r\nStep 50, loss: 0.18866658210754395, step time: 51.11098289489746ms\r\nStep 51, loss: 0.15617214143276215, step time: 49.45206642150879ms\r\nStep 52, loss: 0.16529572010040283, step time: 24.677515029907227ms\r\nStep 53, loss: 0.20970384776592255, step time: 35.298824310302734ms\r\nStep 54, loss: 0.15166610479354858, step time: 36.405086517333984ms\r\nStep 55, loss: 0.1617882400751114, step time: 86.84492111206055ms\r\nStep 56, loss: 0.17322023212909698, step time: 49.47233200073242ms\r\nStep 57, loss: 0.16511975228786469, step time: 28.93662452697754ms\r\nStep 58, loss: 0.2124534249305725, step time: 59.62014198303223ms\r\nStep 75, loss: 0.20241209864616394, step time: 41.797637939453125ms\r\nStep 76, loss: 0.21559882164001465, step time: 45.41206359863281ms\r\nStep 77, loss: 0.2075277715921402, step time: 27.122974395751953ms\r\nStep 78, loss: 0.2306802123785019, step time: 30.321598052978516ms\r\nStep 79, loss: 0.1760329306125641, step time: 29.668569564819336ms\r\nStep 80, loss: 0.22264865040779114, step time: 43.4720516204834ms\r\nStep 81, loss: 0.22304664552211761, step time: 28.931856155395508ms\r\nStep 82, loss: 0.21281281113624573, step time: 47.812461853027344ms\r\nStep 83, loss: 0.18681474030017853, step time: 32.837629318237305ms\r\nStep 84, loss: 0.20908211171627045, step time: 30.646562576293945ms\r\nStep 85, loss: 0.20652326941490173, step time: 61.32364273071289ms\r\nStep 86, loss: 
0.2098509967327118, step time: 47.19400405883789ms\r\nStep 87, loss: 0.1820390224456787, step time: 59.76414680480957ms\r\nStep 88, loss: 0.20359483361244202, step time: 52.96921730041504ms\r\nStep 89, loss: 0.1932521015405655, step time: 55.40633201599121ms\r\nStep 59, loss: 0.18958066403865814, step time: 61.13100051879883ms\r\nStep 90, loss: 0.19430938363075256, step time: 51.39517784118652ms\r\nStep 91, loss: 0.19726090133190155, step time: 75.62589645385742ms\r\nStep 92, loss: 0.2031475305557251, step time: 43.94125938415527ms\r\nStep 93, loss: 0.1898179054260254, step time: 27.161121368408203ms\r\nStep 94, loss: 0.21448193490505219, step time: 44.36159133911133ms\r\nStep 95, loss: 0.22105661034584045, step time: 39.285898208618164ms\r\nStep 96, loss: 0.2251318246126175, step time: 27.354717254638672ms\r\nStep 97, loss: 0.20732171833515167, step time: 24.631977081298828ms\r\nStep 98, loss: 0.19954998791217804, step time: 24.04332160949707ms\r\nStep 99, loss: 0.2192666381597519, step time: 25.08854866027832ms\r\nStep 100, loss: 0.21951265633106232, step time: 30.033588409423828ms\r\nStep 101, loss: 0.19409632682800293, step time: 27.58193016052246ms\r\nStep 102, loss: 0.21452262997627258, step time: 32.93299674987793ms\r\nStep 103, loss: 0.22496581077575684, step time: 31.063079833984375ms\r\nStep 104, loss: 0.1928805261850357, step time: 25.774240493774414ms\r\nStep 60, loss: 0.1716698259115219, step time: 73.67444038391113ms\r\nStep 61, loss: 0.1855076253414154, step time: 70.19901275634766ms\r\nStep 62, loss: 0.18269070982933044, step time: 75.66976547241211ms\r\nStep 63, loss: 0.20123043656349182, step time: 62.78848648071289ms\r\nStep 64, loss: 0.20838962495326996, step time: 88.17028999328613ms\r\nStep 65, loss: 0.22178855538368225, step time: 54.38709259033203ms\r\nStep 66, loss: 0.22029900550842285, step time: 39.02935981750488ms\r\nStep 67, loss: 0.197868213057518, step time: 41.93544387817383ms\r\nStep 68, loss: 0.19733645021915436, step time: 36.78631782531738ms\r\nStep 69, loss: 0.1776614487171173, step time: 34.94715690612793ms\r\nStep 70, loss: 0.2160605639219284, step time: 70.66512107849121ms\r\nStep 71, loss: 0.1945113092660904, step time: 26.46183967590332ms\r\nStep 72, loss: 0.18405216932296753, step time: 45.94826698303223ms\r\nStep 73, loss: 0.20694969594478607, step time: 44.318437576293945ms\r\nStep 74, loss: 0.21449634432792664, step time: 44.48127746582031ms\r\nStep 105, loss: 0.20959798991680145, step time: 33.83016586303711ms\r\nStep 106, loss: 0.20392221212387085, step time: 130.88631629943848ms\r\nStep 107, loss: 0.22778305411338806, step time: 193.06254386901855ms\r\nStep 108, loss: 0.21731412410736084, step time: 140.4585838317871ms\r\nStep 109, loss: 0.21080194413661957, step time: 45.36747932434082ms\r\nStep 110, loss: 0.18422192335128784, step time: 127.17795372009277ms\r\nStep 111, loss: 0.1793976128101349, step time: 114.62998390197754ms\r\nStep 112, loss: 0.20595386624336243, step time: 134.05323028564453ms\r\nStep 113, loss: 0.2135477364063263, step time: 29.724836349487305ms\r\nStep 114, loss: 0.20374689996242523, step time: 27.24623680114746ms\r\nStep 115, loss: 0.1921147257089615, step time: 27.760028839111328ms\r\nStep 116, loss: 0.23678253591060638, step time: 54.18562889099121ms\r\nStep 117, loss: 0.21021409332752228, step time: 30.901193618774414ms\r\nStep 118, loss: 0.20275235176086426, step time: 43.82157325744629ms\r\nStep 75, loss: 0.20241209864616394, step time: 41.80717468261719ms\r\nStep 76, loss: 0.21559882164001465, step time: 
40.53211212158203ms\r\nStep 77, loss: 0.2075277715921402, step time: 56.21767044067383ms\r\nStep 78, loss: 0.2306802123785019, step time: 45.02153396606445ms\r\nStep 79, loss: 0.1760329306125641, step time: 52.60109901428223ms\r\nStep 80, loss: 0.22264865040779114, step time: 37.456512451171875ms\r\nStep 81, loss: 0.22304664552211761, step time: 27.57883071899414ms\r\nStep 82, loss: 0.21281281113624573, step time: 26.538848876953125ms\r\nStep 83, loss: 0.18681474030017853, step time: 44.46768760681152ms\r\nStep 84, loss: 0.20908211171627045, step time: 32.85861015319824ms\r\nStep 85, loss: 0.20652326941490173, step time: 32.738685607910156ms\r\nStep 86, loss: 0.2098509967327118, step time: 33.5383415222168ms\r\nStep 87, loss: 0.1820390224456787, step time: 54.18848991394043ms\r\nStep 88, loss: 0.20359483361244202, step time: 42.844295501708984ms\r\nStep 89, loss: 0.1932521015405655, step time: 27.765512466430664ms\r\nStep 119, loss: 0.20498108863830566, step time: 60.201406478881836ms\r\nStep 90, loss: 0.19430938363075256, step time: 26.015281677246094ms\r\nStep 91, loss: 0.19726090133190155, step time: 24.197101593017578ms\r\nStep 92, loss: 0.2031475305557251, step time: 23.342132568359375ms\r\nStep 93, loss: 0.1898179054260254, step time: 21.82793617248535ms\r\nStep 94, loss: 0.21448193490505219, step time: 24.35898780822754ms\r\nStep 95, loss: 0.22105661034584045, step time: 42.65737533569336ms\r\nStep 96, loss: 0.2251318246126175, step time: 49.17645454406738ms\r\nStep 97, loss: 0.20732171833515167, step time: 55.63211441040039ms\r\nStep 98, loss: 0.19954998791217804, step time: 41.87583923339844ms\r\nStep 99, loss: 0.2192666381597519, step time: 88.16409111022949ms\r\nStep 100, loss: 0.21951265633106232, step time: 29.61111068725586ms\r\nStep 101, loss: 0.19409632682800293, step time: 37.24956512451172ms\r\nStep 102, loss: 0.21452262997627258, step time: 42.57607460021973ms\r\nStep 103, loss: 0.22496581077575684, step time: 36.518096923828125ms\r\nStep 104, loss: 0.1928805261850357, step time: 34.069061279296875ms\r\nStep 120, loss: 0.22381655871868134, step time: 46.381473541259766ms\r\nStep 105, loss: 0.20959798991680145, step time: 35.076141357421875ms\r\nStep 106, loss: 0.20392221212387085, step time: 34.76095199584961ms\r\nStep 107, loss: 0.22778305411338806, step time: 51.71370506286621ms\r\nStep 108, loss: 0.21731412410736084, step time: 40.66348075866699ms\r\nStep 109, loss: 0.21080194413661957, step time: 32.90081024169922ms\r\nStep 110, loss: 0.18422192335128784, step time: 30.69925308227539ms\r\nStep 111, loss: 0.1793976128101349, step time: 56.10513687133789ms\r\nStep 112, loss: 0.20595386624336243, step time: 76.17712020874023ms\r\nStep 113, loss: 0.2135477364063263, step time: 45.20893096923828ms\r\nStep 114, loss: 0.20374689996242523, step time: 54.15463447570801ms\r\nStep 115, loss: 0.1921147257089615, step time: 43.985843658447266ms\r\nStep 116, loss: 0.23678253591060638, step time: 27.9083251953125ms\r\nStep 117, loss: 0.21021409332752228, step time: 44.71111297607422ms\r\nStep 118, loss: 0.20275235176086426, step time: 51.230430603027344ms\r\nStep 119, loss: 0.20498108863830566, step time: 61.59043312072754ms\r\nStep 120, loss: 0.22381655871868134, step time: 41.65816307067871ms\r\nRunning on 8 devices.\r\nStarting training from step 0...\r\nStep 0, loss: 0.3371405005455017, step time: 51642.547369003296ms\r\nStep 1, loss: 0.255323588848114, step time: 25.841712951660156ms\r\nStep 2, loss: 0.12879155576229095, step time: 39.00933265686035ms\r\nStep 3, loss: 
0.09622794389724731, step time: 68.83883476257324ms\r\nStep 4, loss: 0.15179984271526337, step time: 126.69491767883301ms\r\nStep 5, loss: 0.14292140305042267, step time: 126.18756294250488ms\r\nStep 6, loss: 0.18607014417648315, step time: 87.85438537597656ms\r\nStep 7, loss: 0.21479065716266632, step time: 60.31203269958496ms\r\nStep 8, loss: 0.18476882576942444, step time: 77.09097862243652ms\r\nStep 9, loss: 0.16492873430252075, step time: 59.70335006713867ms\r\nStep 10, loss: 0.2142234593629837, step time: 81.33578300476074ms\r\nStep 11, loss: 0.17373782396316528, step time: 137.26162910461426ms\r\nStep 12, loss: 0.17216065526008606, step time: 65.82283973693848ms\r\nStep 13, loss: 0.1352560818195343, step time: 70.1441764831543ms\r\nStep 14, loss: 0.14637072384357452, step time: 225.71396827697754ms\r\nStep 15, loss: 0.15783251821994781, step time: 168.93506050109863ms\r\nStep 16, loss: 0.19798177480697632, step time: 122.1609115600586ms\r\nStep 17, loss: 0.15507982671260834, step time: 146.85893058776855ms\r\nStep 18, loss: 0.19018206000328064, step time: 62.482595443725586ms\r\nStep 19, loss: 0.13229650259017944, step time: 54.656982421875ms\r\nStep 20, loss: 0.12048780173063278, step time: 102.00023651123047ms\r\nStep 21, loss: 0.11940276622772217, step time: 107.37204551696777ms\r\nStep 22, loss: 0.10998138040304184, step time: 61.62619590759277ms\r\nStep 23, loss: 0.12868253886699677, step time: 102.83279418945312ms\r\nStep 24, loss: 0.14274726808071136, step time: 90.97146987915039ms\r\nStep 25, loss: 0.1409907042980194, step time: 100.00014305114746ms\r\nStep 26, loss: 0.1367851197719574, step time: 108.58440399169922ms\r\nStep 27, loss: 0.09431852400302887, step time: 102.95295715332031ms\r\nStep 28, loss: 0.13750433921813965, step time: 139.05739784240723ms\r\nStep 29, loss: 0.12246876209974289, step time: 62.760353088378906ms\r\nStep 30, loss: 0.11990787833929062, step time: 103.25217247009277ms\r\nStep 31, loss: 0.08659865707159042, step time: 82.19242095947266ms\r\nStep 32, loss: 0.13262033462524414, step time: 82.87549018859863ms\r\nStep 33, loss: 0.13781970739364624, step time: 53.40313911437988ms\r\nStep 34, loss: 0.1245904490351677, step time: 104.13384437561035ms\r\nStep 35, loss: 0.11198143661022186, step time: 169.01373863220215ms\r\nStep 36, loss: 0.17048567533493042, step time: 118.84069442749023ms\r\nStep 37, loss: 0.14530757069587708, step time: 125.23841857910156ms\r\nStep 38, loss: 0.13687710464000702, step time: 108.2761287689209ms\r\nStep 39, loss: 0.12694677710533142, step time: 82.98063278198242ms\r\nStep 40, loss: 0.16414347290992737, step time: 98.89674186706543ms\r\nStep 41, loss: 0.15038971602916718, step time: 79.86688613891602ms\r\nStep 42, loss: 0.17023690044879913, step time: 31.224966049194336ms\r\nStep 43, loss: 0.1883412003517151, step time: 28.51581573486328ms\r\nStep 44, loss: 0.18213486671447754, step time: 25.430679321289062ms\r\nStep 45, loss: 0.17754821479320526, step time: 27.705907821655273ms\r\nStep 46, loss: 0.17418275773525238, step time: 31.342267990112305ms\r\nStep 47, loss: 0.17311707139015198, step time: 188.3220672607422ms\r\nStep 48, loss: 0.19261178374290466, step time: 191.71905517578125ms\r\nStep 49, loss: 0.18420304358005524, step time: 56.40053749084473ms\r\nStep 50, loss: 0.18866658210754395, step time: 43.764352798461914ms\r\nStep 51, loss: 0.15617214143276215, step time: 27.530908584594727ms\r\nStep 52, loss: 0.16529572010040283, step time: 44.68083381652832ms\r\nStep 53, loss: 0.20970384776592255, step time: 
38.340091705322266ms\r\nStep 54, loss: 0.15166610479354858, step time: 52.76918411254883ms\r\nStep 55, loss: 0.1617882400751114, step time: 49.05128479003906ms\r\nStep 56, loss: 0.17322023212909698, step time: 65.67001342773438ms\r\nStep 57, loss: 0.16511975228786469, step time: 47.79410362243652ms\r\nStep 58, loss: 0.2124534249305725, step time: 68.22419166564941ms\r\nStep 59, loss: 0.18958066403865814, step time: 54.162025451660156ms\r\nStep 60, loss: 0.1716698259115219, step time: 59.29756164550781ms\r\nStep 61, loss: 0.1855076253414154, step time: 43.66493225097656ms\r\nStep 62, loss: 0.18269070982933044, step time: 49.80754852294922ms\r\nStep 63, loss: 0.20123043656349182, step time: 94.76399421691895ms\r\nStep 64, loss: 0.20838962495326996, step time: 62.59942054748535ms\r\nStep 65, loss: 0.22178855538368225, step time: 70.01996040344238ms\r\nStep 66, loss: 0.22029900550842285, step time: 83.98246765136719ms\r\nStep 67, loss: 0.197868213057518, step time: 142.14158058166504ms\r\nStep 68, loss: 0.19733645021915436, step time: 127.54511833190918ms\r\nStep 69, loss: 0.1776614487171173, step time: 132.948637008667ms\r\nStep 70, loss: 0.2160605639219284, step time: 64.66078758239746ms\r\nStep 71, loss: 0.1945113092660904, step time: 72.37744331359863ms\r\nStep 72, loss: 0.18405216932296753, step time: 52.03843116760254ms\r\nStep 73, loss: 0.20694969594478607, step time: 68.08018684387207ms\r\nStep 74, loss: 0.21449634432792664, step time: 109.00354385375977ms\r\nStep 75, loss: 0.20241209864616394, step time: 86.79723739624023ms\r\nStep 76, loss: 0.21559882164001465, step time: 119.61078643798828ms\r\nStep 77, loss: 0.2075277715921402, step time: 151.2594223022461ms\r\nStep 78, loss: 0.2306802123785019, step time: 260.0862979888916ms\r\nStep 79, loss: 0.1760329306125641, step time: 204.90670204162598ms\r\nStep 80, loss: 0.22264865040779114, step time: 80.40165901184082ms\r\nStep 81, loss: 0.22304664552211761, step time: 216.0933017730713ms\r\nStep 82, loss: 0.21281281113624573, step time: 61.33842468261719ms\r\nStep 83, loss: 0.18681474030017853, step time: 127.87270545959473ms\r\nStep 84, loss: 0.20908211171627045, step time: 191.57075881958008ms\r\nStep 85, loss: 0.20652326941490173, step time: 104.31194305419922ms\r\nStep 86, loss: 0.2098509967327118, step time: 76.58529281616211ms\r\nStep 87, loss: 0.1820390224456787, step time: 95.35717964172363ms\r\nStep 88, loss: 0.20359483361244202, step time: 62.973737716674805ms\r\nStep 89, loss: 0.1932521015405655, step time: 44.96455192565918ms\r\nStep 90, loss: 0.19430938363075256, step time: 92.00096130371094ms\r\nStep 91, loss: 0.19726090133190155, step time: 45.13072967529297ms\r\nStep 92, loss: 0.2031475305557251, step time: 66.39862060546875ms\r\nStep 93, loss: 0.1898179054260254, step time: 22.59683609008789ms\r\nStep 94, loss: 0.21448193490505219, step time: 24.617910385131836ms\r\nStep 95, loss: 0.22105661034584045, step time: 26.473045349121094ms\r\nStep 96, loss: 0.2251318246126175, step time: 36.27133369445801ms\r\nStep 97, loss: 0.20732171833515167, step time: 49.12161827087402ms\r\nStep 98, loss: 0.19954998791217804, step time: 26.018381118774414ms\r\nStep 99, loss: 0.2192666381597519, step time: 28.905630111694336ms\r\nStep 100, loss: 0.21951265633106232, step time: 224.8847484588623ms\r\nStep 101, loss: 0.19409632682800293, step time: 101.79352760314941ms\r\nStep 102, loss: 0.21452262997627258, step time: 185.40477752685547ms\r\nStep 103, loss: 0.22496581077575684, step time: 33.27155113220215ms\r\nStep 104, loss: 
0.1928805261850357, step time: 54.826974868774414ms\r\nStep 105, loss: 0.20959798991680145, step time: 34.749746322631836ms\r\nStep 106, loss: 0.20392221212387085, step time: 133.17251205444336ms\r\nStep 107, loss: 0.22778305411338806, step time: 71.79403305053711ms\r\nStep 108, loss: 0.21731412410736084, step time: 34.25908088684082ms\r\nStep 109, loss: 0.21080194413661957, step time: 42.93489456176758ms\r\nStep 110, loss: 0.18422192335128784, step time: 48.534393310546875ms\r\nStep 111, loss: 0.1793976128101349, step time: 88.92583847045898ms\r\nStep 112, loss: 0.20595386624336243, step time: 74.63788986206055ms\r\nStep 113, loss: 0.2135477364063263, step time: 50.9333610534668ms\r\nStep 114, loss: 0.20374689996242523, step time: 57.13701248168945ms\r\nStep 115, loss: 0.1921147257089615, step time: 66.87617301940918ms\r\nStep 116, loss: 0.23678253591060638, step time: 74.8140811920166ms\r\nStep 117, loss: 0.21021409332752228, step time: 86.90643310546875ms\r\nStep 118, loss: 0.20275235176086426, step time: 116.88828468322754ms\r\nStep 119, loss: 0.20498108863830566, step time: 67.68202781677246ms\r\nStep 120, loss: 0.22381655871868134, step time: 76.69329643249512ms\r\n",,terminal_output +3264,4039091,"TERMINAL",0,0,"Step 120, loss: 0.22381655871868134, step time: 129.3032169342041ms\r\nStep 121, loss: 0.20784707367420197, step time: 167.30046272277832ms\r\nStep 122, loss: 0.2241440862417221, step time: 87.83411979675293ms\r\nStep 123, loss: 0.1952410787343979, step time: 43.64609718322754ms\r\nStep 124, loss: 0.2559303045272827, step time: 138.716459274292ms\r\nStep 125, loss: 0.207289919257164, step time: 80.99937438964844ms\r\nStep 126, loss: 0.2382335513830185, step time: 85.06131172180176ms\r\nStep 127, loss: 0.20085032284259796, step time: 153.86080741882324ms\r\nStep 128, loss: 0.2202863097190857, step time: 504.27818298339844ms\r\nStep 129, loss: 0.23265503346920013, step time: 215.0723934173584ms\r\nStep 130, loss: 0.20963910222053528, step time: 128.2632350921631ms\r\nStep 131, loss: 0.2075287252664566, step time: 147.27282524108887ms\r\nStep 132, loss: 0.21575258672237396, step time: 213.45281600952148ms\r\nStep 133, loss: 0.19945044815540314, step time: 229.13098335266113ms\r\nStep 134, loss: 0.18294720351696014, step time: 180.57847023010254ms\r\nStep 135, loss: 0.22029756009578705, step time: 154.5584201812744ms\r\nStep 136, loss: 0.2230755239725113, step time: 156.4621925354004ms\r\nStep 137, loss: 0.22665250301361084, step time: 207.7465057373047ms\r\nStep 138, loss: 0.1861869841814041, step time: 193.1741237640381ms\r\nStep 139, loss: 0.2156236320734024, step time: 198.91715049743652ms\r\nStep 140, loss: 0.207559272646904, step time: 189.64862823486328ms\r\nStep 141, loss: 0.23966802656650543, step time: 178.77745628356934ms\r\nStep 142, loss: 0.21649032831192017, step time: 139.32251930236816ms\r\nStep 143, loss: 0.21076525747776031, step time: 196.40827178955078ms\r\nStep 144, loss: 0.1896040290594101, step time: 179.24928665161133ms\r\nStep 145, loss: 0.22665216028690338, step time: 256.61182403564453ms\r\nStep 146, loss: 0.19968174397945404, step time: 355.593204498291ms\r\nStep 147, loss: 0.22004956007003784, step time: 100.0666618347168ms\r\nStep 148, loss: 0.21675361692905426, step time: 25.713682174682617ms\r\nStep 149, loss: 0.20051825046539307, step time: 26.293039321899414ms\r\nStep 150, loss: 0.20946861803531647, step time: 29.608488082885742ms\r\nStep 151, loss: 0.18936635553836823, step time: 25.643587112426758ms\r\nStep 152, loss: 0.1982470452785492, step 
time: 27.6944637298584ms\r\nStep 153, loss: 0.18866270780563354, step time: 43.22957992553711ms\r\nStep 154, loss: 0.2204374074935913, step time: 272.92895317077637ms\r\nStep 155, loss: 0.23175925016403198, step time: 220.11089324951172ms\r\nStep 156, loss: 0.20699328184127808, step time: 257.2143077850342ms\r\nStep 157, loss: 0.2463058978319168, step time: 78.64522933959961ms\r\nStep 158, loss: 0.19663086533546448, step time: 122.34830856323242ms\r\nStep 159, loss: 0.24429723620414734, step time: 92.85449981689453ms\r\nStep 160, loss: 0.18582962453365326, step time: 67.11101531982422ms\r\nStep 161, loss: 0.2051135003566742, step time: 110.04996299743652ms\r\nStep 162, loss: 0.21318994462490082, step time: 163.6826992034912ms\r\nStep 163, loss: 0.2090427726507187, step time: 202.78692245483398ms\r\nStep 164, loss: 0.18675930798053741, step time: 213.11330795288086ms\r\nStep 165, loss: 0.22770842909812927, step time: 327.87060737609863ms\r\nStep 166, loss: 0.2338206171989441, step time: 332.09800720214844ms\r\nStep 167, loss: 0.19510532915592194, step time: 86.13371849060059ms\r\nStep 168, loss: 0.2266233265399933, step time: 64.67080116271973ms\r\nStep 169, loss: 0.26485535502433777, step time: 164.7050380706787ms\r\nStep 170, loss: 0.23013830184936523, step time: 258.85629653930664ms\r\nStep 171, loss: 0.23124182224273682, step time: 342.19908714294434ms\r\nStep 172, loss: 0.211821049451828, step time: 113.75927925109863ms\r\nStep 173, loss: 0.22709602117538452, step time: 197.6017951965332ms\r\nStep 174, loss: 0.22190026938915253, step time: 267.32397079467773ms\r\nStep 175, loss: 0.2170114368200302, step time: 147.81641960144043ms\r\nStep 176, loss: 0.2129228711128235, step time: 109.86328125ms\r\nStep 177, loss: 0.2269134819507599, step time: 214.95890617370605ms\r\nStep 178, loss: 0.20729117095470428, step time: 69.30255889892578ms\r\nStep 179, loss: 0.20655518770217896, step time: 154.82044219970703ms\r\nStep 180, loss: 0.23034240305423737, step time: 212.17107772827148ms\r\nStep 181, loss: 0.24809721112251282, step time: 146.72017097473145ms\r\nStep 182, loss: 0.215785413980484, step time: 176.96428298950195ms\r\nStep 183, loss: 0.22301307320594788, step time: 232.2392463684082ms\r\nStep 184, loss: 0.2594461441040039, step time: 135.0243091583252ms\r\nStep 185, loss: 0.22805005311965942, step time: 227.0803451538086ms\r\nStep 186, loss: 0.2074311226606369, step time: 223.34980964660645ms\r\nStep 187, loss: 0.22991454601287842, step time: 185.7900619506836ms\r\nStep 188, loss: 0.22197772562503815, step time: 98.64258766174316ms\r\nStep 189, loss: 0.22990094125270844, step time: 84.7616195678711ms\r\nStep 190, loss: 0.22791005671024323, step time: 108.65211486816406ms\r\nStep 191, loss: 0.2311677485704422, step time: 77.70919799804688ms\r\nStep 192, loss: 0.20709951221942902, step time: 38.44952583312988ms\r\nStep 193, loss: 0.24093765020370483, step time: 319.06914710998535ms\r\nStep 194, loss: 0.2499508112668991, step time: 288.9516353607178ms\r\nStep 195, loss: 0.20580357313156128, step time: 376.8007755279541ms\r\nStep 196, loss: 0.2215675711631775, step time: 215.26360511779785ms\r\nStep 197, loss: 0.22246551513671875, step time: 200.04534721374512ms\r\nStep 198, loss: 0.2345874160528183, step time: 177.89292335510254ms\r\nStep 199, loss: 0.23320308327674866, step time: 157.67145156860352ms\r\nStep 200, loss: 0.2333080768585205, step time: 183.23445320129395ms\r\nStep 201, loss: 0.22704865038394928, step time: 33.2484245300293ms\r\nStep 202, loss: 0.23823073506355286, step time: 
37.11199760437012ms\r\nStep 203, loss: 0.20774541795253754, step time: 114.77351188659668ms\r\nStep 204, loss: 0.24040691554546356, step time: 203.22465896606445ms\r\nStep 205, loss: 0.24088256061077118, step time: 203.324556350708ms\r\nStep 206, loss: 0.22486215829849243, step time: 164.764404296875ms\r\nStep 207, loss: 0.21422669291496277, step time: 109.88688468933105ms\r\nStep 208, loss: 0.24419456720352173, step time: 51.91206932067871ms\r\nStep 209, loss: 0.21322758495807648, step time: 206.36725425720215ms\r\nStep 210, loss: 0.253510981798172, step time: 114.79735374450684ms\r\nStep 211, loss: 0.23119083046913147, step time: 120.41950225830078ms\r\nStep 212, loss: 0.232627272605896, step time: 143.01252365112305ms\r\nStep 213, loss: 0.2389298677444458, step time: 172.90759086608887ms\r\nStep 214, loss: 0.23226134479045868, step time: 120.40519714355469ms\r\nStep 215, loss: 0.22713610529899597, step time: 122.04313278198242ms\r\nStep 216, loss: 0.24775546789169312, step time: 247.56264686584473ms\r\nStep 217, loss: 0.23582950234413147, step time: 254.4562816619873ms\r\nStep 218, loss: 0.2579847574234009, step time: 182.5861930847168ms\r\nStep 219, loss: 0.2372114062309265, step time: 226.75299644470215ms\r\nStep 220, loss: 0.23433665931224823, step time: 298.5866069793701ms\r\nStep 221, loss: 0.2527056634426117, step time: 519.660234451294ms\r\nStep 222, loss: 0.22210896015167236, step time: 190.14787673950195ms\r\nStep 223, loss: 0.24414469301700592, step time: 290.1790142059326ms\r\nStep 224, loss: 0.2504357099533081, step time: 292.23132133483887ms\r\nStep 225, loss: 0.22071973979473114, step time: 246.69933319091797ms\r\nStep 226, loss: 0.24137301743030548, step time: 162.8255844116211ms\r\nStep 227, loss: 0.24164323508739471, step time: 206.7282199859619ms\r\nStep 228, loss: 0.2205706238746643, step time: 99.73573684692383ms\r\nStep 229, loss: 0.2630937695503235, step time: 201.32160186767578ms\r\nStep 230, loss: 0.25513118505477905, step time: 111.70125007629395ms\r\nStep 231, loss: 0.23567938804626465, step time: 174.00622367858887ms\r\nStep 232, loss: 0.2422901839017868, step time: 198.14658164978027ms\r\nStep 233, loss: 0.24106791615486145, step time: 190.96994400024414ms\r\nStep 234, loss: 0.21551364660263062, step time: 186.90729141235352ms\r\nStep 235, loss: 0.22949783504009247, step time: 132.7970027923584ms\r\nStep 236, loss: 0.23463501036167145, step time: 230.26585578918457ms\r\nStep 237, loss: 0.21839597821235657, step time: 208.24718475341797ms\r\nStep 238, loss: 0.2149261236190796, step time: 219.19775009155273ms\r\nStep 239, loss: 0.22921283543109894, step time: 160.5532169342041ms\r\nStep 120, loss: 0.22381655871868134, step time: 76.36213302612305ms\r\nStep 121, loss: 0.20784707367420197, step time: 122.8780746459961ms\r\nStep 122, loss: 0.2241440862417221, step time: 144.0255641937256ms\r\nStep 123, loss: 0.1952410787343979, step time: 58.14099311828613ms\r\nStep 124, loss: 0.2559303045272827, step time: 166.28432273864746ms\r\nStep 125, loss: 0.207289919257164, step time: 152.74500846862793ms\r\nStep 126, loss: 0.2382335513830185, step time: 113.1443977355957ms\r\nStep 127, loss: 0.20085032284259796, step time: 111.89818382263184ms\r\nStep 128, loss: 0.2202863097190857, step time: 153.2421112060547ms\r\nStep 129, loss: 0.23265503346920013, step time: 141.79468154907227ms\r\nStep 130, loss: 0.20963910222053528, step time: 81.19416236877441ms\r\nStep 131, loss: 0.2075287252664566, step time: 54.84819412231445ms\r\nStep 132, loss: 0.21575258672237396, step time: 
118.96228790283203ms\r\nStep 133, loss: 0.19945044815540314, step time: 148.2081413269043ms\r\nStep 134, loss: 0.18294720351696014, step time: 336.9591236114502ms\r\nStep 135, loss: 0.22029756009578705, step time: 138.4294033050537ms\r\nStep 136, loss: 0.2230755239725113, step time: 218.49822998046875ms\r\nStep 137, loss: 0.22665250301361084, step time: 151.46398544311523ms\r\nStep 138, loss: 0.1861869841814041, step time: 184.13686752319336ms\r\nStep 139, loss: 0.2156236320734024, step time: 83.88710021972656ms\r\nStep 140, loss: 0.207559272646904, step time: 92.5297737121582ms\r\nStep 141, loss: 0.23966802656650543, step time: 134.6607208251953ms\r\nStep 142, loss: 0.21649032831192017, step time: 80.20877838134766ms\r\nStep 143, loss: 0.21076525747776031, step time: 51.94258689880371ms\r\nStep 144, loss: 0.1896040290594101, step time: 28.685808181762695ms\r\nStep 145, loss: 0.22665216028690338, step time: 24.199247360229492ms\r\nStep 146, loss: 0.19968174397945404, step time: 26.88288688659668ms\r\nStep 147, loss: 0.22004956007003784, step time: 27.04000473022461ms\r\nStep 148, loss: 0.21675361692905426, step time: 27.15754508972168ms\r\nStep 149, loss: 0.20051825046539307, step time: 49.578189849853516ms\r\nStep 150, loss: 0.20946861803531647, step time: 155.1671028137207ms\r\nStep 151, loss: 0.18936635553836823, step time: 194.92292404174805ms\r\nStep 152, loss: 0.1982470452785492, step time: 61.90848350524902ms\r\nStep 153, loss: 0.18866270780563354, step time: 120.45621871948242ms\r\nStep 154, loss: 0.2204374074935913, step time: 176.5906810760498ms\r\nStep 155, loss: 0.23175925016403198, step time: 138.09490203857422ms\r\nStep 156, loss: 0.20699328184127808, step time: 115.90194702148438ms\r\nStep 157, loss: 0.2463058978319168, step time: 262.897253036499ms\r\nStep 158, loss: 0.19663086533546448, step time: 293.872594833374ms\r\nStep 159, loss: 0.24429723620414734, step time: 420.4714298248291ms\r\nStep 160, loss: 0.18582962453365326, step time: 249.29189682006836ms\r\nStep 161, loss: 0.2051135003566742, step time: 245.84126472473145ms\r\nStep 162, loss: 0.21318994462490082, step time: 188.60149383544922ms\r\nStep 163, loss: 0.2090427726507187, step time: 225.48985481262207ms\r\nStep 164, loss: 0.18675930798053741, step time: 261.6844177246094ms\r\nStep 165, loss: 0.22770842909812927, step time: 102.35762596130371ms\r\nStep 166, loss: 0.2338206171989441, step time: 38.07497024536133ms\r\nStep 167, loss: 0.19510532915592194, step time: 108.3376407623291ms\r\nStep 168, loss: 0.2266233265399933, step time: 81.92920684814453ms\r\nStep 169, loss: 0.26485535502433777, step time: 73.03643226623535ms\r\nStep 170, loss: 0.23013830184936523, step time: 40.74549674987793ms\r\nStep 171, loss: 0.23124182224273682, step time: 78.40347290039062ms\r\nStep 172, loss: 0.211821049451828, step time: 55.925607681274414ms\r\nStep 173, loss: 0.22709602117538452, step time: 82.40270614624023ms\r\nStep 174, loss: 0.22190026938915253, step time: 34.934043884277344ms\r\nStep 175, loss: 0.2170114368200302, step time: 61.347246170043945ms\r\nStep 176, loss: 0.2129228711128235, step time: 70.96290588378906ms\r\nStep 177, loss: 0.2269134819507599, step time: 171.38099670410156ms\r\nStep 178, loss: 0.20729117095470428, step time: 131.12783432006836ms\r\nStep 179, loss: 0.20655518770217896, step time: 135.0111961364746ms\r\nStep 180, loss: 0.23034240305423737, step time: 54.74090576171875ms\r\nStep 181, loss: 0.24809721112251282, step time: 202.80885696411133ms\r\nStep 182, loss: 0.215785413980484, step time: 
159.91497039794922ms\r\nStep 183, loss: 0.22301307320594788, step time: 126.20139122009277ms\r\nStep 184, loss: 0.2594461441040039, step time: 85.02483367919922ms\r\nStep 185, loss: 0.22805005311965942, step time: 114.06326293945312ms\r\nStep 186, loss: 0.2074311226606369, step time: 169.34776306152344ms\r\nStep 187, loss: 0.22991454601287842, step time: 214.33615684509277ms\r\nStep 188, loss: 0.22197772562503815, step time: 122.04861640930176ms\r\nStep 189, loss: 0.22990094125270844, step time: 117.77901649475098ms\r\nStep 190, loss: 0.22791005671024323, step time: 98.42085838317871ms\r\nStep 191, loss: 0.2311677485704422, step time: 57.59930610656738ms\r\nStep 192, loss: 0.20709951221942902, step time: 74.59211349487305ms\r\nStep 193, loss: 0.24093765020370483, step time: 53.646087646484375ms\r\nStep 194, loss: 0.2499508112668991, step time: 50.74715614318848ms\r\nStep 195, loss: 0.20580357313156128, step time: 123.27456474304199ms\r\nStep 196, loss: 0.2215675711631775, step time: 34.58976745605469ms\r\nStep 197, loss: 0.22246551513671875, step time: 72.41320610046387ms\r\nStep 198, loss: 0.2345874160528183, step time: 196.7008113861084ms\r\nStep 199, loss: 0.23320308327674866, step time: 62.30974197387695ms\r\nStep 200, loss: 0.2333080768585205, step time: 270.77412605285645ms\r\nStep 201, loss: 0.22704865038394928, step time: 173.14839363098145ms\r\nStep 202, loss: 0.23823073506355286, step time: 107.64718055725098ms\r\nStep 203, loss: 0.20774541795253754, step time: 112.46514320373535ms\r\nStep 204, loss: 0.24040691554546356, step time: 126.861572265625ms\r\nStep 205, loss: 0.24088256061077118, step time: 95.44944763183594ms\r\nStep 206, loss: 0.22486215829849243, step time: 102.93030738830566ms\r\nStep 207, loss: 0.21422669291496277, step time: 106.05716705322266ms\r\nStep 208, loss: 0.24419456720352173, step time: 148.66375923156738ms\r\nStep 209, loss: 0.21322758495807648, step time: 140.87724685668945ms\r\nStep 210, loss: 0.253510981798172, step time: 202.6529312133789ms\r\nStep 211, loss: 0.23119083046913147, step time: 200.03938674926758ms\r\nStep 212, loss: 0.232627272605896, step time: 222.29647636413574ms\r\nStep 213, loss: 0.2389298677444458, step time: 247.85780906677246ms\r\nStep 214, loss: 0.23226134479045868, step time: 142.09461212158203ms\r\nStep 215, loss: 0.22713610529899597, step time: 109.08842086791992ms\r\nStep 216, loss: 0.24775546789169312, step time: 74.1569995880127ms\r\nStep 217, loss: 0.23582950234413147, step time: 114.40038681030273ms\r\nStep 218, loss: 0.2579847574234009, step time: 77.83961296081543ms\r\nStep 219, loss: 0.2372114062309265, step time: 118.5750961303711ms\r\nStep 220, loss: 0.23433665931224823, step time: 168.4873104095459ms\r\nStep 221, loss: 0.2527056634426117, step time: 110.55946350097656ms\r\nStep 222, loss: 0.22210896015167236, step time: 64.73970413208008ms\r\nStep 223, loss: 0.24414469301700592, step time: 138.91339302062988ms\r\nStep 224, loss: 0.2504357099533081, step time: 137.11953163146973ms\r\nStep 225, loss: 0.22071973979473114, step time: 77.00181007385254ms\r\nStep 226, loss: 0.24137301743030548, step time: 172.49727249145508ms\r\nStep 227, loss: 0.24164323508739471, step time: 148.61822128295898ms\r\nStep 228, loss: 0.2205706238746643, step time: 222.73802757263184ms\r\nStep 229, loss: 0.2630937695503235, step time: 73.77171516418457ms\r\nStep 230, loss: 0.25513118505477905, step time: 130.76376914978027ms\r\nStep 231, loss: 0.23567938804626465, step time: 95.93534469604492ms\r\nStep 232, loss: 0.2422901839017868, step 
time: 168.54071617126465ms\r\nStep 233, loss: 0.24106791615486145, step time: 98.11687469482422ms\r\nStep 234, loss: 0.21551364660263062, step time: 122.53808975219727ms\r\nStep 235, loss: 0.22949783504009247, step time: 207.52573013305664ms\r\nStep 236, loss: 0.23463501036167145, step time: 159.75332260131836ms\r\nStep 237, loss: 0.21839597821235657, step time: 151.08919143676758ms\r\nStep 238, loss: 0.2149261236190796, step time: 133.3303451538086ms\r\nStep 239, loss: 0.22921283543109894, step time: 179.2588233947754ms\r\n",,terminal_output +3265,4039576,"TERMINAL",0,0,"Step 121, loss: 0.20784707367420197, step time: 38.45572471618652ms\r\nStep 122, loss: 0.2241440862417221, step time: 67.10386276245117ms\r\nStep 123, loss: 0.1952410787343979, step time: 28.730392456054688ms\r\nStep 124, loss: 0.2559303045272827, step time: 34.6837043762207ms\r\nStep 125, loss: 0.207289919257164, step time: 61.09428405761719ms\r\nStep 126, loss: 0.2382335513830185, step time: 46.6465950012207ms\r\nStep 127, loss: 0.20085032284259796, step time: 53.92765998840332ms\r\nStep 128, loss: 0.2202863097190857, step time: 96.5118408203125ms\r\nStep 129, loss: 0.23265503346920013, step time: 38.0246639251709ms\r\nStep 130, loss: 0.20963910222053528, step time: 53.16901206970215ms\r\nStep 131, loss: 0.2075287252664566, step time: 41.8698787689209ms\r\nStep 132, loss: 0.21575258672237396, step time: 85.49857139587402ms\r\nStep 133, loss: 0.19945044815540314, step time: 41.092872619628906ms\r\nStep 134, loss: 0.18294720351696014, step time: 45.232295989990234ms\r\nStep 135, loss: 0.22029756009578705, step time: 53.70664596557617ms\r\nStep 136, loss: 0.2230755239725113, step time: 57.51609802246094ms\r\nStep 137, loss: 0.22665250301361084, step time: 44.26169395446777ms\r\nStep 138, loss: 0.1861869841814041, step time: 34.063100814819336ms\r\nStep 139, loss: 0.2156236320734024, step time: 37.31083869934082ms\r\nStep 140, loss: 0.207559272646904, step time: 38.74659538269043ms\r\nStep 141, loss: 0.23966802656650543, step time: 33.71286392211914ms\r\nStep 142, loss: 0.21649032831192017, step time: 24.728059768676758ms\r\nStep 143, loss: 0.21076525747776031, step time: 24.370193481445312ms\r\nStep 144, loss: 0.1896040290594101, step time: 25.389671325683594ms\r\nStep 145, loss: 0.22665216028690338, step time: 23.75507354736328ms\r\nStep 146, loss: 0.19968174397945404, step time: 25.47311782836914ms\r\nStep 147, loss: 0.22004956007003784, step time: 25.302886962890625ms\r\nStep 148, loss: 0.21675361692905426, step time: 26.232004165649414ms\r\nStep 149, loss: 0.20051825046539307, step time: 34.56854820251465ms\r\nStep 150, loss: 0.20946861803531647, step time: 144.1788673400879ms\r\nStep 151, loss: 0.18936635553836823, step time: 202.1012306213379ms\r\nStep 152, loss: 0.1982470452785492, step time: 31.622886657714844ms\r\nStep 153, loss: 0.18866270780563354, step time: 105.64088821411133ms\r\nStep 154, loss: 0.2204374074935913, step time: 78.11570167541504ms\r\nStep 155, loss: 0.23175925016403198, step time: 69.09990310668945ms\r\nStep 156, loss: 0.20699328184127808, step time: 79.67114448547363ms\r\nStep 157, loss: 0.2463058978319168, step time: 25.610685348510742ms\r\nStep 158, loss: 0.19663086533546448, step time: 61.324357986450195ms\r\nStep 159, loss: 0.24429723620414734, step time: 64.03613090515137ms\r\nStep 160, loss: 0.18582962453365326, step time: 33.63966941833496ms\r\nStep 161, loss: 0.2051135003566742, step time: 58.77399444580078ms\r\nStep 162, loss: 0.21318994462490082, step time: 
56.777238845825195ms\r\nStep 163, loss: 0.2090427726507187, step time: 60.457468032836914ms\r\nStep 164, loss: 0.18675930798053741, step time: 40.12918472290039ms\r\nStep 165, loss: 0.22770842909812927, step time: 36.24224662780762ms\r\nStep 166, loss: 0.2338206171989441, step time: 58.943748474121094ms\r\nStep 167, loss: 0.19510532915592194, step time: 30.150175094604492ms\r\nStep 168, loss: 0.2266233265399933, step time: 25.455951690673828ms\r\nStep 169, loss: 0.26485535502433777, step time: 46.45848274230957ms\r\nStep 170, loss: 0.23013830184936523, step time: 95.9014892578125ms\r\nStep 171, loss: 0.23124182224273682, step time: 70.07455825805664ms\r\nStep 172, loss: 0.211821049451828, step time: 30.875444412231445ms\r\nStep 173, loss: 0.22709602117538452, step time: 33.54287147521973ms\r\nStep 174, loss: 0.22190026938915253, step time: 87.75568008422852ms\r\nStep 175, loss: 0.2170114368200302, step time: 38.449764251708984ms\r\nStep 176, loss: 0.2129228711128235, step time: 63.363075256347656ms\r\nStep 177, loss: 0.2269134819507599, step time: 68.34888458251953ms\r\nStep 178, loss: 0.20729117095470428, step time: 51.88393592834473ms\r\nStep 179, loss: 0.20655518770217896, step time: 26.623964309692383ms\r\nStep 180, loss: 0.23034240305423737, step time: 43.609619140625ms\r\nStep 181, loss: 0.24809721112251282, step time: 28.864145278930664ms\r\nStep 182, loss: 0.215785413980484, step time: 28.194427490234375ms\r\nStep 183, loss: 0.22301307320594788, step time: 41.149139404296875ms\r\nStep 184, loss: 0.2594461441040039, step time: 46.97990417480469ms\r\nStep 185, loss: 0.22805005311965942, step time: 27.39405632019043ms\r\nStep 186, loss: 0.2074311226606369, step time: 45.5019474029541ms\r\nStep 187, loss: 0.22991454601287842, step time: 51.224470138549805ms\r\nStep 188, loss: 0.22197772562503815, step time: 44.46554183959961ms\r\nStep 189, loss: 0.22990094125270844, step time: 26.639223098754883ms\r\nStep 190, loss: 0.22791005671024323, step time: 52.21128463745117ms\r\nStep 191, loss: 0.2311677485704422, step time: 23.834943771362305ms\r\nStep 192, loss: 0.20709951221942902, step time: 24.04499053955078ms\r\nStep 193, loss: 0.24093765020370483, step time: 25.669097900390625ms\r\nStep 194, loss: 0.2499508112668991, step time: 24.979114532470703ms\r\nStep 195, loss: 0.20580357313156128, step time: 25.787353515625ms\r\nStep 196, loss: 0.2215675711631775, step time: 24.29032325744629ms\r\nStep 197, loss: 0.22246551513671875, step time: 37.43600845336914ms\r\nStep 198, loss: 0.2345874160528183, step time: 40.779829025268555ms\r\nStep 199, loss: 0.23320308327674866, step time: 137.59875297546387ms\r\nStep 200, loss: 0.2333080768585205, step time: 177.14977264404297ms\r\nStep 201, loss: 0.22704865038394928, step time: 131.0098171234131ms\r\nStep 202, loss: 0.23823073506355286, step time: 93.780517578125ms\r\nStep 203, loss: 0.20774541795253754, step time: 128.06391716003418ms\r\nStep 204, loss: 0.24040691554546356, step time: 115.36574363708496ms\r\nStep 205, loss: 0.24088256061077118, step time: 74.74780082702637ms\r\nStep 206, loss: 0.22486215829849243, step time: 83.11939239501953ms\r\nStep 207, loss: 0.21422669291496277, step time: 152.97985076904297ms\r\nStep 208, loss: 0.24419456720352173, step time: 139.58096504211426ms\r\nStep 209, loss: 0.21322758495807648, step time: 84.14125442504883ms\r\nStep 210, loss: 0.253510981798172, step time: 42.45591163635254ms\r\nStep 211, loss: 0.23119083046913147, step time: 44.90399360656738ms\r\nStep 212, loss: 0.232627272605896, step time: 
28.046369552612305ms\r\nStep 213, loss: 0.2389298677444458, step time: 46.95248603820801ms\r\nStep 214, loss: 0.23226134479045868, step time: 42.91987419128418ms\r\nStep 215, loss: 0.22713610529899597, step time: 70.71661949157715ms\r\nStep 216, loss: 0.24775546789169312, step time: 48.06661605834961ms\r\nStep 217, loss: 0.23582950234413147, step time: 50.333261489868164ms\r\nStep 218, loss: 0.2579847574234009, step time: 66.94841384887695ms\r\nStep 219, loss: 0.2372114062309265, step time: 80.52778244018555ms\r\nStep 220, loss: 0.23433665931224823, step time: 26.000499725341797ms\r\nStep 221, loss: 0.2527056634426117, step time: 47.814130783081055ms\r\nStep 222, loss: 0.22210896015167236, step time: 69.29731369018555ms\r\nStep 223, loss: 0.24414469301700592, step time: 37.02402114868164ms\r\nStep 224, loss: 0.2504357099533081, step time: 25.349855422973633ms\r\nStep 225, loss: 0.22071973979473114, step time: 48.2790470123291ms\r\nStep 226, loss: 0.24137301743030548, step time: 43.398141860961914ms\r\nStep 227, loss: 0.24164323508739471, step time: 54.880380630493164ms\r\nStep 228, loss: 0.2205706238746643, step time: 44.36659812927246ms\r\nStep 229, loss: 0.2630937695503235, step time: 41.42045974731445ms\r\nStep 230, loss: 0.25513118505477905, step time: 63.14849853515625ms\r\nStep 231, loss: 0.23567938804626465, step time: 33.85663032531738ms\r\nStep 232, loss: 0.2422901839017868, step time: 49.20172691345215ms\r\nStep 233, loss: 0.24106791615486145, step time: 85.17861366271973ms\r\nStep 234, loss: 0.21551364660263062, step time: 48.98381233215332ms\r\nStep 235, loss: 0.22949783504009247, step time: 45.06874084472656ms\r\nStep 236, loss: 0.23463501036167145, step time: 91.22395515441895ms\r\nStep 237, loss: 0.21839597821235657, step time: 53.93385887145996ms\r\nStep 238, loss: 0.2149261236190796, step time: 27.091503143310547ms\r\nStep 239, loss: 0.22921283543109894, step time: 46.31471633911133ms\r\nStep 240, loss: 0.2159382402896881, step time: 26.684045791625977ms\r\nStep 121, loss: 0.20784707367420197, step time: 99.98893737792969ms\r\nStep 122, loss: 0.2241440862417221, step time: 130.00249862670898ms\r\nStep 123, loss: 0.1952410787343979, step time: 280.24911880493164ms\r\nStep 124, loss: 0.2559303045272827, step time: 98.78110885620117ms\r\nStep 125, loss: 0.207289919257164, step time: 135.1163387298584ms\r\nStep 126, loss: 0.2382335513830185, step time: 101.83262825012207ms\r\nStep 127, loss: 0.20085032284259796, step time: 131.29019737243652ms\r\nStep 128, loss: 0.2202863097190857, step time: 65.00506401062012ms\r\nStep 129, loss: 0.23265503346920013, step time: 79.8952579498291ms\r\nStep 130, loss: 0.20963910222053528, step time: 111.86957359313965ms\r\nStep 131, loss: 0.2075287252664566, step time: 166.80002212524414ms\r\nStep 132, loss: 0.21575258672237396, step time: 95.78442573547363ms\r\nStep 133, loss: 0.19945044815540314, step time: 168.75171661376953ms\r\nStep 134, loss: 0.18294720351696014, step time: 73.72283935546875ms\r\nStep 135, loss: 0.22029756009578705, step time: 93.77288818359375ms\r\nStep 136, loss: 0.2230755239725113, step time: 112.35952377319336ms\r\nStep 137, loss: 0.22665250301361084, step time: 88.32526206970215ms\r\nStep 138, loss: 0.1861869841814041, step time: 80.80339431762695ms\r\nStep 139, loss: 0.2156236320734024, step time: 71.28691673278809ms\r\nStep 140, loss: 0.207559272646904, step time: 123.93546104431152ms\r\nStep 141, loss: 0.23966802656650543, step time: 86.61222457885742ms\r\nStep 142, loss: 0.21649032831192017, step time: 
99.16377067565918ms\r\nStep 143, loss: 0.21076525747776031, step time: 83.54377746582031ms\r\nStep 144, loss: 0.1896040290594101, step time: 71.3355541229248ms\r\nStep 145, loss: 0.22665216028690338, step time: 69.04721260070801ms\r\nStep 146, loss: 0.19968174397945404, step time: 59.35263633728027ms\r\nStep 147, loss: 0.22004956007003784, step time: 44.60859298706055ms\r\nStep 148, loss: 0.21675361692905426, step time: 26.12471580505371ms\r\nStep 149, loss: 0.20051825046539307, step time: 27.13298797607422ms\r\nStep 150, loss: 0.20946861803531647, step time: 26.021242141723633ms\r\nStep 151, loss: 0.18936635553836823, step time: 25.965452194213867ms\r\nStep 152, loss: 0.1982470452785492, step time: 25.307416915893555ms\r\nStep 153, loss: 0.18866270780563354, step time: 26.845932006835938ms\r\nStep 154, loss: 0.2204374074935913, step time: 219.3915843963623ms\r\nStep 155, loss: 0.23175925016403198, step time: 178.40862274169922ms\r\nStep 156, loss: 0.20699328184127808, step time: 118.97492408752441ms\r\nStep 157, loss: 0.2463058978319168, step time: 48.39801788330078ms\r\nStep 158, loss: 0.19663086533546448, step time: 45.743703842163086ms\r\nStep 159, loss: 0.24429723620414734, step time: 42.099714279174805ms\r\nStep 160, loss: 0.18582962453365326, step time: 75.958251953125ms\r\nStep 161, loss: 0.2051135003566742, step time: 43.45512390136719ms\r\nStep 162, loss: 0.21318994462490082, step time: 106.19497299194336ms\r\nStep 163, loss: 0.2090427726507187, step time: 120.94855308532715ms\r\nStep 164, loss: 0.18675930798053741, step time: 134.23418998718262ms\r\nStep 165, loss: 0.22770842909812927, step time: 81.46500587463379ms\r\nStep 166, loss: 0.2338206171989441, step time: 78.57847213745117ms\r\nStep 167, loss: 0.19510532915592194, step time: 124.88985061645508ms\r\nStep 168, loss: 0.2266233265399933, step time: 70.42503356933594ms\r\nStep 169, loss: 0.26485535502433777, step time: 108.70909690856934ms\r\nStep 170, loss: 0.23013830184936523, step time: 88.47665786743164ms\r\nStep 171, loss: 0.23124182224273682, step time: 81.68816566467285ms\r\nStep 172, loss: 0.211821049451828, step time: 45.162200927734375ms\r\nStep 173, loss: 0.22709602117538452, step time: 81.06088638305664ms\r\nStep 174, loss: 0.22190026938915253, step time: 142.62151718139648ms\r\nStep 175, loss: 0.2170114368200302, step time: 117.31386184692383ms\r\nStep 176, loss: 0.2129228711128235, step time: 102.94008255004883ms\r\nStep 177, loss: 0.2269134819507599, step time: 72.07846641540527ms\r\nStep 178, loss: 0.20729117095470428, step time: 107.57589340209961ms\r\nStep 179, loss: 0.20655518770217896, step time: 104.21252250671387ms\r\nStep 180, loss: 0.23034240305423737, step time: 99.11346435546875ms\r\nStep 181, loss: 0.24809721112251282, step time: 71.33245468139648ms\r\nStep 182, loss: 0.215785413980484, step time: 78.82118225097656ms\r\nStep 183, loss: 0.22301307320594788, step time: 63.234806060791016ms\r\nStep 184, loss: 0.2594461441040039, step time: 121.72722816467285ms\r\nStep 185, loss: 0.22805005311965942, step time: 98.5407829284668ms\r\nStep 186, loss: 0.2074311226606369, step time: 109.41696166992188ms\r\nStep 187, loss: 0.22991454601287842, step time: 81.59136772155762ms\r\nStep 188, loss: 0.22197772562503815, step time: 53.8485050201416ms\r\nStep 189, loss: 0.22990094125270844, step time: 123.3224868774414ms\r\nStep 190, loss: 0.22791005671024323, step time: 142.39764213562012ms\r\nStep 191, loss: 0.2311677485704422, step time: 96.74978256225586ms\r\nStep 192, loss: 0.20709951221942902, step time: 
136.061429977417ms\r\nStep 193, loss: 0.24093765020370483, step time: 46.2186336517334ms\r\nStep 194, loss: 0.2499508112668991, step time: 81.3441276550293ms\r\nStep 195, loss: 0.20580357313156128, step time: 97.9149341583252ms\r\nStep 196, loss: 0.2215675711631775, step time: 74.54037666320801ms\r\nStep 197, loss: 0.22246551513671875, step time: 113.30699920654297ms\r\nStep 198, loss: 0.2345874160528183, step time: 106.0936450958252ms\r\nStep 199, loss: 0.23320308327674866, step time: 95.16334533691406ms\r\nStep 200, loss: 0.2333080768585205, step time: 30.7464599609375ms\r\nStep 201, loss: 0.22704865038394928, step time: 26.28612518310547ms\r\nStep 202, loss: 0.23823073506355286, step time: 26.242494583129883ms\r\nStep 203, loss: 0.20774541795253754, step time: 28.250455856323242ms\r\nStep 204, loss: 0.24040691554546356, step time: 26.949167251586914ms\r\nStep 205, loss: 0.24088256061077118, step time: 27.61077880859375ms\r\nStep 206, loss: 0.22486215829849243, step time: 24.946212768554688ms\r\nStep 207, loss: 0.21422669291496277, step time: 290.4801368713379ms\r\nStep 208, loss: 0.24419456720352173, step time: 128.27181816101074ms\r\nStep 209, loss: 0.21322758495807648, step time: 77.46624946594238ms\r\nStep 210, loss: 0.253510981798172, step time: 99.5488166809082ms\r\nStep 211, loss: 0.23119083046913147, step time: 156.47411346435547ms\r\nStep 212, loss: 0.232627272605896, step time: 84.93471145629883ms\r\nStep 213, loss: 0.2389298677444458, step time: 166.1834716796875ms\r\nStep 214, loss: 0.23226134479045868, step time: 130.4640769958496ms\r\nStep 215, loss: 0.22713610529899597, step time: 223.26326370239258ms\r\nStep 216, loss: 0.24775546789169312, step time: 267.6210403442383ms\r\nStep 217, loss: 0.23582950234413147, step time: 184.39865112304688ms\r\nStep 218, loss: 0.2579847574234009, step time: 205.28197288513184ms\r\nStep 219, loss: 0.2372114062309265, step time: 215.76762199401855ms\r\nStep 220, loss: 0.23433665931224823, step time: 98.33097457885742ms\r\nStep 221, loss: 0.2527056634426117, step time: 99.00093078613281ms\r\nStep 222, loss: 0.22210896015167236, step time: 142.77911186218262ms\r\nStep 223, loss: 0.24414469301700592, step time: 138.75699043273926ms\r\nStep 224, loss: 0.2504357099533081, step time: 235.7654571533203ms\r\nStep 225, loss: 0.22071973979473114, step time: 140.05541801452637ms\r\nStep 226, loss: 0.24137301743030548, step time: 247.34902381896973ms\r\nStep 227, loss: 0.24164323508739471, step time: 159.0120792388916ms\r\nStep 228, loss: 0.2205706238746643, step time: 196.84195518493652ms\r\nStep 229, loss: 0.2630937695503235, step time: 70.85108757019043ms\r\nStep 230, loss: 0.25513118505477905, step time: 146.4531421661377ms\r\nStep 231, loss: 0.23567938804626465, step time: 140.14458656311035ms\r\nStep 232, loss: 0.2422901839017868, step time: 233.12735557556152ms\r\nStep 233, loss: 0.24106791615486145, step time: 152.6036262512207ms\r\nStep 234, loss: 0.21551364660263062, step time: 86.4400863647461ms\r\nStep 235, loss: 0.22949783504009247, step time: 53.399085998535156ms\r\nStep 236, loss: 0.23463501036167145, step time: 135.50305366516113ms\r\nStep 237, loss: 0.21839597821235657, step time: 211.09819412231445ms\r\nStep 238, loss: 0.2149261236190796, step time: 140.05446434020996ms\r\nStep 239, loss: 0.22921283543109894, step time: 96.63009643554688ms\r\nStep 240, loss: 0.2159382402896881, step time: 174.06582832336426ms\r\nStep 121, loss: 0.20784707367420197, step time: 57.65867233276367ms\r\nStep 122, loss: 0.2241440862417221, step time: 
105.2560806274414ms\r\nStep 123, loss: 0.1952410787343979, step time: 58.257102966308594ms\r\nStep 124, loss: 0.2559303045272827, step time: 53.727149963378906ms\r\nStep 125, loss: 0.207289919257164, step time: 58.951377868652344ms\r\nStep 126, loss: 0.2382335513830185, step time: 55.41729927062988ms\r\nStep 127, loss: 0.20085032284259796, step time: 27.40621566772461ms\r\nStep 128, loss: 0.2202863097190857, step time: 51.34129524230957ms\r\nStep 129, loss: 0.23265503346920013, step time: 40.334463119506836ms\r\nStep 130, loss: 0.20963910222053528, step time: 28.44548225402832ms\r\nStep 131, loss: 0.2075287252664566, step time: 28.790950775146484ms\r\nStep 132, loss: 0.21575258672237396, step time: 48.665761947631836ms\r\nStep 133, loss: 0.19945044815540314, step time: 67.05379486083984ms\r\nStep 134, loss: 0.18294720351696014, step time: 52.20627784729004ms\r\nStep 135, loss: 0.22029756009578705, step time: 30.239105224609375ms\r\nStep 136, loss: 0.2230755239725113, step time: 53.36880683898926ms\r\nStep 137, loss: 0.22665250301361084, step time: 56.10013008117676ms\r\nStep 138, loss: 0.1861869841814041, step time: 34.58380699157715ms\r\nStep 139, loss: 0.2156236320734024, step time: 41.63074493408203ms\r\nStep 140, loss: 0.207559272646904, step time: 49.61252212524414ms\r\nStep 141, loss: 0.23966802656650543, step time: 56.83732032775879ms\r\nStep 142, loss: 0.21649032831192017, step time: 55.79352378845215ms\r\nStep 143, loss: 0.21076525747776031, step time: 28.371572494506836ms\r\nStep 144, loss: 0.1896040290594101, step time: 43.619394302368164ms\r\nStep 145, loss: 0.22665216028690338, step time: 87.47601509094238ms\r\nStep 146, loss: 0.19968174397945404, step time: 41.832923889160156ms\r\nStep 147, loss: 0.22004956007003784, step time: 29.8616886138916ms\r\nStep 148, loss: 0.21675361692905426, step time: 46.91791534423828ms\r\nStep 149, loss: 0.20051825046539307, step time: 44.779062271118164ms\r\nStep 150, loss: 0.20946861803531647, step time: 26.449918746948242ms\r\nStep 151, loss: 0.18936635553836823, step time: 26.1385440826416ms\r\nStep 152, loss: 0.1982470452785492, step time: 24.170637130737305ms\r\nStep 153, loss: 0.18866270780563354, step time: 25.133371353149414ms\r\nStep 154, loss: 0.2204374074935913, step time: 28.896093368530273ms\r\nStep 155, loss: 0.23175925016403198, step time: 28.464794158935547ms\r\nStep 156, loss: 0.20699328184127808, step time: 27.57740020751953ms\r\nStep 157, loss: 0.2463058978319168, step time: 25.70366859436035ms\r\nStep 158, loss: 0.19663086533546448, step time: 28.578996658325195ms\r\nStep 159, loss: 0.24429723620414734, step time: 27.587413787841797ms\r\nStep 160, loss: 0.18582962453365326, step time: 30.341148376464844ms\r\nStep 161, loss: 0.2051135003566742, step time: 91.54272079467773ms\r\nStep 162, loss: 0.21318994462490082, step time: 249.68886375427246ms\r\nStep 163, loss: 0.2090427726507187, step time: 174.02052879333496ms\r\nStep 164, loss: 0.18675930798053741, step time: 55.82547187805176ms\r\nStep 165, loss: 0.22770842909812927, step time: 122.96485900878906ms\r\nStep 166, loss: 0.2338206171989441, step time: 124.68266487121582ms\r\nStep 167, loss: 0.19510532915592194, step time: 61.383724212646484ms\r\nStep 168, loss: 0.2266233265399933, step time: 93.61505508422852ms\r\nStep 169, loss: 0.26485535502433777, step time: 60.861825942993164ms\r\nStep 170, loss: 0.23013830184936523, step time: 79.56147193908691ms\r\nStep 171, loss: 0.23124182224273682, step time: 96.41599655151367ms\r\nStep 172, loss: 0.211821049451828, step time: 
43.70260238647461ms\r\nStep 173, loss: 0.22709602117538452, step time: 50.07123947143555ms\r\nStep 174, loss: 0.22190026938915253, step time: 37.98508644104004ms\r\nStep 175, loss: 0.2170114368200302, step time: 115.60177803039551ms\r\nStep 176, loss: 0.2129228711128235, step time: 81.23993873596191ms\r\nStep 177, loss: 0.2269134819507599, step time: 56.52189254760742ms\r\nStep 178, loss: 0.20729117095470428, step time: 56.731462478637695ms\r\nStep 179, loss: 0.20655518770217896, step time: 48.148393630981445ms\r\nStep 180, loss: 0.23034240305423737, step time: 52.449703216552734ms\r\nStep 121, loss: 0.20784707367420197, step time: 175.51279067993164ms\r\nStep 122, loss: 0.2241440862417221, step time: 141.7078971862793ms\r\nStep 123, loss: 0.1952410787343979, step time: 40.30108451843262ms\r\nStep 124, loss: 0.2559303045272827, step time: 42.49763488769531ms\r\nStep 125, loss: 0.207289919257164, step time: 58.17127227783203ms\r\nStep 126, loss: 0.2382335513830185, step time: 96.44865989685059ms\r\nStep 127, loss: 0.20085032284259796, step time: 62.164306640625ms\r\nStep 128, loss: 0.2202863097190857, step time: 83.66060256958008ms\r\nStep 129, loss: 0.23265503346920013, step time: 99.62654113769531ms\r\nStep 130, loss: 0.20963910222053528, step time: 98.30021858215332ms\r\nStep 131, loss: 0.2075287252664566, step time: 145.3697681427002ms\r\nStep 132, loss: 0.21575258672237396, step time: 89.71762657165527ms\r\nStep 133, loss: 0.19945044815540314, step time: 126.56188011169434ms\r\nStep 134, loss: 0.18294720351696014, step time: 106.64868354797363ms\r\nStep 135, loss: 0.22029756009578705, step time: 88.23609352111816ms\r\nStep 181, loss: 0.24809721112251282, step time: 53.37214469909668ms\r\nStep 182, loss: 0.215785413980484, step time: 33.78009796142578ms\r\nStep 183, loss: 0.22301307320594788, step time: 26.062965393066406ms\r\nStep 184, loss: 0.2594461441040039, step time: 31.013965606689453ms\r\nStep 185, loss: 0.22805005311965942, step time: 47.219038009643555ms\r\nStep 186, loss: 0.2074311226606369, step time: 44.18158531188965ms\r\nStep 187, loss: 0.22991454601287842, step time: 51.86009407043457ms\r\nStep 188, loss: 0.22197772562503815, step time: 28.672218322753906ms\r\nStep 189, loss: 0.22990094125270844, step time: 87.20207214355469ms\r\nStep 190, loss: 0.22791005671024323, step time: 43.81966590881348ms\r\nStep 191, loss: 0.2311677485704422, step time: 59.12947654724121ms\r\nStep 192, loss: 0.20709951221942902, step time: 29.97756004333496ms\r\nStep 193, loss: 0.24093765020370483, step time: 27.140378952026367ms\r\nStep 194, loss: 0.2499508112668991, step time: 51.221370697021484ms\r\nStep 195, loss: 0.20580357313156128, step time: 27.454614639282227ms\r\nStep 136, loss: 0.2230755239725113, step time: 158.33401679992676ms\r\nStep 137, loss: 0.22665250301361084, step time: 183.70985984802246ms\r\nStep 138, loss: 0.1861869841814041, step time: 43.37048530578613ms\r\nStep 139, loss: 0.2156236320734024, step time: 49.16191101074219ms\r\nStep 140, loss: 0.207559272646904, step time: 84.47837829589844ms\r\nStep 141, loss: 0.23966802656650543, step time: 105.95703125ms\r\nStep 142, loss: 0.21649032831192017, step time: 53.55072021484375ms\r\nStep 143, loss: 0.21076525747776031, step time: 99.18713569641113ms\r\nStep 144, loss: 0.1896040290594101, step time: 50.28724670410156ms\r\nStep 145, loss: 0.22665216028690338, step time: 38.27548027038574ms\r\nStep 146, loss: 0.19968174397945404, step time: 24.693965911865234ms\r\nStep 147, loss: 0.22004956007003784, step time: 
25.002717971801758ms\r\nStep 148, loss: 0.21675361692905426, step time: 23.98228645324707ms\r\nStep 149, loss: 0.20051825046539307, step time: 25.96449851989746ms\r\nStep 150, loss: 0.20946861803531647, step time: 49.99351501464844ms\r\nStep 196, loss: 0.2215675711631775, step time: 35.74061393737793ms\r\nStep 197, loss: 0.22246551513671875, step time: 42.91343688964844ms\r\nStep 198, loss: 0.2345874160528183, step time: 66.19715690612793ms\r\nStep 199, loss: 0.23320308327674866, step time: 54.33154106140137ms\r\nStep 200, loss: 0.2333080768585205, step time: 54.64315414428711ms\r\nStep 201, loss: 0.22704865038394928, step time: 37.050724029541016ms\r\nStep 202, loss: 0.23823073506355286, step time: 59.0212345123291ms\r\nStep 203, loss: 0.20774541795253754, step time: 42.15407371520996ms\r\nStep 204, loss: 0.24040691554546356, step time: 33.7064266204834ms\r\nStep 205, loss: 0.24088256061077118, step time: 59.23104286193848ms\r\nStep 206, loss: 0.22486215829849243, step time: 25.7108211517334ms\r\nStep 207, loss: 0.21422669291496277, step time: 26.419639587402344ms\r\nStep 208, loss: 0.24419456720352173, step time: 26.343822479248047ms\r\nStep 209, loss: 0.21322758495807648, step time: 26.9773006439209ms\r\nStep 210, loss: 0.253510981798172, step time: 29.726743698120117ms\r\nStep 151, loss: 0.18936635553836823, step time: 26.333093643188477ms\r\nStep 152, loss: 0.1982470452785492, step time: 72.92056083679199ms\r\nStep 153, loss: 0.18866270780563354, step time: 99.00188446044922ms\r\nStep 154, loss: 0.2204374074935913, step time: 197.14069366455078ms\r\nStep 155, loss: 0.23175925016403198, step time: 245.93806266784668ms\r\nStep 156, loss: 0.20699328184127808, step time: 168.10917854309082ms\r\nStep 157, loss: 0.2463058978319168, step time: 160.25066375732422ms\r\nStep 158, loss: 0.19663086533546448, step time: 175.55570602416992ms\r\nStep 159, loss: 0.24429723620414734, step time: 142.41600036621094ms\r\nStep 160, loss: 0.18582962453365326, step time: 123.01039695739746ms\r\nStep 161, loss: 0.2051135003566742, step time: 133.56852531433105ms\r\nStep 162, loss: 0.21318994462490082, step time: 107.04779624938965ms\r\nStep 163, loss: 0.2090427726507187, step time: 149.59096908569336ms\r\nStep 164, loss: 0.18675930798053741, step time: 71.1665153503418ms\r\nStep 211, loss: 0.23119083046913147, step time: 35.094499588012695ms\r\nStep 212, loss: 0.232627272605896, step time: 84.84864234924316ms\r\nStep 213, loss: 0.2389298677444458, step time: 186.65337562561035ms\r\nStep 214, loss: 0.23226134479045868, step time: 165.8487319946289ms\r\nStep 215, loss: 0.22713610529899597, step time: 141.27588272094727ms\r\nStep 216, loss: 0.24775546789169312, step time: 42.769670486450195ms\r\nStep 217, loss: 0.23582950234413147, step time: 59.20696258544922ms\r\nStep 218, loss: 0.2579847574234009, step time: 60.8365535736084ms\r\nStep 219, loss: 0.2372114062309265, step time: 121.92702293395996ms\r\nStep 220, loss: 0.23433665931224823, step time: 66.81299209594727ms\r\nStep 221, loss: 0.2527056634426117, step time: 99.97439384460449ms\r\nStep 222, loss: 0.22210896015167236, step time: 70.06192207336426ms\r\nStep 223, loss: 0.24414469301700592, step time: 40.25077819824219ms\r\nStep 224, loss: 0.2504357099533081, step time: 44.18325424194336ms\r\nStep 225, loss: 0.22071973979473114, step time: 30.092477798461914ms\r\nStep 165, loss: 0.22770842909812927, step time: 48.85506629943848ms\r\nStep 166, loss: 0.2338206171989441, step time: 28.083324432373047ms\r\nStep 167, loss: 0.19510532915592194, step time: 
58.15911293029785ms\r\nStep 168, loss: 0.2266233265399933, step time: 65.05799293518066ms\r\nStep 169, loss: 0.26485535502433777, step time: 57.13295936584473ms\r\nStep 170, loss: 0.23013830184936523, step time: 42.38438606262207ms\r\nStep 171, loss: 0.23124182224273682, step time: 98.69837760925293ms\r\nStep 172, loss: 0.211821049451828, step time: 102.22363471984863ms\r\nStep 173, loss: 0.22709602117538452, step time: 40.45438766479492ms\r\nStep 174, loss: 0.22190026938915253, step time: 45.342206954956055ms\r\nStep 175, loss: 0.2170114368200302, step time: 30.541658401489258ms\r\nStep 176, loss: 0.2129228711128235, step time: 42.48857498168945ms\r\nStep 177, loss: 0.2269134819507599, step time: 46.72598838806152ms\r\nStep 178, loss: 0.20729117095470428, step time: 71.1662769317627ms\r\nStep 179, loss: 0.20655518770217896, step time: 55.56535720825195ms\r\nStep 226, loss: 0.24137301743030548, step time: 47.066688537597656ms\r\nStep 227, loss: 0.24164323508739471, step time: 39.44039344787598ms\r\nStep 228, loss: 0.2205706238746643, step time: 50.47464370727539ms\r\nStep 229, loss: 0.2630937695503235, step time: 55.97829818725586ms\r\nStep 230, loss: 0.25513118505477905, step time: 47.81508445739746ms\r\nStep 231, loss: 0.23567938804626465, step time: 55.419921875ms\r\nStep 232, loss: 0.2422901839017868, step time: 49.649715423583984ms\r\nStep 233, loss: 0.24106791615486145, step time: 113.41643333435059ms\r\nStep 234, loss: 0.21551364660263062, step time: 34.91091728210449ms\r\nStep 235, loss: 0.22949783504009247, step time: 41.41855239868164ms\r\nStep 236, loss: 0.23463501036167145, step time: 117.69223213195801ms\r\nStep 237, loss: 0.21839597821235657, step time: 50.96006393432617ms\r\nStep 238, loss: 0.2149261236190796, step time: 33.45537185668945ms\r\nStep 239, loss: 0.22921283543109894, step time: 51.90134048461914ms\r\nStep 240, loss: 0.2159382402896881, step time: 59.68451499938965ms\r\nStep 180, loss: 0.23034240305423737, step time: 79.60271835327148ms\r\nStep 121, loss: 0.20784707367420197, step time: 54.1224479675293ms\r\nStep 122, loss: 0.2241440862417221, step time: 52.42466926574707ms\r\nStep 123, loss: 0.1952410787343979, step time: 26.555299758911133ms\r\nStep 124, loss: 0.2559303045272827, step time: 43.67542266845703ms\r\nStep 125, loss: 0.207289919257164, step time: 53.21621894836426ms\r\nStep 126, loss: 0.2382335513830185, step time: 52.570343017578125ms\r\nStep 127, loss: 0.20085032284259796, step time: 29.885292053222656ms\r\nStep 128, loss: 0.2202863097190857, step time: 30.164003372192383ms\r\nStep 129, loss: 0.23265503346920013, step time: 81.99906349182129ms\r\nStep 130, loss: 0.20963910222053528, step time: 57.17349052429199ms\r\nStep 131, loss: 0.2075287252664566, step time: 92.61727333068848ms\r\nStep 132, loss: 0.21575258672237396, step time: 81.51817321777344ms\r\nStep 133, loss: 0.19945044815540314, step time: 48.337697982788086ms\r\nStep 134, loss: 0.18294720351696014, step time: 54.00729179382324ms\r\nStep 135, loss: 0.22029756009578705, step time: 46.88715934753418ms\r\nStep 136, loss: 0.2230755239725113, step time: 55.22894859313965ms\r\nStep 137, loss: 0.22665250301361084, step time: 71.04039192199707ms\r\nStep 138, loss: 0.1861869841814041, step time: 48.525333404541016ms\r\nStep 139, loss: 0.2156236320734024, step time: 50.6596565246582ms\r\nStep 140, loss: 0.207559272646904, step time: 47.15895652770996ms\r\nStep 141, loss: 0.23966802656650543, step time: 59.39197540283203ms\r\nStep 142, loss: 0.21649032831192017, step time: 
28.00440788269043ms\r\nStep 143, loss: 0.21076525747776031, step time: 49.646615982055664ms\r\nStep 144, loss: 0.1896040290594101, step time: 67.55638122558594ms\r\nStep 145, loss: 0.22665216028690338, step time: 46.58150672912598ms\r\nStep 146, loss: 0.19968174397945404, step time: 25.587797164916992ms\r\nStep 147, loss: 0.22004956007003784, step time: 24.77574348449707ms\r\nStep 148, loss: 0.21675361692905426, step time: 24.159908294677734ms\r\nStep 149, loss: 0.20051825046539307, step time: 25.171279907226562ms\r\nStep 150, loss: 0.20946861803531647, step time: 26.317596435546875ms\r\nStep 151, loss: 0.18936635553836823, step time: 25.696754455566406ms\r\nStep 152, loss: 0.1982470452785492, step time: 26.210546493530273ms\r\nStep 153, loss: 0.18866270780563354, step time: 27.171850204467773ms\r\nStep 154, loss: 0.2204374074935913, step time: 29.674768447875977ms\r\nStep 155, loss: 0.23175925016403198, step time: 108.4132194519043ms\r\nStep 156, loss: 0.20699328184127808, step time: 70.50776481628418ms\r\nStep 157, loss: 0.2463058978319168, step time: 56.78820610046387ms\r\nStep 158, loss: 0.19663086533546448, step time: 77.86393165588379ms\r\nStep 159, loss: 0.24429723620414734, step time: 77.41522789001465ms\r\nStep 160, loss: 0.18582962453365326, step time: 53.6799430847168ms\r\nStep 161, loss: 0.2051135003566742, step time: 30.64417839050293ms\r\nStep 162, loss: 0.21318994462490082, step time: 37.78576850891113ms\r\nStep 163, loss: 0.2090427726507187, step time: 56.49924278259277ms\r\nStep 164, loss: 0.18675930798053741, step time: 44.3422794342041ms\r\nStep 165, loss: 0.22770842909812927, step time: 28.904438018798828ms\r\nStep 166, loss: 0.2338206171989441, step time: 30.82871437072754ms\r\nStep 167, loss: 0.19510532915592194, step time: 51.561832427978516ms\r\nStep 168, loss: 0.2266233265399933, step time: 28.011083602905273ms\r\nStep 169, loss: 0.26485535502433777, step time: 27.901411056518555ms\r\nStep 170, loss: 0.23013830184936523, step time: 40.64154624938965ms\r\nStep 171, loss: 0.23124182224273682, step time: 39.642333984375ms\r\nStep 172, loss: 0.211821049451828, step time: 55.29665946960449ms\r\nStep 173, loss: 0.22709602117538452, step time: 29.45542335510254ms\r\nStep 174, loss: 0.22190026938915253, step time: 33.948659896850586ms\r\nStep 175, loss: 0.2170114368200302, step time: 105.66353797912598ms\r\nStep 176, loss: 0.2129228711128235, step time: 58.598995208740234ms\r\nStep 177, loss: 0.2269134819507599, step time: 60.451507568359375ms\r\nStep 178, loss: 0.20729117095470428, step time: 85.33382415771484ms\r\nStep 179, loss: 0.20655518770217896, step time: 28.60736846923828ms\r\nStep 180, loss: 0.23034240305423737, step time: 56.41674995422363ms\r\nStep 181, loss: 0.24809721112251282, step time: 206.49337768554688ms\r\nStep 182, loss: 0.215785413980484, step time: 192.0461654663086ms\r\nStep 183, loss: 0.22301307320594788, step time: 157.08231925964355ms\r\nStep 184, loss: 0.2594461441040039, step time: 84.34486389160156ms\r\nStep 185, loss: 0.22805005311965942, step time: 60.895681381225586ms\r\nStep 186, loss: 0.2074311226606369, step time: 81.12359046936035ms\r\nStep 187, loss: 0.22991454601287842, step time: 137.01486587524414ms\r\nStep 188, loss: 0.22197772562503815, step time: 82.53693580627441ms\r\nStep 189, loss: 0.22990094125270844, step time: 97.71370887756348ms\r\nStep 190, loss: 0.22791005671024323, step time: 127.54678726196289ms\r\nStep 191, loss: 0.2311677485704422, step time: 52.75535583496094ms\r\nStep 192, loss: 0.20709951221942902, step time: 
125.52380561828613ms\r\nStep 193, loss: 0.24093765020370483, step time: 71.14410400390625ms\r\nStep 194, loss: 0.2499508112668991, step time: 70.40596008300781ms\r\nStep 195, loss: 0.20580357313156128, step time: 60.674190521240234ms\r\nStep 196, loss: 0.2215675711631775, step time: 25.46238899230957ms\r\nStep 197, loss: 0.22246551513671875, step time: 42.14644432067871ms\r\nStep 198, loss: 0.2345874160528183, step time: 28.814315795898438ms\r\nStep 199, loss: 0.23320308327674866, step time: 27.231693267822266ms\r\nStep 200, loss: 0.2333080768585205, step time: 31.97026252746582ms\r\nStep 201, loss: 0.22704865038394928, step time: 31.390905380249023ms\r\nStep 202, loss: 0.23823073506355286, step time: 29.280424118041992ms\r\nStep 203, loss: 0.20774541795253754, step time: 319.77152824401855ms\r\nStep 204, loss: 0.24040691554546356, step time: 130.00106811523438ms\r\nStep 205, loss: 0.24088256061077118, step time: 204.6365737915039ms\r\nStep 206, loss: 0.22486215829849243, step time: 155.43484687805176ms\r\nStep 207, loss: 0.21422669291496277, step time: 142.5936222076416ms\r\nStep 208, loss: 0.24419456720352173, step time: 227.89406776428223ms\r\nStep 209, loss: 0.21322758495807648, step time: 143.66602897644043ms\r\nStep 210, loss: 0.253510981798172, step time: 255.385160446167ms\r\nStep 211, loss: 0.23119083046913147, step time: 184.57651138305664ms\r\nStep 212, loss: 0.232627272605896, step time: 114.07470703125ms\r\nStep 213, loss: 0.2389298677444458, step time: 125.74028968811035ms\r\nStep 214, loss: 0.23226134479045868, step time: 231.6298484802246ms\r\nStep 215, loss: 0.22713610529899597, step time: 120.64766883850098ms\r\nStep 216, loss: 0.24775546789169312, step time: 96.76647186279297ms\r\nStep 217, loss: 0.23582950234413147, step time: 43.286800384521484ms\r\nStep 218, loss: 0.2579847574234009, step time: 82.16547966003418ms\r\nStep 219, loss: 0.2372114062309265, step time: 180.41634559631348ms\r\nStep 220, loss: 0.23433665931224823, step time: 253.89385223388672ms\r\nStep 221, loss: 0.2527056634426117, step time: 251.65891647338867ms\r\nStep 222, loss: 0.22210896015167236, step time: 75.92415809631348ms\r\nStep 223, loss: 0.24414469301700592, step time: 129.03928756713867ms\r\nStep 224, loss: 0.2504357099533081, step time: 118.2413101196289ms\r\nStep 225, loss: 0.22071973979473114, step time: 173.6431121826172ms\r\nStep 226, loss: 0.24137301743030548, step time: 138.88120651245117ms\r\nStep 227, loss: 0.24164323508739471, step time: 86.68112754821777ms\r\nStep 228, loss: 0.2205706238746643, step time: 163.59877586364746ms\r\nStep 229, loss: 0.2630937695503235, step time: 176.5427589416504ms\r\nStep 230, loss: 0.25513118505477905, step time: 138.31067085266113ms\r\nStep 231, loss: 0.23567938804626465, step time: 172.210693359375ms\r\nStep 232, loss: 0.2422901839017868, step time: 255.04422187805176ms\r\nStep 233, loss: 0.24106791615486145, step time: 211.32969856262207ms\r\nStep 234, loss: 0.21551364660263062, step time: 250.5626678466797ms\r\nStep 235, loss: 0.22949783504009247, step time: 140.79022407531738ms\r\nStep 236, loss: 0.23463501036167145, step time: 158.62298011779785ms\r\nStep 237, loss: 0.21839597821235657, step time: 79.31923866271973ms\r\nStep 238, loss: 0.2149261236190796, step time: 82.70025253295898ms\r\nStep 239, loss: 0.22921283543109894, step time: 122.98369407653809ms\r\nStep 240, loss: 0.2159382402896881, step time: 83.22715759277344ms\r\nStep 121, loss: 0.20784707367420197, step time: 28.102874755859375ms\r\nStep 122, loss: 0.2241440862417221, step time: 
54.19206619262695ms\r\nStep 123, loss: 0.1952410787343979, step time: 55.00173568725586ms\r\nStep 124, loss: 0.2559303045272827, step time: 27.759313583374023ms\r\nStep 125, loss: 0.207289919257164, step time: 34.364938735961914ms\r\nStep 126, loss: 0.2382335513830185, step time: 40.0388240814209ms\r\nStep 127, loss: 0.20085032284259796, step time: 109.33899879455566ms\r\nStep 128, loss: 0.2202863097190857, step time: 42.01507568359375ms\r\nStep 129, loss: 0.23265503346920013, step time: 28.316974639892578ms\r\nStep 130, loss: 0.20963910222053528, step time: 47.423362731933594ms\r\nStep 131, loss: 0.2075287252664566, step time: 57.20019340515137ms\r\nStep 132, loss: 0.21575258672237396, step time: 27.878522872924805ms\r\nStep 133, loss: 0.19945044815540314, step time: 27.20022201538086ms\r\nStep 134, loss: 0.18294720351696014, step time: 38.98191452026367ms\r\nStep 135, loss: 0.22029756009578705, step time: 34.02423858642578ms\r\nStep 136, loss: 0.2230755239725113, step time: 40.375471115112305ms\r\nStep 137, loss: 0.22665250301361084, step time: 27.110815048217773ms\r\nStep 138, loss: 0.1861869841814041, step time: 25.694608688354492ms\r\nStep 139, loss: 0.2156236320734024, step time: 24.428367614746094ms\r\nStep 140, loss: 0.207559272646904, step time: 25.22897720336914ms\r\nStep 141, loss: 0.23966802656650543, step time: 27.24623680114746ms\r\nStep 142, loss: 0.21649032831192017, step time: 24.554014205932617ms\r\nStep 143, loss: 0.21076525747776031, step time: 60.21237373352051ms\r\nStep 144, loss: 0.1896040290594101, step time: 122.43199348449707ms\r\nStep 145, loss: 0.22665216028690338, step time: 63.67325782775879ms\r\nStep 146, loss: 0.19968174397945404, step time: 172.00565338134766ms\r\nStep 147, loss: 0.22004956007003784, step time: 58.96162986755371ms\r\nStep 148, loss: 0.21675361692905426, step time: 46.19908332824707ms\r\nStep 149, loss: 0.20051825046539307, step time: 126.85132026672363ms\r\nStep 150, loss: 0.20946861803531647, step time: 123.3987808227539ms\r\nStep 151, loss: 0.18936635553836823, step time: 88.64140510559082ms\r\nStep 152, loss: 0.1982470452785492, step time: 121.612548828125ms\r\nStep 153, loss: 0.18866270780563354, step time: 98.3133316040039ms\r\nStep 154, loss: 0.2204374074935913, step time: 101.26352310180664ms\r\nStep 155, loss: 0.23175925016403198, step time: 28.731346130371094ms\r\nStep 156, loss: 0.20699328184127808, step time: 27.0998477935791ms\r\nStep 157, loss: 0.2463058978319168, step time: 25.94900131225586ms\r\nStep 158, loss: 0.19663086533546448, step time: 36.85402870178223ms\r\nStep 159, loss: 0.24429723620414734, step time: 38.153648376464844ms\r\nStep 160, loss: 0.18582962453365326, step time: 50.60982704162598ms\r\nStep 161, loss: 0.2051135003566742, step time: 36.19098663330078ms\r\nStep 162, loss: 0.21318994462490082, step time: 38.73109817504883ms\r\nStep 163, loss: 0.2090427726507187, step time: 37.04333305358887ms\r\nStep 164, loss: 0.18675930798053741, step time: 47.1653938293457ms\r\nStep 165, loss: 0.22770842909812927, step time: 26.943683624267578ms\r\nStep 166, loss: 0.2338206171989441, step time: 37.52708435058594ms\r\nStep 167, loss: 0.19510532915592194, step time: 48.442840576171875ms\r\nStep 168, loss: 0.2266233265399933, step time: 50.577402114868164ms\r\nStep 169, loss: 0.26485535502433777, step time: 26.796340942382812ms\r\nStep 170, loss: 0.23013830184936523, step time: 27.785778045654297ms\r\nStep 171, loss: 0.23124182224273682, step time: 51.04684829711914ms\r\nStep 172, loss: 0.211821049451828, step time: 
46.30160331726074ms\r\nStep 173, loss: 0.22709602117538452, step time: 55.40728569030762ms\r\nStep 174, loss: 0.22190026938915253, step time: 51.28622055053711ms\r\nStep 175, loss: 0.2170114368200302, step time: 26.126861572265625ms\r\nStep 176, loss: 0.2129228711128235, step time: 38.114309310913086ms\r\nStep 177, loss: 0.2269134819507599, step time: 59.65828895568848ms\r\nStep 178, loss: 0.20729117095470428, step time: 56.282758712768555ms\r\nStep 179, loss: 0.20655518770217896, step time: 34.9423885345459ms\r\nStep 180, loss: 0.23034240305423737, step time: 41.441917419433594ms\r\nStep 181, loss: 0.24809721112251282, step time: 56.84924125671387ms\r\nStep 182, loss: 0.215785413980484, step time: 59.31997299194336ms\r\nStep 183, loss: 0.22301307320594788, step time: 94.89989280700684ms\r\nStep 184, loss: 0.2594461441040039, step time: 51.30600929260254ms\r\nStep 185, loss: 0.22805005311965942, step time: 71.1662769317627ms\r\nStep 186, loss: 0.2074311226606369, step time: 27.386903762817383ms\r\nStep 187, loss: 0.22991454601287842, step time: 78.04584503173828ms\r\nStep 188, loss: 0.22197772562503815, step time: 48.03586006164551ms\r\nStep 189, loss: 0.22990094125270844, step time: 44.821977615356445ms\r\nStep 190, loss: 0.22791005671024323, step time: 39.87550735473633ms\r\nStep 191, loss: 0.2311677485704422, step time: 37.9633903503418ms\r\nStep 192, loss: 0.20709951221942902, step time: 47.122955322265625ms\r\nStep 193, loss: 0.24093765020370483, step time: 53.026437759399414ms\r\nStep 194, loss: 0.2499508112668991, step time: 28.325557708740234ms\r\nStep 195, loss: 0.20580357313156128, step time: 64.40281867980957ms\r\nStep 196, loss: 0.2215675711631775, step time: 42.51289367675781ms\r\nStep 197, loss: 0.22246551513671875, step time: 25.304317474365234ms\r\nStep 198, loss: 0.2345874160528183, step time: 49.22652244567871ms\r\nStep 199, loss: 0.23320308327674866, step time: 38.62643241882324ms\r\nStep 200, loss: 0.2333080768585205, step time: 25.24113655090332ms\r\nStep 201, loss: 0.22704865038394928, step time: 26.98493003845215ms\r\nStep 202, loss: 0.23823073506355286, step time: 31.833648681640625ms\r\nStep 203, loss: 0.20774541795253754, step time: 32.57155418395996ms\r\nStep 204, loss: 0.24040691554546356, step time: 28.23019027709961ms\r\nStep 205, loss: 0.24088256061077118, step time: 43.387651443481445ms\r\nStep 206, loss: 0.22486215829849243, step time: 26.51214599609375ms\r\nStep 207, loss: 0.21422669291496277, step time: 77.08430290222168ms\r\nStep 208, loss: 0.24419456720352173, step time: 104.25782203674316ms\r\nStep 209, loss: 0.21322758495807648, step time: 63.797950744628906ms\r\nStep 210, loss: 0.253510981798172, step time: 64.00132179260254ms\r\nStep 211, loss: 0.23119083046913147, step time: 127.89797782897949ms\r\nStep 212, loss: 0.232627272605896, step time: 57.684898376464844ms\r\nStep 213, loss: 0.2389298677444458, step time: 117.0034408569336ms\r\nStep 214, loss: 0.23226134479045868, step time: 90.28959274291992ms\r\nStep 215, loss: 0.22713610529899597, step time: 117.23089218139648ms\r\nStep 216, loss: 0.24775546789169312, step time: 66.86735153198242ms\r\nStep 217, loss: 0.23582950234413147, step time: 85.73174476623535ms\r\nStep 218, loss: 0.2579847574234009, step time: 47.32179641723633ms\r\nStep 219, loss: 0.2372114062309265, step time: 59.17000770568848ms\r\nStep 220, loss: 0.23433665931224823, step time: 61.67721748352051ms\r\nStep 221, loss: 0.2527056634426117, step time: 37.91093826293945ms\r\nStep 222, loss: 0.22210896015167236, step time: 
29.537677764892578ms\r\nStep 223, loss: 0.24414469301700592, step time: 49.817562103271484ms\r\nStep 224, loss: 0.2504357099533081, step time: 48.41756820678711ms\r\nStep 225, loss: 0.22071973979473114, step time: 54.474592208862305ms\r\nStep 226, loss: 0.24137301743030548, step time: 52.09779739379883ms\r\nStep 227, loss: 0.24164323508739471, step time: 28.997421264648438ms\r\nStep 228, loss: 0.2205706238746643, step time: 29.86454963684082ms\r\nStep 229, loss: 0.2630937695503235, step time: 29.45852279663086ms\r\nStep 230, loss: 0.25513118505477905, step time: 56.84041976928711ms\r\nStep 231, loss: 0.23567938804626465, step time: 50.53210258483887ms\r\nStep 232, loss: 0.2422901839017868, step time: 53.2991886138916ms\r\nStep 233, loss: 0.24106791615486145, step time: 42.09494590759277ms\r\nStep 234, loss: 0.21551364660263062, step time: 47.04618453979492ms\r\nStep 235, loss: 0.22949783504009247, step time: 28.749465942382812ms\r\nStep 236, loss: 0.23463501036167145, step time: 86.35544776916504ms\r\nStep 237, loss: 0.21839597821235657, step time: 50.90761184692383ms\r\nStep 238, loss: 0.2149261236190796, step time: 54.834842681884766ms\r\nStep 239, loss: 0.22921283543109894, step time: 43.90549659729004ms\r\nStep 240, loss: 0.2159382402896881, step time: 53.63106727600098ms\r\nStep 181, loss: 0.24809721112251282, step time: 46.80037498474121ms\r\nStep 182, loss: 0.215785413980484, step time: 54.780006408691406ms\r\nStep 183, loss: 0.22301307320594788, step time: 28.1674861907959ms\r\nStep 184, loss: 0.2594461441040039, step time: 26.546478271484375ms\r\nStep 185, loss: 0.22805005311965942, step time: 26.401519775390625ms\r\nStep 186, loss: 0.2074311226606369, step time: 24.951696395874023ms\r\nStep 187, loss: 0.22991454601287842, step time: 27.12559700012207ms\r\nStep 188, loss: 0.22197772562503815, step time: 23.596525192260742ms\r\nStep 189, loss: 0.22990094125270844, step time: 38.785457611083984ms\r\nStep 190, loss: 0.22791005671024323, step time: 95.9317684173584ms\r\nStep 191, loss: 0.2311677485704422, step time: 182.22379684448242ms\r\nStep 192, loss: 0.20709951221942902, step time: 126.10101699829102ms\r\nStep 193, loss: 0.24093765020370483, step time: 76.69234275817871ms\r\nStep 194, loss: 0.2499508112668991, step time: 61.997175216674805ms\r\nStep 195, loss: 0.20580357313156128, step time: 61.055660247802734ms\r\nStep 196, loss: 0.2215675711631775, step time: 66.59102439880371ms\r\nStep 197, loss: 0.22246551513671875, step time: 91.4151668548584ms\r\nStep 198, loss: 0.2345874160528183, step time: 85.23440361022949ms\r\nStep 199, loss: 0.23320308327674866, step time: 94.1767692565918ms\r\nStep 200, loss: 0.2333080768585205, step time: 93.28699111938477ms\r\nStep 201, loss: 0.22704865038394928, step time: 96.6036319732666ms\r\nStep 202, loss: 0.23823073506355286, step time: 59.20290946960449ms\r\nStep 203, loss: 0.20774541795253754, step time: 32.27519989013672ms\r\nStep 204, loss: 0.24040691554546356, step time: 66.2684440612793ms\r\nStep 205, loss: 0.24088256061077118, step time: 81.76326751708984ms\r\nStep 206, loss: 0.22486215829849243, step time: 36.359548568725586ms\r\nStep 207, loss: 0.21422669291496277, step time: 46.63801193237305ms\r\nStep 208, loss: 0.24419456720352173, step time: 49.02791976928711ms\r\nStep 209, loss: 0.21322758495807648, step time: 87.33630180358887ms\r\nStep 210, loss: 0.253510981798172, step time: 28.31244468688965ms\r\nStep 211, loss: 0.23119083046913147, step time: 34.34038162231445ms\r\nStep 212, loss: 0.232627272605896, step time: 
37.99939155578613ms\r\nStep 213, loss: 0.2389298677444458, step time: 35.57157516479492ms\r\nStep 214, loss: 0.23226134479045868, step time: 29.137611389160156ms\r\nStep 215, loss: 0.22713610529899597, step time: 42.87981986999512ms\r\nStep 216, loss: 0.24775546789169312, step time: 49.66163635253906ms\r\nStep 217, loss: 0.23582950234413147, step time: 45.5782413482666ms\r\nStep 218, loss: 0.2579847574234009, step time: 55.54056167602539ms\r\nStep 219, loss: 0.2372114062309265, step time: 51.5141487121582ms\r\nStep 220, loss: 0.23433665931224823, step time: 48.723459243774414ms\r\nStep 221, loss: 0.2527056634426117, step time: 51.47194862365723ms\r\nStep 222, loss: 0.22210896015167236, step time: 47.586679458618164ms\r\nStep 223, loss: 0.24414469301700592, step time: 85.22415161132812ms\r\nStep 224, loss: 0.2504357099533081, step time: 67.75307655334473ms\r\nStep 225, loss: 0.22071973979473114, step time: 70.09053230285645ms\r\nStep 226, loss: 0.24137301743030548, step time: 57.895660400390625ms\r\nStep 227, loss: 0.24164323508739471, step time: 61.19894981384277ms\r\nStep 228, loss: 0.2205706238746643, step time: 58.0286979675293ms\r\nStep 229, loss: 0.2630937695503235, step time: 50.34494400024414ms\r\nStep 230, loss: 0.25513118505477905, step time: 32.2268009185791ms\r\nStep 231, loss: 0.23567938804626465, step time: 30.990123748779297ms\r\nStep 232, loss: 0.2422901839017868, step time: 36.23247146606445ms\r\nStep 233, loss: 0.24106791615486145, step time: 26.430606842041016ms\r\nStep 234, loss: 0.21551364660263062, step time: 27.324676513671875ms\r\nStep 235, loss: 0.22949783504009247, step time: 25.076866149902344ms\r\nStep 236, loss: 0.23463501036167145, step time: 25.916576385498047ms\r\nStep 237, loss: 0.21839597821235657, step time: 25.968074798583984ms\r\nStep 238, loss: 0.2149261236190796, step time: 24.314165115356445ms\r\nStep 239, loss: 0.22921283543109894, step time: 27.187585830688477ms\r\nStep 240, loss: 0.2159382402896881, step time: 31.904935836791992ms\r\n",,terminal_output +3266,4042828,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3306283.4 tasks 0-7: running\r\n",,terminal_output +3267,4043063,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3306283.4\r\nsrun: forcing job termination\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3306283.4 ON hkn0402 CANCELLED AT 2025-06-30T15:31:27 ***\r\n",,terminal_output +3268,4043226,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3306283.4\r\nsrun: job abort in progress\r\n",,terminal_output +3269,4044209,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3270,4049035,"TERMINAL",0,0,"g",,terminal_output +3271,4049355,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3272,4049447,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +3273,4049667,"TERMINAL",0,0,"[?25lb[?25h[?25lr[?25h",,terminal_output +3274,4049888,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +3275,4049992,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +3276,4050082,"TERMINAL",0,0,"[?25lc[?25h[?25lh[?25h",,terminal_output +3277,4050422,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r add-wandb-name-and-tags\r\n convert-to-jax-array-in-iter\r\n dont-let-tf-see-gpu\r\n main\r\n preprocess_video\r\n* revised-dataloader\r\n tmp\r\n\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3278,4305477,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output 
+3279,4305542,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3280,4305660,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3281,4305725,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +3282,4305872,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3283,4305988,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0402.localdomain: Mon Jun 30 15:35:50 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3306283 dev_accel interact tum_cte0 R23:23\t 2 hkn[0402-0403]",,terminal_output +3284,4307016,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3285,4307214,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3286,4307583,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +3287,4307649,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +3288,4307777,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3289,4307854,"TERMINAL",0,0,"\r\n[?2004l\rPartition dev_cpuonly : 11 nodes idle\r\nPartition cpuonly : 51 nodes idle\r\nPartition dev_accelerated : 0 nodes idle\r\nPartition accelerated : 0 nodes idle\r\nPartition dev_accelerated-h100 : 1 nodes idle\r\nPartition accelerated-h100 : 0 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3290,4310143,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3291,4310376,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +3292,4310491,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +3293,4310838,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +3294,4310916,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +3295,4311024,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +3296,4311241,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn0402.localdomain: Mon Jun 30 15:35:55 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 51 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 0 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +3297,4312289,"TERMINAL",0,0,"6\t ",,terminal_output +3298,4313251,"TERMINAL",0,0,"7\t ",,terminal_output +3299,4314273,"TERMINAL",0,0,"8\t ",,terminal_output +3300,4314525,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +3301,4320305,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\n",,terminal_output +3302,4320359,"TERMINAL",0,0,"salloc: Relinquishing job allocation 3306283\r\n",,terminal_output +3303,4320379,"TERMINAL",0,0,"salloc: Job allocation 3306283 has been revoked.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +3304,6055348,"TERMINAL",0,0,"idle",,terminal_command +3305,6055384,"TERMINAL",0,0,"]633;E;2025-06-30 16:04:59 idle;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;CPartition dev_cpuonly : 11 nodes idle\r\nPartition cpuonly : 10 nodes idle\r\nPartition dev_accelerated : 3 nodes idle\r\nPartition accelerated : 3 nodes idle\r\nPartition dev_accelerated-h100 : 1 nodes idle\r\nPartition accelerated-h100 : 2 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +3306,6057312,"TERMINAL",0,0,"idle",,terminal_command +3307,6057345,"TERMINAL",0,0,"]633;E;2025-06-30 16:05:01 idle;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;CPartition dev_cpuonly : 11 nodes idle\r\nPartition cpuonly : 10 nodes idle\r\nPartition dev_accelerated : 3 nodes idle\r\nPartition accelerated : 3 nodes 
idle\r\nPartition dev_accelerated-h100 : 1 nodes idle\r\nPartition accelerated-h100 : 2 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +3308,6058774,"TERMINAL",0,0,"idle",,terminal_command +3309,6058815,"TERMINAL",0,0,"]633;E;2025-06-30 16:05:03 idle;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;CPartition dev_cpuonly : 11 nodes idle\r\nPartition cpuonly : 10 nodes idle\r\nPartition dev_accelerated : 3 nodes idle\r\nPartition accelerated : 3 nodes idle\r\nPartition dev_accelerated-h100 : 1 nodes idle\r\nPartition accelerated-h100 : 2 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +3310,6060895,"TERMINAL",0,0,"idling",,terminal_command +3311,6060937,"TERMINAL",0,0,"]633;E;2025-06-30 16:05:05 idling;79ec3af7-6a10-4dac-bb07-e3b50f56ded4]633;Cbash: idling: command not found...\r\n",,terminal_output +3312,6061067,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;127",,terminal_output +3313,6064138,"TERMINAL",0,0,"bash",,terminal_focus +3314,6064143,"utils/dataloader.py",0,0,"",python,tab +3315,6069677,"TERMINAL",0,0,"",,terminal_focus +3316,6073447,"TERMINAL",0,0,"idling",,terminal_command +3317,6073529,"TERMINAL",0,0,"]633;E;2025-06-30 16:05:17 idling;1b1b54ff-80e1-48fc-8101-86ff30a7ce8c]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:05:17 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 18 nodes idle\rPartition dev_accelerated:\t 3 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +3318,6074561,"TERMINAL",0,0,"8\t ",,terminal_output +3319,6075592,"TERMINAL",0,0,"9\t ",,terminal_output +3320,6075609,"TERMINAL",0,0,"bash",,terminal_focus +3321,6076637,"TERMINAL",0,0,"21\t ",,terminal_output +3322,6077678,"TERMINAL",0,0,"2\t ",,terminal_output +3323,6078717,"TERMINAL",0,0,"3\t ",,terminal_output +3324,6079754,"TERMINAL",0,0,"4\t ",,terminal_output +3325,6080794,"TERMINAL",0,0,"5\t ",,terminal_output +3326,6081827,"TERMINAL",0,0,"6\t ",,terminal_output +3327,6082899,"TERMINAL",0,0,"7\t ",,terminal_output +3328,6083973,"TERMINAL",0,0,"8\t ",,terminal_output +3329,6084965,"TERMINAL",0,0,"9\t ",,terminal_output +3330,6085991,"TERMINAL",0,0,"30\t ",,terminal_output +3331,6087035,"TERMINAL",0,0,"1\t ",,terminal_output +3332,6088064,"TERMINAL",0,0,"2\t ",,terminal_output +3333,6089105,"TERMINAL",0,0,"3\t ",,terminal_output +3334,6090150,"TERMINAL",0,0,"4\t ",,terminal_output +3335,6091197,"TERMINAL",0,0,"5\t ",,terminal_output +3336,6092221,"TERMINAL",0,0,"6\t ",,terminal_output +3337,6093263,"TERMINAL",0,0,"7\t ",,terminal_output +3338,6094298,"TERMINAL",0,0,"8\t ",,terminal_output +3339,6095338,"TERMINAL",0,0,"9\t ",,terminal_output +3340,6096371,"TERMINAL",0,0,"40\t ",,terminal_output +3341,6097405,"TERMINAL",0,0,"1\t ",,terminal_output +3342,6098457,"TERMINAL",0,0,"2\t ",,terminal_output +3343,6099513,"TERMINAL",0,0,"3\t ",,terminal_output +3344,6100544,"TERMINAL",0,0,"4\t ",,terminal_output +3345,6101608,"TERMINAL",0,0,"5\t ",,terminal_output +3346,6102634,"TERMINAL",0,0,"6\t ",,terminal_output +3347,6103645,"TERMINAL",0,0,"8\t ",,terminal_output +3348,6104705,"TERMINAL",0,0,"9\t ",,terminal_output +3349,6105733,"TERMINAL",0,0,"50\t ",,terminal_output +3350,6106080,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",0,0,"#!/usr/bin/env 
bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_1_node\n#SBATCH --mem=100G\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=48 \\n --min_lr=3e-4 \\n --max_lr=3e-4 \\n --log_image_interval=250 \\n --log \\n --name=tokenizer-batch-size-scaling-1-node-$slurm_job_id \\n --tags tokenizer batch-size-scaling 1-node time-step \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +3351,6106772,"TERMINAL",0,0,"1\t ",,terminal_output +3352,6107822,"TERMINAL",0,0,"2\t ",,terminal_output +3353,6108857,"TERMINAL",0,0,"3\t ",,terminal_output +3354,6109904,"TERMINAL",0,0,"4\t ",,terminal_output +3355,6110940,"TERMINAL",0,0,"5\t ",,terminal_output +3356,6111980,"TERMINAL",0,0,"6\t ",,terminal_output +3357,6113047,"TERMINAL",0,0,"7\t ",,terminal_output +3358,6114069,"TERMINAL",0,0,"8\t ",,terminal_output +3359,6115135,"TERMINAL",0,0,"9\t ",,terminal_output +3360,6116154,"TERMINAL",0,0,"6:00\t ",,terminal_output +3361,6117182,"TERMINAL",0,0,"1\t ",,terminal_output +3362,6118233,"TERMINAL",0,0,"2\t ",,terminal_output +3363,6119257,"TERMINAL",0,0,"3\t ",,terminal_output +3364,6120302,"TERMINAL",0,0,"4\t ",,terminal_output +3365,6121321,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=2\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_2_node\n#SBATCH --mem=100G\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=4.24e-4 \\n --max_lr=4.24e-4 \\n --log_image_interval=250 \\n --log \\n --name=tokenizer-batch-size-scaling-2-node-$slurm_job_id \\n --tags tokenizer batch-size-scaling 2-node time-step \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +3366,6121348,"TERMINAL",0,0,"5\t ",,terminal_output +3367,6122382,"TERMINAL",0,0,"6\t ",,terminal_output +3368,6122528,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH 
--error=logs/logs_training/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_8_node\n#SBATCH --mem=100G\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=384 \\n --min_lr=8.49e-4 \\n --max_lr=8.49e-4 \\n --log_image_interval=250 \\n --log \\n --name=tokenizer-batch-size-scaling-8-node-$slurm_job_id \\n --tags tokenizer batch-size-scaling 8-node time-step \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +3369,6123422,"TERMINAL",0,0,"7\t ",,terminal_output +3370,6124069,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=32\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_32_node\n#SBATCH --mem=400G\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1536 \\n --min_lr=3e-4 \\n --max_lr=3e-4 \\n --log_image_interval=250 \\n --log \\n --name=tokenizer-batch-size-scaling-32-node-$slurm_job_id \\n --tags tokenizer batch-size-scaling 32-node \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +3371,6124468,"TERMINAL",0,0,"8\t ",,terminal_output +3372,6124945,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",285,0,"",shellscript,selection_mouse +3373,6125397,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",342,0,"",shellscript,selection_mouse +3374,6125400,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",341,0,"",shellscript,selection_command +3375,6125509,"TERMINAL",0,0,"9\t ",,terminal_output +3376,6126541,"TERMINAL",0,0,"10\t ",,terminal_output +3377,6127595,"TERMINAL",0,0,"1\t ",,terminal_output +3378,6128673,"TERMINAL",0,0,"3\t ",,terminal_output +3379,6129681,"TERMINAL",0,0,"42",,terminal_output +3380,6130715,"TERMINAL",0,0,"5\t ",,terminal_output +3381,6131756,"TERMINAL",0,0,"6\t ",,terminal_output +3382,6132810,"TERMINAL",0,0,"7\t ",,terminal_output +3383,6133847,"TERMINAL",0,0,"8\t ",,terminal_output +3384,6134902,"TERMINAL",0,0,"9\t ",,terminal_output +3385,6135942,"TERMINAL",0,0,"20\t ",,terminal_output +3386,6136968,"TERMINAL",0,0,"1\t ",,terminal_output +3387,6138028,"TERMINAL",0,0,"2\t ",,terminal_output +3388,6139049,"TERMINAL",0,0,"3\t ",,terminal_output +3389,6140090,"TERMINAL",0,0,"4\t ",,terminal_output 
+3390,6140130,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",893,0,"",shellscript,selection_mouse +3391,6141052,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",894,0,"",shellscript,selection_command +3392,6141132,"TERMINAL",0,0,"5\t ",,terminal_output +3393,6141460,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",893,1,"",shellscript,content +3394,6141562,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",892,1,"",shellscript,content +3395,6141687,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",891,1,"",shellscript,content +3396,6141847,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",891,0,"1",shellscript,content +3397,6141848,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",892,0,"",shellscript,selection_keyboard +3398,6141908,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",892,0,"0",shellscript,content +3399,6141909,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",893,0,"",shellscript,selection_keyboard +3400,6142060,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",893,0,"0",shellscript,content +3401,6142061,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",894,0,"",shellscript,selection_keyboard +3402,6142180,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",894,0,"0",shellscript,content +3403,6142181,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch",895,0,"",shellscript,selection_keyboard +3404,6142195,"TERMINAL",0,0,"6\t ",,terminal_output +3405,6143228,"TERMINAL",0,0,"7\t ",,terminal_output +3406,6143339,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=16\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_16_node\n#SBATCH --mem=200G\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=768 \\n --min_lr=1.20e-3 \\n --max_lr=1.20e-3 \\n --log_image_interval=250 \\n --log \\n --name=tokenizer-batch-size-scaling-16-node-$slurm_job_id \\n --tags tokenizer batch-size-scaling 16-node \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +3407,6144258,"TERMINAL",0,0,"8\t ",,terminal_output +3408,6144904,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",899,0,"",shellscript,selection_mouse +3409,6145305,"TERMINAL",0,0,"9\t ",,terminal_output +3410,6145828,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",898,1,"",shellscript,content 
+3411,6145964,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",897,1,"",shellscript,content +3412,6146091,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",896,1,"",shellscript,content +3413,6146230,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",896,0,"1",shellscript,content +3414,6146231,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",897,0,"",shellscript,selection_keyboard +3415,6146293,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",897,0,"0",shellscript,content +3416,6146294,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",898,0,"",shellscript,selection_keyboard +3417,6146364,"TERMINAL",0,0,"30\t ",,terminal_output +3418,6146452,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",898,0,"0",shellscript,content +3419,6146452,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",899,0,"",shellscript,selection_keyboard +3420,6146990,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",899,0,"0",shellscript,content +3421,6146990,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch",900,0,"",shellscript,selection_keyboard +3422,6147386,"TERMINAL",0,0,"1\t ",,terminal_output +3423,6148134,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",0,0,"",shellscript,tab +3424,6148425,"TERMINAL",0,0,"2\t ",,terminal_output +3425,6149109,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",897,0,"",shellscript,selection_mouse +3426,6149469,"TERMINAL",0,0,"3\t ",,terminal_output +3427,6149925,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",896,1,"",shellscript,content +3428,6150059,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",895,1,"",shellscript,content +3429,6150183,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",894,1,"",shellscript,content +3430,6150330,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",894,0,"1",shellscript,content +3431,6150330,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",895,0,"",shellscript,selection_keyboard +3432,6150408,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",895,0,"0",shellscript,content +3433,6150408,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",896,0,"",shellscript,selection_keyboard +3434,6150509,"TERMINAL",0,0,"4\t ",,terminal_output +3435,6150559,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",896,0,"0",shellscript,content +3436,6150559,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",897,0,"",shellscript,selection_keyboard +3437,6150675,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",897,0,"0",shellscript,content +3438,6150675,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch",898,0,"",shellscript,selection_keyboard +3439,6151616,"TERMINAL",0,0,"5\t ",,terminal_output +3440,6152026,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH 
--nodes=4\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_4_node\n#SBATCH --mem=100G\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=192 \\n --min_lr=6.00e-4 \\n --max_lr=6.00e-4 \\n --log_image_interval=250 \\n --log \\n --name=tokenizer-batch-size-scaling-4-node-$slurm_job_id \\n --tags tokenizer batch-size-scaling 4-node time-step \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +3441,6152589,"TERMINAL",0,0,"6\t ",,terminal_output +3442,6152980,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",898,0,"",shellscript,selection_mouse +3443,6153639,"TERMINAL",0,0,"8\t ",,terminal_output +3444,6153799,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",897,0,"",shellscript,selection_command +3445,6154198,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",896,1,"",shellscript,content +3446,6154314,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",895,1,"",shellscript,content +3447,6154444,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",894,1,"",shellscript,content +3448,6154563,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",894,0,"1",shellscript,content +3449,6154563,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",895,0,"",shellscript,selection_keyboard +3450,6154638,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",895,0,"0",shellscript,content +3451,6154639,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",896,0,"",shellscript,selection_keyboard +3452,6154712,"TERMINAL",0,0,"9\t ",,terminal_output +3453,6154811,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",896,0,"0",shellscript,content +3454,6154811,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",897,0,"",shellscript,selection_keyboard +3455,6154987,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",897,0,"0",shellscript,content +3456,6154988,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch",898,0,"",shellscript,selection_keyboard +3457,6155728,"TERMINAL",0,0,"40\t ",,terminal_output +3458,6156345,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",0,0,"",shellscript,tab +3459,6156756,"TERMINAL",0,0,"1\t ",,terminal_output +3460,6157311,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",896,0,"",shellscript,selection_mouse +3461,6157796,"TERMINAL",0,0,"2\t ",,terminal_output 
+3462,6158121,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",895,1,"",shellscript,content +3463,6158238,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",894,1,"",shellscript,content +3464,6158380,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",893,1,"",shellscript,content +3465,6158839,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",893,0,"1",shellscript,content +3466,6158840,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",894,0,"",shellscript,selection_keyboard +3467,6158855,"TERMINAL",0,0,"3\t ",,terminal_output +3468,6158931,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",894,0,"0",shellscript,content +3469,6158932,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",895,0,"",shellscript,selection_keyboard +3470,6159083,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",895,0,"0",shellscript,content +3471,6159084,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",896,0,"",shellscript,selection_keyboard +3472,6159233,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",896,0,"0",shellscript,content +3473,6159234,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch",897,0,"",shellscript,selection_keyboard +3474,6159901,"TERMINAL",0,0,"4\t ",,terminal_output +3475,6160713,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",0,0,"",shellscript,tab +3476,6160929,"TERMINAL",0,0,"5\t ",,terminal_output +3477,6161411,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",890,0,"",shellscript,selection_mouse +3478,6161956,"TERMINAL",0,0,"6\t ",,terminal_output +3479,6162227,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",889,1,"",shellscript,content +3480,6162355,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",888,1,"",shellscript,content +3481,6162489,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",887,1,"",shellscript,content +3482,6162614,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",887,0,"1",shellscript,content +3483,6162615,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",888,0,"",shellscript,selection_keyboard +3484,6162673,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",888,0,"0",shellscript,content +3485,6162674,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",889,0,"",shellscript,selection_keyboard +3486,6162843,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",889,0,"0",shellscript,content +3487,6162844,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",890,0,"",shellscript,selection_keyboard +3488,6162981,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",890,0,"0",shellscript,content +3489,6162982,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",891,0,"",shellscript,selection_keyboard +3490,6163037,"TERMINAL",0,0,"7\t ",,terminal_output +3491,6164037,"TERMINAL",0,0,"8\t ",,terminal_output 
+3492,6164508,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",905,0,"",shellscript,selection_mouse +3493,6165042,"slurm/jobs/mihir/horeka/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch",841,0,"",shellscript,selection_mouse +3494,6165100,"TERMINAL",0,0,"9\t ",,terminal_output +3495,6166119,"TERMINAL",0,0,"50\t ",,terminal_output +3496,6167183,"TERMINAL",0,0,"1\t ",,terminal_output +3497,6168268,"TERMINAL",0,0,"2\t ",,terminal_output +3498,6169252,"TERMINAL",0,0,"3\t ",,terminal_output +3499,6170326,"TERMINAL",0,0,"4\t ",,terminal_output +3500,6171387,"TERMINAL",0,0,"5\t ",,terminal_output +3501,6172508,"TERMINAL",0,0,"6\t ",,terminal_output +3502,6173412,"TERMINAL",0,0,"7\t ",,terminal_output +3503,6174557,"TERMINAL",0,0,"8\t ",,terminal_output +3504,6175579,"TERMINAL",0,0,"9\t ",,terminal_output +3505,6176603,"TERMINAL",0,0,"7:00\t ",,terminal_output +3506,6177628,"TERMINAL",0,0,"1\t ",,terminal_output +3507,6178628,"TERMINAL",0,0,"3\t ",,terminal_output +3508,6179670,"TERMINAL",0,0,"4\t ",,terminal_output +3509,6180708,"TERMINAL",0,0,"5\t ",,terminal_output +3510,6181757,"TERMINAL",0,0,"6\t ",,terminal_output +3511,6182787,"TERMINAL",0,0,"7\t ",,terminal_output +3512,6183830,"TERMINAL",0,0,"8\t ",,terminal_output +3513,6184870,"TERMINAL",0,0,"9\t ",,terminal_output +3514,6185908,"TERMINAL",0,0,"10\t ",,terminal_output +3515,6186951,"TERMINAL",0,0,"1\t ",,terminal_output +3516,6187984,"TERMINAL",0,0,"2\t ",,terminal_output +3517,6189099,"TERMINAL",0,0,"3\t ",,terminal_output +3518,6190121,"TERMINAL",0,0,"4\t ",,terminal_output +3519,6191145,"TERMINAL",0,0,"5\t ",,terminal_output +3520,6192135,"TERMINAL",0,0,"6\t ",,terminal_output +3521,6193181,"TERMINAL",0,0,"7\t ",,terminal_output +3522,6194211,"TERMINAL",0,0,"8\t ",,terminal_output +3523,6195250,"TERMINAL",0,0,"9\t ",,terminal_output +3524,6196324,"TERMINAL",0,0,"20\t ",,terminal_output +3525,6197392,"TERMINAL",0,0,"1\t ",,terminal_output +3526,6198367,"TERMINAL",0,0,"2\t ",,terminal_output +3527,6199442,"TERMINAL",0,0,"3\t ",,terminal_output +3528,6200454,"TERMINAL",0,0,"4\t ",,terminal_output +3529,6201600,"TERMINAL",0,0,"5\t ",,terminal_output +3530,6202532,"TERMINAL",0,0,"6\t ",,terminal_output +3531,6203624,"TERMINAL",0,0,"7\t ",,terminal_output +3532,6204662,"TERMINAL",0,0,"9\t ",,terminal_output +3533,6205686,"TERMINAL",0,0,"30\t ",,terminal_output +3534,6206813,"TERMINAL",0,0,"1\t ",,terminal_output +3535,6207750,"TERMINAL",0,0,"2\t ",,terminal_output +3536,6208792,"TERMINAL",0,0,"3\t ",,terminal_output +3537,6209835,"TERMINAL",0,0,"4\t ",,terminal_output +3538,6210878,"TERMINAL",0,0,"5\t ",,terminal_output +3539,6211912,"TERMINAL",0,0,"6\t ",,terminal_output +3540,6212957,"TERMINAL",0,0,"7\t ",,terminal_output +3541,6214009,"TERMINAL",0,0,"8\t ",,terminal_output +3542,6215109,"TERMINAL",0,0,"9\t ",,terminal_output +3543,6216131,"TERMINAL",0,0,"40\t ",,terminal_output +3544,6216567,"TERMINAL",0,0,"watch",,terminal_focus +3545,6217120,"TERMINAL",0,0,"1\t ",,terminal_output +3546,6218179,"TERMINAL",0,0,"2\t ",,terminal_output +3547,6219459,"TERMINAL",0,0,"3\t Every 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:07:43 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 18 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +3548,6220535,"TERMINAL",0,0,"4",,terminal_output 
+3549,6221562,"TERMINAL",0,0,"5",,terminal_output +3550,6222593,"TERMINAL",0,0,"6",,terminal_output +3551,6223657,"TERMINAL",0,0,"7",,terminal_output +3552,6224619,"TERMINAL",0,0,"9",,terminal_output +3553,6225755,"TERMINAL",0,0,"50",,terminal_output +3554,6226782,"TERMINAL",0,0,"19",,terminal_output +3555,6227861,"TERMINAL",0,0,"2",,terminal_output +3556,6228811,"TERMINAL",0,0,"3",,terminal_output +3557,6229827,"TERMINAL",0,0,"4",,terminal_output +3558,6230906,"TERMINAL",0,0,"5",,terminal_output +3559,6231967,"TERMINAL",0,0,"6",,terminal_output +3560,6232971,"TERMINAL",0,0,"7",,terminal_output +3561,6234054,"TERMINAL",0,0,"8",,terminal_output +3562,6235042,"TERMINAL",0,0,"9",,terminal_output +3563,6236226,"TERMINAL",0,0,"8:00",,terminal_output +3564,6237114,"TERMINAL",0,0,"18",,terminal_output +3565,6238207,"TERMINAL",0,0,"2",,terminal_output +3566,6239297,"TERMINAL",0,0,"3",,terminal_output +3567,6240252,"TERMINAL",0,0,"4",,terminal_output +3568,6241316,"TERMINAL",0,0,"5",,terminal_output +3569,6242338,"TERMINAL",0,0,"6",,terminal_output +3570,6243373,"TERMINAL",0,0,"7",,terminal_output +3571,6244489,"TERMINAL",0,0,"8",,terminal_output +3572,6245466,"TERMINAL",0,0,"9",,terminal_output +3573,6246537,"TERMINAL",0,0,"10",,terminal_output +3574,6247570,"TERMINAL",0,0,"1",,terminal_output +3575,6248623,"TERMINAL",0,0,"2",,terminal_output +3576,6249660,"TERMINAL",0,0,"4",,terminal_output +3577,6250698,"TERMINAL",0,0,"5",,terminal_output +3578,6251742,"TERMINAL",0,0,"6",,terminal_output +3579,6252762,"TERMINAL",0,0,"7",,terminal_output +3580,6253794,"TERMINAL",0,0,"8",,terminal_output +3581,6254837,"TERMINAL",0,0,"9",,terminal_output +3582,6255887,"TERMINAL",0,0,"20",,terminal_output +3583,6256993,"TERMINAL",0,0,"1",,terminal_output +3584,6257959,"TERMINAL",0,0,"2",,terminal_output +3585,6258997,"TERMINAL",0,0,"3",,terminal_output +3586,6260046,"TERMINAL",0,0,"4",,terminal_output +3587,6261111,"TERMINAL",0,0,"5",,terminal_output +3588,6262115,"TERMINAL",0,0,"6",,terminal_output +3589,6263152,"TERMINAL",0,0,"7",,terminal_output +3590,6264203,"TERMINAL",0,0,"8",,terminal_output +3591,6265237,"TERMINAL",0,0,"9",,terminal_output +3592,6266275,"TERMINAL",0,0,"30",,terminal_output +3593,6267334,"TERMINAL",0,0,"1",,terminal_output +3594,6268370,"TERMINAL",0,0,"2",,terminal_output +3595,6269492,"TERMINAL",0,0,"3",,terminal_output +3596,6270471,"TERMINAL",0,0,"4",,terminal_output +3597,6271478,"TERMINAL",0,0,"5",,terminal_output +3598,6272543,"TERMINAL",0,0,"6",,terminal_output +3599,6273625,"TERMINAL",0,0,"7",,terminal_output +3600,6274594,"TERMINAL",0,0,"8",,terminal_output +3601,6275730,"TERMINAL",0,0,"40",,terminal_output +3602,6276676,"TERMINAL",0,0,"1",,terminal_output +3603,6277783,"TERMINAL",0,0,"2",,terminal_output +3604,6278751,"TERMINAL",0,0,"3",,terminal_output +3605,6279867,"TERMINAL",0,0,"4",,terminal_output +3606,6280849,"TERMINAL",0,0,"5",,terminal_output +3607,6281878,"TERMINAL",0,0,"6",,terminal_output +3608,6282960,"TERMINAL",0,0,"7",,terminal_output +3609,6283941,"TERMINAL",0,0,"8",,terminal_output +3610,6284981,"TERMINAL",0,0,"9",,terminal_output +3611,6286074,"TERMINAL",0,0,"50",,terminal_output +3612,6287057,"TERMINAL",0,0,"1",,terminal_output +3613,6288124,"TERMINAL",0,0,"2",,terminal_output +3614,6289143,"TERMINAL",0,0,"3",,terminal_output +3615,6290182,"TERMINAL",0,0,"4",,terminal_output +3616,6291220,"TERMINAL",0,0,"5",,terminal_output +3617,6292257,"TERMINAL",0,0,"6",,terminal_output +3618,6293299,"TERMINAL",0,0,"7",,terminal_output 
+3619,6294340,"TERMINAL",0,0,"8",,terminal_output +3620,6295389,"TERMINAL",0,0,"9",,terminal_output +3621,6296517,"TERMINAL",0,0,"9:00",,terminal_output +3622,6297540,"TERMINAL",0,0,"1",,terminal_output +3623,6298570,"TERMINAL",0,0,"2",,terminal_output +3624,6299594,"TERMINAL",0,0,"3",,terminal_output +3625,6300621,"TERMINAL",0,0,"4",,terminal_output +3626,6301636,"TERMINAL",0,0,"6",,terminal_output +3627,6302672,"TERMINAL",0,0,"7",,terminal_output +3628,6303708,"TERMINAL",0,0,"8",,terminal_output +3629,6304744,"TERMINAL",0,0,"9",,terminal_output +3630,6305835,"TERMINAL",0,0,"10",,terminal_output +3631,6306860,"TERMINAL",0,0,"1",,terminal_output +3632,6307855,"TERMINAL",0,0,"29",,terminal_output +3633,6308908,"TERMINAL",0,0,"3",,terminal_output +3634,6309934,"TERMINAL",0,0,"481",,terminal_output +3635,6310970,"TERMINAL",0,0,"5",,terminal_output +3636,6312087,"TERMINAL",0,0,"6",,terminal_output +3637,6313121,"TERMINAL",0,0,"7",,terminal_output +3638,6314074,"TERMINAL",0,0,"8",,terminal_output +3639,6315155,"TERMINAL",0,0,"9",,terminal_output +3640,6316145,"TERMINAL",0,0,"20",,terminal_output +3641,6317181,"TERMINAL",0,0,"1",,terminal_output +3642,6318221,"TERMINAL",0,0,"2",,terminal_output +3643,6319269,"TERMINAL",0,0,"3",,terminal_output +3644,6320385,"TERMINAL",0,0,"4",,terminal_output +3645,6321330,"TERMINAL",0,0,"5",,terminal_output +3646,6322424,"TERMINAL",0,0,"6",,terminal_output +3647,6323448,"TERMINAL",0,0,"7",,terminal_output +3648,6324474,"TERMINAL",0,0,"8",,terminal_output +3649,6325478,"TERMINAL",0,0,"9",,terminal_output +3650,6326624,"TERMINAL",0,0,"30",,terminal_output +3651,6327551,"TERMINAL",0,0,"19",,terminal_output +3652,6328623,"TERMINAL",0,0,"2",,terminal_output +3653,6329630,"TERMINAL",0,0,"4",,terminal_output +3654,6330670,"TERMINAL",0,0,"5",,terminal_output +3655,6331742,"TERMINAL",0,0,"6",,terminal_output +3656,6332767,"TERMINAL",0,0,"7",,terminal_output +3657,6333791,"TERMINAL",0,0,"8",,terminal_output +3658,6334821,"TERMINAL",0,0,"9",,terminal_output +3659,6335858,"TERMINAL",0,0,"40",,terminal_output +3660,6336891,"TERMINAL",0,0,"1",,terminal_output +3661,6337934,"TERMINAL",0,0,"2",,terminal_output +3662,6338972,"TERMINAL",0,0,"38",,terminal_output +3663,6340037,"TERMINAL",0,0,"4",,terminal_output +3664,6341080,"TERMINAL",0,0,"59",,terminal_output +3665,6342092,"TERMINAL",0,0,"6",,terminal_output +3666,6343126,"TERMINAL",0,0,"7",,terminal_output +3667,6344164,"TERMINAL",0,0,"8",,terminal_output +3668,6345197,"TERMINAL",0,0,"9",,terminal_output +3669,6346243,"TERMINAL",0,0,"508",,terminal_output +3670,6347307,"TERMINAL",0,0,"1",,terminal_output +3671,6348316,"TERMINAL",0,0,"2",,terminal_output +3672,6349358,"TERMINAL",0,0,"3",,terminal_output +3673,6350391,"TERMINAL",0,0,"4",,terminal_output +3674,6351507,"TERMINAL",0,0,"5",,terminal_output +3675,6352529,"TERMINAL",0,0,"6",,terminal_output +3676,6353735,"TERMINAL",0,0,"7",,terminal_output +3677,6354596,"TERMINAL",0,0,"8",,terminal_output +3678,6355710,"TERMINAL",0,0,"9",,terminal_output +3679,6356726,"TERMINAL",0,0,"10:01",,terminal_output +3680,6357689,"TERMINAL",0,0,"2",,terminal_output +3681,6358717,"TERMINAL",0,0,"3",,terminal_output +3682,6359870,"TERMINAL",0,0,"4",,terminal_output +3683,6360795,"TERMINAL",0,0,"5",,terminal_output +3684,6361882,"TERMINAL",0,0,"6",,terminal_output +3685,6362922,"TERMINAL",0,0,"7",,terminal_output +3686,6363922,"TERMINAL",0,0,"8",,terminal_output +3687,6365005,"TERMINAL",0,0,"9",,terminal_output +3688,6365997,"TERMINAL",0,0,"10",,terminal_output 
+3689,6367043,"TERMINAL",0,0,"1",,terminal_output +3690,6368097,"TERMINAL",0,0,"2",,terminal_output +3691,6369233,"TERMINAL",0,0,"39",,terminal_output +3692,6370170,"TERMINAL",0,0,"48",,terminal_output +3693,6371271,"TERMINAL",0,0,"5",,terminal_output +3694,6372229,"TERMINAL",0,0,"6",,terminal_output +3695,6373272,"TERMINAL",0,0,"7",,terminal_output +3696,6374331,"TERMINAL",0,0,"8",,terminal_output +3697,6375359,"TERMINAL",0,0,"9",,terminal_output +3698,6376403,"TERMINAL",0,0,"20",,terminal_output +3699,6377515,"TERMINAL",0,0,"1",,terminal_output +3700,6378539,"TERMINAL",0,0,"2",,terminal_output +3701,6379521,"TERMINAL",0,0,"3",,terminal_output +3702,6380563,"TERMINAL",0,0,"4",,terminal_output +3703,6381612,"TERMINAL",0,0,"5",,terminal_output +3704,6382743,"TERMINAL",0,0,"7",,terminal_output +3705,6383688,"TERMINAL",0,0,"8",,terminal_output +3706,6384736,"TERMINAL",0,0,"9",,terminal_output +3707,6385811,"TERMINAL",0,0,"30",,terminal_output +3708,6386835,"TERMINAL",0,0,"19",,terminal_output +3709,6387852,"TERMINAL",0,0,"2",,terminal_output +3710,6388916,"TERMINAL",0,0,"3",,terminal_output +3711,6389941,"TERMINAL",0,0,"4",,terminal_output +3712,6391033,"TERMINAL",0,0,"5",,terminal_output +3713,6392058,"TERMINAL",0,0,"6",,terminal_output +3714,6393066,"TERMINAL",0,0,"7",,terminal_output +3715,6394208,"TERMINAL",0,0,"8",,terminal_output +3716,6395240,"TERMINAL",0,0,"9",,terminal_output +3717,6396195,"TERMINAL",0,0,"40",,terminal_output +3718,6397238,"TERMINAL",0,0,"18",,terminal_output +3719,6398292,"TERMINAL",0,0,"2",,terminal_output +3720,6399329,"TERMINAL",0,0,"3",,terminal_output +3721,6400366,"TERMINAL",0,0,"4",,terminal_output +3722,6401408,"TERMINAL",0,0,"5",,terminal_output +3723,6402450,"TERMINAL",0,0,"6",,terminal_output +3724,6403504,"TERMINAL",0,0,"7",,terminal_output +3725,6404532,"TERMINAL",0,0,"8",,terminal_output +3726,6405575,"TERMINAL",0,0,"9",,terminal_output +3727,6406612,"TERMINAL",0,0,"50",,terminal_output +3728,6407656,"TERMINAL",0,0,"2",,terminal_output +3729,6408693,"TERMINAL",0,0,"3",,terminal_output +3730,6410102,"TERMINAL",0,0,"4",,terminal_output +3731,6410800,"TERMINAL",0,0,"5",,terminal_output +3732,6411820,"TERMINAL",0,0,"6",,terminal_output +3733,6412859,"TERMINAL",0,0,"7",,terminal_output +3734,6413899,"TERMINAL",0,0,"8",,terminal_output +3735,6414945,"TERMINAL",0,0,"9",,terminal_output +3736,6415977,"TERMINAL",0,0,"1:00",,terminal_output +3737,6417022,"TERMINAL",0,0,"1",,terminal_output +3738,6418066,"TERMINAL",0,0,"2",,terminal_output +3739,6419099,"TERMINAL",0,0,"3",,terminal_output +3740,6420138,"TERMINAL",0,0,"4",,terminal_output +3741,6421197,"TERMINAL",0,0,"5",,terminal_output +3742,6422229,"TERMINAL",0,0,"6",,terminal_output +3743,6423256,"TERMINAL",0,0,"7",,terminal_output +3744,6424299,"TERMINAL",0,0,"8",,terminal_output +3745,6425340,"TERMINAL",0,0,"9",,terminal_output +3746,6426383,"TERMINAL",0,0,"10",,terminal_output +3747,6427427,"TERMINAL",0,0,"1",,terminal_output +3748,6428476,"TERMINAL",0,0,"2",,terminal_output +3749,6429508,"TERMINAL",0,0,"3",,terminal_output +3750,6430543,"TERMINAL",0,0,"4",,terminal_output +3751,6431590,"TERMINAL",0,0,"5",,terminal_output +3752,6432628,"TERMINAL",0,0,"7",,terminal_output +3753,6433658,"TERMINAL",0,0,"8",,terminal_output +3754,6434699,"TERMINAL",0,0,"9",,terminal_output +3755,6435738,"TERMINAL",0,0,"20",,terminal_output +3756,6436781,"TERMINAL",0,0,"1",,terminal_output +3757,6437825,"TERMINAL",0,0,"2",,terminal_output +3758,6438862,"TERMINAL",0,0,"3",,terminal_output 
+3759,6439897,"TERMINAL",0,0,"4",,terminal_output +3760,6440943,"TERMINAL",0,0,"5",,terminal_output +3761,6442029,"TERMINAL",0,0,"6",,terminal_output +3762,6443017,"TERMINAL",0,0,"7",,terminal_output +3763,6444059,"TERMINAL",0,0,"8",,terminal_output +3764,6445203,"TERMINAL",0,0,"99",,terminal_output +3765,6446129,"TERMINAL",0,0,"30",,terminal_output +3766,6447170,"TERMINAL",0,0,"1",,terminal_output +3767,6447971,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:11:32 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +3768,6448406,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:11:32 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idleEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:11:32 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idleEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:11:32 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idleEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:11:32 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idleEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:11:32 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idleEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:11:32 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idleEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:11:32 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +3769,6448539,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:11:32 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition 
dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idleEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:11:32 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idleEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:11:32 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 19 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 3 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +3770,6449607,"TERMINAL",0,0,"3",,terminal_output +3771,6450632,"TERMINAL",0,0,"5",,terminal_output +3772,6451761,"TERMINAL",0,0,"6",,terminal_output +3773,6452712,"TERMINAL",0,0,"7",,terminal_output +3774,6453759,"TERMINAL",0,0,"8",,terminal_output +3775,6454819,"TERMINAL",0,0,"9",,terminal_output +3776,6455833,"TERMINAL",0,0,"408",,terminal_output +3777,6456886,"TERMINAL",0,0,"1",,terminal_output +3778,6457944,"TERMINAL",0,0,"2",,terminal_output +3779,6459026,"TERMINAL",0,0,"3",,terminal_output +3780,6460052,"TERMINAL",0,0,"4",,terminal_output +3781,6461032,"TERMINAL",0,0,"5",,terminal_output +3782,6462069,"TERMINAL",0,0,"6",,terminal_output +3783,6463126,"TERMINAL",0,0,"7",,terminal_output +3784,6464257,"TERMINAL",0,0,"8",,terminal_output +3785,6465210,"TERMINAL",0,0,"9",,terminal_output +3786,6466306,"TERMINAL",0,0,"50",,terminal_output +3787,6467267,"TERMINAL",0,0,"1",,terminal_output +3788,6468328,"TERMINAL",0,0,"2",,terminal_output +3789,6469357,"TERMINAL",0,0,"3",,terminal_output +3790,6470399,"TERMINAL",0,0,"4",,terminal_output +3791,6471427,"TERMINAL",0,0,"5",,terminal_output +3792,6472468,"TERMINAL",0,0,"6",,terminal_output +3793,6473569,"TERMINAL",0,0,"72",,terminal_output +3794,6474595,"TERMINAL",0,0,"8",,terminal_output +3795,6475615,"TERMINAL",0,0,"9",,terminal_output +3796,6476639,"TERMINAL",0,0,"2:01",,terminal_output +3797,6477668,"TERMINAL",0,0,"2",,terminal_output +3798,6478703,"TERMINAL",0,0,"3",,terminal_output +3799,6479815,"TERMINAL",0,0,"40",,terminal_output +3800,6480780,"TERMINAL",0,0,"5",,terminal_output +3801,6481820,"TERMINAL",0,0,"6",,terminal_output +3802,6482885,"TERMINAL",0,0,"7",,terminal_output +3803,6483912,"TERMINAL",0,0,"8",,terminal_output +3804,6484935,"TERMINAL",0,0,"9",,terminal_output +3805,6486062,"TERMINAL",0,0,"10",,terminal_output +3806,6487016,"TERMINAL",0,0,"1",,terminal_output +3807,6488055,"TERMINAL",0,0,"2",,terminal_output +3808,6489140,"TERMINAL",0,0,"3",,terminal_output +3809,6490158,"TERMINAL",0,0,"4",,terminal_output +3810,6491171,"TERMINAL",0,0,"5",,terminal_output +3811,6492308,"TERMINAL",0,0,"6",,terminal_output +3812,6493251,"TERMINAL",0,0,"7",,terminal_output +3813,6494296,"TERMINAL",0,0,"8",,terminal_output +3814,6495343,"TERMINAL",0,0,"9",,terminal_output +3815,6496372,"TERMINAL",0,0,"20",,terminal_output +3816,6497428,"TERMINAL",0,0,"1",,terminal_output +3817,6498556,"TERMINAL",0,0,"2",,terminal_output +3818,6499582,"TERMINAL",0,0,"3",,terminal_output +3819,6500529,"TERMINAL",0,0,"4",,terminal_output +3820,6501575,"TERMINAL",0,0,"5",,terminal_output +3821,6502609,"TERMINAL",0,0,"6",,terminal_output +3822,6503646,"TERMINAL",0,0,"8",,terminal_output +3823,6504685,"TERMINAL",0,0,"9",,terminal_output 
+3824,6505721,"TERMINAL",0,0,"30",,terminal_output +3825,6506755,"TERMINAL",0,0,"1",,terminal_output +3826,6507874,"TERMINAL",0,0,"2",,terminal_output +3827,6508896,"TERMINAL",0,0,"3",,terminal_output +3828,6509871,"TERMINAL",0,0,"4",,terminal_output +3829,6510904,"TERMINAL",0,0,"5",,terminal_output +3830,6511945,"TERMINAL",0,0,"6",,terminal_output +3831,6512993,"TERMINAL",0,0,"7",,terminal_output +3832,6514056,"TERMINAL",0,0,"8",,terminal_output +3833,6515160,"TERMINAL",0,0,"9",,terminal_output +3834,6516167,"TERMINAL",0,0,"40",,terminal_output +3835,6517190,"TERMINAL",0,0,"1",,terminal_output +3836,6518232,"TERMINAL",0,0,"2",,terminal_output +3837,6519248,"TERMINAL",0,0,"3",,terminal_output +3838,6520259,"TERMINAL",0,0,"4",,terminal_output +3839,6521299,"TERMINAL",0,0,"59",,terminal_output +3840,6522344,"TERMINAL",0,0,"6",,terminal_output +3841,6523382,"TERMINAL",0,0,"78",,terminal_output +3842,6524462,"TERMINAL",0,0,"8",,terminal_output +3843,6525485,"TERMINAL",0,0,"9",,terminal_output +3844,6526611,"TERMINAL",0,0,"50",,terminal_output +3845,6527639,"TERMINAL",0,0,"1",,terminal_output +3846,6528639,"TERMINAL",0,0,"2",,terminal_output +3847,6529624,"TERMINAL",0,0,"4",,terminal_output +3848,6530665,"TERMINAL",0,0,"5",,terminal_output +3849,6531701,"TERMINAL",0,0,"6",,terminal_output +3850,6532580,"TERMINAL",0,0,"bash",,terminal_focus +3851,6532751,"TERMINAL",0,0,"7",,terminal_output +3852,6533787,"TERMINAL",0,0,"8",,terminal_output +3853,6534811,"TERMINAL",0,0,"9",,terminal_output +3854,6535858,"TERMINAL",0,0,"3:00",,terminal_output +3855,6536893,"TERMINAL",0,0,"1",,terminal_output +3856,6537940,"TERMINAL",0,0,"2",,terminal_output +3857,6538976,"TERMINAL",0,0,"3",,terminal_output +3858,6540023,"TERMINAL",0,0,"4",,terminal_output +3859,6541060,"TERMINAL",0,0,"5",,terminal_output +3860,6542099,"TERMINAL",0,0,"6",,terminal_output +3861,6543201,"TERMINAL",0,0,"7",,terminal_output +3862,6544184,"TERMINAL",0,0,"8",,terminal_output +3863,6545252,"TERMINAL",0,0,"9",,terminal_output +3864,6546032,"TERMINAL",0,0,"watch",,terminal_focus +3865,6546264,"TERMINAL",0,0,"10",,terminal_output +3866,6547310,"TERMINAL",0,0,"1",,terminal_output +3867,6548344,"TERMINAL",0,0,"2",,terminal_output +3868,6549550,"TERMINAL",0,0,"3",,terminal_output +3869,6550429,"TERMINAL",0,0,"4",,terminal_output +3870,6551600,"TERMINAL",0,0,"5",,terminal_output +3871,6552493,"TERMINAL",0,0,"6",,terminal_output +3872,6553541,"TERMINAL",0,0,"7",,terminal_output +3873,6554572,"TERMINAL",0,0,"8",,terminal_output +3874,6555691,"TERMINAL",0,0,"20",,terminal_output +3875,6556661,"TERMINAL",0,0,"1",,terminal_output +3876,6557751,"TERMINAL",0,0,"2",,terminal_output +3877,6558737,"TERMINAL",0,0,"3",,terminal_output +3878,6559808,"TERMINAL",0,0,"4",,terminal_output +3879,6560817,"TERMINAL",0,0,"55",,terminal_output +3880,6561940,"TERMINAL",0,0,"6",,terminal_output +3881,6562964,"TERMINAL",0,0,"7",,terminal_output +3882,6563988,"TERMINAL",0,0,"8",,terminal_output +3883,6565014,"TERMINAL",0,0,"9",,terminal_output +3884,6566035,"TERMINAL",0,0,"30",,terminal_output +3885,6567063,"TERMINAL",0,0,"1",,terminal_output +3886,6568075,"TERMINAL",0,0,"2",,terminal_output +3887,6569112,"TERMINAL",0,0,"3",,terminal_output +3888,6570257,"TERMINAL",0,0,"496",,terminal_output +3889,6571186,"TERMINAL",0,0,"58",,terminal_output +3890,6572229,"TERMINAL",0,0,"6",,terminal_output +3891,6573308,"TERMINAL",0,0,"7",,terminal_output +3892,6574302,"TERMINAL",0,0,"8",,terminal_output +3893,6575339,"TERMINAL",0,0,"9",,terminal_output 
+3894,6576375,"TERMINAL",0,0,"40",,terminal_output +3895,6577417,"TERMINAL",0,0,"1",,terminal_output +3896,6578446,"TERMINAL",0,0,"2",,terminal_output +3897,6579485,"TERMINAL",0,0,"3",,terminal_output +3898,6580525,"TERMINAL",0,0,"4",,terminal_output +3899,6581562,"TERMINAL",0,0,"5",,terminal_output +3900,6582626,"TERMINAL",0,0,"6",,terminal_output +3901,6583641,"TERMINAL",0,0,"8",,terminal_output +3902,6584680,"TERMINAL",0,0,"99",,terminal_output +3903,6585801,"TERMINAL",0,0,"50",,terminal_output +3904,6586823,"TERMINAL",0,0,"1",,terminal_output +3905,6587855,"TERMINAL",0,0,"2",,terminal_output +3906,6588873,"TERMINAL",0,0,"3",,terminal_output +3907,6589875,"TERMINAL",0,0,"4",,terminal_output +3908,6590922,"TERMINAL",0,0,"5",,terminal_output +3909,6592046,"TERMINAL",0,0,"6",,terminal_output +3910,6593011,"TERMINAL",0,0,"7",,terminal_output +3911,6594027,"TERMINAL",0,0,"8",,terminal_output +3912,6595072,"TERMINAL",0,0,"9",,terminal_output +3913,6596140,"TERMINAL",0,0,"4:008",,terminal_output +3914,6597156,"TERMINAL",0,0,"1",,terminal_output +3915,6598203,"TERMINAL",0,0,"2",,terminal_output +3916,6599300,"TERMINAL",0,0,"30",,terminal_output +3917,6600299,"TERMINAL",0,0,"4",,terminal_output +3918,6601323,"TERMINAL",0,0,"5",,terminal_output +3919,6602360,"TERMINAL",0,0,"6",,terminal_output +3920,6603405,"TERMINAL",0,0,"7",,terminal_output +3921,6604449,"TERMINAL",0,0,"8",,terminal_output +3922,6605564,"TERMINAL",0,0,"9",,terminal_output +3923,6606520,"TERMINAL",0,0,"10",,terminal_output +3924,6607555,"TERMINAL",0,0,"1",,terminal_output +3925,6608626,"TERMINAL",0,0,"2",,terminal_output +3926,6609640,"TERMINAL",0,0,"4",,terminal_output +3927,6610683,"TERMINAL",0,0,"5",,terminal_output +3928,6611723,"TERMINAL",0,0,"6",,terminal_output +3929,6612766,"TERMINAL",0,0,"7",,terminal_output +3930,6613804,"TERMINAL",0,0,"8",,terminal_output +3931,6614876,"TERMINAL",0,0,"9",,terminal_output +3932,6615880,"TERMINAL",0,0,"20",,terminal_output +3933,6616918,"TERMINAL",0,0,"1",,terminal_output +3934,6617994,"TERMINAL",0,0,"2",,terminal_output +3935,6619089,"TERMINAL",0,0,"3",,terminal_output +3936,6620031,"TERMINAL",0,0,"4",,terminal_output +3937,6621067,"TERMINAL",0,0,"5",,terminal_output +3938,6622104,"TERMINAL",0,0,"6",,terminal_output +3939,6623137,"TERMINAL",0,0,"7",,terminal_output +3940,6624184,"TERMINAL",0,0,"8",,terminal_output +3941,6625225,"TERMINAL",0,0,"9",,terminal_output +3942,6626283,"TERMINAL",0,0,"30",,terminal_output +3943,6627315,"TERMINAL",0,0,"1",,terminal_output +3944,6628356,"TERMINAL",0,0,"2",,terminal_output +3945,6629398,"TERMINAL",0,0,"3",,terminal_output +3946,6630441,"TERMINAL",0,0,"4",,terminal_output +3947,6631481,"TERMINAL",0,0,"5",,terminal_output +3948,6632528,"TERMINAL",0,0,"6",,terminal_output +3949,6633569,"TERMINAL",0,0,"7",,terminal_output +3950,6634646,"TERMINAL",0,0,"8",,terminal_output +3951,6635668,"TERMINAL",0,0,"40",,terminal_output +3952,6636695,"TERMINAL",0,0,"1",,terminal_output +3953,6637718,"TERMINAL",0,0,"2",,terminal_output +3954,6638844,"TERMINAL",0,0,"3",,terminal_output +3955,6639834,"TERMINAL",0,0,"4",,terminal_output +3956,6640892,"TERMINAL",0,0,"5",,terminal_output +3957,6641917,"TERMINAL",0,0,"6",,terminal_output +3958,6642937,"TERMINAL",0,0,"7",,terminal_output +3959,6644068,"TERMINAL",0,0,"8",,terminal_output +3960,6645088,"TERMINAL",0,0,"9",,terminal_output +3961,6646059,"TERMINAL",0,0,"50",,terminal_output +3962,6647098,"TERMINAL",0,0,"1",,terminal_output +3963,6648163,"TERMINAL",0,0,"2",,terminal_output 
+3964,6649188,"TERMINAL",0,0,"3",,terminal_output +3965,6650315,"TERMINAL",0,0,"4",,terminal_output +3966,6651280,"TERMINAL",0,0,"5",,terminal_output +3967,6652367,"TERMINAL",0,0,"6",,terminal_output +3968,6653352,"TERMINAL",0,0,"71",,terminal_output +3969,6654392,"TERMINAL",0,0,"8",,terminal_output +3970,6655441,"TERMINAL",0,0,"9",,terminal_output +3971,6656693,"TERMINAL",0,0,"5:00",,terminal_output +3972,6657718,"TERMINAL",0,0,"1",,terminal_output +3973,6658540,"TERMINAL",0,0,"2",,terminal_output +3974,6659580,"TERMINAL",0,0,"3",,terminal_output +3975,6660618,"TERMINAL",0,0,"5",,terminal_output +3976,6661661,"TERMINAL",0,0,"6",,terminal_output +3977,6662698,"TERMINAL",0,0,"7",,terminal_output +3978,6663741,"TERMINAL",0,0,"8",,terminal_output +3979,6664855,"TERMINAL",0,0,"90",,terminal_output +3980,6665876,"TERMINAL",0,0,"10",,terminal_output +3981,6666906,"TERMINAL",0,0,"1",,terminal_output +3982,6667892,"TERMINAL",0,0,"2",,terminal_output +3983,6668920,"TERMINAL",0,0,"3",,terminal_output +3984,6669985,"TERMINAL",0,0,"4",,terminal_output +3985,6670994,"TERMINAL",0,0,"5",,terminal_output +3986,6672121,"TERMINAL",0,0,"6",,terminal_output +3987,6673064,"TERMINAL",0,0,"7",,terminal_output +3988,6674169,"TERMINAL",0,0,"8",,terminal_output +3989,6675202,"TERMINAL",0,0,"9",,terminal_output +3990,6676194,"TERMINAL",0,0,"20",,terminal_output +3991,6677230,"TERMINAL",0,0,"1",,terminal_output +3992,6678268,"TERMINAL",0,0,"2",,terminal_output +3993,6679308,"TERMINAL",0,0,"3",,terminal_output +3994,6680331,"TERMINAL",0,0,"4",,terminal_output +3995,6681368,"TERMINAL",0,0,"5",,terminal_output +3996,6682414,"TERMINAL",0,0,"6",,terminal_output +3997,6683454,"TERMINAL",0,0,"7",,terminal_output +3998,6684515,"TERMINAL",0,0,"8",,terminal_output +3999,6685545,"TERMINAL",0,0,"9",,terminal_output +4000,6686665,"TERMINAL",0,0,"30",,terminal_output +4001,6687609,"TERMINAL",0,0,"1",,terminal_output +4002,6688648,"TERMINAL",0,0,"3",,terminal_output +4003,6689689,"TERMINAL",0,0,"4",,terminal_output +4004,6690760,"TERMINAL",0,0,"5",,terminal_output +4005,6691783,"TERMINAL",0,0,"6",,terminal_output +4006,6692911,"TERMINAL",0,0,"7",,terminal_output +4007,6693854,"TERMINAL",0,0,"8",,terminal_output +4008,6694884,"TERMINAL",0,0,"9",,terminal_output +4009,6695927,"TERMINAL",0,0,"40",,terminal_output +4010,6697007,"TERMINAL",0,0,"1",,terminal_output +4011,6698008,"TERMINAL",0,0,"2",,terminal_output +4012,6699058,"TERMINAL",0,0,"3",,terminal_output +4013,6700089,"TERMINAL",0,0,"4",,terminal_output +4014,6701207,"TERMINAL",0,0,"5",,terminal_output +4015,6702172,"TERMINAL",0,0,"6",,terminal_output +4016,6703255,"TERMINAL",0,0,"7",,terminal_output +4017,6704276,"TERMINAL",0,0,"8",,terminal_output +4018,6705303,"TERMINAL",0,0,"9",,terminal_output +4019,6706336,"TERMINAL",0,0,"50",,terminal_output +4020,6707351,"TERMINAL",0,0,"1",,terminal_output +4021,6708387,"TERMINAL",0,0,"2",,terminal_output +4022,6709500,"TERMINAL",0,0,"3",,terminal_output +4023,6710533,"TERMINAL",0,0,"4",,terminal_output +4024,6711509,"TERMINAL",0,0,"5",,terminal_output +4025,6712544,"TERMINAL",0,0,"6",,terminal_output +4026,6713623,"TERMINAL",0,0,"7",,terminal_output +4027,6714724,"TERMINAL",0,0,"9",,terminal_output +4028,6715736,"TERMINAL",0,0,"6:00",,terminal_output +4029,6716770,"TERMINAL",0,0,"1",,terminal_output +4030,6717767,"TERMINAL",0,0,"2",,terminal_output +4031,6718817,"TERMINAL",0,0,"3",,terminal_output +4032,6719885,"TERMINAL",0,0,"4 2",,terminal_output +4033,6720868,"TERMINAL",0,0,"5",,terminal_output 
+4034,6721906,"TERMINAL",0,0,"6",,terminal_output +4035,6722983,"TERMINAL",0,0,"7",,terminal_output +4036,6724041,"TERMINAL",0,0,"8",,terminal_output +4037,6725064,"TERMINAL",0,0,"9",,terminal_output +4038,6726091,"TERMINAL",0,0,"10",,terminal_output +4039,6727104,"TERMINAL",0,0,"1",,terminal_output +4040,6728163,"TERMINAL",0,0,"2",,terminal_output +4041,6729178,"TERMINAL",0,0,"3",,terminal_output +4042,6730289,"TERMINAL",0,0,"4",,terminal_output +4043,6731266,"TERMINAL",0,0,"5",,terminal_output +4044,6732322,"TERMINAL",0,0,"6",,terminal_output +4045,6733341,"TERMINAL",0,0,"7",,terminal_output +4046,6734388,"TERMINAL",0,0,"8",,terminal_output +4047,6735408,"TERMINAL",0,0,"9",,terminal_output +4048,6736444,"TERMINAL",0,0,"20",,terminal_output +4049,6737564,"TERMINAL",0,0,"1",,terminal_output +4050,6738516,"TERMINAL",0,0,"2",,terminal_output +4051,6739553,"TERMINAL",0,0,"3",,terminal_output +4052,6740589,"TERMINAL",0,0,"4",,terminal_output +4053,6741654,"TERMINAL",0,0,"6",,terminal_output +4054,6742670,"TERMINAL",0,0,"7",,terminal_output +4055,6743706,"TERMINAL",0,0,"8",,terminal_output +4056,6744829,"TERMINAL",0,0,"9",,terminal_output +4057,6745854,"TERMINAL",0,0,"30",,terminal_output +4058,6746842,"TERMINAL",0,0,"1",,terminal_output +4059,6747904,"TERMINAL",0,0,"2",,terminal_output +4060,6748924,"TERMINAL",0,0,"3",,terminal_output +4061,6749965,"TERMINAL",0,0,"4",,terminal_output +4062,6751004,"TERMINAL",0,0,"5",,terminal_output +4063,6752100,"TERMINAL",0,0,"6",,terminal_output +4064,6753090,"TERMINAL",0,0,"7",,terminal_output +4065,6754131,"TERMINAL",0,0,"8",,terminal_output +4066,6755173,"TERMINAL",0,0,"9",,terminal_output +4067,6756296,"TERMINAL",0,0,"40",,terminal_output +4068,6757259,"TERMINAL",0,0,"1",,terminal_output +4069,6758452,"TERMINAL",0,0,"2",,terminal_output +4070,6759371,"TERMINAL",0,0,"3",,terminal_output +4071,6760435,"TERMINAL",0,0,"4",,terminal_output +4072,6761833,"TERMINAL",0,0,"5",,terminal_output +4073,6762538,"TERMINAL",0,0,"6",,terminal_output +4074,6763521,"TERMINAL",0,0,"7",,terminal_output +4075,6764592,"TERMINAL",0,0,"8",,terminal_output +4076,6765681,"TERMINAL",0,0,"9",,terminal_output +4077,6766642,"TERMINAL",0,0,"51",,terminal_output +4078,6767781,"TERMINAL",0,0,"2",,terminal_output +4079,6768745,"TERMINAL",0,0,"3",,terminal_output +4080,6769753,"TERMINAL",0,0,"4",,terminal_output +4081,6771775,"TERMINAL",0,0,"598",,terminal_output +4082,6772683,"TERMINAL",0,0,"7",,terminal_output +4083,6773715,"TERMINAL",0,0,"8",,terminal_output +4084,6774831,"TERMINAL",0,0,"9",,terminal_output +4085,6775793,"TERMINAL",0,0,"7:00",,terminal_output +4086,6776944,"TERMINAL",0,0,"1",,terminal_output +4087,6778012,"TERMINAL",0,0,"2",,terminal_output +4088,6779035,"TERMINAL",0,0,"3",,terminal_output +4089,6780058,"TERMINAL",0,0,"4",,terminal_output +4090,6781198,"TERMINAL",0,0,"5",,terminal_output +4091,6782132,"TERMINAL",0,0,"6",,terminal_output +4092,6783161,"TERMINAL",0,0,"7",,terminal_output +4093,6784202,"TERMINAL",0,0,"8",,terminal_output +4094,6785240,"TERMINAL",0,0,"9",,terminal_output +4095,6786301,"TERMINAL",0,0,"10",,terminal_output +4096,6787632,"TERMINAL",0,0,"1",,terminal_output +4097,6788357,"TERMINAL",0,0,"2",,terminal_output +4098,6789397,"TERMINAL",0,0,"3",,terminal_output +4099,6790431,"TERMINAL",0,0,"4",,terminal_output +4100,6791475,"TERMINAL",0,0,"5",,terminal_output +4101,6792513,"TERMINAL",0,0,"6",,terminal_output +4102,6793549,"TERMINAL",0,0,"7",,terminal_output +4103,6794594,"TERMINAL",0,0,"8",,terminal_output 
+4104,6795631,"TERMINAL",0,0,"20",,terminal_output +4105,6796673,"TERMINAL",0,0,"1",,terminal_output +4106,6797717,"TERMINAL",0,0,"2",,terminal_output +4107,6798795,"TERMINAL",0,0,"3",,terminal_output +4108,6799819,"TERMINAL",0,0,"4",,terminal_output +4109,6800849,"TERMINAL",0,0,"5",,terminal_output +4110,6801893,"TERMINAL",0,0,"6",,terminal_output +4111,6802996,"TERMINAL",0,0,"7",,terminal_output +4112,6804015,"TERMINAL",0,0,"8",,terminal_output +4113,6805049,"TERMINAL",0,0,"9",,terminal_output +4114,6806184,"TERMINAL",0,0,"30",,terminal_output +4115,6807089,"TERMINAL",0,0,"1",,terminal_output +4116,6808128,"TERMINAL",0,0,"2",,terminal_output +4117,6809239,"TERMINAL",0,0,"3",,terminal_output +4118,6810212,"TERMINAL",0,0,"4",,terminal_output +4119,6811291,"TERMINAL",0,0,"5",,terminal_output +4120,6812315,"TERMINAL",0,0,"6",,terminal_output +4121,6813348,"TERMINAL",0,0,"7",,terminal_output +4122,6814377,"TERMINAL",0,0,"8",,terminal_output +4123,6815423,"TERMINAL",0,0,"9",,terminal_output +4124,6816465,"TERMINAL",0,0,"40",,terminal_output +4125,6817508,"TERMINAL",0,0,"1",,terminal_output +4126,6818553,"TERMINAL",0,0,"2",,terminal_output +4127,6819589,"TERMINAL",0,0,"3",,terminal_output +4128,6820629,"TERMINAL",0,0,"57",,terminal_output +4129,6821745,"TERMINAL",0,0,"6",,terminal_output +4130,6822754,"TERMINAL",0,0,"7",,terminal_output +4131,6823750,"TERMINAL",0,0,"89",,terminal_output +4132,6824802,"TERMINAL",0,0,"9",,terminal_output +4133,6825930,"TERMINAL",0,0,"50",,terminal_output +4134,6826956,"TERMINAL",0,0,"1",,terminal_output +4135,6827945,"TERMINAL",0,0,"2",,terminal_output +4136,6829005,"TERMINAL",0,0,"3",,terminal_output +4137,6830026,"TERMINAL",0,0,"4",,terminal_output +4138,6831050,"TERMINAL",0,0,"58",,terminal_output +4139,6832077,"TERMINAL",0,0,"6",,terminal_output +4140,6833112,"TERMINAL",0,0,"7",,terminal_output +4141,6834225,"TERMINAL",0,0,"8",,terminal_output +4142,6835250,"TERMINAL",0,0,"9",,terminal_output +4143,6836244,"TERMINAL",0,0,"8:00",,terminal_output +4144,6837278,"TERMINAL",0,0,"1",,terminal_output +4145,6837938,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom genie import Genie, restore_genie_components\nfrom models.tokenizer import TokenizerVQVAE\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data_tfrecords/coinrun""\n # Optimization\n batch_size: int = 36\n min_lr: float = 3e-6\n max_lr: float = 3e-5\n warmup_steps: int = 5000\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 
512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_gradients: bool = False\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n if args.log and jax.process_index() == 0:\n wandb.init(\n entity=args.entity,\n project=args.project,\n name=args.name,\n tags=args.tags,\n group=""debug"",\n config=args\n )\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), 
dtype=jnp.float32\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Restore checkpoint ---\n train_state = restore_genie_components(\n train_state, replicated_sharding, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n tfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n ]\n dataloader = get_dataloader(\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n tfrecord_files,\n args.seq_len,\n args.batch_size,\n *image_shape,\n )\n step = 0\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n start_time = time.time()\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n elapsed_time = (time.time() - start_time) * 1000\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n ""step_time_ms"": elapsed_time,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""genie_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +4146,6838320,"TERMINAL",0,0,"2",,terminal_output +4147,6839384,"TERMINAL",0,0,"31",,terminal_output +4148,6840434,"TERMINAL",0,0,"4",,terminal_output +4149,6841460,"TERMINAL",0,0,"5",,terminal_output +4150,6842510,"TERMINAL",0,0,"6",,terminal_output +4151,6843544,"TERMINAL",0,0,"7",,terminal_output +4152,6844173,"train_dynamics.py",1583,0,"",python,selection_mouse +4153,6844584,"TERMINAL",0,0,"8",,terminal_output +4154,6844745,"train_dynamics.py",1578,0,"",python,selection_mouse +4155,6844897,"train_dynamics.py",1573,10,"mask_limit",python,selection_mouse 
+4156,6845692,"TERMINAL",0,0,"10",,terminal_output +4157,6846263,"train_dynamics.py",1579,0,"",python,selection_mouse +4158,6846656,"TERMINAL",0,0,"1",,terminal_output +4159,6847740,"TERMINAL",0,0,"2",,terminal_output +4160,6848740,"TERMINAL",0,0,"3",,terminal_output +4161,6849648,"train_dynamics.py",4206,0,"",python,selection_mouse +4162,6849781,"TERMINAL",0,0,"4",,terminal_output +4163,6849868,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom jax import NamedSharding\nfrom flax.training.train_state import TrainState\nfrom flax.training import orbax_utils\nfrom orbax.checkpoint import PyTreeCheckpointer\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n 
variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n 
patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n lam_init_params = dummy_lam.init(_rng, inputs)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n\n def create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n\n abstract_sharded_tokenizer_state = create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n abstract_sharded_lam_state = create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n\n tokenizer_restore_target = {""model"": abstract_sharded_tokenizer_state}\n lam_restore_target = {""model"": abstract_sharded_lam_state}\n\n tokenizer_restore_args = orbax_utils.restore_args_from_target(\n tokenizer_restore_target\n )\n lam_restore_args = orbax_utils.restore_args_from_target(lam_restore_target)\n\n restored_tokenizer_params = (\n PyTreeCheckpointer()\n .restore(\n args.tokenizer_checkpoint,\n item=tokenizer_restore_target,\n restore_args=tokenizer_restore_args,\n )[""model""]\n .params[""params""]\n )\n restored_lam_params = (\n PyTreeCheckpointer()\n .restore(\n args.lam_checkpoint, item=lam_restore_target, restore_args=lam_restore_args\n )[""model""]\n .params[""params""]\n )\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n train_state.params[""params""][""lam""].update(restored_lam_params)\n\n return train_state\n",python,tab +4164,6850819,"TERMINAL",0,0,"5",,terminal_output +4165,6851881,"TERMINAL",0,0,"6",,terminal_output +4166,6852059,"genie.py",927,0,"",python,selection_mouse +4167,6852885,"TERMINAL",0,0,"7",,terminal_output +4168,6853926,"TERMINAL",0,0,"8",,terminal_output +4169,6854965,"TERMINAL",0,0,"9",,terminal_output +4170,6856012,"TERMINAL",0,0,"20",,terminal_output +4171,6856691,"genie.py",926,0,"",python,selection_mouse +4172,6856816,"genie.py",921,10,"mask_limit",python,selection_mouse +4173,6857050,"TERMINAL",0,0,"1",,terminal_output +4174,6858087,"TERMINAL",0,0,"2",,terminal_output +4175,6859212,"TERMINAL",0,0,"3",,terminal_output +4176,6859807,"genie.py",1818,0,"",python,selection_mouse +4177,6860177,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport 
jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(self.model_dim)\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n",python,tab +4178,6860185,"TERMINAL",0,0,"4",,terminal_output +4179,6861262,"TERMINAL",0,0,"5",,terminal_output +4180,6862023,"models/dynamics.py",302,0,"",python,selection_mouse +4181,6862172,"models/dynamics.py",297,10,"mask_limit",python,selection_mouse +4182,6862266,"TERMINAL",0,0,"6",,terminal_output +4183,6863308,"TERMINAL",0,0,"7",,terminal_output +4184,6864433,"TERMINAL",0,0,"8",,terminal_output +4185,6865449,"TERMINAL",0,0,"9",,terminal_output +4186,6866426,"TERMINAL",0,0,"30",,terminal_output +4187,6867462,"TERMINAL",0,0,"1",,terminal_output +4188,6868507,"TERMINAL",0,0,"2",,terminal_output +4189,6869544,"TERMINAL",0,0,"3",,terminal_output +4190,6870589,"TERMINAL",0,0,"4",,terminal_output +4191,6871629,"TERMINAL",0,0,"6",,terminal_output +4192,6872667,"TERMINAL",0,0,"7",,terminal_output +4193,6873706,"TERMINAL",0,0,"8",,terminal_output +4194,6874775,"TERMINAL",0,0,"9",,terminal_output +4195,6875788,"TERMINAL",0,0,"40",,terminal_output +4196,6876820,"TERMINAL",0,0,"1",,terminal_output +4197,6877950,"TERMINAL",0,0,"282",,terminal_output +4198,6878977,"TERMINAL",0,0,"3",,terminal_output +4199,6879998,"TERMINAL",0,0,"4",,terminal_output +4200,6881019,"TERMINAL",0,0,"5",,terminal_output +4201,6882149,"TERMINAL",0,0,"6",,terminal_output +4202,6883176,"TERMINAL",0,0,"7",,terminal_output +4203,6884196,"TERMINAL",0,0,"8",,terminal_output +4204,6885224,"TERMINAL",0,0,"93",,terminal_output +4205,6886245,"TERMINAL",0,0,"50",,terminal_output +4206,6887272,"TERMINAL",0,0,"1",,terminal_output +4207,6888293,"TERMINAL",0,0,"2",,terminal_output +4208,6889420,"TERMINAL",0,0,"3",,terminal_output +4209,6890364,"TERMINAL",0,0,"4",,terminal_output +4210,6891397,"TERMINAL",0,0,"5",,terminal_output +4211,6892432,"TERMINAL",0,0,"6",,terminal_output +4212,6893466,"TERMINAL",0,0,"7",,terminal_output +4213,6894512,"TERMINAL",0,0,"8",,terminal_output +4214,6895556,"TERMINAL",0,0,"9",,terminal_output +4215,6896601,"TERMINAL",0,0,"9:00",,terminal_output +4216,6897712,"TERMINAL",0,0,"2",,terminal_output +4217,6898943,"TERMINAL",0,0,"3",,terminal_output +4218,6899767,"TERMINAL",0,0,"4",,terminal_output +4219,6900642,"sample.py",0,0,"from dataclasses 
import dataclass\nimport time\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_resolution, args.image_resolution, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid\n\n\n# --- Get video + latent actions ---\ndataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\nvideo_batch = next(iter(dataloader))\n# Get latent actions from first video only\nfirst_video = video_batch[:1]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : 
vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +4220,6900750,"TERMINAL",0,0,"5",,terminal_output +4221,6901813,"TERMINAL",0,0,"6",,terminal_output +4222,6902934,"TERMINAL",0,0,"7",,terminal_output +4223,6903960,"TERMINAL",0,0,"8",,terminal_output +4224,6904925,"TERMINAL",0,0,"9",,terminal_output +4225,6905973,"TERMINAL",0,0,"10",,terminal_output +4226,6906986,"TERMINAL",0,0,"1",,terminal_output +4227,6908022,"TERMINAL",0,0,"2",,terminal_output +4228,6909057,"TERMINAL",0,0,"3",,terminal_output +4229,6909855,"sample.py",1276,0,"",python,selection_mouse +4230,6909971,"sample.py",1273,5,"Genie",python,selection_mouse +4231,6910094,"TERMINAL",0,0,"42",,terminal_output +4232,6911131,"TERMINAL",0,0,"5",,terminal_output +4233,6912195,"TERMINAL",0,0,"6",,terminal_output +4234,6913219,"TERMINAL",0,0,"7",,terminal_output +4235,6914315,"TERMINAL",0,0,"8",,terminal_output +4236,6915330,"TERMINAL",0,0,"9",,terminal_output +4237,6916349,"TERMINAL",0,0,"20",,terminal_output +4238,6917371,"TERMINAL",0,0,"1",,terminal_output +4239,6918438,"TERMINAL",0,0,"2",,terminal_output +4240,6919441,"TERMINAL",0,0,"3",,terminal_output +4241,6920496,"TERMINAL",0,0,"4",,terminal_output +4242,6921533,"TERMINAL",0,0,"5",,terminal_output +4243,6922600,"TERMINAL",0,0,"6",,terminal_output +4244,6923624,"TERMINAL",0,0,"7",,terminal_output +4245,6924749,"TERMINAL",0,0,"9",,terminal_output +4246,6925772,"TERMINAL",0,0,"30",,terminal_output +4247,6926722,"TERMINAL",0,0,"1",,terminal_output +4248,6927820,"TERMINAL",0,0,"2",,terminal_output +4249,6928863,"TERMINAL",0,0,"3",,terminal_output +4250,6929184,"sample.py",586,0,"",python,selection_mouse +4251,6929800,"sample.py",561,0,"",python,selection_mouse +4252,6929933,"sample.py",551,13,"maskgit_steps",python,selection_mouse +4253,6930085,"TERMINAL",0,0,"4",,terminal_output +4254,6930897,"TERMINAL",0,0,"5",,terminal_output +4255,6932032,"TERMINAL",0,0,"6",,terminal_output +4256,6932959,"TERMINAL",0,0,"7",,terminal_output +4257,6934066,"TERMINAL",0,0,"8",,terminal_output +4258,6935088,"TERMINAL",0,0,"9",,terminal_output +4259,6936085,"TERMINAL",0,0,"40",,terminal_output +4260,6937138,"TERMINAL",0,0,"1",,terminal_output +4261,6938262,"TERMINAL",0,0,"2",,terminal_output +4262,6939287,"TERMINAL",0,0,"3",,terminal_output +4263,6940361,"TERMINAL",0,0,"4",,terminal_output +4264,6941299,"TERMINAL",0,0,"5",,terminal_output +4265,6942360,"TERMINAL",0,0,"6",,terminal_output 
+4266,6943397,"TERMINAL",0,0,"7",,terminal_output +4267,6944422,"TERMINAL",0,0,"8",,terminal_output +4268,6945453,"TERMINAL",0,0,"93",,terminal_output +4269,6945825,"train_dynamics.py",0,0,"",python,tab +4270,6946499,"TERMINAL",0,0,"50",,terminal_output +4271,6947540,"TERMINAL",0,0,"1",,terminal_output +4272,6948621,"TERMINAL",0,0,"2",,terminal_output +4273,6949609,"TERMINAL",0,0,"4",,terminal_output +4274,6950649,"TERMINAL",0,0,"5",,terminal_output +4275,6951788,"TERMINAL",0,0,"6",,terminal_output +4276,6952736,"TERMINAL",0,0,"7",,terminal_output +4277,6953777,"TERMINAL",0,0,"8",,terminal_output +4278,6954814,"TERMINAL",0,0,"9",,terminal_output +4279,6955877,"TERMINAL",0,0,"20:00",,terminal_output +4280,6956891,"TERMINAL",0,0,"12",,terminal_output +4281,6957959,"TERMINAL",0,0,"2",,terminal_output +4282,6959160,"TERMINAL",0,0,"3",,terminal_output +4283,6960076,"TERMINAL",0,0,"4",,terminal_output +4284,6961157,"TERMINAL",0,0,"5",,terminal_output +4285,6962126,"TERMINAL",0,0,"6",,terminal_output +4286,6962459,"sample.py",0,0,"",python,tab +4287,6963147,"TERMINAL",0,0,"7",,terminal_output +4288,6964181,"TERMINAL",0,0,"8",,terminal_output +4289,6964592,"sample.py",559,0,"",python,selection_mouse +4290,6965228,"TERMINAL",0,0,"93",,terminal_output +4291,6966319,"TERMINAL",0,0,"10",,terminal_output +4292,6966596,"sample.py",2837,0,"",python,selection_mouse +4293,6966720,"sample.py",2836,5,"apply",python,selection_mouse +4294,6967315,"TERMINAL",0,0,"1",,terminal_output +4295,6967546,"sample.py",2836,0,"",python,selection_mouse +4296,6968037,"sample.py",2840,0,"",python,selection_mouse +4297,6968450,"TERMINAL",0,0,"2",,terminal_output +4298,6969395,"TERMINAL",0,0,"3",,terminal_output +4299,6970434,"TERMINAL",0,0,"42",,terminal_output +4300,6971143,"sample.py",3005,0,"",python,selection_mouse +4301,6971315,"genie.py",0,0,"",python,tab +4302,6971467,"TERMINAL",0,0,"53",,terminal_output +4303,6972509,"TERMINAL",0,0,"6",,terminal_output +4304,6973549,"TERMINAL",0,0,"7",,terminal_output +4305,6974583,"TERMINAL",0,0,"8",,terminal_output +4306,6975643,"TERMINAL",0,0,"20",,terminal_output +4307,6976663,"TERMINAL",0,0,"12",,terminal_output +4308,6976935,"genie.py",3653,0,"",python,selection_mouse +4309,6977071,"genie.py",3646,11,"MaskGITStep",python,selection_mouse +4310,6977856,"TERMINAL",0,0,"2",,terminal_output +4311,6978740,"TERMINAL",0,0,"3",,terminal_output +4312,6979777,"TERMINAL",0,0,"4",,terminal_output +4313,6980011,"genie.py",3651,0,"",python,selection_mouse +4314,6980851,"TERMINAL",0,0,"5",,terminal_output +4315,6981888,"TERMINAL",0,0,"6",,terminal_output +4316,6982906,"TERMINAL",0,0,"7",,terminal_output +4317,6984040,"TERMINAL",0,0,"8",,terminal_output +4318,6985002,"TERMINAL",0,0,"9",,terminal_output +4319,6986024,"TERMINAL",0,0,"30",,terminal_output +4320,6987071,"TERMINAL",0,0,"1",,terminal_output +4321,6988135,"TERMINAL",0,0,"2",,terminal_output +4322,6989137,"TERMINAL",0,0,"3",,terminal_output +4323,6990183,"TERMINAL",0,0,"4",,terminal_output +4324,6991306,"TERMINAL",0,0,"5",,terminal_output +4325,6992333,"TERMINAL",0,0,"6",,terminal_output +4326,6992430,"TERMINAL",0,0,"Every 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jun 30 16:20:36 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 82 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated:\t 7 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 2 nodes idle\rPartition large:\t 8 nodes idle",,terminal_output +4327,6993460,"TERMINAL",0,0,"7",,terminal_output 
+4328,6994478,"TERMINAL",0,0,"8",,terminal_output +4329,6994968,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +4330,7036276,"TERMINAL",0,0,"bash",,terminal_focus +4331,7037525,"TERMINAL",0,0,"idle",,terminal_command +4332,7037540,"TERMINAL",0,0,"]633;E;2025-06-30 16:21:21 idle;ead59344-49db-4336-9336-47fae706e637]633;CPartition dev_cpuonly : 11 nodes idle\r\nPartition cpuonly : 82 nodes idle\r\nPartition dev_accelerated : 2 nodes idle\r\nPartition accelerated : 7 nodes idle\r\nPartition dev_accelerated-h100 : 0 nodes idle\r\nPartition accelerated-h100 : 2 nodes idle\r\nPartition large : 8 nodes idle\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +4333,7039887,"TERMINAL",0,0,"bash",,terminal_focus +4334,7043991,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G",,terminal_command +4335,7044059,"TERMINAL",0,0,"]633;E;2025-06-30 16:21:28 salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G;1b1b54ff-80e1-48fc-8101-86ff30a7ce8c]633;Csalloc: Granted job allocation 3306668\r\n",,terminal_output +4336,7044177,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +4337,7071228,"TERMINAL",0,0,"salloc: Nodes hkn0634 are ready for job\r\n",,terminal_output +4338,7074365,"TERMINAL",0,0,"]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h[tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4339,7097325,"TERMINAL",0,0,"s",,terminal_output +4340,7097409,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4341,7097474,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4342,7097601,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +4343,7097762,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4344,7097895,"TERMINAL",0,0,"[?25le[?25h[?25l [?25h",,terminal_output +4345,7098107,"TERMINAL",0,0,"[?25l.[?25h[?25lv[?25h",,terminal_output +4346,7098357,"TERMINAL",0,0,"env/",,terminal_output +4347,7098625,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +4348,7098742,"TERMINAL",0,0,"in/",,terminal_output +4349,7099099,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4350,7099842,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +4351,7099970,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4352,7100036,"TERMINAL",0,0,"tivate",,terminal_output +4353,7100360,"TERMINAL",0,0,"[?25l[?2004l\r[?25h]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4354,7100672,"TERMINAL",0,0,"s",,terminal_output +4355,7101062,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +4356,7101129,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4357,7101333,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +4358,7102929,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: nvidia-smihkn0634.localdomain: Mon Jun 30 16:22:25 2025Mon Jun 30 16:22:25 2025\r+-----------------------------------------------------------------------------------------+\r| NVIDIA-SMI 570.133.20Driver Version: 570.133.20 CUDA Version: 12.8 |\r|-----------------------------------------+------------------------+----------------------+\r| GPU NamePersistence-M | Bus-IdDisp.A | Volatile Uncorr. ECC |\r| Fan Temp PerfPwr:Usage/Cap |Memory-Usage | GPU-Util Compute M. |\r|||MIG M. 
|\r|=========================================+========================+======================|\r| 0 NVIDIA A100-SXM4-40GBOn | 00000000:31:00.0 Off |0 |\r| N/A 46C P059W / 300W |\t 27MiB / 40960MiB |\t 0%\t Default |\r|||Disabled |\r+-----------------------------------------+------------------------+----------------------+\r| 1 NVIDIA A100-SXM4-40GBOn | 00000000:4B:00.0 Off |0 |\r| N/A 46C P055W / 300W |\t 27MiB / 40960MiB |\t 0%\t Default |\r|||Disabled |\r+-----------------------------------------+------------------------+----------------------+\r| 2 NVIDIA A100-SXM4-40GBOn | 00000000:CA:00.0 Off |0 |\r| N/A 45C P056W / 300W |\t 27MiB / 40960MiB |\t 0%\t Default |\r|||Disabled |\r+-----------------------------------------+------------------------+----------------------+\r| 3 NVIDIA A100-SXM4-40GBOn | 00000000:E3:00.0 Off |0 |\r| N/A 45C P057W / 300W |\t 27MiB / 40960MiB |\t 0%\t Default |\r|||Disabled |\r+-----------------------------------------+------------------------+----------------------+\r+-----------------------------------------------------------------------------------------+\r| Processes:|\r| GPU GI CIPID Type Process nameGPU Memory |\r|ID IDUsage\t |\r|=========================================================================================|\r| 0 N/A N/A2634G /usr/libexec/Xorg17MiB |\r| 1 N/A N/A2634G /usr/libexec/Xorg17MiB |\r| 2 N/A N/A2634G /usr/libexec/Xorg17MiB |\r| 3 N/A N/A2634G /usr/libexec/Xorg17MiB |\r+-----------------------------------------------------------------------------------------+",,terminal_output +4359,7105497,"TERMINAL",0,0,"887607626163",,terminal_output +4360,7105618,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4361,7112509,"TERMINAL",0,0,"s[?25lh[?25h",,terminal_output +4362,7112583,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4363,7113019,"TERMINAL",0,0,"[?25ls[?25h[?25lc[?25h",,terminal_output +4364,7113192,"TERMINAL",0,0,"ripts_",,terminal_output +4365,7114370,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4366,7114421,"TERMINAL",0,0,"oreka/",,terminal_output +4367,7114737,"TERMINAL",0,0,"",,terminal_output +4368,7114862,"TERMINAL",0,0,"\r\nbatchsize_scaling/ overfit_batch/ overfit_sample_tiny/ train_dynamics.sh train_tokenizer.sh \r\nmodelsize_scaling/ overfit_sample/ sync_runner.sh train_lam.sh \r\n(jafar) [tum_cte0515@hkn0634 jafar]$ sh scripts_horeka/",,terminal_output +4369,7115655,"TERMINAL",0,0,"o",,terminal_output +4370,7115782,"TERMINAL",0,0,"verfit_",,terminal_output +4371,7116389,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4372,7116448,"TERMINAL",0,0,"ample",,terminal_output +4373,7117669,"TERMINAL",0,0,"[?25l#[?25h",,terminal_output +4374,7118696,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +4375,7119024,"TERMINAL",0,0,"tiny/",,terminal_output +4376,7119736,"TERMINAL",0,0,"",,terminal_output +4377,7119918,"TERMINAL",0,0,"\r\nsample.sh tester.sh \r\n(jafar) [tum_cte0515@hkn0634 jafar]$ sh scripts_horeka/overfit_sample_tiny/",,terminal_output +4378,7121354,"TERMINAL",0,0,"s",,terminal_output +4379,7121508,"TERMINAL",0,0,"ample.sh ",,terminal_output +4380,7122961,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"#!/usr/bin/env bash\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\n# Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the 
following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301031\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}\n\n# Example: If you want to use a specific checkpoint, set it here\n# CHECKPOINT_PATH=$ws_dir/checkpoints/3299272/dynamics-tiny-overfit-big-lr-3299272_50000/\n# Or use the latest in the directory\n# CHECKPOINT_PATH=$(ls -d $CHECKPOINT_DIR/*/ | sort | tail -n 1)\nCHECKPOINT_PATH=$CHECKPOINT_DIR/genie_1751067778_200000/\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,tab +4381,7127380,"scripts_horeka/overfit_sample_tiny/sample.sh",1097,0,"",shellscript,selection_mouse +4382,7127513,"scripts_horeka/overfit_sample_tiny/sample.sh",1092,19,"tokenizer_num_heads",shellscript,selection_mouse +4383,7128343,"scripts_horeka/overfit_sample_tiny/sample.sh",1097,0,"",shellscript,selection_mouse +4384,7184416,"TERMINAL",0,0,"bash",,terminal_focus +4385,7185393,"scripts_horeka/overfit_sample_tiny/sample.sh",1315,0,"",shellscript,selection_mouse +4386,7185408,"scripts_horeka/overfit_sample_tiny/sample.sh",1314,0,"",shellscript,selection_command +4387,7186162,"scripts_horeka/overfit_sample_tiny/sample.sh",1338,0,"",shellscript,selection_mouse +4388,7186164,"scripts_horeka/overfit_sample_tiny/sample.sh",1337,0,"",shellscript,selection_command +4389,7196059,"scripts_horeka/overfit_sample_tiny/sample.sh",211,0,"",shellscript,selection_mouse +4390,7196756,"scripts_horeka/overfit_sample_tiny/sample.sh",284,0,"",shellscript,selection_mouse +4391,7197394,"scripts_horeka/overfit_sample_tiny/sample.sh",347,0,"",shellscript,selection_mouse +4392,7198192,"scripts_horeka/overfit_sample_tiny/sample.sh",451,0,"",shellscript,selection_mouse +4393,7198993,"scripts_horeka/overfit_sample_tiny/sample.sh",416,0,"",shellscript,selection_mouse +4394,7198997,"scripts_horeka/overfit_sample_tiny/sample.sh",415,0,"",shellscript,selection_command +4395,7203304,"scripts_horeka/overfit_sample_tiny/sample.sh",717,0,"",shellscript,selection_mouse +4396,7203306,"scripts_horeka/overfit_sample_tiny/sample.sh",716,0,"",shellscript,selection_command +4397,7208798,"scripts_horeka/overfit_sample_tiny/sample.sh",416,0,"",shellscript,selection_mouse +4398,7208799,"scripts_horeka/overfit_sample_tiny/sample.sh",415,0,"",shellscript,selection_command +4399,7209339,"scripts_horeka/overfit_sample_tiny/sample.sh",416,0,"",shellscript,selection_command +4400,7209801,"scripts_horeka/overfit_sample_tiny/sample.sh",415,1,"",shellscript,content +4401,7209921,"scripts_horeka/overfit_sample_tiny/sample.sh",414,1,"",shellscript,content +4402,7210103,"scripts_horeka/overfit_sample_tiny/sample.sh",414,0,"2",shellscript,content +4403,7210104,"scripts_horeka/overfit_sample_tiny/sample.sh",415,0,"",shellscript,selection_keyboard +4404,7210250,"scripts_horeka/overfit_sample_tiny/sample.sh",415,0,"9",shellscript,content +4405,7210251,"scripts_horeka/overfit_sample_tiny/sample.sh",416,0,"",shellscript,selection_keyboard +4406,7213240,"TERMINAL",0,0,"^C",,terminal_command 
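At this point the session pins the checkpoint by hand: sample.sh hard-codes slurm_job_id (edited above from 3301031 to 3301029) and a genie_<timestamp>_<step> directory, even though the script itself sketches a "latest checkpoint" variant in a comment. A minimal Python sketch of that idea, assuming the directory naming visible in the listings below; latest_checkpoint is a hypothetical helper, not part of the repository:

```python
# Hedged sketch (not from the recorded session): pick the newest checkpoint
# by step number instead of hard-coding genie_<timestamp>_<step> in sample.sh.
from pathlib import Path

def latest_checkpoint(ckpt_dir: str) -> Path:
    """Return the checkpoint subdirectory with the highest training step."""
    runs = [p for p in Path(ckpt_dir).iterdir() if p.is_dir()]
    # Names look like genie_1751067601_200000; the final "_<step>" field is the step.
    return max(runs, key=lambda p: int(p.name.rsplit("_", 1)[-1]))
```

Sorting numerically on the step avoids the lexicographic trap of `ls -d $CHECKPOINT_DIR/*/ | sort | tail -n 1` from the script's comment, which would rank a step-99500 directory above step-200000.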
+4407,7213263,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;ead59344-49db-4336-9336-47fae706e637]633;C]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D",,terminal_output +4408,7225846,"TERMINAL",0,0,"ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3301031",,terminal_command +4409,7225880,"TERMINAL",0,0,"]633;E;2025-06-30 16:24:30 ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3301031;ead59344-49db-4336-9336-47fae706e637]633;C",,terminal_output +4410,7226373,"TERMINAL",0,0,"genie_1751067778_1000 genie_1751067778_13000 genie_1751067778_16000 genie_1751067778_190000 genie_1751067778_40000 genie_1751067778_70000\r\ngenie_1751067778_10000 genie_1751067778_130000 genie_1751067778_160000 genie_1751067778_190500 genie_1751067778_40500 genie_1751067778_70500\r\ngenie_1751067778_100000 genie_1751067778_130500 genie_1751067778_160500 genie_1751067778_191000 genie_1751067778_41000 genie_1751067778_71000\r\ngenie_1751067778_100500 genie_1751067778_131000 genie_1751067778_161000 genie_1751067778_191500 genie_1751067778_41500 genie_1751067778_71500\r\ngenie_1751067778_101000 genie_1751067778_131500 genie_1751067778_161500 genie_1751067778_192000 genie_1751067778_42000 genie_1751067778_72000\r\ngenie_1751067778_101500 genie_1751067778_132000 genie_1751067778_162000 genie_1751067778_192500 genie_1751067778_42500 genie_1751067778_72500\r\ngenie_1751067778_102000 genie_1751067778_132500 genie_1751067778_162500 genie_1751067778_193000 genie_1751067778_43000 genie_1751067778_73000\r\ngenie_1751067778_102500 genie_1751067778_133000 genie_1751067778_163000 genie_1751067778_193500 genie_1751067778_43500 genie_1751067778_73500\r\ngenie_1751067778_103000 genie_1751067778_133500 genie_1751067778_163500 genie_1751067778_194000 genie_1751067778_44000 genie_1751067778_74000\r\ngenie_1751067778_103500 genie_1751067778_134000 genie_1751067778_164000 genie_1751067778_194500 genie_1751067778_44500 genie_1751067778_74500\r\ngenie_1751067778_104000 genie_1751067778_134500 genie_1751067778_164500 genie_1751067778_19500 genie_1751067778_4500 genie_1751067778_7500\r\ngenie_1751067778_104500 genie_1751067778_13500 genie_1751067778_16500 genie_1751067778_195000 genie_1751067778_45000 genie_1751067778_75000\r\ngenie_1751067778_10500 genie_1751067778_135000 genie_1751067778_165000 genie_1751067778_195500 genie_1751067778_45500 genie_1751067778_75500\r\ngenie_1751067778_105000 genie_1751067778_135500 genie_1751067778_165500 genie_1751067778_196000 genie_1751067778_46000 genie_1751067778_76000\r\ngenie_1751067778_105500 genie_1751067778_136000 genie_1751067778_166000 genie_1751067778_196500 genie_1751067778_46500 genie_1751067778_76500\r\ngenie_1751067778_106000 genie_1751067778_136500 genie_1751067778_166500 genie_1751067778_197000 genie_1751067778_47000 genie_1751067778_77000\r\ngenie_1751067778_106500 genie_1751067778_137000 genie_1751067778_167000 genie_1751067778_197500 genie_1751067778_47500 genie_1751067778_77500\r\ngenie_1751067778_107000 genie_1751067778_137500 genie_1751067778_167500 genie_1751067778_198000 genie_1751067778_48000 genie_1751067778_78000\r\ngenie_1751067778_107500 genie_1751067778_138000 genie_1751067778_168000 genie_1751067778_198500 genie_1751067778_48500 genie_1751067778_78500\r\ngenie_1751067778_108000 genie_1751067778_138500 genie_1751067778_168500 genie_1751067778_199000 genie_1751067778_49000 genie_1751067778_79000\r\ngenie_1751067778_108500 genie_1751067778_139000 genie_1751067778_169000 
genie_1751067778_199500 genie_1751067778_49500 genie_1751067778_79500\r\ngenie_1751067778_109000 genie_1751067778_139500 genie_1751067778_169500 genie_1751067778_2000 genie_1751067778_500 genie_1751067778_8000\r\ngenie_1751067778_109500 genie_1751067778_14000 genie_1751067778_17000 genie_1751067778_20000 genie_1751067778_5000 genie_1751067778_80000\r\ngenie_1751067778_11000 genie_1751067778_140000 genie_1751067778_170000 genie_1751067778_200000 genie_1751067778_50000 genie_1751067778_80500\r\ngenie_1751067778_110000 genie_1751067778_140500 genie_1751067778_170500 genie_1751067778_20500 genie_1751067778_50500 genie_1751067778_81000\r\ngenie_1751067778_110500 genie_1751067778_141000 genie_1751067778_171000 genie_1751067778_21000 genie_1751067778_51000 genie_1751067778_81500\r\ngenie_1751067778_111000 genie_1751067778_141500 genie_1751067778_171500 genie_1751067778_21500 genie_1751067778_51500 genie_1751067778_82000\r\ngenie_1751067778_111500 genie_1751067778_142000 genie_1751067778_172000 genie_1751067778_22000 genie_1751067778_52000 genie_1751067778_82500\r\ngenie_1751067778_112000 genie_1751067778_142500 genie_1751067778_172500 genie_1751067778_22500 genie_1751067778_52500 genie_1751067778_83000\r\ngenie_1751067778_112500 genie_1751067778_143000 genie_1751067778_173000 genie_1751067778_23000 genie_1751067778_53000 genie_1751067778_83500\r\ngenie_1751067778_113000 genie_1751067778_143500 genie_1751067778_173500 genie_1751067778_23500 genie_1751067778_53500 genie_1751067778_84000\r\ngenie_1751067778_113500 genie_1751067778_144000 genie_1751067778_174000 genie_1751067778_24000 genie_1751067778_54000 genie_1751067778_84500\r\ngenie_1751067778_114000 genie_1751067778_144500 genie_1751067778_174500 genie_1751067778_24500 genie_1751067778_54500 genie_1751067778_8500\r\ngenie_1751067778_114500 genie_1751067778_14500 genie_1751067778_17500 genie_1751067778_2500 genie_1751067778_5500 genie_1751067778_85000\r\ngenie_1751067778_11500 genie_1751067778_145000 genie_1751067778_175000 genie_1751067778_25000 genie_1751067778_55000 genie_1751067778_85500\r\ngenie_1751067778_115000 genie_1751067778_145500 genie_1751067778_175500 genie_1751067778_25500 genie_1751067778_55500 genie_1751067778_86000\r\ngenie_1751067778_115500 genie_1751067778_146000 genie_1751067778_176000 genie_1751067778_26000 genie_1751067778_56000 genie_1751067778_86500\r\ngenie_1751067778_116000 genie_1751067778_146500 genie_1751067778_176500 genie_1751067778_26500 genie_1751067778_56500 genie_1751067778_87000\r\ngenie_1751067778_116500 genie_1751067778_147000 genie_1751067778_177000 genie_1751067778_27000 genie_1751067778_57000 genie_1751067778_87500\r\ngenie_1751067778_117000 genie_1751067778_147500 genie_1751067778_177500 genie_1751067778_27500 genie_1751067778_57500 genie_1751067778_88000\r\ngenie_1751067778_117500 genie_1751067778_148000 genie_1751067778_178000 genie_1751067778_28000 genie_1751067778_58000 genie_1751067778_88500\r\ngenie_1751067778_118000 genie_1751067778_148500 genie_1751067778_178500 genie_1751067778_28500 genie_1751067778_58500 genie_1751067778_89000\r\ngenie_1751067778_118500 genie_1751067778_149000 genie_1751067778_179000 genie_1751067778_29000 genie_1751067778_59000 genie_1751067778_89500\r\ngenie_1751067778_119000 genie_1751067778_149500 genie_1751067778_179500 genie_1751067778_29500 genie_1751067778_59500 genie_1751067778_9000\r\ngenie_1751067778_119500 genie_1751067778_1500 genie_1751067778_18000 genie_1751067778_3000 genie_1751067778_6000 genie_1751067778_90000\r\ngenie_1751067778_12000 
genie_1751067778_15000 genie_1751067778_180000 genie_1751067778_30000 genie_1751067778_60000 genie_1751067778_90500\r\ngenie_1751067778_120000 genie_1751067778_150000 genie_1751067778_180500 genie_1751067778_30500 genie_1751067778_60500 genie_1751067778_91000\r\ngenie_1751067778_120500 genie_1751067778_150500 genie_1751067778_181000 genie_1751067778_31000 genie_1751067778_61000 genie_1751067778_91500\r\ngenie_1751067778_121000 genie_1751067778_151000 genie_1751067778_181500 genie_1751067778_31500 genie_1751067778_61500 genie_1751067778_92000\r\ngenie_1751067778_121500 genie_1751067778_151500 genie_1751067778_182000 genie_1751067778_32000 genie_1751067778_62000 genie_1751067778_92500\r\ngenie_1751067778_122000 genie_1751067778_152000 genie_1751067778_182500 genie_1751067778_32500 genie_1751067778_62500 genie_1751067778_93000\r\ngenie_1751067778_122500 genie_1751067778_152500 genie_1751067778_183000 genie_1751067778_33000 genie_1751067778_63000 genie_1751067778_93500\r\ngenie_1751067778_123000 genie_1751067778_153000 genie_1751067778_183500 genie_1751067778_33500 genie_1751067778_63500 genie_1751067778_94000\r\ngenie_1751067778_123500 genie_1751067778_153500 genie_1751067778_184000 genie_1751067778_34000 genie_1751067778_64000 genie_1751067778_94500\r\ngenie_1751067778_124000 genie_1751067778_154000 genie_1751067778_184500 genie_1751067778_34500 genie_1751067778_64500 genie_1751067778_9500\r\ngenie_1751067778_124500 genie_1751067778_154500 genie_1751067778_18500 genie_1751067778_3500 genie_1751067778_6500 genie_1751067778_95000\r\ngenie_1751067778_12500 genie_1751067778_15500 genie_1751067778_185000 genie_1751067778_35000 genie_1751067778_65000 genie_1751067778_95500\r\ngenie_1751067778_125000 genie_1751067778_155000 genie_1751067778_185500 genie_1751067778_35500 genie_1751067778_65500 genie_1751067778_96000\r\ngenie_1751067778_125500 genie_1751067778_155500 genie_1751067778_186000 genie_1751067778_36000 genie_1751067778_66000 genie_1751067778_96500\r\ngenie_1751067778_126000 genie_1751067778_156000 genie_1751067778_186500 genie_1751067778_36500 genie_1751067778_66500 genie_1751067778_97000\r\ngenie_1751067778_126500 genie_1751067778_156500 genie_1751067778_187000 genie_1751067778_37000 genie_1751067778_67000 genie_1751067778_97500\r\ngenie_1751067778_127000 genie_1751067778_157000 genie_1751067778_187500 genie_1751067778_37500 genie_1751067778_67500 genie_1751067778_98000\r\ngenie_1751067778_127500 genie_1751067778_157500 genie_1751067778_188000 genie_1751067778_38000 genie_1751067778_68000 genie_1751067778_98500\r\ngenie_1751067778_128000 genie_1751067778_158000 genie_1751067778_188500 genie_1751067778_38500 genie_1751067778_68500 genie_1751067778_99000\r\ngenie_1751067778_128500 genie_1751067778_158500 genie_1751067778_189000 genie_1751067778_39000 genie_1751067778_69000 genie_1751067778_99500\r\ngenie_1751067778_129000 genie_1751067778_159000 genie_1751067778_189500 genie_1751067778_39500 genie_1751067778_69500\r\ngenie_1751067778_129500 genie_1751067778_159500 genie_1751067778_19000 genie_1751067778_4000 genie_1751067778_7000\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +4411,7229994,"TERMINAL",0,0,"ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3301029",,terminal_command +4412,7230044,"TERMINAL",0,0,"]633;E;2025-06-30 16:24:34 ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3301029;ead59344-49db-4336-9336-47fae706e637]633;C",,terminal_output 
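The listing above for job 3301031 (and the matching one below for 3301029) shows a save every 500 steps from 500 up to 200000. A small hedged sketch for auditing such a directory; the interval and final step are read off the listings, not taken from the training code:

```python
# Hedged sketch: report any gaps in the every-500-steps save pattern that
# the checkpoint listings suggest. interval/last are assumptions from the log.
from pathlib import Path

def missing_steps(ckpt_dir: str, interval: int = 500, last: int = 200_000) -> list[int]:
    saved = {int(p.name.rsplit("_", 1)[-1]) for p in Path(ckpt_dir).iterdir() if p.is_dir()}
    return sorted(set(range(interval, last + 1, interval)) - saved)
```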
+4413,7230207,"TERMINAL",0,0,"genie_1751067601_1000 genie_1751067601_13000 genie_1751067601_16000 genie_1751067601_190000 genie_1751067601_40000 genie_1751067601_70000\r\ngenie_1751067601_10000 genie_1751067601_130000 genie_1751067601_160000 genie_1751067601_190500 genie_1751067601_40500 genie_1751067601_70500\r\ngenie_1751067601_100000 genie_1751067601_130500 genie_1751067601_160500 genie_1751067601_191000 genie_1751067601_41000 genie_1751067601_71000\r\ngenie_1751067601_100500 genie_1751067601_131000 genie_1751067601_161000 genie_1751067601_191500 genie_1751067601_41500 genie_1751067601_71500\r\ngenie_1751067601_101000 genie_1751067601_131500 genie_1751067601_161500 genie_1751067601_192000 genie_1751067601_42000 genie_1751067601_72000\r\ngenie_1751067601_101500 genie_1751067601_132000 genie_1751067601_162000 genie_1751067601_192500 genie_1751067601_42500 genie_1751067601_72500\r\ngenie_1751067601_102000 genie_1751067601_132500 genie_1751067601_162500 genie_1751067601_193000 genie_1751067601_43000 genie_1751067601_73000\r\ngenie_1751067601_102500 genie_1751067601_133000 genie_1751067601_163000 genie_1751067601_193500 genie_1751067601_43500 genie_1751067601_73500\r\ngenie_1751067601_103000 genie_1751067601_133500 genie_1751067601_163500 genie_1751067601_194000 genie_1751067601_44000 genie_1751067601_74000\r\ngenie_1751067601_103500 genie_1751067601_134000 genie_1751067601_164000 genie_1751067601_194500 genie_1751067601_44500 genie_1751067601_74500\r\ngenie_1751067601_104000 genie_1751067601_134500 genie_1751067601_164500 genie_1751067601_19500 genie_1751067601_4500 genie_1751067601_7500\r\ngenie_1751067601_104500 genie_1751067601_13500 genie_1751067601_16500 genie_1751067601_195000 genie_1751067601_45000 genie_1751067601_75000\r\ngenie_1751067601_10500 genie_1751067601_135000 genie_1751067601_165000 genie_1751067601_195500 genie_1751067601_45500 genie_1751067601_75500\r\ngenie_1751067601_105000 genie_1751067601_135500 genie_1751067601_165500 genie_1751067601_196000 genie_1751067601_46000 genie_1751067601_76000\r\ngenie_1751067601_105500 genie_1751067601_136000 genie_1751067601_166000 genie_1751067601_196500 genie_1751067601_46500 genie_1751067601_76500\r\ngenie_1751067601_106000 genie_1751067601_136500 genie_1751067601_166500 genie_1751067601_197000 genie_1751067601_47000 genie_1751067601_77000\r\ngenie_1751067601_106500 genie_1751067601_137000 genie_1751067601_167000 genie_1751067601_197500 genie_1751067601_47500 genie_1751067601_77500\r\ngenie_1751067601_107000 genie_1751067601_137500 genie_1751067601_167500 genie_1751067601_198000 genie_1751067601_48000 genie_1751067601_78000\r\ngenie_1751067601_107500 genie_1751067601_138000 genie_1751067601_168000 genie_1751067601_198500 genie_1751067601_48500 genie_1751067601_78500\r\ngenie_1751067601_108000 genie_1751067601_138500 genie_1751067601_168500 genie_1751067601_199000 genie_1751067601_49000 genie_1751067601_79000\r\ngenie_1751067601_108500 genie_1751067601_139000 genie_1751067601_169000 genie_1751067601_199500 genie_1751067601_49500 genie_1751067601_79500\r\ngenie_1751067601_109000 genie_1751067601_139500 genie_1751067601_169500 genie_1751067601_2000 genie_1751067601_500 genie_1751067601_8000\r\ngenie_1751067601_109500 genie_1751067601_14000 genie_1751067601_17000 genie_1751067601_20000 genie_1751067601_5000 genie_1751067601_80000\r\ngenie_1751067601_11000 genie_1751067601_140000 genie_1751067601_170000 genie_1751067601_200000 genie_1751067601_50000 genie_1751067601_80500\r\ngenie_1751067601_110000 genie_1751067601_140500 
genie_1751067601_170500 genie_1751067601_20500 genie_1751067601_50500 genie_1751067601_81000\r\ngenie_1751067601_110500 genie_1751067601_141000 genie_1751067601_171000 genie_1751067601_21000 genie_1751067601_51000 genie_1751067601_81500\r\ngenie_1751067601_111000 genie_1751067601_141500 genie_1751067601_171500 genie_1751067601_21500 genie_1751067601_51500 genie_1751067601_82000\r\ngenie_1751067601_111500 genie_1751067601_142000 genie_1751067601_172000 genie_1751067601_22000 genie_1751067601_52000 genie_1751067601_82500\r\ngenie_1751067601_112000 genie_1751067601_142500 genie_1751067601_172500 genie_1751067601_22500 genie_1751067601_52500 genie_1751067601_83000\r\ngenie_1751067601_112500 genie_1751067601_143000 genie_1751067601_173000 genie_1751067601_23000 genie_1751067601_53000 genie_1751067601_83500\r\ngenie_1751067601_113000 genie_1751067601_143500 genie_1751067601_173500 genie_1751067601_23500 genie_1751067601_53500 genie_1751067601_84000\r\ngenie_1751067601_113500 genie_1751067601_144000 genie_1751067601_174000 genie_1751067601_24000 genie_1751067601_54000 genie_1751067601_84500\r\ngenie_1751067601_114000 genie_1751067601_144500 genie_1751067601_174500 genie_1751067601_24500 genie_1751067601_54500 genie_1751067601_8500\r\ngenie_1751067601_114500 genie_1751067601_14500 genie_1751067601_17500 genie_1751067601_2500 genie_1751067601_5500 genie_1751067601_85000\r\ngenie_1751067601_11500 genie_1751067601_145000 genie_1751067601_175000 genie_1751067601_25000 genie_1751067601_55000 genie_1751067601_85500\r\ngenie_1751067601_115000 genie_1751067601_145500 genie_1751067601_175500 genie_1751067601_25500 genie_1751067601_55500 genie_1751067601_86000\r\ngenie_1751067601_115500 genie_1751067601_146000 genie_1751067601_176000 genie_1751067601_26000 genie_1751067601_56000 genie_1751067601_86500\r\ngenie_1751067601_116000 genie_1751067601_146500 genie_1751067601_176500 genie_1751067601_26500 genie_1751067601_56500 genie_1751067601_87000\r\ngenie_1751067601_116500 genie_1751067601_147000 genie_1751067601_177000 genie_1751067601_27000 genie_1751067601_57000 genie_1751067601_87500\r\ngenie_1751067601_117000 genie_1751067601_147500 genie_1751067601_177500 genie_1751067601_27500 genie_1751067601_57500 genie_1751067601_88000\r\ngenie_1751067601_117500 genie_1751067601_148000 genie_1751067601_178000 genie_1751067601_28000 genie_1751067601_58000 genie_1751067601_88500\r\ngenie_1751067601_118000 genie_1751067601_148500 genie_1751067601_178500 genie_1751067601_28500 genie_1751067601_58500 genie_1751067601_89000\r\ngenie_1751067601_118500 genie_1751067601_149000 genie_1751067601_179000 genie_1751067601_29000 genie_1751067601_59000 genie_1751067601_89500\r\ngenie_1751067601_119000 genie_1751067601_149500 genie_1751067601_179500 genie_1751067601_29500 genie_1751067601_59500 genie_1751067601_9000\r\ngenie_1751067601_119500 genie_1751067601_1500 genie_1751067601_18000 genie_1751067601_3000 genie_1751067601_6000 genie_1751067601_90000\r\ngenie_1751067601_12000 genie_1751067601_15000 genie_1751067601_180000 genie_1751067601_30000 genie_1751067601_60000 genie_1751067601_90500\r\ngenie_1751067601_120000 genie_1751067601_150000 genie_1751067601_180500 genie_1751067601_30500 genie_1751067601_60500 genie_1751067601_91000\r\ngenie_1751067601_120500 genie_1751067601_150500 genie_1751067601_181000 genie_1751067601_31000 genie_1751067601_61000 genie_1751067601_91500\r\ngenie_1751067601_121000 genie_1751067601_151000 genie_1751067601_181500 genie_1751067601_31500 genie_1751067601_61500 
genie_1751067601_92000\r\ngenie_1751067601_121500 genie_1751067601_151500 genie_1751067601_182000 genie_1751067601_32000 genie_1751067601_62000 genie_1751067601_92500\r\ngenie_1751067601_122000 genie_1751067601_152000 genie_1751067601_182500 genie_1751067601_32500 genie_1751067601_62500 genie_1751067601_93000\r\ngenie_1751067601_122500 genie_1751067601_152500 genie_1751067601_183000 genie_1751067601_33000 genie_1751067601_63000 genie_1751067601_93500\r\ngenie_1751067601_123000 genie_1751067601_153000 genie_1751067601_183500 genie_1751067601_33500 genie_1751067601_63500 genie_1751067601_94000\r\ngenie_1751067601_123500 genie_1751067601_153500 genie_1751067601_184000 genie_1751067601_34000 genie_1751067601_64000 genie_1751067601_94500\r\ngenie_1751067601_124000 genie_1751067601_154000 genie_1751067601_184500 genie_1751067601_34500 genie_1751067601_64500 genie_1751067601_9500\r\ngenie_1751067601_124500 genie_1751067601_154500 genie_1751067601_18500 genie_1751067601_3500 genie_1751067601_6500 genie_1751067601_95000\r\ngenie_1751067601_12500 genie_1751067601_15500 genie_1751067601_185000 genie_1751067601_35000 genie_1751067601_65000 genie_1751067601_95500\r\ngenie_1751067601_125000 genie_1751067601_155000 genie_1751067601_185500 genie_1751067601_35500 genie_1751067601_65500 genie_1751067601_96000\r\ngenie_1751067601_125500 genie_1751067601_155500 genie_1751067601_186000 genie_1751067601_36000 genie_1751067601_66000 genie_1751067601_96500\r\ngenie_1751067601_126000 genie_1751067601_156000 genie_1751067601_186500 genie_1751067601_36500 genie_1751067601_66500 genie_1751067601_97000\r\ngenie_1751067601_126500 genie_1751067601_156500 genie_1751067601_187000 genie_1751067601_37000 genie_1751067601_67000 genie_1751067601_97500\r\ngenie_1751067601_127000 genie_1751067601_157000 genie_1751067601_187500 genie_1751067601_37500 genie_1751067601_67500 genie_1751067601_98000\r\ngenie_1751067601_127500 genie_1751067601_157500 genie_1751067601_188000 genie_1751067601_38000 genie_1751067601_68000 genie_1751067601_98500\r\ngenie_1751067601_128000 genie_1751067601_158000 genie_1751067601_188500 genie_1751067601_38500 genie_1751067601_68500 genie_1751067601_99000\r\ngenie_1751067601_128500 genie_1751067601_158500 genie_1751067601_189000 genie_1751067601_39000 genie_1751067601_69000 genie_1751067601_99500\r\ngenie_1751067601_129000 genie_1751067601_159000 genie_1751067601_189500 genie_1751067601_39500 genie_1751067601_69500\r\ngenie_1751067601_129500 genie_1751067601_159500 genie_1751067601_19000 genie_1751067601_4000 genie_1751067601_7000\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +4414,7235680,"scripts_horeka/overfit_sample_tiny/sample.sh",831,0,"",shellscript,selection_mouse +4415,7236204,"scripts_horeka/overfit_sample_tiny/sample.sh",830,1,"",shellscript,content +4416,7236333,"scripts_horeka/overfit_sample_tiny/sample.sh",829,1,"",shellscript,content +4417,7236473,"scripts_horeka/overfit_sample_tiny/sample.sh",828,1,"",shellscript,content +4418,7236606,"scripts_horeka/overfit_sample_tiny/sample.sh",827,1,"",shellscript,content +4419,7236714,"scripts_horeka/overfit_sample_tiny/sample.sh",826,1,"",shellscript,content +4420,7236858,"scripts_horeka/overfit_sample_tiny/sample.sh",825,1,"",shellscript,content +4421,7237001,"scripts_horeka/overfit_sample_tiny/sample.sh",824,1,"",shellscript,content +4422,7237142,"scripts_horeka/overfit_sample_tiny/sample.sh",823,1,"",shellscript,content 
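Right above, switching sample.sh from job 3301031 to 3301029 requires a character-by-character edit of the timestamp inside CHECKPOINT_PATH (1751067778 is deleted and 1751067601 typed in), because each run embeds its own Unix-timestamp prefix. A hedged sketch that resolves the path from the job id and step alone; it assumes exactly one genie_* run prefix per job directory, which both listings satisfy:

```python
# Hedged sketch: derive genie_<ts>_<step> from job id + step so sample.sh
# does not need the manual timestamp edit performed above.
import glob
import os

def checkpoint_path(ws_dir: str, job_id: int, step: int) -> str:
    pattern = os.path.join(ws_dir, "checkpoints", str(job_id), f"genie_*_{step}")
    matches = glob.glob(pattern)
    if len(matches) != 1:
        raise FileNotFoundError(f"expected one match for {pattern}, got {matches}")
    return matches[0]
```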
+4423,7237275,"scripts_horeka/overfit_sample_tiny/sample.sh",822,1,"",shellscript,content +4424,7237405,"scripts_horeka/overfit_sample_tiny/sample.sh",821,1,"",shellscript,content +4425,7237597,"scripts_horeka/overfit_sample_tiny/sample.sh",821,0,"1751067601",shellscript,content +4426,7320218,"scripts_horeka/overfit_sample_tiny/sample.sh",834,0,"",shellscript,selection_mouse +4427,7320698,"scripts_horeka/overfit_sample_tiny/sample.sh",840,0,"",shellscript,selection_mouse +4428,7326022,"scripts_horeka/overfit_sample_tiny/sample.sh",833,0,"",shellscript,selection_mouse +4429,7327902,"TERMINAL",0,0,"srun",,terminal_focus +4430,7329243,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +4431,7329446,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3301029/genie_1751067601_200000/\r\n",,terminal_output +4432,7333247,"TERMINAL",0,0,"^CTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 4, in <module>\r\n import dm_pix as pix\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/dm_pix/__init__.py"", line 16, in <module>\r\n from dm_pix._src import augment\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/dm_pix/_src/augment.py"", line 25, in <module>\r\n import chex\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/chex/__init__.py"", line 17, in <module>\r\n from chex._src.asserts import assert_axis_dimension\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/chex/_src/asserts.py"", line 26, in <module>\r\n from chex._src import asserts_internal as _ai\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/chex/_src/asserts_internal.py"", line 35, in <module>\r\n from chex._src import pytypes\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/chex/_src/pytypes.py"", line 19, in <module>\r\n import jax\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/__init__.py"", line 25, in <module>\r\n",,terminal_output +4433,7333375,"TERMINAL",0,0," from jax._src.cloud_tpu_init import cloud_tpu_init as _cloud_tpu_init\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/cloud_tpu_init.py"", line 20, in <module>\r\n from jax._src import config\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/config.py"", line 27, in <module>\r\n from jax._src.lib import guard_lib\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lib/__init__.py"", line 86, in <module>\r\n import jaxlib.lapack as lapack # noqa: F401\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jaxlib/lapack.py"", line 17, in <module>\r\n import numpy as np\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/numpy/__init__.py"", line 130, in <module>\r\n from numpy.__config__ import show as show_config\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/numpy/__config__.py"", line 4, in <module>\r\n from numpy.core._multiarray_umath import (\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/numpy/core/__init__.py"", line 78, in <module>\r\n from . import defchararray as char\r\n File ""<frozen importlib._bootstrap>"", line 1027, in _find_and_load\r\n File ""<frozen importlib._bootstrap>"", line 1006, in _find_and_load_unlocked\r\n File ""<frozen importlib._bootstrap>"", line 688, in _load_unlocked\r\n File ""<frozen importlib._bootstrap_external>"", line 879, in exec_module\r\n File ""<frozen importlib._bootstrap_external>"", line 975, in get_code\r\n File ""<frozen importlib._bootstrap_external>"", line 1074, in get_data\r\nKeyboardInterrupt\r\n\r\n]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4434,7334578,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +4435,7334717,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4436,7334830,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3301029/genie_1751067601_200000/\r\n",,terminal_output +4437,7335549,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +4438,7337303,"scripts_horeka/overfit_sample_tiny/sample.sh",1244,0,"",shellscript,selection_mouse +4439,7337321,"scripts_horeka/overfit_sample_tiny/sample.sh",1243,0,"",shellscript,selection_command +4440,7337729,"scripts_horeka/overfit_sample_tiny/sample.sh",1339,0,"",shellscript,selection_mouse +4441,7338565,"scripts_horeka/overfit_sample_tiny/sample.sh",1289,0,"",shellscript,selection_mouse +4442,7338566,"scripts_horeka/overfit_sample_tiny/sample.sh",1288,0,"",shellscript,selection_command +4443,7339094,"scripts_horeka/overfit_sample_tiny/sample.sh",1338,0,"",shellscript,selection_mouse +4444,7339096,"scripts_horeka/overfit_sample_tiny/sample.sh",1337,0,"",shellscript,selection_command +4445,7340575,"scripts_horeka/overfit_sample_tiny/sample.sh",915,0,"",shellscript,selection_command +4446,7350145,"TERMINAL",0,0,"2025-06-30 16:26:34.573511: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n",,terminal_output +4447,7350393,"TERMINAL",0,0,"WARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751293594.816372 33376 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\n",,terminal_output +4448,7350460,"TERMINAL",0,0,"E0000 00:00:1751293594.848421 33376 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n",,terminal_output +4449,7350786,"TERMINAL",0,0,"W0000 00:00:1751293595.155208 33376 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751293595.155229 33376 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751293595.155231 33376 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751293595.155233 33376 computation_placer.cc:177] computation placer already registered.
Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +4450,7355405,"scripts_horeka/overfit_sample_tiny/sample.sh",911,0,"",shellscript,selection_command +4451,7358824,"scripts_horeka/overfit_sample_tiny/sample.sh",1316,0," --dyna_num_heads=4 \\n",shellscript,content +4452,7358898,"scripts_horeka/overfit_sample_tiny/sample.sh",1341,0," --maskgit_steps=1\n",shellscript,content +4453,7358900,"scripts_horeka/overfit_sample_tiny/sample.sh",1363,23,"",shellscript,content +4454,7387066,"TERMINAL",0,0,"W0000 00:00:1751293631.492082 33376 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +4455,7390326,"TERMINAL",0,0,"2025-06-30 16:27:14.753454: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4456,7393845,"TERMINAL",0,0,"2025-06-30 16:27:18.254240: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4457,7401007,"TERMINAL",0,0,"2025-06-30 16:27:25.432388: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4458,7407670,"TERMINAL",0,0,"2025-06-30 16:27:31.950460: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4459,7413509,"TERMINAL",0,0,"2025-06-30 16:27:37.937696: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4460,7418539,"TERMINAL",0,0,"2025-06-30 16:27:42.964762: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4461,7421497,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4462,7424261,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 110, in <module>\r\n dataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\r\nTypeError: get_dataloader() missing 3 required positional arguments: 'image_h', 'image_w', and 'image_c'\r\n",,terminal_output +4463,7426522,"TERMINAL",0,0,"scripts_horeka/overfit_sample_tiny/sample.sh: line 44: --maskgit_steps=1: command not found\r\n]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4464,7432939,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +4465,7432993,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4466,7433061,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4467,7433170,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4468,7433415,"TERMINAL",0,0,"[?25lbr[?25h",,terminal_output +4469,7433670,"TERMINAL",0,0,"[?25la[?25h[?25ln[?25h",,terminal_output +4470,7433871,"TERMINAL",0,0,"[?25lc[?25h[?25lh[?25h",,terminal_output +4471,7434099,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +4472,7434166,"TERMINAL",0,0," add-wandb-name-and-tags\r\n convert-to-jax-array-in-iter\r\n dont-let-tf-see-gpu\r\n main\r\n preprocess_video\r\n* revised-dataloader\r\n tmp\r\n",,terminal_output +4473,7434340,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4474,7434406,"TERMINAL",0,0,"g",,terminal_output +4475,7434565,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4476,7434707,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4477,7434766,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4478,7434911,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4479,7435041,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4480,7435107,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +4481,7435345,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +4482,7435731,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +4483,7435888,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4484,7435999,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4485,7436108,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4486,7436175,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4487,7436415,"TERMINAL",0,0,"[?25lt[?25h[?25lm[?25h",,terminal_output +4488,7436640,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4489,7437001,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4490,7437272,"TERMINAL",0,0,"utils/dataloader.py: needs merge\r\nerror: you need to resolve your current index first\r\n]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4491,7437737,"TERMINAL",0,0,"git checkout tmp",,terminal_output +4492,7437922,"TERMINAL",0,0,"branch",,terminal_output +4493,7438539,"TERMINAL",0,0,"checkout tmp",,terminal_output +4494,7438660,"TERMINAL",0,0,"",,terminal_output +4495,7438863,"TERMINAL",0,0,"",,terminal_output +4496,7439020,"TERMINAL",0,0,"",,terminal_output +4497,7439771,"TERMINAL",0,0,"i",,terminal_output +4498,7440689,"TERMINAL",0,0,"[?25lg[?25h[?25li[?25h",,terminal_output +4499,7440878,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4500,7440946,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4501,7441353,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4502,7442766,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4503,7442832,"TERMINAL",0,0,"[?25li[?25h",,terminal_output
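Two distinct failures surface in the run above. The TypeError comes from sample.py still calling get_dataloader(args.data_dir, args.seq_len, args.batch_size) after the revised dataloader grew mandatory image_h/image_w/image_c parameters (the new signature appears in the git diff a few rows below). A hedged sketch of the call-site fix; the keyword names follow the traceback, and wiring them to args.image_height/image_width/image_channels is an assumption based on the training Args:

```python
# Hedged sketch of the sample.py call-site fix for the TypeError above.
# Keyword names come from the traceback; mapping them to the Args fields
# image_height/image_width/image_channels is an assumption, not shown in the log.
dataloader = get_dataloader(
    args.data_dir,
    args.seq_len,
    args.batch_size,
    image_h=args.image_height,
    image_w=args.image_width,
    image_c=args.image_channels,
)
```

The second failure, `--maskgit_steps=1: command not found`, is consistent with `sh` reading the script incrementally: the run started before the edit that appended the flag, so once python exited, the shell picked up the freshly written line 44 and executed it as a command of its own. The timestamps support this (run launched, file edited mid-run, then the error); as a rule, a shell script is safest left unedited while it executes.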
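The merge conflict that follows centres on utils/dataloader.py, whose diff adds cycle_length and block_length to get_dataloader and drops shuffle_buffer_size from 1000 to 10. A minimal sketch of what those knobs typically control in a tf.data TFRecord pipeline; the session only shows the signature, so this body is illustrative rather than the repository's implementation:

```python
# Hedged sketch of a TFRecord input pipeline using the parameters from the
# diff below; get_dataloader's real body is not visible in the session.
import tensorflow as tf

def make_dataset(tfrecord_files, shuffle_buffer_size=10, cycle_length=4,
                 block_length=1, seed=42):
    files = tf.data.Dataset.from_tensor_slices(tfrecord_files)
    # cycle_length: number of shard files read concurrently;
    # block_length: consecutive records taken from each shard before rotating.
    ds = files.interleave(
        tf.data.TFRecordDataset,
        cycle_length=cycle_length,
        block_length=block_length,
        num_parallel_calls=tf.data.AUTOTUNE,
    )
    return ds.shuffle(shuffle_buffer_size, seed=seed).prefetch(tf.data.AUTOTUNE)
```

A buffer of 10 barely shuffles; it mainly decouples the consumer from shard order, which fits the overfit-a-tiny-sample setting these scripts target.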
+4504,7443036,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +4505,7443204,"TERMINAL",0,0,"[?25lf[?25h[?25l [?25h",,terminal_output +4506,7443433,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4507,7443666,"TERMINAL",0,0,"[?25la[?25h[?25lt[?25h",,terminal_output +4508,7443807,"TERMINAL",0,0,"a",,terminal_output +4509,7444822,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +4510,7444930,"TERMINAL",0,0,"",,terminal_output +4511,7445541,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +4512,7446185,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +4513,7446324,"TERMINAL",0,0,"",,terminal_output +4514,7447934,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +4515,7448043,"TERMINAL",0,0,"tils/",,terminal_output +4516,7448546,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +4517,7448753,"TERMINAL",0,0,"ataloader.py",,terminal_output +4518,7449191,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\rdiff --cc utils/dataloader.py\r\nindex a6b63f8,0a7dff2..0000000\r\n--- a/utils/dataloader.py\r\n+++ b/utils/dataloader.py\r\n@@@ -91,11 -70,9 +91,11 @@@ def get_dataloader\r\n image_h: int,\r\n image_w: int,\r\n image_c: int,\r\n- shuffle_buffer_size: int = 1000,\r\n+ shuffle_buffer_size: int = 10,\r\n num_parallel_calls: int = tf.data.AUTOTUNE,\r\n seed: int = 42,\r\n + cycle_length: int = 4,\r\n + block_length: int = 1,\r\n ):\r\n """"""\r\n Creates a tf.data.Dataset pipeline from TFRecord files.\r\n\r[?1l>]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4519,7452605,"TERMINAL",0,0,"q",,terminal_output +4520,7455098,"utils/dataloader.py",0,0,"",python,tab +4521,7457114,"utils/dataloader.py",0,0,"",python,tab +4522,7461054,"utils/dataloader.py",0,0,"",python,tab +4523,7464095,"TERMINAL",0,0,"git diff utils/dataloader.py",,terminal_output +4524,7464654,"TERMINAL",0,0,"checkout tmp",,terminal_output +4525,7465390,"TERMINAL",0,0,"branch",,terminal_output +4526,7465802,"TERMINAL",0,0,"checkout tmp",,terminal_output +4527,7465993,"TERMINAL",0,0,"\r\n[?2004l\rerror: Your local changes to the following files would be overwritten by checkout:\r\n\tutils/dataloader.py\r\nPlease commit your changes or stash them before you switch branches.\r\nAborting\r\n]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4528,7467353,"TERMINAL",0,0,"t",,terminal_output +4529,7467503,"TERMINAL",0,0,"[?25li[?25h[?25l5[?25h",,terminal_output +4530,7467813,"TERMINAL",0,0,"[?25l [?25h[?25lw[?25h",,terminal_output +4531,7467883,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4532,7467955,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +4533,7468129,"TERMINAL",0,0,"[?25lw[?25h",,terminal_output +4534,7468197,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4535,7468462,"TERMINAL",0,0,"\r\n[?2004l\rbash: ti5: command not found...\r\n",,terminal_output +4536,7470076,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4537,7470772,"TERMINAL",0,0,"g",,terminal_output +4538,7470853,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4539,7470963,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4540,7471085,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4541,7471207,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4542,7471529,"TERMINAL",0,0,"[?25lta[?25h",,terminal_output +4543,7471594,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +4544,7471785,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +4545,7472212,"TERMINAL",0,0,"\r\n[?2004l\rSaved working directory and index state WIP on revised-dataloader: 1eac634 
feat: revised dataloader for increased throughput\r\n",,terminal_output +4546,7472701,"TERMINAL",0,0,"]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4547,7472976,"TERMINAL",0,0,"git stash",,terminal_output +4548,7473201,"TERMINAL",0,0,"ti5 wtqwh",,terminal_output +4549,7473807,"TERMINAL",0,0,"git checkout tmp",,terminal_output +4550,7474827,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4551,7474838,"TERMINAL",0,0,"Switched to branch 'tmp'\r\n]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4552,7475665,"TERMINAL",0,0,"[?25lgi[?25h",,terminal_output +4553,7475730,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +4554,7475815,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +4555,7475923,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +4556,7477584,"TERMINAL",0,0,"git checkout tmp",,terminal_output +4557,7477706,"TERMINAL",0,0,"stash",,terminal_output +4558,7477834,"",0,0,"Switched from branch 'revised-dataloader' to 'tmp'",,git_branch_checkout +4559,7478334,"TERMINAL",0,0,"ti5 wtqwh",,terminal_output +4560,7478696,"TERMINAL",0,0,"git checkout tmp",,terminal_output +4561,7478985,"TERMINAL",0,0,"diff utils/dataloader.py",,terminal_output +4562,7479400,"TERMINAL",0,0,"checkout tmp",,terminal_output +4563,7479803,"TERMINAL",0,0,"branch",,terminal_output +4564,7480371,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +4565,7481523,"TERMINAL",0,0,"\r\n[?2004l\rSampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3301029/genie_1751067601_200000/\r\n",,terminal_output +4566,7485495,"TERMINAL",0,0,"2025-06-30 16:28:49.882616: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4567,7489182,"TERMINAL",0,0,"2025-06-30 16:28:53.362853: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4568,7495703,"TERMINAL",0,0,"2025-06-30 16:29:00.050724: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4569,7502252,"TERMINAL",0,0,"2025-06-30 16:29:06.670770: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4570,7508437,"TERMINAL",0,0,"2025-06-30 16:29:12.394135: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4571,7512840,"TERMINAL",0,0,"2025-06-30 16:29:17.165008: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4572,7515384,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4573,7527141,"TERMINAL",0,0,"Frame 1\r\n",,terminal_output +4574,7527691,"TERMINAL",0,0,"2025-06-30 16:29:32.116367: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4575,7529852,"TERMINAL",0,0,"2025-06-30 16:29:34.277792: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4576,7536601,"TERMINAL",0,0,"2025-06-30 16:29:41.027956: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4577,7539380,"TERMINAL",0,0,"2025-06-30 16:29:43.804372: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4578,7542857,"TERMINAL",0,0,"2025-06-30 16:29:47.181291: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4579,7544551,"TERMINAL",0,0,"Frame 2\r\n",,terminal_output +4580,7545095,"TERMINAL",0,0,"2025-06-30 16:29:49.520539: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4581,7547107,"TERMINAL",0,0,"2025-06-30 16:29:51.518686: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4582,7553751,"TERMINAL",0,0,"2025-06-30 16:29:58.176071: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4583,7555921,"TERMINAL",0,0,"2025-06-30 16:30:00.343350: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4584,7559937,"TERMINAL",0,0,"Frame 3\r\n",,terminal_output +4585,7560476,"TERMINAL",0,0,"2025-06-30 16:30:04.900136: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4586,7562506,"TERMINAL",0,0,"2025-06-30 16:30:06.934098: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4587,7566966,".gitignore",0,0,"*.pyc\n*.npy\n*.png\n*.gif\n\nwandb_key\ncheckpoints/\nwandb/\n__pycache__/\n*ckpt\nslurm*.out\ndata\ndata_tfrecord*\nlogs\nscripts",ignore,tab +4588,7567219,".gitignore",117,0,"",ignore,selection_mouse +4589,7567242,".gitignore",116,0,"",ignore,selection_command +4590,7569595,"TERMINAL",0,0,"2025-06-30 16:30:14.016477: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4591,7569952,".gitignore",117,0,"\n",ignore,content +4592,7571625,"TERMINAL",0,0,"2025-06-30 16:30:16.050255: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4593,7572570,".gitignore",117,1,"",ignore,content +4594,7575582,"TERMINAL",0,0,"Frame 4\r\n",,terminal_output +4595,7576162,"TERMINAL",0,0,"2025-06-30 16:30:20.586800: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
[rows 4596-4641 elided: repeated XLA dot_search_space.cc:200 autotuning warnings ("All configs were filtered out because none of them sufficiently match the hints...") interleaved with sampling progress lines "Frame 5" through "Frame 14" and one terminal_focus event]
+4642,7757244,"TERMINAL",0,0,"2025-06-30 16:33:21.670555: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4643,7759731,"TERMINAL",0,0,"2025-06-30 16:33:24.155769: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4644,7764744,"TERMINAL",0,0,"Frame 15\r\n",,terminal_output +4645,7765465,"TERMINAL",0,0,"2025-06-30 16:33:29.814739: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4646,7775583,"TERMINAL",0,0,"2025-06-30 16:33:40.007864: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4647,7778366,"TERMINAL",0,0,"2025-06-30 16:33:42.789887: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4648,7786657,"TERMINAL",0,0,"SSIM: 0.6215704679489136\r\n",,terminal_output +4649,7791952,"TERMINAL",0,0,"]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4650,7897355,"sample.py",0,0,"from dataclasses import dataclass\nimport time\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n 
num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_resolution, args.image_resolution, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid\n\n\n# --- Get video + latent actions ---\ndataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\nvideo_batch = next(iter(dataloader))\n# Get latent actions from first video only\nfirst_video = video_batch[:1]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +4651,7897519,"sample.py",254,3059,"#from utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: 
int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_resolution, args.image_resolution, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid\n\n\n# --- Get video + latent actions ---\n# dataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\n# video_batch = next(iter(dataloader))\nvideo_batch = np.load(""overfit_dir/single_sample_corner.npy"")\n# Get latent actions from first video only\nfirst_video = video_batch[:1, :args.seq_len]\n",python,content +4652,7904280,"sample.py",558,0,"",python,selection_mouse +4653,7904786,"sample.py",585,0,"",python,selection_mouse +4654,7905775,"sample.py",604,0,"",python,selection_mouse +4655,7905786,"sample.py",603,0,"",python,selection_command +4656,7906052,"sample.py",603,1,"0",python,selection_mouse +4657,7906053,"sample.py",604,0,"",python,selection_command +4658,7906074,"sample.py",604,27,"\n sample_argmax: bool = ",python,selection_mouse +4659,7906087,"sample.py",604,22,"\n sample_argmax: bo",python,selection_mouse +4660,7906101,"sample.py",604,19,"\n sample_argmax:",python,selection_mouse +4661,7906157,"sample.py",604,16,"\n sample_argm",python,selection_mouse +4662,7906157,"sample.py",604,13,"\n sample_a",python,selection_mouse +4663,7906157,"sample.py",604,12,"\n sample_",python,selection_mouse +4664,7906167,"sample.py",604,11,"\n sample",python,selection_mouse +4665,7906184,"sample.py",604,10,"\n sampl",python,selection_mouse +4666,7906239,"sample.py",604,9,"\n 
samp",python,selection_mouse +4667,7906760,"sample.py",581,0,"",python,selection_mouse +4668,7908091,"sample.py",554,0,"",python,selection_mouse +4669,7908237,"sample.py",552,13,"maskgit_steps",python,selection_mouse +4670,7908507,"sample.py",552,15,"maskgit_steps: ",python,selection_mouse +4671,7908523,"sample.py",552,18,"maskgit_steps: int",python,selection_mouse +4672,7908570,"sample.py",552,20,"maskgit_steps: int =",python,selection_mouse +4673,7908570,"sample.py",547,18,"\n maskgit_steps",python,selection_mouse +4674,7908818,"sample.py",552,23,"maskgit_steps: int = 25",python,selection_mouse +4675,7928777,".gitignore",0,0,"",ignore,tab +4676,7930466,"TERMINAL",0,0,"srun",,terminal_focus +4677,7936800,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"#!/usr/bin/env bash\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\n# Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}\n\n# Example: If you want to use a specific checkpoint, set it here\n# CHECKPOINT_PATH=$ws_dir/checkpoints/3299272/dynamics-tiny-overfit-big-lr-3299272_50000/\n# Or use the latest in the directory\n# CHECKPOINT_PATH=$(ls -d $CHECKPOINT_DIR/*/ | sort | tail -n 1)\nCHECKPOINT_PATH=$CHECKPOINT_DIR/genie_1751067601_200000/\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1\n",shellscript,tab +4678,7937976,"scripts_horeka/overfit_sample_tiny/sample.sh",1363,0,"",shellscript,selection_mouse +4679,7938527,"scripts_horeka/overfit_sample_tiny/sample.sh",1340,0,"",shellscript,selection_mouse +4680,7938529,"scripts_horeka/overfit_sample_tiny/sample.sh",1339,0,"",shellscript,selection_command +4681,7939504,"scripts_horeka/overfit_sample_tiny/sample.sh",915,0,"",shellscript,selection_command +4682,7945400,"scripts_horeka/overfit_sample_tiny/sample.sh",911,0,"",shellscript,selection_command +4683,7954879,"scripts_horeka/overfit_sample_tiny/sample.sh",1341,0," --maskgit_steps=1 \\n",shellscript,content +4684,7954939,"scripts_horeka/overfit_sample_tiny/sample.sh",1365,0," --temperature=0\n",shellscript,content +4685,7954940,"scripts_horeka/overfit_sample_tiny/sample.sh",1385,22,"",shellscript,content +4686,7962446,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +4687,7963029,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4688,7963146,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3301029/genie_1751067601_200000/\r\n",,terminal_output +4689,7967116,"TERMINAL",0,0,"2025-06-30 16:36:51.543428: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4690,7970673,"TERMINAL",0,0,"2025-06-30 16:36:55.052598: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4691,7977335,"TERMINAL",0,0,"2025-06-30 16:37:01.760909: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4692,7980692,"TERMINAL",0,0,"bash",,terminal_focus +4693,7984314,"TERMINAL",0,0,"2025-06-30 16:37:08.741162: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4694,7985368,"TERMINAL",0,0,"mv *.gif gifs",,terminal_command +4695,7985381,"TERMINAL",0,0,"]633;E;2025-06-30 16:37:09 mv *.gif gifs;ead59344-49db-4336-9336-47fae706e637]633;C]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +4696,7985491,"TERMINAL",0,0,"srun",,terminal_focus +4697,7990295,"TERMINAL",0,0,"2025-06-30 16:37:14.722057: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4698,7995164,"TERMINAL",0,0,"2025-06-30 16:37:19.574859: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4699,7997738,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4700,8009107,"TERMINAL",0,0,"Frame 1\r\n",,terminal_output +4701,8009687,"TERMINAL",0,0,"2025-06-30 16:37:34.086812: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4702,8011837,"TERMINAL",0,0,"2025-06-30 16:37:36.207140: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output
[rows 4703-4767 elided: repeated XLA dot_search_space.cc:200 autotuning warnings interleaved with sampling progress lines "Frame 2" through "Frame 15"]
+4768,8260876,"TERMINAL",0,0,"2025-06-30 16:41:45.205795: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4769,8268356,"TERMINAL",0,0,"SSIM: 0.2596510052680969\r\n",,terminal_output +4770,8273123,"TERMINAL",0,0,"]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +4771,8423614,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +4772,8430575,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +4773,8463610,"sample.py",0,0,"",python,tab +4774,8465825,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom jax import NamedSharding\nfrom flax.training.train_state import TrainState\nfrom flax.training import orbax_utils\nfrom orbax.checkpoint import PyTreeCheckpointer\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n 
init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n 
codebook_dropout=args.dropout,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n lam_init_params = dummy_lam.init(_rng, inputs)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n\n def create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n\n abstract_sharded_tokenizer_state = create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n abstract_sharded_lam_state = create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n\n tokenizer_restore_target = {""model"": abstract_sharded_tokenizer_state}\n lam_restore_target = {""model"": abstract_sharded_lam_state}\n\n tokenizer_restore_args = orbax_utils.restore_args_from_target(\n tokenizer_restore_target\n )\n lam_restore_args = orbax_utils.restore_args_from_target(lam_restore_target)\n\n restored_tokenizer_params = (\n PyTreeCheckpointer()\n .restore(\n args.tokenizer_checkpoint,\n item=tokenizer_restore_target,\n restore_args=tokenizer_restore_args,\n )[""model""]\n .params[""params""]\n )\n restored_lam_params = (\n PyTreeCheckpointer()\n .restore(\n args.lam_checkpoint, item=lam_restore_target, restore_args=lam_restore_args\n )[""model""]\n .params[""params""]\n )\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately dicarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n train_state.params[""params""][""lam""].update(restored_lam_params)\n\n return train_state\n",python,tab +4775,8469963,"genie.py",4650,0,"",python,selection_mouse +4776,8470619,"genie.py",4698,0,"",python,selection_mouse +4777,8471234,"genie.py",4672,0,"",python,selection_mouse +4778,8471381,"genie.py",4667,11,"temperature",python,selection_mouse +4779,8471989,"genie.py",4694,0,"",python,selection_mouse +4780,8484373,"genie.py",4717,0,"",python,selection_mouse +4781,8484543,"genie.py",4714,5,"steps",python,selection_mouse +4782,8490129,"genie.py",5538,0,"",python,selection_mouse +4783,8490262,"genie.py",5533,14,"unmasked_ratio",python,selection_mouse 
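A minimal sketch of the cosine unmasking schedule that MaskGITStep (captured in the genie.py buffer above) follows; steps and N below are illustrative stand-ins for --maskgit_steps and the number of tokens per frame, not values taken from the recording:

import math

steps = 25  # stand-in for --maskgit_steps
N = 360     # stand-in for tokens per frame

for step in range(steps):
    # same schedule as MaskGITStep: fraction of the frame still masked
    unmasked_ratio = math.cos(math.pi * (step + 1) / (steps * 2))
    # tokens committed (left unmasked) after this step
    num_unmasked_tokens = round(N * (1.0 - unmasked_ratio))
    print(step, round(unmasked_ratio, 3), num_unmasked_tokens)

The ratio falls from roughly 1 toward 0, so early steps commit only a few high-confidence tokens while the final step (step == steps - 1, which the recorded code decodes with argmax) commits the rest.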
+4784,8492155,"genie.py",5678,0,"",python,selection_mouse +4785,8492302,"genie.py",5668,12,"final_logits",python,selection_mouse +4786,8498485,"genie.py",5539,0,"",python,selection_mouse +4787,8498627,"genie.py",5533,14,"unmasked_ratio",python,selection_mouse +4788,8500386,"genie.py",5463,0,"",python,selection_mouse +4789,8500520,"genie.py",5459,9,"vid_embed",python,selection_mouse +4790,8506390,"genie.py",6497,0,"",python,selection_mouse +4791,8506523,"genie.py",6491,19,"num_unmasked_tokens",python,selection_mouse +4792,8510648,"genie.py",4808,0,"",python,selection_mouse +4793,8510806,"genie.py",4807,4,"mask",python,selection_mouse +4794,8513824,"genie.py",5146,0,"",python,selection_mouse +4795,8513964,"genie.py",5140,17,"curr_masked_frame",python,selection_mouse +4796,8531005,"genie.py",5191,0,"",python,selection_mouse +4797,8531162,"genie.py",5187,11,"expand_dims",python,selection_mouse +4798,8536974,"genie.py",5277,0,"",python,selection_mouse +4799,8537602,"genie.py",5272,0,"",python,selection_mouse +4800,8537770,"genie.py",5263,9,"vid_embed",python,selection_mouse +4801,8538436,"genie.py",5269,0,"",python,selection_mouse +4802,8538437,"genie.py",5263,9,"vid_embed",python,selection_mouse +4803,8539029,"genie.py",5378,0,"",python,selection_mouse +4804,8548116,"genie.py",6181,0,"",python,selection_mouse +4805,8555397,"genie.py",6105,0,"",python,selection_mouse +4806,8556155,"genie.py",6155,0,"",python,selection_mouse +4807,8556170,"genie.py",6154,0,"",python,selection_command +4808,8559595,"genie.py",5692,0,"",python,selection_mouse +4809,8559732,"genie.py",5688,8,"dynamics",python,selection_mouse +4810,8559944,"genie.py",5688,17,"dynamics.dynamics",python,selection_mouse +4811,8560010,"genie.py",5688,27,"dynamics.dynamics(vid_embed",python,selection_mouse +4812,8560118,"genie.py",5688,28,"dynamics.dynamics(vid_embed)",python,selection_mouse +4813,8560139,"genie.py",5688,29,"dynamics.dynamics(vid_embed)[",python,selection_mouse +4814,8560196,"genie.py",5688,31,"dynamics.dynamics(vid_embed)[:,",python,selection_mouse +4815,8560197,"genie.py",5688,32,"dynamics.dynamics(vid_embed)[:, ",python,selection_mouse +4816,8560198,"genie.py",5688,34,"dynamics.dynamics(vid_embed)[:, -1",python,selection_mouse +4817,8560221,"genie.py",5688,48,"dynamics.dynamics(vid_embed)[:, -1] / step_temp\n",python,selection_mouse +4818,8560286,"genie.py",5688,47,"dynamics.dynamics(vid_embed)[:, -1] / step_temp",python,selection_mouse +4819,8560631,"genie.py",5735,0,"",python,selection_mouse +4820,8560632,"genie.py",5734,0,"",python,selection_command +4821,8561347,"genie.py",5719,0,"",python,selection_mouse +4822,8562202,"genie.py",5673,0,"",python,selection_mouse +4823,8562342,"genie.py",5668,12,"final_logits",python,selection_mouse +4824,8565856,"genie.py",5807,0,"",python,selection_mouse +4825,8565927,"genie.py",5805,13,"sample_argmax",python,selection_mouse +4826,8567267,"genie.py",5900,0,"",python,selection_mouse +4827,8567268,"genie.py",5899,0,"",python,selection_command +4828,8567909,"genie.py",5872,0,"",python,selection_mouse +4829,8568064,"genie.py",5864,12,"final_logits",python,selection_mouse +4830,8571895,"genie.py",6246,0,"",python,selection_mouse +4831,8572957,"genie.py",6177,0,"",python,selection_mouse +4832,8574417,"genie.py",6155,0,"",python,selection_mouse +4833,8574418,"genie.py",6154,0,"",python,selection_command +4834,8577415,"genie.py",6321,0,"",python,selection_mouse +4835,8577616,"genie.py",6310,17,"final_token_probs",python,selection_mouse 
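The selection events above and below trace MaskGITStep's confidence-based re-masking; a self-contained sketch of just that update, with hypothetical shapes (batch of 1, N=8 tokens), mirroring the recorded genie.py logic:

import jax
import jax.numpy as jnp

N = 8
mask = jnp.ones((1, N), dtype=bool)                   # all tokens start masked
final_token_probs = jax.random.uniform(jax.random.PRNGKey(0), (1, N))
final_token_probs += ~mask                            # committed tokens score > 1 and stay committed
num_unmasked_tokens = 4                               # would come from the cosine schedule
idx_mask = jnp.arange(N) > num_unmasked_tokens        # True => keep masked next step
sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)
mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))
new_mask = mask_update_fn(mask, sorted_idxs)          # only the top-confidence tokens stay unmasked

Per batch element, token probabilities are sorted descending and everything past the schedule's unmasked budget is re-masked for the next step, exactly as in the recorded code.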
+4836,8578790,"genie.py",6355,0,"",python,selection_mouse +4837,8578958,"genie.py",6354,6,"masked",python,selection_mouse +4838,8579683,"genie.py",6372,0,"",python,selection_mouse +4839,8579695,"genie.py",6371,0,"",python,selection_command +4840,8579897,"genie.py",6370,1,"l",python,selection_mouse +4841,8579898,"genie.py",6368,3,"onl",python,selection_mouse +4842,8579900,"genie.py",6370,2,"ly",python,selection_command +4843,8579914,"genie.py",6367,5," only",python,selection_mouse +4844,8579928,"genie.py",6365,7,"ns only",python,selection_mouse +4845,8579984,"genie.py",6363,9,"kens only",python,selection_mouse +4846,8579984,"genie.py",6361,11,"tokens only",python,selection_mouse +4847,8579985,"genie.py",6356,16,"sked tokens only",python,selection_mouse +4848,8579989,"genie.py",6354,18,"masked tokens only",python,selection_mouse +4849,8580006,"genie.py",6352,20,"e masked tokens only",python,selection_mouse +4850,8580066,"genie.py",6350,22,"ate masked tokens only",python,selection_mouse +4851,8580067,"genie.py",6349,23,"date masked tokens only",python,selection_mouse +4852,8580069,"genie.py",6348,24,"pdate masked tokens only",python,selection_mouse +4853,8580144,"genie.py",6347,25,"Update masked tokens only",python,selection_mouse +4854,8580680,"genie.py",6347,0,"",python,selection_mouse +4855,8580680,"genie.py",6347,6,"Update",python,selection_mouse +4856,8580910,"genie.py",6347,7,"Update ",python,selection_mouse +4857,8580930,"genie.py",6347,13,"Update masked",python,selection_mouse +4858,8580994,"genie.py",6347,14,"Update masked ",python,selection_mouse +4859,8580995,"genie.py",6347,20,"Update masked tokens",python,selection_mouse +4860,8581057,"genie.py",6347,21,"Update masked tokens ",python,selection_mouse +4861,8581058,"genie.py",6347,25,"Update masked tokens only",python,selection_mouse +4862,8581422,"genie.py",6372,0,"",python,selection_mouse +4863,8581424,"genie.py",6371,0,"",python,selection_command +4864,8581792,"genie.py",6368,4,"only",python,selection_mouse +4865,8581808,"genie.py",6369,3,"nly",python,selection_command +4866,8581996,"genie.py",6368,1,"o",python,selection_mouse +4867,8582010,"genie.py",6361,8,"tokens o",python,selection_mouse +4868,8582072,"genie.py",6360,9," tokens o",python,selection_mouse +4869,8582072,"genie.py",6354,15,"masked tokens o",python,selection_mouse +4870,8582139,"genie.py",6353,16," masked tokens o",python,selection_mouse +4871,8582157,"genie.py",6347,22,"Update masked tokens o",python,selection_mouse +4872,8582321,"genie.py",6346,23," Update masked tokens o",python,selection_mouse +4873,8582700,"genie.py",6346,0,"",python,selection_mouse +4874,8582701,"genie.py",6346,1," ",python,selection_mouse +4875,8582915,"genie.py",6346,7," Update",python,selection_mouse +4876,8582950,"genie.py",6346,14," Update masked",python,selection_mouse +4877,8583013,"genie.py",6346,21," Update masked tokens",python,selection_mouse +4878,8583024,"genie.py",6346,61," Update masked tokens only\n new_token_idxs = jnp.where",python,selection_mouse +4879,8583041,"genie.py",6346,62," Update masked tokens only\n new_token_idxs = jnp.where(",python,selection_mouse +4880,8583066,"genie.py",6346,66," Update masked tokens only\n new_token_idxs = jnp.where(mask",python,selection_mouse +4881,8583124,"genie.py",6346,67," Update masked tokens only\n new_token_idxs = jnp.where(mask,",python,selection_mouse +4882,8583125,"genie.py",6346,68," Update masked tokens only\n new_token_idxs = jnp.where(mask, ",python,selection_mouse +4883,8583145,"genie.py",6346,86," Update masked tokens 
only\n new_token_idxs = jnp.where(mask, sampled_token_idxs",python,selection_mouse +4884,8583918,"genie.py",6372,0,"",python,selection_mouse +4885,8583932,"genie.py",6371,0,"",python,selection_command +4886,8584131,"genie.py",6368,4,"only",python,selection_mouse +4887,8584136,"genie.py",6369,3,"nly",python,selection_command +4888,8584311,"genie.py",6368,1,"o",python,selection_mouse +4889,8584328,"genie.py",6361,8,"tokens o",python,selection_mouse +4890,8584343,"genie.py",6354,15,"masked tokens o",python,selection_mouse +4891,8584414,"genie.py",6353,16," masked tokens o",python,selection_mouse +4892,8584414,"genie.py",6347,22,"Update masked tokens o",python,selection_mouse +4893,8584425,"genie.py",6369,26,"nly\n new_token_idxs",python,selection_mouse +4894,8584687,"genie.py",6347,22,"Update masked tokens o",python,selection_mouse +4895,8584704,"genie.py",6346,23," Update masked tokens o",python,selection_mouse +4896,8585186,"genie.py",6346,0,"",python,selection_mouse +4897,8651867,"genie.py",5900,0,"",python,selection_mouse +4898,8651890,"genie.py",5899,0,"",python,selection_command +4899,8652419,"genie.py",5804,0,"",python,selection_mouse +4900,8652577,"genie.py",5800,4,"self",python,selection_mouse +4901,8653090,"genie.py",5811,0,"",python,selection_mouse +4902,8653225,"genie.py",5805,13,"sample_argmax",python,selection_mouse +4903,8660605,"genie.py",3004,0,"",python,selection_mouse +4904,8660772,"genie.py",2996,13,"sample_argmax",python,selection_mouse +4905,8661782,"genie.py",3004,0,"",python,selection_mouse +4906,8661917,"genie.py",2996,13,"sample_argmax",python,selection_mouse +4907,8672317,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom genie import Genie, restore_genie_components\nfrom models.tokenizer import TokenizerVQVAE\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data_tfrecords/coinrun""\n # Optimization\n batch_size: int = 36\n min_lr: float = 3e-6\n max_lr: float = 3e-5\n warmup_steps: int = 5000\n # Tokenizer\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_gradients: bool = False\n\n\nargs = 
tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n if args.log and jax.process_index() == 0:\n wandb.init(\n entity=args.entity,\n project=args.project,\n name=args.name,\n tags=args.tags,\n group=""debug"",\n config=args\n )\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=jnp.float32\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n 
device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Restore checkpoint ---\n train_state = restore_genie_components(\n train_state, replicated_sharding, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n tfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n ]\n # dataloader = get_dataloader(\n # # NOTE: We deliberately pass the global batch size\n # # The dataloader shards the dataset across all processes\n # tfrecord_files,\n # args.seq_len,\n # args.batch_size,\n # *image_shape,\n # )\n step = 0\n while step < args.num_steps:\n # for videos in dataloader:\n npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_3_elems.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n start_time = time.time()\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n elapsed_time = (time.time() - start_time) * 1000\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n ""step_time_ms"": elapsed_time,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""genie_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +4908,8673413,"train_dynamics.py",1578,0,"",python,selection_mouse +4909,8673563,"train_dynamics.py",1573,10,"mask_limit",python,selection_mouse +4910,8679351,"train_dynamics.py",4200,0,"",python,selection_mouse +4911,8679522,"genie.py",0,0,"",python,tab +4912,8681746,"genie.py",927,0,"",python,selection_mouse +4913,8681903,"genie.py",921,10,"mask_limit",python,selection_mouse +4914,8683770,"genie.py",1815,0,"",python,selection_mouse +4915,8684005,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n 
def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(self.model_dim)\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n",python,tab +4916,8685596,"models/dynamics.py",304,0,"",python,selection_mouse +4917,8685760,"models/dynamics.py",297,10,"mask_limit",python,selection_mouse +4918,8689264,"models/dynamics.py",996,0,"",python,selection_mouse +4919,8689398,"models/dynamics.py",995,8,"training",python,selection_mouse +4920,8691553,"models/dynamics.py",1079,0,"",python,selection_mouse +4921,8692550,"models/dynamics.py",1105,0,"",python,selection_mouse +4922,8692781,"models/dynamics.py",1101,7,"uniform",python,selection_mouse +4923,8693747,"models/dynamics.py",1131,0,"",python,selection_mouse +4924,8693901,"models/dynamics.py",1127,10,"mask_limit",python,selection_mouse +4925,8694663,"models/dynamics.py",1125,0,"",python,selection_mouse +4926,8694882,"models/dynamics.py",1122,4,"self",python,selection_mouse +4927,8695380,"models/dynamics.py",1132,0,"",python,selection_mouse +4928,8695525,"models/dynamics.py",1127,10,"mask_limit",python,selection_mouse +4929,8696449,"models/dynamics.py",1082,0,"",python,selection_mouse +4930,8696613,"models/dynamics.py",1078,9,"mask_prob",python,selection_mouse +4931,8696883,"models/dynamics.py",1078,10,"mask_prob ",python,selection_mouse +4932,8696901,"models/dynamics.py",1078,12,"mask_prob = ",python,selection_mouse +4933,8696940,"models/dynamics.py",1078,15,"mask_prob = jax",python,selection_mouse +4934,8696940,"models/dynamics.py",1078,16,"mask_prob = jax.",python,selection_mouse +4935,8696955,"models/dynamics.py",1078,22,"mask_prob = jax.random",python,selection_mouse +4936,8697014,"models/dynamics.py",1078,30,"mask_prob = jax.random.uniform",python,selection_mouse +4937,8697081,"models/dynamics.py",1078,31,"mask_prob = jax.random.uniform(",python,selection_mouse +4938,8697100,"models/dynamics.py",1078,35,"mask_prob = jax.random.uniform(rng1",python,selection_mouse +4939,8697165,"models/dynamics.py",1078,37,"mask_prob = jax.random.uniform(rng1, ",python,selection_mouse +4940,8697184,"models/dynamics.py",1078,43,"mask_prob = jax.random.uniform(rng1, minval",python,selection_mouse +4941,8697265,"models/dynamics.py",1078,44,"mask_prob = jax.random.uniform(rng1, minval=",python,selection_mouse +4942,8697285,"models/dynamics.py",1078,48,"mask_prob = jax.random.uniform(rng1, minval=self",python,selection_mouse +4943,8697348,"models/dynamics.py",1078,59,"mask_prob = jax.random.uniform(rng1, minval=self.mask_limit",python,selection_mouse 
+4944,8697546,"models/dynamics.py",1078,60,"mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)",python,selection_mouse +4945,8703572,"models/dynamics.py",1151,0,"",python,selection_mouse +4946,8703771,"models/dynamics.py",1151,3,"mas",python,selection_mouse +4947,8703793,"models/dynamics.py",1151,7,"mask = ",python,selection_mouse +4948,8703816,"models/dynamics.py",1151,10,"mask = jax",python,selection_mouse +4949,8703837,"models/dynamics.py",1151,13,"mask = jax.ra",python,selection_mouse +4950,8703897,"models/dynamics.py",1151,17,"mask = jax.random",python,selection_mouse +4951,8703898,"models/dynamics.py",1151,20,"mask = jax.random.be",python,selection_mouse +4952,8703898,"models/dynamics.py",1103,48,"iform(rng1, minval=self.mask_limit)\n ",python,selection_mouse +4953,8703899,"models/dynamics.py",1108,43,"(rng1, minval=self.mask_limit)\n ",python,selection_mouse +4954,8703909,"models/dynamics.py",1110,41,"ng1, minval=self.mask_limit)\n ",python,selection_mouse +4955,8703923,"models/dynamics.py",1114,37," minval=self.mask_limit)\n ",python,selection_mouse +4956,8703939,"models/dynamics.py",1118,33,"val=self.mask_limit)\n ",python,selection_mouse +4957,8704000,"models/dynamics.py",1120,31,"l=self.mask_limit)\n ",python,selection_mouse +4958,8704000,"models/dynamics.py",1122,29,"self.mask_limit)\n ",python,selection_mouse +4959,8704001,"models/dynamics.py",1125,26,"f.mask_limit)\n ",python,selection_mouse +4960,8704006,"models/dynamics.py",1127,24,"mask_limit)\n ",python,selection_mouse +4961,8704023,"models/dynamics.py",1128,23,"ask_limit)\n ",python,selection_mouse +4962,8704083,"models/dynamics.py",1130,21,"k_limit)\n ",python,selection_mouse +4963,8704087,"models/dynamics.py",1131,20,"_limit)\n ",python,selection_mouse +4964,8704087,"models/dynamics.py",1133,18,"imit)\n ",python,selection_mouse +4965,8704105,"models/dynamics.py",1134,17,"mit)\n ",python,selection_mouse +4966,8704179,"models/dynamics.py",1135,16,"it)\n ",python,selection_mouse +4967,8704180,"models/dynamics.py",1136,15,"t)\n ",python,selection_mouse +4968,8704180,"models/dynamics.py",1137,14,")\n ",python,selection_mouse +4969,8704182,"models/dynamics.py",1138,13,"\n ",python,selection_mouse +4970,8704350,"models/dynamics.py",1151,66,"mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +4971,8704770,"models/dynamics.py",1217,0,"",python,selection_mouse +4972,8704776,"models/dynamics.py",1216,0,"",python,selection_command +4973,8705235,"models/dynamics.py",1217,0,"",python,selection_mouse +4974,8705237,"models/dynamics.py",1216,0,"",python,selection_command +4975,8705385,"models/dynamics.py",1216,1,")",python,selection_mouse +4976,8705388,"models/dynamics.py",1217,0,"",python,selection_command +4977,8705460,"models/dynamics.py",1214,3,"1])",python,selection_mouse +4978,8705461,"models/dynamics.py",1210,7,"e[:-1])",python,selection_mouse +4979,8705461,"models/dynamics.py",1204,13,"d.shape[:-1])",python,selection_mouse +4980,8705472,"models/dynamics.py",1201,16,"mbed.shape[:-1])",python,selection_mouse +4981,8705488,"models/dynamics.py",1198,19,"d_embed.shape[:-1])",python,selection_mouse +4982,8705543,"models/dynamics.py",1196,21,"vid_embed.shape[:-1])",python,selection_mouse +4983,8705544,"models/dynamics.py",1193,24,"b, vid_embed.shape[:-1])",python,selection_mouse +4984,8705545,"models/dynamics.py",1191,26,"rob, vid_embed.shape[:-1])",python,selection_mouse +4985,8705555,"models/dynamics.py",1217,44,"\n mask = mask.at[:, 0].set(False)",python,selection_mouse 
+4986,8705619,"models/dynamics.py",1217,43,"\n mask = mask.at[:, 0].set(False",python,selection_mouse +4987,8705639,"models/dynamics.py",1217,42,"\n mask = mask.at[:, 0].set(Fals",python,selection_mouse +4988,8705697,"models/dynamics.py",1217,41,"\n mask = mask.at[:, 0].set(Fal",python,selection_mouse +4989,8705698,"models/dynamics.py",1217,39,"\n mask = mask.at[:, 0].set(F",python,selection_mouse +4990,8705698,"models/dynamics.py",1217,38,"\n mask = mask.at[:, 0].set(",python,selection_mouse +4991,8705705,"models/dynamics.py",1217,36,"\n mask = mask.at[:, 0].se",python,selection_mouse +4992,8705722,"models/dynamics.py",1217,35,"\n mask = mask.at[:, 0].s",python,selection_mouse +4993,8705777,"models/dynamics.py",1217,34,"\n mask = mask.at[:, 0].",python,selection_mouse +4994,8705783,"models/dynamics.py",1217,33,"\n mask = mask.at[:, 0]",python,selection_mouse +4995,8705784,"models/dynamics.py",1217,32,"\n mask = mask.at[:, 0",python,selection_mouse +4996,8705790,"models/dynamics.py",1217,30,"\n mask = mask.at[:,",python,selection_mouse +4997,8705850,"models/dynamics.py",1217,29,"\n mask = mask.at[:",python,selection_mouse +4998,8705851,"models/dynamics.py",1217,27,"\n mask = mask.at",python,selection_mouse +4999,8705853,"models/dynamics.py",1217,26,"\n mask = mask.a",python,selection_mouse +5000,8705870,"models/dynamics.py",1163,54,"andom.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5001,8705927,"models/dynamics.py",1162,55,"random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5002,8705928,"models/dynamics.py",1161,56,".random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5003,8705928,"models/dynamics.py",1160,57,"x.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5004,8705938,"models/dynamics.py",1159,58,"ax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5005,8705960,"models/dynamics.py",1158,59,"jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5006,8705974,"models/dynamics.py",1157,60," jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5007,8706036,"models/dynamics.py",1156,61,"= jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5008,8706037,"models/dynamics.py",1155,62," = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5009,8706112,"models/dynamics.py",1154,63,"k = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5010,8706119,"models/dynamics.py",1153,64,"sk = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5011,8706220,"models/dynamics.py",1152,65,"ask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5012,8706280,"models/dynamics.py",1151,66,"mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5013,8706336,"models/dynamics.py",1150,67," mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5014,8706897,"models/dynamics.py",1150,0,"",python,selection_mouse +5015,8707236,"models/dynamics.py",1154,0,"",python,selection_mouse +5016,8707390,"models/dynamics.py",1151,4,"mask",python,selection_mouse +5017,8707580,"models/dynamics.py",1151,5,"mask ",python,selection_mouse +5018,8707594,"models/dynamics.py",1151,10,"mask = jax",python,selection_mouse +5019,8707609,"models/dynamics.py",1151,17,"mask = 
jax.random",python,selection_mouse +5020,8707623,"models/dynamics.py",1151,27,"mask = jax.random.bernoulli",python,selection_mouse +5021,8707681,"models/dynamics.py",1113,42,", minval=self.mask_limit)\n mask",python,selection_mouse +5022,8707682,"models/dynamics.py",1115,40,"minval=self.mask_limit)\n mask",python,selection_mouse +5023,8707691,"models/dynamics.py",1122,33,"self.mask_limit)\n mask",python,selection_mouse +5024,8707709,"models/dynamics.py",1127,28,"mask_limit)\n mask",python,selection_mouse +5025,8707777,"models/dynamics.py",1065,90,"\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask",python,selection_mouse +5026,8707961,"models/dynamics.py",1138,17,"\n mask",python,selection_mouse +5027,8708059,"models/dynamics.py",1151,66,"mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])",python,selection_mouse +5028,8708337,"models/dynamics.py",1217,0,"",python,selection_mouse +5029,8708337,"models/dynamics.py",1216,0,"",python,selection_command +5030,9017727,"models/dynamics.py",1138,0,"",python,selection_mouse +5031,9017731,"models/dynamics.py",1137,0,"",python,selection_command +5032,9018345,"models/dynamics.py",889,0,"",python,selection_mouse +5033,9019023,"models/dynamics.py",891,0,"",python,selection_mouse +5034,9020092,"models/dynamics.py",983,0,"",python,selection_mouse +5035,9020103,"models/dynamics.py",982,0,"",python,selection_command +5036,9021104,"models/dynamics.py",1004,0,"",python,selection_mouse +5037,9021106,"models/dynamics.py",1003,0,"",python,selection_command +5038,9021973,"models/dynamics.py",1065,0,"",python,selection_mouse +5039,9021974,"models/dynamics.py",1064,0,"",python,selection_command +5040,9024269,"models/dynamics.py",1065,0,"",python,selection_mouse +5041,9024274,"models/dynamics.py",1064,0,"",python,selection_command +5042,9024336,"models/dynamics.py",1064,1,")",python,selection_mouse +5043,9024337,"models/dynamics.py",1065,0,"",python,selection_command +5044,9024339,"models/dynamics.py",1004,61,"\n rng1, rng2 = jax.random.split(batch[""mask_rng""])",python,selection_mouse +5045,9024418,"models/dynamics.py",983,82,"\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])",python,selection_mouse +5046,9024480,"models/dynamics.py",923,142,"\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])",python,selection_mouse +5047,9025039,"models/dynamics.py",1117,0,"",python,selection_mouse +5048,9025673,"models/dynamics.py",1004,0,"",python,selection_mouse +5049,9025674,"models/dynamics.py",1003,0,"",python,selection_command +5050,9026191,"models/dynamics.py",1020,0,"",python,selection_mouse +5051,9076063,"models/dynamics.py",1655,0,"",python,selection_mouse +5052,9107228,"models/dynamics.py",1654,0,"",python,selection_mouse +5053,9107229,"models/dynamics.py",1653,0,"",python,selection_command +5054,9107774,"TERMINAL",0,0,"bash",,terminal_focus +5055,9170623,"models/dynamics.py",403,0,"",python,selection_mouse +5056,9170638,"models/dynamics.py",402,0,"",python,selection_command +5057,9897378,"models/dynamics.py",1364,0,"",python,selection_mouse +5058,9897380,"models/dynamics.py",1363,0,"",python,selection_command +5059,9899023,"TERMINAL",0,0,"srun",,terminal_focus +5060,9899234,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +5061,9900936,"TERMINAL",0,0,"q",,terminal_output +5062,9901098,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +5063,9901169,"TERMINAL",0,0,"[?25le[?25h",,terminal_output 
+5064,9901244,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +5065,9901311,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5066,9901732,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0634.localdomain: Mon Jun 30 17:09:06 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3306668 accelerat interact tum_cte0 R47:38\t 1 hkn0634",,terminal_output +5067,9902668,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0634:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +5068,9915092,"train_lam.py",0,0,"from dataclasses import dataclass, field\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data_tfrecords/coinrun""\n checkpoint: str = """"\n # Optimization\n batch_size: int = 36\n vq_beta: float = 0.25\n min_lr: float = 3e-6\n max_lr: float = 3e-5\n warmup_steps: int = 5000\n vq_reset_thresh: int = 50\n # LAM\n model_dim: int = 512\n latent_dim: int = 32\n num_latents: int = 6\n patch_size: int = 16\n num_blocks: int = 8\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.0\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_lam""\n tags: list[str] = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n\n\nargs = tyro.cli(Args)\n\n\ndef lam_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params, inputs, training=True, rngs={""dropout"": inputs[""rng""]}\n )\n gt_future_frames = inputs[""videos""][:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n\n@jax.jit\ndef train_step(state, inputs, action_last_active):\n # --- Update model ---\n rng, inputs[""rng""] = jax.random.split(inputs[""rng""])\n grad_fn = jax.value_and_grad(lam_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, idx_counts, metrics)), grads = grad_fn(state.params, state, inputs)\n state = 
state.apply_gradients(grads=grads)\n\n # --- Reset inactive latent actions ---\n codebook = state.params[""params""][""vq""][""codebook""]\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], codebook\n )\n state.params[""params""][""vq""][""codebook""] = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return state, loss, recon, action_last_active, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n if args.log and jax.process_index() == 0:\n wandb.init(\n entity=args.entity,\n project=args.project,\n name=args.name,\n tags=args.tags,\n group=""debug"",\n config=args\n )\n\n # --- Initialize model ---\n lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n # Track when each action was last sampled\n action_last_active = jnp.zeros(args.num_latents)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n rng, _rng = jax.random.split(rng)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = lam.init(_rng, inputs)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=lam.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n train_state = jax.device_put(train_state, replicated_sharding)\n action_last_active = jax.device_put(action_last_active, replicated_sharding)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n restore_target = {""model"": train_state}\n restore_args = orbax_utils.restore_args_from_target(restore_target)\n train_state.params[""params""].update(\n PyTreeCheckpointer()\n .restore(args.checkpoint, item=restore_target, restore_args=restore_args)[\n ""model""\n ]\n .params[""params""]\n )\n # Assume checkpoint is of the form tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- TRAIN LOOP ---\n tfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n ]\n dataloader = get_dataloader(\n # NOTE: We 
deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n tfrecord_files,\n args.seq_len,\n args.batch_size,\n *image_shape,\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng = jax.random.split(rng)\n\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng)\n start_time = time.time()\n train_state, loss, recon, action_last_active, metrics = train_step(\n train_state, inputs, action_last_active\n )\n elapsed_time = (time.time() - start_time) * 1000\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n ""step_time_ms"": elapsed_time,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0][1:]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""lam_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +5069,9915184,"train_lam.py",1332,6008," tags: list = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n\n\nargs = tyro.cli(Args)\n\n\ndef lam_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params, inputs, training=True, rngs={""dropout"": inputs[""rng""]}\n )\n gt_future_frames = inputs[""videos""][:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n\n@jax.jit\ndef train_step(state, inputs, action_last_active):\n # --- Update model ---\n rng, inputs[""rng""] = jax.random.split(inputs[""rng""])\n grad_fn = jax.value_and_grad(lam_loss_fn, has_aux=True, allow_int=True)\n 
(loss, (recon, idx_counts, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n\n # --- Reset inactive latent actions ---\n codebook = state.params[""params""][""vq""][""codebook""]\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], codebook\n )\n state.params[""params""][""vq""][""codebook""] = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return state, loss, recon, action_last_active, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n if args.log and jax.process_index() == 0:\n wandb.init(\n entity=args.entity,\n project=args.project,\n name=args.name,\n tags=args.tags,\n group=""debug"",\n config=args\n ) \n\n # --- Initialize model ---\n lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n # Track when each action was last sampled\n action_last_active = jnp.zeros(args.num_latents)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n rng, _rng = jax.random.split(rng)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = lam.init(_rng, inputs)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=lam.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n train_state = jax.device_put(train_state, replicated_sharding)\n action_last_active = jax.device_put(action_last_active, replicated_sharding)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n restore_target = {""model"": train_state}\n restore_args = orbax_utils.restore_args_from_target(restore_target)\n train_state.params[""params""].update(\n PyTreeCheckpointer()\n .restore(args.checkpoint, item=restore_target, restore_args=restore_args)[\n ""model""\n ]\n .params[""params""]\n )\n # Assume checkpoint is of the form tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- TRAIN LOOP ---\n tfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in 
os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n ]\n dataloader = get_dataloader(\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n tfrecord_files,\n args.seq_len,\n args.batch_size,\n *image_shape,\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n # for videos in dataloader:\n npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):\n",python,content +5070,9916663,"train_lam.py",2633,0,"",python,selection_mouse +5071,9916664,"train_lam.py",2632,0,"",python,selection_command +5072,9917245,"train_lam.py",2648,0,"",python,selection_mouse +5073,9917364,"train_lam.py",2642,14,"codebook_usage",python,selection_mouse +5074,9917964,"train_lam.py",2690,0,"",python,selection_mouse +5075,9917982,"train_lam.py",2689,0,"",python,selection_command +5076,9918525,"train_lam.py",2658,0,"",python,selection_mouse +5077,9918690,"train_lam.py",2658,12,"index_counts",python,selection_mouse +5078,9918969,"train_lam.py",2658,13,"index_counts ",python,selection_mouse +5079,9918988,"train_lam.py",2658,14,"index_counts !",python,selection_mouse +5080,9919002,"train_lam.py",2658,15,"index_counts !=",python,selection_mouse +5081,9919018,"train_lam.py",2658,16,"index_counts != ",python,selection_mouse +5082,9919075,"train_lam.py",2658,17,"index_counts != 0",python,selection_mouse +5083,9919076,"train_lam.py",2658,19,"index_counts != 0).",python,selection_mouse +5084,9919076,"train_lam.py",2658,23,"index_counts != 0).mean",python,selection_mouse +5085,9919132,"train_lam.py",2658,32,"index_counts != 0).mean(),\n )",python,selection_mouse +5086,9919343,"train_lam.py",2690,0,"",python,selection_mouse +5087,9919354,"train_lam.py",2689,0,"",python,selection_command +5088,9921044,"train_lam.py",2484,0,"",python,selection_mouse +5089,9921199,"train_lam.py",2480,7,"metrics",python,selection_mouse +5090,9922125,"train_lam.py",2666,0,"",python,selection_mouse +5091,9922281,"train_lam.py",2658,12,"index_counts",python,selection_mouse +5092,9923658,"train_lam.py",2690,0,"",python,selection_mouse +5093,9923666,"train_lam.py",2689,0,"",python,selection_command +5094,9924176,"train_lam.py",2633,0,"",python,selection_mouse +5095,9924177,"train_lam.py",2632,0,"",python,selection_command +5096,9925389,"train_lam.py",2649,0,"",python,selection_mouse +5097,10001443,"train_tokenizer.py",0,0,"from dataclasses import dataclass, field\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data_tfrecords/coinrun""\n checkpoint: str = """"\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n min_lr: float = 3e-4\n max_lr: float = 3e-4\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n 
latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 8\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_gradients: bool = False\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n if args.log and jax.process_index() == 0:\n wandb.init(\n entity=args.entity,\n project=args.project,\n name=args.name,\n tags=args.tags,\n group=""debug"",\n config=args\n )\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n 
dtype=jnp.float32,\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n restore_target = {""model"": train_state}\n restore_args = orbax_utils.restore_args_from_target(restore_target)\n train_state.params[""params""].update(\n PyTreeCheckpointer()\n .restore(args.checkpoint, item=restore_target, restore_args=restore_args)[\n ""model""\n ]\n .params[""params""]\n )\n # Assume checkpoint is of the form tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- TRAIN LOOP ---\n tfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n ]\n dataloader = get_dataloader(\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n tfrecord_files,\n args.seq_len,\n args.batch_size,\n *image_shape,\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout = jax.random.split(rng, 3)\n\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n start_time = time.time()\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n elapsed_time = (time.time() - start_time) * 1000\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n ""step_time_ms"": elapsed_time,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +5098,10001558,"train_tokenizer.py",1321,5966," tags: list = 
field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_gradients: bool = False\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n if args.log and jax.process_index() == 0:\n wandb.init(\n entity=args.entity,\n project=args.project,\n name=args.name,\n tags=args.tags,\n group=""debug"",\n config=args\n )\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n 
train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n restore_target = {""model"": train_state}\n restore_args = orbax_utils.restore_args_from_target(restore_target)\n train_state.params[""params""].update(\n PyTreeCheckpointer()\n .restore(args.checkpoint, item=restore_target, restore_args=restore_args)[\n ""model""\n ]\n .params[""params""]\n )\n # Assume checkpoint is of the form tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- TRAIN LOOP ---\n tfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n ]\n dataloader = get_dataloader(\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n tfrecord_files,\n args.seq_len,\n args.batch_size,\n *image_shape,\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n # for videos in dataloader:\n npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):\n # --- Train step ---\n rng, _rng, _rng_dropout = jax.random.split(rng, 3)\n\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n start_time = time.time()\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n # jax.block_until_ready(loss)\n",python,content +5099,10005767,"train_tokenizer.py",2761,0,"",python,selection_mouse +5100,10005769,"train_tokenizer.py",2760,0,"",python,selection_command +5101,10005897,"train_tokenizer.py",2760,1,",",python,selection_mouse +5102,10005908,"train_tokenizer.py",2761,0,"",python,selection_command +5103,10005926,"train_tokenizer.py",2722,39,"\n codebook_usage=codebook_usage,",python,selection_mouse +5104,10005982,"train_tokenizer.py",2703,58,"\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5105,10005983,"train_tokenizer.py",2656,105,"itment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5106,10005983,"train_tokenizer.py",2655,106,"mitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5107,10005994,"train_tokenizer.py",2654,107,"mmitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5108,10006006,"train_tokenizer.py",2652,109,"commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5109,10006024,"train_tokenizer.py",2651,110," commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5110,10006040,"train_tokenizer.py",2627,134," q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5111,10006095,"train_tokenizer.py",2626,135," 
q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5112,10006096,"train_tokenizer.py",2608,153," mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5113,10006096,"train_tokenizer.py",2607,154," mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5114,10006107,"train_tokenizer.py",2606,155," mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5115,10006125,"train_tokenizer.py",2586,175," loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5116,10006182,"train_tokenizer.py",2585,176," loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5117,10006239,"train_tokenizer.py",2565,196," metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5118,10006302,"train_tokenizer.py",2517,244," codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,",python,selection_mouse +5119,10009409,"train_lam.py",0,0,"",python,tab +5120,10010378,"train_lam.py",2684,0,"",python,selection_mouse +5121,10010380,"train_lam.py",2683,0,"",python,selection_command +5122,10010494,"train_lam.py",2683,1,",",python,selection_mouse +5123,10010508,"train_lam.py",2684,0,"",python,selection_command +5124,10010568,"train_lam.py",2680,4,"n(),",python,selection_mouse +5125,10010569,"train_lam.py",2673,11," 0).mean(),",python,selection_mouse +5126,10010569,"train_lam.py",2663,21,"_counts != 0).mean(),",python,selection_mouse +5127,10010577,"train_lam.py",2657,27,"(index_counts != 0).mean(),",python,selection_mouse +5128,10010596,"train_lam.py",2633,51,"\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5129,10010655,"train_lam.py",2631,53,"m,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5130,10010656,"train_lam.py",2629,55,"sim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5131,10010657,"train_lam.py",2628,56,"ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5132,10010662,"train_lam.py",2627,57,"=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5133,10010677,"train_lam.py",2626,58,"m=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5134,10010700,"train_lam.py",2624,60,"sim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5135,10010755,"train_lam.py",2623,61,"ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5136,10010756,"train_lam.py",2622,62," ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5137,10010756,"train_lam.py",2621,63," ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5138,10010775,"train_lam.py",2601,83," psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5139,10010832,"train_lam.py",2600,84," 
psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5140,10010832,"train_lam.py",2599,85," psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5141,10010833,"train_lam.py",2598,86," psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5142,10010844,"train_lam.py",2597,87," psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5143,10010911,"train_lam.py",2555,129," commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5144,10010914,"train_lam.py",2532,152," q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5145,10010929,"train_lam.py",2515,169," mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5146,10010952,"train_lam.py",2496,188," loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5147,10011242,"train_lam.py",2476,208," metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),",python,selection_mouse +5148,10204526,"train_lam.py",2554,0,"",python,selection_mouse +5149,10204537,"train_lam.py",2553,0,"",python,selection_command +5150,10205739,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +5151,10211602,"train_dynamics.py",0,0,"",python,tab +5152,10212722,"train_dynamics.py",4559,0,"",python,selection_mouse +5153,10212738,"train_dynamics.py",4558,0,"",python,selection_command +5154,10218500,"train_dynamics.py",4717,0,"",python,selection_mouse +5155,10218684,"train_dynamics.py",4715,4,"args",python,selection_mouse +5156,10219130,"train_dynamics.py",4621,0,"",python,selection_mouse +5157,10219301,"train_dynamics.py",4621,17,"latent_action_dim",python,selection_mouse +5158,10219876,"train_dynamics.py",4583,0,"",python,selection_mouse +5159,10220014,"train_dynamics.py",4581,7,"lam_dim",python,selection_mouse +5160,10223083,"train_dynamics.py",6176,0,"",python,selection_mouse +5161,10223244,"train_dynamics.py",6170,11,"train_state",python,selection_mouse +5162,10223902,"train_dynamics.py",6197,0,"",python,selection_mouse +5163,10224060,"train_dynamics.py",6184,24,"restore_genie_components",python,selection_mouse +5164,10225006,"train_dynamics.py",6174,0,"",python,selection_mouse +5165,10225166,"train_dynamics.py",6170,11,"train_state",python,selection_mouse +5166,10247482,"train_dynamics.py",1429,0,"",python,selection_mouse +5167,10247644,"train_dynamics.py",1422,14,"lam_checkpoint",python,selection_mouse +5168,10253050,"train_dynamics.py",1428,0,"",python,selection_mouse +5169,10254498,"train_dynamics.py",1429,0,"",python,selection_mouse +5170,10257391,"train_dynamics.py",1423,1,"a",python,selection_command +5171,10257669,"train_dynamics.py",1560,2,"at",python,selection_command +5172,10258556,"train_dynamics.py",1423,1,"a",python,selection_command +5173,10258699,"train_dynamics.py",1928,2,"ar",python,selection_command +5174,10259472,"train_dynamics.py",1928,3,"arg",python,selection_command +5175,10259604,"train_dynamics.py",1928,4,"args",python,selection_command 
+5176,10259752,"train_dynamics.py",3193,5,"args.",python,selection_command +5177,10259955,"train_dynamics.py",3193,6,"args.l",python,selection_command +5178,10260088,"train_dynamics.py",4327,7,"args.la",python,selection_command +5179,10260189,"train_dynamics.py",4576,8,"args.lam",python,selection_command +5180,10261506,"train_dynamics.py",4576,9,"args.lam_",python,selection_command +5181,10274497,"train_dynamics.py",1440,1,"r",python,selection_command +5182,10274673,"train_dynamics.py",2854,2,"re",python,selection_command +5183,10274852,"train_dynamics.py",6143,3,"Res",python,selection_command +5184,10275081,"train_dynamics.py",6143,4,"Rest",python,selection_command +5185,10275228,"train_dynamics.py",6143,5,"Resto",python,selection_command +5186,10275353,"train_dynamics.py",6143,6,"Restor",python,selection_command +5187,10275529,"train_dynamics.py",6143,7,"Restore",python,selection_command +5188,10277064,"train_dynamics.py",6199,0,"",python,selection_mouse +5189,10277224,"genie.py",0,0,"",python,tab +5190,10281688,"genie.py",7663,0,"",python,selection_mouse +5191,10282225,"genie.py",7629,0,"",python,selection_mouse +5192,10282348,"genie.py",7623,17,"LatentActionModel",python,selection_mouse +5193,10283407,"genie.py",8028,0,"",python,selection_mouse +5194,10283516,"genie.py",8018,15,"dummy_tokenizer",python,selection_mouse +5195,10287876,"genie.py",7207,0,"",python,selection_mouse +5196,10291809,"genie.py",7114,0,"",python,selection_mouse +5197,10291925,"genie.py",7113,4,"args",python,selection_mouse +5198,10295607,"genie.py",7143,1,"a",python,selection_command +5199,10295838,"genie.py",7261,2,"ar",python,selection_command +5200,10296528,"genie.py",7261,3,"arg",python,selection_command +5201,10296704,"genie.py",7261,4,"args",python,selection_command +5202,10296839,"genie.py",7261,5,"args.",python,selection_command +5203,10299988,"genie.py",7339,6,"args.l",python,selection_command +5204,10300693,"genie.py",7339,7,"args.la",python,selection_command +5205,10300785,"genie.py",7696,8,"args.lam",python,selection_command +5206,10302172,"genie.py",7696,9,"args.lam_",python,selection_command +5207,10302502,"genie.py",10042,10,"args.lam_c",python,selection_command +5208,10307567,"genie.py",10168,0,"",python,selection_mouse +5209,10307574,"genie.py",10167,0,"",python,selection_command +5210,10308330,"genie.py",10168,0,"",python,selection_mouse +5211,10308332,"genie.py",10167,0,"",python,selection_command +5212,10309524,"genie.py",9966,0,"",python,selection_mouse +5213,10342164,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0634 jafar]$ ",,terminal_output +5214,10342866,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\nsalloc: Relinquishing job allocation 3306668\r\nsalloc: Job allocation 3306668 has been revoked.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +5215,10343856,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G",,terminal_command +5216,10343911,"TERMINAL",0,0,"]633;E;2025-06-30 17:16:28 salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G;1b1b54ff-80e1-48fc-8101-86ff30a7ce8c]633;Csalloc: Granted job allocation 3306841\r\n",,terminal_output +5217,10344024,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +5218,10346245,".gitignore",0,0,"",ignore,tab +5219,10347309,"train_lam.py",0,0,"",python,tab +5220,10348220,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab 
+5221,10349719,"train_dynamics.py",0,0,"",python,tab +5222,10351143,"sample.py",0,0,"",python,tab +5223,10352737,"genie.py",0,0,"",python,tab +5224,10354813,"train_dynamics.py",0,0,"",python,tab +5225,10363238,"train_dynamics.py",6738,0,"",python,selection_mouse +5226,10363240,"train_dynamics.py",6737,0,"",python,selection_command +5227,10363938,"train_dynamics.py",6725,0,"",python,selection_mouse +5228,10363939,"train_dynamics.py",6724,0,"",python,selection_command +5229,10365467,"train_dynamics.py",6745,0,"",python,selection_mouse +5230,10366119,"train_dynamics.py",6735,0,"",python,selection_mouse +5231,10366243,"train_dynamics.py",6734,1," ",python,selection_mouse +5232,10366762,"train_dynamics.py",6738,0,"",python,selection_mouse +5233,10366765,"train_dynamics.py",6737,0,"",python,selection_command +5234,10366919,"train_dynamics.py",6737,1,"0",python,selection_mouse +5235,10366935,"train_dynamics.py",6738,0,"",python,selection_command +5236,10367509,"train_dynamics.py",6733,0,"",python,selection_mouse +5237,10369225,"train_dynamics.py",6731,0,"",python,selection_mouse +5238,10371079,"TERMINAL",0,0,"salloc: Nodes hkn0817 are ready for job\r\n",,terminal_output +5239,10371863,"TERMINAL",0,0,"]0;tum_cte0515@hkn0817:~/Projects/jafar[?2004h[tum_cte0515@hkn0817 jafar]$ ",,terminal_output +5240,10381088,"train_dynamics.py",7616,0,"",python,selection_mouse +5241,10386811,"train_dynamics.py",3022,0,"",python,selection_mouse +5242,10389266,"train_dynamics.py",2216,0,"",python,selection_mouse +5243,10389439,"train_dynamics.py",2212,7,"outputs",python,selection_mouse +5244,10390310,"train_dynamics.py",2259,0,"",python,selection_mouse +5245,10390450,"train_dynamics.py",2248,41,"softmax_cross_entropy_with_integer_labels",python,selection_mouse +5246,10394060,"train_dynamics.py",2321,0,"",python,selection_mouse +5247,10395635,"train_dynamics.py",2213,0,"",python,selection_mouse +5248,10398698,"train_dynamics.py",2059,0,"",python,selection_mouse +5249,10398831,"train_dynamics.py",2056,8,"apply_fn",python,selection_mouse +5250,10405857,"train_dynamics.py",2054,0,"",python,selection_mouse +5251,10406924,"train_dynamics.py",1982,0,"",python,selection_mouse +5252,10407812,"train_dynamics.py",1968,0,"",python,selection_mouse +5253,10410228,"train_dynamics.py",3091,0,"",python,selection_mouse +5254,10412340,"train_dynamics.py",2928,0,"",python,selection_mouse +5255,10412478,"train_dynamics.py",2924,5,"state",python,selection_mouse +5256,10421303,"train_dynamics.py",2919,0,"",python,selection_mouse +5257,10421625,"train_dynamics.py",7571,0,"",python,selection_mouse +5258,10423529,"train_dynamics.py",7627,0,"",python,selection_mouse +5259,10433979,"train_dynamics.py",5848,0,"",python,selection_mouse +5260,10437043,"train_dynamics.py",5844,0,"",python,selection_mouse +5261,10438696,"train_dynamics.py",0,0,"",python,tab +5262,10438697,"train_dynamics.py",4201,0,"",python,selection_mouse +5263,10438841,"genie.py",0,0,"",python,tab +5264,10445666,"train_dynamics.py",0,0,"",python,tab +5265,10447670,"train_dynamics.py",5845,0,"",python,selection_mouse +5266,10448183,"train_dynamics.py",5849,0,"",python,selection_mouse +5267,10448307,"train_dynamics.py",5846,5,"apply",python,selection_mouse +5268,10451085,"genie.py",0,0,"",python,tab +5269,10452686,"genie.py",2854,0,"",python,selection_mouse +5270,10452687,"genie.py",2853,0,"",python,selection_command +5271,10452835,"genie.py",2847,7,"outputs",python,selection_mouse +5272,10452836,"genie.py",2848,6,"utputs",python,selection_command 
+5273,10452991,"genie.py",2831,17,"\n return o",python,selection_mouse +5274,10453008,"genie.py",2795,53,"batch[""videos""].shape[2:4]\n )\n return o",python,selection_mouse +5275,10453070,"genie.py",2674,174," jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return o",python,selection_mouse +5276,10453071,"genie.py",2568,280,"dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return o",python,selection_mouse +5277,10453071,"genie.py",2511,337,"\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return o",python,selection_mouse +5278,10453076,"genie.py",2443,405," latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return o",python,selection_mouse +5279,10453091,"genie.py",2364,484," video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return o",python,selection_mouse +5280,10453150,"genie.py",2363,485," video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return o",python,selection_mouse +5281,10453150,"genie.py",2338,510,"outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return o",python,selection_mouse +5282,10453175,"genie.py",2264,584,"lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return o",python,selection_mouse 
+5283,10453244,"genie.py",2178,670,"tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return o",python,selection_mouse +5284,10453307,"genie.py",2090,758,"__call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return o",python,selection_mouse +5285,10453739,"genie.py",2090,0,"",python,selection_mouse +5286,10454105,"genie.py",2272,0,"",python,selection_mouse +5287,10454254,"genie.py",2264,11,"lam_outputs",python,selection_mouse +5288,10454792,"genie.py",2203,0,"",python,selection_mouse +5289,10454944,"genie.py",2203,9,"tokenizer",python,selection_mouse +5290,10455788,"genie.py",2212,0,"",python,selection_mouse +5291,10456846,"genie.py",2288,0,"",python,selection_mouse +5292,10457477,"genie.py",2276,0,"",python,selection_mouse +5293,10458064,"genie.py",2270,0,"",python,selection_mouse +5294,10458621,"genie.py",2281,0,"",python,selection_mouse +5295,10459209,"genie.py",2295,0,"",python,selection_mouse +5296,10459804,"genie.py",2291,0,"",python,selection_mouse +5297,10472603,"genie.py",2329,0,"\n ",python,content +5298,10474090,"genie.py",2338,0,"j",python,content +5299,10474091,"genie.py",2339,0,"",python,selection_keyboard +5300,10474193,"genie.py",2339,0,"a",python,content +5301,10474195,"genie.py",2340,0,"",python,selection_keyboard +5302,10474413,"genie.py",2340,0,"x",python,content +5303,10474417,"genie.py",2341,0,"",python,selection_keyboard +5304,10474475,"genie.py",2341,0,".",python,content +5305,10474476,"genie.py",2342,0,"",python,selection_keyboard +5306,10474746,"genie.py",2342,0,"d",python,content +5307,10474748,"genie.py",2343,0,"",python,selection_keyboard +5308,10474933,"genie.py",2343,0,"e",python,content +5309,10474934,"genie.py",2344,0,"",python,selection_keyboard +5310,10474999,"genie.py",2344,0,"b",python,content +5311,10475003,"genie.py",2345,0,"",python,selection_keyboard +5312,10475155,"genie.py",2345,0,"u",python,content +5313,10475157,"genie.py",2346,0,"",python,selection_keyboard +5314,10475262,"genie.py",2346,0,"g",python,content +5315,10475263,"genie.py",2347,0,"",python,selection_keyboard +5316,10475410,"genie.py",2347,0,".",python,content +5317,10475411,"genie.py",2348,0,"",python,selection_keyboard +5318,10475663,"genie.py",2348,0,"b",python,content +5319,10475665,"genie.py",2349,0,"",python,selection_keyboard +5320,10475730,"genie.py",2349,0,"r",python,content +5321,10475732,"genie.py",2350,0,"",python,selection_keyboard 
+5322,10475895,"genie.py",2350,0,"e",python,content +5323,10475896,"genie.py",2351,0,"",python,selection_keyboard +5324,10476078,"genie.py",2351,0,"a",python,content +5325,10476079,"genie.py",2352,0,"",python,selection_keyboard +5326,10476149,"genie.py",2352,0,"k",python,content +5327,10476151,"genie.py",2353,0,"",python,selection_keyboard +5328,10476513,"genie.py",2348,5,"breakpoint",python,content +5329,10477233,"genie.py",2358,0,"()",python,content +5330,10477236,"genie.py",2359,0,"",python,selection_keyboard +5331,10477314,"genie.py",2359,1,")",python,content +5332,10477315,"genie.py",2360,0,"",python,selection_keyboard +5333,10477452,"genie.py",2359,0,"",python,selection_command +5334,10480226,"genie.py",2269,0,"",python,selection_mouse +5335,10480961,"utils/dataloader.py",0,0,"import functools\nimport jax\n\nimport tensorflow as tf\ntf.config.experimental.set_visible_devices([], ""GPU"")\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef get_dataloader(\n tfrecord_paths: list[str], # List of TFRecord file paths\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = -1,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = 
jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), ""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n dataset = tf.data.TFRecordDataset(\n tfrecord_paths, num_parallel_reads=tf.data.AUTOTUNE\n )\n\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n\n # (f.srambical) NOTE: For TFRecords, it's often good to have a large shuffle buffer.\n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n",python,tab +5336,10482396,"TERMINAL",0,0,"s",,terminal_output +5337,10482574,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +5338,10482667,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +5339,10482795,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5340,10482979,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5341,10483108,"TERMINAL",0,0,"[?25le[?25h[?25l [?25h",,terminal_output +5342,10483259,"TERMINAL",0,0,"[?25l.[?25h[?25lv[?25h",,terminal_output +5343,10483500,"TERMINAL",0,0,"env/",,terminal_output +5344,10483648,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +5345,10483757,"TERMINAL",0,0,"in/",,terminal_output +5346,10484040,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5347,10484225,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5348,10484463,"TERMINAL",0,0,"tivate",,terminal_output +5349,10484757,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0817:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0817 jafar]$ ",,terminal_output +5350,10497663,"scripts_horeka/train_dynamics.sh",0,0,"#!/usr/bin/env bash\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\n\njob_name=""debug""\nslurm_job_id=""0000""\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ntokenizer_ckpt_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000\npython train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=5e-7 \\n --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,tab +5351,10499625,"scripts_horeka/train_dynamics.sh",604,0,"",shellscript,selection_mouse +5352,10500161,"scripts_horeka/train_dynamics.sh",601,0,"",shellscript,selection_mouse +5353,10500658,"scripts_horeka/train_dynamics.sh",599,0,"",shellscript,selection_mouse 
+5354,10508952,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=05:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --mail-user=mihir.mahajan2002@gmail.com\n#SBATCH --job-name=train_dynamics_minecraft_overfit_sample_tiny\n#SBATCH --mem=50G\n#SBATCH --mail-type=ALL\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\necho Running dynamics model overfit run. Slurm id: $slurm_job_id\n\n# Use checkpoints from tokenizer/lam overfit sample runs\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299272/tokenizer_1751037678_153500/\nlam_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299259/lam_1751036759_200000/\n\nsrun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=dynamics-tiny-overfit-big-lr-$slurm_job_id \\n --tags dynamics overfit tiny \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,tab +5355,10511028,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",2021,0,"",shellscript,selection_mouse +5356,10511215,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",2020,1,"\n",shellscript,selection_mouse +5357,10511232,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1950,71,"\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5358,10511288,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1865,156,"ctions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5359,10511289,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1729,292,"ize=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5360,10511289,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1607,414,"a_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n 
--tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5361,10511291,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1470,551,"entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5362,10511308,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1296,725," --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=dynamics-tiny-overfit-big-lr-$slurm_job_id \\n --tags dynamics overfit tiny \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5363,10511331,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1275,746," --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=dynamics-tiny-overfit-big-lr-$slurm_job_id \\n --tags dynamics overfit tiny \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5364,10511346,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1255,766," --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=dynamics-tiny-overfit-big-lr-$slurm_job_id \\n --tags dynamics overfit tiny \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5365,10511363,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1254,767," --min_lr=1e-4 \\n --max_lr=1e-4 \\n 
--log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=dynamics-tiny-overfit-big-lr-$slurm_job_id \\n --tags dynamics overfit tiny \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5366,10511554,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1233,788," --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=dynamics-tiny-overfit-big-lr-$slurm_job_id \\n --tags dynamics overfit tiny \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5367,10511595,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1200,821," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=dynamics-tiny-overfit-big-lr-$slurm_job_id \\n --tags dynamics overfit tiny \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5368,10511653,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1168,853,"srun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=dynamics-tiny-overfit-big-lr-$slurm_job_id \\n --tags dynamics overfit tiny \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5369,10511838,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1200,821," --ckpt_dir $CHECKPOINT_DIR \\n 
--batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=dynamics-tiny-overfit-big-lr-$slurm_job_id \\n --tags dynamics overfit tiny \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4\n",shellscript,selection_mouse +5370,10518662,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +5371,10519584,"scripts_horeka/train_dynamics.sh",895,0,"",shellscript,selection_mouse +5372,10519726,"scripts_horeka/train_dynamics.sh",894,1,"\n",shellscript,selection_mouse +5373,10519743,"scripts_horeka/train_dynamics.sh",854,41,"kpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5374,10519767,"scripts_horeka/train_dynamics.sh",748,147,"nt-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5375,10519781,"scripts_horeka/train_dynamics.sh",709,186,"point_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5376,10519796,"scripts_horeka/train_dynamics.sh",693,202,"\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5377,10519853,"scripts_horeka/train_dynamics.sh",661,234,"image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5378,10519854,"scripts_horeka/train_dynamics.sh",660,235,"_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5379,10519860,"scripts_horeka/train_dynamics.sh",659,236,"g_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5380,10519928,"scripts_horeka/train_dynamics.sh",632,263,"warmup_steps=125 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5381,10519937,"scripts_horeka/train_dynamics.sh",631,264,"-warmup_steps=125 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse 
+5382,10519944,"scripts_horeka/train_dynamics.sh",610,285,"--max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5383,10519960,"scripts_horeka/train_dynamics.sh",609,286," --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5384,10520019,"scripts_horeka/train_dynamics.sh",588,307," --min_lr=5e-7 \\n --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5385,10520019,"scripts_horeka/train_dynamics.sh",587,308," --min_lr=5e-7 \\n --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5386,10520025,"scripts_horeka/train_dynamics.sh",586,309," --min_lr=5e-7 \\n --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5387,10520042,"scripts_horeka/train_dynamics.sh",565,330," --batch_size=1 \\n --min_lr=5e-7 \\n --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5388,10520110,"scripts_horeka/train_dynamics.sh",532,363," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=5e-7 \\n --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +5389,10521107,"scripts_horeka/train_dynamics.sh",532,363,"",shellscript,content +5390,10521753,"scripts_horeka/train_dynamics.sh",532,0," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=dynamics-tiny-overfit-big-lr-$slurm_job_id \\n --tags dynamics overfit tiny \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n 
--dyna_num_heads=4\n",shellscript,content +5391,10529002,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",0,0,"",shellscript,tab +5392,10530578,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,0,"",shellscript,selection_mouse +5393,10530580,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1165,0,"",shellscript,selection_command +5394,10530713,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1165,1,"/",shellscript,selection_mouse +5395,10530713,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,0,"",shellscript,selection_command +5396,10530727,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1163,3,"00/",shellscript,selection_mouse +5397,10530743,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1152,14,"036759_200000/",shellscript,selection_mouse +5398,10530761,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,1,"\n",shellscript,selection_mouse +5399,10530824,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,33,"\n\nsrun python train_dynamics.py \",shellscript,selection_mouse +5400,10530825,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,63,"\n\nsrun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DI",shellscript,selection_mouse +5401,10530827,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,50,"\n\nsrun python train_dynamics.py \\n --ckpt_dir $",shellscript,selection_mouse +5402,10530837,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,44,"\n\nsrun python train_dynamics.py \\n --ckpt",shellscript,selection_mouse +5403,10530854,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,72,"\n\nsrun python train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n -",shellscript,selection_mouse +5404,10530913,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,36,"\n\nsrun python train_dynamics.py \\n ",shellscript,selection_mouse +5405,10530914,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,34,"\n\nsrun python train_dynamics.py \\n",shellscript,selection_mouse +5406,10531006,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,2,"\n\n",shellscript,selection_mouse +5407,10531036,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1166,1,"\n",shellscript,selection_mouse +5408,10531070,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",1055,111,"lam_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299259/lam_1751036759_200000/",shellscript,selection_mouse +5409,10531239,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",931,235,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299272/tokenizer_1751037678_153500/\nlam_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299259/lam_1751036759_200000/",shellscript,selection_mouse +5410,10531696,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",874,292,"# Use checkpoints from tokenizer/lam overfit sample 
runs\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299272/tokenizer_1751037678_153500/\nlam_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299259/lam_1751036759_200000/",shellscript,selection_mouse +5411,10539826,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +5412,10541565,"scripts_horeka/train_dynamics.sh",504,0,"",shellscript,selection_mouse +5413,10541708,"scripts_horeka/train_dynamics.sh",501,3,"000",shellscript,selection_mouse +5414,10541724,"scripts_horeka/train_dynamics.sh",493,11,"5133_130000",shellscript,selection_mouse +5415,10541740,"scripts_horeka/train_dynamics.sh",409,95,"90391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5416,10541753,"scripts_horeka/train_dynamics.sh",400,104,"points/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5417,10541808,"scripts_horeka/train_dynamics.sh",394,110,"/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5418,10541809,"scripts_horeka/train_dynamics.sh",388,116,"ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5419,10541811,"scripts_horeka/train_dynamics.sh",383,121,"dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5420,10541832,"scripts_horeka/train_dynamics.sh",381,123,"t_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5421,10541843,"scripts_horeka/train_dynamics.sh",379,125,"kpt_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5422,10541867,"scripts_horeka/train_dynamics.sh",376,128,"r_ckpt_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5423,10541881,"scripts_horeka/train_dynamics.sh",375,129,"er_ckpt_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5424,10541895,"scripts_horeka/train_dynamics.sh",367,137,"\ntokenizer_ckpt_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5425,10541993,"scripts_horeka/train_dynamics.sh",371,133,"enizer_ckpt_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5426,10542010,"scripts_horeka/train_dynamics.sh",370,134,"kenizer_ckpt_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5427,10542024,"scripts_horeka/train_dynamics.sh",369,135,"okenizer_ckpt_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse 
+5428,10542081,"scripts_horeka/train_dynamics.sh",368,136,"tokenizer_ckpt_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5429,10542082,"scripts_horeka/train_dynamics.sh",367,137,"\ntokenizer_ckpt_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5430,10542360,"scripts_horeka/train_dynamics.sh",368,136,"tokenizer_ckpt_dir=$ws_dir/checkpoints/3290391/tokenizer_1750845012_40000\nlam_ckpt_dir=$ws_dir/checkpoints/3290392/lam_1750845133_130000",shellscript,selection_mouse +5431,10542913,"scripts_horeka/train_dynamics.sh",368,136,"x",shellscript,content +5432,10543207,"scripts_horeka/train_dynamics.sh",369,0,"i",shellscript,content +5433,10543208,"scripts_horeka/train_dynamics.sh",370,0,"",shellscript,selection_keyboard +5434,10543635,"scripts_horeka/train_dynamics.sh",369,1,"",shellscript,content +5435,10543766,"scripts_horeka/train_dynamics.sh",368,1,"",shellscript,content +5436,10544005,"scripts_horeka/train_dynamics.sh",368,0,"# Use checkpoints from tokenizer/lam overfit sample runs\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299272/tokenizer_1751037678_153500/\nlam_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299259/lam_1751036759_200000/",shellscript,content +5437,10545194,"scripts_horeka/train_dynamics.sh",660,0,"\n",shellscript,content +5438,10548734,"scripts_horeka/train_dynamics.sh",195,0,"",shellscript,selection_mouse +5439,10549847,"scripts_horeka/train_dynamics.sh",225,0,"",shellscript,selection_mouse +5440,10555358,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",0,0,"",shellscript,tab +5441,10558113,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",584,0,"",shellscript,selection_mouse +5442,10558124,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",583,0,"",shellscript,selection_command +5443,10558691,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",650,0,"",shellscript,selection_mouse +5444,10558693,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",649,0,"",shellscript,selection_command +5445,10558896,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",649,1,"'",shellscript,selection_mouse +5446,10558897,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",650,0,"",shellscript,selection_command +5447,10558899,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",647,3,"d/'",shellscript,selection_mouse +5448,10558921,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",633,17,"-jafa_ws_shared/'",shellscript,selection_mouse +5449,10558928,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",622,28,"tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5450,10558943,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",612,38,"e/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5451,10559004,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",601,49,"rk/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse 
+5452,10559005,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",594,56,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5453,10559005,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",587,63,"_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5454,10559009,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",585,65,"ws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5455,10559073,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",650,1,"\n",shellscript,selection_mouse +5456,10559090,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",585,65,"ws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5457,10559326,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",535,115,"tf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5458,10564395,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +5459,10565821,"scripts_horeka/train_dynamics.sh",224,0,"",shellscript,selection_mouse +5460,10565949,"scripts_horeka/train_dynamics.sh",223,1,"'",shellscript,selection_mouse +5461,10565964,"scripts_horeka/train_dynamics.sh",224,1,"\n",shellscript,selection_mouse +5462,10566021,"scripts_horeka/train_dynamics.sh",194,30,"h/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5463,10566032,"scripts_horeka/train_dynamics.sh",186,38,"e/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5464,10566049,"scripts_horeka/train_dynamics.sh",183,41,"pace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5465,10566079,"scripts_horeka/train_dynamics.sh",178,46,"workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5466,10566094,"scripts_horeka/train_dynamics.sh",176,48,"k/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5467,10566108,"scripts_horeka/train_dynamics.sh",174,50,"ork/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5468,10566122,"scripts_horeka/train_dynamics.sh",172,52,"/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5469,10566136,"scripts_horeka/train_dynamics.sh",170,54,"fs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5470,10566150,"scripts_horeka/train_dynamics.sh",119,105,"_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5471,10566207,"scripts_horeka/train_dynamics.sh",117,107,"ds_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5472,10566208,"scripts_horeka/train_dynamics.sh",116,108,"rds_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5473,10566217,"scripts_horeka/train_dynamics.sh",115,109,"ords_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse 
+5474,10566274,"scripts_horeka/train_dynamics.sh",114,110,"cords_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5475,10566275,"scripts_horeka/train_dynamics.sh",113,111,"ecords_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5476,10566283,"scripts_horeka/train_dynamics.sh",112,112,"records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5477,10566344,"scripts_horeka/train_dynamics.sh",111,113,"_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5478,10566348,"scripts_horeka/train_dynamics.sh",110,114,"f_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5479,10566404,"scripts_horeka/train_dynamics.sh",109,115,"tf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,selection_mouse +5480,10566844,"scripts_horeka/train_dynamics.sh",109,115,"",shellscript,content +5481,10567331,"scripts_horeka/train_dynamics.sh",109,0,"tf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'",shellscript,content +5482,10567963,"scripts_horeka/train_dynamics.sh",225,0,"",shellscript,selection_command +5483,10568153,"scripts_horeka/train_dynamics.sh",224,1,"",shellscript,content +5484,10570422,"TERMINAL",0,0,"s",,terminal_output +5485,10570502,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5486,10570638,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +5487,10570907,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5488,10571044,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5489,10572042,"TERMINAL",0,0,"ripts_",,terminal_output +5490,10573004,"TERMINAL",0,0,"[?25lH[?25h",,terminal_output +5491,10573069,"TERMINAL",0,0,"",,terminal_output +5492,10573305,"TERMINAL",0,0,"[?25lT[?25h",,terminal_output +5493,10574627,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5494,10574800,"TERMINAL",0,0,"oreka/",,terminal_output +5495,10575150,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5496,10575439,"TERMINAL",0,0,"rain_",,terminal_output +5497,10576331,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5498,10576494,"TERMINAL",0,0,"ynamics.sh ",,terminal_output +5499,10577251,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+5500,10577411,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=4\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=712616\r\nSLURM_JOB_GPUS=0,1,2,3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0817\r\nSLURM_JOB_START_TIME=1751296588\r\nSLURM_STEP_NODELIST=hkn0817\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300188\r\nSLURM_PMI2_SRUN_PORT=41677\r\nSLURM_CPUS_ON_NODE=24\r\nSLURM_JOB_CPUS_PER_NODE=24\r\nSLURM_GPUS_ON_NODE=4\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306841\r\nSLURM_PTY_PORT=44195\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=4\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi2.hkibbi2e2.hkn0817\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=204800\r\nSLURM_PTY_WIN_COL=128\r\nSLURM_NODELIST=hkn0817\r\nSLURM_SRUN_COMM_PORT=44599\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=4\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306841\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0817\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=4\r\nSLURM_STEP_LAUNCHER_PORT=44599\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0817\r\n",,terminal_output +5501,10579505,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +5502,10588564,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",0,0,"",shellscript,tab +5503,10593382,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +5504,10594458,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",0,0,"",shellscript,tab +5505,10595767,"train_lam.py",0,0,"",python,tab +5506,10605312,"TERMINAL",0,0,"2025-06-30 17:20:49.736753: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n",,terminal_output +5507,10605439,"TERMINAL",0,0,"WARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751296849.850784 713422 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\n",,terminal_output +5508,10605491,"TERMINAL",0,0,"E0000 00:00:1751296849.902928 713422 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n",,terminal_output +5509,10605809,"TERMINAL",0,0,"W0000 00:00:1751296850.236950 713422 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751296850.236978 713422 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751296850.236981 713422 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751296850.236983 713422 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +5510,10640793,"TERMINAL",0,0,"W0000 00:00:1751296885.218083 713422 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +5511,10657461,"TERMINAL",0,0,"^C",,terminal_output +5512,10658011,"TERMINAL",0,0,"^C",,terminal_output +5513,10658224,"TERMINAL",0,0,"^C",,terminal_output +5514,10658433,"TERMINAL",0,0,"^C",,terminal_output +5515,10659077,"TERMINAL",0,0,"^C",,terminal_output +5516,10659281,"TERMINAL",0,0,"^C",,terminal_output +5517,10659471,"TERMINAL",0,0,"^C",,terminal_output +5518,10659662,"TERMINAL",0,0,"^C",,terminal_output +5519,10662463,"TERMINAL",0,0,"bash",,terminal_focus +5520,10663529,"TERMINAL",0,0,"srun",,terminal_focus +5521,10666999,"TERMINAL",0,0,"bash",,terminal_focus +5522,10667002,"train_lam.py",0,0,"",python,tab +5523,10669192,"TERMINAL",0,0,"",,terminal_focus +5524,10674107,"TERMINAL",0,0,"verlap --jobid 3305342 /bin/bash",,terminal_command +5525,10674140,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;4d11dbdc-690b-4257-b927-bbd493ebfa56]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D",,terminal_output +5526,10676052,"TERMINAL",0,0,"allo^C",,terminal_command +5527,10676087,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;4d11dbdc-690b-4257-b927-bbd493ebfa56]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D",,terminal_output +5528,10689262,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G",,terminal_command +5529,10689318,"TERMINAL",0,0,"]633;E;2025-06-30 17:22:13 salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G;4d11dbdc-690b-4257-b927-bbd493ebfa56]633;Csalloc: Granted job allocation 3306855\r\n",,terminal_output +5530,10689427,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +5531,10716678,"TERMINAL",0,0,"salloc: Nodes hkn0408 are ready for job\r\n",,terminal_output +5532,10717678,"TERMINAL",0,0,"]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h[tum_cte0515@hkn0408 jafar]$ ",,terminal_output +5533,10746345,"TERMINAL",0,0,"s",,terminal_output +5534,10746513,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +5535,10746624,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +5536,10746885,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +5537,10747083,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5538,10747269,"TERMINAL",0,0,"[?25le[?25h[?25l [?25h",,terminal_output +5539,10747443,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +5540,10747507,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +5541,10748327,"TERMINAL",0,0,"env/",,terminal_output +5542,10748670,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +5543,10748861,"TERMINAL",0,0,"in/",,terminal_output +5544,10749554,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5545,10749904,"TERMINAL",0,0,"ctivate",,terminal_output +5546,10750182,"TERMINAL",0,0,"[?25l[?2004l\r]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ [?25h",,terminal_output 
+5547,10830240,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +5548,10830412,"TERMINAL",0,0,"alloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 --mem=200G",,terminal_output +5549,10830969,"TERMINAL",0,0,"\ridling",,terminal_output +5550,10831241,"TERMINAL",0,0,"queue",,terminal_output +5551,10831594,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +5552,10835848,"TERMINAL",0,0,"t",,terminal_output +5553,10835958,"TERMINAL",0,0,"rain_",,terminal_output +5554,10836348,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5555,10836537,"TERMINAL",0,0,"ynamics.sh ",,terminal_output +5556,10836985,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +5557,10837195,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3458179\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0408\r\nSLURM_JOB_START_TIME=1751296933\r\nSLURM_STEP_NODELIST=hkn0408\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300533\r\nSLURM_PMI2_SRUN_PORT=38655\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306855\r\nSLURM_PTY_PORT=36241\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e12.hkn0408\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0408\r\nSLURM_SRUN_COMM_PORT=42547\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306855\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0408\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=42547\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0408\r\n",,terminal_output +5558,10857267,"TERMINAL",0,0,"2025-06-30 17:25:01.598230: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n",,terminal_output +5559,10857391,"TERMINAL",0,0,"WARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751297101.795816 3462809 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751297101.806636 3462809 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n",,terminal_output +5560,10857571,"TERMINAL",0,0,"W0000 00:00:1751297101.997081 3462809 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297101.997108 3462809 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297101.997111 3462809 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297101.997113 3462809 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +5561,10882451,"TERMINAL",0,0,"W0000 00:00:1751297126.787764 3462809 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +5562,10883330,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +5563,10884602,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +5564,10885526,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_172528-4d6085mf\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-tiny-overfit-big-lr-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/4d6085mf\r\n",,terminal_output +5565,10888085,"TERMINAL",0,0,"2025-06-30 17:25:32.507509: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5566,10901212,"TERMINAL",0,0,"2025-06-30 17:25:45.636405: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +5567,10913026,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +5568,10961925,"TERMINAL",0,0,"l",,terminal_output +5569,10962336,"TERMINAL",0,0,"\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(77)\r\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\r\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\r\n-> jax.debug.breakpoint()\r\n outputs = dict(\r\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\r\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\r\n )\r\n outputs[""mask_rng""] = batch[""mask_rng""]\r\n(jdb) ",,terminal_output +5570,10965088,"TERMINAL",0,0,"l",,terminal_output +5571,10965154,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5572,10965304,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5573,10965787,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +5574,10966308,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +5575,10966515,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +5576,10966691,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5577,10966922,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5578,10967119,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +5579,10967185,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5580,10967414,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5581,10967646,"TERMINAL",0,0,"\r\n{'emb': Array([[-2.06644461e-02, 1.57381237e-01, 1.04974315e-01,\r\n 8.31303596e-02, -2.28744492e-01, -4.48299237e-02,\r\n -3.89327347e-01, -6.62072524e-02, 4.48553376e-02,\r\n 1.71602875e-01, 2.56862313e-01, -4.42324989e-02,\r\n 1.99507270e-02, 1.38756456e-02, -9.82519388e-02,\r\n 9.18445960e-02, 1.92057088e-01, -1.82234600e-01,\r\n -1.68454468e-01, 5.54726236e-02, -6.02309741e-02,\r\n 5.76792836e-01, 2.01188251e-01, -1.51617467e-01,\r\n -2.02136129e-01, -1.05680108e-01, 2.32920989e-01,\r\n -1.32316388e-02, 4.96481732e-02, -1.96849145e-02,\r\n -1.02233127e-01, -8.61209556e-02],\r\n [-2.04412397e-02, 1.52993158e-01, 1.07777782e-01,\r\n 8.40213224e-02, -2.30538368e-01, -5.41780740e-02,\r\n -3.75789791e-01, -5.28643541e-02, 5.76577857e-02,\r\n 1.46091238e-01, 2.56956875e-01, -1.49174063e-02,\r\n 3.40763777e-02, 3.21384445e-02, -9.69494730e-02,\r\n 1.03308178e-01, 1.91881970e-01, -1.81205451e-01,\r\n -1.58905551e-01, 4.81232218e-02, -6.95524365e-02,\r\n 5.87922931e-01, 2.11219475e-01, -1.46324635e-01,\r\n -2.15349585e-01, -1.43863767e-01, 2.08512336e-01,\r\n -6.90077385e-03, 4.84053716e-02, -3.18318158e-02,\r\n -9.18286443e-02, -8.99955630e-02],\r\n [-2.20337678e-02, 1.56218350e-01, 1.13947041e-01,\r\n 7.73238167e-02, -2.23263606e-01, -6.12604544e-02,\r\n -3.58851075e-01, -3.94837707e-02, 7.67167285e-02,\r\n 1.23967595e-01, 2.51726180e-01, 1.04468381e-02,\r\n 5.19063510e-02, 5.19069135e-02, -9.28816199e-02,\r\n 1.11573495e-01, 1.98060721e-01, -1.78756952e-01,\r\n -1.47176921e-01, 4.61846143e-02, -7.69104585e-02,\r\n 5.98050416e-01, 2.14827552e-01, -1.31596267e-01,\r\n -2.28237033e-01, -1.75821930e-01, 1.90953523e-01,\r\n 1.52907771e-04, 5.52865639e-02, -5.04227802e-02,\r\n -7.67706633e-02, -8.96454751e-02],\r\n [-2.64671892e-02, 1.60734490e-01, 1.20925076e-01,\r\n 6.79030120e-02, -2.09702298e-01, -6.44291416e-02,\r\n -3.41321290e-01, -3.28756608e-02, 9.64392349e-02,\r\n 1.10693596e-01, 2.39635915e-01, 
2.25846115e-02,\r\n 7.08384067e-02, 6.75408766e-02, -8.68964940e-02,\r\n 1.19671963e-01, 2.07096174e-01, -1.74477443e-01,\r\n -1.37848660e-01, 4.93825637e-02, -7.98531473e-02,\r\n 6.07045591e-01, 2.17198059e-01, -1.13419972e-01,\r\n -2.35130534e-01, -1.97177619e-01, 1.82712853e-01,\r\n 2.96846638e-03, 6.81845322e-02, -6.80191442e-02,\r\n -6.05696999e-02, -8.59488472e-02],\r\n [-3.13863903e-02, 1.59992307e-01, 1.26182616e-01,\r\n 6.19076528e-02, -1.95698574e-01, -6.39557540e-02,\r\n -3.28066051e-01, -3.34148854e-02, 1.13626808e-01,\r\n 1.10477470e-01, 2.27331698e-01, 1.92359667e-02,\r\n 8.46975371e-02, 7.24138469e-02, -8.28196555e-02,\r\n 1.26885712e-01, 2.17566565e-01, -1.66701019e-01,\r\n -1.33022487e-01, 5.59766106e-02, -7.86473081e-02,\r\n 6.11632943e-01, 2.21393108e-01, -9.91352275e-02,\r\n -2.38528118e-01, -2.09173068e-01, 1.80303469e-01,\r\n 6.39682577e-04, 7.80359954e-02, -7.90147111e-02,\r\n -4.94116582e-02, -8.35404769e-02],\r\n [-3.56354229e-02, 1.55665740e-01, 1.28545925e-01,\r\n 6.12579435e-02, -1.87075078e-01, -6.01769090e-02,\r\n -3.24419260e-01, -3.65245380e-02, 1.27354309e-01,\r\n 1.15800679e-01, 2.19914541e-01, 7.53706554e-03,\r\n 9.19252709e-02, 6.58658147e-02, -8.16751048e-02,\r\n 1.32942811e-01, 2.26802319e-01, -1.58808932e-01,\r\n -1.33008018e-01, 6.13830797e-02, -7.83538446e-02,\r\n 6.08377814e-01, 2.26621553e-01, -9.20174643e-02,\r\n -2.42198348e-01, -2.14183986e-01, 1.78980038e-01,\r\n -6.52195374e-03, 7.97115713e-02, -8.49727541e-02,\r\n -4.94079106e-02, -8.37108716e-02],\r\n [-4.08246033e-02, 1.51615247e-01, 1.27682164e-01,\r\n 6.51669055e-02, -1.84983030e-01, -5.37338816e-02,\r\n -3.27306718e-01, -4.07469831e-02, 1.36378273e-01,\r\n 1.15314156e-01, 2.17117518e-01, -4.73031821e-03,\r\n 9.64696035e-02, 5.43267243e-02, -8.19234028e-02,\r\n 1.39879420e-01, 2.32503906e-01, -1.55131683e-01,\r\n -1.37019396e-01, 6.38827533e-02, -8.00392330e-02,\r\n 5.99696398e-01, 2.32011944e-01, -9.07514393e-02,\r\n -2.46448964e-01, -2.14880064e-01, 1.76966965e-01,\r\n -1.82339326e-02, 7.54125789e-02, -8.76635686e-02,\r\n -5.89008927e-02, -8.25736076e-02],\r\n [-4.84302677e-02, 1.49284571e-01, 1.25526458e-01,\r\n 7.27709383e-02, -1.87823996e-01, -4.58997674e-02,\r\n -3.31801564e-01, -4.58733402e-02, 1.41733959e-01,\r\n 1.07505023e-01, 2.15431735e-01, -1.65429208e-02,\r\n 9.98358801e-02, 4.38945703e-02, -8.24385434e-02,\r\n 1.46128207e-01, 2.33804390e-01, -1.54992193e-01,\r\n -1.41346440e-01, 6.36399835e-02, -8.22042599e-02,\r\n 5.90426683e-01, 2.36474723e-01, -9.37158838e-02,\r\n -2.50692755e-01, -2.12758958e-01, 1.75186113e-01,\r\n -3.17831486e-02, 6.92876130e-02, -8.86967257e-02,\r\n -7.14951605e-02, -7.73812234e-02],\r\n [-5.68740480e-02, 1.49820507e-01, 1.23145990e-01,\r\n 8.38316754e-02, -1.92898184e-01, -3.99489366e-02,\r\n -3.34582031e-01, -5.13668247e-02, 1.45983934e-01,\r\n 9.69058871e-02, 2.12808147e-01, -2.61926092e-02,\r\n 1.02066681e-01, 3.49440053e-02, -8.16528872e-02,\r\n 1.50381461e-01, 2.32650667e-01, -1.53186679e-01,\r\n -1.44534752e-01, 6.32675067e-02, -8.51643905e-02,\r\n 5.84151328e-01, 2.39935920e-01, -9.86312106e-02,\r\n -2.53275007e-01, -2.06560001e-01, 1.74002483e-01,\r\n -4.28200997e-02, 6.43548146e-02, -9.11081135e-02,\r\n -8.19090158e-02, -6.92497641e-02],\r\n [-6.48092851e-02, 1.52373135e-01, 1.19813688e-01,\r\n 9.65256616e-02, -1.98015705e-01, -3.63896601e-02,\r\n -3.35518032e-01, -5.69793656e-02, 1.51089013e-01,\r\n 8.90043452e-02, 2.10065097e-01, -3.35527398e-02,\r\n 1.03504151e-01, 2.73049846e-02, -7.95303732e-02,\r\n 1.53164431e-01, 2.30866000e-01, 
-1.48340806e-01,\r\n -1.46751121e-01, 6.44448921e-02, -8.91982764e-02,\r\n 5.81179798e-01, 2.42264867e-01, -1.04291350e-01,\r\n -2.52968729e-01, -1.96271315e-01, 1.72850519e-01,\r\n -4.90686446e-02, 6.10634387e-02, -9.53309312e-02,\r\n -8.86533409e-02, -5.95425516e-02],\r\n [-6.87432438e-02, 1.54569998e-01, 1.16616160e-01,\r\n 1.05558768e-01, -2.00807348e-01, -3.42175253e-02,\r\n -3.36314291e-01, -6.18798882e-02, 1.55375823e-01,\r\n 8.69681239e-02, 2.07910016e-01, -3.86314839e-02,\r\n 1.04624867e-01, 2.34758370e-02, -7.60038719e-02,\r\n 1.54506207e-01, 2.30384603e-01, -1.44384786e-01,\r\n -1.48303017e-01, 6.68679774e-02, -9.18803960e-02,\r\n 5.80345571e-01, 2.42731795e-01, -1.10356562e-01,\r\n -2.50474751e-01, -1.85560435e-01, 1.71992525e-01,\r\n -5.20059243e-02, 5.92940710e-02, -9.89861861e-02,\r\n -9.23468694e-02, -5.11041172e-02],\r\n [-6.80233017e-02, 1.55147627e-01, 1.14131145e-01,\r\n 1.09160811e-01, -2.02053800e-01, -3.27304080e-02,\r\n -3.37359786e-01, -6.41393363e-02, 1.57787189e-01,\r\n 9.08768103e-02, 2.07733944e-01, -4.29207534e-02,\r\n 1.07050493e-01, 2.36421786e-02, -7.08443522e-02,\r\n 1.54171392e-01, 2.29823545e-01, -1.42805994e-01,\r\n -1.49726495e-01, 6.85217455e-02, -9.42031145e-02,\r\n 5.79524100e-01, 2.42819369e-01, -1.16480976e-01,\r\n -2.48554319e-01, -1.77497908e-01, 1.70745581e-01,\r\n -5.44653423e-02, 5.79439625e-02, -1.01841412e-01,\r\n -9.41707492e-02, -4.47259694e-02],\r\n [-6.51682094e-02, 1.54700905e-01, 1.11247547e-01,\r\n 1.09818757e-01, -2.01473191e-01, -3.12402863e-02,\r\n -3.38545978e-01, -6.42286390e-02, 1.58516705e-01,\r\n 9.62512642e-02, 2.09182233e-01, -4.57704887e-02,\r\n 1.10557787e-01, 2.62331646e-02, -6.61031082e-02,\r\n 1.54649362e-01, 2.29342133e-01, -1.42977908e-01,\r\n -1.51171535e-01, 6.87917694e-02, -9.54840854e-02,\r\n 5.77193856e-01, 2.42868617e-01, -1.22225061e-01,\r\n -2.48138547e-01, -1.74543634e-01, 1.69541836e-01,\r\n -5.83209395e-02, 5.59322499e-02, -1.03993371e-01,\r\n -9.54244137e-02, -3.98087651e-02],\r\n [-6.29820675e-02, 1.53303012e-01, 1.09526165e-01,\r\n 1.08821131e-01, -1.97792456e-01, -2.98455060e-02,\r\n -3.40190858e-01, -6.38211891e-02, 1.58676848e-01,\r\n 9.80172977e-02, 2.11571097e-01, -4.42979522e-02,\r\n 1.14347056e-01, 3.00077200e-02, -6.32114932e-02,\r\n 1.57130197e-01, 2.29706809e-01, -1.44495651e-01,\r\n -1.52458981e-01, 6.93458021e-02, -9.45750996e-02,\r\n 5.73564768e-01, 2.42486268e-01, -1.27609387e-01,\r\n -2.48749688e-01, -1.75961733e-01, 1.68671355e-01,\r\n -6.29061684e-02, 5.38881533e-02, -1.05620712e-01,\r\n -9.68577638e-02, -3.62959206e-02],\r\n [-6.32229149e-02, 1.52795181e-01, 1.09102108e-01,\r\n 1.08374901e-01, -1.91858783e-01, -2.96027455e-02,\r\n -3.41042489e-01, -6.27712980e-02, 1.60076141e-01,\r\n 9.49908346e-02, 2.13903904e-01, -4.00667861e-02,\r\n 1.17478527e-01, 3.45594175e-02, -6.13527335e-02,\r\n 1.59865707e-01, 2.30564013e-01, -1.46134719e-01,\r\n -1.52987137e-01, 7.05056563e-02, -9.04105827e-02,\r\n 5.70735991e-01, 2.43046299e-01, -1.31485641e-01,\r\n -2.49623582e-01, -1.78419068e-01, 1.68241635e-01,\r\n -6.59947246e-02, 5.21681160e-02, -1.06652118e-01,\r\n -9.79458764e-02, -3.29688191e-02]], dtype=float32), 'indices': Array([3, 3, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5, 5, 5, 5], dtype=int32), 'patches': Array([[[[0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n ...,\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.]],\r\n\r\n [[0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 
0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n ...,\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.]],\r\n\r\n [[0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n ...,\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.]],\r\n\r\n ...,\r\n\r\n [[0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n ...,\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.]],\r\n\r\n [[0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n ...,\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.]],\r\n\r\n [[0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n ...,\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.],\r\n [0., 0., 0., ..., 0., 0., 0.]]]], dtype=float32), 'z': Array([[-0.24611266, -0.08915018, -0.21175402, 0.11340406, 0.0714121 ,\r\n -0.10431506, 0.10248755, 0.02543674, 0.01208622, 0.287029 ,\r\n 0.23010997, 0.24168232, -0.2728601 , 0.28035343, 0.11366428,\r\n -0.04016897, 0.12411508, -0.20742124, -0.02079913, 0.15355557,\r\n 0.05924932, -0.10014991, -0.01204041, 0.18450426, 0.29109716,\r\n 0.06596618, -0.24906816, 0.1215218 , -0.1742312 , -0.26995197,\r\n -0.28017378, -0.08593763],\r\n [-0.24611266, -0.08915018, -0.21175402, 0.11340406, 0.0714121 ,\r\n -0.10431506, 0.10248755, 0.02543674, 0.01208622, 0.287029 ,\r\n 0.23010997, 0.24168232, -0.2728601 , 0.28035343, 0.11366428,\r\n -0.04016897, 0.12411508, -0.20742124, -0.02079913, 0.15355557,\r\n 0.05924932, -0.10014991, -0.01204041, 0.18450426, 0.29109716,\r\n 0.06596618, -0.24906816, 0.1215218 , -0.1742312 , -0.26995197,\r\n -0.28017378, -0.08593763],\r\n [ 0.25279966, -0.06678339, 0.22490096, -0.04466017, -0.04649823,\r\n 0.01199109, 0.3082224 , -0.00801151, -0.12698816, 0.06542391,\r\n -0.2358685 , 0.24432254, 0.08294272, 0.09727552, 0.19124971,\r\n 0.19797468, -0.1674422 , 0.19177607, 0.11360856, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719385, -0.23136756, 0.03345071,\r\n -0.23611856, -0.26534843, 0.13201468, -0.14167126, 0.23241246,\r\n -0.1139105 , 0.09649927],\r\n [ 0.25279966, -0.06678339, 0.22490096, -0.04466017, -0.04649823,\r\n 0.01199109, 0.3082224 , -0.00801151, -0.12698816, 0.06542391,\r\n -0.2358685 , 0.24432254, 0.08294272, 0.09727552, 0.19124971,\r\n 0.19797468, -0.1674422 , 0.19177607, 0.11360856, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719385, -0.23136756, 0.03345071,\r\n -0.23611856, -0.26534843, 0.13201468, -0.14167126, 0.23241246,\r\n -0.1139105 , 0.09649927],\r\n [ 0.25279966, -0.06678339, 0.22490096, -0.04466017, -0.04649823,\r\n 0.01199109, 0.3082224 , -0.00801151, -0.12698816, 0.06542391,\r\n -0.2358685 , 0.24432254, 0.08294272, 0.09727552, 0.19124971,\r\n 0.19797468, -0.1674422 , 0.19177607, 0.11360856, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719385, -0.23136756, 0.03345071,\r\n -0.23611856, -0.26534843, 0.13201468, -0.14167126, 0.23241246,\r\n -0.1139105 , 0.09649927],\r\n [ 0.25279966, -0.06678339, 0.22490096, -0.04466017, -0.04649823,\r\n 0.01199109, 0.3082224 , -0.00801151, -0.12698816, 0.06542391,\r\n -0.2358685 , 0.24432254, 0.08294272, 0.09727552, 0.19124971,\r\n 0.19797468, -0.1674422 , 0.19177607, 0.11360856, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719385, 
-0.23136756, 0.03345071,\r\n -0.23611856, -0.26534843, 0.13201468, -0.14167126, 0.23241246,\r\n -0.1139105 , 0.09649927],\r\n [ 0.25279966, -0.06678339, 0.22490096, -0.04466017, -0.04649823,\r\n 0.01199109, 0.3082224 , -0.00801151, -0.12698816, 0.06542391,\r\n -0.2358685 , 0.24432254, 0.08294272, 0.09727552, 0.19124971,\r\n 0.19797468, -0.1674422 , 0.19177607, 0.11360856, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719385, -0.23136756, 0.03345071,\r\n -0.23611856, -0.26534843, 0.13201468, -0.14167126, 0.23241246,\r\n -0.1139105 , 0.09649927],\r\n [ 0.16693924, 0.08904018, -0.26449862, -0.23580232, 0.12990192,\r\n -0.13221331, -0.0738584 , 0.04612291, -0.25945234, -0.16443208,\r\n 0.24324496, -0.18642925, 0.13102445, -0.00571162, -0.2811195 ,\r\n 0.26315957, 0.01092044, 0.14321984, 0.26222572, -0.14718235,\r\n 0.22880435, -0.21418779, 0.0065368 , -0.07261764, 0.06655227,\r\n -0.22275479, -0.10256308, -0.1944964 , 0.17530751, -0.2663396 ,\r\n -0.0399945 , -0.1505961 ],\r\n [ 0.16693924, 0.08904018, -0.26449862, -0.23580232, 0.12990192,\r\n -0.13221331, -0.0738584 , 0.04612291, -0.25945234, -0.16443208,\r\n 0.24324496, -0.18642925, 0.13102445, -0.00571162, -0.2811195 ,\r\n 0.26315957, 0.01092044, 0.14321984, 0.26222572, -0.14718235,\r\n 0.22880435, -0.21418779, 0.0065368 , -0.07261764, 0.06655227,\r\n -0.22275479, -0.10256308, -0.1944964 , 0.17530751, -0.2663396 ,\r\n -0.0399945 , -0.1505961 ],\r\n [ 0.16693924, 0.08904018, -0.26449862, -0.23580232, 0.12990192,\r\n -0.13221331, -0.0738584 , 0.04612291, -0.25945234, -0.16443208,\r\n 0.24324496, -0.18642925, 0.13102445, -0.00571162, -0.2811195 ,\r\n 0.26315957, 0.01092044, 0.14321984, 0.26222572, -0.14718235,\r\n 0.22880435, -0.21418779, 0.0065368 , -0.07261764, 0.06655227,\r\n -0.22275479, -0.10256308, -0.1944964 , 0.17530751, -0.2663396 ,\r\n -0.0399945 , -0.1505961 ],\r\n [ 0.16693924, 0.08904018, -0.26449862, -0.23580232, 0.12990192,\r\n -0.13221331, -0.0738584 , 0.04612291, -0.25945234, -0.16443208,\r\n 0.24324496, -0.18642925, 0.13102445, -0.00571162, -0.2811195 ,\r\n 0.26315957, 0.01092044, 0.14321984, 0.26222572, -0.14718235,\r\n 0.22880435, -0.21418779, 0.0065368 , -0.07261764, 0.06655227,\r\n -0.22275479, -0.10256308, -0.1944964 , 0.17530751, -0.2663396 ,\r\n -0.0399945 , -0.1505961 ],\r\n [ 0.16693924, 0.08904018, -0.26449862, -0.23580232, 0.12990192,\r\n -0.13221331, -0.0738584 , 0.04612291, -0.25945234, -0.16443208,\r\n 0.24324496, -0.18642925, 0.13102445, -0.00571162, -0.2811195 ,\r\n 0.26315957, 0.01092044, 0.14321984, 0.26222572, -0.14718235,\r\n 0.22880435, -0.21418779, 0.0065368 , -0.07261764, 0.06655227,\r\n -0.22275479, -0.10256308, -0.1944964 , 0.17530751, -0.2663396 ,\r\n -0.0399945 , -0.1505961 ],\r\n [ 0.16693924, 0.08904018, -0.26449862, -0.23580232, 0.12990192,\r\n -0.13221331, -0.0738584 , 0.04612291, -0.25945234, -0.16443208,\r\n 0.24324496, -0.18642925, 0.13102445, -0.00571162, -0.2811195 ,\r\n 0.26315957, 0.01092044, 0.14321984, 0.26222572, -0.14718235,\r\n 0.22880435, -0.21418779, 0.0065368 , -0.07261764, 0.06655227,\r\n -0.22275479, -0.10256308, -0.1944964 , 0.17530751, -0.2663396 ,\r\n -0.0399945 , -0.1505961 ],\r\n [ 0.16693924, 0.08904018, -0.26449862, -0.23580232, 0.12990192,\r\n -0.13221331, -0.0738584 , 0.04612291, -0.25945234, -0.16443208,\r\n 0.24324496, -0.18642925, 0.13102445, -0.00571162, -0.2811195 ,\r\n 0.26315957, 0.01092044, 0.14321984, 0.26222572, -0.14718235,\r\n 0.22880435, -0.21418779, 0.0065368 , -0.07261764, 0.06655227,\r\n -0.22275479, -0.10256308, -0.1944964 , 0.17530751, -0.2663396 ,\r\n 
-0.0399945 , -0.1505961 ],\r\n [ 0.16693924, 0.08904018, -0.26449862, -0.23580232, 0.12990192,\r\n -0.13221331, -0.0738584 , 0.04612291, -0.25945234, -0.16443208,\r\n 0.24324496, -0.18642925, 0.13102445, -0.00571162, -0.2811195 ,\r\n 0.26315957, 0.01092044, 0.14321984, 0.26222572, -0.14718235,\r\n 0.22880435, -0.21418779, 0.0065368 , -0.07261764, 0.06655227,\r\n -0.22275479, -0.10256308, -0.1944964 , 0.17530751, -0.2663396 ,\r\n -0.0399945 , -0.1505961 ]], dtype=float32), 'z_q': Array([[[[-0.24611267, -0.08915019, -0.21175404, 0.11340406,\r\n 0.0714121 , -0.10431506, 0.10248756, 0.02543674,\r\n 0.01208622, 0.287029 , 0.23010997, 0.2416823 ,\r\n -0.2728601 , 0.28035343, 0.11366428, -0.04016896,\r\n 0.12411508, -0.20742124, -0.02079913, 0.15355557,\r\n 0.05924932, -0.10014993, -0.01204041, 0.18450427,\r\n 0.29109716, 0.06596619, -0.24906816, 0.1215218 ,\r\n -0.1742312 , -0.26995197, -0.28017378, -0.08593763]],\r\n\r\n [[-0.24611266, -0.08915018, -0.21175402, 0.11340406,\r\n 0.07141209, -0.10431506, 0.10248753, 0.02543674,\r\n 0.01208622, 0.287029 , 0.23010997, 0.24168232,\r\n -0.2728601 , 0.28035343, 0.11366428, -0.04016896,\r\n 0.12411508, -0.20742124, -0.02079913, 0.15355557,\r\n 0.05924931, -0.10014993, -0.01204041, 0.18450427,\r\n 0.29109713, 0.06596619, -0.24906817, 0.1215218 ,\r\n -0.1742312 , -0.26995197, -0.28017378, -0.08593763]],\r\n\r\n [[ 0.25279966, -0.0667834 , 0.22490096, -0.04466017,\r\n -0.04649822, 0.01199108, 0.3082224 , -0.00801151,\r\n -0.12698817, 0.06542391, -0.23586848, 0.24432254,\r\n 0.08294272, 0.09727552, 0.19124973, 0.19797468,\r\n -0.1674422 , 0.19177607, 0.11360857, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719385, -0.23136756,\r\n 0.03345072, -0.23611856, -0.26534843, 0.13201468,\r\n -0.14167127, 0.23241246, -0.1139105 , 0.09649926]],\r\n\r\n [[ 0.25279963, -0.06678338, 0.22490096, -0.04466017,\r\n -0.04649822, 0.01199108, 0.3082224 , -0.00801151,\r\n -0.12698814, 0.06542391, -0.23586848, 0.24432254,\r\n 0.08294272, 0.09727552, 0.19124971, 0.19797468,\r\n -0.16744219, 0.19177605, 0.11360855, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719385, -0.23136756,\r\n 0.03345071, -0.23611856, -0.26534843, 0.13201468,\r\n -0.14167127, 0.23241246, -0.1139105 , 0.09649927]],\r\n\r\n [[ 0.2527997 , -0.0667834 , 0.22490096, -0.04466016,\r\n -0.04649822, 0.01199108, 0.3082224 , -0.00801151,\r\n -0.12698816, 0.06542391, -0.23586851, 0.24432254,\r\n 0.08294272, 0.09727552, 0.19124971, 0.19797468,\r\n -0.16744219, 0.19177607, 0.11360857, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719384, -0.23136756,\r\n 0.03345071, -0.23611856, -0.26534843, 0.13201468,\r\n -0.14167126, 0.23241246, -0.1139105 , 0.09649927]],\r\n\r\n [[ 0.25279966, -0.06678338, 0.22490096, -0.04466017,\r\n -0.04649824, 0.01199108, 0.3082224 , -0.00801151,\r\n -0.12698816, 0.06542391, -0.2358685 , 0.24432254,\r\n 0.08294272, 0.09727552, 0.1912497 , 0.19797468,\r\n -0.1674422 , 0.19177605, 0.11360855, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719385, -0.23136756,\r\n 0.03345072, -0.23611856, -0.26534843, 0.13201468,\r\n -0.14167126, 0.23241244, -0.11391051, 0.09649927]],\r\n\r\n [[ 0.25279966, -0.06678338, 0.22490096, -0.04466017,\r\n -0.04649822, 0.01199109, 0.30822244, -0.00801151,\r\n -0.12698816, 0.06542391, -0.23586848, 0.24432254,\r\n 0.08294272, 0.09727552, 0.19124973, 0.19797468,\r\n -0.16744219, 0.19177605, 0.11360857, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719387, -0.23136756,\r\n 0.03345069, -0.23611856, -0.26534843, 0.13201469,\r\n -0.14167127, 0.23241246, -0.1139105 , 
0.09649928]],\r\n\r\n [[ 0.16693924, 0.08904018, -0.26449862, -0.23580231,\r\n 0.1299019 , -0.13221331, -0.07385841, 0.04612291,\r\n -0.25945234, -0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321986, 0.2622257 , -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655225, -0.22275479, -0.10256307, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693924, 0.08904018, -0.26449862, -0.23580232,\r\n 0.12990192, -0.13221331, -0.07385838, 0.04612291,\r\n -0.25945234, -0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321983, 0.2622257 , -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655225, -0.22275479, -0.10256307, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693923, 0.08904018, -0.26449862, -0.23580232,\r\n 0.12990193, -0.13221331, -0.07385841, 0.04612292,\r\n -0.25945234, -0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321984, 0.26222575, -0.14718235,\r\n 0.22880436, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655228, -0.22275479, -0.10256308, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693924, 0.08904018, -0.26449862, -0.23580234,\r\n 0.12990193, -0.13221331, -0.07385841, 0.04612291,\r\n -0.25945234, -0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321984, 0.2622257 , -0.14718235,\r\n 0.22880433, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655228, -0.22275479, -0.1025631 , -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693926, 0.08904018, -0.26449862, -0.23580232,\r\n 0.12990193, -0.13221331, -0.07385838, 0.04612291,\r\n -0.25945234, -0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321983, 0.2622257 , -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655225, -0.22275479, -0.10256308, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693926, 0.08904018, -0.26449862, -0.23580232,\r\n 0.12990193, -0.13221331, -0.07385838, 0.04612291,\r\n -0.25945234, -0.1644321 , 0.24324496, -0.18642926,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321984, 0.26222572, -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655228, -0.22275479, -0.10256308, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693924, 0.08904018, -0.26449862, -0.23580232,\r\n 0.1299019 , -0.13221331, -0.07385841, 0.04612292,\r\n -0.25945234, -0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321984, 0.26222575, -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655227, -0.22275479, -0.1025631 , -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693924, 0.08904018, -0.26449862, -0.23580232,\r\n 0.12990193, -0.13221331, -0.07385841, 0.04612292,\r\n -0.25945234, -0.16443206, 0.24324496, -0.18642923,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321984, 0.26222575, -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655227, -0.22275479, -0.10256307, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 
]]]], dtype=float32)}\r\n(jdb) ",,terminal_output +5582,10971981,"TERMINAL",0,0,"\rlam_outputs",,terminal_output +5583,10973055,"TERMINAL",0,0,".",,terminal_output +5584,10973253,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5585,10973331,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5586,10973399,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5587,10973524,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5588,10973593,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5589,10973686,"TERMINAL",0,0,"\r\n*** AttributeError: 'dict' object has no attribute 'shape'\r\n(jdb) ",,terminal_output +5590,10974688,"TERMINAL",0,0,"\rlam_outputs.shape",,terminal_output +5591,10975745,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +5592,10975918,"TERMINAL",0,0,"[?25lp\r[?25h",,terminal_output +5593,10976046,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +5594,10976195,"TERMINAL",0,0,"[?25lh\r[?25h",,terminal_output +5595,10976326,"TERMINAL",0,0,"[?25ls\r[?25h",,terminal_output +5596,10976479,"TERMINAL",0,0,"[?25l.\r[?25h",,terminal_output +5597,10976952,"TERMINAL",0,0,".",,terminal_output +5598,10977202,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +5599,10977298,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5600,10977520,"TERMINAL",0,0,"[?25ly[?25h",,terminal_output +5601,10977583,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5602,10977995,"TERMINAL",0,0,"[?25l([?25h[?25l)[?25h",,terminal_output +5603,10978307,"TERMINAL",0,0,"\r\ndict_keys(['emb', 'indices', 'patches', 'z', 'z_q'])\r\n(jdb) ",,terminal_output +5604,10984092,"train_lam.py",0,0,"",python,tab +5605,10986098,"TERMINAL",0,0,"bash",,terminal_focus +5606,10986099,"TERMINAL",0,0,"srun",,terminal_focus +5607,10986099,"TERMINAL",0,0,"bash",,terminal_focus +5608,10987822,"train_lam.py",0,0,"",python,tab +5609,10987823,"train_lam.py",2350,0,"",python,selection_mouse +5610,10987826,"train_lam.py",2349,0,"",python,selection_command +5611,10992628,"train_dynamics.py",0,0,"",python,tab +5612,10993716,"genie.py",0,0,"",python,tab +5613,10995842,"genie.py",2373,0,"",python,selection_mouse +5614,10995952,"genie.py",2369,7,"outputs",python,selection_mouse +5615,10996787,"genie.py",2423,0,"",python,selection_mouse +5616,10997372,"genie.py",2490,0,"",python,selection_mouse +5617,10997892,"genie.py",2484,0,"",python,selection_mouse +5618,10998056,"genie.py",2475,14,"latent_actions",python,selection_mouse +5619,10998761,"genie.py",2521,0,"",python,selection_mouse +5620,10999288,"genie.py",2515,0,"",python,selection_mouse +5621,10999439,"genie.py",2512,11,"lam_outputs",python,selection_mouse +5622,11000300,"genie.py",2483,0,"",python,selection_mouse +5623,11000403,"genie.py",2475,14,"latent_actions",python,selection_mouse +5624,11001014,"TERMINAL",0,0,"srun",,terminal_focus +5625,11002037,"TERMINAL",0,0,"\r(jdb) lam_outputs.keys()",,terminal_output +5626,11002908,"TERMINAL",0,0,"[?25l)\r[?25h",,terminal_output +5627,11003042,"TERMINAL",0,0,"[?25l(\r[?25h",,terminal_output +5628,11003108,"TERMINAL",0,0,"[?25ls\r[?25h",,terminal_output +5629,11003262,"TERMINAL",0,0,"[?25ly\r[?25h",,terminal_output +5630,11003457,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +5631,11003565,"TERMINAL",0,0,"[?25lk\r[?25h",,terminal_output +5632,11003797,"TERMINAL",0,0,"[?25l.\r[?25h",,terminal_output +5633,11004650,"TERMINAL",0,0,"[",,terminal_output +5634,11006183,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +5635,11006615,"TERMINAL",0,0,"[?25lz[?25h",,terminal_output +5636,11006994,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output 
+5637,11007230,"TERMINAL",0,0,"[?25ly[?25h",,terminal_output +5638,11007735,"TERMINAL",0,0,"[?25ly\r[?25h",,terminal_output +5639,11008035,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +5640,11012171,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +5641,11012507,"TERMINAL",0,0,"[?25l][?25h",,terminal_output +5642,11012624,"TERMINAL",0,0,"\r\nArray([[[[-0.24611267, -0.08915019, -0.21175404, 0.11340406,\r\n 0.0714121 , -0.10431506, 0.10248756, 0.02543674,\r\n 0.01208622, 0.287029 , 0.23010997, 0.2416823 ,\r\n -0.2728601 , 0.28035343, 0.11366428, -0.04016896,\r\n 0.12411508, -0.20742124, -0.02079913, 0.15355557,\r\n 0.05924932, -0.10014993, -0.01204041, 0.18450427,\r\n 0.29109716, 0.06596619, -0.24906816, 0.1215218 ,\r\n -0.1742312 , -0.26995197, -0.28017378, -0.08593763]],\r\n\r\n [[-0.24611266, -0.08915018, -0.21175402, 0.11340406,\r\n 0.07141209, -0.10431506, 0.10248753, 0.02543674,\r\n 0.01208622, 0.287029 , 0.23010997, 0.24168232,\r\n -0.2728601 , 0.28035343, 0.11366428, -0.04016896,\r\n 0.12411508, -0.20742124, -0.02079913, 0.15355557,\r\n 0.05924931, -0.10014993, -0.01204041, 0.18450427,\r\n 0.29109713, 0.06596619, -0.24906817, 0.1215218 ,\r\n -0.1742312 , -0.26995197, -0.28017378, -0.08593763]],\r\n\r\n [[ 0.25279966, -0.0667834 , 0.22490096, -0.04466017,\r\n -0.04649822, 0.01199108, 0.3082224 , -0.00801151,\r\n -0.12698817, 0.06542391, -0.23586848, 0.24432254,\r\n 0.08294272, 0.09727552, 0.19124973, 0.19797468,\r\n -0.1674422 , 0.19177607, 0.11360857, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719385, -0.23136756,\r\n 0.03345072, -0.23611856, -0.26534843, 0.13201468,\r\n -0.14167127, 0.23241246, -0.1139105 , 0.09649926]],\r\n\r\n [[ 0.25279963, -0.06678338, 0.22490096, -0.04466017,\r\n -0.04649822, 0.01199108, 0.3082224 , -0.00801151,\r\n -0.12698814, 0.06542391, -0.23586848, 0.24432254,\r\n 0.08294272, 0.09727552, 0.19124971, 0.19797468,\r\n -0.16744219, 0.19177605, 0.11360855, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719385, -0.23136756,\r\n 0.03345071, -0.23611856, -0.26534843, 0.13201468,\r\n -0.14167127, 0.23241246, -0.1139105 , 0.09649927]],\r\n\r\n [[ 0.2527997 , -0.0667834 , 0.22490096, -0.04466016,\r\n -0.04649822, 0.01199108, 0.3082224 , -0.00801151,\r\n -0.12698816, 0.06542391, -0.23586851, 0.24432254,\r\n 0.08294272, 0.09727552, 0.19124971, 0.19797468,\r\n -0.16744219, 0.19177607, 0.11360857, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719384, -0.23136756,\r\n 0.03345071, -0.23611856, -0.26534843, 0.13201468,\r\n -0.14167126, 0.23241246, -0.1139105 , 0.09649927]],\r\n\r\n [[ 0.25279966, -0.06678338, 0.22490096, -0.04466017,\r\n -0.04649824, 0.01199108, 0.3082224 , -0.00801151,\r\n -0.12698816, 0.06542391, -0.2358685 , 0.24432254,\r\n 0.08294272, 0.09727552, 0.1912497 , 0.19797468,\r\n -0.1674422 , 0.19177605, 0.11360855, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719385, -0.23136756,\r\n 0.03345072, -0.23611856, -0.26534843, 0.13201468,\r\n -0.14167126, 0.23241244, -0.11391051, 0.09649927]],\r\n\r\n [[ 0.25279966, -0.06678338, 0.22490096, -0.04466017,\r\n -0.04649822, 0.01199109, 0.30822244, -0.00801151,\r\n -0.12698816, 0.06542391, -0.23586848, 0.24432254,\r\n 0.08294272, 0.09727552, 0.19124973, 0.19797468,\r\n -0.16744219, 0.19177605, 0.11360857, -0.18923096,\r\n 0.12634727, 0.3240725 , -0.15719387, -0.23136756,\r\n 0.03345069, -0.23611856, -0.26534843, 0.13201469,\r\n -0.14167127, 0.23241246, -0.1139105 , 0.09649928]],\r\n\r\n [[ 0.16693924, 0.08904018, -0.26449862, -0.23580231,\r\n 0.1299019 , -0.13221331, -0.07385841, 0.04612291,\r\n -0.25945234, 
-0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321986, 0.2622257 , -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655225, -0.22275479, -0.10256307, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693924, 0.08904018, -0.26449862, -0.23580232,\r\n 0.12990192, -0.13221331, -0.07385838, 0.04612291,\r\n -0.25945234, -0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321983, 0.2622257 , -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655225, -0.22275479, -0.10256307, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693923, 0.08904018, -0.26449862, -0.23580232,\r\n 0.12990193, -0.13221331, -0.07385841, 0.04612292,\r\n -0.25945234, -0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321984, 0.26222575, -0.14718235,\r\n 0.22880436, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655228, -0.22275479, -0.10256308, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693924, 0.08904018, -0.26449862, -0.23580234,\r\n 0.12990193, -0.13221331, -0.07385841, 0.04612291,\r\n -0.25945234, -0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321984, 0.2622257 , -0.14718235,\r\n 0.22880433, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655228, -0.22275479, -0.1025631 , -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693926, 0.08904018, -0.26449862, -0.23580232,\r\n 0.12990193, -0.13221331, -0.07385838, 0.04612291,\r\n -0.25945234, -0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321983, 0.2622257 , -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655225, -0.22275479, -0.10256308, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693926, 0.08904018, -0.26449862, -0.23580232,\r\n 0.12990193, -0.13221331, -0.07385838, 0.04612291,\r\n -0.25945234, -0.1644321 , 0.24324496, -0.18642926,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321984, 0.26222572, -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655228, -0.22275479, -0.10256308, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693924, 0.08904018, -0.26449862, -0.23580232,\r\n 0.1299019 , -0.13221331, -0.07385841, 0.04612292,\r\n -0.25945234, -0.16443208, 0.24324496, -0.18642925,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321984, 0.26222575, -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655227, -0.22275479, -0.1025631 , -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]],\r\n\r\n [[ 0.16693924, 0.08904018, -0.26449862, -0.23580232,\r\n 0.12990193, -0.13221331, -0.07385841, 0.04612292,\r\n -0.25945234, -0.16443206, 0.24324496, -0.18642923,\r\n 0.13102445, -0.00571162, -0.2811195 , 0.26315957,\r\n 0.01092044, 0.14321984, 0.26222575, -0.14718235,\r\n 0.22880435, -0.2141878 , 0.0065368 , -0.07261764,\r\n 0.06655227, -0.22275479, -0.10256307, -0.1944964 ,\r\n 0.17530751, -0.2663396 , -0.0399945 , -0.1505961 ]]]], dtype=float32)\r\n(jdb) ",,terminal_output +5643,11014625,"TERMINAL",0,0,"\rlam_outputs[""z_q""]",,terminal_output 
+5644,11015641,"TERMINAL",0,0,",",,terminal_output +5645,11016152,"TERMINAL",0,0,"[?25l,\r[?25h",,terminal_output +5646,11016358,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +5647,11017085,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5648,11017158,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5649,11017266,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5650,11018092,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5651,11018399,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5652,11018512,"TERMINAL",0,0,"\r\n(1, 15, 1, 32)\r\n(jdb) ",,terminal_output +5653,11120290,"genie.py",0,0,"",python,tab +5654,11120291,"genie.py",2590,0,"",python,selection_mouse +5655,11120294,"genie.py",2589,0,"",python,selection_command +5656,11120806,"genie.py",2520,0,"",python,selection_mouse +5657,11121438,"genie.py",2507,0,"",python,selection_mouse +5658,11122014,"genie.py",2420,0,"",python,selection_mouse +5659,11122527,"genie.py",2496,0,"",python,selection_mouse +5660,11134184,"genie.py",0,0,"",python,tab +5661,11134185,"genie.py",2291,0,"",python,selection_mouse +5662,11134281,"genie.py",2287,9,"vq_encode",python,selection_mouse +5663,11141218,"TERMINAL",0,0,"b",,terminal_output +5664,11141352,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5665,11141416,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5666,11141544,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5667,11142065,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5668,11142573,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +5669,11142759,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5670,11143239,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5671,11143423,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5672,11143476,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5673,11143791,"TERMINAL",0,0,"[?25lp\r[?25h",,terminal_output +5674,11143991,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +5675,11144123,"TERMINAL",0,0,"[?25lh\r[?25h",,terminal_output +5676,11144236,"TERMINAL",0,0,"[?25ls\r[?25h",,terminal_output +5677,11144735,"TERMINAL",0,0,"[?25l.\r[?25h",,terminal_output +5678,11145130,"TERMINAL",0,0,"[?25l[[?25h",,terminal_output +5679,11145581,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +5680,11146130,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +5681,11146285,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5682,11146511,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5683,11146731,"TERMINAL",0,0,"[?25le[?25h[?25lo[?25h",,terminal_output +5684,11146934,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5685,11147192,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +5686,11147675,"TERMINAL",0,0,"[?25l][?25h",,terminal_output +5687,11148034,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +5688,11148647,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5689,11148779,"TERMINAL",0,0,"[?25lj[?25h",,terminal_output +5690,11148888,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5691,11149047,"TERMINAL",0,0,"ü",,terminal_output +5692,11149559,"TERMINAL",0,0,"[?25lü\r[?25h",,terminal_output +5693,11149697,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +5694,11149815,"TERMINAL",0,0,"[?25lj\r[?25h",,terminal_output +5695,11150202,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5696,11150383,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5697,11150436,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5698,11150651,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5699,11150764,"TERMINAL",0,0,"\r\n(1, 16, 90, 160, 3)\r\n(jdb) ",,terminal_output +5700,11158086,"train_dynamics.py",0,0,"",python,tab +5701,11163125,"train_dynamics.py",664,0,"",python,selection_mouse 
+5702,11163126,"train_dynamics.py",663,0,"",python,selection_command +5703,11165281,"train_dynamics.py",9623,0,"",python,selection_command +5704,11167908,"train_dynamics.py",748,1,"1",python,selection_command +5705,11171883,"train_dynamics.py",777,1,"3",python,selection_command +5706,11171988,"train_dynamics.py",1074,2,"32",python,selection_command +5707,11172733,"train_dynamics.py",1299,2,"32",python,selection_command +5708,11179804,"train_dynamics.py",1301,0,"",python,selection_mouse +5709,11180796,"train_dynamics.py",1300,0,"",python,selection_command +5710,11182989,"genie.py",0,0,"",python,tab +5711,11184618,"genie.py",2360,0,"",python,selection_mouse +5712,11184635,"genie.py",2359,0,"",python,selection_command +5713,11185137,"genie.py",2295,0,"",python,selection_mouse +5714,11185836,"genie.py",2411,0,"",python,selection_mouse +5715,11186401,"genie.py",2425,0,"",python,selection_mouse +5716,11186960,"genie.py",2435,0,"",python,selection_mouse +5717,11187605,"genie.py",2438,0,"",python,selection_mouse +5718,11188214,"genie.py",2517,0,"",python,selection_mouse +5719,11197829,"genie.py",2609,0,"",python,selection_mouse +5720,11198634,"genie.py",2542,0,"",python,selection_mouse +5721,11198635,"genie.py",2541,0,"",python,selection_command +5722,11199663,"genie.py",2519,0,"",python,selection_mouse +5723,11199824,"genie.py",2512,11,"lam_outputs",python,selection_mouse +5724,11203512,"genie.py",2293,0,"",python,selection_mouse +5725,11203640,"genie.py",2287,9,"vq_encode",python,selection_mouse +5726,11231337,"genie.py",0,0,"",python,tab +5727,11231338,"genie.py",2360,0,"",python,selection_mouse +5728,11231344,"genie.py",2359,0,"",python,selection_command +5729,11247642,"TERMINAL",0,0,"s",,terminal_output +5730,11247832,"TERMINAL",0,0,"[?25le[?25h[?25ll[?25h",,terminal_output +5731,11247988,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +5732,11248054,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +5733,11248476,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +5734,11248670,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +5735,11248855,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5736,11250852,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +5737,11251035,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5738,11252357,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +5739,11252469,"TERMINAL",0,0,"[?25ll\r[?25h",,terminal_output +5740,11253115,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +5741,11253383,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +5742,11253491,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5743,11253670,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5744,11253884,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5745,11253950,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +5746,11254107,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5747,11254459,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +5748,11254744,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5749,11255102,"TERMINAL",0,0,"[?25ls\r[?25ha",,terminal_output +5750,11255271,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5751,11255433,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5752,11255566,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5753,11255635,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +5754,11255795,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +5755,11255860,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +5756,11256212,"TERMINAL",0,0,"\r\n6\r\n(jdb) ",,terminal_output +5757,11257661,"TERMINAL",0,0,"\rself.num_latent_actions",,terminal_output +5758,11261719,"TERMINAL",0,0,"[?25ls\r[?25h",,terminal_output 
+5759,11262426,"TERMINAL",0,0,"[?25lon\r[?25h[?25lo\r[?25h[?25li\r[?25h[?25lt\r[?25h[?25lc\r[?25h[?25la\r[?25h[?25l_\r[?25h[?25lt\r[?25h\r",,terminal_output +5760,11262481,"TERMINAL",0,0,"[?25lte\r[?25h\r",,terminal_output +5761,11262611,"TERMINAL",0,0,"[?25lla\r[?25h[?25ll\r[?25h[?25l_\r[?25h\r",,terminal_output +5762,11262751,"TERMINAL",0,0,"[?25lu\r[?25h",,terminal_output +5763,11262978,"TERMINAL",0,0,"[?25ln\r[?25h",,terminal_output +5764,11272512,"TERMINAL",0,0,"l",,terminal_output +5765,11272781,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5766,11273394,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5767,11273699,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +5768,11273849,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +5769,11274018,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5770,11274230,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +5771,11274600,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +5772,11274708,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +5773,11274910,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +5774,11275019,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5775,11275636,"TERMINAL",0,0,"[?25lh\r[?25h",,terminal_output +5776,11275801,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +5777,11275866,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +5778,11276191,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +5779,11276466,"TERMINAL",0,0,"[?25ld[?25h",,terminal_output +5780,11276537,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +5781,11276603,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +5782,11276807,"TERMINAL",0,0,"\r\n32\r\n(jdb) ",,terminal_output +5783,11281125,"genie.py",0,0,"",python,tab +5784,11281127,"genie.py",2542,0,"",python,selection_mouse +5785,11281133,"genie.py",2541,0,"",python,selection_command +5786,11284235,"genie.py",2288,0,"",python,selection_mouse +5787,11284849,"genie.py",2360,0,"",python,selection_mouse +5788,11284864,"genie.py",2359,0,"",python,selection_command +5789,11285820,"genie.py",2360,0,"\n ",python,content +5790,11295185,"genie.py",2360,0,"",python,selection_mouse +5791,11317140,"genie.py",0,0,"",python,tab +5792,11330331,"genie.py",2361,0,"",python,selection_command +5793,11331991,"genie.py",2361,0," # INSERT_YOUR_CODE\n",python,content +5794,11332023,"genie.py",2388,0," batch_size = batch[""videos""].shape[0]\n",python,content +5795,11332150,"genie.py",2434,0," seq_len = batch[""videos""].shape[1]\n",python,content +5796,11332405,"genie.py",2477,0," arr = jnp.zeros((batch_size, seq_len - 1, 1, self.latent_patch_dim))\n",python,content +5797,11332408,"genie.py",2554,9,"",python,content +5798,11335470,"genie.py",2387,0,"",python,selection_mouse +5799,11335632,"genie.py",2371,16,"INSERT_YOUR_CODE",python,selection_mouse +5800,11335646,"genie.py",2372,15,"NSERT_YOUR_CODE",python,selection_command +5801,11335844,"genie.py",2361,27," # INSERT_YOUR_CODE\n",python,selection_mouse +5802,11343166,"genie.py",2361,27,"",python,content +5803,11346620,"genie.py",2465,0,"",python,selection_mouse +5804,11347533,"genie.py",2417,0,"",python,selection_mouse +5805,11348123,"genie.py",2534,0,"",python,selection_mouse +5806,11348574,"genie.py",2459,0,"",python,selection_mouse +5807,11348725,"genie.py",2458,3,"arr",python,selection_mouse +5808,11349072,"genie.py",2458,3,"l",python,content +5809,11349073,"genie.py",2459,0,"",python,selection_keyboard +5810,11349213,"genie.py",2459,0,"a",python,content +5811,11349214,"genie.py",2460,0,"",python,selection_keyboard +5812,11349284,"genie.py",2460,0,"m",python,content 
+5813,11349285,"genie.py",2461,0,"",python,selection_keyboard +5814,11349661,"genie.py",2461,0,"_",python,content +5815,11349662,"genie.py",2462,0,"",python,selection_keyboard +5816,11349963,"genie.py",2462,0,"o",python,content +5817,11349964,"genie.py",2463,0,"",python,selection_keyboard +5818,11350152,"genie.py",2463,0,"u",python,content +5819,11350153,"genie.py",2464,0,"",python,selection_keyboard +5820,11350303,"genie.py",2464,0,"t",python,content +5821,11350303,"genie.py",2465,0,"",python,selection_keyboard +5822,11350436,"genie.py",2465,0,"p",python,content +5823,11350437,"genie.py",2466,0,"",python,selection_keyboard +5824,11350638,"genie.py",2466,0,"u",python,content +5825,11350639,"genie.py",2467,0,"",python,selection_keyboard +5826,11350716,"genie.py",2467,0,"t",python,content +5827,11350717,"genie.py",2468,0,"",python,selection_keyboard +5828,11350918,"genie.py",2468,0,"s",python,content +5829,11350919,"genie.py",2469,0,"",python,selection_keyboard +5830,11351274,"genie.py",2469,0,"_",python,content +5831,11351275,"genie.py",2470,0,"",python,selection_keyboard +5832,11351529,"genie.py",2470,0,"m",python,content +5833,11351530,"genie.py",2471,0,"",python,selection_keyboard +5834,11351685,"genie.py",2471,0,"o",python,content +5835,11351686,"genie.py",2472,0,"",python,selection_keyboard +5836,11351883,"genie.py",2472,0,"d",python,content +5837,11351884,"genie.py",2473,0,"",python,selection_keyboard +5838,11352468,"genie.py",2472,1,"",python,content +5839,11352631,"genie.py",2472,0,"c",python,content +5840,11352632,"genie.py",2473,0,"",python,selection_keyboard +5841,11352683,"genie.py",2473,0,"k",python,content +5842,11352684,"genie.py",2474,0,"",python,selection_keyboard +5843,11353476,"genie.py",2473,0,"",python,selection_command +5844,11354628,"genie.py",2539,0,"\n ",python,content +5845,11355627,"genie.py",2548,0,"l",python,content +5846,11355628,"genie.py",2549,0,"",python,selection_keyboard +5847,11355699,"genie.py",2549,0,"a",python,content +5848,11355700,"genie.py",2550,0,"",python,selection_keyboard +5849,11355822,"genie.py",2550,0,"m",python,content +5850,11355823,"genie.py",2551,0,"",python,selection_keyboard +5851,11356564,"genie.py",2548,3,"lam_outputs",python,content +5852,11356925,"genie.py",2559,0," ",python,content +5853,11356926,"genie.py",2560,0,"",python,selection_keyboard +5854,11357134,"genie.py",2560,0,"=",python,content +5855,11357135,"genie.py",2561,0,"",python,selection_keyboard +5856,11357241,"genie.py",2561,0," ",python,content +5857,11357241,"genie.py",2562,0,"",python,selection_keyboard +5858,11359122,"genie.py",2561,0,"",python,selection_command +5859,11359790,"genie.py",2540,23,"",python,content +5860,11359796,"genie.py",2548,0,"",python,selection_command +5861,11360238,"genie.py",2458,0,"",python,selection_command +5862,11361093,"genie.py",2459,0,"",python,selection_command +5863,11361597,"genie.py",2460,0,"",python,selection_command +5864,11361723,"genie.py",2461,0,"",python,selection_command +5865,11361905,"genie.py",2462,0,"",python,selection_command +5866,11362405,"genie.py",2463,0,"",python,selection_command +5867,11362462,"genie.py",2464,0,"",python,selection_command +5868,11362463,"genie.py",2465,0,"",python,selection_command +5869,11362532,"genie.py",2466,0,"",python,selection_command +5870,11362533,"genie.py",2467,0,"",python,selection_command +5871,11362544,"genie.py",2468,0,"",python,selection_command +5872,11362595,"genie.py",2469,0,"",python,selection_command +5873,11362605,"genie.py",2470,0,"",python,selection_command 
+5874,11362651,"genie.py",2471,0,"",python,selection_command +5875,11362663,"genie.py",2472,0,"",python,selection_command +5876,11362775,"genie.py",2473,0,"",python,selection_command +5877,11362935,"genie.py",2474,0,"",python,selection_command +5878,11364899,"genie.py",2473,0,"",python,selection_command +5879,11365149,"genie.py",2472,0,"",python,selection_command +5880,11365665,"genie.py",2471,0,"",python,selection_command +5881,11365673,"genie.py",2470,0,"",python,selection_command +5882,11365736,"genie.py",2469,0,"",python,selection_command +5883,11365740,"genie.py",2468,0,"",python,selection_command +5884,11365795,"genie.py",2467,0,"",python,selection_command +5885,11365819,"genie.py",2466,0,"",python,selection_command +5886,11365832,"genie.py",2465,0,"",python,selection_command +5887,11365859,"genie.py",2464,0,"",python,selection_command +5888,11365918,"genie.py",2463,0,"",python,selection_command +5889,11365918,"genie.py",2462,0,"",python,selection_command +5890,11365978,"genie.py",2461,0,"",python,selection_command +5891,11366102,"genie.py",2460,0,"",python,selection_command +5892,11366232,"genie.py",2459,0,"",python,selection_command +5893,11366355,"genie.py",2458,0,"",python,selection_command +5894,11366572,"genie.py",2457,0,"",python,selection_command +5895,11367212,"genie.py",2457,1,"",python,content +5896,11368316,"genie.py",2457,16,"",python,content +5897,11369002,"genie.py",2457,0," ",python,content +5898,11369380,"genie.py",2458,0,"p",python,content +5899,11369380,"genie.py",2459,0,"",python,selection_keyboard +5900,11369748,"genie.py",2458,1,"",python,content +5901,11369933,"genie.py",2458,0,"l",python,content +5902,11369934,"genie.py",2459,0,"",python,selection_keyboard +5903,11370047,"genie.py",2459,0,"a",python,content +5904,11370048,"genie.py",2460,0,"",python,selection_keyboard +5905,11370314,"genie.py",2460,0,"t",python,content +5906,11370315,"genie.py",2461,0,"",python,selection_keyboard +5907,11370619,"genie.py",2461,0,"e",python,content +5908,11370620,"genie.py",2462,0,"",python,selection_keyboard +5909,11370723,"genie.py",2462,0,"n",python,content +5910,11370724,"genie.py",2463,0,"",python,selection_keyboard +5911,11370850,"genie.py",2463,0,"t",python,content +5912,11370851,"genie.py",2464,0,"",python,selection_keyboard +5913,11371053,"genie.py",2464,0,"_",python,content +5914,11371054,"genie.py",2465,0,"",python,selection_keyboard +5915,11371241,"genie.py",2465,0,"a",python,content +5916,11371242,"genie.py",2466,0,"",python,selection_keyboard +5917,11371354,"genie.py",2466,0,"c",python,content +5918,11371355,"genie.py",2467,0,"",python,selection_keyboard +5919,11371530,"genie.py",2467,0,"t",python,content +5920,11371530,"genie.py",2468,0,"",python,selection_keyboard +5921,11371658,"genie.py",2468,0,"i",python,content +5922,11371659,"genie.py",2469,0,"",python,selection_keyboard +5923,11371733,"genie.py",2469,0,"o",python,content +5924,11371734,"genie.py",2470,0,"",python,selection_keyboard +5925,11371852,"genie.py",2470,0,"n",python,content +5926,11371853,"genie.py",2471,0,"",python,selection_keyboard +5927,11372008,"genie.py",2471,0,"s",python,content +5928,11372009,"genie.py",2472,0,"",python,selection_keyboard +5929,11372151,"genie.py",2472,0,"_",python,content +5930,11372152,"genie.py",2473,0,"",python,selection_keyboard +5931,11372429,"genie.py",2473,0,"m",python,content +5932,11372430,"genie.py",2474,0,"",python,selection_keyboard +5933,11372614,"genie.py",2474,0,"o",python,content +5934,11372615,"genie.py",2475,0,"",python,selection_keyboard 
+5935,11372765,"genie.py",2475,0,"c",python,content +5936,11372766,"genie.py",2476,0,"",python,selection_keyboard +5937,11373121,"genie.py",2476,0,"k",python,content +5938,11373122,"genie.py",2477,0,"",python,selection_keyboard +5939,11373206,"genie.py",2477,0,"s",python,content +5940,11373207,"genie.py",2478,0,"",python,selection_keyboard +5941,11373635,"genie.py",2477,1,"",python,content +5942,11373682,"genie.py",2477,0,"e",python,content +5943,11373683,"genie.py",2478,0,"",python,selection_keyboard +5944,11373750,"genie.py",2478,0,"d",python,content +5945,11373751,"genie.py",2479,0,"",python,selection_keyboard +5946,11374345,"genie.py",2478,0,"",python,selection_command +5947,11374475,"genie.py",2567,0,"",python,selection_command +5948,11375227,"genie.py",2592,0,"",python,selection_command +5949,11375432,"genie.py",2593,0,"",python,selection_command +5950,11375693,"genie.py",2671,0,"",python,selection_command +5951,11375872,"genie.py",2672,0,"",python,selection_command +5952,11376363,"genie.py",2673,0,"",python,selection_command +5953,11376424,"genie.py",2674,0,"",python,selection_command +5954,11376425,"genie.py",2675,0,"",python,selection_command +5955,11376448,"genie.py",2676,0,"",python,selection_command +5956,11376498,"genie.py",2677,0,"",python,selection_command +5957,11376519,"genie.py",2678,0,"",python,selection_command +5958,11376554,"genie.py",2679,0,"",python,selection_command +5959,11376567,"genie.py",2680,0,"",python,selection_command +5960,11376591,"genie.py",2681,0,"",python,selection_command +5961,11376630,"genie.py",2682,0,"",python,selection_command +5962,11376657,"genie.py",2683,0,"",python,selection_command +5963,11376685,"genie.py",2684,0,"",python,selection_command +5964,11376725,"genie.py",2685,0,"",python,selection_command +5965,11376751,"genie.py",2686,0,"",python,selection_command +5966,11376778,"genie.py",2687,0,"",python,selection_command +5967,11376805,"genie.py",2688,0,"",python,selection_command +5968,11376833,"genie.py",2689,0,"",python,selection_command +5969,11376864,"genie.py",2690,0,"",python,selection_command +5970,11376892,"genie.py",2691,0,"",python,selection_command +5971,11376930,"genie.py",2692,0,"",python,selection_command +5972,11376958,"genie.py",2693,0,"",python,selection_command +5973,11376983,"genie.py",2694,0,"",python,selection_command +5974,11377014,"genie.py",2695,0,"",python,selection_command +5975,11377257,"genie.py",2696,0,"",python,selection_command +5976,11377571,"genie.py",2696,11,"",python,content +5977,11377917,"genie.py",2696,1,"",python,content +5978,11378104,"genie.py",2696,1,"",python,content +5979,11378302,"genie.py",2696,1,"",python,content +5980,11378478,"genie.py",2696,1,"",python,content +5981,11378658,"genie.py",2696,1,"",python,content +5982,11378852,"genie.py",2696,1,"",python,content +5983,11379258,"genie.py",2696,1,"",python,content +5984,11379634,"genie.py",2696,0,"l",python,content +5985,11379635,"genie.py",2697,0,"",python,selection_keyboard +5986,11379872,"genie.py",2697,0,"a",python,content +5987,11379873,"genie.py",2698,0,"",python,selection_keyboard +5988,11380134,"genie.py",2698,0,"t",python,content +5989,11380135,"genie.py",2699,0,"",python,selection_keyboard +5990,11380468,"genie.py",2699,0,"e",python,content +5991,11380469,"genie.py",2700,0,"",python,selection_keyboard +5992,11380624,"genie.py",2700,0,"n",python,content +5993,11380625,"genie.py",2701,0,"",python,selection_keyboard +5994,11381349,"genie.py",2696,5,"latent_actions_mocked",python,content 
+5995,11381671,"genie.py",2716,0,"",python,selection_command +5996,11381858,"genie.py",2638,0,"",python,selection_command +5997,11382026,"genie.py",2567,0,"",python,selection_command +5998,11382164,"genie.py",2519,0,"",python,selection_command +5999,11383576,"genie.py",2270,0,"",python,selection_mouse +6000,11384584,"genie.py",2264,0,"",python,selection_command +6001,11385422,"genie.py",2264,0,"#",python,content +6002,11385423,"genie.py",2265,0,"",python,selection_keyboard +6003,11385498,"genie.py",2265,0," ",python,content +6004,11385498,"genie.py",2266,0,"",python,selection_keyboard +6005,11385931,"genie.py",2265,0,"",python,selection_command +6006,11386164,"genie.py",2341,0,"",python,selection_command +6007,11389139,"genie.py",2332,31,"",python,content +6008,11389164,"genie.py",2340,0,"",python,selection_command +6009,11391698,"TERMINAL",0,0,"\rself.latent_patch_dim",,terminal_output +6010,11392665,"TERMINAL",0,0,"\r[4@num_actions",,terminal_output +6011,11393155,"TERMINAL",0,0,"\rpatch_dim",,terminal_output +6012,11393317,"TERMINAL",0,0,"\r",,terminal_output +6013,11394252,"TERMINAL",0,0,"^DERROR:2025-06-30 17:33:58,599:jax._src.debugging:96: jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/exit_hooks.py"", line 36, in exit\r\n self._orig_exit(orig_code) # type: ignore\r\nSystemExit: 0\r\nERROR:jax._src.debugging:jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/exit_hooks.py"", line 36, in exit\r\n self._orig_exit(orig_code) # type: ignore\r\nSystemExit: 0\r\n",,terminal_output +6014,11395892,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-tiny-overfit-big-lr-0000 at: https://wandb.ai/instant-uv/jafar/runs/4d6085mf\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_172528-4d6085mf/logs\r\n",,terminal_output +6015,11397563,"genie.py",0,0,"",python,tab +6016,11397564,"genie.py",2296,0,"",python,selection_mouse +6017,11398087,"genie.py",2213,0,"",python,selection_mouse +6018,11398774,"genie.py",2255,0,"\n ",python,content +6019,11399310,"genie.py",2264,0,"#",python,content +6020,11399311,"genie.py",2265,0,"",python,selection_keyboard +6021,11399522,"genie.py",2265,0," ",python,content +6022,11399522,"genie.py",2266,0,"",python,selection_keyboard +6023,11399765,"genie.py",2266,0,"F",python,content +6024,11399766,"genie.py",2267,0,"",python,selection_keyboard +6025,11399834,"genie.py",2267,0,"I",python,content +6026,11399835,"genie.py",2268,0,"",python,selection_keyboard +6027,11400007,"genie.py",2268,0,"X",python,content +6028,11400008,"genie.py",2269,0,"",python,selection_keyboard +6029,11400121,"genie.py",2269,0,"M",python,content +6030,11400121,"genie.py",2270,0,"",python,selection_keyboard +6031,11400206,"genie.py",2270,0,"E",python,content +6032,11400206,"genie.py",2271,0,"",python,selection_keyboard +6033,11400355,"genie.py",2271,0," ",python,content +6034,11400356,"genie.py",2272,0,"",python,selection_keyboard +6035,11400672,"genie.py",2272,0,"M",python,content +6036,11400672,"genie.py",2273,0,"",python,selection_keyboard +6037,11400916,"TERMINAL",0,0,"]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +6038,11401101,"genie.py",2272,1,"",python,content +6039,11401373,"genie.py",2272,0,"@",python,content +6040,11401374,"genie.py",2273,0,"",python,selection_keyboard +6041,11401629,"genie.py",2273,0,"m",python,content +6042,11401630,"genie.py",2274,0,"",python,selection_keyboard +6043,11401868,"genie.py",2274,0,"h",python,content +6044,11401869,"genie.py",2275,0,"",python,selection_keyboard +6045,11401949,"genie.py",2275,0,"i",python,content 
+6046,11401950,"genie.py",2276,0,"",python,selection_keyboard +6047,11402199,"genie.py",2275,1,"",python,content +6048,11402342,"genie.py",2274,1,"",python,content +6049,11402528,"genie.py",2274,0,"i",python,content +6050,11402529,"genie.py",2275,0,"",python,selection_keyboard +6051,11402594,"genie.py",2275,0,"h",python,content +6052,11402595,"genie.py",2276,0,"",python,selection_keyboard +6053,11402700,"genie.py",2276,0,"i",python,content +6054,11402700,"genie.py",2277,0,"",python,selection_keyboard +6055,11402763,"genie.py",2277,0,"r",python,content +6056,11402764,"genie.py",2278,0,"",python,selection_keyboard +6057,11403765,"genie.py",2472,0,"",python,selection_mouse +6058,11404245,"genie.py",2471,0,"",python,selection_command +6059,11404664,"genie.py",2538,0,"\n ",python,content +6060,11405646,"genie.py",2543,4,"",python,content +6061,11405784,"genie.py",2539,4,"",python,content +6062,11406079,"genie.py",2538,1,"",python,content +6063,11406292,"genie.py",2537,0,"",python,selection_command +6064,11406833,"genie.py",2561,0,"",python,selection_command +6065,11406994,"genie.py",2639,0,"",python,selection_command +6066,11407191,"genie.py",2712,0,"",python,selection_command +6067,11407357,"genie.py",2722,0,"",python,selection_command +6068,11407826,"genie.py",2723,0,"\n ",python,content +6069,11408645,"genie.py",2732,0,"#",python,content +6070,11408646,"genie.py",2733,0,"",python,selection_keyboard +6071,11408948,"genie.py",2733,0,"#",python,content +6072,11408949,"genie.py",2734,0,"",python,selection_keyboard +6073,11409109,"genie.py",2734,0," ",python,content +6074,11409110,"genie.py",2735,0,"",python,selection_keyboard +6075,11409467,"genie.py",2734,0,"",python,selection_command +6076,11411760,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +6077,11417062,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +6078,11419136,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0408 jafar]$ s\r\n\rh scripts_horeka/train_dynamics.sh ",,terminal_output +6079,11421034,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0408 jafar]$ sh scripts_horeka/train_dynamics.sh ",,terminal_output +6080,11423409,"scripts_horeka/train_dynamics.sh",1509,0,"",shellscript,selection_mouse +6081,11424283,"scripts_horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch",0,0,"",shellscript,tab +6082,11428050,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +6083,11429648,"train_dynamics.py",0,0,"",python,tab +6084,11430820,"train_dynamics.py",1261,0,"",python,selection_mouse +6085,11430961,"train_dynamics.py",1260,3,"int",python,selection_mouse +6086,11431477,"train_dynamics.py",1315,0,"",python,selection_mouse +6087,11431626,"train_dynamics.py",1306,18,"num_latent_actions",python,selection_mouse +6088,11432656,"train_dynamics.py",1311,0,"",python,selection_mouse +6089,11432657,"train_dynamics.py",1306,18,"num_latent_actions",python,selection_mouse +6090,11439755,"utils/dataloader.py",0,0,"",python,tab +6091,11441676,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +6092,11442489,"scripts_horeka/train_dynamics.sh",1459,0,"",shellscript,selection_mouse +6093,11442920,"scripts_horeka/train_dynamics.sh",1508,0,"",shellscript,selection_mouse +6094,11443585,"scripts_horeka/train_dynamics.sh",1508,0," ",shellscript,content +6095,11443586,"scripts_horeka/train_dynamics.sh",1509,0,"",shellscript,selection_keyboard +6096,11443957,"scripts_horeka/train_dynamics.sh",1509,0,"\",shellscript,content +6097,11443958,"scripts_horeka/train_dynamics.sh",1510,0,"",shellscript,selection_keyboard 
+6098,11444094,"scripts_horeka/train_dynamics.sh",1510,0,"\n ",shellscript,content +6099,11445615,"scripts_horeka/train_dynamics.sh",1511,4,"",shellscript,content +6100,11445806,"scripts_horeka/train_dynamics.sh",1511,0,"-",shellscript,content +6101,11445807,"scripts_horeka/train_dynamics.sh",1512,0,"",shellscript,selection_keyboard +6102,11445965,"scripts_horeka/train_dynamics.sh",1512,0,"-",shellscript,content +6103,11445966,"scripts_horeka/train_dynamics.sh",1513,0,"",shellscript,selection_keyboard +6104,11447035,"scripts_horeka/train_dynamics.sh",1513,0,"num_latent_actions",shellscript,content +6105,11448147,"scripts_horeka/train_dynamics.sh",1511,0,"",shellscript,selection_mouse +6106,11448388,"scripts_horeka/train_dynamics.sh",1511,0," ",shellscript,content +6107,11448389,"scripts_horeka/train_dynamics.sh",1512,0,"",shellscript,selection_keyboard +6108,11449132,"scripts_horeka/train_dynamics.sh",1511,1,"",shellscript,content +6109,11449231,"scripts_horeka/train_dynamics.sh",1511,0," ",shellscript,content +6110,11450085,"scripts_horeka/train_dynamics.sh",1535,0,"",shellscript,selection_mouse +6111,11450713,"scripts_horeka/train_dynamics.sh",1535,0,"=",shellscript,content +6112,11450714,"scripts_horeka/train_dynamics.sh",1536,0,"",shellscript,selection_keyboard +6113,11451057,"scripts_horeka/train_dynamics.sh",1536,0,"1",shellscript,content +6114,11451058,"scripts_horeka/train_dynamics.sh",1537,0,"",shellscript,selection_keyboard +6115,11453632,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +6116,11453760,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3458179\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0408\r\nSLURM_JOB_START_TIME=1751296933\r\nSLURM_STEP_NODELIST=hkn0408\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300533\r\nSLURM_PMI2_SRUN_PORT=38655\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306855\r\nSLURM_PTY_PORT=36241\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e12.hkn0408\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0408\r\nSLURM_SRUN_COMM_PORT=42547\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306855\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0408\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=42547\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0408\r\n",,terminal_output +6117,11456411,"TERMINAL",0,0,"2025-06-30 17:35:00.789830: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been 
registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751297700.802780 3474566 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751297700.806872 3474566 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751297700.819045 3474566 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297700.819072 3474566 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297700.819074 3474566 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297700.819076 3474566 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +6118,11460187,"TERMINAL",0,0,"W0000 00:00:1751297704.614604 3474566 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +6119,11460534,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +6120,11461413,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +6121,11461972,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_173505-8a3o8xbw\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-tiny-overfit-big-lr-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/8a3o8xbw\r\n",,terminal_output +6122,11463578,"TERMINAL",0,0,"2025-06-30 17:35:07.916358: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6123,11477315,"TERMINAL",0,0,"2025-06-30 17:35:21.737936: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6124,11483035,"TERMINAL",0,0,"2025-06-30 17:35:27.433995: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6125,11489989,"TERMINAL",0,0,"2025-06-30 17:35:34.414095: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6126,11518258,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 198, in \r\n train_state = restore_genie_components(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 304, in restore_genie_components\r\n restored_lam_params = {\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 307, in \r\n if k in train_state.params[""params""][""lam""]\r\nKeyError: 'lam'\r\n",,terminal_output +6127,11519400,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-tiny-overfit-big-lr-0000 at: https://wandb.ai/instant-uv/jafar/runs/8a3o8xbw\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_173505-8a3o8xbw/logs\r\n",,terminal_output +6128,11521384,"TERMINAL",0,0,"]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +6129,11553022,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom jax import NamedSharding\nfrom flax.training.train_state import TrainState\nfrom flax.training import orbax_utils\nfrom orbax.checkpoint import PyTreeCheckpointer\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = 
self.tokenizer.vq_encode(batch[""videos""], training=False)\n # FIXME @mihir\n # lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n batch_size = batch[""videos""].shape[0]\n seq_len = batch[""videos""].shape[1]\n latent_actions_mocked = jnp.zeros((batch_size, seq_len - 1, 1, self.latent_patch_dim))\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(latent_actions_mocked),\n )\n ## \n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, 
y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n lam_init_params = dummy_lam.init(_rng, inputs)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n\n def create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n\n abstract_sharded_tokenizer_state = create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n abstract_sharded_lam_state = create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n\n tokenizer_restore_target = {""model"": abstract_sharded_tokenizer_state}\n lam_restore_target = {""model"": abstract_sharded_lam_state}\n\n tokenizer_restore_args = orbax_utils.restore_args_from_target(\n tokenizer_restore_target\n )\n lam_restore_args = orbax_utils.restore_args_from_target(lam_restore_target)\n\n restored_tokenizer_params = (\n PyTreeCheckpointer()\n .restore(\n args.tokenizer_checkpoint,\n item=tokenizer_restore_target,\n restore_args=tokenizer_restore_args,\n )[""model""]\n .params[""params""]\n )\n restored_lam_params = (\n PyTreeCheckpointer()\n .restore(\n args.lam_checkpoint, item=lam_restore_target, restore_args=lam_restore_args\n )[""model""]\n .params[""params""]\n )\n # Genie does not 
initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately discarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n train_state.params[""params""][""lam""].update(restored_lam_params)\n\n return train_state\n",python,tab +6130,11554683,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",10862,0,"",python,selection_mouse +6131,11555291,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",10902,0,"",python,selection_mouse +6132,11557570,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",10864,0,"",python,selection_mouse +6133,11558178,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",10902,0,"",python,selection_mouse +6134,11558968,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",10901,0,"",python,selection_mouse +6135,11558970,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",10900,0,"",python,selection_command +6136,11566852,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",10771,0,"",python,selection_mouse +6137,11566987,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",10759,19,"restored_lam_params",python,selection_mouse +6138,11579747,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2289,0,"",python,selection_mouse +6139,11580932,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2288,1,"",python,content +6140,11581049,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2287,1,"",python,content +6141,11581479,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py",2286,0,"",python,selection_command +6142,11584031,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +6143,11585073,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output
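The KeyError: 'lam' in the failed run above follows from Flax's lazy initialization: with the self.lam.vq_encode call commented out in favor of latent_actions_mocked, __call__ never touches self.lam, so init() creates no "lam" parameter subtree, and restore_genie_components' lookup of train_state.params["params"]["lam"] fails. A minimal standalone sketch of the mechanism (toy flax.linen module, not the repo's classes):

import jax
import jax.numpy as jnp
import flax.linen as nn

class Toy(nn.Module):
    def setup(self):
        self.lam = nn.Dense(4)   # declared in setup, but never invoked below
        self.head = nn.Dense(4)

    def __call__(self, x):
        return self.head(x)      # only "head" parameters get created

params = Toy().init(jax.random.PRNGKey(0), jnp.zeros((1, 4)))
print("lam" in params["params"])  # False
params["params"]["lam"]           # raises KeyError: 'lam', as in the log above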
+6144,11585187,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3458179\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0408\r\nSLURM_JOB_START_TIME=1751296933\r\nSLURM_STEP_NODELIST=hkn0408\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300533\r\nSLURM_PMI2_SRUN_PORT=38655\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306855\r\nSLURM_PTY_PORT=36241\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e12.hkn0408\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0408\r\nSLURM_SRUN_COMM_PORT=42547\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306855\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0408\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=42547\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0408\r\n",,terminal_output +6145,11587264,"TERMINAL",0,0,"2025-06-30 17:37:11.661691: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751297831.674547 3476277 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751297831.678870 3476277 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751297831.690942 3476277 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297831.690959 3476277 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297831.690961 3476277 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297831.690963 3476277 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +6146,11589497,"TERMINAL",0,0,"W0000 00:00:1751297833.922242 3476277 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +6147,11589839,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +6148,11590560,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +6149,11591074,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_173714-l5ohdpai\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-tiny-overfit-big-lr-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/l5ohdpai\r\n",,terminal_output +6150,11592539,"TERMINAL",0,0,"2025-06-30 17:37:16.889506: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6151,11605099,"TERMINAL",0,0,"2025-06-30 17:37:29.496110: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6152,11620119,"TERMINAL",0,0,"2025-06-30 17:37:44.545806: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6153,11627179,"TERMINAL",0,0,"2025-06-30 17:37:51.591255: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6154,11645302,"TERMINAL",0,0,"batch shape: (1, 16, 90, 160, 3)\r\n",,terminal_output +6155,11647900,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 239, in \r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 110, in train_step\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 82, in dynamics_loss_fn\r\n outputs = state.apply_fn(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 77, in __call__\r\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/lam.py"", line 81, in vq_encode\r\n z_q, z, emb, indices = self.vq(z, training)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 107, in setup\r\n self.param(\r\nflax.errors.ScopeParamShapeError: Initializer expected to generate shape (6, 32) but got shape (1, 32) instead for parameter ""codebook"" in ""/lam/vq"". (https://flax.readthedocs.io/en/latest/api_reference/flax.errors.html#flax.errors.ScopeParamShapeError)\r\n",,terminal_output +6156,11649232,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-tiny-overfit-big-lr-0000 at: https://wandb.ai/instant-uv/jafar/runs/l5ohdpai\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_173714-l5ohdpai/logs\r\n",,terminal_output +6157,11650977,"TERMINAL",0,0,"]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +6158,11668501,"scripts_horeka/train_lam.sh",0,0,"#!/usr/bin/env bash\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=""debug""\nslurm_job_id=""0000""\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\npython train_lam.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=5e-7 \\n --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,tab +6159,11669714,"scripts_horeka/train_lam.sh",548,0,"",shellscript,selection_mouse +6160,11669716,"scripts_horeka/train_lam.sh",547,0,"",shellscript,selection_command +6161,11670242,"scripts_horeka/train_lam.sh",627,0,"",shellscript,selection_mouse +6162,11670244,"scripts_horeka/train_lam.sh",626,0,"",shellscript,selection_command +6163,11672933,"scripts_horeka/train_lam.sh",536,0,"",shellscript,selection_mouse +6164,11672935,"scripts_horeka/train_lam.sh",535,0,"",shellscript,selection_command +6165,11673592,"scripts_horeka/train_lam.sh",622,0,"",shellscript,selection_mouse +6166,11674184,"scripts_horeka/train_lam.sh",627,0,"",shellscript,selection_mouse +6167,11674190,"scripts_horeka/train_lam.sh",626,0,"",shellscript,selection_command +6168,11674579,"scripts_horeka/train_lam.sh",627,0,"",shellscript,selection_mouse +6169,11674581,"scripts_horeka/train_lam.sh",626,0,"",shellscript,selection_command 
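The ScopeParamShapeError above is the other half of the same mismatch: the session added --num_latent_actions=1 to the dynamics script, while the LAM checkpoint was trained with a 6-entry codebook (--num_latents=6 in the sbatch below). A minimal standalone repro, assuming only jax and flax (toy VQ module, not the repo's):

import jax
import jax.numpy as jnp
import flax.linen as nn

class VQ(nn.Module):
    num_latents: int
    latent_dim: int = 32

    @nn.compact
    def __call__(self, z):
        # Same parameter name and shape convention as in the error message.
        codebook = self.param(
            "codebook", nn.initializers.normal(0.02),
            (self.num_latents, self.latent_dim),
        )
        return codebook

params = VQ(num_latents=1).init(jax.random.PRNGKey(0), jnp.zeros((32,)))
# Applying a num_latents=6 module to params built with num_latents=1 raises
# "Initializer expected to generate shape (6, 32) but got shape (1, 32)".
VQ(num_latents=6).apply(params, jnp.zeros((32,)))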
+6170,11677453,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=15:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=logs/logs_training/%x_%j.log\n#SBATCH --error=logs/logs_training/%x_%j.log\n#SBATCH --mail-user=mihir.mahajan2002@gmail.com\n#SBATCH --job-name=train_lam_minecraft_overfit_sample\n#SBATCH --mem=50G\n#SBATCH --mail-type=ALL\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_lam.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=3e-4 \\n --max_lr=3e-4 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --log_checkpoint_interval=500 \\n --name=lam-tiny-overfit-$slurm_job_id \\n --tags lam overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=384 \\n --latent_dim=32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,tab +6171,11679192,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",1331,0,"",shellscript,selection_mouse +6172,11679208,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",1330,0,"",shellscript,selection_command +6173,11679298,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",1330,1,"0",shellscript,selection_mouse +6174,11679298,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",1331,0,"",shellscript,selection_command +6175,11679363,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",1284,47,"\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,selection_mouse +6176,11679380,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",1215,116,"32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,selection_mouse +6177,11679425,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",993,338,"g_checkpoint_interval=500 \\n --name=lam-tiny-overfit-$slurm_job_id \\n --tags lam overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=384 \\n --latent_dim=32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,selection_mouse +6178,11679426,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",924,407,"-warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --log_checkpoint_interval=500 \\n --name=lam-tiny-overfit-$slurm_job_id \\n --tags lam overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=384 \\n --latent_dim=32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,selection_mouse +6179,11679433,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",883,448,"--min_lr=3e-4 \\n --max_lr=3e-4 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --log_checkpoint_interval=500 \\n --name=lam-tiny-overfit-$slurm_job_id \\n --tags lam overfit tiny \\n 
--entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=384 \\n --latent_dim=32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,selection_mouse +6180,11679488,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",882,449," --min_lr=3e-4 \\n --max_lr=3e-4 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --log_checkpoint_interval=500 \\n --name=lam-tiny-overfit-$slurm_job_id \\n --tags lam overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=384 \\n --latent_dim=32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,selection_mouse +6181,11679489,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",861,470," --batch_size=1 \\n --min_lr=3e-4 \\n --max_lr=3e-4 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --log_checkpoint_interval=500 \\n --name=lam-tiny-overfit-$slurm_job_id \\n --tags lam overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=384 \\n --latent_dim=32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,selection_mouse +6182,11679545,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",860,471," --batch_size=1 \\n --min_lr=3e-4 \\n --max_lr=3e-4 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --log_checkpoint_interval=500 \\n --name=lam-tiny-overfit-$slurm_job_id \\n --tags lam overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=384 \\n --latent_dim=32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,selection_mouse +6183,11679630,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",859,472," --batch_size=1 \\n --min_lr=3e-4 \\n --max_lr=3e-4 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --log_checkpoint_interval=500 \\n --name=lam-tiny-overfit-$slurm_job_id \\n --tags lam overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=384 \\n --latent_dim=32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,selection_mouse +6184,11679832,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",858,473," --batch_size=1 \\n --min_lr=3e-4 \\n --max_lr=3e-4 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --log_checkpoint_interval=500 \\n --name=lam-tiny-overfit-$slurm_job_id \\n --tags lam overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=384 \\n --latent_dim=32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,selection_mouse +6185,11679848,"scripts_horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch",825,506," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=3e-4 \\n --max_lr=3e-4 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --log_checkpoint_interval=500 \\n --name=lam-tiny-overfit-$slurm_job_id \\n --tags lam overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=384 \\n --latent_dim=32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n 
--codebook_dropout=0.0",shellscript,selection_mouse +6186,11682797,"scripts_horeka/train_lam.sh",0,0,"",shellscript,tab +6187,11683603,"scripts_horeka/train_lam.sh",628,0,"",shellscript,selection_mouse +6188,11683788,"scripts_horeka/train_lam.sh",607,21,"_dir $tf_records_dir\n",shellscript,selection_mouse +6189,11683831,"scripts_horeka/train_lam.sh",579,49,"--project jafar \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +6190,11683854,"scripts_horeka/train_lam.sh",551,77," --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +6191,11683866,"scripts_horeka/train_lam.sh",537,91," --log \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +6192,11683894,"scripts_horeka/train_lam.sh",508,120," --log_image_interval=3 \\n --log \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +6193,11683911,"scripts_horeka/train_lam.sh",483,145," --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +6194,11683968,"scripts_horeka/train_lam.sh",463,165," --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +6195,11684099,"scripts_horeka/train_lam.sh",443,185," --min_lr=5e-7 \\n --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +6196,11684161,"scripts_horeka/train_lam.sh",422,206," --batch_size=1 \\n --min_lr=5e-7 \\n --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +6197,11684325,"scripts_horeka/train_lam.sh",389,239," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=5e-7 \\n --max_lr=5e-6 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir\n",shellscript,selection_mouse +6198,11685120,"scripts_horeka/train_lam.sh",389,239,"",shellscript,content +6199,11685615,"scripts_horeka/train_lam.sh",389,0," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=3e-4 \\n --max_lr=3e-4 \\n --warmup_steps=125 \\n --log_image_interval=3 \\n --log \\n --log_checkpoint_interval=500 \\n --name=lam-tiny-overfit-$slurm_job_id \\n --tags lam overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim=384 \\n --latent_dim=32 \\n --num_latents=6 \\n --patch_size=16 \\n --num_blocks=8 \\n --num_heads=8 \\n --codebook_dropout=0.0",shellscript,content +6200,11686403,"scripts_horeka/train_lam.sh",575,0,"",shellscript,selection_mouse +6201,11686905,"scripts_horeka/train_lam.sh",827,0,"",shellscript,selection_mouse +6202,11687405,"scripts_horeka/train_lam.sh",895,0,"",shellscript,selection_mouse +6203,11688330,"scripts_horeka/train_lam.sh",800,0,"",shellscript,selection_mouse +6204,11688792,"scripts_horeka/train_lam.sh",751,0,"",shellscript,selection_mouse +6205,11689137,"scripts_horeka/train_lam.sh",750,0,"",shellscript,selection_command +6206,11689712,"scripts_horeka/train_lam.sh",777,0,"",shellscript,selection_mouse +6207,11691413,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output 
+6208,11694972,"TERMINAL",0,0,"l",,terminal_output +6209,11695141,"TERMINAL",0,0,"am.sh ",,terminal_output +6210,11697170,"scripts_horeka/train_lam.sh",0,0,"",shellscript,tab +6211,11697171,"scripts_horeka/train_lam.sh",382,0,"",shellscript,selection_mouse +6212,11697327,"scripts_horeka/train_lam.sh",374,9,"train_lam",shellscript,selection_mouse +6213,11700599,"train_lam.py",0,0,"",python,tab +6214,11705943,"train_lam.py",1074,0,"",python,selection_mouse +6215,11706073,"train_lam.py",1073,11,"num_latents",python,selection_mouse +6216,11708844,"train_lam.py",1142,0,"",python,selection_mouse +6217,11708857,"train_lam.py",1141,0,"",python,selection_command +6218,11709793,"train_lam.py",1101,0,"",python,selection_mouse +6219,11711470,"utils/dataloader.py",0,0,"",python,tab +6220,11712179,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +6221,11717502,"scripts_horeka/train_lam.sh",0,0,"",shellscript,tab +6222,11720773,"scripts_horeka/train_lam.sh",895,0,"",shellscript,selection_mouse +6223,11720791,"scripts_horeka/train_lam.sh",894,0,"",shellscript,selection_command +6224,11721358,"scripts_horeka/train_lam.sh",868,0,"",shellscript,selection_mouse +6225,11721364,"scripts_horeka/train_lam.sh",867,0,"",shellscript,selection_command +6226,11721973,"scripts_horeka/train_lam.sh",895,0,"",shellscript,selection_mouse +6227,11721978,"scripts_horeka/train_lam.sh",894,0,"",shellscript,selection_command +6228,11722368,"scripts_horeka/train_lam.sh",895,0,"",shellscript,selection_command +6229,11723061,"scripts_horeka/train_lam.sh",895,0," ",shellscript,content +6230,11723062,"scripts_horeka/train_lam.sh",896,0,"",shellscript,selection_keyboard +6231,11723234,"scripts_horeka/train_lam.sh",896,0,"\",shellscript,content +6232,11723235,"scripts_horeka/train_lam.sh",897,0,"",shellscript,selection_keyboard +6233,11723384,"scripts_horeka/train_lam.sh",897,0,"\n ",shellscript,content +6234,11723831,"scripts_horeka/train_lam.sh",902,0,"num_latents",shellscript,content +6235,11725364,"scripts_horeka/train_lam.sh",902,0,"",shellscript,selection_mouse +6236,11726253,"scripts_horeka/train_lam.sh",902,0,"-",shellscript,content +6237,11726254,"scripts_horeka/train_lam.sh",903,0,"",shellscript,selection_keyboard +6238,11726614,"scripts_horeka/train_lam.sh",903,0,"-",shellscript,content +6239,11726614,"scripts_horeka/train_lam.sh",904,0,"",shellscript,selection_keyboard +6240,11727544,"scripts_horeka/train_lam.sh",915,0,"",shellscript,selection_mouse +6241,11728229,"scripts_horeka/train_lam.sh",915,0,"=",shellscript,content +6242,11728230,"scripts_horeka/train_lam.sh",916,0,"",shellscript,selection_keyboard +6243,11728292,"scripts_horeka/train_lam.sh",916,0,"!",shellscript,content +6244,11728293,"scripts_horeka/train_lam.sh",917,0,"",shellscript,selection_keyboard +6245,11729197,"scripts_horeka/train_lam.sh",916,1,"",shellscript,content +6246,11729262,"scripts_horeka/train_lam.sh",916,0,"1",shellscript,content +6247,11729262,"scripts_horeka/train_lam.sh",917,0,"",shellscript,selection_keyboard +6248,11730725,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+6249,11730880,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3458179\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0408\r\nSLURM_JOB_START_TIME=1751296933\r\nSLURM_STEP_NODELIST=hkn0408\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300533\r\nSLURM_PMI2_SRUN_PORT=38655\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306855\r\nSLURM_PTY_PORT=36241\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e12.hkn0408\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0408\r\nSLURM_SRUN_COMM_PORT=42547\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306855\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0408\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=42547\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0408\r\n",,terminal_output +6250,11732924,"TERMINAL",0,0,"2025-06-30 17:39:37.308186: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751297977.321322 3477956 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751297977.325680 3477956 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751297977.337355 3477956 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297977.337376 3477956 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297977.337378 3477956 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297977.337380 3477956 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +6251,11735077,"TERMINAL",0,0,"W0000 00:00:1751297979.502165 3477956 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +6252,11735389,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +6253,11735770,"TERMINAL",0,0,"^CTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_lam.py"", line 141, in \r\n wandb.init(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_init.py"", line 1623, in init\r\n wi.maybe_login(init_settings)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_init.py"", line 208, in maybe_login\r\n wandb_login._login(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_login.py"", line 282, in _login\r\n wlogin = _WandbLogin(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_login.py"", line 118, in __init__\r\n self._wandb_setup = wandb.setup()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_setup.py"", line 444, in setup\r\n return _setup(settings=settings)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/contextlib.py"", line 79, in inner\r\n",,terminal_output +6254,11735877,"TERMINAL",0,0," return func(*args, **kwds)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_setup.py"", line 383, in _setup\r\n _singleton.ensure_service()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_setup.py"", line 325, in ensure_service\r\n self._connection = service_connection.connect_to_service(self._settings)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 41, in connect_to_service\r\n return _start_and_connect_service(settings)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 76, in _start_and_connect_service\r\n proc.start()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/service/service.py"", line 242, in start\r\n self._launch_server()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/service/service.py"", line 234, in _launch_server\r\n self._wait_for_ports(fname, proc=internal_proc)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/service/service.py"", line 110, in _wait_for_ports\r\n time.sleep(0.2)\r\nKeyboardInterrupt\r\n",,terminal_output +6255,11736459,"TERMINAL",0,0,"^C",,terminal_output +6256,11736583,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +6257,11737785,"scripts_horeka/train_lam.sh",0,0,"",shellscript,tab +6258,11737786,"scripts_horeka/train_lam.sh",533,0,"",shellscript,selection_mouse 
+6259,11738684,"scripts_horeka/train_lam.sh",533,1,"",shellscript,content +6260,11738874,"scripts_horeka/train_lam.sh",533,0,"1",shellscript,content +6261,11738875,"scripts_horeka/train_lam.sh",534,0,"",shellscript,selection_keyboard +6262,11739547,"scripts_horeka/train_lam.sh",534,0,"0",shellscript,content +6263,11739548,"scripts_horeka/train_lam.sh",535,0,"",shellscript,selection_keyboard +6264,11739642,"scripts_horeka/train_lam.sh",535,0,"0",shellscript,content +6265,11739643,"scripts_horeka/train_lam.sh",536,0,"",shellscript,selection_keyboard +6266,11740586,"scripts_horeka/train_lam.sh",583,0,"",shellscript,selection_mouse +6267,11741197,"scripts_horeka/train_lam.sh",584,0,"",shellscript,selection_mouse +6268,11741595,"scripts_horeka/train_lam.sh",583,1,"",shellscript,content +6269,11741721,"scripts_horeka/train_lam.sh",582,1,"",shellscript,content +6270,11743879,"TERMINAL",0,0,"sh scripts_horeka/train_lam.sh ",,terminal_output +6271,11744348,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6272,11744470,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3458179\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0408\r\nSLURM_JOB_START_TIME=1751296933\r\nSLURM_STEP_NODELIST=hkn0408\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300533\r\nSLURM_PMI2_SRUN_PORT=38655\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306855\r\nSLURM_PTY_PORT=36241\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e12.hkn0408\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0408\r\nSLURM_SRUN_COMM_PORT=42547\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306855\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0408\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=42547\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0408\r\n",,terminal_output +6273,11746430,"TERMINAL",0,0,"2025-06-30 17:39:50.765472: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751297990.778434 3478060 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751297990.783077 3478060 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751297990.795798 3478060 computation_placer.cc:177] 
computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297990.795817 3478060 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297990.795819 3478060 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751297990.795821 3478060 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +6274,11748397,"TERMINAL",0,0,"W0000 00:00:1751297992.809723 3478060 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +6275,11748695,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +6276,11749489,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +6277,11750097,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_173953-w34z28wb\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run lam-tiny-overfit-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/w34z28wb\r\n",,terminal_output +6278,11773151,"TERMINAL",0,0,"Starting training from step 0...\r\nbatch shape: (1, 16, 90, 160, 3)\r\n",,terminal_output +6279,11784402,"TERMINAL",0,0,"2025-06-30 17:40:28.789985: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 17:40:28.790023: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6280,11831603,"TERMINAL",0,0,"Step 0, loss: 0.3208085000514984, step time: 58435.8332157135ms\r\n",,terminal_output +6281,11831667,"TERMINAL",0,0,"Step 1, loss: 0.3073524832725525, step time: 25.124073028564453ms\r\nStep 2, loss: 0.27462828159332275, step time: 18.448591232299805ms\r\n",,terminal_output +6282,11831729,"TERMINAL",0,0,"Step 3, loss: 0.23908670246601105, step time: 17.795562744140625ms\r\nStep 4, loss: 0.21754923462867737, step time: 16.640424728393555ms\r\n",,terminal_output +6283,11833877,"TERMINAL",0,0,"Step 5, loss: 0.20371302962303162, step time: 20.058631896972656ms\r\n",,terminal_output +6284,11833940,"TERMINAL",0,0,"Step 6, loss: 0.18872153759002686, step time: 21.886587142944336ms\r\nStep 7, loss: 0.1804431825876236, step time: 17.987489700317383ms\r\n",,terminal_output +6285,11834002,"TERMINAL",0,0,"Step 8, loss: 0.170509934425354, step time: 18.817424774169922ms\r\nStep 9, loss: 0.16182765364646912, step time: 17.09604263305664ms\r\n",,terminal_output +6286,11835558,"TERMINAL",0,0,"Step 10, loss: 0.1554047018289566, step time: 22.533893585205078ms\r\nStep 11, loss: 0.14831596612930298, step time: 21.761417388916016ms\r\nStep 12, loss: 0.1458558291196823, step time: 18.009424209594727ms\r\nStep 13, loss: 0.14321398735046387, step time: 15.563011169433594ms\r\n",,terminal_output +6287,11835619,"TERMINAL",0,0,"Step 14, loss: 0.13924242556095123, step time: 18.10622215270996ms\r\n",,terminal_output +6288,11837141,"TERMINAL",0,0,"Step 15, loss: 0.13653706014156342, step time: 23.61154556274414ms\r\n",,terminal_output +6289,11837217,"TERMINAL",0,0,"Step 16, loss: 0.13340817391872406, step time: 21.59905433654785ms\r\nStep 17, loss: 0.12981106340885162, step time: 18.237590789794922ms\r\n",,terminal_output +6290,11837280,"TERMINAL",0,0,"Step 18, loss: 0.1269380748271942, step time: 18.561840057373047ms\r\nStep 19, loss: 0.12450557947158813, step time: 17.241477966308594ms\r\n",,terminal_output +6291,11838664,"TERMINAL",0,0,"Step 20, loss: 0.12029479444026947, step time: 23.556947708129883ms\r\n",,terminal_output +6292,11838726,"TERMINAL",0,0,"Step 21, loss: 0.11691753566265106, step time: 22.33409881591797ms\r\nStep 22, loss: 0.11433517187833786, step time: 17.959117889404297ms\r\n",,terminal_output +6293,11838788,"TERMINAL",0,0,"Step 23, loss: 0.11245304346084595, step time: 15.118837356567383ms\r\nStep 24, loss: 0.1114572286605835, step time: 15.847206115722656ms\r\n",,terminal_output +6294,11840328,"TERMINAL",0,0,"Step 25, loss: 0.10939903557300568, step time: 23.0252742767334ms\r\n",,terminal_output +6295,11840429,"TERMINAL",0,0,"Step 26, loss: 0.10761740058660507, step time: 22.678375244140625ms\r\nStep 27, loss: 0.10401813685894012, step time: 19.020557403564453ms\r\nStep 28, loss: 0.10108228027820587, step time: 18.959999084472656ms\r\n",,terminal_output +6296,11840481,"TERMINAL",0,0,"Step 29, loss: 0.09862412512302399, step time: 17.359495162963867ms\r\n",,terminal_output +6297,11841917,"TERMINAL",0,0,"Step 30, loss: 0.09769131243228912, step time: 24.273395538330078ms\r\n",,terminal_output +6298,11841986,"TERMINAL",0,0,"Step 31, loss: 0.097041055560112, step time: 24.31964874267578ms\r\nStep 32, loss: 0.09320252388715744, step time: 21.619558334350586ms\r\n",,terminal_output +6299,11842049,"TERMINAL",0,0,"Step 33, loss: 0.09221898019313812, step time: 18.45860481262207ms\r\nStep 34, loss: 
0.08875531703233719, step time: 18.17154884338379ms\r\n",,terminal_output +6300,11843471,"TERMINAL",0,0,"Step 35, loss: 0.08590184152126312, step time: 23.734092712402344ms\r\n",,terminal_output +6301,11843561,"TERMINAL",0,0,"Step 36, loss: 0.08299506455659866, step time: 22.62568473815918ms\r\nStep 37, loss: 0.08247227221727371, step time: 18.484830856323242ms\r\nStep 38, loss: 0.08043555915355682, step time: 18.42021942138672ms\r\n",,terminal_output +6302,11843648,"TERMINAL",0,0,"Step 39, loss: 0.07953275740146637, step time: 18.604755401611328ms\r\n",,terminal_output +6303,11845021,"TERMINAL",0,0,"Step 40, loss: 0.0777396634221077, step time: 22.40896224975586ms\r\n",,terminal_output +6304,11845083,"TERMINAL",0,0,"Step 41, loss: 0.07307805120944977, step time: 22.54629135131836ms\r\nStep 42, loss: 0.07179070264101028, step time: 18.379688262939453ms\r\n",,terminal_output +6305,11845176,"TERMINAL",0,0,"Step 43, loss: 0.07171543687582016, step time: 15.060901641845703ms\r\nStep 44, loss: 0.06897179037332535, step time: 14.755487442016602ms\r\n",,terminal_output +6306,11846633,"TERMINAL",0,0,"Step 45, loss: 0.06636292487382889, step time: 23.900508880615234ms\r\nStep 46, loss: 0.0642528235912323, step time: 22.475481033325195ms\r\nStep 47, loss: 0.06067855656147003, step time: 18.66626739501953ms\r\n",,terminal_output +6307,11846731,"TERMINAL",0,0,"Step 48, loss: 0.058606673032045364, step time: 18.527507781982422ms\r\nStep 49, loss: 0.058222051709890366, step time: 17.365694046020508ms\r\n",,terminal_output +6308,11847183,"TERMINAL",0,0,"^C",,terminal_output +6309,11847384,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_lam.py"", line 276, in \r\n orbax_checkpointer.save(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 255, in save\r\n self._handler.save(tmpdir.get(), args=ckpt_args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/pytree_checkpoint_handler.py"", line 594, in save\r\n self._handler_impl.save(directory, args=args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 554, in save\r\n asyncio_utils.run_sync(async_save(directory, *args, **kwargs))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/asyncio_utils.py"", line 50, in run_sync\r\n return asyncio.run(coro)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/asyncio/runners.py"", line 44, in run\r\n return loop.run_until_complete(main)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/asyncio/base_events.py"", line 636, in run_until_complete\r\n self.run_forever()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/asyncio/base_events.py"", line 603, in run_forever\r\n self._run_once()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/asyncio/base_events.py"", line 1909, in _run_once\r\n handle._run()\r\n File 
""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/asyncio/events.py"", line 80, in _run\r\n self._context.run(self._callback, *self._args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 552, in async_save\r\n f.result() # Block on result.\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/futures/future.py"", line 174, in result\r\n f.result(timeout=time_remaining)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/futures/future.py"", line 397, in result\r\n return self._f.result(timeout=timeout)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/futures/future.py"", line 348, in result\r\n return self._t.result(timeout=timeout)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/futures/future.py"", line 297, in result\r\n self.join(timeout=timeout)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/futures/future.py"", line 288, in join\r\n super().join(timeout=timeout)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1096, in join\r\n self._wait_for_tstate_lock()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1116, in _wait_for_tstate_lock\r\n if lock.acquire(block, timeout):\r\nKeyboardInterrupt\r\n",,terminal_output +6310,11847441,"TERMINAL",0,0,"^C",,terminal_output +6311,11847538,"TERMINAL",0,0,"^C",,terminal_output +6312,11847694,"TERMINAL",0,0,"Exception ignored in: Exception ignored in sys.unraisablehook: \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/console_capture.py"", line 155, in write_with_callbacks\r\n",,terminal_output +6313,11847764,"TERMINAL",0,0," cb(s, n)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/redirect.py"", line 662, in _on_write\r\n^C File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_run.py"", line 2500, in \r\n",,terminal_output +6314,11847840,"TERMINAL",0,0," lambda data: self._console_raw_callback(""stderr"", data),\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_run.py"", line 406, in wrapper\r\n return func(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_run.py"", line 464, in wrapper_fn\r\n return func(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_run.py"", line 1639, in _console_raw_callback\r\n self._backend.interface.publish_output_raw(name, data)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/interface/interface.py"", line 762, in publish_output_raw\r\n self._publish_output_raw(o)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/interface/interface_shared.py"", line 38, in _publish_output_raw\r\n self._publish(rec)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/interface/interface_sock.py"", line 39, in _publish\r\n self._sock_client.send_record_publish(record)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/sock_client.py"", line 174, in send_record_publish\r\n",,terminal_output +6315,11847892,"TERMINAL",0,0," self.send_server_request(server_req)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/sock_client.py"", line 154, in send_server_request\r\n self._send_message(msg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/sock_client.py"", line 151, in _send_message\r\n self._sendall_with_error_handle(header + data)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/sock_client.py"", line 130, in _sendall_with_error_handle\r\n sent = self._sock.send(data)\r\nKeyboardInterrupt: \r\n",,terminal_output +6316,11849837,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run lam-tiny-overfit-0000 at: https://wandb.ai/instant-uv/jafar/runs/w34z28wb\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_173953-w34z28wb/logs\r\n",,terminal_output +6317,11851385,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +6318,11880542,"TERMINAL",0,0,"bash",,terminal_focus +6319,11881055,"TERMINAL",0,0,"srun",,terminal_focus +6320,11908800,"TERMINAL",0,0,"bash",,terminal_focus +6321,11934062,"scripts_horeka/train_lam.sh",0,0,"",shellscript,tab +6322,11934063,"scripts_horeka/train_lam.sh",242,0,"",shellscript,selection_mouse +6323,11934712,"scripts_horeka/train_lam.sh",259,0,"",shellscript,selection_mouse +6324,11957219,"scripts_horeka/train_lam.sh",223,0,"",shellscript,selection_mouse +6325,11957355,"scripts_horeka/train_lam.sh",221,2,"d/",shellscript,selection_mouse +6326,11957360,"scripts_horeka/train_lam.sh",220,3,"ed/",shellscript,selection_mouse +6327,11957375,"scripts_horeka/train_lam.sh",217,6,"hared/",shellscript,selection_mouse +6328,11957432,"scripts_horeka/train_lam.sh",214,9,"s_shared/",shellscript,selection_mouse +6329,11957438,"scripts_horeka/train_lam.sh",211,12,"a_ws_shared/",shellscript,selection_mouse +6330,11957439,"scripts_horeka/train_lam.sh",209,14,"afa_ws_shared/",shellscript,selection_mouse +6331,11957442,"scripts_horeka/train_lam.sh",207,16,"-jafa_ws_shared/",shellscript,selection_mouse +6332,11957458,"scripts_horeka/train_lam.sh",204,19,"695-jafa_ws_shared/",shellscript,selection_mouse +6333,11957506,"scripts_horeka/train_lam.sh",203,20,"3695-jafa_ws_shared/",shellscript,selection_mouse +6334,11957520,"scripts_horeka/train_lam.sh",201,22,"nd3695-jafa_ws_shared/",shellscript,selection_mouse 
+6335,11957535,"scripts_horeka/train_lam.sh",198,25,"m_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6336,11957549,"scripts_horeka/train_lam.sh",197,26,"um_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6337,11957563,"scripts_horeka/train_lam.sh",196,27,"tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6338,11957577,"scripts_horeka/train_lam.sh",194,29,"h/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6339,11957594,"scripts_horeka/train_lam.sh",193,30,"ch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6340,11957608,"scripts_horeka/train_lam.sh",192,31,"tch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6341,11957665,"scripts_horeka/train_lam.sh",190,33,"ratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6342,11957666,"scripts_horeka/train_lam.sh",188,35,"scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6343,11957666,"scripts_horeka/train_lam.sh",186,37,"e/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6344,11957666,"scripts_horeka/train_lam.sh",185,38,"ce/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6345,11957676,"scripts_horeka/train_lam.sh",183,40,"pace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6346,11957694,"scripts_horeka/train_lam.sh",131,92,"/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6347,11957752,"scripts_horeka/train_lam.sh",130,93,"r/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6348,11957753,"scripts_horeka/train_lam.sh",129,94,"ir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6349,11957753,"scripts_horeka/train_lam.sh",127,96,"_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6350,11957813,"scripts_horeka/train_lam.sh",126,97,"s_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6351,11957814,"scripts_horeka/train_lam.sh",125,98,"ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6352,11957814,"scripts_horeka/train_lam.sh",124,99,"$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6353,11957875,"scripts_horeka/train_lam.sh",123,100,"=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6354,11957875,"scripts_horeka/train_lam.sh",122,101,"r=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6355,11957892,"scripts_horeka/train_lam.sh",121,102,"ir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6356,11957952,"scripts_horeka/train_lam.sh",171,52,"s/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6357,11957952,"scripts_horeka/train_lam.sh",170,53,"fs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6358,11958025,"scripts_horeka/train_lam.sh",169,54,"kfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse 
+6359,11958088,"scripts_horeka/train_lam.sh",168,55,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6360,11958164,"scripts_horeka/train_lam.sh",167,56,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +6361,12001542,"scripts_horeka/train_lam.sh",263,0,"",shellscript,selection_mouse +6362,12002185,"scripts_horeka/train_lam.sh",347,0,"",shellscript,selection_mouse +6363,12002940,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_mouse +6364,12003774,"scripts_horeka/train_lam.sh",308,0," ",shellscript,content +6365,12003777,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_keyboard +6366,12004676,"scripts_horeka/train_lam.sh",308,1,"",shellscript,content +6367,12006633,"scripts_horeka/train_lam.sh",308,0,"/",shellscript,content +6368,12006634,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_keyboard +6369,12008934,"scripts_horeka/train_lam.sh",309,1,"",shellscript,content +6370,12010359,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6371,12011166,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6372,12011360,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6373,12012127,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6374,12012300,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6375,12012900,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6376,12013101,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6377,12013527,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6378,12013775,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6379,12014009,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6380,12014262,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6381,12014516,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6382,12014756,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6383,12015007,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6384,12015273,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6385,12015535,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6386,12015794,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6387,12016042,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6388,12016313,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6389,12016565,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6390,12016815,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6391,12017064,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6392,12017321,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6393,12017565,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6394,12017809,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6395,12018067,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6396,12018296,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6397,12018575,"scripts_horeka/train_lam.sh",309,0,"",shellscript,selection_command +6398,12018887,"scripts_horeka/train_lam.sh",308,0,"",shellscript,selection_command +6399,12043393,"TERMINAL",0,0,"ls 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/lam_1751297992_5",,terminal_command +6400,12043413,"TERMINAL",0,0,"]633;E;2025-06-30 17:44:47 ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/lam_1751297992_5;ead59344-49db-4336-9336-47fae706e637]633;Carray_metadatas _CHECKPOINT_METADATA d manifest.ocdbt _METADATA ocdbt.process_0 _sharding\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +6401,12050029,"TERMINAL",0,0,"ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/",,terminal_command +6402,12050081,"TERMINAL",0,0,"]633;E;2025-06-30 17:44:54 ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/;ead59344-49db-4336-9336-47fae706e637]633;C",,terminal_output +6403,12050327,"TERMINAL",0,0,"0000 3290296 3290392 3292213 3292329 3292333 3292337 3294601 3296540 3296575 3297577 3297606 3297727 3299065 3299258 3300233 3300672 3301029 dyn tokenizer\r\n3290283 3290366 3290439 3292221 3292330 3292334 3292338 3294602 3296571 3297569 3297578 3297671 3299016 3299066 3299259 3300290 3301025 3301030 dynamics_ckpt_dir tokenizer_ckpt_dir\r\n3290284 3290367 3290440 3292258 3292331 3292335 3292339 3294603 3296573 3297575 3297582 3297693 3299062 3299068 3299272 3300658 3301026 3301031 lam\r\n3290295 3290391 3291405 3292328 3292332 3292336 3294600 3296502 3296574 3297576 3297586 3297706 3299063 3299069 3299579 3300663 3301027 3306801 lam_ckpt_dir\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +6404,12061276,"TERMINAL",0,0,"mkdir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/lam-1-action",,terminal_command +6405,12061296,"TERMINAL",0,0,"]633;E;2025-06-30 17:45:05 mkdir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/lam-1-action;ead59344-49db-4336-9336-47fae706e637]633;C]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +6406,12080839,"TERMINAL",0,0,"mv /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/lam_1751297992_5 /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/lam-1-action",,terminal_command +6407,12080864,"TERMINAL",0,0,"]633;E;2025-06-30 17:45:25 mv /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/lam_1751297992_5 /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/lam-1-action;ead59344-49db-4336-9336-47fae706e637]633;C]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +6408,12085454,"TERMINAL",0,0,"ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/lam-1-action",,terminal_command +6409,12085470,"TERMINAL",0,0,"]633;E;2025-06-30 17:45:29 ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/lam-1-action;ead59344-49db-4336-9336-47fae706e637]633;Clam_1751297992_5\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +6410,12088843,"TERMINAL",0,0,"ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/lam-1-action/lam_1751297992_5/",,terminal_command +6411,12088862,"TERMINAL",0,0,"]633;E;2025-06-30 17:45:33 ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/lam-1-action/lam_1751297992_5/;ead59344-49db-4336-9336-47fae706e637]633;Carray_metadatas _CHECKPOINT_METADATA d manifest.ocdbt _METADATA 
ocdbt.process_0 _sharding\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +6412,12093613,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +6413,12095956,"scripts_horeka/train_dynamics.sh",561,0,"",shellscript,selection_mouse +6414,12096956,"scripts_horeka/train_dynamics.sh",560,0,"",shellscript,selection_command +6415,12097730,"scripts_horeka/train_dynamics.sh",548,0,"",shellscript,selection_command +6416,12098099,"scripts_horeka/train_dynamics.sh",548,0,"#",shellscript,content +6417,12098101,"scripts_horeka/train_dynamics.sh",549,0,"",shellscript,selection_keyboard +6418,12098200,"scripts_horeka/train_dynamics.sh",549,0," ",shellscript,content +6419,12098201,"scripts_horeka/train_dynamics.sh",550,0,"",shellscript,selection_keyboard +6420,12098763,"scripts_horeka/train_dynamics.sh",549,0,"",shellscript,selection_command +6421,12098953,"scripts_horeka/train_dynamics.sh",661,0,"\n",shellscript,content +6422,12099792,"scripts_horeka/train_dynamics.sh",662,0,"l",shellscript,content +6423,12099793,"scripts_horeka/train_dynamics.sh",663,0,"",shellscript,selection_keyboard +6424,12099888,"scripts_horeka/train_dynamics.sh",663,0,"a",shellscript,content +6425,12099889,"scripts_horeka/train_dynamics.sh",664,0,"",shellscript,selection_keyboard +6426,12100003,"scripts_horeka/train_dynamics.sh",664,0,"m",shellscript,content +6427,12100004,"scripts_horeka/train_dynamics.sh",665,0,"",shellscript,selection_keyboard +6428,12100326,"scripts_horeka/train_dynamics.sh",665,0,"_",shellscript,content +6429,12100326,"scripts_horeka/train_dynamics.sh",666,0,"",shellscript,selection_keyboard +6430,12101765,"scripts_horeka/train_dynamics.sh",662,4,"lam_ckpt_dir",shellscript,content +6431,12102904,"scripts_horeka/train_dynamics.sh",674,0,"=",shellscript,content +6432,12102906,"scripts_horeka/train_dynamics.sh",675,0,"",shellscript,selection_keyboard +6433,12103941,"scripts_horeka/train_dynamics.sh",675,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/lam-1-action/lam_1751297992_5/",shellscript,content +6434,12106425,"TERMINAL",0,0,"srun",,terminal_focus +6435,12106934,"TERMINAL",0,0,"sh scripts_horeka/train_lam.sh ",,terminal_output +6436,12107738,"TERMINAL",0,0,"[5@dynamics",,terminal_output +6437,12108649,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
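The directory listing above (array_metadatas, _CHECKPOINT_METADATA, manifest.ocdbt, _METADATA, ocdbt.process_0, _sharding) is the on-disk layout of an Orbax checkpoint in OCDBT format. The checkpoint directory is self-contained, which is why relocating it with a plain mv into checkpoints/lam-1-action works, provided the restore path is updated, which is what the lam_ckpt_dir edit to scripts_horeka/train_dynamics.sh does next. A minimal restore sketch, assuming the checkpoint was written as a pytree (the project's actual restore helper, restore_genie_components, is not shown in this recording):

    import orbax.checkpoint as ocp

    ckpt_dir = "/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/lam-1-action/lam_1751297992_5"
    restored = ocp.PyTreeCheckpointer().restore(ckpt_dir)  # the saved parameter pytree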
+6438,12109118,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3458179\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0408\r\nSLURM_JOB_START_TIME=1751296933\r\nSLURM_STEP_NODELIST=hkn0408\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300533\r\nSLURM_PMI2_SRUN_PORT=38655\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306855\r\nSLURM_PTY_PORT=36241\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e12.hkn0408\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0408\r\nSLURM_SRUN_COMM_PORT=42547\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306855\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0408\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=42547\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0408\r\n",,terminal_output +6439,12111809,"TERMINAL",0,0,"2025-06-30 17:45:56.191972: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751298356.204731 3480440 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751298356.209191 3480440 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751298356.221972 3480440 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751298356.221990 3480440 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751298356.221992 3480440 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751298356.221995 3480440 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +6440,12116373,"TERMINAL",0,0,"W0000 00:00:1751298360.801986 3480440 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +6441,12116693,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +6442,12117613,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +6443,12118145,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_174602-5xgbrz02\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-tiny-overfit-big-lr-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/5xgbrz02\r\n",,terminal_output +6444,12119557,"TERMINAL",0,0,"2025-06-30 17:46:03.904264: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6445,12132657,"TERMINAL",0,0,"2025-06-30 17:46:17.071147: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6446,12148646,"TERMINAL",0,0,"2025-06-30 17:46:33.037507: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6447,12155988,"TERMINAL",0,0,"2025-06-30 17:46:40.413644: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6448,12175264,"TERMINAL",0,0,"batch shape: (1, 16, 90, 160, 3)\r\n",,terminal_output +6449,12186585,"TERMINAL",0,0,"2025-06-30 17:47:11.009803: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 17:47:11.010370: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 17:47:11.010485: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 17:47:11.011108: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 17:47:11.012132: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +6450,12232700,"TERMINAL",0,0,"Step 0, loss: 8.909699440002441, step time: 57150.07281303406ms\r\n",,terminal_output +6451,12232754,"TERMINAL",0,0,"Step 1, loss: 8.718293190002441, step time: 27.709245681762695ms\r\n",,terminal_output +6452,12232861,"TERMINAL",0,0,"Step 2, loss: 8.347650527954102, step time: 21.499156951904297ms\r\n",,terminal_output +6453,12232927,"TERMINAL",0,0,"Step 3, loss: 8.447335243225098, step time: 19.44565773010254ms\r\n",,terminal_output +6454,12232991,"TERMINAL",0,0,"Step 4, loss: 7.986716270446777, step time: 19.00196075439453ms\r\n",,terminal_output +6455,12233099,"TERMINAL",0,0,"Step 5, loss: 7.829824924468994, step time: 19.054174423217773ms\r\nStep 6, loss: 7.859235763549805, step time: 18.241405487060547ms\r\n",,terminal_output +6456,12233161,"TERMINAL",0,0,"Step 7, loss: 7.548120975494385, step time: 20.714998245239258ms\r\n",,terminal_output +6457,12233226,"TERMINAL",0,0,"Step 8, loss: 7.427951335906982, step time: 20.364999771118164ms\r\n",,terminal_output +6458,12233292,"TERMINAL",0,0,"Step 9, loss: 7.490316390991211, step time: 18.751144409179688ms\r\n",,terminal_output +6459,12233354,"TERMINAL",0,0,"Step 10, loss: 7.2540411949157715, step time: 18.707752227783203ms\r\n",,terminal_output +6460,12233417,"TERMINAL",0,0,"Step 11, loss: 7.244628429412842, step time: 19.289016723632812ms\r\n",,terminal_output +6461,12233483,"TERMINAL",0,0,"Step 12, loss: 7.090365886688232, step time: 19.064903259277344ms\r\n",,terminal_output +6462,12233544,"TERMINAL",0,0,"Step 13, loss: 6.978655815124512, step time: 18.771886825561523ms\r\n",,terminal_output +6463,12233643,"TERMINAL",0,0,"Step 14, loss: 6.969472408294678, step time: 18.965721130371094ms\r\n",,terminal_output +6464,12233680,"TERMINAL",0,0,"Step 15, loss: 6.915572166442871, step time: 18.626689910888672ms\r\n",,terminal_output +6465,12233733,"TERMINAL",0,0,"Step 16, loss: 6.7982282638549805, step time: 19.001483917236328ms\r\n",,terminal_output +6466,12233829,"TERMINAL",0,0,"Step 17, loss: 6.738492488861084, step time: 19.21224594116211ms\r\n",,terminal_output +6467,12233937,"TERMINAL",0,0,"Step 18, loss: 6.924352645874023, step time: 18.683671951293945ms\r\nStep 19, loss: 6.621378421783447, step time: 18.628358840942383ms\r\n",,terminal_output +6468,12234003,"TERMINAL",0,0,"Step 20, loss: 6.606691360473633, step time: 19.305706024169922ms\r\n",,terminal_output +6469,12234351,"TERMINAL",0,0,"Step 21, loss: 6.544440269470215, step time: 337.4321460723877ms\r\n",,terminal_output +6470,12234401,"TERMINAL",0,0,"Step 22, loss: 6.506128311157227, step time: 26.36575698852539ms\r\n",,terminal_output +6471,12234496,"TERMINAL",0,0,"Step 23, loss: 6.4384074211120605, step time: 20.961284637451172ms\r\n",,terminal_output 
+6472,12234558,"TERMINAL",0,0,"Step 24, loss: 6.389023303985596, step time: 19.853591918945312ms\r\n",,terminal_output +6473,12234620,"TERMINAL",0,0,"Step 25, loss: 6.341235637664795, step time: 19.16956901550293ms\r\n",,terminal_output +6474,12234690,"TERMINAL",0,0,"Step 26, loss: 6.309752464294434, step time: 18.91160011291504ms\r\n",,terminal_output +6475,12234751,"TERMINAL",0,0,"Step 27, loss: 6.2737250328063965, step time: 19.234180450439453ms\r\n",,terminal_output +6476,12234813,"TERMINAL",0,0,"Step 28, loss: 6.283984184265137, step time: 18.737316131591797ms\r\n",,terminal_output +6477,12234879,"TERMINAL",0,0,"Step 29, loss: 6.205039978027344, step time: 18.49651336669922ms\r\n",,terminal_output +6478,12234943,"TERMINAL",0,0,"Step 30, loss: 6.182046890258789, step time: 19.376754760742188ms\r\n",,terminal_output +6479,12235006,"TERMINAL",0,0,"Step 31, loss: 6.179562568664551, step time: 18.68724822998047ms\r\n",,terminal_output +6480,12235072,"TERMINAL",0,0,"Step 32, loss: 6.16857385635376, step time: 18.77570152282715ms\r\n",,terminal_output +6481,12235133,"TERMINAL",0,0,"Step 33, loss: 6.058876991271973, step time: 18.924474716186523ms\r\n",,terminal_output +6482,12235241,"TERMINAL",0,0,"Step 34, loss: 6.059284210205078, step time: 18.794536590576172ms\r\nStep 35, loss: 6.004425525665283, step time: 18.407821655273438ms\r\n",,terminal_output +6483,12235311,"TERMINAL",0,0,"Step 36, loss: 6.0015692710876465, step time: 18.600940704345703ms\r\n",,terminal_output +6484,12235374,"TERMINAL",0,0,"Step 37, loss: 5.957552433013916, step time: 18.186092376708984ms\r\n",,terminal_output +6485,12235471,"TERMINAL",0,0,"Step 38, loss: 5.9610748291015625, step time: 18.46790313720703ms\r\n",,terminal_output +6486,12235526,"TERMINAL",0,0,"Step 39, loss: 5.896188259124756, step time: 18.707990646362305ms\r\n",,terminal_output +6487,12235579,"TERMINAL",0,0,"Step 40, loss: 5.865735054016113, step time: 18.700838088989258ms\r\n",,terminal_output +6488,12235687,"TERMINAL",0,0,"Step 41, loss: 5.850087642669678, step time: 18.31841468811035ms\r\nStep 42, loss: 5.809425354003906, step time: 18.868446350097656ms\r\n",,terminal_output +6489,12235743,"TERMINAL",0,0,"Step 43, loss: 5.83782958984375, step time: 18.26310157775879ms\r\n",,terminal_output +6490,12235805,"TERMINAL",0,0,"Step 44, loss: 5.781611919403076, step time: 18.358707427978516ms\r\n",,terminal_output +6491,12235872,"TERMINAL",0,0,"Step 45, loss: 5.744217872619629, step time: 18.38207244873047ms\r\n",,terminal_output +6492,12235974,"TERMINAL",0,0,"Step 46, loss: 5.706882476806641, step time: 18.268585205078125ms\r\n",,terminal_output +6493,12236031,"TERMINAL",0,0,"Step 47, loss: 5.679924964904785, step time: 29.971837997436523ms\r\n",,terminal_output +6494,12236138,"TERMINAL",0,0,"Step 48, loss: 5.669457912445068, step time: 20.226240158081055ms\r\nStep 49, loss: 5.622964382171631, step time: 18.4023380279541ms\r\n",,terminal_output +6495,12236200,"TERMINAL",0,0,"Step 50, loss: 5.6421427726745605, step time: 18.47076416015625ms\r\n",,terminal_output +6496,12236264,"TERMINAL",0,0,"Step 51, loss: 5.574595928192139, step time: 18.613576889038086ms\r\n",,terminal_output +6497,12236325,"TERMINAL",0,0,"Step 52, loss: 5.644163608551025, step time: 18.645524978637695ms\r\n",,terminal_output +6498,12236391,"TERMINAL",0,0,"Step 53, loss: 5.607563018798828, step time: 18.13530921936035ms\r\n",,terminal_output +6499,12236454,"TERMINAL",0,0,"Step 54, loss: 5.527794361114502, step time: 18.645524978637695ms\r\n",,terminal_output 
+6500,12236516,"TERMINAL",0,0,"Step 55, loss: 5.585363864898682, step time: 18.10431480407715ms\r\n",,terminal_output +6501,12236580,"TERMINAL",0,0,"Step 56, loss: 5.478458404541016, step time: 18.462181091308594ms\r\n",,terminal_output +6502,12236644,"TERMINAL",0,0,"Step 57, loss: 5.491300582885742, step time: 18.699169158935547ms\r\n",,terminal_output +6503,12236713,"TERMINAL",0,0,"Step 58, loss: 5.444189071655273, step time: 18.421411514282227ms\r\n",,terminal_output +6504,12236779,"TERMINAL",0,0,"Step 59, loss: 5.420370578765869, step time: 18.06950569152832ms\r\n",,terminal_output +6505,12236903,"TERMINAL",0,0,"Step 60, loss: 5.435080051422119, step time: 21.513700485229492ms\r\nStep 61, loss: 5.421053886413574, step time: 19.41084861755371ms\r\n",,terminal_output +6506,12236967,"TERMINAL",0,0,"Step 62, loss: 5.416149616241455, step time: 18.663406372070312ms\r\n",,terminal_output +6507,12237037,"TERMINAL",0,0,"Step 63, loss: 5.307112216949463, step time: 18.791913986206055ms\r\n",,terminal_output +6508,12237131,"TERMINAL",0,0,"Step 64, loss: 5.324649810791016, step time: 18.42045783996582ms\r\n",,terminal_output +6509,12237185,"TERMINAL",0,0,"Step 65, loss: 5.297629356384277, step time: 18.373727798461914ms\r\n",,terminal_output +6510,12237292,"TERMINAL",0,0,"Step 66, loss: 5.294079780578613, step time: 18.650531768798828ms\r\nStep 67, loss: 5.245375633239746, step time: 18.275976181030273ms\r\n",,terminal_output +6511,12237357,"TERMINAL",0,0,"Step 68, loss: 5.228888988494873, step time: 18.35942268371582ms\r\n",,terminal_output +6512,12237419,"TERMINAL",0,0,"Step 69, loss: 5.203759670257568, step time: 18.60833168029785ms\r\n",,terminal_output +6513,12237488,"TERMINAL",0,0,"Step 70, loss: 5.177604675292969, step time: 24.387121200561523ms\r\n",,terminal_output +6514,12237547,"TERMINAL",0,0,"Step 71, loss: 5.141502380371094, step time: 18.464088439941406ms\r\n",,terminal_output +6515,12237617,"TERMINAL",0,0,"Step 72, loss: 5.160765647888184, step time: 18.590688705444336ms\r\n",,terminal_output +6516,12237689,"TERMINAL",0,0,"Step 73, loss: 5.102837562561035, step time: 18.34845542907715ms\r\n",,terminal_output +6517,12237752,"TERMINAL",0,0,"Step 74, loss: 5.109357833862305, step time: 18.414735794067383ms\r\n",,terminal_output +6518,12237815,"TERMINAL",0,0,"Step 75, loss: 5.159406661987305, step time: 18.67961883544922ms\r\n",,terminal_output +6519,12237876,"TERMINAL",0,0,"Step 76, loss: 5.056825637817383, step time: 18.18084716796875ms\r\n",,terminal_output +6520,12237943,"TERMINAL",0,0,"Step 77, loss: 5.097210884094238, step time: 18.305540084838867ms\r\n",,terminal_output +6521,12238064,"TERMINAL",0,0,"Step 78, loss: 5.024137496948242, step time: 18.799304962158203ms\r\nStep 79, loss: 4.994380950927734, step time: 18.4018611907959ms\r\n",,terminal_output +6522,12238154,"TERMINAL",0,0,"Step 80, loss: 5.043018817901611, step time: 18.376827239990234ms\r\n",,terminal_output +6523,12238263,"TERMINAL",0,0,"Step 81, loss: 4.993089199066162, step time: 18.56827735900879ms\r\nStep 82, loss: 4.99432373046875, step time: 19.674062728881836ms\r\n",,terminal_output +6524,12238395,"TERMINAL",0,0,"Step 83, loss: 4.945208549499512, step time: 18.67079734802246ms\r\nStep 84, loss: 4.9048991203308105, step time: 18.547773361206055ms\r\n",,terminal_output +6525,12238480,"TERMINAL",0,0,"Step 85, loss: 4.8820672035217285, step time: 18.223047256469727ms\r\n",,terminal_output +6526,12238539,"TERMINAL",0,0,"Step 86, loss: 5.078080654144287, step time: 18.27383041381836ms\r\n",,terminal_output 
+6527,12238642,"TERMINAL",0,0,"Step 87, loss: 4.942952632904053, step time: 18.586158752441406ms\r\nStep 88, loss: 4.877021312713623, step time: 18.340587615966797ms\r\n",,terminal_output +6528,12238713,"TERMINAL",0,0,"Step 89, loss: 4.862122535705566, step time: 18.48292350769043ms\r\n",,terminal_output +6529,12238773,"TERMINAL",0,0,"Step 90, loss: 4.821525573730469, step time: 18.78809928894043ms\r\n",,terminal_output +6530,12238838,"TERMINAL",0,0,"Step 91, loss: 4.782376766204834, step time: 18.2342529296875ms\r\n",,terminal_output +6531,12238899,"TERMINAL",0,0,"Step 92, loss: 4.84404993057251, step time: 18.52726936340332ms\r\n",,terminal_output +6532,12239000,"TERMINAL",0,0,"Step 93, loss: 4.958974361419678, step time: 18.64457130432129ms\r\n",,terminal_output +6533,12239047,"TERMINAL",0,0,"Step 94, loss: 4.77755069732666, step time: 20.59006690979004ms\r\n",,terminal_output +6534,12239152,"TERMINAL",0,0,"Step 95, loss: 4.716158866882324, step time: 18.4323787689209ms\r\nStep 96, loss: 4.74016809463501, step time: 18.54109764099121ms\r\n",,terminal_output +6535,12239216,"TERMINAL",0,0,"Step 97, loss: 4.685654163360596, step time: 18.630504608154297ms\r\n",,terminal_output +6536,12239278,"TERMINAL",0,0,"Step 98, loss: 4.6879754066467285, step time: 18.44167709350586ms\r\n",,terminal_output +6537,12239342,"TERMINAL",0,0,"Step 99, loss: 4.749401569366455, step time: 18.68128776550293ms\r\n",,terminal_output +6538,12239472,"TERMINAL",0,0,"Step 100, loss: 4.670839309692383, step time: 18.42665672302246ms\r\nStep 101, loss: 4.685161590576172, step time: 18.316030502319336ms\r\n",,terminal_output +6539,12239537,"TERMINAL",0,0,"Step 102, loss: 4.631730079650879, step time: 18.598318099975586ms\r\n",,terminal_output +6540,12239639,"TERMINAL",0,0,"Step 103, loss: 4.768329620361328, step time: 18.570899963378906ms\r\n",,terminal_output +6541,12239691,"TERMINAL",0,0,"Step 104, loss: 4.625213623046875, step time: 18.593549728393555ms\r\n",,terminal_output +6542,12239742,"TERMINAL",0,0,"Step 105, loss: 4.6983256340026855, step time: 18.622398376464844ms\r\n",,terminal_output +6543,12239857,"TERMINAL",0,0,"Step 106, loss: 4.613818645477295, step time: 18.514394760131836ms\r\nStep 107, loss: 4.708028793334961, step time: 18.589496612548828ms\r\n",,terminal_output +6544,12239953,"TERMINAL",0,0,"Step 108, loss: 4.761256694793701, step time: 18.535375595092773ms\r\n",,terminal_output +6545,12240006,"TERMINAL",0,0,"Step 109, loss: 4.682203769683838, step time: 18.56207847595215ms\r\n",,terminal_output +6546,12240153,"TERMINAL",0,0,"Step 110, loss: 4.612459182739258, step time: 18.559694290161133ms\r\nStep 111, loss: 4.658640384674072, step time: 18.754005432128906ms\r\n",,terminal_output +6547,12240202,"TERMINAL",0,0,"Step 112, loss: 4.5396575927734375, step time: 18.384695053100586ms\r\n",,terminal_output +6548,12240307,"TERMINAL",0,0,"Step 113, loss: 4.5633440017700195, step time: 18.3408260345459ms\r\nStep 114, loss: 4.562738418579102, step time: 18.556833267211914ms\r\n",,terminal_output +6549,12240405,"TERMINAL",0,0,"Step 115, loss: 4.5758137702941895, step time: 18.44000816345215ms\r\n",,terminal_output +6550,12240510,"TERMINAL",0,0,"Step 116, loss: 4.57586145401001, step time: 18.56064796447754ms\r\n",,terminal_output +6551,12240573,"TERMINAL",0,0,"Step 117, loss: 4.4810261726379395, step time: 18.585681915283203ms\r\nStep 118, loss: 4.469954490661621, step time: 18.612146377563477ms\r\n",,terminal_output +6552,12240652,"TERMINAL",0,0,"Step 119, loss: 4.510289669036865, step time: 
18.31364631652832ms\r\n",,terminal_output +6553,12240711,"TERMINAL",0,0,"Step 120, loss: 4.4268574714660645, step time: 18.74542236328125ms\r\n",,terminal_output +6554,12240773,"TERMINAL",0,0,"Step 121, loss: 4.507619380950928, step time: 18.52893829345703ms\r\n",,terminal_output +6555,12240825,"TERMINAL",0,0,"Step 122, loss: 4.544739723205566, step time: 18.175840377807617ms\r\n",,terminal_output +6556,12240890,"TERMINAL",0,0,"Step 123, loss: 4.39323616027832, step time: 18.69034767150879ms\r\n",,terminal_output +6557,12240954,"TERMINAL",0,0,"Step 124, loss: 4.392701625823975, step time: 18.65530014038086ms\r\n",,terminal_output +6558,12241029,"TERMINAL",0,0,"Step 125, loss: 4.361972808837891, step time: 18.33176612854004ms\r\n",,terminal_output +6559,12241124,"TERMINAL",0,0,"Step 126, loss: 4.3598833084106445, step time: 18.724918365478516ms\r\n",,terminal_output +6560,12241175,"TERMINAL",0,0,"Step 127, loss: 4.3625407218933105, step time: 18.341541290283203ms\r\n",,terminal_output +6561,12241283,"TERMINAL",0,0,"Step 128, loss: 4.308709144592285, step time: 18.323659896850586ms\r\nStep 129, loss: 4.473628044128418, step time: 18.778324127197266ms\r\n",,terminal_output +6562,12241344,"TERMINAL",0,0,"Step 130, loss: 4.4516706466674805, step time: 18.750429153442383ms\r\n",,terminal_output +6563,12241405,"TERMINAL",0,0,"Step 131, loss: 4.323705196380615, step time: 18.514633178710938ms\r\n",,terminal_output +6564,12241475,"TERMINAL",0,0,"Step 132, loss: 4.332315444946289, step time: 18.650531768798828ms\r\n",,terminal_output +6565,12241567,"TERMINAL",0,0,"Step 133, loss: 4.475514888763428, step time: 18.377304077148438ms\r\n",,terminal_output +6566,12241688,"TERMINAL",0,0,"Step 134, loss: 4.274569511413574, step time: 18.35322380065918ms\r\nStep 135, loss: 4.2778239250183105, step time: 18.625974655151367ms\r\n",,terminal_output +6567,12241738,"TERMINAL",0,0,"Step 136, loss: 4.28542423248291, step time: 18.424034118652344ms\r\n",,terminal_output +6568,12241802,"TERMINAL",0,0,"Step 137, loss: 4.330113887786865, step time: 18.619298934936523ms\r\n",,terminal_output +6569,12241861,"TERMINAL",0,0,"Step 138, loss: 4.507289886474609, step time: 18.780946731567383ms\r\n",,terminal_output +6570,12241953,"TERMINAL",0,0,"Step 139, loss: 4.242666244506836, step time: 18.319368362426758ms\r\n",,terminal_output +6571,12242004,"TERMINAL",0,0,"Step 140, loss: 4.259644508361816, step time: 18.982410430908203ms\r\n",,terminal_output +6572,12242110,"TERMINAL",0,0,"Step 141, loss: 4.211047172546387, step time: 18.996000289916992ms\r\nStep 142, loss: 4.296898365020752, step time: 18.387556076049805ms\r\n",,terminal_output +6573,12242200,"TERMINAL",0,0,"Step 143, loss: 4.203551769256592, step time: 18.37921142578125ms\r\n",,terminal_output +6574,12242250,"TERMINAL",0,0,"Step 144, loss: 4.2029805183410645, step time: 18.641948699951172ms\r\n",,terminal_output +6575,12242343,"TERMINAL",0,0,"Step 145, loss: 4.217230796813965, step time: 18.389463424682617ms\r\n",,terminal_output +6576,12242448,"TERMINAL",0,0,"Step 146, loss: 4.214897632598877, step time: 19.69122886657715ms\r\nStep 147, loss: 4.1993536949157715, step time: 18.5699462890625ms\r\n",,terminal_output +6577,12242511,"TERMINAL",0,0,"Step 148, loss: 4.164219379425049, step time: 18.461942672729492ms\r\n",,terminal_output +6578,12242572,"TERMINAL",0,0,"Step 149, loss: 4.209932804107666, step time: 18.182754516601562ms\r\n",,terminal_output +6579,12242632,"TERMINAL",0,0,"Step 150, loss: 4.138350963592529, step time: 
18.82147789001465ms\r\n",,terminal_output +6580,12242701,"TERMINAL",0,0,"Step 151, loss: 4.13429594039917, step time: 18.425703048706055ms\r\n",,terminal_output +6581,12242752,"TERMINAL",0,0,"Step 152, loss: 4.102514743804932, step time: 18.529653549194336ms\r\n",,terminal_output +6582,12242847,"TERMINAL",0,0,"Step 153, loss: 4.177950382232666, step time: 18.565654754638672ms\r\n",,terminal_output +6583,12242953,"TERMINAL",0,0,"Step 154, loss: 4.082980632781982, step time: 18.393516540527344ms\r\nStep 155, loss: 4.1976637840271, step time: 18.322229385375977ms\r\n",,terminal_output +6584,12243046,"TERMINAL",0,0,"Step 156, loss: 4.128870964050293, step time: 18.68128776550293ms\r\n",,terminal_output +6585,12243101,"TERMINAL",0,0,"Step 157, loss: 4.177318572998047, step time: 18.320083618164062ms\r\n",,terminal_output +6586,12243213,"TERMINAL",0,0,"Step 158, loss: 4.111741542816162, step time: 18.334388732910156ms\r\nStep 159, loss: 4.162619590759277, step time: 18.705368041992188ms\r\n",,terminal_output +6587,12243267,"TERMINAL",0,0,"Step 160, loss: 4.065517902374268, step time: 19.987106323242188ms\r\n",,terminal_output +6588,12243360,"TERMINAL",0,0,"Step 161, loss: 4.196533203125, step time: 18.976211547851562ms\r\n",,terminal_output +6589,12243411,"TERMINAL",0,0,"Step 162, loss: 4.064414024353027, step time: 18.90850067138672ms\r\n",,terminal_output +6590,12243556,"TERMINAL",0,0,"Step 163, loss: 4.079598903656006, step time: 21.58951759338379ms\r\nStep 164, loss: 4.01621675491333, step time: 18.881559371948242ms\r\n",,terminal_output +6591,12243670,"TERMINAL",0,0,"Step 165, loss: 4.3789286613464355, step time: 18.990516662597656ms\r\nStep 166, loss: 4.1576032638549805, step time: 18.60666275024414ms\r\n",,terminal_output +6592,12243733,"TERMINAL",0,0,"Step 167, loss: 4.047615051269531, step time: 18.586158752441406ms\r\n",,terminal_output +6593,12243827,"TERMINAL",0,0,"Step 168, loss: 4.07843017578125, step time: 18.860816955566406ms\r\n",,terminal_output +6594,12243933,"TERMINAL",0,0,"Step 169, loss: 4.035501003265381, step time: 18.482446670532227ms\r\nStep 170, loss: 4.005661964416504, step time: 18.596410751342773ms\r\n",,terminal_output +6595,12244041,"TERMINAL",0,0,"Step 171, loss: 3.997438430786133, step time: 18.730878829956055ms\r\nStep 172, loss: 3.99068546295166, step time: 18.66769790649414ms\r\n",,terminal_output +6596,12244105,"TERMINAL",0,0,"Step 173, loss: 4.225961208343506, step time: 18.51677894592285ms\r\n",,terminal_output +6597,12244170,"TERMINAL",0,0,"Step 174, loss: 3.966600179672241, step time: 19.023895263671875ms\r\n",,terminal_output +6598,12244276,"TERMINAL",0,0,"Step 175, loss: 3.991746425628662, step time: 18.659591674804688ms\r\n",,terminal_output +6599,12244327,"TERMINAL",0,0,"Step 176, loss: 3.939676523208618, step time: 18.649816513061523ms\r\n",,terminal_output +6600,12244433,"TERMINAL",0,0,"Step 177, loss: 3.970024347305298, step time: 18.8448429107666ms\r\nStep 178, loss: 4.062183856964111, step time: 18.54848861694336ms\r\n",,terminal_output +6601,12244526,"TERMINAL",0,0,"Step 179, loss: 3.9491286277770996, step time: 18.723011016845703ms\r\n",,terminal_output +6602,12244578,"TERMINAL",0,0,"Step 180, loss: 3.9129130840301514, step time: 18.864870071411133ms\r\n",,terminal_output +6603,12244673,"TERMINAL",0,0,"Step 181, loss: 4.057594299316406, step time: 18.645048141479492ms\r\n",,terminal_output +6604,12244750,"TERMINAL",0,0,"Step 182, loss: 3.9182724952697754, step time: 18.580198287963867ms\r\nStep 183, loss: 4.213396072387695, step time: 
18.827199935913086ms\r\n",,terminal_output +6605,12244814,"TERMINAL",0,0,"Step 184, loss: 3.910738945007324, step time: 18.350601196289062ms\r\n",,terminal_output +6606,12244923,"TERMINAL",0,0,"Step 185, loss: 3.9770679473876953, step time: 18.25118064880371ms\r\n",,terminal_output +6607,12245036,"TERMINAL",0,0,"Step 186, loss: 3.962029457092285, step time: 18.668413162231445ms\r\n",,terminal_output +6608,12245143,"TERMINAL",0,0,"Step 187, loss: 4.225457191467285, step time: 18.375635147094727ms\r\nStep 188, loss: 3.9283082485198975, step time: 18.597841262817383ms\r\nStep 189, loss: 3.8743882179260254, step time: 18.518447875976562ms\r\n",,terminal_output +6609,12245206,"TERMINAL",0,0,"Step 190, loss: 3.937622547149658, step time: 18.363475799560547ms\r\n",,terminal_output +6610,12245267,"TERMINAL",0,0,"Step 191, loss: 3.8893446922302246, step time: 18.403291702270508ms\r\n",,terminal_output +6611,12245328,"TERMINAL",0,0,"Step 192, loss: 3.871443748474121, step time: 18.54419708251953ms\r\n",,terminal_output +6612,12245393,"TERMINAL",0,0,"Step 193, loss: 3.8855764865875244, step time: 18.18704605102539ms\r\n",,terminal_output +6613,12245496,"TERMINAL",0,0,"Step 194, loss: 3.919605255126953, step time: 18.291234970092773ms\r\n",,terminal_output +6614,12245547,"TERMINAL",0,0,"Step 195, loss: 3.8708837032318115, step time: 18.58067512512207ms\r\n",,terminal_output +6615,12245651,"TERMINAL",0,0,"Step 196, loss: 3.960422992706299, step time: 18.564224243164062ms\r\nStep 197, loss: 3.867398500442505, step time: 18.4781551361084ms\r\n",,terminal_output +6616,12245776,"TERMINAL",0,0,"Step 198, loss: 3.8491618633270264, step time: 18.825054168701172ms\r\nStep 199, loss: 3.822082996368408, step time: 18.826007843017578ms\r\n",,terminal_output +6617,12245840,"TERMINAL",0,0,"Step 200, loss: 4.039896488189697, step time: 18.496990203857422ms\r\n",,terminal_output +6618,12246174,"TERMINAL",0,0,"Step 201, loss: 3.8115761280059814, step time: 331.6047191619873ms\r\n",,terminal_output +6619,12246279,"TERMINAL",0,0,"Step 202, loss: 3.8493740558624268, step time: 26.634693145751953ms\r\n",,terminal_output +6620,12246330,"TERMINAL",0,0,"Step 203, loss: 3.830979347229004, step time: 20.598173141479492ms\r\n",,terminal_output +6621,12246476,"TERMINAL",0,0,"Step 204, loss: 3.793891668319702, step time: 19.665002822875977ms\r\nStep 205, loss: 3.8141117095947266, step time: 19.078731536865234ms\r\n",,terminal_output +6622,12246527,"TERMINAL",0,0,"Step 206, loss: 3.813584089279175, step time: 18.75448226928711ms\r\n",,terminal_output +6623,12246631,"TERMINAL",0,0,"Step 207, loss: 3.785759687423706, step time: 19.138336181640625ms\r\nStep 208, loss: 3.846027374267578, step time: 18.72563362121582ms\r\n",,terminal_output +6624,12246696,"TERMINAL",0,0,"Step 209, loss: 3.835111141204834, step time: 18.684864044189453ms\r\n",,terminal_output +6625,12246759,"TERMINAL",0,0,"Step 210, loss: 3.8682708740234375, step time: 19.28567886352539ms\r\n",,terminal_output +6626,12246859,"TERMINAL",0,0,"Step 211, loss: 3.9975996017456055, step time: 20.226478576660156ms\r\n",,terminal_output +6627,12246921,"TERMINAL",0,0,"Step 212, loss: 3.770719289779663, step time: 19.074201583862305ms\r\n",,terminal_output +6628,12246981,"TERMINAL",0,0,"Step 213, loss: 3.746039390563965, step time: 19.156932830810547ms\r\n",,terminal_output +6629,12247042,"TERMINAL",0,0,"Step 214, loss: 3.738029718399048, step time: 18.669843673706055ms\r\n",,terminal_output +6630,12247105,"TERMINAL",0,0,"Step 215, loss: 3.815014123916626, step time: 
18.77617835998535ms\r\n",,terminal_output +6631,12247165,"TERMINAL",0,0,"Step 216, loss: 3.751372814178467, step time: 19.168376922607422ms\r\n",,terminal_output +6632,12247225,"TERMINAL",0,0,"Step 217, loss: 3.7805256843566895, step time: 19.511938095092773ms\r\n",,terminal_output +6633,12247286,"TERMINAL",0,0,"Step 218, loss: 3.731015205383301, step time: 19.75083351135254ms\r\n",,terminal_output +6634,12247348,"TERMINAL",0,0,"Step 219, loss: 3.736156463623047, step time: 18.844127655029297ms\r\n",,terminal_output +6635,12247410,"TERMINAL",0,0,"Step 220, loss: 3.74418044090271, step time: 18.727540969848633ms\r\n",,terminal_output +6636,12247472,"TERMINAL",0,0,"Step 221, loss: 3.740985870361328, step time: 18.458843231201172ms\r\n",,terminal_output +6637,12247535,"TERMINAL",0,0,"Step 222, loss: 3.7945363521575928, step time: 18.961191177368164ms\r\n",,terminal_output +6638,12247599,"TERMINAL",0,0,"Step 223, loss: 3.7097489833831787, step time: 18.659591674804688ms\r\n",,terminal_output +6639,12247731,"TERMINAL",0,0,"Step 224, loss: 3.7406630516052246, step time: 18.58234405517578ms\r\nStep 225, loss: 3.7481703758239746, step time: 18.73469352722168ms\r\n",,terminal_output +6640,12247794,"TERMINAL",0,0,"Step 226, loss: 3.688950777053833, step time: 18.60809326171875ms\r\n",,terminal_output +6641,12247922,"TERMINAL",0,0,"Step 227, loss: 3.7380287647247314, step time: 18.372058868408203ms\r\nStep 228, loss: 3.6496431827545166, step time: 18.831968307495117ms\r\n",,terminal_output +6642,12248016,"TERMINAL",0,0,"Step 229, loss: 3.6831068992614746, step time: 18.537282943725586ms\r\n",,terminal_output +6643,12248068,"TERMINAL",0,0,"Step 230, loss: 3.71427059173584, step time: 18.44167709350586ms\r\n",,terminal_output +6644,12248165,"TERMINAL",0,0,"Step 231, loss: 3.843520164489746, step time: 18.713712692260742ms\r\n",,terminal_output +6645,12248212,"TERMINAL",0,0,"Step 232, loss: 3.6520841121673584, step time: 18.154382705688477ms\r\n",,terminal_output +6646,12248316,"TERMINAL",0,0,"Step 233, loss: 3.708714723587036, step time: 18.90850067138672ms\r\nStep 234, loss: 3.6473441123962402, step time: 19.48070526123047ms\r\n",,terminal_output +6647,12248413,"TERMINAL",0,0,"Step 235, loss: 3.6408164501190186, step time: 18.590211868286133ms\r\n",,terminal_output +6648,12248465,"TERMINAL",0,0,"Step 236, loss: 3.6309900283813477, step time: 18.525362014770508ms\r\n",,terminal_output +6649,12248572,"TERMINAL",0,0,"Step 237, loss: 3.781858205795288, step time: 18.77307891845703ms\r\nStep 238, loss: 3.6961586475372314, step time: 18.566131591796875ms\r\n",,terminal_output +6650,12248644,"TERMINAL",0,0,"Step 239, loss: 3.638131618499756, step time: 18.488168716430664ms\r\n",,terminal_output +6651,12248731,"TERMINAL",0,0,"Step 240, loss: 3.6630098819732666, step time: 30.761241912841797ms\r\n",,terminal_output +6652,12248766,"TERMINAL",0,0,"Step 241, loss: 3.641226291656494, step time: 19.215822219848633ms\r\n",,terminal_output +6653,12248893,"TERMINAL",0,0,"Step 242, loss: 3.6308040618896484, step time: 18.456459045410156ms\r\nStep 243, loss: 3.6356201171875, step time: 18.9511775970459ms\r\n",,terminal_output +6654,12248996,"TERMINAL",0,0,"Step 244, loss: 3.6048178672790527, step time: 18.355131149291992ms\r\n",,terminal_output +6655,12249049,"TERMINAL",0,0,"Step 245, loss: 3.6062591075897217, step time: 18.580198287963867ms\r\n",,terminal_output +6656,12249156,"TERMINAL",0,0,"Step 246, loss: 3.691812038421631, step time: 18.677711486816406ms\r\nStep 247, loss: 3.6038506031036377, step time: 
19.410133361816406ms\r\n",,terminal_output +6657,12249218,"TERMINAL",0,0,"Step 248, loss: 3.647033214569092, step time: 18.661022186279297ms\r\n",,terminal_output +6658,12249282,"TERMINAL",0,0,"Step 249, loss: 3.6104276180267334, step time: 18.97740364074707ms\r\n",,terminal_output +6659,12249345,"TERMINAL",0,0,"Step 250, loss: 3.6427993774414062, step time: 18.520593643188477ms\r\n",,terminal_output +6660,12249407,"TERMINAL",0,0,"Step 251, loss: 3.6583502292633057, step time: 18.63551139831543ms\r\n",,terminal_output +6661,12249471,"TERMINAL",0,0,"Step 252, loss: 3.583991527557373, step time: 18.697738647460938ms\r\n",,terminal_output +6662,12249535,"TERMINAL",0,0,"Step 253, loss: 3.5619945526123047, step time: 18.475055694580078ms\r\n",,terminal_output +6663,12249598,"TERMINAL",0,0,"Step 254, loss: 3.594876289367676, step time: 18.251419067382812ms\r\n",,terminal_output +6664,12249728,"TERMINAL",0,0,"Step 255, loss: 3.6091606616973877, step time: 18.635988235473633ms\r\nStep 256, loss: 3.6317179203033447, step time: 18.381595611572266ms\r\n",,terminal_output +6665,12249791,"TERMINAL",0,0,"Step 257, loss: 3.6440298557281494, step time: 18.23139190673828ms\r\n",,terminal_output +6666,12249893,"TERMINAL",0,0,"Step 258, loss: 3.552222728729248, step time: 18.72420310974121ms\r\n",,terminal_output +6667,12249952,"TERMINAL",0,0,"Step 259, loss: 3.556636333465576, step time: 18.465280532836914ms\r\n",,terminal_output +6668,12250011,"TERMINAL",0,0,"Step 260, loss: 3.5403966903686523, step time: 18.455028533935547ms\r\n",,terminal_output +6669,12250071,"TERMINAL",0,0,"Step 261, loss: 3.7328014373779297, step time: 18.714189529418945ms\r\n",,terminal_output +6670,12250133,"TERMINAL",0,0,"Step 262, loss: 3.7892582416534424, step time: 18.485069274902344ms\r\n",,terminal_output +6671,12250242,"TERMINAL",0,0,"Step 263, loss: 3.5246634483337402, step time: 21.09813690185547ms\r\nStep 264, loss: 3.557971477508545, step time: 20.117521286010742ms\r\n",,terminal_output +6672,12250350,"TERMINAL",0,0,"Step 265, loss: 3.5499746799468994, step time: 18.439531326293945ms\r\n",,terminal_output +6673,12250403,"TERMINAL",0,0,"Step 266, loss: 3.5851659774780273, step time: 18.476486206054688ms\r\n",,terminal_output +6674,12250507,"TERMINAL",0,0,"Step 267, loss: 3.7596182823181152, step time: 18.809080123901367ms\r\nStep 268, loss: 3.5285682678222656, step time: 18.46146583557129ms\r\n",,terminal_output +6675,12250599,"TERMINAL",0,0,"Step 269, loss: 3.4934065341949463, step time: 18.49198341369629ms\r\n",,terminal_output +6676,12250651,"TERMINAL",0,0,"Step 270, loss: 3.526407480239868, step time: 18.671751022338867ms\r\n",,terminal_output +6677,12250759,"TERMINAL",0,0,"Step 271, loss: 3.507366180419922, step time: 18.416404724121094ms\r\nStep 272, loss: 3.500816822052002, step time: 18.43857765197754ms\r\n",,terminal_output +6678,12250824,"TERMINAL",0,0,"Step 273, loss: 3.5235114097595215, step time: 18.773555755615234ms\r\n",,terminal_output +6679,12250899,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +6680,12250900,"scripts_horeka/train_dynamics.sh",955,0,"",shellscript,selection_mouse +6681,12250905,"TERMINAL",0,0,"Step 274, loss: 3.5207645893096924, step time: 18.430709838867188ms\r\n",,terminal_output +6682,12250963,"TERMINAL",0,0,"Step 275, loss: 3.511777400970459, step time: 18.55301856994629ms\r\n",,terminal_output +6683,12251007,"scripts_horeka/train_dynamics.sh",945,23,"log_checkpoint_interval",shellscript,selection_mouse +6684,12251021,"TERMINAL",0,0,"Step 276, loss: 3.6310362815856934, step 
time: 18.849611282348633ms\r\n",,terminal_output +6685,12251086,"TERMINAL",0,0,"Step 277, loss: 3.6603615283966064, step time: 22.022724151611328ms\r\n",,terminal_output +6686,12251151,"TERMINAL",0,0,"Step 278, loss: 3.504436731338501, step time: 19.84238624572754ms\r\n",,terminal_output +6687,12251217,"TERMINAL",0,0,"Step 279, loss: 3.5852715969085693, step time: 19.217252731323242ms\r\n",,terminal_output +6688,12251283,"TERMINAL",0,0,"Step 280, loss: 3.5407819747924805, step time: 18.570423126220703ms\r\n",,terminal_output +6689,12251349,"TERMINAL",0,0,"Step 281, loss: 3.504209280014038, step time: 18.39303970336914ms\r\n",,terminal_output +6690,12251409,"TERMINAL",0,0,"Step 282, loss: 3.571397542953491, step time: 18.94211769104004ms\r\n",,terminal_output +6691,12251474,"TERMINAL",0,0,"Step 283, loss: 3.49774432182312, step time: 18.280744552612305ms\r\n",,terminal_output +6692,12251571,"TERMINAL",0,0,"Step 284, loss: 3.889955520629883, step time: 18.407106399536133ms\r\n",,terminal_output +6693,12251631,"TERMINAL",0,0,"Step 285, loss: 3.465817928314209, step time: 18.743038177490234ms\r\n",,terminal_output +6694,12251693,"TERMINAL",0,0,"Step 286, loss: 3.526139974594116, step time: 18.840789794921875ms\r\n",,terminal_output +6695,12251745,"TERMINAL",0,0,"Step 287, loss: 3.482206344604492, step time: 18.728256225585938ms\r\n",,terminal_output +6696,12251852,"TERMINAL",0,0,"Step 288, loss: 3.512920379638672, step time: 20.604610443115234ms\r\nStep 289, loss: 3.5560529232025146, step time: 18.665790557861328ms\r\n",,terminal_output +6697,12251951,"TERMINAL",0,0,"Step 290, loss: 3.4611923694610596, step time: 18.462181091308594ms\r\n",,terminal_output +6698,12251997,"TERMINAL",0,0,"Step 291, loss: 3.452996015548706, step time: 18.776416778564453ms\r\n",,terminal_output +6699,12252089,"TERMINAL",0,0,"Step 292, loss: 3.509181261062622, step time: 20.91526985168457ms\r\n",,terminal_output +6700,12252194,"TERMINAL",0,0,"Step 293, loss: 3.552706718444824, step time: 18.84174346923828ms\r\nStep 294, loss: 3.493772029876709, step time: 19.372940063476562ms\r\n",,terminal_output +6701,12252254,"TERMINAL",0,0,"Step 295, loss: 3.4538769721984863, step time: 22.121667861938477ms\r\n",,terminal_output +6702,12252314,"TERMINAL",0,0,"Step 296, loss: 3.5759787559509277, step time: 19.96779441833496ms\r\n",,terminal_output +6703,12252375,"TERMINAL",0,0,"Step 297, loss: 3.4483602046966553, step time: 19.210100173950195ms\r\n",,terminal_output +6704,12252438,"TERMINAL",0,0,"Step 298, loss: 3.4428815841674805, step time: 20.59340476989746ms\r\n",,terminal_output +6705,12252534,"TERMINAL",0,0,"Step 299, loss: 3.425215482711792, step time: 19.30093765258789ms\r\n",,terminal_output +6706,12252597,"TERMINAL",0,0,"Step 300, loss: 3.4301774501800537, step time: 19.200563430786133ms\r\n",,terminal_output +6707,12252660,"TERMINAL",0,0,"Step 301, loss: 3.4646897315979004, step time: 18.686532974243164ms\r\n",,terminal_output +6708,12252703,"scripts_horeka/train_dynamics.sh",913,0,"",shellscript,selection_mouse +6709,12252787,"TERMINAL",0,0,"Step 302, loss: 3.4070348739624023, step time: 18.625497817993164ms\r\nStep 303, loss: 3.442068576812744, step time: 18.92852783203125ms\r\n",,terminal_output +6710,12252826,"TERMINAL",0,0,"Step 304, loss: 3.436889886856079, step time: 18.58830451965332ms\r\n",,terminal_output +6711,12252922,"TERMINAL",0,0,"Step 305, loss: 3.488862991333008, step time: 18.478870391845703ms\r\n",,terminal_output +6712,12252986,"TERMINAL",0,0,"Step 306, loss: 3.417104721069336, step time: 
19.20032501220703ms\r\n",,terminal_output +6713,12253048,"TERMINAL",0,0,"Step 307, loss: 3.4767608642578125, step time: 18.890857696533203ms\r\n",,terminal_output +6714,12253109,"TERMINAL",0,0,"Step 308, loss: 3.3802742958068848, step time: 18.506765365600586ms\r\n",,terminal_output +6715,12253169,"TERMINAL",0,0,"Step 309, loss: 3.4652369022369385, step time: 18.75019073486328ms\r\n",,terminal_output +6716,12253229,"TERMINAL",0,0,"Step 310, loss: 3.369961977005005, step time: 18.7685489654541ms\r\n",,terminal_output +6717,12253292,"TERMINAL",0,0,"Step 311, loss: 3.4029524326324463, step time: 19.15287971496582ms\r\n",,terminal_output +6718,12253354,"TERMINAL",0,0,"Step 312, loss: 3.367755651473999, step time: 18.828153610229492ms\r\n",,terminal_output +6719,12253416,"TERMINAL",0,0,"Step 313, loss: 3.4942476749420166, step time: 17.614364624023438ms\r\n",,terminal_output +6720,12253479,"TERMINAL",0,0,"Step 314, loss: 3.5948402881622314, step time: 18.147706985473633ms\r\n",,terminal_output +6721,12253540,"TERMINAL",0,0,"Step 315, loss: 3.440859317779541, step time: 18.656015396118164ms\r\n",,terminal_output +6722,12253658,"TERMINAL",0,0,"Step 316, loss: 3.4198668003082275, step time: 18.02349090576172ms\r\nStep 317, loss: 3.3863162994384766, step time: 18.05710792541504ms\r\n",,terminal_output +6723,12253721,"TERMINAL",0,0,"Step 318, loss: 3.4541306495666504, step time: 18.44048500061035ms\r\n",,terminal_output +6724,12253784,"TERMINAL",0,0,"Step 319, loss: 3.3767342567443848, step time: 17.28653907775879ms\r\n",,terminal_output +6725,12253847,"TERMINAL",0,0,"Step 320, loss: 3.648125171661377, step time: 17.270565032958984ms\r\n",,terminal_output +6726,12253912,"TERMINAL",0,0,"Step 321, loss: 3.415821075439453, step time: 18.412351608276367ms\r\n",,terminal_output +6727,12253976,"TERMINAL",0,0,"Step 322, loss: 3.373850107192993, step time: 17.985105514526367ms\r\n",,terminal_output +6728,12254039,"TERMINAL",0,0,"Step 323, loss: 3.356968641281128, step time: 18.075227737426758ms\r\n",,terminal_output +6729,12254103,"TERMINAL",0,0,"Step 324, loss: 3.360400915145874, step time: 18.560171127319336ms\r\n",,terminal_output +6730,12254167,"TERMINAL",0,0,"Step 325, loss: 3.3596296310424805, step time: 18.013954162597656ms\r\n",,terminal_output +6731,12254231,"TERMINAL",0,0,"Step 326, loss: 3.728973627090454, step time: 18.141746520996094ms\r\n",,terminal_output +6732,12254325,"TERMINAL",0,0,"Step 327, loss: 3.346759796142578, step time: 18.57161521911621ms\r\n",,terminal_output +6733,12254377,"TERMINAL",0,0,"Step 328, loss: 3.3277158737182617, step time: 18.186330795288086ms\r\n",,terminal_output +6734,12254471,"TERMINAL",0,0,"Step 329, loss: 3.4312984943389893, step time: 17.668724060058594ms\r\n",,terminal_output +6735,12254521,"TERMINAL",0,0,"Step 330, loss: 3.539376974105835, step time: 18.219947814941406ms\r\n",,terminal_output +6736,12254572,"TERMINAL",0,0,"Step 331, loss: 3.3344566822052, step time: 17.85135269165039ms\r\n",,terminal_output +6737,12254679,"TERMINAL",0,0,"Step 332, loss: 3.351396322250366, step time: 17.832517623901367ms\r\nStep 333, loss: 3.3551554679870605, step time: 18.5391902923584ms\r\n",,terminal_output +6738,12254742,"TERMINAL",0,0,"Step 334, loss: 3.3536300659179688, step time: 17.925739288330078ms\r\n",,terminal_output +6739,12254806,"TERMINAL",0,0,"Step 335, loss: 3.3769659996032715, step time: 17.9901123046875ms\r\n",,terminal_output +6740,12254868,"TERMINAL",0,0,"Step 336, loss: 3.3206076622009277, step time: 18.059253692626953ms\r\n",,terminal_output 
+6741,12254932,"TERMINAL",0,0,"Step 337, loss: 3.4676432609558105, step time: 17.817258834838867ms\r\n",,terminal_output +6742,12254994,"TERMINAL",0,0,"Step 338, loss: 3.3243470191955566, step time: 17.740488052368164ms\r\n",,terminal_output +6743,12255061,"TERMINAL",0,0,"Step 339, loss: 3.356837272644043, step time: 21.143674850463867ms\r\n",,terminal_output +6744,12255154,"TERMINAL",0,0,"Step 340, loss: 3.4476349353790283, step time: 18.162965774536133ms\r\n",,terminal_output +6745,12255205,"TERMINAL",0,0,"Step 341, loss: 3.3518078327178955, step time: 17.865419387817383ms\r\n",,terminal_output +6746,12255301,"TERMINAL",0,0,"Step 342, loss: 3.3178017139434814, step time: 17.79794692993164ms\r\n",,terminal_output +6747,12255354,"TERMINAL",0,0,"Step 343, loss: 3.332786798477173, step time: 18.011808395385742ms\r\n",,terminal_output +6748,12255406,"TERMINAL",0,0,"Step 344, loss: 3.319492816925049, step time: 18.073081970214844ms\r\n",,terminal_output +6749,12255514,"TERMINAL",0,0,"Step 345, loss: 3.3729593753814697, step time: 18.673419952392578ms\r\nStep 346, loss: 3.4646618366241455, step time: 18.03445816040039ms\r\n",,terminal_output +6750,12255700,"TERMINAL",0,0,"Step 347, loss: 3.2904350757598877, step time: 18.280506134033203ms\r\nStep 348, loss: 3.3559060096740723, step time: 18.74566078186035ms\r\nStep 349, loss: 3.336353302001953, step time: 18.004894256591797ms\r\n",,terminal_output +6751,12255803,"TERMINAL",0,0,"Step 350, loss: 3.3141019344329834, step time: 17.840862274169922ms\r\n",,terminal_output +6752,12255863,"TERMINAL",0,0,"Step 351, loss: 3.302981376647949, step time: 18.254756927490234ms\r\n",,terminal_output +6753,12255969,"TERMINAL",0,0,"Step 352, loss: 3.3007748126983643, step time: 18.069744110107422ms\r\nStep 353, loss: 3.330817699432373, step time: 18.0966854095459ms\r\n",,terminal_output +6754,12256382,"TERMINAL",0,0,"Step 354, loss: 3.2811827659606934, step time: 337.7065658569336ms\r\nStep 355, loss: 3.463655948638916, step time: 25.124073028564453ms\r\n",,terminal_output +6755,12256445,"TERMINAL",0,0,"Step 356, loss: 3.377342700958252, step time: 21.042585372924805ms\r\n",,terminal_output +6756,12256508,"TERMINAL",0,0,"Step 357, loss: 3.2811567783355713, step time: 18.832921981811523ms\r\n",,terminal_output +6757,12256570,"TERMINAL",0,0,"Step 358, loss: 3.3851022720336914, step time: 17.95196533203125ms\r\n",,terminal_output +6758,12256631,"TERMINAL",0,0,"Step 359, loss: 3.321389675140381, step time: 17.75074005126953ms\r\n",,terminal_output +6759,12256694,"TERMINAL",0,0,"Step 360, loss: 3.3719749450683594, step time: 18.1429386138916ms\r\n",,terminal_output +6760,12256749,"TERMINAL",0,0,"Step 361, loss: 3.2804746627807617, step time: 17.741918563842773ms\r\n",,terminal_output +6761,12256856,"TERMINAL",0,0,"Step 362, loss: 3.2693567276000977, step time: 28.49721908569336ms\r\n",,terminal_output +6762,12256916,"TERMINAL",0,0,"Step 363, loss: 3.2842116355895996, step time: 18.983840942382812ms\r\n",,terminal_output +6763,12256979,"TERMINAL",0,0,"Step 364, loss: 3.2612898349761963, step time: 17.42410659790039ms\r\n",,terminal_output +6764,12257038,"TERMINAL",0,0,"Step 365, loss: 3.2614119052886963, step time: 17.58098602294922ms\r\n",,terminal_output +6765,12257102,"TERMINAL",0,0,"Step 366, loss: 3.2903735637664795, step time: 17.989635467529297ms\r\n",,terminal_output +6766,12257164,"TERMINAL",0,0,"Step 367, loss: 3.30450701713562, step time: 17.70472526550293ms\r\n",,terminal_output +6767,12257225,"TERMINAL",0,0,"Step 368, loss: 3.252113103866577, step time: 
17.23313331604004ms\r\n",,terminal_output +6768,12257285,"TERMINAL",0,0,"Step 369, loss: 3.259033441543579, step time: 17.67277717590332ms\r\n",,terminal_output +6769,12257347,"TERMINAL",0,0,"Step 370, loss: 3.6581931114196777, step time: 17.508745193481445ms\r\n",,terminal_output +6770,12257407,"TERMINAL",0,0,"Step 371, loss: 3.2383453845977783, step time: 17.589807510375977ms\r\n",,terminal_output +6771,12257470,"TERMINAL",0,0,"Step 372, loss: 3.3340394496917725, step time: 17.859697341918945ms\r\n",,terminal_output +6772,12257531,"TERMINAL",0,0,"Step 373, loss: 3.3701372146606445, step time: 17.520427703857422ms\r\n",,terminal_output +6773,12257656,"TERMINAL",0,0,"Step 374, loss: 3.5484814643859863, step time: 17.56429672241211ms\r\nStep 375, loss: 3.5907063484191895, step time: 18.079280853271484ms\r\n",,terminal_output +6774,12257721,"TERMINAL",0,0,"Step 376, loss: 3.3712801933288574, step time: 18.25881004333496ms\r\n",,terminal_output +6775,12257778,"TERMINAL",0,0,"Step 377, loss: 3.3053512573242188, step time: 17.73381233215332ms\r\n",,terminal_output +6776,12257875,"TERMINAL",0,0,"Step 378, loss: 3.286297559738159, step time: 17.960071563720703ms\r\n",,terminal_output +6777,12257927,"TERMINAL",0,0,"Step 379, loss: 3.273937940597534, step time: 17.154693603515625ms\r\n",,terminal_output +6778,12258032,"TERMINAL",0,0,"Step 380, loss: 3.2690999507904053, step time: 17.844200134277344ms\r\nStep 381, loss: 3.3985161781311035, step time: 18.09072494506836ms\r\n",,terminal_output +6779,12258093,"TERMINAL",0,0,"Step 382, loss: 3.266808032989502, step time: 17.17090606689453ms\r\n",,terminal_output +6780,12258188,"TERMINAL",0,0,"Step 383, loss: 3.4756929874420166, step time: 17.652273178100586ms\r\n",,terminal_output +6781,12258241,"TERMINAL",0,0,"Step 384, loss: 3.2744333744049072, step time: 18.02825927734375ms\r\n",,terminal_output +6782,12258291,"TERMINAL",0,0,"Step 385, loss: 3.2612380981445312, step time: 18.76211166381836ms\r\n",,terminal_output +6783,12258436,"TERMINAL",0,0,"Step 386, loss: 3.256757974624634, step time: 18.093347549438477ms\r\nStep 387, loss: 3.437058210372925, step time: 18.50271224975586ms\r\n",,terminal_output +6784,12258540,"TERMINAL",0,0,"Step 388, loss: 3.252890110015869, step time: 17.490863800048828ms\r\nStep 389, loss: 3.288691759109497, step time: 17.859697341918945ms\r\n",,terminal_output +6785,12258661,"TERMINAL",0,0,"Step 390, loss: 3.3028173446655273, step time: 18.227338790893555ms\r\nStep 391, loss: 3.213207244873047, step time: 17.644643783569336ms\r\n",,terminal_output +6786,12258724,"TERMINAL",0,0,"Step 392, loss: 3.2382731437683105, step time: 17.097949981689453ms\r\n",,terminal_output +6787,12258786,"TERMINAL",0,0,"Step 393, loss: 3.2399826049804688, step time: 18.027782440185547ms\r\n",,terminal_output +6788,12258886,"TERMINAL",0,0,"Step 394, loss: 3.222968339920044, step time: 17.74907112121582ms\r\n",,terminal_output +6789,12258947,"TERMINAL",0,0,"Step 395, loss: 3.3453609943389893, step time: 18.13483238220215ms\r\n",,terminal_output +6790,12259008,"TERMINAL",0,0,"Step 396, loss: 3.2235519886016846, step time: 18.334627151489258ms\r\n",,terminal_output +6791,12259069,"TERMINAL",0,0,"Step 397, loss: 3.2235870361328125, step time: 17.773866653442383ms\r\n",,terminal_output +6792,12259131,"TERMINAL",0,0,"Step 398, loss: 3.215395450592041, step time: 17.89069175720215ms\r\n",,terminal_output +6793,12259192,"TERMINAL",0,0,"Step 399, loss: 3.2031373977661133, step time: 18.465042114257812ms\r\n",,terminal_output 
+6794,12259254,"TERMINAL",0,0,"Step 400, loss: 3.196845054626465, step time: 17.383575439453125ms\r\n",,terminal_output +6795,12259316,"TERMINAL",0,0,"Step 401, loss: 3.1847753524780273, step time: 17.559051513671875ms\r\n",,terminal_output +6796,12259380,"TERMINAL",0,0,"Step 402, loss: 3.2079315185546875, step time: 20.896196365356445ms\r\n",,terminal_output +6797,12259441,"TERMINAL",0,0,"Step 403, loss: 3.2365663051605225, step time: 18.071651458740234ms\r\n",,terminal_output +6798,12259553,"TERMINAL",0,0,"Step 404, loss: 3.1928024291992188, step time: 17.530202865600586ms\r\nStep 405, loss: 3.2760848999023438, step time: 18.027544021606445ms\r\n",,terminal_output +6799,12259618,"TERMINAL",0,0,"Step 406, loss: 3.313450336456299, step time: 17.874717712402344ms\r\n",,terminal_output +6800,12259679,"TERMINAL",0,0,"Step 407, loss: 3.2058942317962646, step time: 17.888307571411133ms\r\n",,terminal_output +6801,12259740,"TERMINAL",0,0,"Step 408, loss: 3.2162258625030518, step time: 17.893314361572266ms\r\n",,terminal_output +6802,12259803,"TERMINAL",0,0,"Step 409, loss: 3.211693525314331, step time: 18.003463745117188ms\r\n",,terminal_output +6803,12259914,"TERMINAL",0,0,"Step 410, loss: 3.1852011680603027, step time: 18.956899642944336ms\r\n",,terminal_output +6804,12259975,"TERMINAL",0,0,"Step 411, loss: 3.4410009384155273, step time: 18.873929977416992ms\r\n",,terminal_output +6805,12260035,"TERMINAL",0,0,"Step 412, loss: 3.1831743717193604, step time: 17.61174201965332ms\r\n",,terminal_output +6806,12260096,"TERMINAL",0,0,"Step 413, loss: 3.5752882957458496, step time: 17.8377628326416ms\r\n",,terminal_output +6807,12260208,"TERMINAL",0,0,"Step 414, loss: 3.1764707565307617, step time: 18.10598373413086ms\r\nStep 415, loss: 3.175825595855713, step time: 17.68636703491211ms\r\n",,terminal_output +6808,12260275,"TERMINAL",0,0,"Step 416, loss: 3.1943559646606445, step time: 17.52305030822754ms\r\n",,terminal_output +6809,12260337,"TERMINAL",0,0,"Step 417, loss: 3.35471510887146, step time: 18.073320388793945ms\r\n",,terminal_output +6810,12260402,"TERMINAL",0,0,"Step 418, loss: 3.296203851699829, step time: 17.47417449951172ms\r\n",,terminal_output +6811,12260498,"TERMINAL",0,0,"Step 419, loss: 3.171919345855713, step time: 17.713069915771484ms\r\n",,terminal_output +6812,12260603,"TERMINAL",0,0,"Step 420, loss: 3.1777353286743164, step time: 18.258094787597656ms\r\nStep 421, loss: 3.236844539642334, step time: 18.28312873840332ms\r\n",,terminal_output +6813,12260720,"TERMINAL",0,0,"Step 422, loss: 3.241393566131592, step time: 18.321990966796875ms\r\nStep 423, loss: 3.288106679916382, step time: 18.193960189819336ms\r\n",,terminal_output +6814,12260781,"TERMINAL",0,0,"Step 424, loss: 3.2271299362182617, step time: 18.126726150512695ms\r\n",,terminal_output +6815,12260848,"TERMINAL",0,0,"Step 425, loss: 3.185241222381592, step time: 17.958879470825195ms\r\n",,terminal_output +6816,12260911,"TERMINAL",0,0,"Step 426, loss: 3.27419114112854, step time: 28.565645217895508ms\r\n",,terminal_output +6817,12261005,"TERMINAL",0,0,"Step 427, loss: 3.2017879486083984, step time: 21.469831466674805ms\r\n",,terminal_output +6818,12261059,"TERMINAL",0,0,"Step 428, loss: 3.1768035888671875, step time: 18.259763717651367ms\r\n",,terminal_output +6819,12261156,"TERMINAL",0,0,"Step 429, loss: 3.1634600162506104, step time: 18.33653450012207ms\r\n",,terminal_output +6820,12261229,"TERMINAL",0,0,"Step 430, loss: 3.1438727378845215, step time: 18.413066864013672ms\r\nStep 431, loss: 3.1524293422698975, step 
time: 17.78888702392578ms\r\n",,terminal_output +6821,12261293,"TERMINAL",0,0,"Step 432, loss: 3.150831460952759, step time: 18.86725425720215ms\r\n",,terminal_output +6822,12261419,"TERMINAL",0,0,"Step 433, loss: 3.191488742828369, step time: 18.042325973510742ms\r\nStep 434, loss: 3.1359474658966064, step time: 17.838716506958008ms\r\n",,terminal_output +6823,12261527,"TERMINAL",0,0,"Step 435, loss: 3.1445977687835693, step time: 18.604516983032227ms\r\n",,terminal_output +6824,12261578,"TERMINAL",0,0,"Step 436, loss: 3.26412296295166, step time: 18.39447021484375ms\r\n",,terminal_output +6825,12261697,"TERMINAL",0,0,"Step 437, loss: 3.1487393379211426, step time: 17.817974090576172ms\r\nStep 438, loss: 3.1482865810394287, step time: 23.57029914855957ms\r\n",,terminal_output +6826,12261750,"TERMINAL",0,0,"Step 439, loss: 3.146181344985962, step time: 23.94413948059082ms\r\n",,terminal_output +6827,12261842,"TERMINAL",0,0,"Step 440, loss: 3.58016300201416, step time: 24.881362915039062ms\r\n",,terminal_output +6828,12261894,"TERMINAL",0,0,"Step 441, loss: 3.122772693634033, step time: 25.4518985748291ms\r\n",,terminal_output +6829,12261987,"TERMINAL",0,0,"Step 442, loss: 3.138759136199951, step time: 25.64859390258789ms\r\n",,terminal_output +6830,12262092,"TERMINAL",0,0,"Step 443, loss: 3.406296730041504, step time: 24.318695068359375ms\r\nStep 444, loss: 3.159327745437622, step time: 25.929927825927734ms\r\n",,terminal_output +6831,12262186,"TERMINAL",0,0,"Step 445, loss: 3.1216824054718018, step time: 26.216745376586914ms\r\n",,terminal_output +6832,12262237,"TERMINAL",0,0,"Step 446, loss: 3.140852928161621, step time: 25.335311889648438ms\r\n",,terminal_output +6833,12262344,"TERMINAL",0,0,"Step 447, loss: 3.1065356731414795, step time: 20.534992218017578ms\r\nStep 448, loss: 3.127049446105957, step time: 18.134117126464844ms\r\n",,terminal_output +6834,12262437,"TERMINAL",0,0,"Step 449, loss: 3.1036417484283447, step time: 20.14303207397461ms\r\n",,terminal_output +6835,12262488,"TERMINAL",0,0,"Step 450, loss: 3.0895698070526123, step time: 19.26898956298828ms\r\n",,terminal_output +6836,12262593,"TERMINAL",0,0,"Step 451, loss: 3.1933610439300537, step time: 17.769575119018555ms\r\nStep 452, loss: 3.0842020511627197, step time: 17.571449279785156ms\r\n",,terminal_output +6837,12262658,"TERMINAL",0,0,"Step 453, loss: 3.112292766571045, step time: 18.222332000732422ms\r\n",,terminal_output +6838,12262720,"TERMINAL",0,0,"Step 454, loss: 3.189051628112793, step time: 17.322063446044922ms\r\n",,terminal_output +6839,12262847,"TERMINAL",0,0,"Step 455, loss: 3.112288475036621, step time: 18.55754852294922ms\r\nStep 456, loss: 3.161353349685669, step time: 18.27549934387207ms\r\n",,terminal_output +6840,12262973,"TERMINAL",0,0,"Step 457, loss: 3.2054033279418945, step time: 18.0056095123291ms\r\nStep 458, loss: 3.114917039871216, step time: 17.64845848083496ms\r\n",,terminal_output +6841,12263037,"TERMINAL",0,0,"Step 459, loss: 3.0859386920928955, step time: 19.08278465270996ms\r\n",,terminal_output +6842,12263134,"TERMINAL",0,0,"Step 460, loss: 3.0715293884277344, step time: 18.451690673828125ms\r\n",,terminal_output +6843,12263185,"TERMINAL",0,0,"Step 461, loss: 3.1340417861938477, step time: 18.350839614868164ms\r\n",,terminal_output +6844,12263292,"TERMINAL",0,0,"Step 462, loss: 3.0900418758392334, step time: 18.955230712890625ms\r\nStep 463, loss: 3.076301336288452, step time: 19.100427627563477ms\r\n",,terminal_output +6845,12263423,"TERMINAL",0,0,"Step 464, loss: 3.0787808895111084, 
step time: 18.6769962310791ms\r\nStep 465, loss: 3.148822069168091, step time: 19.170045852661133ms\r\n",,terminal_output +6846,12263487,"TERMINAL",0,0,"Step 466, loss: 3.0662643909454346, step time: 18.514156341552734ms\r\n",,terminal_output +6847,12263549,"TERMINAL",0,0,"Step 467, loss: 3.0729784965515137, step time: 18.395185470581055ms\r\n",,terminal_output +6848,12263647,"TERMINAL",0,0,"Step 468, loss: 3.104776382446289, step time: 19.718408584594727ms\r\n",,terminal_output +6849,12263697,"TERMINAL",0,0,"Step 469, loss: 3.11262583732605, step time: 18.270492553710938ms\r\n",,terminal_output +6850,12263749,"TERMINAL",0,0,"Step 470, loss: 3.070051431655884, step time: 18.174171447753906ms\r\n",,terminal_output +6851,12263843,"TERMINAL",0,0,"Step 471, loss: 3.0293986797332764, step time: 19.025087356567383ms\r\n",,terminal_output +6852,12263949,"TERMINAL",0,0,"Step 472, loss: 3.089268684387207, step time: 18.07689666748047ms\r\nStep 473, loss: 3.0556912422180176, step time: 18.17488670349121ms\r\n",,terminal_output +6853,12264060,"TERMINAL",0,0,"Step 474, loss: 3.042151927947998, step time: 18.373489379882812ms\r\nStep 475, loss: 3.298755645751953, step time: 18.36252212524414ms\r\n",,terminal_output +6854,12264128,"TERMINAL",0,0,"Step 476, loss: 3.1154487133026123, step time: 18.05853843688965ms\r\n",,terminal_output +6855,12264190,"TERMINAL",0,0,"Step 477, loss: 3.0620052814483643, step time: 20.627498626708984ms\r\n",,terminal_output +6856,12264254,"TERMINAL",0,0,"Step 478, loss: 3.148881196975708, step time: 17.853975296020508ms\r\n",,terminal_output +6857,12264351,"TERMINAL",0,0,"Step 479, loss: 3.1711878776550293, step time: 17.975568771362305ms\r\n",,terminal_output +6858,12264418,"TERMINAL",0,0,"Step 480, loss: 3.0572116374969482, step time: 18.040895462036133ms\r\n",,terminal_output +6859,12264484,"TERMINAL",0,0,"Step 481, loss: 3.185419797897339, step time: 18.07093620300293ms\r\n",,terminal_output +6860,12264545,"TERMINAL",0,0,"Step 482, loss: 3.110973358154297, step time: 17.640352249145508ms\r\n",,terminal_output +6861,12264608,"TERMINAL",0,0,"Step 483, loss: 3.060779571533203, step time: 18.076419830322266ms\r\n",,terminal_output +6862,12264668,"TERMINAL",0,0,"Step 484, loss: 3.064950942993164, step time: 17.459869384765625ms\r\n",,terminal_output +6863,12264730,"TERMINAL",0,0,"Step 485, loss: 3.1429238319396973, step time: 18.308639526367188ms\r\n",,terminal_output +6864,12265075,"TERMINAL",0,0,"Step 486, loss: 3.0922796726226807, step time: 310.12940406799316ms\r\nStep 487, loss: 3.0529398918151855, step time: 25.559663772583008ms\r\n",,terminal_output +6865,12265173,"TERMINAL",0,0,"Step 488, loss: 3.0570590496063232, step time: 20.290374755859375ms\r\n",,terminal_output +6866,12265224,"TERMINAL",0,0,"Step 489, loss: 3.0746161937713623, step time: 19.026756286621094ms\r\n",,terminal_output +6867,12265317,"TERMINAL",0,0,"Step 490, loss: 3.147012710571289, step time: 18.282413482666016ms\r\n",,terminal_output +6868,12265367,"TERMINAL",0,0,"Step 491, loss: 3.027697801589966, step time: 19.145727157592773ms\r\n",,terminal_output +6869,12265470,"TERMINAL",0,0,"Step 492, loss: 3.028930425643921, step time: 18.640518188476562ms\r\nStep 493, loss: 3.0087859630584717, step time: 17.766237258911133ms\r\n",,terminal_output +6870,12265562,"TERMINAL",0,0,"Step 494, loss: 3.0318009853363037, step time: 18.144607543945312ms\r\n",,terminal_output +6871,12265613,"TERMINAL",0,0,"Step 495, loss: 3.033616542816162, step time: 18.97597312927246ms\r\n",,terminal_output 
+6872,12265718,"TERMINAL",0,0,"Step 496, loss: 3.0249228477478027, step time: 18.04065704345703ms\r\nStep 497, loss: 3.071646213531494, step time: 17.969608306884766ms\r\n",,terminal_output +6873,12265783,"TERMINAL",0,0,"Step 498, loss: 3.036263942718506, step time: 19.15121078491211ms\r\n",,terminal_output +6874,12265843,"TERMINAL",0,0,"Step 499, loss: 3.0345990657806396, step time: 17.699241638183594ms\r\n",,terminal_output +6875,12269183,"TERMINAL",0,0,"Step 500, loss: 2.9986605644226074, step time: 25.433063507080078ms\r\n",,terminal_output +6876,12269294,"TERMINAL",0,0,"Step 501, loss: 3.028377056121826, step time: 25.497913360595703ms\r\n",,terminal_output +6877,12269347,"TERMINAL",0,0,"Step 502, loss: 3.098773956298828, step time: 21.263837814331055ms\r\n",,terminal_output +6878,12269454,"TERMINAL",0,0,"Step 503, loss: 3.299412250518799, step time: 19.170284271240234ms\r\nStep 504, loss: 3.013336658477783, step time: 19.068479537963867ms\r\n",,terminal_output +6879,12269517,"TERMINAL",0,0,"Step 505, loss: 3.038120746612549, step time: 18.827438354492188ms\r\n",,terminal_output +6880,12269580,"TERMINAL",0,0,"Step 506, loss: 2.98600697517395, step time: 18.75925064086914ms\r\n",,terminal_output +6881,12269644,"TERMINAL",0,0,"Step 507, loss: 3.046041488647461, step time: 18.6154842376709ms\r\n",,terminal_output +6882,12269713,"TERMINAL",0,0,"Step 508, loss: 3.0407369136810303, step time: 17.523765563964844ms\r\n",,terminal_output +6883,12269862,"TERMINAL",0,0,"Step 509, loss: 3.136046886444092, step time: 17.479658126831055ms\r\nStep 510, loss: 3.0162439346313477, step time: 17.422914505004883ms\r\n",,terminal_output +6884,12269934,"TERMINAL",0,0,"Step 511, loss: 3.0154497623443604, step time: 17.398595809936523ms\r\n",,terminal_output +6885,12269987,"TERMINAL",0,0,"Step 512, loss: 2.976616621017456, step time: 17.580032348632812ms\r\n",,terminal_output +6886,12270095,"TERMINAL",0,0,"Step 513, loss: 2.973191738128662, step time: 17.27914810180664ms\r\nStep 514, loss: 2.9911842346191406, step time: 20.382165908813477ms\r\n",,terminal_output +6887,12270189,"TERMINAL",0,0,"Step 515, loss: 3.2098731994628906, step time: 17.379045486450195ms\r\n",,terminal_output +6888,12270241,"TERMINAL",0,0,"Step 516, loss: 2.9793436527252197, step time: 17.379045486450195ms\r\n",,terminal_output +6889,12270347,"TERMINAL",0,0,"Step 517, loss: 3.3094584941864014, step time: 17.53401756286621ms\r\nStep 518, loss: 3.0397274494171143, step time: 17.158985137939453ms\r\n",,terminal_output +6890,12270447,"TERMINAL",0,0,"Step 519, loss: 2.967381238937378, step time: 17.327547073364258ms\r\n",,terminal_output +6891,12270552,"TERMINAL",0,0,"Step 520, loss: 3.014528751373291, step time: 17.194747924804688ms\r\nStep 521, loss: 2.9791183471679688, step time: 16.83211326599121ms\r\n",,terminal_output +6892,12270617,"TERMINAL",0,0,"Step 522, loss: 2.9877264499664307, step time: 17.207860946655273ms\r\n",,terminal_output +6893,12270674,"TERMINAL",0,0,"Step 523, loss: 2.9884214401245117, step time: 17.15373992919922ms\r\n",,terminal_output +6894,12270781,"TERMINAL",0,0,"Step 524, loss: 3.0101232528686523, step time: 17.051219940185547ms\r\nStep 525, loss: 3.1280570030212402, step time: 17.116785049438477ms\r\n",,terminal_output +6895,12270849,"TERMINAL",0,0,"Step 526, loss: 3.1464226245880127, step time: 17.226457595825195ms\r\n",,terminal_output +6896,12270909,"TERMINAL",0,0,"Step 527, loss: 2.9844024181365967, step time: 17.219066619873047ms\r\n",,terminal_output +6897,12270969,"TERMINAL",0,0,"Step 528, loss: 
2.9696829319000244, step time: 17.119646072387695ms\r\n",,terminal_output +6898,12271032,"TERMINAL",0,0,"Step 529, loss: 2.9928879737854004, step time: 17.485380172729492ms\r\n",,terminal_output +6899,12271097,"TERMINAL",0,0,"Step 530, loss: 2.986985921859741, step time: 17.337799072265625ms\r\n",,terminal_output +6900,12271192,"TERMINAL",0,0,"Step 531, loss: 2.9829349517822266, step time: 16.9370174407959ms\r\n",,terminal_output +6901,12271245,"TERMINAL",0,0,"Step 532, loss: 3.0166027545928955, step time: 17.10200309753418ms\r\n",,terminal_output +6902,12271351,"TERMINAL",0,0,"Step 533, loss: 2.964179039001465, step time: 17.208337783813477ms\r\nStep 534, loss: 2.9395203590393066, step time: 17.109155654907227ms\r\n",,terminal_output +6903,12271445,"TERMINAL",0,0,"Step 535, loss: 3.0878443717956543, step time: 17.566919326782227ms\r\n",,terminal_output +6904,12271497,"TERMINAL",0,0,"Step 536, loss: 2.9492671489715576, step time: 17.221689224243164ms\r\n",,terminal_output +6905,12271603,"TERMINAL",0,0,"Step 537, loss: 2.957651376724243, step time: 17.205238342285156ms\r\nStep 538, loss: 2.9557924270629883, step time: 17.201662063598633ms\r\n",,terminal_output +6906,12271665,"TERMINAL",0,0,"Step 539, loss: 2.950427293777466, step time: 17.107725143432617ms\r\n",,terminal_output +6907,12271732,"TERMINAL",0,0,"Step 540, loss: 2.9402236938476562, step time: 17.190217971801758ms\r\n",,terminal_output +6908,12271853,"TERMINAL",0,0,"Step 541, loss: 2.9675168991088867, step time: 17.328739166259766ms\r\nStep 542, loss: 2.9378578662872314, step time: 17.028093338012695ms\r\n",,terminal_output +6909,12271955,"TERMINAL",0,0,"Step 543, loss: 2.9613568782806396, step time: 17.33255386352539ms\r\n",,terminal_output +6910,12272010,"TERMINAL",0,0,"Step 544, loss: 2.936619520187378, step time: 17.328262329101562ms\r\n",,terminal_output +6911,12272116,"TERMINAL",0,0,"Step 545, loss: 2.9290311336517334, step time: 17.184972763061523ms\r\nStep 546, loss: 2.9320762157440186, step time: 17.318010330200195ms\r\n",,terminal_output +6912,12272178,"TERMINAL",0,0,"Step 547, loss: 3.138371706008911, step time: 18.201828002929688ms\r\n",,terminal_output +6913,12272240,"TERMINAL",0,0,"Step 548, loss: 2.9305927753448486, step time: 17.354488372802734ms\r\n",,terminal_output +6914,12272303,"TERMINAL",0,0,"Step 549, loss: 3.0113375186920166, step time: 16.93129539489746ms\r\n",,terminal_output +6915,12272366,"TERMINAL",0,0,"Step 550, loss: 3.11002779006958, step time: 17.21358299255371ms\r\n",,terminal_output +6916,12272430,"TERMINAL",0,0,"Step 551, loss: 2.911191940307617, step time: 17.138242721557617ms\r\n",,terminal_output +6917,12272493,"TERMINAL",0,0,"Step 552, loss: 2.9688680171966553, step time: 17.32349395751953ms\r\n",,terminal_output +6918,12272816,"TERMINAL",0,0,"Step 553, loss: 2.9054689407348633, step time: 326.76243782043457ms\r\n",,terminal_output +6919,12272883,"TERMINAL",0,0,"Step 554, loss: 3.0413553714752197, step time: 24.40810203552246ms\r\n",,terminal_output +6920,12272950,"TERMINAL",0,0,"Step 555, loss: 2.9008960723876953, step time: 19.39868927001953ms\r\n",,terminal_output +6921,12273013,"TERMINAL",0,0,"Step 556, loss: 2.9501802921295166, step time: 17.93670654296875ms\r\n",,terminal_output +6922,12273076,"TERMINAL",0,0,"Step 557, loss: 2.929795742034912, step time: 17.426013946533203ms\r\n",,terminal_output +6923,12273147,"TERMINAL",0,0,"Step 558, loss: 2.941316843032837, step time: 17.7156925201416ms\r\n",,terminal_output +6924,12273213,"TERMINAL",0,0,"Step 559, loss: 2.948000431060791, step 
time: 18.451929092407227ms\r\n",,terminal_output +6925,12273302,"TERMINAL",0,0,"Step 560, loss: 2.890090227127075, step time: 17.473936080932617ms\r\n",,terminal_output +6926,12273409,"TERMINAL",0,0,"Step 561, loss: 2.9781599044799805, step time: 17.590045928955078ms\r\nStep 562, loss: 2.9079508781433105, step time: 18.326520919799805ms\r\n",,terminal_output +6927,12273518,"TERMINAL",0,0,"Step 563, loss: 2.9205219745635986, step time: 17.472505569458008ms\r\nStep 564, loss: 2.925220251083374, step time: 17.802953720092773ms\r\n",,terminal_output +6928,12273651,"TERMINAL",0,0,"Step 565, loss: 2.899961233139038, step time: 17.65131950378418ms\r\nStep 566, loss: 2.895134687423706, step time: 17.302989959716797ms\r\n",,terminal_output +6929,12273723,"TERMINAL",0,0,"Step 567, loss: 2.880694627761841, step time: 17.343759536743164ms\r\n",,terminal_output +6930,12273781,"TERMINAL",0,0,"Step 568, loss: 2.921663284301758, step time: 17.60721206665039ms\r\n",,terminal_output +6931,12273851,"TERMINAL",0,0,"Step 569, loss: 2.8964242935180664, step time: 17.0896053314209ms\r\n",,terminal_output +6932,12273961,"TERMINAL",0,0,"Step 570, loss: 2.9331884384155273, step time: 17.342567443847656ms\r\nStep 571, loss: 2.8985934257507324, step time: 17.345905303955078ms\r\n",,terminal_output +6933,12274026,"TERMINAL",0,0,"Step 572, loss: 2.885720729827881, step time: 17.055749893188477ms\r\n",,terminal_output +6934,12274089,"TERMINAL",0,0,"Step 573, loss: 2.9928457736968994, step time: 16.92032814025879ms\r\n",,terminal_output +6935,12274155,"TERMINAL",0,0,"Step 574, loss: 2.877833843231201, step time: 17.182350158691406ms\r\n",,terminal_output +6936,12274215,"TERMINAL",0,0,"Step 575, loss: 2.865748405456543, step time: 17.099618911743164ms\r\n",,terminal_output +6937,12274312,"TERMINAL",0,0,"Step 576, loss: 3.1725893020629883, step time: 17.086029052734375ms\r\n",,terminal_output +6938,12274371,"TERMINAL",0,0,"Step 577, loss: 2.879155158996582, step time: 17.58098602294922ms\r\n",,terminal_output +6939,12274477,"TERMINAL",0,0,"Step 578, loss: 2.9551401138305664, step time: 17.08054542541504ms\r\nStep 579, loss: 2.86299467086792, step time: 17.262697219848633ms\r\n",,terminal_output +6940,12274540,"TERMINAL",0,0,"Step 580, loss: 2.856186628341675, step time: 17.409801483154297ms\r\n",,terminal_output +6941,12274606,"TERMINAL",0,0,"Step 581, loss: 3.0163040161132812, step time: 17.515897750854492ms\r\n",,terminal_output +6942,12274669,"TERMINAL",0,0,"Step 582, loss: 2.8761701583862305, step time: 17.46511459350586ms\r\n",,terminal_output +6943,12274732,"TERMINAL",0,0,"Step 583, loss: 2.870033025741577, step time: 17.64059066772461ms\r\n",,terminal_output +6944,12274786,"TERMINAL",0,0,"Step 584, loss: 2.880959987640381, step time: 17.503976821899414ms\r\n",,terminal_output +6945,12274882,"TERMINAL",0,0,"Step 585, loss: 2.9847609996795654, step time: 17.245769500732422ms\r\n",,terminal_output +6946,12274935,"TERMINAL",0,0,"Step 586, loss: 2.8802740573883057, step time: 17.529964447021484ms\r\n",,terminal_output +6947,12275041,"TERMINAL",0,0,"Step 587, loss: 2.8706459999084473, step time: 17.41313934326172ms\r\nStep 588, loss: 2.866168260574341, step time: 18.96071434020996ms\r\n",,terminal_output +6948,12275107,"TERMINAL",0,0,"Step 589, loss: 2.9526445865631104, step time: 18.781661987304688ms\r\n",,terminal_output +6949,12275168,"TERMINAL",0,0,"Step 590, loss: 2.8907570838928223, step time: 17.128944396972656ms\r\n",,terminal_output +6950,12275235,"TERMINAL",0,0,"Step 591, loss: 2.8488609790802, step time: 
17.360448837280273ms\r\n",,terminal_output +6951,12275294,"TERMINAL",0,0,"Step 592, loss: 2.8595516681671143, step time: 17.405033111572266ms\r\n",,terminal_output +6952,12275364,"TERMINAL",0,0,"Step 593, loss: 2.906439781188965, step time: 17.16303825378418ms\r\n",,terminal_output +6953,12275429,"TERMINAL",0,0,"Step 594, loss: 3.0390448570251465, step time: 17.150163650512695ms\r\n",,terminal_output +6954,12275545,"TERMINAL",0,0,"Step 595, loss: 2.8777270317077637, step time: 17.198801040649414ms\r\nStep 596, loss: 2.9120981693267822, step time: 17.209529876708984ms\r\n",,terminal_output +6955,12275610,"TERMINAL",0,0,"Step 597, loss: 2.847552537918091, step time: 17.03357696533203ms\r\n",,terminal_output +6956,12275672,"TERMINAL",0,0,"Step 598, loss: 2.9553346633911133, step time: 17.084598541259766ms\r\n",,terminal_output +6957,12275739,"TERMINAL",0,0,"Step 599, loss: 2.9677300453186035, step time: 17.183780670166016ms\r\n",,terminal_output +6958,12275839,"TERMINAL",0,0,"Step 600, loss: 2.844635486602783, step time: 17.103910446166992ms\r\n",,terminal_output +6959,12275901,"TERMINAL",0,0,"Step 601, loss: 2.864506721496582, step time: 17.63439178466797ms\r\n",,terminal_output +6960,12275965,"TERMINAL",0,0,"Step 602, loss: 2.8669557571411133, step time: 17.184734344482422ms\r\n",,terminal_output +6961,12276078,"TERMINAL",0,0,"Step 603, loss: 2.95902156829834, step time: 17.315149307250977ms\r\nStep 604, loss: 2.8267905712127686, step time: 17.4252986907959ms\r\n",,terminal_output +6962,12276186,"TERMINAL",0,0,"Step 605, loss: 2.8156445026397705, step time: 16.83950424194336ms\r\nStep 606, loss: 2.832456111907959, step time: 17.092227935791016ms\r\n",,terminal_output +6963,12276247,"TERMINAL",0,0,"Step 607, loss: 2.8826258182525635, step time: 17.734050750732422ms\r\n",,terminal_output +6964,12276315,"TERMINAL",0,0,"Step 608, loss: 2.865459680557251, step time: 17.197370529174805ms\r\n",,terminal_output +6965,12276387,"TERMINAL",0,0,"Step 609, loss: 2.9904122352600098, step time: 16.926288604736328ms\r\n",,terminal_output +6966,12276449,"TERMINAL",0,0,"Step 610, loss: 2.8365345001220703, step time: 17.377614974975586ms\r\n",,terminal_output +6967,12276555,"TERMINAL",0,0,"Step 611, loss: 2.8194918632507324, step time: 17.17996597290039ms\r\nStep 612, loss: 2.808399200439453, step time: 17.29297637939453ms\r\n",,terminal_output +6968,12276651,"TERMINAL",0,0,"Step 613, loss: 2.869631290435791, step time: 17.483234405517578ms\r\n",,terminal_output +6969,12276704,"TERMINAL",0,0,"Step 614, loss: 2.8315062522888184, step time: 17.16136932373047ms\r\n",,terminal_output +6970,12276761,"TERMINAL",0,0,"Step 615, loss: 2.9400951862335205, step time: 17.348289489746094ms\r\n",,terminal_output +6971,12276877,"TERMINAL",0,0,"Step 616, loss: 2.8145456314086914, step time: 17.897844314575195ms\r\nStep 617, loss: 2.8077049255371094, step time: 17.984628677368164ms\r\n",,terminal_output +6972,12276969,"TERMINAL",0,0,"Step 618, loss: 2.8044979572296143, step time: 17.561912536621094ms\r\n",,terminal_output +6973,12277021,"TERMINAL",0,0,"Step 619, loss: 2.804281711578369, step time: 29.021739959716797ms\r\n",,terminal_output +6974,12277114,"TERMINAL",0,0,"Step 620, loss: 2.8302667140960693, step time: 18.904685974121094ms\r\n",,terminal_output +6975,12277169,"TERMINAL",0,0,"Step 621, loss: 2.8361871242523193, step time: 17.66180992126465ms\r\n",,terminal_output +6976,12277276,"TERMINAL",0,0,"Step 622, loss: 2.813973903656006, step time: 17.43173599243164ms\r\nStep 623, loss: 3.4771220684051514, step time: 
17.37833023071289ms\r\n",,terminal_output +6977,12277338,"TERMINAL",0,0,"Step 624, loss: 2.859257459640503, step time: 17.365694046020508ms\r\n",,terminal_output +6978,12277400,"TERMINAL",0,0,"Step 625, loss: 2.862818479537964, step time: 17.48180389404297ms\r\n",,terminal_output +6979,12277528,"TERMINAL",0,0,"Step 626, loss: 2.812676191329956, step time: 16.960859298706055ms\r\nStep 627, loss: 2.839157819747925, step time: 16.973257064819336ms\r\n",,terminal_output +6980,12277591,"TERMINAL",0,0,"Step 628, loss: 2.7844390869140625, step time: 17.352581024169922ms\r\n",,terminal_output +6981,12277654,"TERMINAL",0,0,"Step 629, loss: 2.927062511444092, step time: 17.32182502746582ms\r\n",,terminal_output +6982,12277715,"TERMINAL",0,0,"Step 630, loss: 2.8011693954467773, step time: 17.274141311645508ms\r\n",,terminal_output +6983,12277779,"TERMINAL",0,0,"Step 631, loss: 2.8265910148620605, step time: 17.577409744262695ms\r\n",,terminal_output +6984,12277845,"TERMINAL",0,0,"Step 632, loss: 2.7940478324890137, step time: 17.2421932220459ms\r\n",,terminal_output +6985,12277953,"TERMINAL",0,0,"Step 633, loss: 2.8947274684906006, step time: 17.246246337890625ms\r\nStep 634, loss: 2.770004987716675, step time: 17.487764358520508ms\r\n",,terminal_output +6986,12278022,"TERMINAL",0,0,"Step 635, loss: 2.7516403198242188, step time: 17.301559448242188ms\r\n",,terminal_output +6987,12278117,"TERMINAL",0,0,"Step 636, loss: 2.7848622798919678, step time: 17.48824119567871ms\r\n",,terminal_output +6988,12278226,"TERMINAL",0,0,"Step 637, loss: 2.7848570346832275, step time: 17.513275146484375ms\r\nStep 638, loss: 2.8255035877227783, step time: 17.171621322631836ms\r\n",,terminal_output +6989,12278287,"TERMINAL",0,0,"Step 639, loss: 2.769381523132324, step time: 17.67873764038086ms\r\n",,terminal_output +6990,12278351,"TERMINAL",0,0,"Step 640, loss: 2.8587005138397217, step time: 17.60578155517578ms\r\n",,terminal_output +6991,12278415,"TERMINAL",0,0,"Step 641, loss: 2.8115291595458984, step time: 18.570423126220703ms\r\n",,terminal_output +6992,12278478,"TERMINAL",0,0,"Step 642, loss: 2.848722219467163, step time: 17.63010025024414ms\r\n",,terminal_output +6993,12278598,"TERMINAL",0,0,"Step 643, loss: 2.7499306201934814, step time: 17.870187759399414ms\r\nStep 644, loss: 2.8332371711730957, step time: 17.597198486328125ms\r\n",,terminal_output +6994,12278662,"TERMINAL",0,0,"Step 645, loss: 3.2470967769622803, step time: 17.319440841674805ms\r\n",,terminal_output +6995,12278715,"TERMINAL",0,0,"Step 646, loss: 2.818451404571533, step time: 17.632007598876953ms\r\n",,terminal_output +6996,12278809,"TERMINAL",0,0,"Step 647, loss: 2.964045763015747, step time: 17.12822914123535ms\r\n",,terminal_output +6997,12278914,"TERMINAL",0,0,"Step 648, loss: 2.7783799171447754, step time: 17.154216766357422ms\r\nStep 649, loss: 2.770526647567749, step time: 17.73667335510254ms\r\n",,terminal_output +6998,12279006,"TERMINAL",0,0,"Step 650, loss: 2.761198043823242, step time: 23.14019203186035ms\r\n",,terminal_output +6999,12279057,"TERMINAL",0,0,"Step 651, loss: 2.7654342651367188, step time: 19.708871841430664ms\r\n",,terminal_output +7000,12279166,"TERMINAL",0,0,"Step 652, loss: 2.7500674724578857, step time: 17.4560546875ms\r\nStep 653, loss: 2.832023859024048, step time: 17.283201217651367ms\r\n",,terminal_output +7001,12279227,"TERMINAL",0,0,"Step 654, loss: 2.7659785747528076, step time: 17.607688903808594ms\r\n",,terminal_output +7002,12279290,"TERMINAL",0,0,"Step 655, loss: 2.7467098236083984, step time: 
17.708778381347656ms\r\n",,terminal_output +7003,12279354,"TERMINAL",0,0,"Step 656, loss: 2.983271360397339, step time: 17.5168514251709ms\r\n",,terminal_output +7004,12279417,"TERMINAL",0,0,"Step 657, loss: 2.776688814163208, step time: 17.220258712768555ms\r\n",,terminal_output +7005,12279481,"TERMINAL",0,0,"Step 658, loss: 2.7624266147613525, step time: 17.573118209838867ms\r\n",,terminal_output +7006,12279542,"TERMINAL",0,0,"Step 659, loss: 2.776120185852051, step time: 17.12203025817871ms\r\n",,terminal_output +7007,12279636,"TERMINAL",0,0,"Step 660, loss: 2.8263843059539795, step time: 17.116069793701172ms\r\n",,terminal_output +7008,12279688,"TERMINAL",0,0,"Step 661, loss: 2.756460189819336, step time: 17.63439178466797ms\r\n",,terminal_output +7009,12279789,"TERMINAL",0,0,"Step 662, loss: 2.7777743339538574, step time: 17.323017120361328ms\r\nStep 663, loss: 2.751502275466919, step time: 17.221689224243164ms\r\n",,terminal_output +7010,12279882,"TERMINAL",0,0,"Step 664, loss: 2.7415196895599365, step time: 17.180681228637695ms\r\n",,terminal_output +7011,12279987,"TERMINAL",0,0,"Step 665, loss: 2.741229295730591, step time: 16.90506935119629ms\r\nStep 666, loss: 2.742516040802002, step time: 17.32349395751953ms\r\n",,terminal_output +7012,12280079,"TERMINAL",0,0,"Step 667, loss: 2.7167301177978516, step time: 17.76885986328125ms\r\n",,terminal_output +7013,12280131,"TERMINAL",0,0,"Step 668, loss: 3.012868881225586, step time: 17.198562622070312ms\r\n",,terminal_output +7014,12280182,"TERMINAL",0,0,"Step 669, loss: 2.760575294494629, step time: 17.204761505126953ms\r\n",,terminal_output +7015,12280295,"TERMINAL",0,0,"Step 670, loss: 3.098511219024658, step time: 17.46344566345215ms\r\nStep 671, loss: 3.249662160873413, step time: 17.05145835876465ms\r\n",,terminal_output +7016,12280356,"TERMINAL",0,0,"Step 672, loss: 2.7511892318725586, step time: 17.113685607910156ms\r\n",,terminal_output +7017,12280419,"TERMINAL",0,0,"Step 673, loss: 2.9426369667053223, step time: 17.54927635192871ms\r\n",,terminal_output +7018,12280487,"TERMINAL",0,0,"Step 674, loss: 2.7448389530181885, step time: 17.25602149963379ms\r\n",,terminal_output +7019,12280548,"TERMINAL",0,0,"Step 675, loss: 2.8604555130004883, step time: 17.150163650512695ms\r\n",,terminal_output +7020,12280609,"TERMINAL",0,0,"Step 676, loss: 2.955537796020508, step time: 17.187118530273438ms\r\n",,terminal_output +7021,12280734,"TERMINAL",0,0,"Step 677, loss: 2.7462031841278076, step time: 17.117977142333984ms\r\nStep 678, loss: 2.724808692932129, step time: 17.173051834106445ms\r\n",,terminal_output +7022,12280797,"TERMINAL",0,0,"Step 679, loss: 2.841205358505249, step time: 17.2274112701416ms\r\n",,terminal_output +7023,12280863,"TERMINAL",0,0,"Step 680, loss: 2.8155603408813477, step time: 17.00139045715332ms\r\n",,terminal_output +7024,12280927,"TERMINAL",0,0,"Step 681, loss: 2.789311408996582, step time: 17.144203186035156ms\r\n",,terminal_output +7025,12280993,"TERMINAL",0,0,"Step 682, loss: 2.7268264293670654, step time: 17.423391342163086ms\r\n",,terminal_output +7026,12281058,"TERMINAL",0,0,"Step 683, loss: 2.7308032512664795, step time: 18.461942672729492ms\r\n",,terminal_output +7027,12281118,"TERMINAL",0,0,"Step 684, loss: 2.7408690452575684, step time: 21.77572250366211ms\r\n",,terminal_output +7028,12281182,"TERMINAL",0,0,"Step 685, loss: 2.7633256912231445, step time: 17.986774444580078ms\r\n",,terminal_output +7029,12281245,"TERMINAL",0,0,"Step 686, loss: 2.7144694328308105, step time: 
17.6389217376709ms\r\n",,terminal_output +7030,12281308,"TERMINAL",0,0,"Step 687, loss: 2.7095205783843994, step time: 17.305374145507812ms\r\n",,terminal_output +7031,12281371,"TERMINAL",0,0,"Step 688, loss: 2.709765911102295, step time: 17.314434051513672ms\r\n",,terminal_output +7032,12281431,"TERMINAL",0,0,"Step 689, loss: 2.7115702629089355, step time: 17.071008682250977ms\r\n",,terminal_output +7033,12281496,"TERMINAL",0,0,"Step 690, loss: 2.726181745529175, step time: 17.292022705078125ms\r\n",,terminal_output +7034,12281558,"TERMINAL",0,0,"Step 691, loss: 2.7109851837158203, step time: 17.360687255859375ms\r\n",,terminal_output +7035,12281621,"TERMINAL",0,0,"Step 692, loss: 2.7154057025909424, step time: 17.32349395751953ms\r\n",,terminal_output +7036,12281692,"TERMINAL",0,0,"Step 693, loss: 2.7768452167510986, step time: 16.937255859375ms\r\n",,terminal_output +7037,12281747,"TERMINAL",0,0,"Step 694, loss: 2.739307403564453, step time: 17.34614372253418ms\r\n",,terminal_output +7038,12281810,"TERMINAL",0,0,"Step 695, loss: 2.6954474449157715, step time: 17.107725143432617ms\r\n",,terminal_output +7039,12282023,"TERMINAL",0,0,"Step 696, loss: 2.690643548965454, step time: 17.215251922607422ms\r\n",,terminal_output +7040,12282082,"TERMINAL",0,0,"Step 697, loss: 2.7062923908233643, step time: 17.529726028442383ms\r\nStep 698, loss: 2.692762851715088, step time: 17.285585403442383ms\r\nStep 699, loss: 2.7353603839874268, step time: 17.173051834106445ms\r\n",,terminal_output +7041,12282143,"TERMINAL",0,0,"Step 700, loss: 2.6787195205688477, step time: 17.46225357055664ms\r\n",,terminal_output +7042,12282205,"TERMINAL",0,0,"Step 701, loss: 2.757991075515747, step time: 16.954421997070312ms\r\n",,terminal_output +7043,12282265,"TERMINAL",0,0,"Step 702, loss: 2.745208501815796, step time: 16.9680118560791ms\r\n",,terminal_output +7044,12282327,"TERMINAL",0,0,"Step 703, loss: 2.7014262676239014, step time: 17.419099807739258ms\r\n",,terminal_output +7045,12282388,"TERMINAL",0,0,"Step 704, loss: 2.6792519092559814, step time: 17.23313331604004ms\r\n",,terminal_output +7046,12282451,"TERMINAL",0,0,"Step 705, loss: 2.6941661834716797, step time: 16.83950424194336ms\r\n",,terminal_output +7047,12282512,"TERMINAL",0,0,"Step 706, loss: 2.6465559005737305, step time: 17.338275909423828ms\r\n",,terminal_output +7048,12282575,"TERMINAL",0,0,"Step 707, loss: 2.7024340629577637, step time: 16.99209213256836ms\r\n",,terminal_output +7049,12282639,"TERMINAL",0,0,"Step 708, loss: 2.687591791152954, step time: 17.1663761138916ms\r\n",,terminal_output +7050,12282709,"TERMINAL",0,0,"Step 709, loss: 2.6933281421661377, step time: 17.48180389404297ms\r\n",,terminal_output +7051,12282762,"TERMINAL",0,0,"Step 710, loss: 2.6721959114074707, step time: 17.293930053710938ms\r\n",,terminal_output +7052,12282859,"TERMINAL",0,0,"Step 711, loss: 2.6763031482696533, step time: 17.34447479248047ms\r\n",,terminal_output +7053,12283211,"TERMINAL",0,0,"Step 712, loss: 3.0594992637634277, step time: 313.0671977996826ms\r\nStep 713, loss: 2.660386562347412, step time: 24.837970733642578ms\r\n",,terminal_output +7054,12283270,"TERMINAL",0,0,"Step 714, loss: 2.8074467182159424, step time: 19.96922492980957ms\r\n",,terminal_output +7055,12283334,"TERMINAL",0,0,"Step 715, loss: 2.710689067840576, step time: 18.703937530517578ms\r\n",,terminal_output +7056,12283429,"TERMINAL",0,0,"Step 716, loss: 2.672635316848755, step time: 18.419742584228516ms\r\n",,terminal_output +7057,12283481,"TERMINAL",0,0,"Step 717, loss: 
2.671063184738159, step time: 17.365455627441406ms\r\n",,terminal_output +7058,12283597,"TERMINAL",0,0,"Step 718, loss: 2.6973700523376465, step time: 17.557144165039062ms\r\nStep 719, loss: 2.717689037322998, step time: 17.404556274414062ms\r\n",,terminal_output +7059,12283648,"TERMINAL",0,0,"Step 720, loss: 2.7790699005126953, step time: 17.665863037109375ms\r\n",,terminal_output +7060,12283724,"TERMINAL",0,0,"Step 721, loss: 2.6559972763061523, step time: 18.058061599731445ms\r\n",,terminal_output +7061,12283775,"TERMINAL",0,0,"Step 722, loss: 2.682507276535034, step time: 17.470121383666992ms\r\n",,terminal_output +7062,12283871,"TERMINAL",0,0,"Step 723, loss: 2.6577413082122803, step time: 17.19522476196289ms\r\n",,terminal_output +7063,12283922,"TERMINAL",0,0,"Step 724, loss: 2.6802096366882324, step time: 17.693042755126953ms\r\n",,terminal_output +7064,12284017,"TERMINAL",0,0,"Step 725, loss: 2.835843801498413, step time: 17.116785049438477ms\r\n",,terminal_output +7065,12284069,"TERMINAL",0,0,"Step 726, loss: 2.740819215774536, step time: 17.30966567993164ms\r\n",,terminal_output +7066,12284123,"TERMINAL",0,0,"Step 727, loss: 2.672718048095703, step time: 17.796993255615234ms\r\n",,terminal_output +7067,12284230,"TERMINAL",0,0,"Step 728, loss: 2.6675097942352295, step time: 17.032861709594727ms\r\nStep 729, loss: 2.6532974243164062, step time: 17.244577407836914ms\r\n",,terminal_output +7068,12284293,"TERMINAL",0,0,"Step 730, loss: 2.6753273010253906, step time: 17.575502395629883ms\r\n",,terminal_output +7069,12284356,"TERMINAL",0,0,"Step 731, loss: 2.6353795528411865, step time: 16.897916793823242ms\r\n",,terminal_output +7070,12284420,"TERMINAL",0,0,"Step 732, loss: 2.81351900100708, step time: 17.30489730834961ms\r\n",,terminal_output +7071,12284484,"TERMINAL",0,0,"Step 733, loss: 2.687188148498535, step time: 17.334699630737305ms\r\n",,terminal_output +7072,12284548,"TERMINAL",0,0,"Step 734, loss: 2.707404851913452, step time: 16.953229904174805ms\r\n",,terminal_output +7073,12284662,"TERMINAL",0,0,"Step 735, loss: 2.639500141143799, step time: 18.03112030029297ms\r\nStep 736, loss: 2.63496470451355, step time: 17.95673370361328ms\r\n",,terminal_output +7074,12284727,"TERMINAL",0,0,"Step 737, loss: 2.6487085819244385, step time: 17.2119140625ms\r\n",,terminal_output +7075,12284829,"TERMINAL",0,0,"Step 738, loss: 2.7002761363983154, step time: 17.445802688598633ms\r\n",,terminal_output +7076,12284889,"TERMINAL",0,0,"Step 739, loss: 2.6390469074249268, step time: 17.960548400878906ms\r\n",,terminal_output +7077,12284951,"TERMINAL",0,0,"Step 740, loss: 2.624938726425171, step time: 17.244815826416016ms\r\n",,terminal_output +7078,12285012,"TERMINAL",0,0,"Step 741, loss: 2.624005079269409, step time: 18.158674240112305ms\r\n",,terminal_output +7079,12285117,"TERMINAL",0,0,"Step 742, loss: 2.6241393089294434, step time: 17.680644989013672ms\r\nStep 743, loss: 2.637296438217163, step time: 17.419815063476562ms\r\n",,terminal_output +7080,12285230,"TERMINAL",0,0,"Step 744, loss: 2.602811098098755, step time: 17.058849334716797ms\r\nStep 745, loss: 2.619842767715454, step time: 17.512083053588867ms\r\n",,terminal_output +7081,12285295,"TERMINAL",0,0,"Step 746, loss: 2.62282133102417, step time: 20.219802856445312ms\r\n",,terminal_output +7082,12285357,"TERMINAL",0,0,"Step 747, loss: 2.7830870151519775, step time: 17.13728904724121ms\r\n",,terminal_output +7083,12285421,"TERMINAL",0,0,"Step 748, loss: 2.6910901069641113, step time: 17.22240447998047ms\r\n",,terminal_output 
+7084,12285526,"TERMINAL",0,0,"Step 749, loss: 2.597715377807617, step time: 17.21668243408203ms\r\n",,terminal_output +7085,12285579,"TERMINAL",0,0,"Step 750, loss: 2.6135828495025635, step time: 17.151355743408203ms\r\n",,terminal_output +7086,12285682,"TERMINAL",0,0,"Step 751, loss: 2.614192247390747, step time: 17.6694393157959ms\r\nStep 752, loss: 2.6319077014923096, step time: 17.179250717163086ms\r\n",,terminal_output +7087,12285751,"TERMINAL",0,0,"Step 753, loss: 2.6111085414886475, step time: 18.17464828491211ms\r\n",,terminal_output +7088,12285799,"TERMINAL",0,0,"Step 754, loss: 2.758741617202759, step time: 17.24720001220703ms\r\n",,terminal_output +7089,12285897,"TERMINAL",0,0,"Step 755, loss: 2.5989298820495605, step time: 17.139673233032227ms\r\n",,terminal_output +7090,12285957,"TERMINAL",0,0,"Step 756, loss: 2.6222996711730957, step time: 17.02880859375ms\r\n",,terminal_output +7091,12286017,"TERMINAL",0,0,"Step 757, loss: 2.6211555004119873, step time: 17.650842666625977ms\r\n",,terminal_output +7092,12286077,"TERMINAL",0,0,"Step 758, loss: 2.5957984924316406, step time: 17.17686653137207ms\r\n",,terminal_output +7093,12286138,"TERMINAL",0,0,"Step 759, loss: 2.7290053367614746, step time: 17.081260681152344ms\r\n",,terminal_output +7094,12286202,"TERMINAL",0,0,"Step 760, loss: 2.5845394134521484, step time: 17.41766929626465ms\r\n",,terminal_output +7095,12286259,"TERMINAL",0,0,"Step 761, loss: 2.592097043991089, step time: 17.08364486694336ms\r\n",,terminal_output +7096,12286367,"TERMINAL",0,0,"Step 762, loss: 2.5946035385131836, step time: 17.222881317138672ms\r\nStep 763, loss: 2.7723727226257324, step time: 17.62104034423828ms\r\n",,terminal_output +7097,12286431,"TERMINAL",0,0,"Step 764, loss: 2.592926025390625, step time: 17.16923713684082ms\r\n",,terminal_output +7098,12286558,"TERMINAL",0,0,"Step 765, loss: 2.602463960647583, step time: 17.150402069091797ms\r\nStep 766, loss: 2.5838217735290527, step time: 17.171621322631836ms\r\n",,terminal_output +7099,12286621,"TERMINAL",0,0,"Step 767, loss: 2.7151577472686768, step time: 16.888856887817383ms\r\n",,terminal_output +7100,12286693,"TERMINAL",0,0,"Step 768, loss: 2.5837817192077637, step time: 17.152786254882812ms\r\n",,terminal_output +7101,12286747,"TERMINAL",0,0,"Step 769, loss: 2.6236701011657715, step time: 17.592430114746094ms\r\n",,terminal_output +7102,12286857,"TERMINAL",0,0,"Step 770, loss: 2.5984115600585938, step time: 27.474164962768555ms\r\n",,terminal_output +7103,12286917,"TERMINAL",0,0,"Step 771, loss: 2.5848522186279297, step time: 17.815113067626953ms\r\n",,terminal_output +7104,12286990,"TERMINAL",0,0,"Step 772, loss: 2.559892416000366, step time: 17.65608787536621ms\r\n",,terminal_output +7105,12287041,"TERMINAL",0,0,"Step 773, loss: 2.5782415866851807, step time: 17.197608947753906ms\r\n",,terminal_output +7106,12287147,"TERMINAL",0,0,"Step 774, loss: 2.703176259994507, step time: 17.041683197021484ms\r\nStep 775, loss: 2.6680967807769775, step time: 17.75979995727539ms\r\n",,terminal_output +7107,12287211,"TERMINAL",0,0,"Step 776, loss: 2.585477590560913, step time: 17.11869239807129ms\r\n",,terminal_output +7108,12287276,"TERMINAL",0,0,"Step 777, loss: 2.6405513286590576, step time: 17.205476760864258ms\r\n",,terminal_output +7109,12287334,"TERMINAL",0,0,"Step 778, loss: 2.564565896987915, step time: 17.725467681884766ms\r\n",,terminal_output +7110,12287397,"TERMINAL",0,0,"Step 779, loss: 2.661937952041626, step time: 17.009973526000977ms\r\n",,terminal_output 
+7111,12287514,"TERMINAL",0,0,"Step 780, loss: 2.572702407836914, step time: 17.4407958984375ms\r\nStep 781, loss: 2.57755184173584, step time: 17.765522003173828ms\r\n",,terminal_output +7112,12287576,"TERMINAL",0,0,"Step 782, loss: 2.555290699005127, step time: 17.098426818847656ms\r\n",,terminal_output +7113,12287638,"TERMINAL",0,0,"Step 783, loss: 2.6488943099975586, step time: 17.99488067626953ms\r\n",,terminal_output +7114,12287710,"TERMINAL",0,0,"Step 784, loss: 2.5666089057922363, step time: 17.449378967285156ms\r\n",,terminal_output +7115,12287767,"TERMINAL",0,0,"Step 785, loss: 2.572946548461914, step time: 17.309188842773438ms\r\n",,terminal_output +7116,12287830,"TERMINAL",0,0,"Step 786, loss: 2.544102668762207, step time: 17.241954803466797ms\r\n",,terminal_output +7117,12287895,"TERMINAL",0,0,"Step 787, loss: 2.583127737045288, step time: 17.63129234313965ms\r\n",,terminal_output +7118,12287960,"TERMINAL",0,0,"Step 788, loss: 2.574246406555176, step time: 17.247438430786133ms\r\n",,terminal_output +7119,12288023,"TERMINAL",0,0,"Step 789, loss: 2.719421148300171, step time: 16.95537567138672ms\r\n",,terminal_output +7120,12288121,"TERMINAL",0,0,"Step 790, loss: 2.547783136367798, step time: 17.604351043701172ms\r\n",,terminal_output +7121,12288174,"TERMINAL",0,0,"Step 791, loss: 2.549146890640259, step time: 16.925334930419922ms\r\n",,terminal_output +7122,12288281,"TERMINAL",0,0,"Step 792, loss: 2.8019020557403564, step time: 17.288684844970703ms\r\nStep 793, loss: 2.5428662300109863, step time: 17.607927322387695ms\r\n",,terminal_output +7123,12288344,"TERMINAL",0,0,"Step 794, loss: 2.5463321208953857, step time: 17.5018310546875ms\r\n",,terminal_output +7124,12288411,"TERMINAL",0,0,"Step 795, loss: 2.5502829551696777, step time: 18.43094825744629ms\r\n",,terminal_output +7125,12288501,"TERMINAL",0,0,"Step 796, loss: 2.5340943336486816, step time: 18.1429386138916ms\r\n",,terminal_output +7126,12288555,"TERMINAL",0,0,"Step 797, loss: 2.5270047187805176, step time: 17.36140251159668ms\r\n",,terminal_output +7127,12288655,"TERMINAL",0,0,"Step 798, loss: 2.554666757583618, step time: 17.345905303955078ms\r\nStep 799, loss: 2.560772657394409, step time: 17.467975616455078ms\r\n",,terminal_output +7128,12288750,"TERMINAL",0,0,"Step 800, loss: 2.6974737644195557, step time: 17.742633819580078ms\r\n",,terminal_output +7129,12288801,"TERMINAL",0,0,"Step 801, loss: 2.5446877479553223, step time: 17.763376235961914ms\r\n",,terminal_output +7130,12288905,"TERMINAL",0,0,"Step 802, loss: 3.4271726608276367, step time: 18.256664276123047ms\r\nStep 803, loss: 2.536792039871216, step time: 17.564058303833008ms\r\n",,terminal_output +7131,12289000,"TERMINAL",0,0,"Step 804, loss: 2.561798334121704, step time: 17.76123046875ms\r\n",,terminal_output +7132,12289059,"TERMINAL",0,0,"Step 805, loss: 2.652815818786621, step time: 17.740726470947266ms\r\n",,terminal_output +7133,12289161,"TERMINAL",0,0,"Step 806, loss: 2.79365873336792, step time: 17.306089401245117ms\r\nStep 807, loss: 2.530501127243042, step time: 17.560720443725586ms\r\n",,terminal_output +7134,12289221,"TERMINAL",0,0,"Step 808, loss: 2.56636118888855, step time: 17.6541805267334ms\r\n",,terminal_output +7135,12289284,"TERMINAL",0,0,"Step 809, loss: 2.5634422302246094, step time: 17.181396484375ms\r\n",,terminal_output +7136,12289349,"TERMINAL",0,0,"Step 810, loss: 2.54906964302063, step time: 17.48943328857422ms\r\n",,terminal_output +7137,12289412,"TERMINAL",0,0,"Step 811, loss: 2.5520291328430176, step time: 
17.816543579101562ms\r\n",,terminal_output +7138,12289476,"TERMINAL",0,0,"Step 812, loss: 2.5199549198150635, step time: 17.15564727783203ms\r\n",,terminal_output +7139,12289538,"TERMINAL",0,0,"Step 813, loss: 2.51556396484375, step time: 16.959190368652344ms\r\n",,terminal_output +7140,12289637,"TERMINAL",0,0,"Step 814, loss: 2.5793771743774414, step time: 17.607688903808594ms\r\n",,terminal_output +7141,12289731,"TERMINAL",0,0,"Step 815, loss: 2.549856424331665, step time: 17.154693603515625ms\r\nStep 816, loss: 2.577479362487793, step time: 17.31419563293457ms\r\n",,terminal_output +7142,12289790,"TERMINAL",0,0,"Step 817, loss: 2.5397403240203857, step time: 17.503738403320312ms\r\n",,terminal_output +7143,12289887,"TERMINAL",0,0,"Step 818, loss: 2.5105366706848145, step time: 17.30036735534668ms\r\n",,terminal_output +7144,12289939,"TERMINAL",0,0,"Step 819, loss: 2.628335475921631, step time: 17.153024673461914ms\r\n",,terminal_output +7145,12290049,"TERMINAL",0,0,"Step 820, loss: 2.559589385986328, step time: 17.42243766784668ms\r\nStep 821, loss: 2.5037224292755127, step time: 16.963720321655273ms\r\n",,terminal_output +7146,12290113,"TERMINAL",0,0,"Step 822, loss: 2.499894618988037, step time: 17.108678817749023ms\r\n",,terminal_output +7147,12290178,"TERMINAL",0,0,"Step 823, loss: 2.5035951137542725, step time: 17.348289489746094ms\r\n",,terminal_output +7148,12290240,"TERMINAL",0,0,"Step 824, loss: 2.522540330886841, step time: 16.930818557739258ms\r\n",,terminal_output +7149,12290302,"TERMINAL",0,0,"Step 825, loss: 2.504810333251953, step time: 16.78943634033203ms\r\n",,terminal_output +7150,12290363,"TERMINAL",0,0,"Step 826, loss: 2.510265827178955, step time: 17.431020736694336ms\r\n",,terminal_output +7151,12290427,"TERMINAL",0,0,"Step 827, loss: 2.512892484664917, step time: 16.813039779663086ms\r\n",,terminal_output +7152,12290519,"TERMINAL",0,0,"Step 828, loss: 2.5074520111083984, step time: 17.139911651611328ms\r\n",,terminal_output +7153,12290571,"TERMINAL",0,0,"Step 829, loss: 2.6965723037719727, step time: 19.206523895263672ms\r\n",,terminal_output +7154,12290676,"TERMINAL",0,0,"Step 830, loss: 2.5389089584350586, step time: 17.74120330810547ms\r\nStep 831, loss: 2.4994564056396484, step time: 17.191410064697266ms\r\n",,terminal_output +7155,12290800,"TERMINAL",0,0,"Step 832, loss: 2.503788948059082, step time: 17.28057861328125ms\r\nStep 833, loss: 2.5260868072509766, step time: 16.739845275878906ms\r\n",,terminal_output +7156,12290866,"TERMINAL",0,0,"Step 834, loss: 2.5136373043060303, step time: 17.038583755493164ms\r\n",,terminal_output +7157,12290991,"TERMINAL",0,0,"Step 835, loss: 2.4974067211151123, step time: 17.351865768432617ms\r\nStep 836, loss: 2.512505054473877, step time: 16.943931579589844ms\r\n",,terminal_output +7158,12291058,"TERMINAL",0,0,"Step 837, loss: 2.501438856124878, step time: 17.004966735839844ms\r\n",,terminal_output +7159,12291119,"TERMINAL",0,0,"Step 838, loss: 2.6320483684539795, step time: 17.139673233032227ms\r\n",,terminal_output +7160,12291219,"TERMINAL",0,0,"Step 839, loss: 2.5568461418151855, step time: 16.898393630981445ms\r\n",,terminal_output +7161,12291266,"TERMINAL",0,0,"Step 840, loss: 2.4837210178375244, step time: 17.47584342956543ms\r\n",,terminal_output +7162,12291373,"TERMINAL",0,0,"Step 841, loss: 2.474322557449341, step time: 17.969608306884766ms\r\nStep 842, loss: 2.7065389156341553, step time: 17.31419563293457ms\r\n",,terminal_output +7163,12291435,"TERMINAL",0,0,"Step 843, loss: 2.491368055343628, step time: 
17.253637313842773ms\r\n",,terminal_output +7164,12291500,"TERMINAL",0,0,"Step 844, loss: 2.550077199935913, step time: 17.77029037475586ms\r\n",,terminal_output +7165,12291564,"TERMINAL",0,0,"Step 845, loss: 2.64424467086792, step time: 17.340898513793945ms\r\n",,terminal_output +7166,12291627,"TERMINAL",0,0,"Step 846, loss: 2.582465887069702, step time: 17.569541931152344ms\r\n",,terminal_output +7167,12292000,"TERMINAL",0,0,"Step 847, loss: 2.488762378692627, step time: 334.05542373657227ms\r\n",,terminal_output +7168,12292053,"TERMINAL",0,0,"Step 848, loss: 2.491881847381592, step time: 26.39603614807129ms\r\n",,terminal_output +7169,12292146,"TERMINAL",0,0,"Step 849, loss: 2.5223371982574463, step time: 24.863004684448242ms\r\n",,terminal_output +7170,12292254,"TERMINAL",0,0,"Step 850, loss: 2.4655580520629883, step time: 32.51457214355469ms\r\nStep 851, loss: 2.546821355819702, step time: 24.8415470123291ms\r\n",,terminal_output +7171,12292314,"TERMINAL",0,0,"Step 852, loss: 2.8935859203338623, step time: 20.61605453491211ms\r\n",,terminal_output +7172,12292375,"TERMINAL",0,0,"Step 853, loss: 2.4763665199279785, step time: 18.37015151977539ms\r\n",,terminal_output +7173,12292443,"TERMINAL",0,0,"Step 854, loss: 2.476341724395752, step time: 17.778873443603516ms\r\n",,terminal_output +7174,12292503,"TERMINAL",0,0,"Step 855, loss: 2.621347665786743, step time: 17.441272735595703ms\r\n",,terminal_output +7175,12292571,"TERMINAL",0,0,"Step 856, loss: 2.4745397567749023, step time: 17.808914184570312ms\r\n",,terminal_output +7176,12292632,"TERMINAL",0,0,"Step 857, loss: 2.5069196224212646, step time: 17.647981643676758ms\r\n",,terminal_output +7177,12292696,"TERMINAL",0,0,"Step 858, loss: 2.469402551651001, step time: 17.560958862304688ms\r\n",,terminal_output +7178,12292759,"TERMINAL",0,0,"Step 859, loss: 2.473043203353882, step time: 18.062829971313477ms\r\n",,terminal_output +7179,12292818,"TERMINAL",0,0,"Step 860, loss: 2.458495855331421, step time: 17.641544342041016ms\r\n",,terminal_output +7180,12292881,"TERMINAL",0,0,"Step 861, loss: 2.4535202980041504, step time: 17.400741577148438ms\r\n",,terminal_output +7181,12292985,"TERMINAL",0,0,"Step 862, loss: 2.449366331100464, step time: 17.755985260009766ms\r\n",,terminal_output +7182,12293036,"TERMINAL",0,0,"Step 863, loss: 2.5374183654785156, step time: 17.464160919189453ms\r\n",,terminal_output +7183,12293145,"TERMINAL",0,0,"Step 864, loss: 2.457937479019165, step time: 17.587900161743164ms\r\nStep 865, loss: 2.468402147293091, step time: 17.712116241455078ms\r\n",,terminal_output +7184,12293210,"TERMINAL",0,0,"Step 866, loss: 2.583247184753418, step time: 17.49420166015625ms\r\n",,terminal_output +7185,12293269,"TERMINAL",0,0,"Step 867, loss: 2.479551315307617, step time: 17.32325553894043ms\r\n",,terminal_output +7186,12293327,"TERMINAL",0,0,"Step 868, loss: 2.5137441158294678, step time: 20.396947860717773ms\r\n",,terminal_output +7187,12293421,"TERMINAL",0,0,"Step 869, loss: 2.530520439147949, step time: 17.04692840576172ms\r\n",,terminal_output +7188,12293473,"TERMINAL",0,0,"Step 870, loss: 2.4619789123535156, step time: 19.002437591552734ms\r\n",,terminal_output +7189,12293597,"TERMINAL",0,0,"Step 871, loss: 2.514727830886841, step time: 18.063068389892578ms\r\nStep 872, loss: 2.447794198989868, step time: 17.35067367553711ms\r\n",,terminal_output +7190,12293650,"TERMINAL",0,0,"Step 873, loss: 2.4536314010620117, step time: 17.054319381713867ms\r\n",,terminal_output +7191,12293715,"TERMINAL",0,0,"Step 874, loss: 
2.4716453552246094, step time: 17.8377628326416ms\r\n",,terminal_output +7192,12293781,"TERMINAL",0,0,"Step 875, loss: 2.459439516067505, step time: 17.344236373901367ms\r\n",,terminal_output +7193,12293844,"TERMINAL",0,0,"Step 876, loss: 2.4914188385009766, step time: 17.342805862426758ms\r\n",,terminal_output +7194,12293918,"TERMINAL",0,0,"Step 877, loss: 2.4600839614868164, step time: 17.44699478149414ms\r\n",,terminal_output +7195,12293975,"TERMINAL",0,0,"Step 878, loss: 2.4458582401275635, step time: 17.592668533325195ms\r\n",,terminal_output +7196,12294037,"TERMINAL",0,0,"Step 879, loss: 2.4358716011047363, step time: 17.455101013183594ms\r\n",,terminal_output +7197,12294099,"TERMINAL",0,0,"Step 880, loss: 2.507403612136841, step time: 17.514944076538086ms\r\n",,terminal_output +7198,12294162,"TERMINAL",0,0,"Step 881, loss: 2.4150116443634033, step time: 17.23647117614746ms\r\n",,terminal_output +7199,12294227,"TERMINAL",0,0,"Step 882, loss: 2.428581714630127, step time: 17.42863655090332ms\r\n",,terminal_output +7200,12294337,"TERMINAL",0,0,"Step 883, loss: 2.442091941833496, step time: 17.5933837890625ms\r\nStep 884, loss: 2.436896324157715, step time: 17.23790168762207ms\r\n",,terminal_output +7201,12294434,"TERMINAL",0,0,"Step 885, loss: 2.422713279724121, step time: 17.151594161987305ms\r\n",,terminal_output +7202,12294494,"TERMINAL",0,0,"Step 886, loss: 2.4311563968658447, step time: 17.340898513793945ms\r\n",,terminal_output +7203,12294603,"TERMINAL",0,0,"Step 887, loss: 2.4172394275665283, step time: 16.796350479125977ms\r\nStep 888, loss: 2.4296152591705322, step time: 17.279624938964844ms\r\n",,terminal_output +7204,12294729,"TERMINAL",0,0,"Step 889, loss: 2.8523058891296387, step time: 17.242431640625ms\r\nStep 890, loss: 2.4666244983673096, step time: 17.205238342285156ms\r\n",,terminal_output +7205,12294792,"TERMINAL",0,0,"Step 891, loss: 3.3388683795928955, step time: 17.048358917236328ms\r\n",,terminal_output +7206,12294855,"TERMINAL",0,0,"Step 892, loss: 2.40468168258667, step time: 17.71092414855957ms\r\n",,terminal_output +7207,12294920,"TERMINAL",0,0,"Step 893, loss: 2.4182205200195312, step time: 17.285823822021484ms\r\n",,terminal_output +7208,12294978,"TERMINAL",0,0,"Step 894, loss: 2.419562339782715, step time: 17.23647117614746ms\r\n",,terminal_output +7209,12295042,"TERMINAL",0,0,"Step 895, loss: 2.6680760383605957, step time: 17.51112937927246ms\r\n",,terminal_output +7210,12295108,"TERMINAL",0,0,"Step 896, loss: 2.413499116897583, step time: 17.461538314819336ms\r\n",,terminal_output +7211,12295164,"TERMINAL",0,0,"Step 897, loss: 2.5802876949310303, step time: 17.027854919433594ms\r\n",,terminal_output +7212,12295262,"TERMINAL",0,0,"Step 898, loss: 2.4114840030670166, step time: 17.817258834838867ms\r\n",,terminal_output +7213,12295328,"TERMINAL",0,0,"Step 899, loss: 2.4410037994384766, step time: 17.318010330200195ms\r\n",,terminal_output +7214,12295395,"TERMINAL",0,0,"Step 900, loss: 2.413522243499756, step time: 17.38286018371582ms\r\n",,terminal_output +7215,12295453,"TERMINAL",0,0,"Step 901, loss: 2.4281656742095947, step time: 17.797231674194336ms\r\n",,terminal_output +7216,12295559,"TERMINAL",0,0,"Step 902, loss: 2.4314515590667725, step time: 17.5015926361084ms\r\nStep 903, loss: 2.388138771057129, step time: 17.221927642822266ms\r\n",,terminal_output +7217,12295619,"TERMINAL",0,0,"Step 904, loss: 2.53930926322937, step time: 17.708301544189453ms\r\n",,terminal_output +7218,12295683,"TERMINAL",0,0,"Step 905, loss: 2.425868511199951, step time: 
17.420291900634766ms\r\n",,terminal_output +7219,12295762,"TERMINAL",0,0,"Step 906, loss: 2.41031551361084, step time: 17.84205436706543ms\r\n",,terminal_output +7220,12295795,"TERMINAL",0,0,"Step 907, loss: 2.413708448410034, step time: 17.78101921081543ms\r\n",,terminal_output +7221,12295860,"TERMINAL",0,0,"Step 908, loss: 2.507901430130005, step time: 17.348289489746094ms\r\n",,terminal_output +7222,12295920,"TERMINAL",0,0,"Step 909, loss: 2.6000070571899414, step time: 17.28677749633789ms\r\n",,terminal_output +7223,12295987,"TERMINAL",0,0,"Step 910, loss: 2.3959929943084717, step time: 17.86947250366211ms\r\n",,terminal_output +7224,12296049,"TERMINAL",0,0,"Step 911, loss: 2.4043471813201904, step time: 17.065048217773438ms\r\n",,terminal_output +7225,12296115,"TERMINAL",0,0,"Step 912, loss: 2.4218802452087402, step time: 17.53544807434082ms\r\n",,terminal_output +7226,12296173,"TERMINAL",0,0,"Step 913, loss: 2.394512891769409, step time: 17.775297164916992ms\r\n",,terminal_output +7227,12296266,"TERMINAL",0,0,"Step 914, loss: 2.418442964553833, step time: 17.38286018371582ms\r\n",,terminal_output +7228,12296318,"TERMINAL",0,0,"Step 915, loss: 2.388014793395996, step time: 17.264842987060547ms\r\n",,terminal_output +7229,12296410,"TERMINAL",0,0,"Step 916, loss: 2.4276814460754395, step time: 17.542123794555664ms\r\n",,terminal_output +7230,12296462,"TERMINAL",0,0,"Step 917, loss: 2.6199212074279785, step time: 17.023086547851562ms\r\n",,terminal_output +7231,12296567,"TERMINAL",0,0,"Step 918, loss: 2.4575443267822266, step time: 17.154216766357422ms\r\nStep 919, loss: 2.984955072402954, step time: 17.435550689697266ms\r\n",,terminal_output +7232,12296691,"TERMINAL",0,0,"Step 920, loss: 2.3981943130493164, step time: 17.311573028564453ms\r\nStep 921, loss: 2.4909181594848633, step time: 17.040729522705078ms\r\n",,terminal_output +7233,12296756,"TERMINAL",0,0,"Step 922, loss: 2.457169532775879, step time: 17.351627349853516ms\r\n",,terminal_output +7234,12296819,"TERMINAL",0,0,"Step 923, loss: 2.3967766761779785, step time: 24.402141571044922ms\r\n",,terminal_output +7235,12296882,"TERMINAL",0,0,"Step 924, loss: 2.393172264099121, step time: 18.051862716674805ms\r\n",,terminal_output +7236,12296946,"TERMINAL",0,0,"Step 925, loss: 2.3856663703918457, step time: 17.49396324157715ms\r\n",,terminal_output +7237,12297013,"TERMINAL",0,0,"Step 926, loss: 2.4103736877441406, step time: 17.627477645874023ms\r\n",,terminal_output +7238,12297079,"TERMINAL",0,0,"Step 927, loss: 2.409722328186035, step time: 16.9827938079834ms\r\n",,terminal_output +7239,12297140,"TERMINAL",0,0,"Step 928, loss: 2.381051540374756, step time: 17.422914505004883ms\r\n",,terminal_output +7240,12297203,"TERMINAL",0,0,"Step 929, loss: 2.408881425857544, step time: 17.031431198120117ms\r\n",,terminal_output +7241,12297272,"TERMINAL",0,0,"Step 930, loss: 2.482043504714966, step time: 17.530441284179688ms\r\n",,terminal_output +7242,12297327,"TERMINAL",0,0,"Step 931, loss: 2.5438668727874756, step time: 17.79770851135254ms\r\n",,terminal_output +7243,12297442,"TERMINAL",0,0,"Step 932, loss: 2.412856101989746, step time: 17.222166061401367ms\r\nStep 933, loss: 3.2360191345214844, step time: 17.21811294555664ms\r\n",,terminal_output +7244,12297508,"TERMINAL",0,0,"Step 934, loss: 2.390052556991577, step time: 17.756938934326172ms\r\n",,terminal_output +7245,12297580,"TERMINAL",0,0,"Step 935, loss: 2.405268430709839, step time: 17.09437370300293ms\r\n",,terminal_output +7246,12297632,"TERMINAL",0,0,"Step 936, loss: 
2.4580752849578857, step time: 17.498254776000977ms\r\n",,terminal_output +7247,12297757,"TERMINAL",0,0,"Step 937, loss: 2.3771374225616455, step time: 17.80080795288086ms\r\nStep 938, loss: 2.3764564990997314, step time: 17.53997802734375ms\r\n",,terminal_output +7248,12297825,"TERMINAL",0,0,"Step 939, loss: 2.3376553058624268, step time: 17.139911651611328ms\r\n",,terminal_output +7249,12297888,"TERMINAL",0,0,"Step 940, loss: 2.378109931945801, step time: 18.360376358032227ms\r\n",,terminal_output +7250,12297980,"TERMINAL",0,0,"Step 941, loss: 2.7358739376068115, step time: 16.927242279052734ms\r\n",,terminal_output +7251,12298032,"TERMINAL",0,0,"Step 942, loss: 2.372699022293091, step time: 17.496824264526367ms\r\n",,terminal_output +7252,12298138,"TERMINAL",0,0,"Step 943, loss: 2.354182004928589, step time: 17.48204231262207ms\r\nStep 944, loss: 2.3953239917755127, step time: 17.209291458129883ms\r\n",,terminal_output +7253,12298235,"TERMINAL",0,0,"Step 945, loss: 2.5451271533966064, step time: 18.425703048706055ms\r\n",,terminal_output +7254,12298295,"TERMINAL",0,0,"Step 946, loss: 2.556633710861206, step time: 17.4710750579834ms\r\n",,terminal_output +7255,12298358,"TERMINAL",0,0,"Step 947, loss: 2.442840099334717, step time: 16.990184783935547ms\r\n",,terminal_output +7256,12298420,"TERMINAL",0,0,"Step 948, loss: 2.3652212619781494, step time: 17.122268676757812ms\r\n",,terminal_output +7257,12298481,"TERMINAL",0,0,"Step 949, loss: 2.363018751144409, step time: 18.505573272705078ms\r\n",,terminal_output +7258,12298546,"TERMINAL",0,0,"Step 950, loss: 2.4039077758789062, step time: 17.39954948425293ms\r\n",,terminal_output +7259,12298647,"TERMINAL",0,0,"Step 951, loss: 2.5995800495147705, step time: 17.05622673034668ms\r\nStep 952, loss: 2.3754336833953857, step time: 17.596960067749023ms\r\n",,terminal_output +7260,12298770,"TERMINAL",0,0,"Step 953, loss: 2.4994113445281982, step time: 16.821622848510742ms\r\nStep 954, loss: 2.3718249797821045, step time: 17.282485961914062ms\r\n",,terminal_output +7261,12298836,"TERMINAL",0,0,"Step 955, loss: 2.357266902923584, step time: 17.464399337768555ms\r\n",,terminal_output +7262,12298896,"TERMINAL",0,0,"Step 956, loss: 2.6007468700408936, step time: 17.165184020996094ms\r\n",,terminal_output +7263,12298995,"TERMINAL",0,0,"Step 957, loss: 2.3900210857391357, step time: 17.28987693786621ms\r\n",,terminal_output +7264,12299057,"TERMINAL",0,0,"Step 958, loss: 2.383235216140747, step time: 17.808198928833008ms\r\n",,terminal_output +7265,12299164,"TERMINAL",0,0,"Step 959, loss: 2.3512086868286133, step time: 17.01498031616211ms\r\nStep 960, loss: 2.4407005310058594, step time: 17.20142364501953ms\r\n",,terminal_output +7266,12299257,"TERMINAL",0,0,"Step 961, loss: 2.352071762084961, step time: 17.7459716796875ms\r\n",,terminal_output +7267,12299337,"TERMINAL",0,0,"Step 962, loss: 2.3638875484466553, step time: 17.47918128967285ms\r\nStep 963, loss: 2.3645200729370117, step time: 17.014741897583008ms\r\n",,terminal_output +7268,12299434,"TERMINAL",0,0,"Step 964, loss: 2.348252773284912, step time: 17.750024795532227ms\r\n",,terminal_output +7269,12299492,"TERMINAL",0,0,"Step 965, loss: 2.358011245727539, step time: 16.895294189453125ms\r\n",,terminal_output +7270,12299597,"TERMINAL",0,0,"Step 966, loss: 2.5555033683776855, step time: 17.02117919921875ms\r\nStep 967, loss: 2.386237621307373, step time: 17.823219299316406ms\r\n",,terminal_output +7271,12299993,"TERMINAL",0,0,"Step 968, loss: 2.340944290161133, step time: 334.49506759643555ms\r\nStep 
969, loss: 2.546095371246338, step time: 24.307727813720703ms\r\n",,terminal_output +7272,12300130,"TERMINAL",0,0,"Step 970, loss: 2.449744939804077, step time: 19.719362258911133ms\r\nStep 971, loss: 2.359431266784668, step time: 18.527984619140625ms\r\n",,terminal_output +7273,12300194,"TERMINAL",0,0,"Step 972, loss: 2.3672752380371094, step time: 18.023252487182617ms\r\n",,terminal_output +7274,12300257,"TERMINAL",0,0,"Step 973, loss: 2.3694679737091064, step time: 17.51232147216797ms\r\n",,terminal_output +7275,12300318,"TERMINAL",0,0,"Step 974, loss: 2.3285791873931885, step time: 17.69232749938965ms\r\n",,terminal_output +7276,12300379,"TERMINAL",0,0,"Step 975, loss: 2.346327304840088, step time: 17.374038696289062ms\r\n",,terminal_output +7277,12300482,"TERMINAL",0,0,"Step 976, loss: 2.460334300994873, step time: 17.412185668945312ms\r\n",,terminal_output +7278,12300534,"TERMINAL",0,0,"Step 977, loss: 2.35121488571167, step time: 17.62223243713379ms\r\n",,terminal_output +7279,12300638,"TERMINAL",0,0,"Step 978, loss: 2.332124948501587, step time: 17.65131950378418ms\r\nStep 979, loss: 2.5427510738372803, step time: 17.33684539794922ms\r\n",,terminal_output +7280,12300757,"TERMINAL",0,0,"Step 980, loss: 2.397127151489258, step time: 17.4407958984375ms\r\nStep 981, loss: 2.343231201171875, step time: 17.028093338012695ms\r\n",,terminal_output +7281,12300819,"TERMINAL",0,0,"Step 982, loss: 2.339107036590576, step time: 17.146587371826172ms\r\n",,terminal_output +7282,12300884,"TERMINAL",0,0,"Step 983, loss: 2.324528694152832, step time: 17.16017723083496ms\r\n",,terminal_output +7283,12300943,"TERMINAL",0,0,"Step 984, loss: 2.40791654586792, step time: 17.09580421447754ms\r\n",,terminal_output +7284,12301011,"TERMINAL",0,0,"Step 985, loss: 2.343759775161743, step time: 17.1658992767334ms\r\n",,terminal_output +7285,12301072,"TERMINAL",0,0,"Step 986, loss: 2.3580191135406494, step time: 17.742156982421875ms\r\n",,terminal_output +7286,12301134,"TERMINAL",0,0,"Step 987, loss: 2.535240650177002, step time: 16.877174377441406ms\r\n",,terminal_output +7287,12301198,"TERMINAL",0,0,"Step 988, loss: 2.4478821754455566, step time: 17.16756820678711ms\r\n",,terminal_output +7288,12301294,"TERMINAL",0,0,"Step 989, loss: 2.3384199142456055, step time: 17.291545867919922ms\r\n",,terminal_output +7289,12301345,"TERMINAL",0,0,"Step 990, loss: 2.9838972091674805, step time: 17.21644401550293ms\r\n",,terminal_output +7290,12301450,"TERMINAL",0,0,"Step 991, loss: 2.3312489986419678, step time: 16.75271987915039ms\r\nStep 992, loss: 2.369450807571411, step time: 17.551898956298828ms\r\n",,terminal_output +7291,12301559,"TERMINAL",0,0,"Step 993, loss: 2.3186187744140625, step time: 31.032085418701172ms\r\n",,terminal_output +7292,12301611,"TERMINAL",0,0,"Step 994, loss: 2.3343558311462402, step time: 17.693758010864258ms\r\n",,terminal_output +7293,12301714,"TERMINAL",0,0,"Step 995, loss: 2.318537473678589, step time: 17.440080642700195ms\r\nStep 996, loss: 2.326307535171509, step time: 17.702102661132812ms\r\n",,terminal_output +7294,12301778,"TERMINAL",0,0,"Step 997, loss: 2.3191564083099365, step time: 17.29440689086914ms\r\n",,terminal_output +7295,12301841,"TERMINAL",0,0,"Step 998, loss: 2.325314998626709, step time: 17.39954948425293ms\r\n",,terminal_output +7296,12301907,"TERMINAL",0,0,"Step 999, loss: 2.3335015773773193, step time: 16.843557357788086ms\r\n",,terminal_output +7297,12304544,"TERMINAL",0,0,"Step 1000, loss: 2.338075876235962, step time: 44.78144645690918ms\r\n",,terminal_output 
+7298,12304609,"TERMINAL",0,0,"Step 1001, loss: 2.3046507835388184, step time: 27.585268020629883ms\r\n",,terminal_output +7299,12304672,"TERMINAL",0,0,"Step 1002, loss: 2.3138327598571777, step time: 20.743370056152344ms\r\n",,terminal_output +7300,12304736,"TERMINAL",0,0,"Step 1003, loss: 2.3144681453704834, step time: 19.8519229888916ms\r\n",,terminal_output +7301,12304792,"TERMINAL",0,0,"Step 1004, loss: 2.4033093452453613, step time: 19.165754318237305ms\r\n",,terminal_output +7302,12304857,"TERMINAL",0,0,"Step 1005, loss: 2.3353447914123535, step time: 18.459558486938477ms\r\n",,terminal_output +7303,12304922,"TERMINAL",0,0,"Step 1006, loss: 2.319504976272583, step time: 18.467187881469727ms\r\n",,terminal_output +7304,12304988,"TERMINAL",0,0,"Step 1007, loss: 2.2879323959350586, step time: 18.614768981933594ms\r\n",,terminal_output +7305,12305079,"TERMINAL",0,0,"Step 1008, loss: 2.2983627319335938, step time: 20.227432250976562ms\r\n",,terminal_output +7306,12305131,"TERMINAL",0,0,"Step 1009, loss: 2.2897417545318604, step time: 18.33176612854004ms\r\n",,terminal_output +7307,12305276,"TERMINAL",0,0,"Step 1010, loss: 2.2991695404052734, step time: 18.864154815673828ms\r\nStep 1011, loss: 2.3595035076141357, step time: 18.085956573486328ms\r\n",,terminal_output +7308,12305327,"TERMINAL",0,0,"Step 1012, loss: 2.290069341659546, step time: 18.097877502441406ms\r\n",,terminal_output +7309,12305438,"TERMINAL",0,0,"Step 1013, loss: 2.2768967151641846, step time: 18.41259002685547ms\r\nStep 1014, loss: 2.2975409030914307, step time: 18.634319305419922ms\r\n",,terminal_output +7310,12305497,"TERMINAL",0,0,"Step 1015, loss: 2.2869763374328613, step time: 18.719911575317383ms\r\n",,terminal_output +7311,12305559,"TERMINAL",0,0,"Step 1016, loss: 2.272278070449829, step time: 18.474817276000977ms\r\n",,terminal_output +7312,12305624,"TERMINAL",0,0,"Step 1017, loss: 2.2882416248321533, step time: 20.740985870361328ms\r\n",,terminal_output +7313,12305690,"TERMINAL",0,0,"Step 1018, loss: 2.3586156368255615, step time: 18.680095672607422ms\r\n",,terminal_output +7314,12305752,"TERMINAL",0,0,"Step 1019, loss: 2.4336612224578857, step time: 18.293380737304688ms\r\n",,terminal_output +7315,12305859,"TERMINAL",0,0,"Step 1020, loss: 2.7357430458068848, step time: 18.387794494628906ms\r\n",,terminal_output +7316,12305911,"TERMINAL",0,0,"Step 1021, loss: 2.304259777069092, step time: 17.810583114624023ms\r\n",,terminal_output +7317,12306018,"TERMINAL",0,0,"Step 1022, loss: 2.40690541267395, step time: 18.73922348022461ms\r\nStep 1023, loss: 2.570831775665283, step time: 17.5473690032959ms\r\n",,terminal_output +7318,12306109,"TERMINAL",0,0,"Step 1024, loss: 2.3094143867492676, step time: 18.255949020385742ms\r\n",,terminal_output +7319,12306160,"TERMINAL",0,0,"Step 1025, loss: 2.270716905593872, step time: 18.712997436523438ms\r\n",,terminal_output +7320,12306265,"TERMINAL",0,0,"Step 1026, loss: 2.2752277851104736, step time: 18.44191551208496ms\r\nStep 1027, loss: 2.2913382053375244, step time: 18.32723617553711ms\r\n",,terminal_output +7321,12306330,"TERMINAL",0,0,"Step 1028, loss: 2.4666945934295654, step time: 21.22950553894043ms\r\n",,terminal_output +7322,12306394,"TERMINAL",0,0,"Step 1029, loss: 2.2846829891204834, step time: 18.174409866333008ms\r\n",,terminal_output +7323,12306459,"TERMINAL",0,0,"Step 1030, loss: 2.4562153816223145, step time: 18.419265747070312ms\r\n",,terminal_output +7324,12306522,"TERMINAL",0,0,"Step 1031, loss: 2.281177520751953, step time: 
18.45693588256836ms\r\n",,terminal_output +7325,12306597,"TERMINAL",0,0,"Step 1032, loss: 2.2722043991088867, step time: 18.222808837890625ms\r\n",,terminal_output +7326,12306650,"TERMINAL",0,0,"Step 1033, loss: 2.3012590408325195, step time: 18.271207809448242ms\r\n",,terminal_output +7327,12306777,"TERMINAL",0,0,"Step 1034, loss: 2.27341365814209, step time: 19.044876098632812ms\r\nStep 1035, loss: 2.7952184677124023, step time: 18.094539642333984ms\r\n",,terminal_output +7328,12306883,"TERMINAL",0,0,"Step 1036, loss: 2.2601044178009033, step time: 20.8432674407959ms\r\n",,terminal_output +7329,12306943,"TERMINAL",0,0,"Step 1037, loss: 2.4458816051483154, step time: 18.748044967651367ms\r\n",,terminal_output +7330,12307006,"TERMINAL",0,0,"Step 1038, loss: 2.2644107341766357, step time: 18.544673919677734ms\r\n",,terminal_output +7331,12307070,"TERMINAL",0,0,"Step 1039, loss: 2.2669668197631836, step time: 18.056869506835938ms\r\n",,terminal_output +7332,12307132,"TERMINAL",0,0,"Step 1040, loss: 2.261953115463257, step time: 18.198728561401367ms\r\n",,terminal_output +7333,12307194,"TERMINAL",0,0,"Step 1041, loss: 2.257025718688965, step time: 18.182039260864258ms\r\n",,terminal_output +7334,12307256,"TERMINAL",0,0,"Step 1042, loss: 2.26253080368042, step time: 17.93193817138672ms\r\n",,terminal_output +7335,12307319,"TERMINAL",0,0,"Step 1043, loss: 2.2905561923980713, step time: 18.013715744018555ms\r\n",,terminal_output +7336,12307381,"TERMINAL",0,0,"Step 1044, loss: 2.279493808746338, step time: 17.998933792114258ms\r\n",,terminal_output +7337,12307443,"TERMINAL",0,0,"Step 1045, loss: 2.270639657974243, step time: 18.66602897644043ms\r\n",,terminal_output +7338,12307779,"TERMINAL",0,0,"Step 1046, loss: 2.278273820877075, step time: 359.4176769256592ms\r\n",,terminal_output +7339,12307887,"TERMINAL",0,0,"Step 1047, loss: 2.253429889678955, step time: 26.23581886291504ms\r\n",,terminal_output +7340,12307937,"TERMINAL",0,0,"Step 1048, loss: 2.2478439807891846, step time: 20.46370506286621ms\r\n",,terminal_output +7341,12308041,"TERMINAL",0,0,"Step 1049, loss: 2.281851053237915, step time: 19.91105079650879ms\r\nStep 1050, loss: 2.252243757247925, step time: 19.07038688659668ms\r\n",,terminal_output +7342,12308139,"TERMINAL",0,0,"Step 1051, loss: 2.3224565982818604, step time: 18.80645751953125ms\r\n",,terminal_output +7343,12308191,"TERMINAL",0,0,"Step 1052, loss: 2.246544361114502, step time: 18.941640853881836ms\r\n",,terminal_output +7344,12308283,"TERMINAL",0,0,"Step 1053, loss: 2.2705554962158203, step time: 19.28400993347168ms\r\n",,terminal_output +7345,12308335,"TERMINAL",0,0,"Step 1054, loss: 2.632296323776245, step time: 18.47243309020996ms\r\n",,terminal_output +7346,12308387,"TERMINAL",0,0,"Step 1055, loss: 2.366685628890991, step time: 19.036293029785156ms\r\n",,terminal_output +7347,12308491,"TERMINAL",0,0,"Step 1056, loss: 2.352146625518799, step time: 18.199920654296875ms\r\nStep 1057, loss: 2.276226282119751, step time: 19.898414611816406ms\r\n",,terminal_output +7348,12308554,"TERMINAL",0,0,"Step 1058, loss: 2.2781035900115967, step time: 19.215106964111328ms\r\n",,terminal_output +7349,12308641,"TERMINAL",0,0,"Step 1059, loss: 2.3729097843170166, step time: 18.418550491333008ms\r\n",,terminal_output +7350,12308699,"TERMINAL",0,0,"Step 1060, loss: 2.5528717041015625, step time: 18.405437469482422ms\r\n",,terminal_output +7351,12308752,"TERMINAL",0,0,"Step 1061, loss: 2.2652974128723145, step time: 21.29507064819336ms\r\n",,terminal_output 
+7352,12308818,"TERMINAL",0,0,"Step 1062, loss: 2.31980562210083, step time: 18.553972244262695ms\r\n",,terminal_output +7353,12308880,"TERMINAL",0,0,"Step 1063, loss: 2.2505929470062256, step time: 18.06044578552246ms\r\n",,terminal_output +7354,12308946,"TERMINAL",0,0,"Step 1064, loss: 2.2817542552948, step time: 18.5699462890625ms\r\n",,terminal_output +7355,12309008,"TERMINAL",0,0,"Step 1065, loss: 2.2555437088012695, step time: 17.90022850036621ms\r\n",,terminal_output +7356,12309068,"TERMINAL",0,0,"Step 1066, loss: 2.2268245220184326, step time: 18.12291145324707ms\r\n",,terminal_output +7357,12309131,"TERMINAL",0,0,"Step 1067, loss: 2.2492892742156982, step time: 18.279075622558594ms\r\n",,terminal_output +7358,12309268,"TERMINAL",0,0,"Step 1068, loss: 2.2410755157470703, step time: 31.41641616821289ms\r\nStep 1069, loss: 2.242849588394165, step time: 19.871234893798828ms\r\n",,terminal_output +7359,12309335,"TERMINAL",0,0,"Step 1070, loss: 2.6186068058013916, step time: 18.412351608276367ms\r\n",,terminal_output +7360,12309394,"TERMINAL",0,0,"Step 1071, loss: 2.232684850692749, step time: 18.346786499023438ms\r\n",,terminal_output +7361,12309457,"TERMINAL",0,0,"Step 1072, loss: 2.295728921890259, step time: 17.94576644897461ms\r\n",,terminal_output +7362,12309520,"TERMINAL",0,0,"Step 1073, loss: 2.2370221614837646, step time: 18.26643943786621ms\r\n",,terminal_output +7363,12309583,"TERMINAL",0,0,"Step 1074, loss: 2.2401232719421387, step time: 18.105506896972656ms\r\n",,terminal_output +7364,12309720,"TERMINAL",0,0,"Step 1075, loss: 2.224452257156372, step time: 18.053770065307617ms\r\nStep 1076, loss: 2.2226381301879883, step time: 18.564701080322266ms\r\n",,terminal_output +7365,12309846,"TERMINAL",0,0,"Step 1077, loss: 2.227520704269409, step time: 18.259286880493164ms\r\nStep 1078, loss: 2.3479373455047607, step time: 22.646665573120117ms\r\n",,terminal_output +7366,12309945,"TERMINAL",0,0,"Step 1079, loss: 2.26663875579834, step time: 18.672466278076172ms\r\n",,terminal_output +7367,12310060,"TERMINAL",0,0,"Step 1080, loss: 2.2234904766082764, step time: 18.084287643432617ms\r\n",,terminal_output +7368,12310165,"TERMINAL",0,0,"Step 1081, loss: 2.273693799972534, step time: 17.766237258911133ms\r\nStep 1082, loss: 2.232968330383301, step time: 17.932415008544922ms\r\nStep 1083, loss: 2.2213127613067627, step time: 17.85421371459961ms\r\n",,terminal_output +7369,12310259,"TERMINAL",0,0,"Step 1084, loss: 2.513646125793457, step time: 17.8220272064209ms\r\n",,terminal_output +7370,12310311,"TERMINAL",0,0,"Step 1085, loss: 2.2336831092834473, step time: 18.91613006591797ms\r\n",,terminal_output +7371,12310406,"TERMINAL",0,0,"Step 1086, loss: 2.2085249423980713, step time: 18.004894256591797ms\r\n",,terminal_output +7372,12310458,"TERMINAL",0,0,"Step 1087, loss: 2.7603683471679688, step time: 18.01919937133789ms\r\n",,terminal_output +7373,12310522,"TERMINAL",0,0,"Step 1088, loss: 2.2436985969543457, step time: 18.748760223388672ms\r\n",,terminal_output +7374,12310583,"TERMINAL",0,0,"Step 1089, loss: 2.217773199081421, step time: 18.083810806274414ms\r\n",,terminal_output +7375,12310643,"TERMINAL",0,0,"Step 1090, loss: 2.230954647064209, step time: 18.246173858642578ms\r\n",,terminal_output +7376,12310705,"TERMINAL",0,0,"Step 1091, loss: 2.220285177230835, step time: 18.22185516357422ms\r\n",,terminal_output +7377,12310756,"TERMINAL",0,0,"Step 1092, loss: 2.208392381668091, step time: 17.77338981628418ms\r\n",,terminal_output +7378,12310871,"TERMINAL",0,0,"Step 1093, loss: 
2.2118663787841797, step time: 17.77029037475586ms\r\nStep 1094, loss: 2.9728283882141113, step time: 18.423795700073242ms\r\n",,terminal_output +7379,12310935,"TERMINAL",0,0,"Step 1095, loss: 2.217477560043335, step time: 17.915010452270508ms\r\n",,terminal_output +7380,12311028,"TERMINAL",0,0,"Step 1096, loss: 2.2187752723693848, step time: 18.306493759155273ms\r\n",,terminal_output +7381,12311082,"TERMINAL",0,0,"Step 1097, loss: 2.207075595855713, step time: 20.404338836669922ms\r\n",,terminal_output +7382,12311179,"TERMINAL",0,0,"Step 1098, loss: 2.4226324558258057, step time: 18.960952758789062ms\r\n",,terminal_output +7383,12311251,"TERMINAL",0,0,"Step 1099, loss: 2.1909914016723633, step time: 18.495559692382812ms\r\nStep 1100, loss: 2.200065851211548, step time: 18.451929092407227ms\r\n",,terminal_output +7384,12311316,"TERMINAL",0,0,"Step 1101, loss: 2.300175428390503, step time: 18.261194229125977ms\r\n",,terminal_output +7385,12311381,"TERMINAL",0,0,"Step 1102, loss: 2.2439205646514893, step time: 18.388032913208008ms\r\n",,terminal_output +7386,12311510,"TERMINAL",0,0,"Step 1103, loss: 2.207542657852173, step time: 20.9805965423584ms\r\nStep 1104, loss: 2.1922948360443115, step time: 18.27239990234375ms\r\n",,terminal_output +7387,12311616,"TERMINAL",0,0,"Step 1105, loss: 2.253067970275879, step time: 18.027067184448242ms\r\n",,terminal_output +7388,12311667,"TERMINAL",0,0,"Step 1106, loss: 2.1991310119628906, step time: 18.301010131835938ms\r\n",,terminal_output +7389,12311771,"TERMINAL",0,0,"Step 1107, loss: 2.196627140045166, step time: 17.61031150817871ms\r\nStep 1108, loss: 2.2280571460723877, step time: 18.11814308166504ms\r\n",,terminal_output +7390,12311866,"TERMINAL",0,0,"Step 1109, loss: 2.190120220184326, step time: 18.57304573059082ms\r\n",,terminal_output +7391,12311917,"TERMINAL",0,0,"Step 1110, loss: 2.2703542709350586, step time: 18.105268478393555ms\r\n",,terminal_output +7392,12312009,"TERMINAL",0,0,"Step 1111, loss: 2.1845626831054688, step time: 18.007993698120117ms\r\n",,terminal_output +7393,12312085,"TERMINAL",0,0,"Step 1112, loss: 2.1821653842926025, step time: 18.06044578552246ms\r\nStep 1113, loss: 2.1874608993530273, step time: 17.462491989135742ms\r\n",,terminal_output +7394,12312196,"TERMINAL",0,0,"Step 1114, loss: 2.464388847351074, step time: 17.662525177001953ms\r\n",,terminal_output +7395,12312310,"TERMINAL",0,0,"Step 1115, loss: 2.2147932052612305, step time: 18.13960075378418ms\r\nStep 1116, loss: 2.193164348602295, step time: 17.14920997619629ms\r\n",,terminal_output +7396,12312415,"TERMINAL",0,0,"Step 1117, loss: 2.2457189559936523, step time: 17.33684539794922ms\r\nStep 1118, loss: 2.2084767818450928, step time: 17.61770248413086ms\r\n",,terminal_output +7397,12312477,"TERMINAL",0,0,"Step 1119, loss: 2.1837377548217773, step time: 17.021656036376953ms\r\n",,terminal_output +7398,12312537,"TERMINAL",0,0,"Step 1120, loss: 2.564178943634033, step time: 17.503738403320312ms\r\n",,terminal_output +7399,12312633,"TERMINAL",0,0,"Step 1121, loss: 2.1832146644592285, step time: 18.12577247619629ms\r\n",,terminal_output +7400,12312684,"TERMINAL",0,0,"Step 1122, loss: 2.313373327255249, step time: 17.686843872070312ms\r\n",,terminal_output +7401,12312788,"TERMINAL",0,0,"Step 1123, loss: 2.222154378890991, step time: 17.475128173828125ms\r\nStep 1124, loss: 2.197437047958374, step time: 17.829179763793945ms\r\n",,terminal_output +7402,12312879,"TERMINAL",0,0,"Step 1125, loss: 2.1449084281921387, step time: 17.016172409057617ms\r\n",,terminal_output 
+7403,12312983,"TERMINAL",0,0,"Step 1126, loss: 2.263397216796875, step time: 17.516374588012695ms\r\nStep 1127, loss: 2.226914882659912, step time: 18.302202224731445ms\r\n",,terminal_output +7404,12313078,"TERMINAL",0,0,"Step 1128, loss: 2.1659696102142334, step time: 18.416404724121094ms\r\n",,terminal_output +7405,12313131,"TERMINAL",0,0,"Step 1129, loss: 2.2837436199188232, step time: 17.807722091674805ms\r\n",,terminal_output +7406,12313237,"TERMINAL",0,0,"Step 1130, loss: 2.169367790222168, step time: 18.1882381439209ms\r\nStep 1131, loss: 2.1904494762420654, step time: 18.0361270904541ms\r\n",,terminal_output +7407,12313329,"TERMINAL",0,0,"Step 1132, loss: 2.156233549118042, step time: 17.76742935180664ms\r\n",,terminal_output +7408,12313381,"TERMINAL",0,0,"Step 1133, loss: 2.178377866744995, step time: 18.18704605102539ms\r\n",,terminal_output +7409,12313472,"TERMINAL",0,0,"Step 1134, loss: 2.18743634223938, step time: 17.79913902282715ms\r\n",,terminal_output +7410,12313523,"TERMINAL",0,0,"Step 1135, loss: 2.484088659286499, step time: 19.498109817504883ms\r\n",,terminal_output +7411,12313630,"TERMINAL",0,0,"Step 1136, loss: 2.166881561279297, step time: 19.313573837280273ms\r\nStep 1137, loss: 2.1605331897735596, step time: 17.918109893798828ms\r\n",,terminal_output +7412,12313695,"TERMINAL",0,0,"Step 1138, loss: 2.1731975078582764, step time: 17.956972122192383ms\r\n",,terminal_output +7413,12313750,"TERMINAL",0,0,"Step 1139, loss: 2.16298770904541, step time: 17.902135848999023ms\r\n",,terminal_output +7414,12313815,"TERMINAL",0,0,"Step 1140, loss: 2.1673502922058105, step time: 18.020153045654297ms\r\n",,terminal_output +7415,12313877,"TERMINAL",0,0,"Step 1141, loss: 2.159594774246216, step time: 17.450809478759766ms\r\n",,terminal_output +7416,12313938,"TERMINAL",0,0,"Step 1142, loss: 2.1689743995666504, step time: 17.838239669799805ms\r\n",,terminal_output +7417,12314038,"TERMINAL",0,0,"Step 1143, loss: 2.18036150932312, step time: 17.460346221923828ms\r\n",,terminal_output +7418,12314091,"TERMINAL",0,0,"Step 1144, loss: 2.188847064971924, step time: 17.688274383544922ms\r\n",,terminal_output +7419,12314197,"TERMINAL",0,0,"Step 1145, loss: 2.1803598403930664, step time: 17.72928237915039ms\r\nStep 1146, loss: 2.170236110687256, step time: 17.176389694213867ms\r\n",,terminal_output +7420,12314289,"TERMINAL",0,0,"Step 1147, loss: 2.1806142330169678, step time: 17.410755157470703ms\r\n",,terminal_output +7421,12314341,"TERMINAL",0,0,"Step 1148, loss: 2.161978244781494, step time: 18.140792846679688ms\r\n",,terminal_output +7422,12314511,"TERMINAL",0,0,"Step 1149, loss: 2.44118595123291, step time: 17.06409454345703ms\r\nStep 1150, loss: 2.200655460357666, step time: 17.554044723510742ms\r\nStep 1151, loss: 2.14530086517334, step time: 17.719745635986328ms\r\n",,terminal_output +7423,12314571,"TERMINAL",0,0,"Step 1152, loss: 2.1483564376831055, step time: 17.343759536743164ms\r\n",,terminal_output +7424,12314637,"TERMINAL",0,0,"Step 1153, loss: 2.2661330699920654, step time: 17.528772354125977ms\r\n",,terminal_output +7425,12314701,"TERMINAL",0,0,"Step 1154, loss: 2.1418607234954834, step time: 17.86184310913086ms\r\n",,terminal_output +7426,12314765,"TERMINAL",0,0,"Step 1155, loss: 2.156805992126465, step time: 17.21048355102539ms\r\n",,terminal_output +7427,12314835,"TERMINAL",0,0,"Step 1156, loss: 2.172631025314331, step time: 17.256736755371094ms\r\n",,terminal_output +7428,12314891,"TERMINAL",0,0,"Step 1157, loss: 2.3615427017211914, step time: 
17.409563064575195ms\r\n",,terminal_output +7429,12314987,"TERMINAL",0,0,"Step 1158, loss: 2.1628575325012207, step time: 17.59481430053711ms\r\n",,terminal_output +7430,12315046,"TERMINAL",0,0,"Step 1159, loss: 2.1758718490600586, step time: 17.047882080078125ms\r\n",,terminal_output +7431,12315151,"TERMINAL",0,0,"Step 1160, loss: 2.142838716506958, step time: 18.064260482788086ms\r\nStep 1161, loss: 2.1491641998291016, step time: 17.280101776123047ms\r\n",,terminal_output +7432,12315211,"TERMINAL",0,0,"Step 1162, loss: 2.1434037685394287, step time: 19.18816566467285ms\r\n",,terminal_output +7433,12315272,"TERMINAL",0,0,"Step 1163, loss: 2.1355342864990234, step time: 17.96722412109375ms\r\n",,terminal_output +7434,12315334,"TERMINAL",0,0,"Step 1164, loss: 2.135509490966797, step time: 17.609119415283203ms\r\n",,terminal_output +7435,12315426,"TERMINAL",0,0,"Step 1165, loss: 2.1318609714508057, step time: 17.248153686523438ms\r\n",,terminal_output +7436,12315532,"TERMINAL",0,0,"Step 1166, loss: 2.144465208053589, step time: 17.995834350585938ms\r\nStep 1167, loss: 2.126500129699707, step time: 17.228126525878906ms\r\n",,terminal_output +7437,12315626,"TERMINAL",0,0,"Step 1168, loss: 2.1224539279937744, step time: 17.269611358642578ms\r\n",,terminal_output +7438,12315677,"TERMINAL",0,0,"Step 1169, loss: 2.132528066635132, step time: 18.03421974182129ms\r\n",,terminal_output +7439,12315780,"TERMINAL",0,0,"Step 1170, loss: 2.128612756729126, step time: 17.72284507751465ms\r\nStep 1171, loss: 2.152146339416504, step time: 27.833938598632812ms\r\n",,terminal_output +7440,12315910,"TERMINAL",0,0,"Step 1172, loss: 2.112018585205078, step time: 21.243572235107422ms\r\nStep 1173, loss: 2.1390092372894287, step time: 17.583370208740234ms\r\n",,terminal_output +7441,12315982,"TERMINAL",0,0,"Step 1174, loss: 2.1194136142730713, step time: 17.8678035736084ms\r\n",,terminal_output +7442,12316038,"TERMINAL",0,0,"Step 1175, loss: 3.0786259174346924, step time: 17.63606071472168ms\r\n",,terminal_output +7443,12316140,"TERMINAL",0,0,"Step 1176, loss: 2.1353213787078857, step time: 17.60101318359375ms\r\n",,terminal_output +7444,12316270,"TERMINAL",0,0,"Step 1177, loss: 2.12174391746521, step time: 17.55237579345703ms\r\nStep 1178, loss: 2.1486616134643555, step time: 18.193721771240234ms\r\n",,terminal_output +7445,12316383,"TERMINAL",0,0,"Step 1179, loss: 2.4026002883911133, step time: 17.42720603942871ms\r\nStep 1180, loss: 2.1079111099243164, step time: 17.092466354370117ms\r\n",,terminal_output +7446,12316711,"TERMINAL",0,0,"Step 1181, loss: 2.109891414642334, step time: 355.30543327331543ms\r\n",,terminal_output +7447,12316820,"TERMINAL",0,0,"Step 1182, loss: 2.248506546020508, step time: 26.04532241821289ms\r\n",,terminal_output +7448,12316872,"TERMINAL",0,0,"Step 1183, loss: 2.129626512527466, step time: 21.87061309814453ms\r\n",,terminal_output +7449,12316976,"TERMINAL",0,0,"Step 1184, loss: 2.121291160583496, step time: 19.35267448425293ms\r\nStep 1185, loss: 2.1195781230926514, step time: 17.920732498168945ms\r\n",,terminal_output +7450,12317047,"TERMINAL",0,0,"Step 1186, loss: 2.1350700855255127, step time: 17.972230911254883ms\r\n",,terminal_output +7451,12317108,"TERMINAL",0,0,"Step 1187, loss: 2.103747844696045, step time: 18.010854721069336ms\r\n",,terminal_output +7452,12317168,"TERMINAL",0,0,"Step 1188, loss: 2.1183080673217773, step time: 17.397165298461914ms\r\n",,terminal_output +7453,12317229,"TERMINAL",0,0,"Step 1189, loss: 2.1113598346710205, step time: 
17.430782318115234ms\r\n",,terminal_output +7454,12317294,"TERMINAL",0,0,"Step 1190, loss: 2.1020524501800537, step time: 18.192052841186523ms\r\n",,terminal_output +7455,12317393,"TERMINAL",0,0,"Step 1191, loss: 2.1373915672302246, step time: 17.060041427612305ms\r\n",,terminal_output +7456,12317445,"TERMINAL",0,0,"Step 1192, loss: 2.446441650390625, step time: 17.55046844482422ms\r\n",,terminal_output +7457,12317549,"TERMINAL",0,0,"Step 1193, loss: 2.173973560333252, step time: 17.819881439208984ms\r\nStep 1194, loss: 2.0887749195098877, step time: 17.77172088623047ms\r\n",,terminal_output +7458,12317650,"TERMINAL",0,0,"Step 1195, loss: 2.1199285984039307, step time: 17.498493194580078ms\r\n",,terminal_output +7459,12317702,"TERMINAL",0,0,"Step 1196, loss: 2.970952272415161, step time: 18.030166625976562ms\r\n",,terminal_output +7460,12317753,"TERMINAL",0,0,"Step 1197, loss: 2.091862201690674, step time: 17.27581024169922ms\r\n",,terminal_output +7461,12317846,"TERMINAL",0,0,"Step 1198, loss: 2.090829849243164, step time: 17.478466033935547ms\r\n",,terminal_output +7462,12317897,"TERMINAL",0,0,"Step 1199, loss: 2.2107555866241455, step time: 17.7919864654541ms\r\n",,terminal_output +7463,12317953,"TERMINAL",0,0,"Step 1200, loss: 2.1117048263549805, step time: 17.435550689697266ms\r\n",,terminal_output +7464,12318293,"TERMINAL",0,0,"Step 1201, loss: 2.104084014892578, step time: 17.47918128967285ms\r\nStep 1202, loss: 2.089977264404297, step time: 18.19014549255371ms\r\nStep 1203, loss: 2.0881898403167725, step time: 19.222736358642578ms\r\nStep 1204, loss: 2.0969479084014893, step time: 17.543792724609375ms\r\n",,terminal_output +7465,12318371,"TERMINAL",0,0,"Step 1205, loss: 2.0782580375671387, step time: 17.800331115722656ms\r\n",,terminal_output +7466,12318510,"TERMINAL",0,0,"Step 1206, loss: 2.1154870986938477, step time: 17.6851749420166ms\r\nStep 1207, loss: 2.0904831886291504, step time: 17.344236373901367ms\r\nStep 1208, loss: 2.1003501415252686, step time: 17.894268035888672ms\r\n",,terminal_output +7467,12318631,"TERMINAL",0,0,"Step 1209, loss: 2.115586996078491, step time: 17.23623275756836ms\r\nStep 1210, loss: 2.1055099964141846, step time: 18.314361572265625ms\r\n",,terminal_output +7468,12318755,"TERMINAL",0,0,"Step 1211, loss: 2.082465410232544, step time: 18.051862716674805ms\r\nStep 1212, loss: 2.069237470626831, step time: 17.380237579345703ms\r\nStep 1213, loss: 2.130394697189331, step time: 17.33875274658203ms\r\n",,terminal_output +7469,12318818,"TERMINAL",0,0,"Step 1214, loss: 2.081908702850342, step time: 17.82989501953125ms\r\n",,terminal_output +7470,12318878,"TERMINAL",0,0,"Step 1215, loss: 2.11055588722229, step time: 17.490863800048828ms\r\n",,terminal_output +7471,12318942,"TERMINAL",0,0,"Step 1216, loss: 2.0979785919189453, step time: 17.574310302734375ms\r\n",,terminal_output +7472,12319037,"TERMINAL",0,0,"Step 1217, loss: 2.082664966583252, step time: 17.981290817260742ms\r\n",,terminal_output +7473,12319088,"TERMINAL",0,0,"Step 1218, loss: 2.110236406326294, step time: 17.493247985839844ms\r\n",,terminal_output +7474,12319255,"TERMINAL",0,0,"Step 1219, loss: 2.102754592895508, step time: 17.62843132019043ms\r\nStep 1220, loss: 2.0711545944213867, step time: 18.081188201904297ms\r\nStep 1221, loss: 2.0725340843200684, step time: 17.502546310424805ms\r\n",,terminal_output +7475,12319319,"TERMINAL",0,0,"Step 1222, loss: 2.2318615913391113, step time: 17.713069915771484ms\r\n",,terminal_output +7476,12319382,"TERMINAL",0,0,"Step 1223, loss: 
2.0552380084991455, step time: 18.010616302490234ms\r\n",,terminal_output +7477,12319447,"TERMINAL",0,0,"Step 1224, loss: 2.076233148574829, step time: 17.682790756225586ms\r\n",,terminal_output +7478,12319510,"TERMINAL",0,0,"Step 1225, loss: 2.0580098628997803, step time: 17.576932907104492ms\r\n",,terminal_output +7479,12319582,"TERMINAL",0,0,"Step 1226, loss: 2.0596256256103516, step time: 18.041372299194336ms\r\n",,terminal_output +7480,12319637,"TERMINAL",0,0,"Step 1227, loss: 2.0663788318634033, step time: 17.603635787963867ms\r\n",,terminal_output +7481,12319762,"TERMINAL",0,0,"Step 1228, loss: 2.1113438606262207, step time: 17.516374588012695ms\r\nStep 1229, loss: 2.0641515254974365, step time: 17.91071891784668ms\r\n",,terminal_output +7482,12319870,"TERMINAL",0,0,"Step 1230, loss: 2.052523612976074, step time: 17.7457332611084ms\r\n",,terminal_output +7483,12319921,"TERMINAL",0,0,"Step 1231, loss: 2.445039749145508, step time: 17.400264739990234ms\r\n",,terminal_output +7484,12320025,"TERMINAL",0,0,"Step 1232, loss: 2.0404675006866455, step time: 17.992734909057617ms\r\nStep 1233, loss: 2.2023367881774902, step time: 17.60578155517578ms\r\n",,terminal_output +7485,12320122,"TERMINAL",0,0,"Step 1234, loss: 2.059903383255005, step time: 17.676591873168945ms\r\n",,terminal_output +7486,12320173,"TERMINAL",0,0,"Step 1235, loss: 2.0573980808258057, step time: 20.7674503326416ms\r\n",,terminal_output +7487,12320276,"TERMINAL",0,0,"Step 1236, loss: 2.10341477394104, step time: 17.67110824584961ms\r\nStep 1237, loss: 2.067394495010376, step time: 17.37213134765625ms\r\n",,terminal_output +7488,12320377,"TERMINAL",0,0,"Step 1238, loss: 2.062617301940918, step time: 18.15652847290039ms\r\n",,terminal_output +7489,12320428,"TERMINAL",0,0,"Step 1239, loss: 2.1008286476135254, step time: 17.812252044677734ms\r\n",,terminal_output +7490,12320491,"TERMINAL",0,0,"Step 1240, loss: 2.229482889175415, step time: 17.688512802124023ms\r\n",,terminal_output +7491,12320552,"TERMINAL",0,0,"Step 1241, loss: 2.090806245803833, step time: 18.195390701293945ms\r\n",,terminal_output +7492,12320610,"TERMINAL",0,0,"Step 1242, loss: 2.067457914352417, step time: 17.731189727783203ms\r\n",,terminal_output +7493,12320721,"TERMINAL",0,0,"Step 1243, loss: 2.049466848373413, step time: 17.386436462402344ms\r\nStep 1244, loss: 2.7993674278259277, step time: 17.96865463256836ms\r\n",,terminal_output +7494,12320787,"TERMINAL",0,0,"Step 1245, loss: 2.0791728496551514, step time: 16.95728302001953ms\r\n",,terminal_output +7495,12320848,"TERMINAL",0,0,"Step 1246, loss: 2.0601282119750977, step time: 17.275571823120117ms\r\n",,terminal_output +7496,12320920,"TERMINAL",0,0,"Step 1247, loss: 2.2949576377868652, step time: 17.953157424926758ms\r\n",,terminal_output +7497,12320985,"TERMINAL",0,0,"Step 1248, loss: 2.104787826538086, step time: 17.89546012878418ms\r\n",,terminal_output +7498,12321046,"TERMINAL",0,0,"Step 1249, loss: 2.165231704711914, step time: 17.17829704284668ms\r\n",,terminal_output +7499,12321112,"TERMINAL",0,0,"Step 1250, loss: 2.2813913822174072, step time: 17.910242080688477ms\r\n",,terminal_output +7500,12321168,"TERMINAL",0,0,"Step 1251, loss: 2.1027674674987793, step time: 17.008304595947266ms\r\n",,terminal_output +7501,12321238,"TERMINAL",0,0,"Step 1252, loss: 2.130563259124756, step time: 17.0743465423584ms\r\n",,terminal_output +7502,12321327,"TERMINAL",0,0,"Step 1253, loss: 2.090756416320801, step time: 17.89259910583496ms\r\n",,terminal_output +7503,12321382,"TERMINAL",0,0,"Step 1254, loss: 
2.0228257179260254, step time: 17.636775970458984ms\r\n",,terminal_output +7504,12321487,"TERMINAL",0,0,"Step 1255, loss: 2.0542683601379395, step time: 17.710447311401367ms\r\nStep 1256, loss: 2.088738441467285, step time: 18.20826530456543ms\r\n",,terminal_output +7505,12321547,"TERMINAL",0,0,"Step 1257, loss: 2.1300573348999023, step time: 17.357826232910156ms\r\n",,terminal_output +7506,12321609,"TERMINAL",0,0,"Step 1258, loss: 2.079737663269043, step time: 18.56398582458496ms\r\n",,terminal_output +7507,12321746,"TERMINAL",0,0,"Step 1259, loss: 2.0344059467315674, step time: 23.97751808166504ms\r\nStep 1260, loss: 2.4138622283935547, step time: 24.316787719726562ms\r\n",,terminal_output +7508,12321814,"TERMINAL",0,0,"Step 1261, loss: 2.042872667312622, step time: 24.34372901916504ms\r\n",,terminal_output +7509,12321919,"TERMINAL",0,0,"Step 1262, loss: 2.1766421794891357, step time: 26.812314987182617ms\r\n",,terminal_output +7510,12322008,"TERMINAL",0,0,"Step 1263, loss: 2.0725929737091064, step time: 25.098085403442383ms\r\n",,terminal_output +7511,12322065,"TERMINAL",0,0,"Step 1264, loss: 2.055652141571045, step time: 27.316570281982422ms\r\n",,terminal_output +7512,12322118,"TERMINAL",0,0,"Step 1265, loss: 2.0808191299438477, step time: 25.71702003479004ms\r\n",,terminal_output +7513,12322183,"TERMINAL",0,0,"Step 1266, loss: 2.1424665451049805, step time: 23.871183395385742ms\r\n",,terminal_output +7514,12322253,"TERMINAL",0,0,"Step 1267, loss: 2.1061933040618896, step time: 19.55127716064453ms\r\n",,terminal_output +7515,12322318,"TERMINAL",0,0,"Step 1268, loss: 2.0781779289245605, step time: 29.684782028198242ms\r\n",,terminal_output +7516,12322374,"TERMINAL",0,0,"Step 1269, loss: 2.0680325031280518, step time: 17.495155334472656ms\r\n",,terminal_output +7517,12322435,"TERMINAL",0,0,"Step 1270, loss: 2.0260813236236572, step time: 17.43292808532715ms\r\n",,terminal_output +7518,12322499,"TERMINAL",0,0,"Step 1271, loss: 2.043138265609741, step time: 17.874479293823242ms\r\n",,terminal_output +7519,12322560,"TERMINAL",0,0,"Step 1272, loss: 2.114326000213623, step time: 17.379045486450195ms\r\n",,terminal_output +7520,12322641,"TERMINAL",0,0,"Step 1273, loss: 2.038682699203491, step time: 17.531394958496094ms\r\n",,terminal_output +7521,12322755,"TERMINAL",0,0,"Step 1274, loss: 2.0326900482177734, step time: 28.373241424560547ms\r\nStep 1275, loss: 2.0223939418792725, step time: 20.336389541625977ms\r\n",,terminal_output +7522,12322811,"TERMINAL",0,0,"Step 1276, loss: 2.0744616985321045, step time: 17.791748046875ms\r\n",,terminal_output +7523,12322918,"TERMINAL",0,0,"Step 1277, loss: 2.0378732681274414, step time: 18.382549285888672ms\r\nStep 1278, loss: 2.040250539779663, step time: 17.49444007873535ms\r\n",,terminal_output +7524,12323013,"TERMINAL",0,0,"Step 1279, loss: 2.0299394130706787, step time: 17.54593849182129ms\r\n",,terminal_output +7525,12323063,"TERMINAL",0,0,"Step 1280, loss: 2.0681188106536865, step time: 18.511295318603516ms\r\n",,terminal_output +7526,12323185,"TERMINAL",0,0,"Step 1281, loss: 2.0401899814605713, step time: 17.70806312561035ms\r\nStep 1282, loss: 2.040334701538086, step time: 17.911672592163086ms\r\n",,terminal_output +7527,12323254,"TERMINAL",0,0,"Step 1283, loss: 2.020169496536255, step time: 17.814159393310547ms\r\n",,terminal_output +7528,12323318,"TERMINAL",0,0,"Step 1284, loss: 2.026745319366455, step time: 17.807960510253906ms\r\n",,terminal_output +7529,12323424,"TERMINAL",0,0,"Step 1285, loss: 1.99609375, step time: 
21.436214447021484ms\r\n",,terminal_output +7530,12323542,"TERMINAL",0,0,"Step 1286, loss: 2.0559682846069336, step time: 20.84207534790039ms\r\nStep 1287, loss: 2.0721964836120605, step time: 18.12601089477539ms\r\n",,terminal_output +7531,12323649,"TERMINAL",0,0,"Step 1288, loss: 2.037846565246582, step time: 18.552064895629883ms\r\nStep 1289, loss: 1.9842405319213867, step time: 18.125534057617188ms\r\n",,terminal_output +7532,12323715,"TERMINAL",0,0,"Step 1290, loss: 2.024758815765381, step time: 18.076181411743164ms\r\n",,terminal_output +7533,12323779,"TERMINAL",0,0,"Step 1291, loss: 2.0021047592163086, step time: 17.74001121520996ms\r\n",,terminal_output +7534,12323840,"TERMINAL",0,0,"Step 1292, loss: 1.992276906967163, step time: 18.239259719848633ms\r\n",,terminal_output +7535,12323988,"TERMINAL",0,0,"Step 1293, loss: 2.1020867824554443, step time: 17.636537551879883ms\r\nStep 1294, loss: 1.990691900253296, step time: 17.66204833984375ms\r\n",,terminal_output +7536,12324103,"TERMINAL",0,0,"Step 1295, loss: 2.0201430320739746, step time: 17.809629440307617ms\r\nStep 1296, loss: 2.1230812072753906, step time: 17.342805862426758ms\r\n",,terminal_output +7537,12324164,"TERMINAL",0,0,"Step 1297, loss: 1.9979522228240967, step time: 17.52614974975586ms\r\n",,terminal_output +7538,12324285,"TERMINAL",0,0,"Step 1298, loss: 2.5427308082580566, step time: 18.083572387695312ms\r\nStep 1299, loss: 2.150670289993286, step time: 17.267704010009766ms\r\n",,terminal_output +7539,12324349,"TERMINAL",0,0,"Step 1300, loss: 1.9983059167861938, step time: 17.464399337768555ms\r\n",,terminal_output +7540,12324409,"TERMINAL",0,0,"Step 1301, loss: 1.9857871532440186, step time: 17.696142196655273ms\r\n",,terminal_output +7541,12324754,"TERMINAL",0,0,"Step 1302, loss: 1.992780089378357, step time: 350.86560249328613ms\r\n",,terminal_output +7542,12324861,"TERMINAL",0,0,"Step 1303, loss: 1.9914941787719727, step time: 24.74689483642578ms\r\n",,terminal_output +7543,12324923,"TERMINAL",0,0,"Step 1304, loss: 2.0043206214904785, step time: 19.81353759765625ms\r\n",,terminal_output +7544,12325028,"TERMINAL",0,0,"Step 1305, loss: 1.999132752418518, step time: 18.793344497680664ms\r\nStep 1306, loss: 2.0003557205200195, step time: 18.276214599609375ms\r\n",,terminal_output +7545,12325089,"TERMINAL",0,0,"Step 1307, loss: 1.9918245077133179, step time: 17.802953720092773ms\r\n",,terminal_output +7546,12325156,"TERMINAL",0,0,"Step 1308, loss: 2.0092451572418213, step time: 17.977237701416016ms\r\n",,terminal_output +7547,12325217,"TERMINAL",0,0,"Step 1309, loss: 1.9804140329360962, step time: 17.712116241455078ms\r\n",,terminal_output +7548,12325290,"TERMINAL",0,0,"Step 1310, loss: 2.0021564960479736, step time: 17.58885383605957ms\r\n",,terminal_output +7549,12325352,"TERMINAL",0,0,"Step 1311, loss: 1.9784647226333618, step time: 17.852783203125ms\r\n",,terminal_output +7550,12325412,"TERMINAL",0,0,"Step 1312, loss: 2.017265558242798, step time: 17.739534378051758ms\r\n",,terminal_output +7551,12325474,"TERMINAL",0,0,"Step 1313, loss: 2.0514018535614014, step time: 17.3799991607666ms\r\n",,terminal_output +7552,12325535,"TERMINAL",0,0,"Step 1314, loss: 2.0191736221313477, step time: 17.743587493896484ms\r\n",,terminal_output +7553,12325594,"TERMINAL",0,0,"Step 1315, loss: 1.9849470853805542, step time: 17.61174201965332ms\r\n",,terminal_output +7554,12325656,"TERMINAL",0,0,"Step 1316, loss: 1.9816383123397827, step time: 17.45748519897461ms\r\n",,terminal_output +7555,12325777,"TERMINAL",0,0,"Step 1317, loss: 
1.9874566793441772, step time: 17.906904220581055ms\r\nStep 1318, loss: 1.9878567457199097, step time: 18.376588821411133ms\r\n",,terminal_output +7556,12325842,"TERMINAL",0,0,"Step 1319, loss: 1.9715187549591064, step time: 17.154455184936523ms\r\n",,terminal_output +7557,12325944,"TERMINAL",0,0,"Step 1320, loss: 2.0056421756744385, step time: 18.467187881469727ms\r\n",,terminal_output +7558,12325993,"TERMINAL",0,0,"Step 1321, loss: 1.9889756441116333, step time: 17.185211181640625ms\r\n",,terminal_output +7559,12326100,"TERMINAL",0,0,"Step 1322, loss: 2.000126838684082, step time: 17.145395278930664ms\r\nStep 1323, loss: 1.992250680923462, step time: 17.869234085083008ms\r\n",,terminal_output +7560,12326219,"TERMINAL",0,0,"Step 1324, loss: 1.9667719602584839, step time: 17.300128936767578ms\r\nStep 1325, loss: 1.9495837688446045, step time: 17.75646209716797ms\r\n",,terminal_output +7561,12326282,"TERMINAL",0,0,"Step 1326, loss: 1.9590624570846558, step time: 17.870426177978516ms\r\n",,terminal_output +7562,12326389,"TERMINAL",0,0,"Step 1327, loss: 2.3958373069763184, step time: 17.626047134399414ms\r\n",,terminal_output +7563,12326441,"TERMINAL",0,0,"Step 1328, loss: 1.9657803773880005, step time: 17.618417739868164ms\r\n",,terminal_output +7564,12326547,"TERMINAL",0,0,"Step 1329, loss: 1.9577103853225708, step time: 17.84062385559082ms\r\nStep 1330, loss: 1.9645837545394897, step time: 17.710208892822266ms\r\n",,terminal_output +7565,12326639,"TERMINAL",0,0,"Step 1331, loss: 2.909027576446533, step time: 17.931222915649414ms\r\n",,terminal_output +7566,12326691,"TERMINAL",0,0,"Step 1332, loss: 1.9801958799362183, step time: 17.925262451171875ms\r\n",,terminal_output +7567,12326755,"TERMINAL",0,0,"Step 1333, loss: 1.9754114151000977, step time: 17.452001571655273ms\r\n",,terminal_output +7568,12326865,"TERMINAL",0,0,"Step 1334, loss: 1.9475715160369873, step time: 17.679452896118164ms\r\nStep 1335, loss: 1.947060465812683, step time: 20.10631561279297ms\r\n",,terminal_output +7569,12326957,"TERMINAL",0,0,"Step 1336, loss: 2.03029727935791, step time: 18.13530921936035ms\r\n",,terminal_output +7570,12327007,"TERMINAL",0,0,"Step 1337, loss: 1.972922444343567, step time: 17.74287223815918ms\r\n",,terminal_output +7571,12327111,"TERMINAL",0,0,"Step 1338, loss: 1.968217372894287, step time: 18.06473731994629ms\r\nStep 1339, loss: 1.9352716207504272, step time: 17.63010025024414ms\r\n",,terminal_output +7572,12327206,"TERMINAL",0,0,"Step 1340, loss: 2.132251739501953, step time: 17.61603355407715ms\r\n",,terminal_output +7573,12327257,"TERMINAL",0,0,"Step 1341, loss: 1.9823434352874756, step time: 17.548561096191406ms\r\n",,terminal_output +7574,12327349,"TERMINAL",0,0,"Step 1342, loss: 2.0399158000946045, step time: 18.68581771850586ms\r\n",,terminal_output +7575,12327401,"TERMINAL",0,0,"Step 1343, loss: 2.017920732498169, step time: 17.517566680908203ms\r\n",,terminal_output +7576,12327453,"TERMINAL",0,0,"Step 1344, loss: 2.1169183254241943, step time: 17.824411392211914ms\r\n",,terminal_output +7577,12327557,"TERMINAL",0,0,"Step 1345, loss: 1.9614622592926025, step time: 17.589092254638672ms\r\nStep 1346, loss: 1.981018304824829, step time: 17.602920532226562ms\r\n",,terminal_output +7578,12327617,"TERMINAL",0,0,"Step 1347, loss: 1.9514150619506836, step time: 17.765045166015625ms\r\n",,terminal_output +7579,12327679,"TERMINAL",0,0,"Step 1348, loss: 1.9841339588165283, step time: 17.309904098510742ms\r\n",,terminal_output +7580,12327741,"TERMINAL",0,0,"Step 1349, loss: 2.0802462100982666, 
step time: 17.3037052154541ms\r\n",,terminal_output +7581,12327849,"TERMINAL",0,0,"Step 1350, loss: 2.0905559062957764, step time: 17.570972442626953ms\r\n",,terminal_output +7582,12327901,"TERMINAL",0,0,"Step 1351, loss: 2.2144792079925537, step time: 17.267227172851562ms\r\n",,terminal_output +7583,12328004,"TERMINAL",0,0,"Step 1352, loss: 1.98080575466156, step time: 17.635822296142578ms\r\nStep 1353, loss: 2.1104204654693604, step time: 17.794132232666016ms\r\n",,terminal_output +7584,12328124,"TERMINAL",0,0,"Step 1354, loss: 1.9941065311431885, step time: 17.366886138916016ms\r\nStep 1355, loss: 1.9583520889282227, step time: 17.32945442199707ms\r\n",,terminal_output +7585,12328220,"TERMINAL",0,0,"Step 1356, loss: 1.9523017406463623, step time: 17.666101455688477ms\r\n",,terminal_output +7586,12328270,"TERMINAL",0,0,"Step 1357, loss: 1.9817904233932495, step time: 17.00615882873535ms\r\n",,terminal_output +7587,12328366,"TERMINAL",0,0,"Step 1358, loss: 1.9611819982528687, step time: 17.54450798034668ms\r\n",,terminal_output +7588,12328417,"TERMINAL",0,0,"Step 1359, loss: 1.9719655513763428, step time: 17.543792724609375ms\r\n",,terminal_output +7589,12328469,"TERMINAL",0,0,"Step 1360, loss: 1.9802062511444092, step time: 17.213106155395508ms\r\n",,terminal_output +7590,12328573,"TERMINAL",0,0,"Step 1361, loss: 1.9890934228897095, step time: 17.65894889831543ms\r\nStep 1362, loss: 1.9317787885665894, step time: 19.275188446044922ms\r\n",,terminal_output +7591,12328643,"TERMINAL",0,0,"Step 1363, loss: 1.9450303316116333, step time: 17.595767974853516ms\r\n",,terminal_output +7592,12328712,"TERMINAL",0,0,"Step 1364, loss: 1.9697537422180176, step time: 17.6544189453125ms\r\n",,terminal_output +7593,12328761,"TERMINAL",0,0,"Step 1365, loss: 1.9371592998504639, step time: 18.160343170166016ms\r\n",,terminal_output +7594,12328886,"TERMINAL",0,0,"Step 1366, loss: 1.9352076053619385, step time: 17.76266098022461ms\r\nStep 1367, loss: 2.049988031387329, step time: 17.54140853881836ms\r\n",,terminal_output +7595,12328986,"TERMINAL",0,0,"Step 1368, loss: 1.9446508884429932, step time: 18.225669860839844ms\r\n",,terminal_output +7596,12329046,"TERMINAL",0,0,"Step 1369, loss: 2.025479316711426, step time: 17.7304744720459ms\r\n",,terminal_output +7597,12329107,"TERMINAL",0,0,"Step 1370, loss: 1.9186257123947144, step time: 17.491579055786133ms\r\n",,terminal_output +7598,12329167,"TERMINAL",0,0,"Step 1371, loss: 1.9214847087860107, step time: 17.79007911682129ms\r\n",,terminal_output +7599,12329229,"TERMINAL",0,0,"Step 1372, loss: 3.0003702640533447, step time: 17.633914947509766ms\r\n",,terminal_output +7600,12329288,"TERMINAL",0,0,"Step 1373, loss: 1.934760332107544, step time: 17.39501953125ms\r\n",,terminal_output +7601,12329349,"TERMINAL",0,0,"Step 1374, loss: 1.9243388175964355, step time: 17.771005630493164ms\r\n",,terminal_output +7602,12329411,"TERMINAL",0,0,"Step 1375, loss: 1.9258778095245361, step time: 17.596960067749023ms\r\n",,terminal_output +7603,12329473,"TERMINAL",0,0,"Step 1376, loss: 1.8926584720611572, step time: 20.47872543334961ms\r\n",,terminal_output +7604,12329542,"TERMINAL",0,0,"Step 1377, loss: 1.9165476560592651, step time: 17.529010772705078ms\r\n",,terminal_output +7605,12329604,"TERMINAL",0,0,"Step 1378, loss: 1.9782631397247314, step time: 17.307281494140625ms\r\n",,terminal_output +7606,12329734,"TERMINAL",0,0,"Step 1379, loss: 1.9244648218154907, step time: 17.696619033813477ms\r\nStep 1380, loss: 2.5869882106781006, step time: 
18.156051635742188ms\r\n",,terminal_output +7607,12329786,"TERMINAL",0,0,"Step 1381, loss: 2.7806694507598877, step time: 17.618894577026367ms\r\n",,terminal_output +7608,12329848,"TERMINAL",0,0,"Step 1382, loss: 1.9650671482086182, step time: 17.783641815185547ms\r\n",,terminal_output +7609,12329907,"TERMINAL",0,0,"Step 1383, loss: 2.1521353721618652, step time: 17.612934112548828ms\r\n",,terminal_output +7610,12329969,"TERMINAL",0,0,"Step 1384, loss: 1.9549378156661987, step time: 17.68183708190918ms\r\n",,terminal_output +7611,12330034,"TERMINAL",0,0,"Step 1385, loss: 2.132997512817383, step time: 17.60244369506836ms\r\n",,terminal_output +7612,12330109,"TERMINAL",0,0,"Step 1386, loss: 1.9254481792449951, step time: 18.13507080078125ms\r\n",,terminal_output +7613,12330172,"TERMINAL",0,0,"Step 1387, loss: 1.926684856414795, step time: 17.37236976623535ms\r\n",,terminal_output +7614,12330232,"TERMINAL",0,0,"Step 1388, loss: 1.9361460208892822, step time: 17.36736297607422ms\r\n",,terminal_output +7615,12330291,"TERMINAL",0,0,"Step 1389, loss: 1.9604018926620483, step time: 17.813682556152344ms\r\n",,terminal_output +7616,12330352,"TERMINAL",0,0,"Step 1390, loss: 1.973036766052246, step time: 18.311262130737305ms\r\n",,terminal_output +7617,12330414,"TERMINAL",0,0,"Step 1391, loss: 1.9998809099197388, step time: 17.69566535949707ms\r\n",,terminal_output +7618,12330519,"TERMINAL",0,0,"Step 1392, loss: 1.928213357925415, step time: 17.948627471923828ms\r\n",,terminal_output +7619,12330584,"TERMINAL",0,0,"Step 1393, loss: 1.9789578914642334, step time: 17.08674430847168ms\r\n",,terminal_output +7620,12330641,"TERMINAL",0,0,"Step 1394, loss: 1.8997105360031128, step time: 17.364978790283203ms\r\n",,terminal_output +7621,12330713,"TERMINAL",0,0,"Step 1395, loss: 2.0897107124328613, step time: 17.64845848083496ms\r\n",,terminal_output +7622,12330763,"TERMINAL",0,0,"Step 1396, loss: 2.0056331157684326, step time: 17.486572265625ms\r\n",,terminal_output +7623,12330826,"TERMINAL",0,0,"Step 1397, loss: 1.932546854019165, step time: 17.85755157470703ms\r\n",,terminal_output +7624,12330885,"TERMINAL",0,0,"Step 1398, loss: 2.0065548419952393, step time: 17.874479293823242ms\r\n",,terminal_output +7625,12330945,"TERMINAL",0,0,"Step 1399, loss: 1.9268455505371094, step time: 17.49420166015625ms\r\n",,terminal_output +7626,12331007,"TERMINAL",0,0,"Step 1400, loss: 1.9212243556976318, step time: 17.769575119018555ms\r\n",,terminal_output +7627,12331112,"TERMINAL",0,0,"Step 1401, loss: 1.9055601358413696, step time: 18.444538116455078ms\r\nStep 1402, loss: 1.90707528591156, step time: 17.44556427001953ms\r\n",,terminal_output +7628,12331205,"TERMINAL",0,0,"Step 1403, loss: 1.926560640335083, step time: 17.394304275512695ms\r\n",,terminal_output +7629,12331256,"TERMINAL",0,0,"Step 1404, loss: 2.0989015102386475, step time: 17.95029640197754ms\r\n",,terminal_output +7630,12331349,"TERMINAL",0,0,"Step 1405, loss: 1.9146466255187988, step time: 17.54140853881836ms\r\n",,terminal_output +7631,12331400,"TERMINAL",0,0,"Step 1406, loss: 1.9053014516830444, step time: 17.515182495117188ms\r\n",,terminal_output +7632,12331504,"TERMINAL",0,0,"Step 1407, loss: 1.9023686647415161, step time: 17.61770248413086ms\r\nStep 1408, loss: 1.9203767776489258, step time: 17.504215240478516ms\r\n",,terminal_output +7633,12331620,"TERMINAL",0,0,"Step 1409, loss: 1.9662472009658813, step time: 17.26841926574707ms\r\nStep 1410, loss: 1.9039877653121948, step time: 17.557621002197266ms\r\n",,terminal_output 
+7634,12331748,"TERMINAL",0,0,"Step 1411, loss: 2.016641139984131, step time: 17.29297637939453ms\r\nStep 1412, loss: 1.95565927028656, step time: 19.908428192138672ms\r\n",,terminal_output +7635,12331812,"TERMINAL",0,0,"Step 1413, loss: 1.8836116790771484, step time: 17.983436584472656ms\r\n",,terminal_output +7636,12331873,"TERMINAL",0,0,"Step 1414, loss: 2.0623788833618164, step time: 17.35401153564453ms\r\n",,terminal_output +7637,12331969,"TERMINAL",0,0,"Step 1415, loss: 1.9174206256866455, step time: 17.042160034179688ms\r\n",,terminal_output +7638,12332021,"TERMINAL",0,0,"Step 1416, loss: 1.886781096458435, step time: 17.523765563964844ms\r\n",,terminal_output +7639,12332112,"TERMINAL",0,0,"Step 1417, loss: 1.8914391994476318, step time: 17.06671714782715ms\r\n",,terminal_output +7640,12332164,"TERMINAL",0,0,"Step 1418, loss: 1.9043290615081787, step time: 17.495155334472656ms\r\n",,terminal_output +7641,12332216,"TERMINAL",0,0,"Step 1419, loss: 2.3177623748779297, step time: 17.573833465576172ms\r\n",,terminal_output +7642,12332319,"TERMINAL",0,0,"Step 1420, loss: 1.8830331563949585, step time: 17.450332641601562ms\r\nStep 1421, loss: 2.466611385345459, step time: 17.319202423095703ms\r\n",,terminal_output +7643,12332382,"TERMINAL",0,0,"Step 1422, loss: 1.9291123151779175, step time: 17.661094665527344ms\r\n",,terminal_output +7644,12332486,"TERMINAL",0,0,"Step 1423, loss: 1.8906440734863281, step time: 28.206586837768555ms\r\n",,terminal_output +7645,12332537,"TERMINAL",0,0,"Step 1424, loss: 2.006288528442383, step time: 20.681381225585938ms\r\n",,terminal_output +7646,12332641,"TERMINAL",0,0,"Step 1425, loss: 1.8865835666656494, step time: 17.70305633544922ms\r\nStep 1426, loss: 1.8718873262405396, step time: 17.43006706237793ms\r\n",,terminal_output +7647,12332767,"TERMINAL",0,0,"Step 1427, loss: 1.9203156232833862, step time: 17.071962356567383ms\r\nStep 1428, loss: 1.9877289533615112, step time: 17.357587814331055ms\r\n",,terminal_output +7648,12332831,"TERMINAL",0,0,"Step 1429, loss: 2.4464385509490967, step time: 17.34614372253418ms\r\n",,terminal_output +7649,12332892,"TERMINAL",0,0,"Step 1430, loss: 1.9028550386428833, step time: 17.52495765686035ms\r\n",,terminal_output +7650,12332954,"TERMINAL",0,0,"Step 1431, loss: 1.9271286725997925, step time: 17.418384552001953ms\r\n",,terminal_output +7651,12333017,"TERMINAL",0,0,"Step 1432, loss: 1.8876062631607056, step time: 17.47727394104004ms\r\n",,terminal_output +7652,12333082,"TERMINAL",0,0,"Step 1433, loss: 1.8877148628234863, step time: 17.331600189208984ms\r\n",,terminal_output +7653,12333147,"TERMINAL",0,0,"Step 1434, loss: 1.8797749280929565, step time: 17.60697364807129ms\r\n",,terminal_output +7654,12333212,"TERMINAL",0,0,"Step 1435, loss: 1.9330865144729614, step time: 20.1265811920166ms\r\n",,terminal_output +7655,12333276,"TERMINAL",0,0,"Step 1436, loss: 1.8802770376205444, step time: 19.231081008911133ms\r\n",,terminal_output +7656,12333337,"TERMINAL",0,0,"Step 1437, loss: 1.9250754117965698, step time: 17.41313934326172ms\r\n",,terminal_output +7657,12333400,"TERMINAL",0,0,"Step 1438, loss: 1.8926573991775513, step time: 17.145872116088867ms\r\n",,terminal_output +7658,12333465,"TERMINAL",0,0,"Step 1439, loss: 1.8723260164260864, step time: 17.176389694213867ms\r\n",,terminal_output +7659,12333533,"TERMINAL",0,0,"Step 1440, loss: 1.9186315536499023, step time: 17.703533172607422ms\r\n",,terminal_output +7660,12333641,"TERMINAL",0,0,"Step 1441, loss: 1.869659185409546, step time: 
18.846750259399414ms\r\n",,terminal_output +7661,12333656,"TERMINAL",0,0,"Step 1442, loss: 1.8866527080535889, step time: 17.815828323364258ms\r\n",,terminal_output +7662,12333723,"TERMINAL",0,0,"Step 1443, loss: 1.8616366386413574, step time: 17.75360107421875ms\r\n",,terminal_output +7663,12333783,"TERMINAL",0,0,"Step 1444, loss: 1.9842642545700073, step time: 17.176389694213867ms\r\n",,terminal_output +7664,12333846,"TERMINAL",0,0,"Step 1445, loss: 1.9736735820770264, step time: 17.748594284057617ms\r\n",,terminal_output +7665,12333910,"TERMINAL",0,0,"Step 1446, loss: 1.89107084274292, step time: 17.79937744140625ms\r\n",,terminal_output +7666,12333974,"TERMINAL",0,0,"Step 1447, loss: 1.9153797626495361, step time: 17.795085906982422ms\r\n",,terminal_output +7667,12334037,"TERMINAL",0,0,"Step 1448, loss: 1.8822073936462402, step time: 17.609357833862305ms\r\n",,terminal_output +7668,12334104,"TERMINAL",0,0,"Step 1449, loss: 1.8829383850097656, step time: 17.986297607421875ms\r\n",,terminal_output +7669,12334166,"TERMINAL",0,0,"Step 1450, loss: 1.8959156274795532, step time: 17.663955688476562ms\r\n",,terminal_output +7670,12334228,"TERMINAL",0,0,"Step 1451, loss: 1.875236988067627, step time: 17.21358299255371ms\r\n",,terminal_output +7671,12334289,"TERMINAL",0,0,"Step 1452, loss: 1.927149772644043, step time: 17.57526397705078ms\r\n",,terminal_output +7672,12334351,"TERMINAL",0,0,"Step 1453, loss: 1.873917818069458, step time: 17.274141311645508ms\r\n",,terminal_output +7673,12334420,"TERMINAL",0,0,"Step 1454, loss: 1.8661267757415771, step time: 18.017292022705078ms\r\n",,terminal_output +7674,12334481,"TERMINAL",0,0,"Step 1455, loss: 1.9239319562911987, step time: 17.6544189453125ms\r\n",,terminal_output +7675,12334544,"TERMINAL",0,0,"Step 1456, loss: 1.8704584836959839, step time: 17.64988899230957ms\r\n",,terminal_output +7676,12334621,"TERMINAL",0,0,"Step 1457, loss: 1.8669222593307495, step time: 17.301559448242188ms\r\n",,terminal_output +7677,12334683,"TERMINAL",0,0,"Step 1458, loss: 1.8753271102905273, step time: 17.902851104736328ms\r\n",,terminal_output +7678,12334743,"TERMINAL",0,0,"Step 1459, loss: 1.8584614992141724, step time: 17.311573028564453ms\r\n",,terminal_output +7679,12334796,"TERMINAL",0,0,"Step 1460, loss: 1.8601794242858887, step time: 17.962217330932617ms\r\n",,terminal_output +7680,12334892,"TERMINAL",0,0,"Step 1461, loss: 1.8893611431121826, step time: 17.685413360595703ms\r\n",,terminal_output +7681,12334943,"TERMINAL",0,0,"Step 1462, loss: 1.8486056327819824, step time: 17.77791976928711ms\r\n",,terminal_output +7682,12335047,"TERMINAL",0,0,"Step 1463, loss: 1.8360844850540161, step time: 17.35520362854004ms\r\nStep 1464, loss: 1.9216090440750122, step time: 17.753124237060547ms\r\n",,terminal_output +7683,12335143,"TERMINAL",0,0,"Step 1465, loss: 1.8530867099761963, step time: 17.446279525756836ms\r\n",,terminal_output +7684,12335195,"TERMINAL",0,0,"Step 1466, loss: 1.8565369844436646, step time: 17.36903190612793ms\r\n",,terminal_output +7685,12335289,"TERMINAL",0,0,"Step 1467, loss: 1.8713533878326416, step time: 17.566680908203125ms\r\n",,terminal_output +7686,12335365,"TERMINAL",0,0,"Step 1468, loss: 1.9839742183685303, step time: 17.351388931274414ms\r\nStep 1469, loss: 1.8186911344528198, step time: 17.399072647094727ms\r\n",,terminal_output +7687,12335428,"TERMINAL",0,0,"Step 1470, loss: 1.8333306312561035, step time: 17.667055130004883ms\r\n",,terminal_output +7688,12335531,"TERMINAL",0,0,"Step 1471, loss: 1.8450250625610352, step time: 
18.031597137451172ms\r\n",,terminal_output +7689,12335591,"TERMINAL",0,0,"Step 1472, loss: 1.935400128364563, step time: 17.310380935668945ms\r\n",,terminal_output +7690,12335654,"TERMINAL",0,0,"Step 1473, loss: 2.027862548828125, step time: 17.556428909301758ms\r\n",,terminal_output +7691,12335713,"TERMINAL",0,0,"Step 1474, loss: 1.8247349262237549, step time: 17.606735229492188ms\r\n",,terminal_output +7692,12335771,"TERMINAL",0,0,"Step 1475, loss: 2.145770788192749, step time: 17.39358901977539ms\r\n",,terminal_output +7693,12335877,"TERMINAL",0,0,"Step 1476, loss: 1.829148292541504, step time: 17.357587814331055ms\r\nStep 1477, loss: 1.9082293510437012, step time: 17.31586456298828ms\r\n",,terminal_output +7694,12335973,"TERMINAL",0,0,"Step 1478, loss: 1.8490827083587646, step time: 17.540454864501953ms\r\n",,terminal_output +7695,12336024,"TERMINAL",0,0,"Step 1479, loss: 1.8364938497543335, step time: 17.672061920166016ms\r\n",,terminal_output +7696,12336127,"TERMINAL",0,0,"Step 1480, loss: 1.8980292081832886, step time: 17.152786254882812ms\r\nStep 1481, loss: 2.252753973007202, step time: 17.25482940673828ms\r\n",,terminal_output +7697,12336219,"TERMINAL",0,0,"Step 1482, loss: 1.8748594522476196, step time: 17.810583114624023ms\r\n",,terminal_output +7698,12336270,"TERMINAL",0,0,"Step 1483, loss: 1.8588453531265259, step time: 18.293380737304688ms\r\n",,terminal_output +7699,12336363,"TERMINAL",0,0,"Step 1484, loss: 1.8304784297943115, step time: 18.288850784301758ms\r\n",,terminal_output +7700,12336414,"TERMINAL",0,0,"Step 1485, loss: 1.8295327425003052, step time: 17.623424530029297ms\r\n",,terminal_output +7701,12336519,"TERMINAL",0,0,"Step 1486, loss: 1.8464852571487427, step time: 17.429351806640625ms\r\nStep 1487, loss: 1.948408603668213, step time: 17.111778259277344ms\r\n",,terminal_output +7702,12336941,"TERMINAL",0,0,"Step 1488, loss: 1.825276494026184, step time: 367.68174171447754ms\r\nStep 1489, loss: 1.8159434795379639, step time: 24.567604064941406ms\r\n",,terminal_output +7703,12337047,"TERMINAL",0,0,"Step 1490, loss: 1.8394345045089722, step time: 20.043134689331055ms\r\n",,terminal_output +7704,12337098,"TERMINAL",0,0,"Step 1491, loss: 1.9313793182373047, step time: 18.99886131286621ms\r\n",,terminal_output +7705,12337204,"TERMINAL",0,0,"Step 1492, loss: 1.8194289207458496, step time: 18.31340789794922ms\r\nStep 1493, loss: 1.8257601261138916, step time: 17.581701278686523ms\r\n",,terminal_output +7706,12337266,"TERMINAL",0,0,"Step 1494, loss: 1.8213123083114624, step time: 18.342971801757812ms\r\n",,terminal_output +7707,12337330,"TERMINAL",0,0,"Step 1495, loss: 1.819672703742981, step time: 17.56596565246582ms\r\n",,terminal_output +7708,12337392,"TERMINAL",0,0,"Step 1496, loss: 1.8113914728164673, step time: 17.39954948425293ms\r\n",,terminal_output +7709,12337455,"TERMINAL",0,0,"Step 1497, loss: 1.8433018922805786, step time: 17.717599868774414ms\r\n",,terminal_output +7710,12337520,"TERMINAL",0,0,"Step 1498, loss: 2.2628324031829834, step time: 17.873048782348633ms\r\n",,terminal_output +7711,12337614,"TERMINAL",0,0,"Step 1499, loss: 1.8591372966766357, step time: 17.418622970581055ms\r\n",,terminal_output +7712,12340238,"TERMINAL",0,0,"Step 1500, loss: 2.022200107574463, step time: 37.06049919128418ms\r\n",,terminal_output +7713,12340346,"TERMINAL",0,0,"Step 1501, loss: 2.3966329097747803, step time: 26.276350021362305ms\r\n",,terminal_output +7714,12340397,"TERMINAL",0,0,"Step 1502, loss: 1.819557785987854, step time: 
21.306276321411133ms\r\n",,terminal_output +7715,12340542,"TERMINAL",0,0,"Step 1503, loss: 1.795802354812622, step time: 19.817829132080078ms\r\nStep 1504, loss: 1.8300788402557373, step time: 18.83387565612793ms\r\n",,terminal_output +7716,12340603,"TERMINAL",0,0,"Step 1505, loss: 1.8134100437164307, step time: 19.737720489501953ms\r\n",,terminal_output +7717,12340663,"TERMINAL",0,0,"Step 1506, loss: 1.8244359493255615, step time: 18.705368041992188ms\r\n",,terminal_output +7718,12340766,"TERMINAL",0,0,"Step 1507, loss: 1.8134069442749023, step time: 17.88187026977539ms\r\nStep 1508, loss: 2.0942137241363525, step time: 17.72332191467285ms\r\n",,terminal_output +7719,12340859,"TERMINAL",0,0,"Step 1509, loss: 1.8123648166656494, step time: 18.663644790649414ms\r\n",,terminal_output +7720,12340911,"TERMINAL",0,0,"Step 1510, loss: 1.8398369550704956, step time: 18.330812454223633ms\r\n",,terminal_output +7721,12341056,"TERMINAL",0,0,"Step 1511, loss: 1.8141599893569946, step time: 18.938302993774414ms\r\nStep 1512, loss: 1.8262696266174316, step time: 17.26698875427246ms\r\n",,terminal_output +7722,12341107,"TERMINAL",0,0,"Step 1513, loss: 1.831630825996399, step time: 21.99268341064453ms\r\n",,terminal_output +7723,12341199,"TERMINAL",0,0,"Step 1514, loss: 1.7916512489318848, step time: 19.3173885345459ms\r\n",,terminal_output +7724,12341251,"TERMINAL",0,0,"Step 1515, loss: 1.822587490081787, step time: 18.2650089263916ms\r\n",,terminal_output +7725,12341357,"TERMINAL",0,0,"Step 1516, loss: 1.8041726350784302, step time: 19.156455993652344ms\r\nStep 1517, loss: 1.807127833366394, step time: 19.91438865661621ms\r\n",,terminal_output +7726,12341417,"TERMINAL",0,0,"Step 1518, loss: 1.888193964958191, step time: 19.7141170501709ms\r\n",,terminal_output +7727,12341479,"TERMINAL",0,0,"Step 1519, loss: 1.9369276762008667, step time: 18.19586753845215ms\r\n",,terminal_output +7728,12341537,"TERMINAL",0,0,"Step 1520, loss: 1.7942134141921997, step time: 19.4852352142334ms\r\n",,terminal_output +7729,12341600,"TERMINAL",0,0,"Step 1521, loss: 1.8083183765411377, step time: 18.201589584350586ms\r\n",,terminal_output +7730,12341663,"TERMINAL",0,0,"Step 1522, loss: 1.7983686923980713, step time: 17.867326736450195ms\r\n",,terminal_output +7731,12341726,"TERMINAL",0,0,"Step 1523, loss: 1.7900848388671875, step time: 17.967700958251953ms\r\n",,terminal_output +7732,12341790,"TERMINAL",0,0,"Step 1524, loss: 1.7896174192428589, step time: 17.418622970581055ms\r\n",,terminal_output +7733,12341889,"TERMINAL",0,0,"Step 1525, loss: 1.9154863357543945, step time: 17.82536506652832ms\r\n",,terminal_output +7734,12341950,"TERMINAL",0,0,"Step 1526, loss: 1.8543801307678223, step time: 17.7304744720459ms\r\n",,terminal_output +7735,12342009,"TERMINAL",0,0,"Step 1527, loss: 1.7895183563232422, step time: 17.64988899230957ms\r\n",,terminal_output +7736,12342073,"TERMINAL",0,0,"Step 1528, loss: 1.8038363456726074, step time: 17.677783966064453ms\r\n",,terminal_output +7737,12342132,"TERMINAL",0,0,"Step 1529, loss: 1.7906157970428467, step time: 17.95220375061035ms\r\n",,terminal_output +7738,12342193,"TERMINAL",0,0,"Step 1530, loss: 1.8510146141052246, step time: 17.676830291748047ms\r\n",,terminal_output +7739,12342255,"TERMINAL",0,0,"Step 1531, loss: 1.9428682327270508, step time: 18.027782440185547ms\r\n",,terminal_output +7740,12342315,"TERMINAL",0,0,"Step 1532, loss: 1.8779566287994385, step time: 17.769813537597656ms\r\n",,terminal_output +7741,12342376,"TERMINAL",0,0,"Step 1533, loss: 1.788272738456726, step 
time: 18.01156997680664ms\r\n",,terminal_output +7742,12342437,"TERMINAL",0,0,"Step 1534, loss: 1.8019180297851562, step time: 17.357826232910156ms\r\n",,terminal_output +7743,12342497,"TERMINAL",0,0,"Step 1535, loss: 1.8276774883270264, step time: 17.772436141967773ms\r\n",,terminal_output +7744,12342561,"TERMINAL",0,0,"Step 1536, loss: 1.7752548456192017, step time: 17.292261123657227ms\r\n",,terminal_output +7745,12342654,"TERMINAL",0,0,"Step 1537, loss: 1.8631051778793335, step time: 17.55237579345703ms\r\n",,terminal_output +7746,12342709,"TERMINAL",0,0,"Step 1538, loss: 1.8248306512832642, step time: 18.1734561920166ms\r\n",,terminal_output +7747,12342761,"TERMINAL",0,0,"Step 1539, loss: 1.7809396982192993, step time: 17.620563507080078ms\r\n",,terminal_output +7748,12342875,"TERMINAL",0,0,"Step 1540, loss: 1.8028258085250854, step time: 17.5626277923584ms\r\nStep 1541, loss: 1.7824702262878418, step time: 18.096923828125ms\r\n",,terminal_output +7749,12343185,"TERMINAL",0,0,"Step 1542, loss: 1.9649486541748047, step time: 305.74941635131836ms\r\n",,terminal_output +7750,12343296,"TERMINAL",0,0,"Step 1543, loss: 1.8078855276107788, step time: 25.10976791381836ms\r\n",,terminal_output +7751,12343357,"TERMINAL",0,0,"Step 1544, loss: 2.3567044734954834, step time: 19.872188568115234ms\r\n",,terminal_output +7752,12343461,"TERMINAL",0,0,"Step 1545, loss: 1.8734819889068604, step time: 22.2170352935791ms\r\nStep 1546, loss: 1.7860498428344727, step time: 18.00680160522461ms\r\n",,terminal_output +7753,12343553,"TERMINAL",0,0,"Step 1547, loss: 1.758764624595642, step time: 17.798900604248047ms\r\n",,terminal_output +7754,12343647,"TERMINAL",0,0,"Step 1548, loss: 1.777652382850647, step time: 17.952680587768555ms\r\nStep 1549, loss: 1.8031260967254639, step time: 18.689393997192383ms\r\n",,terminal_output +7755,12343782,"TERMINAL",0,0,"Step 1550, loss: 1.8680115938186646, step time: 17.864465713500977ms\r\nStep 1551, loss: 1.785199761390686, step time: 18.251657485961914ms\r\n",,terminal_output +7756,12343842,"TERMINAL",0,0,"Step 1552, loss: 1.7782620191574097, step time: 17.63629913330078ms\r\n",,terminal_output +7757,12343903,"TERMINAL",0,0,"Step 1553, loss: 1.7781403064727783, step time: 17.502546310424805ms\r\n",,terminal_output +7758,12343968,"TERMINAL",0,0,"Step 1554, loss: 1.777439832687378, step time: 17.67277717590332ms\r\n",,terminal_output +7759,12344029,"TERMINAL",0,0,"Step 1555, loss: 1.749213457107544, step time: 17.67420768737793ms\r\n",,terminal_output +7760,12344091,"TERMINAL",0,0,"Step 1556, loss: 2.280574321746826, step time: 17.179012298583984ms\r\n",,terminal_output +7761,12344154,"TERMINAL",0,0,"Step 1557, loss: 1.8372938632965088, step time: 17.958641052246094ms\r\n",,terminal_output +7762,12344216,"TERMINAL",0,0,"Step 1558, loss: 1.7733322381973267, step time: 17.583608627319336ms\r\n",,terminal_output +7763,12344309,"TERMINAL",0,0,"Step 1559, loss: 1.7948403358459473, step time: 17.640352249145508ms\r\n",,terminal_output +7764,12344416,"TERMINAL",0,0,"Step 1560, loss: 1.7665765285491943, step time: 18.056869506835938ms\r\nStep 1561, loss: 1.7858752012252808, step time: 18.22972297668457ms\r\n",,terminal_output +7765,12344478,"TERMINAL",0,0,"Step 1562, loss: 2.2707722187042236, step time: 17.165422439575195ms\r\n",,terminal_output +7766,12344541,"TERMINAL",0,0,"Step 1563, loss: 1.7983336448669434, step time: 17.838239669799805ms\r\n",,terminal_output +7767,12344605,"TERMINAL",0,0,"Step 1564, loss: 2.2929322719573975, step time: 
17.473936080932617ms\r\n",,terminal_output +7768,12344673,"TERMINAL",0,0,"Step 1565, loss: 1.7688989639282227, step time: 17.65584945678711ms\r\n",,terminal_output +7769,12344733,"TERMINAL",0,0,"Step 1566, loss: 1.7788984775543213, step time: 17.779111862182617ms\r\n",,terminal_output +7770,12344790,"TERMINAL",0,0,"Step 1567, loss: 1.8416999578475952, step time: 17.439842224121094ms\r\n",,terminal_output +7771,12344853,"TERMINAL",0,0,"Step 1568, loss: 1.9072256088256836, step time: 17.246723175048828ms\r\n",,terminal_output +7772,12344918,"TERMINAL",0,0,"Step 1569, loss: 1.8582098484039307, step time: 17.650604248046875ms\r\n",,terminal_output +7773,12344982,"TERMINAL",0,0,"Step 1570, loss: 1.7824079990386963, step time: 18.104076385498047ms\r\n",,terminal_output +7774,12345046,"TERMINAL",0,0,"Step 1571, loss: 1.7854292392730713, step time: 17.4863338470459ms\r\n",,terminal_output +7775,12345110,"TERMINAL",0,0,"Step 1572, loss: 2.0823464393615723, step time: 17.78125762939453ms\r\n",,terminal_output +7776,12345176,"TERMINAL",0,0,"Step 1573, loss: 1.7758342027664185, step time: 17.834186553955078ms\r\n",,terminal_output +7777,12345242,"TERMINAL",0,0,"Step 1574, loss: 1.764944314956665, step time: 17.419099807739258ms\r\n",,terminal_output +7778,12345303,"TERMINAL",0,0,"Step 1575, loss: 1.7654973268508911, step time: 18.844127655029297ms\r\n",,terminal_output +7779,12345367,"TERMINAL",0,0,"Step 1576, loss: 1.7841075658798218, step time: 17.539501190185547ms\r\n",,terminal_output +7780,12345433,"TERMINAL",0,0,"Step 1577, loss: 1.8264548778533936, step time: 17.613649368286133ms\r\n",,terminal_output +7781,12345492,"TERMINAL",0,0,"Step 1578, loss: 1.7780711650848389, step time: 17.579317092895508ms\r\n",,terminal_output +7782,12345557,"TERMINAL",0,0,"Step 1579, loss: 1.8201059103012085, step time: 17.47918128967285ms\r\n",,terminal_output +7783,12345621,"TERMINAL",0,0,"Step 1580, loss: 1.8932218551635742, step time: 17.385482788085938ms\r\n",,terminal_output +7784,12345687,"TERMINAL",0,0,"Step 1581, loss: 1.7667977809906006, step time: 18.056869506835938ms\r\n",,terminal_output +7785,12345753,"TERMINAL",0,0,"Step 1582, loss: 1.7484586238861084, step time: 17.516374588012695ms\r\n",,terminal_output +7786,12345807,"TERMINAL",0,0,"Step 1583, loss: 1.78519868850708, step time: 17.607450485229492ms\r\n",,terminal_output +7787,12345872,"TERMINAL",0,0,"Step 1584, loss: 1.7605156898498535, step time: 17.725229263305664ms\r\n",,terminal_output +7788,12345936,"TERMINAL",0,0,"Step 1585, loss: 1.9148972034454346, step time: 17.822265625ms\r\n",,terminal_output +7789,12346000,"TERMINAL",0,0,"Step 1586, loss: 1.7482954263687134, step time: 17.437219619750977ms\r\n",,terminal_output +7790,12346062,"TERMINAL",0,0,"Step 1587, loss: 1.8010870218276978, step time: 18.103599548339844ms\r\n",,terminal_output +7791,12346126,"TERMINAL",0,0,"Step 1588, loss: 1.773201823234558, step time: 19.67763900756836ms\r\n",,terminal_output +7792,12346190,"TERMINAL",0,0,"Step 1589, loss: 1.7411752939224243, step time: 17.94147491455078ms\r\n",,terminal_output +7793,12346252,"TERMINAL",0,0,"Step 1590, loss: 1.8069676160812378, step time: 17.818450927734375ms\r\n",,terminal_output +7794,12346319,"TERMINAL",0,0,"Step 1591, loss: 1.7939562797546387, step time: 17.571210861206055ms\r\n",,terminal_output +7795,12346381,"TERMINAL",0,0,"Step 1592, loss: 1.753301978111267, step time: 17.366647720336914ms\r\n",,terminal_output +7796,12346442,"TERMINAL",0,0,"Step 1593, loss: 1.7563081979751587, step time: 
17.80390739440918ms\r\n",,terminal_output +7797,12346505,"TERMINAL",0,0,"Step 1594, loss: 1.8775839805603027, step time: 17.233610153198242ms\r\n",,terminal_output +7798,12346571,"TERMINAL",0,0,"Step 1595, loss: 1.738688588142395, step time: 17.15683937072754ms\r\n",,terminal_output +7799,12346632,"TERMINAL",0,0,"Step 1596, loss: 1.7382018566131592, step time: 17.638444900512695ms\r\n",,terminal_output +7800,12346707,"TERMINAL",0,0,"Step 1597, loss: 1.742969274520874, step time: 17.531633377075195ms\r\n",,terminal_output +7801,12346759,"TERMINAL",0,0,"Step 1598, loss: 1.728883981704712, step time: 16.89291000366211ms\r\n",,terminal_output +7802,12346865,"TERMINAL",0,0,"Step 1599, loss: 1.8613282442092896, step time: 29.008150100708008ms\r\n",,terminal_output +7803,12346930,"TERMINAL",0,0,"Step 1600, loss: 1.7137609720230103, step time: 17.935991287231445ms\r\n",,terminal_output +7804,12346991,"TERMINAL",0,0,"Step 1601, loss: 1.8065612316131592, step time: 29.345035552978516ms\r\n",,terminal_output +7805,12347054,"TERMINAL",0,0,"Step 1602, loss: 1.7469444274902344, step time: 19.766569137573242ms\r\n",,terminal_output +7806,12347117,"TERMINAL",0,0,"Step 1603, loss: 1.744879961013794, step time: 17.717361450195312ms\r\n",,terminal_output +7807,12347179,"TERMINAL",0,0,"Step 1604, loss: 1.7352477312088013, step time: 17.305612564086914ms\r\n",,terminal_output +7808,12347285,"TERMINAL",0,0,"Step 1605, loss: 1.7302147150039673, step time: 18.148183822631836ms\r\nStep 1606, loss: 1.8791308403015137, step time: 17.38142967224121ms\r\n",,terminal_output +7809,12347380,"TERMINAL",0,0,"Step 1607, loss: 1.7236942052841187, step time: 17.697811126708984ms\r\n",,terminal_output +7810,12347431,"TERMINAL",0,0,"Step 1608, loss: 1.8858444690704346, step time: 17.60554313659668ms\r\n",,terminal_output +7811,12347577,"TERMINAL",0,0,"Step 1609, loss: 1.7336300611495972, step time: 17.829418182373047ms\r\nStep 1610, loss: 1.7816723585128784, step time: 17.597436904907227ms\r\n",,terminal_output +7812,12347629,"TERMINAL",0,0,"Step 1611, loss: 1.7929106950759888, step time: 17.752885818481445ms\r\n",,terminal_output +7813,12347740,"TERMINAL",0,0,"Step 1612, loss: 1.7288082838058472, step time: 17.42100715637207ms\r\nStep 1613, loss: 1.713800072669983, step time: 17.5778865814209ms\r\n",,terminal_output +7814,12347805,"TERMINAL",0,0,"Step 1614, loss: 1.720908284187317, step time: 17.69089698791504ms\r\n",,terminal_output +7815,12347863,"TERMINAL",0,0,"Step 1615, loss: 1.714783787727356, step time: 17.546415328979492ms\r\n",,terminal_output +7816,12347924,"TERMINAL",0,0,"Step 1616, loss: 1.8362029790878296, step time: 17.525434494018555ms\r\n",,terminal_output +7817,12348034,"TERMINAL",0,0,"Step 1617, loss: 1.9211863279342651, step time: 25.27785301208496ms\r\n",,terminal_output +7818,12348087,"TERMINAL",0,0,"Step 1618, loss: 2.0270354747772217, step time: 18.00370216369629ms\r\n",,terminal_output +7819,12348194,"TERMINAL",0,0,"Step 1619, loss: 1.9628099203109741, step time: 17.36736297607422ms\r\nStep 1620, loss: 1.8080106973648071, step time: 17.732858657836914ms\r\n",,terminal_output +7820,12348286,"TERMINAL",0,0,"Step 1621, loss: 1.731273889541626, step time: 17.827510833740234ms\r\n",,terminal_output +7821,12348392,"TERMINAL",0,0,"Step 1622, loss: 1.8245899677276611, step time: 17.45319366455078ms\r\nStep 1623, loss: 1.7299004793167114, step time: 18.105030059814453ms\r\n",,terminal_output +7822,12348453,"TERMINAL",0,0,"Step 1624, loss: 1.7219092845916748, step time: 17.55523681640625ms\r\n",,terminal_output 
+7823,12348514,"TERMINAL",0,0,"Step 1625, loss: 1.740703821182251, step time: 17.588138580322266ms\r\n",,terminal_output +7824,12348577,"TERMINAL",0,0,"Step 1626, loss: 1.7615931034088135, step time: 17.906904220581055ms\r\n",,terminal_output +7825,12348644,"TERMINAL",0,0,"Step 1627, loss: 1.7195626497268677, step time: 18.96953582763672ms\r\n",,terminal_output +7826,12348706,"TERMINAL",0,0,"Step 1628, loss: 1.7082087993621826, step time: 17.82059669494629ms\r\n",,terminal_output +7827,12348758,"TERMINAL",0,0,"Step 1629, loss: 1.9913506507873535, step time: 18.185138702392578ms\r\n",,terminal_output +7828,12348885,"TERMINAL",0,0,"Step 1630, loss: 1.73832106590271, step time: 17.762422561645508ms\r\nStep 1631, loss: 1.8854178190231323, step time: 17.667293548583984ms\r\n",,terminal_output +7829,12348947,"TERMINAL",0,0,"Step 1632, loss: 1.734789252281189, step time: 18.050670623779297ms\r\n",,terminal_output +7830,12349040,"TERMINAL",0,0,"Step 1633, loss: 1.7187397480010986, step time: 17.971277236938477ms\r\n",,terminal_output +7831,12349094,"TERMINAL",0,0,"Step 1634, loss: 1.7350267171859741, step time: 17.100095748901367ms\r\n",,terminal_output +7832,12349191,"TERMINAL",0,0,"Step 1635, loss: 1.7269384860992432, step time: 21.075963973999023ms\r\n",,terminal_output +7833,12349270,"TERMINAL",0,0,"Step 1636, loss: 1.7115942239761353, step time: 17.63176918029785ms\r\nStep 1637, loss: 1.7234418392181396, step time: 17.460346221923828ms\r\n",,terminal_output +7834,12349369,"TERMINAL",0,0,"Step 1638, loss: 1.7483761310577393, step time: 17.651796340942383ms\r\n",,terminal_output +7835,12349429,"TERMINAL",0,0,"Step 1639, loss: 1.7094675302505493, step time: 17.720937728881836ms\r\n",,terminal_output +7836,12349491,"TERMINAL",0,0,"Step 1640, loss: 1.7055370807647705, step time: 17.139196395874023ms\r\n",,terminal_output +7837,12349552,"TERMINAL",0,0,"Step 1641, loss: 1.9796911478042603, step time: 17.87543296813965ms\r\n",,terminal_output +7838,12349614,"TERMINAL",0,0,"Step 1642, loss: 1.705365538597107, step time: 17.188072204589844ms\r\n",,terminal_output +7839,12349676,"TERMINAL",0,0,"Step 1643, loss: 1.6954587697982788, step time: 17.177581787109375ms\r\n",,terminal_output +7840,12349780,"TERMINAL",0,0,"Step 1644, loss: 1.6723763942718506, step time: 17.31252670288086ms\r\nStep 1645, loss: 1.7718415260314941, step time: 17.825603485107422ms\r\n",,terminal_output +7841,12349844,"TERMINAL",0,0,"Step 1646, loss: 1.6881016492843628, step time: 17.321109771728516ms\r\n",,terminal_output +7842,12350030,"TERMINAL",0,0,"Step 1647, loss: 1.7020002603530884, step time: 18.034696578979492ms\r\n",,terminal_output +7843,12350104,"TERMINAL",0,0,"Step 1648, loss: 1.6881438493728638, step time: 17.571449279785156ms\r\nStep 1649, loss: 1.7765856981277466, step time: 17.747879028320312ms\r\nStep 1650, loss: 1.6850875616073608, step time: 17.897844314575195ms\r\n",,terminal_output +7844,12350198,"TERMINAL",0,0,"Step 1651, loss: 1.7315303087234497, step time: 17.483949661254883ms\r\n",,terminal_output +7845,12350250,"TERMINAL",0,0,"Step 1652, loss: 1.6903146505355835, step time: 17.218589782714844ms\r\n",,terminal_output +7846,12350356,"TERMINAL",0,0,"Step 1653, loss: 1.7887786626815796, step time: 19.07801628112793ms\r\nStep 1654, loss: 1.6995795965194702, step time: 17.23957061767578ms\r\n",,terminal_output +7847,12350422,"TERMINAL",0,0,"Step 1655, loss: 1.6829074621200562, step time: 17.4257755279541ms\r\n",,terminal_output +7848,12350517,"TERMINAL",0,0,"Step 1656, loss: 1.6976529359817505, step time: 
17.66824722290039ms\r\n",,terminal_output +7849,12350623,"TERMINAL",0,0,"Step 1657, loss: 1.664505124092102, step time: 17.575979232788086ms\r\nStep 1658, loss: 1.6982630491256714, step time: 17.22097396850586ms\r\n",,terminal_output +7850,12350686,"TERMINAL",0,0,"Step 1659, loss: 1.6795289516448975, step time: 17.50969886779785ms\r\n",,terminal_output +7851,12350750,"TERMINAL",0,0,"Step 1660, loss: 2.0635876655578613, step time: 17.186403274536133ms\r\n",,terminal_output +7852,12350802,"TERMINAL",0,0,"Step 1661, loss: 1.766906976699829, step time: 17.293691635131836ms\r\n",,terminal_output +7853,12350898,"TERMINAL",0,0,"Step 1662, loss: 1.6964614391326904, step time: 17.4562931060791ms\r\n",,terminal_output +7854,12350950,"TERMINAL",0,0,"Step 1663, loss: 1.752098798751831, step time: 17.22884178161621ms\r\n",,terminal_output +7855,12351056,"TERMINAL",0,0,"Step 1664, loss: 1.7001022100448608, step time: 17.108678817749023ms\r\nStep 1665, loss: 1.7702715396881104, step time: 17.537832260131836ms\r\n",,terminal_output +7856,12351121,"TERMINAL",0,0,"Step 1666, loss: 1.6618239879608154, step time: 17.158031463623047ms\r\n",,terminal_output +7857,12351185,"TERMINAL",0,0,"Step 1667, loss: 1.6963741779327393, step time: 17.18902587890625ms\r\n",,terminal_output +7858,12351249,"TERMINAL",0,0,"Step 1668, loss: 1.666621208190918, step time: 17.803668975830078ms\r\n",,terminal_output +7859,12351311,"TERMINAL",0,0,"Step 1669, loss: 1.6683202981948853, step time: 19.809722900390625ms\r\n",,terminal_output +7860,12351375,"TERMINAL",0,0,"Step 1670, loss: 1.6513731479644775, step time: 17.091035842895508ms\r\n",,terminal_output +7861,12351442,"TERMINAL",0,0,"Step 1671, loss: 1.6657215356826782, step time: 17.834186553955078ms\r\n",,terminal_output +7862,12351502,"TERMINAL",0,0,"Step 1672, loss: 1.665015697479248, step time: 17.24100112915039ms\r\n",,terminal_output +7863,12351563,"TERMINAL",0,0,"Step 1673, loss: 1.686783790588379, step time: 17.557382583618164ms\r\n",,terminal_output +7864,12351629,"TERMINAL",0,0,"Step 1674, loss: 1.6832717657089233, step time: 18.166303634643555ms\r\n",,terminal_output +7865,12351708,"TERMINAL",0,0,"Step 1675, loss: 1.7114522457122803, step time: 24.58977699279785ms\r\n",,terminal_output +7866,12351770,"TERMINAL",0,0,"Step 1676, loss: 1.6665807962417603, step time: 33.185720443725586ms\r\n",,terminal_output +7867,12351836,"TERMINAL",0,0,"Step 1677, loss: 1.723129153251648, step time: 31.197309494018555ms\r\n",,terminal_output +7868,12351942,"TERMINAL",0,0,"Step 1678, loss: 1.6478264331817627, step time: 24.38974380493164ms\r\n",,terminal_output +7869,12351995,"TERMINAL",0,0,"Step 1679, loss: 1.6961764097213745, step time: 25.40731430053711ms\r\n",,terminal_output +7870,12352091,"TERMINAL",0,0,"Step 1680, loss: 1.660515546798706, step time: 38.10405731201172ms\r\n",,terminal_output +7871,12352155,"TERMINAL",0,0,"Step 1681, loss: 1.7586027383804321, step time: 27.851104736328125ms\r\n",,terminal_output +7872,12352217,"TERMINAL",0,0,"Step 1682, loss: 1.6819339990615845, step time: 24.399757385253906ms\r\n",,terminal_output +7873,12352279,"TERMINAL",0,0,"Step 1683, loss: 1.6514177322387695, step time: 24.613142013549805ms\r\n",,terminal_output +7874,12352340,"TERMINAL",0,0,"Step 1684, loss: 1.675528883934021, step time: 19.89006996154785ms\r\n",,terminal_output +7875,12352406,"TERMINAL",0,0,"Step 1685, loss: 1.64907968044281, step time: 18.515586853027344ms\r\n",,terminal_output +7876,12352511,"TERMINAL",0,0,"Step 1686, loss: 1.69243323802948, step time: 
17.997026443481445ms\r\nStep 1687, loss: 1.645233154296875, step time: 17.89569854736328ms\r\n",,terminal_output +7877,12352608,"TERMINAL",0,0,"Step 1688, loss: 1.676406741142273, step time: 17.390727996826172ms\r\n",,terminal_output +7878,12352671,"TERMINAL",0,0,"Step 1689, loss: 1.6394336223602295, step time: 18.72730255126953ms\r\n",,terminal_output +7879,12352764,"TERMINAL",0,0,"Step 1690, loss: 1.746100664138794, step time: 17.55523681640625ms\r\nStep 1691, loss: 1.6454819440841675, step time: 17.654895782470703ms\r\n",,terminal_output +7880,12352829,"TERMINAL",0,0,"Step 1692, loss: 1.6514259576797485, step time: 18.247604370117188ms\r\n",,terminal_output +7881,12352930,"TERMINAL",0,0,"Step 1693, loss: 1.6787670850753784, step time: 18.172025680541992ms\r\n",,terminal_output +7882,12352983,"TERMINAL",0,0,"Step 1694, loss: 1.6250929832458496, step time: 17.455577850341797ms\r\n",,terminal_output +7883,12353088,"TERMINAL",0,0,"Step 1695, loss: 1.6630260944366455, step time: 18.36252212524414ms\r\nStep 1696, loss: 1.645230770111084, step time: 17.747879028320312ms\r\n",,terminal_output +7884,12353150,"TERMINAL",0,0,"Step 1697, loss: 1.6706397533416748, step time: 17.610788345336914ms\r\n",,terminal_output +7885,12353272,"TERMINAL",0,0,"Step 1698, loss: 1.6360276937484741, step time: 17.95053482055664ms\r\nStep 1699, loss: 1.718072533607483, step time: 17.927885055541992ms\r\n",,terminal_output +7886,12353335,"TERMINAL",0,0,"Step 1700, loss: 1.651178240776062, step time: 17.69232749938965ms\r\n",,terminal_output +7887,12353400,"TERMINAL",0,0,"Step 1701, loss: 1.6350679397583008, step time: 18.429994583129883ms\r\n",,terminal_output +7888,12353466,"TERMINAL",0,0,"Step 1702, loss: 1.6477766036987305, step time: 17.765045166015625ms\r\n",,terminal_output +7889,12353527,"TERMINAL",0,0,"Step 1703, loss: 1.6238971948623657, step time: 17.697811126708984ms\r\n",,terminal_output +7890,12353602,"TERMINAL",0,0,"Step 1704, loss: 1.6390691995620728, step time: 18.10455322265625ms\r\n",,terminal_output +7891,12353654,"TERMINAL",0,0,"Step 1705, loss: 1.7352591753005981, step time: 19.17576789855957ms\r\n",,terminal_output +7892,12353746,"TERMINAL",0,0,"Step 1706, loss: 1.6269381046295166, step time: 17.637968063354492ms\r\n",,terminal_output +7893,12353809,"TERMINAL",0,0,"Step 1707, loss: 1.6360678672790527, step time: 18.337726593017578ms\r\n",,terminal_output +7894,12353868,"TERMINAL",0,0,"Step 1708, loss: 1.6146615743637085, step time: 17.530202865600586ms\r\n",,terminal_output +7895,12353932,"TERMINAL",0,0,"Step 1709, loss: 1.6182482242584229, step time: 17.487764358520508ms\r\n",,terminal_output +7896,12353991,"TERMINAL",0,0,"Step 1710, loss: 1.631109356880188, step time: 17.838001251220703ms\r\n",,terminal_output +7897,12354054,"TERMINAL",0,0,"Step 1711, loss: 2.0897579193115234, step time: 17.583370208740234ms\r\n",,terminal_output +7898,12354116,"TERMINAL",0,0,"Step 1712, loss: 1.6337826251983643, step time: 17.227888107299805ms\r\n",,terminal_output +7899,12354179,"TERMINAL",0,0,"Step 1713, loss: 1.6224397420883179, step time: 18.10169219970703ms\r\n",,terminal_output +7900,12354241,"TERMINAL",0,0,"Step 1714, loss: 1.6451356410980225, step time: 17.47751235961914ms\r\n",,terminal_output +7901,12354304,"TERMINAL",0,0,"Step 1715, loss: 1.6451927423477173, step time: 17.562389373779297ms\r\n",,terminal_output +7902,12354368,"TERMINAL",0,0,"Step 1716, loss: 1.6112544536590576, step time: 17.682313919067383ms\r\n",,terminal_output +7903,12354432,"TERMINAL",0,0,"Step 1717, loss: 1.6240040063858032, 
step time: 17.707347869873047ms\r\n",,terminal_output +7904,12354494,"TERMINAL",0,0,"Step 1718, loss: 1.7202411890029907, step time: 16.98923110961914ms\r\n",,terminal_output +7905,12354557,"TERMINAL",0,0,"Step 1719, loss: 1.7821719646453857, step time: 18.001079559326172ms\r\n",,terminal_output +7906,12354623,"TERMINAL",0,0,"Step 1720, loss: 1.7243531942367554, step time: 17.110347747802734ms\r\n",,terminal_output +7907,12354684,"TERMINAL",0,0,"Step 1721, loss: 1.6300361156463623, step time: 17.41170883178711ms\r\n",,terminal_output +7908,12354746,"TERMINAL",0,0,"Step 1722, loss: 1.6360524892807007, step time: 17.598628997802734ms\r\n",,terminal_output +7909,12354885,"TERMINAL",0,0,"Step 1723, loss: 1.6657768487930298, step time: 17.660140991210938ms\r\nStep 1724, loss: 1.6331136226654053, step time: 18.47100257873535ms\r\n",,terminal_output +7910,12354989,"TERMINAL",0,0,"Step 1725, loss: 1.6078811883926392, step time: 18.086671829223633ms\r\n",,terminal_output +7911,12355041,"TERMINAL",0,0,"Step 1726, loss: 1.6214995384216309, step time: 17.487287521362305ms\r\n",,terminal_output +7912,12355137,"TERMINAL",0,0,"Step 1727, loss: 1.6471372842788696, step time: 17.64392852783203ms\r\n",,terminal_output +7913,12355506,"TERMINAL",0,0,"Step 1728, loss: 1.6802345514297485, step time: 358.6463928222656ms\r\nStep 1729, loss: 1.6413978338241577, step time: 27.767658233642578ms\r\n",,terminal_output +7914,12355612,"TERMINAL",0,0,"Step 1730, loss: 1.635027289390564, step time: 19.924402236938477ms\r\n",,terminal_output +7915,12355665,"TERMINAL",0,0,"Step 1731, loss: 1.682108759880066, step time: 19.158124923706055ms\r\n",,terminal_output +7916,12355771,"TERMINAL",0,0,"Step 1732, loss: 1.6390825510025024, step time: 17.635345458984375ms\r\nStep 1733, loss: 1.9551398754119873, step time: 17.86661148071289ms\r\n",,terminal_output +7917,12355835,"TERMINAL",0,0,"Step 1734, loss: 1.6953318119049072, step time: 18.07856559753418ms\r\n",,terminal_output +7918,12355894,"TERMINAL",0,0,"Step 1735, loss: 1.5994404554367065, step time: 17.61651039123535ms\r\n",,terminal_output +7919,12355958,"TERMINAL",0,0,"Step 1736, loss: 1.6078490018844604, step time: 17.399311065673828ms\r\n",,terminal_output +7920,12356066,"TERMINAL",0,0,"Step 1737, loss: 1.7134828567504883, step time: 31.996488571166992ms\r\n",,terminal_output +7921,12356117,"TERMINAL",0,0,"Step 1738, loss: 1.612112283706665, step time: 18.453598022460938ms\r\n",,terminal_output +7922,12356220,"TERMINAL",0,0,"Step 1739, loss: 1.626162052154541, step time: 17.812490463256836ms\r\nStep 1740, loss: 1.6072121858596802, step time: 17.972946166992188ms\r\n",,terminal_output +7923,12356282,"TERMINAL",0,0,"Step 1741, loss: 1.6291526556015015, step time: 17.78888702392578ms\r\n",,terminal_output +7924,12356343,"TERMINAL",0,0,"Step 1742, loss: 1.7554594278335571, step time: 17.427682876586914ms\r\n",,terminal_output +7925,12356406,"TERMINAL",0,0,"Step 1743, loss: 1.6513419151306152, step time: 17.95363426208496ms\r\n",,terminal_output +7926,12356470,"TERMINAL",0,0,"Step 1744, loss: 1.791731595993042, step time: 17.501354217529297ms\r\n",,terminal_output +7927,12356537,"TERMINAL",0,0,"Step 1745, loss: 1.6255687475204468, step time: 17.719030380249023ms\r\n",,terminal_output +7928,12356601,"TERMINAL",0,0,"Step 1746, loss: 1.627820372581482, step time: 17.733097076416016ms\r\n",,terminal_output +7929,12356664,"TERMINAL",0,0,"Step 1747, loss: 1.5976132154464722, step time: 17.681121826171875ms\r\n",,terminal_output +7930,12356730,"TERMINAL",0,0,"Step 1748, loss: 
1.613918662071228, step time: 17.61341094970703ms\r\n",,terminal_output +7931,12356858,"TERMINAL",0,0,"Step 1749, loss: 1.6171400547027588, step time: 18.2647705078125ms\r\nStep 1750, loss: 1.6345646381378174, step time: 19.52195167541504ms\r\n",,terminal_output +7932,12356991,"TERMINAL",0,0,"Step 1751, loss: 1.6439329385757446, step time: 18.07236671447754ms\r\nStep 1752, loss: 1.62680184841156, step time: 18.061399459838867ms\r\n",,terminal_output +7933,12357052,"TERMINAL",0,0,"Step 1753, loss: 1.7068592309951782, step time: 18.128156661987305ms\r\n",,terminal_output +7934,12357117,"TERMINAL",0,0,"Step 1754, loss: 1.6106910705566406, step time: 17.586946487426758ms\r\n",,terminal_output +7935,12357177,"TERMINAL",0,0,"Step 1755, loss: 1.5887575149536133, step time: 18.088817596435547ms\r\n",,terminal_output +7936,12357241,"TERMINAL",0,0,"Step 1756, loss: 1.5738004446029663, step time: 17.656326293945312ms\r\n",,terminal_output +7937,12357365,"TERMINAL",0,0,"Step 1757, loss: 1.647818684577942, step time: 17.58575439453125ms\r\nStep 1758, loss: 1.6019322872161865, step time: 18.060922622680664ms\r\n",,terminal_output +7938,12357426,"TERMINAL",0,0,"Step 1759, loss: 1.6114780902862549, step time: 17.93527603149414ms\r\n",,terminal_output +7939,12357490,"TERMINAL",0,0,"Step 1760, loss: 1.664872646331787, step time: 17.42267608642578ms\r\n",,terminal_output +7940,12357582,"TERMINAL",0,0,"Step 1761, loss: 1.6146411895751953, step time: 18.13960075378418ms\r\n",,terminal_output +7941,12357633,"TERMINAL",0,0,"Step 1762, loss: 1.6161390542984009, step time: 17.69733428955078ms\r\n",,terminal_output +7942,12357743,"TERMINAL",0,0,"Step 1763, loss: 1.608345866203308, step time: 17.577171325683594ms\r\nStep 1764, loss: 1.8396357297897339, step time: 17.86208152770996ms\r\n",,terminal_output +7943,12357807,"TERMINAL",0,0,"Step 1765, loss: 1.5933767557144165, step time: 17.81773567199707ms\r\n",,terminal_output +7944,12357901,"TERMINAL",0,0,"Step 1766, loss: 1.876944899559021, step time: 17.50469207763672ms\r\n",,terminal_output +7945,12357954,"TERMINAL",0,0,"Step 1767, loss: 1.712045669555664, step time: 17.971515655517578ms\r\n",,terminal_output +7946,12358099,"TERMINAL",0,0,"Step 1768, loss: 1.6732912063598633, step time: 17.653465270996094ms\r\nStep 1769, loss: 1.5862692594528198, step time: 17.462491989135742ms\r\n",,terminal_output +7947,12358155,"TERMINAL",0,0,"Step 1770, loss: 1.5758647918701172, step time: 18.89944076538086ms\r\n",,terminal_output +7948,12358259,"TERMINAL",0,0,"Step 1771, loss: 1.6414077281951904, step time: 17.958402633666992ms\r\nStep 1772, loss: 1.6446315050125122, step time: 17.290115356445312ms\r\n",,terminal_output +7949,12358326,"TERMINAL",0,0,"Step 1773, loss: 1.7079466581344604, step time: 18.343687057495117ms\r\n",,terminal_output +7950,12358384,"TERMINAL",0,0,"Step 1774, loss: 1.5994232892990112, step time: 17.687082290649414ms\r\n",,terminal_output +7951,12358447,"TERMINAL",0,0,"Step 1775, loss: 1.6672688722610474, step time: 17.246007919311523ms\r\n",,terminal_output +7952,12358511,"TERMINAL",0,0,"Step 1776, loss: 1.9607495069503784, step time: 17.822265625ms\r\n",,terminal_output +7953,12358572,"TERMINAL",0,0,"Step 1777, loss: 1.6432381868362427, step time: 18.108367919921875ms\r\n",,terminal_output +7954,12358644,"TERMINAL",0,0,"Step 1778, loss: 1.5829139947891235, step time: 17.18282699584961ms\r\n",,terminal_output +7955,12358700,"TERMINAL",0,0,"Step 1779, loss: 1.5949244499206543, step time: 19.302845001220703ms\r\n",,terminal_output 
+7956,12358766,"TERMINAL",0,0,"Step 1780, loss: 1.6968046426773071, step time: 17.827510833740234ms\r\n",,terminal_output +7957,12358830,"TERMINAL",0,0,"Step 1781, loss: 1.635400414466858, step time: 17.589569091796875ms\r\n",,terminal_output +7958,12358893,"TERMINAL",0,0,"Step 1782, loss: 1.5913045406341553, step time: 18.6767578125ms\r\n",,terminal_output +7959,12358956,"TERMINAL",0,0,"Step 1783, loss: 1.5934230089187622, step time: 17.874956130981445ms\r\n",,terminal_output +7960,12359019,"TERMINAL",0,0,"Step 1784, loss: 1.5887802839279175, step time: 17.220497131347656ms\r\n",,terminal_output +7961,12359084,"TERMINAL",0,0,"Step 1785, loss: 1.5938091278076172, step time: 18.114328384399414ms\r\n",,terminal_output +7962,12359182,"TERMINAL",0,0,"Step 1786, loss: 1.726784110069275, step time: 17.43936538696289ms\r\n",,terminal_output +7963,12359235,"TERMINAL",0,0,"Step 1787, loss: 1.839153528213501, step time: 17.382144927978516ms\r\n",,terminal_output +7964,12359342,"TERMINAL",0,0,"Step 1788, loss: 1.6063578128814697, step time: 18.310070037841797ms\r\nStep 1789, loss: 1.6255390644073486, step time: 17.73667335510254ms\r\n",,terminal_output +7965,12359405,"TERMINAL",0,0,"Step 1790, loss: 1.5802494287490845, step time: 17.32325553894043ms\r\n",,terminal_output +7966,12359467,"TERMINAL",0,0,"Step 1791, loss: 1.5845756530761719, step time: 18.05877685546875ms\r\n",,terminal_output +7967,12359531,"TERMINAL",0,0,"Step 1792, loss: 1.6069449186325073, step time: 17.4102783203125ms\r\n",,terminal_output +7968,12359597,"TERMINAL",0,0,"Step 1793, loss: 1.5626050233840942, step time: 17.532825469970703ms\r\n",,terminal_output +7969,12359660,"TERMINAL",0,0,"Step 1794, loss: 1.5629994869232178, step time: 17.687320709228516ms\r\n",,terminal_output +7970,12359724,"TERMINAL",0,0,"Step 1795, loss: 1.5711872577667236, step time: 17.424345016479492ms\r\n",,terminal_output +7971,12359825,"TERMINAL",0,0,"Step 1796, loss: 1.5675967931747437, step time: 17.064809799194336ms\r\n",,terminal_output +7972,12359887,"TERMINAL",0,0,"Step 1797, loss: 1.5541422367095947, step time: 18.314123153686523ms\r\n",,terminal_output +7973,12359951,"TERMINAL",0,0,"Step 1798, loss: 1.5776599645614624, step time: 17.67277717590332ms\r\n",,terminal_output +7974,12360015,"TERMINAL",0,0,"Step 1799, loss: 1.556792974472046, step time: 17.748117446899414ms\r\n",,terminal_output +7975,12360079,"TERMINAL",0,0,"Step 1800, loss: 1.597133994102478, step time: 18.0666446685791ms\r\n",,terminal_output +7976,12360170,"TERMINAL",0,0,"Step 1801, loss: 1.637808918952942, step time: 18.01776885986328ms\r\nStep 1802, loss: 1.574985146522522, step time: 17.513275146484375ms\r\n",,terminal_output +7977,12360267,"TERMINAL",0,0,"Step 1803, loss: 1.7937825918197632, step time: 18.164396286010742ms\r\n",,terminal_output +7978,12360329,"TERMINAL",0,0,"Step 1804, loss: 1.5868475437164307, step time: 17.676353454589844ms\r\n",,terminal_output +7979,12360393,"TERMINAL",0,0,"Step 1805, loss: 1.56648850440979, step time: 17.586231231689453ms\r\n",,terminal_output +7980,12360457,"TERMINAL",0,0,"Step 1806, loss: 1.5442607402801514, step time: 18.093347549438477ms\r\n",,terminal_output +7981,12360521,"TERMINAL",0,0,"Step 1807, loss: 1.5494571924209595, step time: 18.01013946533203ms\r\n",,terminal_output +7982,12360586,"TERMINAL",0,0,"Step 1808, loss: 1.5838747024536133, step time: 17.412185668945312ms\r\n",,terminal_output +7983,12360649,"TERMINAL",0,0,"Step 1809, loss: 1.5798271894454956, step time: 18.306255340576172ms\r\n",,terminal_output 
+7984,12360751,"TERMINAL",0,0,"Step 1810, loss: 1.5526372194290161, step time: 17.637014389038086ms\r\nStep 1811, loss: 1.5489827394485474, step time: 17.774105072021484ms\r\n",,terminal_output +7985,12360847,"TERMINAL",0,0,"Step 1812, loss: 1.5461809635162354, step time: 18.069028854370117ms\r\n",,terminal_output +7986,12360911,"TERMINAL",0,0,"Step 1813, loss: 1.5730350017547607, step time: 17.878293991088867ms\r\n",,terminal_output +7987,12361013,"TERMINAL",0,0,"Step 1814, loss: 1.5597426891326904, step time: 17.389535903930664ms\r\nStep 1815, loss: 1.5497429370880127, step time: 17.99917221069336ms\r\n",,terminal_output +7988,12361089,"TERMINAL",0,0,"Step 1816, loss: 1.6209319829940796, step time: 17.473697662353516ms\r\n",,terminal_output +7989,12361215,"TERMINAL",0,0,"Step 1817, loss: 1.6075810194015503, step time: 17.583131790161133ms\r\nStep 1818, loss: 1.5749821662902832, step time: 17.429828643798828ms\r\n",,terminal_output +7990,12361322,"TERMINAL",0,0,"Step 1819, loss: 1.536383032798767, step time: 17.309188842773438ms\r\nStep 1820, loss: 1.5194261074066162, step time: 17.18425750732422ms\r\n",,terminal_output +7991,12361419,"TERMINAL",0,0,"Step 1821, loss: 1.55721914768219, step time: 18.067121505737305ms\r\n",,terminal_output +7992,12361483,"TERMINAL",0,0,"Step 1822, loss: 1.5384693145751953, step time: 17.166852951049805ms\r\n",,terminal_output +7993,12361534,"TERMINAL",0,0,"Step 1823, loss: 1.598897099494934, step time: 17.6846981048584ms\r\n",,terminal_output +7994,12361637,"TERMINAL",0,0,"Step 1824, loss: 1.6413578987121582, step time: 17.88949966430664ms\r\nStep 1825, loss: 1.6791713237762451, step time: 17.5321102142334ms\r\n",,terminal_output +7995,12361705,"TERMINAL",0,0,"Step 1826, loss: 1.5628929138183594, step time: 17.1661376953125ms\r\n",,terminal_output +7996,12361756,"TERMINAL",0,0,"Step 1827, loss: 1.5283284187316895, step time: 17.863988876342773ms\r\n",,terminal_output +7997,12361851,"TERMINAL",0,0,"Step 1828, loss: 1.5672310590744019, step time: 17.38262176513672ms\r\n",,terminal_output +7998,12361905,"TERMINAL",0,0,"Step 1829, loss: 1.5439558029174805, step time: 17.248153686523438ms\r\n",,terminal_output +7999,12362001,"TERMINAL",0,0,"Step 1830, loss: 1.6157655715942383, step time: 18.019914627075195ms\r\n",,terminal_output +8000,12362055,"TERMINAL",0,0,"Step 1831, loss: 1.5258779525756836, step time: 17.705917358398438ms\r\n",,terminal_output +8001,12362104,"TERMINAL",0,0,"Step 1832, loss: 1.5356754064559937, step time: 17.20118522644043ms\r\n",,terminal_output +8002,12362211,"TERMINAL",0,0,"Step 1833, loss: 1.5216439962387085, step time: 18.198490142822266ms\r\nStep 1834, loss: 1.6650482416152954, step time: 17.597436904907227ms\r\n",,terminal_output +8003,12362305,"TERMINAL",0,0,"Step 1835, loss: 1.5484883785247803, step time: 17.551660537719727ms\r\n",,terminal_output +8004,12362358,"TERMINAL",0,0,"Step 1836, loss: 1.5510785579681396, step time: 17.932891845703125ms\r\n",,terminal_output +8005,12362467,"TERMINAL",0,0,"Step 1837, loss: 1.753387212753296, step time: 17.6546573638916ms\r\nStep 1838, loss: 1.5561189651489258, step time: 17.36140251159668ms\r\n",,terminal_output +8006,12362530,"TERMINAL",0,0,"Step 1839, loss: 1.5502439737319946, step time: 18.359899520874023ms\r\n",,terminal_output +8007,12362591,"TERMINAL",0,0,"Step 1840, loss: 1.5333263874053955, step time: 17.19379425048828ms\r\n",,terminal_output +8008,12362661,"TERMINAL",0,0,"Step 1841, loss: 2.0848233699798584, step time: 18.944740295410156ms\r\n",,terminal_output 
+8009,12362718,"TERMINAL",0,0,"Step 1842, loss: 1.5380151271820068, step time: 17.633676528930664ms\r\n",,terminal_output +8010,12362787,"TERMINAL",0,0,"Step 1843, loss: 1.5420966148376465, step time: 17.521381378173828ms\r\n",,terminal_output +8011,12362849,"TERMINAL",0,0,"Step 1844, loss: 1.6034965515136719, step time: 17.166852951049805ms\r\n",,terminal_output +8012,12362911,"TERMINAL",0,0,"Step 1845, loss: 1.5194427967071533, step time: 18.16248893737793ms\r\n",,terminal_output +8013,12362971,"TERMINAL",0,0,"Step 1846, loss: 1.5295288562774658, step time: 17.71712303161621ms\r\n",,terminal_output +8014,12363034,"TERMINAL",0,0,"Step 1847, loss: 1.5711758136749268, step time: 17.482757568359375ms\r\n",,terminal_output +8015,12363098,"TERMINAL",0,0,"Step 1848, loss: 1.5277985334396362, step time: 17.621517181396484ms\r\n",,terminal_output +8016,12363170,"TERMINAL",0,0,"Step 1849, loss: 1.5834687948226929, step time: 17.505884170532227ms\r\n",,terminal_output +8017,12363262,"TERMINAL",0,0,"Step 1850, loss: 1.554459571838379, step time: 17.194509506225586ms\r\n",,terminal_output +8018,12363313,"TERMINAL",0,0,"Step 1851, loss: 1.5027309656143188, step time: 18.223047256469727ms\r\n",,terminal_output +8019,12363419,"TERMINAL",0,0,"Step 1852, loss: 1.497196912765503, step time: 17.55809783935547ms\r\nStep 1853, loss: 1.5427762269973755, step time: 17.45152473449707ms\r\n",,terminal_output +8020,12363540,"TERMINAL",0,0,"Step 1854, loss: 1.53907310962677, step time: 18.00394058227539ms\r\nStep 1855, loss: 1.5714466571807861, step time: 17.77052879333496ms\r\n",,terminal_output +8021,12363654,"TERMINAL",0,0,"Step 1856, loss: 1.491697907447815, step time: 17.47870445251465ms\r\n",,terminal_output +8022,12363669,"TERMINAL",0,0,"Step 1857, loss: 1.523173213005066, step time: 19.02318000793457ms\r\n",,terminal_output +8023,12363741,"TERMINAL",0,0,"Step 1858, loss: 1.5715821981430054, step time: 20.303964614868164ms\r\n",,terminal_output +8024,12363858,"TERMINAL",0,0,"Step 1859, loss: 1.692241907119751, step time: 17.996549606323242ms\r\nStep 1860, loss: 1.5776773691177368, step time: 18.168210983276367ms\r\n",,terminal_output +8025,12363921,"TERMINAL",0,0,"Step 1861, loss: 1.5918896198272705, step time: 17.541170120239258ms\r\n",,terminal_output +8026,12363987,"TERMINAL",0,0,"Step 1862, loss: 1.4796721935272217, step time: 17.43173599243164ms\r\n",,terminal_output +8027,12364049,"TERMINAL",0,0,"Step 1863, loss: 1.5085744857788086, step time: 18.32723617553711ms\r\n",,terminal_output +8028,12364113,"TERMINAL",0,0,"Step 1864, loss: 1.788150668144226, step time: 19.835948944091797ms\r\n",,terminal_output +8029,12364178,"TERMINAL",0,0,"Step 1865, loss: 1.5073814392089844, step time: 17.489910125732422ms\r\n",,terminal_output +8030,12364245,"TERMINAL",0,0,"Step 1866, loss: 1.5067510604858398, step time: 17.7919864654541ms\r\n",,terminal_output +8031,12364306,"TERMINAL",0,0,"Step 1867, loss: 1.5033833980560303, step time: 17.377853393554688ms\r\n",,terminal_output +8032,12364373,"TERMINAL",0,0,"Step 1868, loss: 1.4914923906326294, step time: 17.327070236206055ms\r\n",,terminal_output +8033,12364436,"TERMINAL",0,0,"Step 1869, loss: 1.919798731803894, step time: 18.182039260864258ms\r\n",,terminal_output +8034,12364499,"TERMINAL",0,0,"Step 1870, loss: 1.4957576990127563, step time: 17.458200454711914ms\r\n",,terminal_output +8035,12364561,"TERMINAL",0,0,"Step 1871, loss: 1.513657569885254, step time: 17.259597778320312ms\r\n",,terminal_output +8036,12364624,"TERMINAL",0,0,"Step 1872, loss: 
1.4839897155761719, step time: 17.578840255737305ms\r\n",,terminal_output +8037,12364686,"TERMINAL",0,0,"Step 1873, loss: 1.4899462461471558, step time: 17.55213737487793ms\r\n",,terminal_output +8038,12364749,"TERMINAL",0,0,"Step 1874, loss: 1.5029350519180298, step time: 17.271041870117188ms\r\n",,terminal_output +8039,12364810,"TERMINAL",0,0,"Step 1875, loss: 1.4905645847320557, step time: 17.931461334228516ms\r\n",,terminal_output +8040,12364875,"TERMINAL",0,0,"Step 1876, loss: 1.5441457033157349, step time: 17.360448837280273ms\r\n",,terminal_output +8041,12364939,"TERMINAL",0,0,"Step 1877, loss: 1.6399093866348267, step time: 17.308712005615234ms\r\n",,terminal_output +8042,12365040,"TERMINAL",0,0,"Step 1878, loss: 1.5975048542022705, step time: 17.69256591796875ms\r\n",,terminal_output +8043,12365090,"TERMINAL",0,0,"Step 1879, loss: 1.4784303903579712, step time: 17.516136169433594ms\r\n",,terminal_output +8044,12365196,"TERMINAL",0,0,"Step 1880, loss: 1.4982315301895142, step time: 17.095327377319336ms\r\nStep 1881, loss: 1.4819552898406982, step time: 17.856359481811523ms\r\n",,terminal_output +8045,12365259,"TERMINAL",0,0,"Step 1882, loss: 1.4845002889633179, step time: 17.293214797973633ms\r\n",,terminal_output +8046,12365326,"TERMINAL",0,0,"Step 1883, loss: 1.4952373504638672, step time: 17.554759979248047ms\r\n",,terminal_output +8047,12365385,"TERMINAL",0,0,"Step 1884, loss: 1.5138088464736938, step time: 17.680883407592773ms\r\n",,terminal_output +8048,12365453,"TERMINAL",0,0,"Step 1885, loss: 1.4978641271591187, step time: 17.28653907775879ms\r\n",,terminal_output +8049,12365516,"TERMINAL",0,0,"Step 1886, loss: 1.5279021263122559, step time: 17.26818084716797ms\r\n",,terminal_output +8050,12365854,"TERMINAL",0,0,"Step 1887, loss: 1.4849865436553955, step time: 342.35382080078125ms\r\n",,terminal_output +8051,12365920,"TERMINAL",0,0,"Step 1888, loss: 1.496230959892273, step time: 24.820566177368164ms\r\n",,terminal_output +8052,12365985,"TERMINAL",0,0,"Step 1889, loss: 1.5386720895767212, step time: 20.0345516204834ms\r\n",,terminal_output +8053,12366049,"TERMINAL",0,0,"Step 1890, loss: 1.4903309345245361, step time: 18.552303314208984ms\r\n",,terminal_output +8054,12366113,"TERMINAL",0,0,"Step 1891, loss: 1.5312414169311523, step time: 18.319368362426758ms\r\n",,terminal_output +8055,12366177,"TERMINAL",0,0,"Step 1892, loss: 1.4841195344924927, step time: 18.17917823791504ms\r\n",,terminal_output +8056,12366238,"TERMINAL",0,0,"Step 1893, loss: 1.502120018005371, step time: 17.96889305114746ms\r\n",,terminal_output +8057,12366305,"TERMINAL",0,0,"Step 1894, loss: 1.4658994674682617, step time: 17.570018768310547ms\r\n",,terminal_output +8058,12366367,"TERMINAL",0,0,"Step 1895, loss: 1.5805689096450806, step time: 17.76862144470215ms\r\n",,terminal_output +8059,12366431,"TERMINAL",0,0,"Step 1896, loss: 1.487719178199768, step time: 18.155574798583984ms\r\n",,terminal_output +8060,12366497,"TERMINAL",0,0,"Step 1897, loss: 1.4728513956069946, step time: 17.7614688873291ms\r\n",,terminal_output +8061,12366557,"TERMINAL",0,0,"Step 1898, loss: 1.492843747138977, step time: 17.531156539916992ms\r\n",,terminal_output +8062,12366621,"TERMINAL",0,0,"Step 1899, loss: 1.4688465595245361, step time: 18.553972244262695ms\r\n",,terminal_output +8063,12366689,"TERMINAL",0,0,"Step 1900, loss: 1.4996347427368164, step time: 17.385244369506836ms\r\n",,terminal_output +8064,12366752,"TERMINAL",0,0,"Step 1901, loss: 1.4832109212875366, step time: 17.612934112548828ms\r\n",,terminal_output 
+8065,12366818,"TERMINAL",0,0,"Step 1902, loss: 1.4826816320419312, step time: 18.895864486694336ms\r\n",,terminal_output +8066,12366879,"TERMINAL",0,0,"Step 1903, loss: 1.506264567375183, step time: 19.043445587158203ms\r\n",,terminal_output +8067,12366939,"TERMINAL",0,0,"Step 1904, loss: 1.490215539932251, step time: 17.695188522338867ms\r\n",,terminal_output +8068,12367007,"TERMINAL",0,0,"Step 1905, loss: 1.451426386833191, step time: 18.2797908782959ms\r\n",,terminal_output +8069,12367071,"TERMINAL",0,0,"Step 1906, loss: 1.5195883512496948, step time: 17.706871032714844ms\r\n",,terminal_output +8070,12367164,"TERMINAL",0,0,"Step 1907, loss: 1.4695533514022827, step time: 17.704010009765625ms\r\n",,terminal_output +8071,12367217,"TERMINAL",0,0,"Step 1908, loss: 1.4675277471542358, step time: 18.161296844482422ms\r\n",,terminal_output +8072,12367362,"TERMINAL",0,0,"Step 1909, loss: 1.4652369022369385, step time: 17.75503158569336ms\r\nStep 1910, loss: 1.4655557870864868, step time: 17.56906509399414ms\r\n",,terminal_output +8073,12367414,"TERMINAL",0,0,"Step 1911, loss: 1.8140634298324585, step time: 18.462419509887695ms\r\n",,terminal_output +8074,12367519,"TERMINAL",0,0,"Step 1912, loss: 1.4924696683883667, step time: 19.547700881958008ms\r\nStep 1913, loss: 1.4524556398391724, step time: 18.755435943603516ms\r\n",,terminal_output +8075,12367614,"TERMINAL",0,0,"Step 1914, loss: 1.5886132717132568, step time: 17.899274826049805ms\r\n",,terminal_output +8076,12367667,"TERMINAL",0,0,"Step 1915, loss: 2.064089059829712, step time: 17.35520362854004ms\r\n",,terminal_output +8077,12367772,"TERMINAL",0,0,"Step 1916, loss: 1.4807270765304565, step time: 17.293691635131836ms\r\nStep 1917, loss: 1.4854828119277954, step time: 18.2342529296875ms\r\n",,terminal_output +8078,12367836,"TERMINAL",0,0,"Step 1918, loss: 1.4649797677993774, step time: 17.08984375ms\r\n",,terminal_output +8079,12367895,"TERMINAL",0,0,"Step 1919, loss: 1.4737223386764526, step time: 17.40097999572754ms\r\n",,terminal_output +8080,12367957,"TERMINAL",0,0,"Step 1920, loss: 1.46481454372406, step time: 17.784595489501953ms\r\n",,terminal_output +8081,12368055,"TERMINAL",0,0,"Step 1921, loss: 1.4636774063110352, step time: 17.77791976928711ms\r\n",,terminal_output +8082,12368108,"TERMINAL",0,0,"Step 1922, loss: 1.5627331733703613, step time: 17.03786849975586ms\r\n",,terminal_output +8083,12368227,"TERMINAL",0,0,"Step 1923, loss: 1.5076134204864502, step time: 18.12434196472168ms\r\nStep 1924, loss: 1.5119324922561646, step time: 17.09437370300293ms\r\n",,terminal_output +8084,12368289,"TERMINAL",0,0,"Step 1925, loss: 1.481738805770874, step time: 17.10796356201172ms\r\n",,terminal_output +8085,12368399,"TERMINAL",0,0,"Step 1926, loss: 1.4685466289520264, step time: 17.795085906982422ms\r\nStep 1927, loss: 1.4714292287826538, step time: 17.591476440429688ms\r\n",,terminal_output +8086,12368464,"TERMINAL",0,0,"Step 1928, loss: 1.5515332221984863, step time: 17.034292221069336ms\r\n",,terminal_output +8087,12368530,"TERMINAL",0,0,"Step 1929, loss: 1.4600772857666016, step time: 17.894983291625977ms\r\n",,terminal_output +8088,12368656,"TERMINAL",0,0,"Step 1930, loss: 1.4494091272354126, step time: 17.447710037231445ms\r\nStep 1931, loss: 1.5163853168487549, step time: 17.62843132019043ms\r\n",,terminal_output +8089,12368752,"TERMINAL",0,0,"Step 1932, loss: 1.5732696056365967, step time: 19.202470779418945ms\r\n",,terminal_output +8090,12368816,"TERMINAL",0,0,"Step 1933, loss: 1.5831208229064941, step time: 
17.91858673095703ms\r\n",,terminal_output +8091,12368879,"TERMINAL",0,0,"Step 1934, loss: 1.4659162759780884, step time: 17.112255096435547ms\r\n",,terminal_output +8092,12368941,"TERMINAL",0,0,"Step 1935, loss: 1.4928531646728516, step time: 34.48772430419922ms\r\n",,terminal_output +8093,12369005,"TERMINAL",0,0,"Step 1936, loss: 1.4361931085586548, step time: 18.163681030273438ms\r\n",,terminal_output +8094,12369066,"TERMINAL",0,0,"Step 1937, loss: 1.4643415212631226, step time: 17.75646209716797ms\r\n",,terminal_output +8095,12369129,"TERMINAL",0,0,"Step 1938, loss: 1.4784753322601318, step time: 18.057823181152344ms\r\n",,terminal_output +8096,12369195,"TERMINAL",0,0,"Step 1939, loss: 1.475759744644165, step time: 17.760276794433594ms\r\n",,terminal_output +8097,12369257,"TERMINAL",0,0,"Step 1940, loss: 1.4892412424087524, step time: 17.36927032470703ms\r\n",,terminal_output +8098,12369318,"TERMINAL",0,0,"Step 1941, loss: 1.4507009983062744, step time: 18.230438232421875ms\r\n",,terminal_output +8099,12369379,"TERMINAL",0,0,"Step 1942, loss: 1.4469736814498901, step time: 17.4257755279541ms\r\n",,terminal_output +8100,12369441,"TERMINAL",0,0,"Step 1943, loss: 1.4683884382247925, step time: 17.427444458007812ms\r\n",,terminal_output +8101,12369503,"TERMINAL",0,0,"Step 1944, loss: 1.4409863948822021, step time: 17.894983291625977ms\r\n",,terminal_output +8102,12369567,"TERMINAL",0,0,"Step 1945, loss: 1.4420249462127686, step time: 17.647981643676758ms\r\n",,terminal_output +8103,12369630,"TERMINAL",0,0,"Step 1946, loss: 1.4324966669082642, step time: 17.276763916015625ms\r\n",,terminal_output +8104,12369691,"TERMINAL",0,0,"Step 1947, loss: 1.4401315450668335, step time: 18.260717391967773ms\r\n",,terminal_output +8105,12369753,"TERMINAL",0,0,"Step 1948, loss: 1.4439133405685425, step time: 17.170190811157227ms\r\n",,terminal_output +8106,12369809,"TERMINAL",0,0,"Step 1949, loss: 1.5540848970413208, step time: 17.316818237304688ms\r\n",,terminal_output +8107,12369874,"TERMINAL",0,0,"Step 1950, loss: 1.4734536409378052, step time: 18.208026885986328ms\r\n",,terminal_output +8108,12369936,"TERMINAL",0,0,"Step 1951, loss: 1.4263588190078735, step time: 17.759084701538086ms\r\n",,terminal_output +8109,12370001,"TERMINAL",0,0,"Step 1952, loss: 1.414996862411499, step time: 17.277002334594727ms\r\n",,terminal_output +8110,12370063,"TERMINAL",0,0,"Step 1953, loss: 1.4274756908416748, step time: 18.353700637817383ms\r\n",,terminal_output +8111,12370126,"TERMINAL",0,0,"Step 1954, loss: 1.440123200416565, step time: 17.666101455688477ms\r\n",,terminal_output +8112,12370190,"TERMINAL",0,0,"Step 1955, loss: 1.4271228313446045, step time: 17.763376235961914ms\r\n",,terminal_output +8113,12370255,"TERMINAL",0,0,"Step 1956, loss: 1.6510223150253296, step time: 18.027305603027344ms\r\n",,terminal_output +8114,12370348,"TERMINAL",0,0,"Step 1957, loss: 1.4767589569091797, step time: 17.97652244567871ms\r\n",,terminal_output +8115,12370400,"TERMINAL",0,0,"Step 1958, loss: 1.4334162473678589, step time: 18.02349090576172ms\r\n",,terminal_output +8116,12370494,"TERMINAL",0,0,"Step 1959, loss: 1.4084385633468628, step time: 18.328189849853516ms\r\n",,terminal_output +8117,12370547,"TERMINAL",0,0,"Step 1960, loss: 1.4058837890625, step time: 17.528533935546875ms\r\n",,terminal_output +8118,12370600,"TERMINAL",0,0,"Step 1961, loss: 2.041776180267334, step time: 17.586708068847656ms\r\n",,terminal_output +8119,12370707,"TERMINAL",0,0,"Step 1962, loss: 1.5251585245132446, step time: 17.57359504699707ms\r\nStep 
1963, loss: 1.564255714416504, step time: 17.447471618652344ms\r\n",,terminal_output +8120,12370760,"TERMINAL",0,0,"Step 1964, loss: 1.4404891729354858, step time: 17.232418060302734ms\r\n",,terminal_output +8121,12370855,"TERMINAL",0,0,"Step 1965, loss: 1.4282399415969849, step time: 18.02802085876465ms\r\n",,terminal_output +8122,12370918,"TERMINAL",0,0,"Step 1966, loss: 1.4217404127120972, step time: 17.382383346557617ms\r\n",,terminal_output +8123,12370981,"TERMINAL",0,0,"Step 1967, loss: 1.4316695928573608, step time: 17.566919326782227ms\r\n",,terminal_output +8124,12371045,"TERMINAL",0,0,"Step 1968, loss: 1.4328207969665527, step time: 17.870664596557617ms\r\n",,terminal_output +8125,12371107,"TERMINAL",0,0,"Step 1969, loss: 1.4427423477172852, step time: 21.69060707092285ms\r\n",,terminal_output +8126,12371171,"TERMINAL",0,0,"Step 1970, loss: 1.454306960105896, step time: 20.38264274597168ms\r\n",,terminal_output +8127,12371234,"TERMINAL",0,0,"Step 1971, loss: 1.4224461317062378, step time: 19.665002822875977ms\r\n",,terminal_output +8128,12371296,"TERMINAL",0,0,"Step 1972, loss: 1.4194989204406738, step time: 18.056154251098633ms\r\n",,terminal_output +8129,12371357,"TERMINAL",0,0,"Step 1973, loss: 1.4116066694259644, step time: 17.79794692993164ms\r\n",,terminal_output +8130,12371464,"TERMINAL",0,0,"Step 1974, loss: 1.4423601627349854, step time: 18.09096336364746ms\r\nStep 1975, loss: 2.1888434886932373, step time: 17.776966094970703ms\r\n",,terminal_output +8131,12371529,"TERMINAL",0,0,"Step 1976, loss: 1.4279690980911255, step time: 17.479419708251953ms\r\n",,terminal_output +8132,12371593,"TERMINAL",0,0,"Step 1977, loss: 2.355529308319092, step time: 18.303394317626953ms\r\n",,terminal_output +8133,12371657,"TERMINAL",0,0,"Step 1978, loss: 1.5029253959655762, step time: 17.63463020324707ms\r\n",,terminal_output +8134,12371722,"TERMINAL",0,0,"Step 1979, loss: 1.4400568008422852, step time: 17.559289932250977ms\r\n",,terminal_output +8135,12371778,"TERMINAL",0,0,"Step 1980, loss: 1.5181784629821777, step time: 17.726898193359375ms\r\n",,terminal_output +8136,12371858,"TERMINAL",0,0,"Step 1981, loss: 1.568602442741394, step time: 17.9903507232666ms\r\n",,terminal_output +8137,12371921,"TERMINAL",0,0,"Step 1982, loss: 1.4734688997268677, step time: 17.20428466796875ms\r\n",,terminal_output +8138,12372067,"TERMINAL",0,0,"Step 1983, loss: 1.4172061681747437, step time: 18.369674682617188ms\r\nStep 1984, loss: 1.4169846773147583, step time: 19.44446563720703ms\r\n",,terminal_output +8139,12372120,"TERMINAL",0,0,"Step 1985, loss: 1.4850045442581177, step time: 18.43118667602539ms\r\n",,terminal_output +8140,12372226,"TERMINAL",0,0,"Step 1986, loss: 1.423357367515564, step time: 18.16272735595703ms\r\nStep 1987, loss: 1.4467922449111938, step time: 17.992258071899414ms\r\n",,terminal_output +8141,12372321,"TERMINAL",0,0,"Step 1988, loss: 1.511229157447815, step time: 17.515182495117188ms\r\n",,terminal_output +8142,12372375,"TERMINAL",0,0,"Step 1989, loss: 1.4301002025604248, step time: 18.2340145111084ms\r\n",,terminal_output +8143,12372480,"TERMINAL",0,0,"Step 1990, loss: 1.4033973217010498, step time: 17.669200897216797ms\r\nStep 1991, loss: 1.4388223886489868, step time: 18.673181533813477ms\r\n",,terminal_output +8144,12372575,"TERMINAL",0,0,"Step 1992, loss: 1.4063067436218262, step time: 17.999887466430664ms\r\n",,terminal_output +8145,12372628,"TERMINAL",0,0,"Step 1993, loss: 1.4160934686660767, step time: 18.026351928710938ms\r\n",,terminal_output 
+8146,12372733,"TERMINAL",0,0,"Step 1994, loss: 1.4460731744766235, step time: 17.553091049194336ms\r\nStep 1995, loss: 1.404103398323059, step time: 17.919063568115234ms\r\n",,terminal_output +8147,12372800,"TERMINAL",0,0,"Step 1996, loss: 1.6897952556610107, step time: 17.505645751953125ms\r\n",,terminal_output +8148,12372863,"TERMINAL",0,0,"Step 1997, loss: 1.4324110746383667, step time: 17.7152156829834ms\r\n",,terminal_output +8149,12372926,"TERMINAL",0,0,"Step 1998, loss: 1.4146891832351685, step time: 17.741680145263672ms\r\n",,terminal_output +8150,12372990,"TERMINAL",0,0,"Step 1999, loss: 1.4325754642486572, step time: 18.078327178955078ms\r\n",,terminal_output +8151,12375721,"TERMINAL",0,0,"Step 2000, loss: 1.9347426891326904, step time: 24.281978607177734ms\r\n",,terminal_output +8152,12375780,"TERMINAL",0,0,"Step 2001, loss: 1.4881246089935303, step time: 25.57826042175293ms\r\n",,terminal_output +8153,12375887,"TERMINAL",0,0,"Step 2002, loss: 1.4223697185516357, step time: 20.015716552734375ms\r\n",,terminal_output +8154,12375938,"TERMINAL",0,0,"Step 2003, loss: 1.635452151298523, step time: 19.55890655517578ms\r\n",,terminal_output +8155,12376045,"TERMINAL",0,0,"Step 2004, loss: 1.409161925315857, step time: 18.572092056274414ms\r\nStep 2005, loss: 1.401513934135437, step time: 18.74995231628418ms\r\n",,terminal_output +8156,12376103,"TERMINAL",0,0,"Step 2006, loss: 1.4116650819778442, step time: 18.61429214477539ms\r\n",,terminal_output +8157,12376209,"TERMINAL",0,0,"Step 2007, loss: 1.4039868116378784, step time: 18.779993057250977ms\r\n",,terminal_output +8158,12376261,"TERMINAL",0,0,"Step 2008, loss: 1.4992667436599731, step time: 18.543720245361328ms\r\n",,terminal_output +8159,12376367,"TERMINAL",0,0,"Step 2009, loss: 1.3967500925064087, step time: 18.387317657470703ms\r\nStep 2010, loss: 1.3853280544281006, step time: 18.434762954711914ms\r\n",,terminal_output +8160,12376459,"TERMINAL",0,0,"Step 2011, loss: 1.3962293863296509, step time: 17.908334732055664ms\r\n",,terminal_output +8161,12376511,"TERMINAL",0,0,"Step 2012, loss: 1.417504072189331, step time: 17.286300659179688ms\r\n",,terminal_output +8162,12376615,"TERMINAL",0,0,"Step 2013, loss: 1.4466230869293213, step time: 17.970800399780273ms\r\nStep 2014, loss: 1.4486538171768188, step time: 18.854141235351562ms\r\n",,terminal_output +8163,12376741,"TERMINAL",0,0,"Step 2015, loss: 1.4017096757888794, step time: 17.950057983398438ms\r\nStep 2016, loss: 1.3725686073303223, step time: 17.506837844848633ms\r\n",,terminal_output +8164,12376805,"TERMINAL",0,0,"Step 2017, loss: 1.506191372871399, step time: 18.238544464111328ms\r\n",,terminal_output +8165,12376869,"TERMINAL",0,0,"Step 2018, loss: 1.4362927675247192, step time: 18.49508285522461ms\r\n",,terminal_output +8166,12376936,"TERMINAL",0,0,"Step 2019, loss: 1.3877149820327759, step time: 18.326997756958008ms\r\n",,terminal_output +8167,12376999,"TERMINAL",0,0,"Step 2020, loss: 1.382449984550476, step time: 17.56143569946289ms\r\n",,terminal_output +8168,12377059,"TERMINAL",0,0,"Step 2021, loss: 1.384202480316162, step time: 17.38762855529785ms\r\n",,terminal_output +8169,12377120,"TERMINAL",0,0,"Step 2022, loss: 1.4663559198379517, step time: 17.65727996826172ms\r\n",,terminal_output +8170,12377182,"TERMINAL",0,0,"Step 2023, loss: 1.42094087600708, step time: 17.65275001525879ms\r\n",,terminal_output +8171,12377246,"TERMINAL",0,0,"Step 2024, loss: 1.5613353252410889, step time: 17.259836196899414ms\r\n",,terminal_output +8172,12377308,"TERMINAL",0,0,"Step 2025, 
loss: 1.7986886501312256, step time: 17.876148223876953ms\r\n",,terminal_output +8173,12377371,"TERMINAL",0,0,"Step 2026, loss: 1.379317283630371, step time: 17.317771911621094ms\r\n",,terminal_output +8174,12377436,"TERMINAL",0,0,"Step 2027, loss: 1.614650845527649, step time: 17.356395721435547ms\r\n",,terminal_output +8175,12377500,"TERMINAL",0,0,"Step 2028, loss: 1.564086675643921, step time: 17.358779907226562ms\r\n",,terminal_output +8176,12377564,"TERMINAL",0,0,"Step 2029, loss: 1.4069825410842896, step time: 17.421245574951172ms\r\n",,terminal_output +8177,12377630,"TERMINAL",0,0,"Step 2030, loss: 1.3947027921676636, step time: 17.011165618896484ms\r\n",,terminal_output +8178,12377694,"TERMINAL",0,0,"Step 2031, loss: 1.5217363834381104, step time: 17.729997634887695ms\r\n",,terminal_output +8179,12377759,"TERMINAL",0,0,"Step 2032, loss: 1.4200433492660522, step time: 17.021894454956055ms\r\n",,terminal_output +8180,12377821,"TERMINAL",0,0,"Step 2033, loss: 1.4245702028274536, step time: 17.220735549926758ms\r\n",,terminal_output +8181,12377884,"TERMINAL",0,0,"Step 2034, loss: 1.4410978555679321, step time: 17.36140251159668ms\r\n",,terminal_output +8182,12377978,"TERMINAL",0,0,"Step 2035, loss: 1.4227015972137451, step time: 17.298221588134766ms\r\n",,terminal_output +8183,12378030,"TERMINAL",0,0,"Step 2036, loss: 1.4424266815185547, step time: 17.077207565307617ms\r\n",,terminal_output +8184,12378135,"TERMINAL",0,0,"Step 2037, loss: 1.6221531629562378, step time: 17.762422561645508ms\r\nStep 2038, loss: 1.390914797782898, step time: 17.369747161865234ms\r\n",,terminal_output +8185,12378212,"TERMINAL",0,0,"Step 2039, loss: 1.6308996677398682, step time: 17.22097396850586ms\r\n",,terminal_output +8186,12378259,"TERMINAL",0,0,"Step 2040, loss: 1.3934075832366943, step time: 17.38572120666504ms\r\n",,terminal_output +8187,12378366,"TERMINAL",0,0,"Step 2041, loss: 1.394445538520813, step time: 17.48824119567871ms\r\n",,terminal_output +8188,12378426,"TERMINAL",0,0,"Step 2042, loss: 1.4245688915252686, step time: 17.218351364135742ms\r\n",,terminal_output +8189,12378487,"TERMINAL",0,0,"Step 2043, loss: 1.4100770950317383, step time: 17.64655113220215ms\r\n",,terminal_output +8190,12378550,"TERMINAL",0,0,"Step 2044, loss: 1.3884227275848389, step time: 17.276287078857422ms\r\n",,terminal_output +8191,12378650,"TERMINAL",0,0,"Step 2045, loss: 1.4004515409469604, step time: 17.30799674987793ms\r\nStep 2046, loss: 1.3912746906280518, step time: 17.573833465576172ms\r\n",,terminal_output +8192,12378771,"TERMINAL",0,0,"Step 2047, loss: 1.3706382513046265, step time: 17.59958267211914ms\r\nStep 2048, loss: 1.3917722702026367, step time: 18.31197738647461ms\r\n",,terminal_output +8193,12378899,"TERMINAL",0,0,"Step 2049, loss: 1.36775803565979, step time: 17.798900604248047ms\r\nStep 2050, loss: 1.4041979312896729, step time: 17.374753952026367ms\r\n",,terminal_output +8194,12378963,"TERMINAL",0,0,"Step 2051, loss: 1.3725560903549194, step time: 17.17853546142578ms\r\n",,terminal_output +8195,12379026,"TERMINAL",0,0,"Step 2052, loss: 1.3733853101730347, step time: 17.621517181396484ms\r\n",,terminal_output +8196,12379090,"TERMINAL",0,0,"Step 2053, loss: 1.5756021738052368, step time: 17.518997192382812ms\r\n",,terminal_output +8197,12379153,"TERMINAL",0,0,"Step 2054, loss: 1.4362300634384155, step time: 17.14348793029785ms\r\n",,terminal_output +8198,12379217,"TERMINAL",0,0,"Step 2055, loss: 1.3685475587844849, step time: 18.772125244140625ms\r\n",,terminal_output 
+8199,12379281,"TERMINAL",0,0,"Step 2056, loss: 1.3757803440093994, step time: 18.561124801635742ms\r\n",,terminal_output +8200,12379343,"TERMINAL",0,0,"Step 2057, loss: 1.4018120765686035, step time: 18.71657371520996ms\r\n",,terminal_output +8201,12379407,"TERMINAL",0,0,"Step 2058, loss: 1.3472157716751099, step time: 18.232107162475586ms\r\n",,terminal_output +8202,12379500,"TERMINAL",0,0,"Step 2059, loss: 1.3616851568222046, step time: 18.262386322021484ms\r\n",,terminal_output +8203,12379551,"TERMINAL",0,0,"Step 2060, loss: 1.3445459604263306, step time: 17.420053482055664ms\r\n",,terminal_output +8204,12379643,"TERMINAL",0,0,"Step 2061, loss: 1.368888020515442, step time: 17.676353454589844ms\r\n",,terminal_output +8205,12379752,"TERMINAL",0,0,"Step 2062, loss: 1.46315336227417, step time: 18.123626708984375ms\r\nStep 2063, loss: 1.3472648859024048, step time: 17.281055450439453ms\r\n",,terminal_output +8206,12379816,"TERMINAL",0,0,"Step 2064, loss: 1.358643651008606, step time: 17.51399040222168ms\r\n",,terminal_output +8207,12379880,"TERMINAL",0,0,"Step 2065, loss: 1.4755635261535645, step time: 17.291784286499023ms\r\n",,terminal_output +8208,12379947,"TERMINAL",0,0,"Step 2066, loss: 1.4673151969909668, step time: 18.2340145111084ms\r\n",,terminal_output +8209,12380007,"TERMINAL",0,0,"Step 2067, loss: 1.8429090976715088, step time: 19.33741569519043ms\r\n",,terminal_output +8210,12380069,"TERMINAL",0,0,"Step 2068, loss: 1.3803316354751587, step time: 17.67134666442871ms\r\n",,terminal_output +8211,12380130,"TERMINAL",0,0,"Step 2069, loss: 1.3792859315872192, step time: 17.467975616455078ms\r\n",,terminal_output +8212,12380192,"TERMINAL",0,0,"Step 2070, loss: 1.3473632335662842, step time: 18.66292953491211ms\r\n",,terminal_output +8213,12380252,"TERMINAL",0,0,"Step 2071, loss: 1.4709174633026123, step time: 17.77195930480957ms\r\n",,terminal_output +8214,12380313,"TERMINAL",0,0,"Step 2072, loss: 2.127793788909912, step time: 17.414093017578125ms\r\n",,terminal_output +8215,12380387,"TERMINAL",0,0,"Step 2073, loss: 1.3793838024139404, step time: 17.765522003173828ms\r\n",,terminal_output +8216,12380440,"TERMINAL",0,0,"Step 2074, loss: 1.3558303117752075, step time: 17.414569854736328ms\r\n",,terminal_output +8217,12380505,"TERMINAL",0,0,"Step 2075, loss: 1.3953198194503784, step time: 17.247676849365234ms\r\n",,terminal_output +8218,12380570,"TERMINAL",0,0,"Step 2076, loss: 1.357901692390442, step time: 17.596721649169922ms\r\n",,terminal_output +8219,12380661,"TERMINAL",0,0,"Step 2077, loss: 1.3736985921859741, step time: 17.393112182617188ms\r\n",,terminal_output +8220,12380713,"TERMINAL",0,0,"Step 2078, loss: 1.434958815574646, step time: 17.446517944335938ms\r\n",,terminal_output +8221,12380765,"TERMINAL",0,0,"Step 2079, loss: 1.366053581237793, step time: 18.036842346191406ms\r\n",,terminal_output +8222,12380909,"TERMINAL",0,0,"Step 2080, loss: 2.2763421535491943, step time: 17.729520797729492ms\r\nStep 2081, loss: 1.3398107290267944, step time: 17.464876174926758ms\r\n",,terminal_output +8223,12380960,"TERMINAL",0,0,"Step 2082, loss: 1.353331446647644, step time: 17.637252807617188ms\r\n",,terminal_output +8224,12381063,"TERMINAL",0,0,"Step 2083, loss: 1.445576548576355, step time: 17.683029174804688ms\r\nStep 2084, loss: 1.386947512626648, step time: 17.43316650390625ms\r\n",,terminal_output +8225,12381130,"TERMINAL",0,0,"Step 2085, loss: 1.349325180053711, step time: 17.592906951904297ms\r\n",,terminal_output +8226,12381191,"TERMINAL",0,0,"Step 2086, loss: 
1.3688714504241943, step time: 17.17972755432129ms\r\n",,terminal_output +8227,12381285,"TERMINAL",0,0,"Step 2087, loss: 1.3506358861923218, step time: 17.202138900756836ms\r\n",,terminal_output +8228,12381348,"TERMINAL",0,0,"Step 2088, loss: 1.3564165830612183, step time: 17.975568771362305ms\r\n",,terminal_output +8229,12381410,"TERMINAL",0,0,"Step 2089, loss: 1.4115875959396362, step time: 17.69256591796875ms\r\n",,terminal_output +8230,12381474,"TERMINAL",0,0,"Step 2090, loss: 1.375662922859192, step time: 17.451763153076172ms\r\n",,terminal_output +8231,12381537,"TERMINAL",0,0,"Step 2091, loss: 1.8358486890792847, step time: 21.16107940673828ms\r\n",,terminal_output +8232,12381603,"TERMINAL",0,0,"Step 2092, loss: 1.3883817195892334, step time: 17.530202865600586ms\r\n",,terminal_output +8233,12381672,"TERMINAL",0,0,"Step 2093, loss: 1.3788508176803589, step time: 19.690752029418945ms\r\n",,terminal_output +8234,12381730,"TERMINAL",0,0,"Step 2094, loss: 1.3519243001937866, step time: 23.465871810913086ms\r\n",,terminal_output +8235,12381801,"TERMINAL",0,0,"Step 2095, loss: 1.3359383344650269, step time: 25.36177635192871ms\r\n",,terminal_output +8236,12381861,"TERMINAL",0,0,"Step 2096, loss: 1.3759132623672485, step time: 24.57451820373535ms\r\n",,terminal_output +8237,12382216,"TERMINAL",0,0,"Step 2097, loss: 1.3529696464538574, step time: 362.05244064331055ms\r\n",,terminal_output +8238,12382279,"TERMINAL",0,0,"Step 2098, loss: 1.3505198955535889, step time: 24.376392364501953ms\r\n",,terminal_output +8239,12382345,"TERMINAL",0,0,"Step 2099, loss: 1.3821003437042236, step time: 19.525527954101562ms\r\n",,terminal_output +8240,12382409,"TERMINAL",0,0,"Step 2100, loss: 1.3512673377990723, step time: 18.812894821166992ms\r\n",,terminal_output +8241,12382471,"TERMINAL",0,0,"Step 2101, loss: 1.33487069606781, step time: 17.67706871032715ms\r\n",,terminal_output +8242,12382534,"TERMINAL",0,0,"Step 2102, loss: 1.3833295106887817, step time: 17.318248748779297ms\r\n",,terminal_output +8243,12382627,"TERMINAL",0,0,"Step 2103, loss: 1.333299160003662, step time: 17.809391021728516ms\r\n",,terminal_output +8244,12382679,"TERMINAL",0,0,"Step 2104, loss: 1.3355439901351929, step time: 17.40431785583496ms\r\n",,terminal_output +8245,12382784,"TERMINAL",0,0,"Step 2105, loss: 1.3558868169784546, step time: 17.278671264648438ms\r\nStep 2106, loss: 1.352160096168518, step time: 17.7152156829834ms\r\n",,terminal_output +8246,12382850,"TERMINAL",0,0,"Step 2107, loss: 1.329958200454712, step time: 17.49396324157715ms\r\n",,terminal_output +8247,12382910,"TERMINAL",0,0,"Step 2108, loss: 1.3627852201461792, step time: 17.222166061401367ms\r\n",,terminal_output +8248,12382973,"TERMINAL",0,0,"Step 2109, loss: 1.6085628271102905, step time: 17.750024795532227ms\r\n",,terminal_output +8249,12383040,"TERMINAL",0,0,"Step 2110, loss: 1.34433114528656, step time: 17.55499839782715ms\r\n",,terminal_output +8250,12383098,"TERMINAL",0,0,"Step 2111, loss: 1.326938271522522, step time: 17.714738845825195ms\r\n",,terminal_output +8251,12383165,"TERMINAL",0,0,"Step 2112, loss: 1.3902496099472046, step time: 17.838001251220703ms\r\n",,terminal_output +8252,12383225,"TERMINAL",0,0,"Step 2113, loss: 1.407383680343628, step time: 17.679691314697266ms\r\n",,terminal_output +8253,12383289,"TERMINAL",0,0,"Step 2114, loss: 1.3315627574920654, step time: 17.45462417602539ms\r\n",,terminal_output +8254,12383391,"TERMINAL",0,0,"Step 2115, loss: 1.3133419752120972, step time: 17.826318740844727ms\r\n",,terminal_output 
+8255,12383440,"TERMINAL",0,0,"Step 2116, loss: 1.341168761253357, step time: 17.570018768310547ms\r\n",,terminal_output +8256,12383545,"TERMINAL",0,0,"Step 2117, loss: 1.3131951093673706, step time: 17.584800720214844ms\r\nStep 2118, loss: 1.742846965789795, step time: 17.81749725341797ms\r\n",,terminal_output +8257,12383690,"TERMINAL",0,0,"Step 2119, loss: 1.2990200519561768, step time: 17.81749725341797ms\r\nStep 2120, loss: 1.3079875707626343, step time: 17.505168914794922ms\r\n",,terminal_output +8258,12383761,"TERMINAL",0,0,"Step 2121, loss: 1.9442187547683716, step time: 18.77903938293457ms\r\n",,terminal_output +8259,12383797,"TERMINAL",0,0,"Step 2122, loss: 1.3261618614196777, step time: 18.339872360229492ms\r\n",,terminal_output +8260,12383891,"TERMINAL",0,0,"Step 2123, loss: 1.32099449634552, step time: 17.744779586791992ms\r\n",,terminal_output +8261,12383998,"TERMINAL",0,0,"Step 2124, loss: 1.6603190898895264, step time: 18.215417861938477ms\r\nStep 2125, loss: 1.323807716369629, step time: 18.189191818237305ms\r\n",,terminal_output +8262,12384060,"TERMINAL",0,0,"Step 2126, loss: 1.455655813217163, step time: 17.563581466674805ms\r\n",,terminal_output +8263,12384121,"TERMINAL",0,0,"Step 2127, loss: 1.336948275566101, step time: 17.925262451171875ms\r\n",,terminal_output +8264,12384181,"TERMINAL",0,0,"Step 2128, loss: 1.3229713439941406, step time: 17.45748519897461ms\r\n",,terminal_output +8265,12384245,"TERMINAL",0,0,"Step 2129, loss: 1.3381481170654297, step time: 17.528057098388672ms\r\n",,terminal_output +8266,12384343,"TERMINAL",0,0,"Step 2130, loss: 1.3449316024780273, step time: 18.319368362426758ms\r\n",,terminal_output +8267,12384394,"TERMINAL",0,0,"Step 2131, loss: 1.4001061916351318, step time: 17.7152156829834ms\r\n",,terminal_output +8268,12384497,"TERMINAL",0,0,"Step 2132, loss: 1.3632599115371704, step time: 17.241239547729492ms\r\nStep 2133, loss: 1.342673897743225, step time: 18.042564392089844ms\r\n",,terminal_output +8269,12384595,"TERMINAL",0,0,"Step 2134, loss: 1.3244706392288208, step time: 17.90142059326172ms\r\n",,terminal_output +8270,12384650,"TERMINAL",0,0,"Step 2135, loss: 1.3071680068969727, step time: 17.36760139465332ms\r\n",,terminal_output +8271,12384756,"TERMINAL",0,0,"Step 2136, loss: 1.3092284202575684, step time: 17.757177352905273ms\r\nStep 2137, loss: 1.3472942113876343, step time: 17.51565933227539ms\r\n",,terminal_output +8272,12384824,"TERMINAL",0,0,"Step 2138, loss: 1.3076133728027344, step time: 17.318248748779297ms\r\n",,terminal_output +8273,12384882,"TERMINAL",0,0,"Step 2139, loss: 1.8506717681884766, step time: 17.956972122192383ms\r\n",,terminal_output +8274,12384976,"TERMINAL",0,0,"Step 2140, loss: 1.301252007484436, step time: 17.456531524658203ms\r\n",,terminal_output +8275,12385084,"TERMINAL",0,0,"Step 2141, loss: 1.3079731464385986, step time: 17.875194549560547ms\r\nStep 2142, loss: 1.4423015117645264, step time: 17.862796783447266ms\r\n",,terminal_output +8276,12385146,"TERMINAL",0,0,"Step 2143, loss: 1.5375202894210815, step time: 17.5168514251709ms\r\n",,terminal_output +8277,12385208,"TERMINAL",0,0,"Step 2144, loss: 1.5231400728225708, step time: 17.546653747558594ms\r\n",,terminal_output +8278,12385271,"TERMINAL",0,0,"Step 2145, loss: 1.3197027444839478, step time: 17.459869384765625ms\r\n",,terminal_output +8279,12385362,"TERMINAL",0,0,"Step 2146, loss: 1.2936830520629883, step time: 17.491817474365234ms\r\n",,terminal_output +8280,12385416,"TERMINAL",0,0,"Step 2147, loss: 1.4982212781906128, step time: 
17.488718032836914ms\r\n",,terminal_output +8281,12385522,"TERMINAL",0,0,"Step 2148, loss: 1.3619977235794067, step time: 17.8372859954834ms\r\nStep 2149, loss: 1.3347082138061523, step time: 17.48061180114746ms\r\n",,terminal_output +8282,12385585,"TERMINAL",0,0,"Step 2150, loss: 1.3185303211212158, step time: 19.86241340637207ms\r\n",,terminal_output +8283,12385649,"TERMINAL",0,0,"Step 2151, loss: 1.4304150342941284, step time: 18.327951431274414ms\r\n",,terminal_output +8284,12385717,"TERMINAL",0,0,"Step 2152, loss: 1.6233129501342773, step time: 17.724037170410156ms\r\n",,terminal_output +8285,12385780,"TERMINAL",0,0,"Step 2153, loss: 1.2977560758590698, step time: 17.775774002075195ms\r\n",,terminal_output +8286,12385839,"TERMINAL",0,0,"Step 2154, loss: 1.517107367515564, step time: 17.98272132873535ms\r\n",,terminal_output +8287,12385937,"TERMINAL",0,0,"Step 2155, loss: 1.3219847679138184, step time: 17.29273796081543ms\r\n",,terminal_output +8288,12385995,"TERMINAL",0,0,"Step 2156, loss: 1.5410878658294678, step time: 17.367839813232422ms\r\n",,terminal_output +8289,12386057,"TERMINAL",0,0,"Step 2157, loss: 1.3142192363739014, step time: 18.355846405029297ms\r\n",,terminal_output +8290,12386117,"TERMINAL",0,0,"Step 2158, loss: 1.3269249200820923, step time: 17.44246482849121ms\r\n",,terminal_output +8291,12386180,"TERMINAL",0,0,"Step 2159, loss: 1.3036928176879883, step time: 17.365217208862305ms\r\n",,terminal_output +8292,12386239,"TERMINAL",0,0,"Step 2160, loss: 1.3015248775482178, step time: 18.02682876586914ms\r\n",,terminal_output +8293,12386333,"TERMINAL",0,0,"Step 2161, loss: 1.3115371465682983, step time: 18.423080444335938ms\r\n",,terminal_output +8294,12386456,"TERMINAL",0,0,"Step 2162, loss: 1.290556788444519, step time: 17.332792282104492ms\r\n",,terminal_output +8295,12386568,"TERMINAL",0,0,"Step 2163, loss: 1.4620827436447144, step time: 17.971038818359375ms\r\nStep 2164, loss: 1.353301763534546, step time: 17.278432846069336ms\r\nStep 2165, loss: 1.3128564357757568, step time: 17.509937286376953ms\r\n",,terminal_output +8296,12386625,"TERMINAL",0,0,"Step 2166, loss: 1.3351083993911743, step time: 18.22662353515625ms\r\n",,terminal_output +8297,12386743,"TERMINAL",0,0,"Step 2167, loss: 1.3155544996261597, step time: 17.510652542114258ms\r\nStep 2168, loss: 1.51944100856781, step time: 17.27461814880371ms\r\n",,terminal_output +8298,12386805,"TERMINAL",0,0,"Step 2169, loss: 1.3111653327941895, step time: 24.454355239868164ms\r\n",,terminal_output +8299,12386873,"TERMINAL",0,0,"Step 2170, loss: 1.3036388158798218, step time: 18.215179443359375ms\r\n",,terminal_output +8300,12386934,"TERMINAL",0,0,"Step 2171, loss: 1.328001856803894, step time: 17.555713653564453ms\r\n",,terminal_output +8301,12387000,"TERMINAL",0,0,"Step 2172, loss: 1.2876102924346924, step time: 17.94123649597168ms\r\n",,terminal_output +8302,12387093,"TERMINAL",0,0,"Step 2173, loss: 1.3416578769683838, step time: 17.39501953125ms\r\n",,terminal_output +8303,12387146,"TERMINAL",0,0,"Step 2174, loss: 1.2870275974273682, step time: 30.101537704467773ms\r\n",,terminal_output +8304,12387241,"TERMINAL",0,0,"Step 2175, loss: 1.297039270401001, step time: 17.962217330932617ms\r\n",,terminal_output +8305,12387304,"TERMINAL",0,0,"Step 2176, loss: 1.2822831869125366, step time: 17.582416534423828ms\r\n",,terminal_output +8306,12387364,"TERMINAL",0,0,"Step 2177, loss: 1.3042446374893188, step time: 17.79341697692871ms\r\n",,terminal_output +8307,12387425,"TERMINAL",0,0,"Step 2178, loss: 1.429888129234314, step 
time: 17.72308349609375ms\r\n",,terminal_output +8308,12387488,"TERMINAL",0,0,"Step 2179, loss: 1.301772952079773, step time: 17.531394958496094ms\r\n",,terminal_output +8309,12387549,"TERMINAL",0,0,"Step 2180, loss: 1.4567382335662842, step time: 18.339157104492188ms\r\n",,terminal_output +8310,12387611,"TERMINAL",0,0,"Step 2181, loss: 1.2831703424453735, step time: 18.285512924194336ms\r\n",,terminal_output +8311,12387716,"TERMINAL",0,0,"Step 2182, loss: 1.3026692867279053, step time: 17.84539222717285ms\r\nStep 2183, loss: 1.2733092308044434, step time: 17.571687698364258ms\r\n",,terminal_output +8312,12387779,"TERMINAL",0,0,"Step 2184, loss: 1.281022548675537, step time: 18.074989318847656ms\r\n",,terminal_output +8313,12387839,"TERMINAL",0,0,"Step 2185, loss: 1.2883113622665405, step time: 17.5168514251709ms\r\n",,terminal_output +8314,12387901,"TERMINAL",0,0,"Step 2186, loss: 1.2844533920288086, step time: 17.392396926879883ms\r\n",,terminal_output +8315,12387962,"TERMINAL",0,0,"Step 2187, loss: 1.372239589691162, step time: 17.989635467529297ms\r\n",,terminal_output +8316,12388026,"TERMINAL",0,0,"Step 2188, loss: 1.3193578720092773, step time: 17.715930938720703ms\r\n",,terminal_output +8317,12388092,"TERMINAL",0,0,"Step 2189, loss: 1.340633511543274, step time: 17.53687858581543ms\r\n",,terminal_output +8318,12388154,"TERMINAL",0,0,"Step 2190, loss: 1.2719860076904297, step time: 17.984390258789062ms\r\n",,terminal_output +8319,12388249,"TERMINAL",0,0,"Step 2191, loss: 1.2802131175994873, step time: 17.615079879760742ms\r\n",,terminal_output +8320,12388301,"TERMINAL",0,0,"Step 2192, loss: 1.3421032428741455, step time: 17.4105167388916ms\r\n",,terminal_output +8321,12388393,"TERMINAL",0,0,"Step 2193, loss: 1.2525041103363037, step time: 18.329858779907227ms\r\n",,terminal_output +8322,12388447,"TERMINAL",0,0,"Step 2194, loss: 1.5286821126937866, step time: 17.821311950683594ms\r\n",,terminal_output +8323,12388499,"TERMINAL",0,0,"Step 2195, loss: 1.6235100030899048, step time: 17.303466796875ms\r\n",,terminal_output +8324,12388650,"TERMINAL",0,0,"Step 2196, loss: 1.2608312368392944, step time: 17.650842666625977ms\r\nStep 2197, loss: 1.3587902784347534, step time: 17.424821853637695ms\r\n",,terminal_output +8325,12388665,"TERMINAL",0,0,"Step 2198, loss: 1.2609484195709229, step time: 17.456769943237305ms\r\n",,terminal_output +8326,12388731,"TERMINAL",0,0,"Step 2199, loss: 1.2631640434265137, step time: 17.737388610839844ms\r\n",,terminal_output +8327,12388797,"TERMINAL",0,0,"Step 2200, loss: 1.2817645072937012, step time: 19.2415714263916ms\r\n",,terminal_output +8328,12388885,"TERMINAL",0,0,"Step 2201, loss: 1.2725353240966797, step time: 24.98173713684082ms\r\n",,terminal_output +8329,12388937,"TERMINAL",0,0,"Step 2202, loss: 1.4690771102905273, step time: 19.294023513793945ms\r\n",,terminal_output +8330,12389082,"TERMINAL",0,0,"Step 2203, loss: 1.2735775709152222, step time: 17.87567138671875ms\r\nStep 2204, loss: 1.284706711769104, step time: 17.739295959472656ms\r\n",,terminal_output +8331,12389135,"TERMINAL",0,0,"Step 2205, loss: 1.2684911489486694, step time: 18.050193786621094ms\r\n",,terminal_output +8332,12389241,"TERMINAL",0,0,"Step 2206, loss: 1.272404432296753, step time: 17.58432388305664ms\r\nStep 2207, loss: 1.3102402687072754, step time: 17.7459716796875ms\r\n",,terminal_output +8333,12389338,"TERMINAL",0,0,"Step 2208, loss: 1.2697006464004517, step time: 18.171310424804688ms\r\n",,terminal_output +8334,12389391,"TERMINAL",0,0,"Step 2209, loss: 1.4819672107696533, 
step time: 17.688751220703125ms\r\n",,terminal_output +8335,12389496,"TERMINAL",0,0,"Step 2210, loss: 1.2830928564071655, step time: 17.597675323486328ms\r\nStep 2211, loss: 1.2543516159057617, step time: 17.952442169189453ms\r\n",,terminal_output +8336,12389591,"TERMINAL",0,0,"Step 2212, loss: 1.2855480909347534, step time: 17.947912216186523ms\r\n",,terminal_output +8337,12389645,"TERMINAL",0,0,"Step 2213, loss: 1.2821472883224487, step time: 17.412424087524414ms\r\n",,terminal_output +8338,12389755,"TERMINAL",0,0,"Step 2214, loss: 1.260668396949768, step time: 17.911911010742188ms\r\nStep 2215, loss: 1.3010997772216797, step time: 17.560958862304688ms\r\n",,terminal_output +8339,12389879,"TERMINAL",0,0,"Step 2216, loss: 2.3172876834869385, step time: 17.278671264648438ms\r\nStep 2217, loss: 1.6887867450714111, step time: 17.99178123474121ms\r\n",,terminal_output +8340,12389975,"TERMINAL",0,0,"Step 2218, loss: 1.4005671739578247, step time: 17.72904396057129ms\r\n",,terminal_output +8341,12390037,"TERMINAL",0,0,"Step 2219, loss: 1.268459677696228, step time: 17.481327056884766ms\r\n",,terminal_output +8342,12390099,"TERMINAL",0,0,"Step 2220, loss: 1.291839838027954, step time: 17.846345901489258ms\r\n",,terminal_output +8343,12390162,"TERMINAL",0,0,"Step 2221, loss: 1.3709537982940674, step time: 17.557144165039062ms\r\n",,terminal_output +8344,12390223,"TERMINAL",0,0,"Step 2222, loss: 1.2462730407714844, step time: 17.52018928527832ms\r\n",,terminal_output +8345,12390289,"TERMINAL",0,0,"Step 2223, loss: 1.301079273223877, step time: 17.927169799804688ms\r\n",,terminal_output +8346,12390349,"TERMINAL",0,0,"Step 2224, loss: 1.2590339183807373, step time: 17.77791976928711ms\r\n",,terminal_output +8347,12390411,"TERMINAL",0,0,"Step 2225, loss: 1.249916911125183, step time: 17.325401306152344ms\r\n",,terminal_output +8348,12390473,"TERMINAL",0,0,"Step 2226, loss: 1.2546799182891846, step time: 17.59815216064453ms\r\n",,terminal_output +8349,12390579,"TERMINAL",0,0,"Step 2227, loss: 1.268832802772522, step time: 17.15874671936035ms\r\nStep 2228, loss: 1.3854193687438965, step time: 17.49277114868164ms\r\n",,terminal_output +8350,12390642,"TERMINAL",0,0,"Step 2229, loss: 1.2473249435424805, step time: 17.698287963867188ms\r\n",,terminal_output +8351,12390711,"TERMINAL",0,0,"Step 2230, loss: 1.84063720703125, step time: 17.551422119140625ms\r\n",,terminal_output +8352,12390764,"TERMINAL",0,0,"Step 2231, loss: 1.2603294849395752, step time: 20.41339874267578ms\r\n",,terminal_output +8353,12390855,"TERMINAL",0,0,"Step 2232, loss: 1.2647749185562134, step time: 17.628908157348633ms\r\n",,terminal_output +8354,12390910,"TERMINAL",0,0,"Step 2233, loss: 1.2926592826843262, step time: 17.427444458007812ms\r\n",,terminal_output +8355,12391058,"TERMINAL",0,0,"Step 2234, loss: 2.2925937175750732, step time: 17.376184463500977ms\r\nStep 2235, loss: 1.3464970588684082, step time: 17.483949661254883ms\r\n",,terminal_output +8356,12391110,"TERMINAL",0,0,"Step 2236, loss: 1.4119354486465454, step time: 17.714738845825195ms\r\n",,terminal_output +8357,12391217,"TERMINAL",0,0,"Step 2237, loss: 1.2468541860580444, step time: 17.32015609741211ms\r\nStep 2238, loss: 1.2446800470352173, step time: 17.981767654418945ms\r\n",,terminal_output +8358,12391311,"TERMINAL",0,0,"Step 2239, loss: 1.2368770837783813, step time: 17.182111740112305ms\r\n",,terminal_output +8359,12391364,"TERMINAL",0,0,"Step 2240, loss: 1.2672977447509766, step time: 17.423629760742188ms\r\n",,terminal_output +8360,12391469,"TERMINAL",0,0,"Step 
2241, loss: 1.2683128118515015, step time: 17.789840698242188ms\r\nStep 2242, loss: 1.251612663269043, step time: 17.264127731323242ms\r\n",,terminal_output +8361,12391565,"TERMINAL",0,0,"Step 2243, loss: 1.686398983001709, step time: 17.197132110595703ms\r\n",,terminal_output +8362,12391618,"TERMINAL",0,0,"Step 2244, loss: 1.319690227508545, step time: 17.81773567199707ms\r\n",,terminal_output +8363,12391724,"TERMINAL",0,0,"Step 2245, loss: 1.4188178777694702, step time: 17.18592643737793ms\r\nStep 2246, loss: 1.2778908014297485, step time: 17.066240310668945ms\r\n",,terminal_output +8364,12391789,"TERMINAL",0,0,"Step 2247, loss: 1.3084627389907837, step time: 17.68779754638672ms\r\n",,terminal_output +8365,12391851,"TERMINAL",0,0,"Step 2248, loss: 1.249130129814148, step time: 17.194747924804688ms\r\n",,terminal_output +8366,12391913,"TERMINAL",0,0,"Step 2249, loss: 1.2476272583007812, step time: 17.449617385864258ms\r\n",,terminal_output +8367,12391976,"TERMINAL",0,0,"Step 2250, loss: 1.2574975490570068, step time: 17.97175407409668ms\r\n",,terminal_output +8368,12392038,"TERMINAL",0,0,"Step 2251, loss: 1.2594037055969238, step time: 17.620325088500977ms\r\n",,terminal_output +8369,12392102,"TERMINAL",0,0,"Step 2252, loss: 1.296335220336914, step time: 17.211198806762695ms\r\n",,terminal_output +8370,12392161,"TERMINAL",0,0,"Step 2253, loss: 1.242714762687683, step time: 17.840862274169922ms\r\n",,terminal_output +8371,12392222,"TERMINAL",0,0,"Step 2254, loss: 1.2629536390304565, step time: 17.528295516967773ms\r\n",,terminal_output +8372,12392287,"TERMINAL",0,0,"Step 2255, loss: 1.2635465860366821, step time: 17.55666732788086ms\r\n",,terminal_output +8373,12392603,"TERMINAL",0,0,"Step 2256, loss: 1.3913586139678955, step time: 313.86685371398926ms\r\n",,terminal_output +8374,12392753,"TERMINAL",0,0,"Step 2257, loss: 1.7057092189788818, step time: 25.12049674987793ms\r\nStep 2258, loss: 1.2548035383224487, step time: 19.693613052368164ms\r\n",,terminal_output +8375,12392816,"TERMINAL",0,0,"Step 2259, loss: 1.2822197675704956, step time: 18.860816955566406ms\r\n",,terminal_output +8376,12392875,"TERMINAL",0,0,"Step 2260, loss: 1.2576640844345093, step time: 18.092870712280273ms\r\n",,terminal_output +8377,12392937,"TERMINAL",0,0,"Step 2261, loss: 1.2738274335861206, step time: 17.99178123474121ms\r\n",,terminal_output +8378,12393011,"TERMINAL",0,0,"Step 2262, loss: 1.231602430343628, step time: 18.41568946838379ms\r\n",,terminal_output +8379,12393073,"TERMINAL",0,0,"Step 2263, loss: 1.2303193807601929, step time: 17.719507217407227ms\r\n",,terminal_output +8380,12393132,"TERMINAL",0,0,"Step 2264, loss: 1.2671006917953491, step time: 17.750263214111328ms\r\n",,terminal_output +8381,12393195,"TERMINAL",0,0,"Step 2265, loss: 1.23621666431427, step time: 18.122196197509766ms\r\n",,terminal_output +8382,12393259,"TERMINAL",0,0,"Step 2266, loss: 1.2392281293869019, step time: 17.64225959777832ms\r\n",,terminal_output +8383,12393372,"TERMINAL",0,0,"Step 2267, loss: 1.411831021308899, step time: 17.623186111450195ms\r\nStep 2268, loss: 1.2365573644638062, step time: 18.32723617553711ms\r\n",,terminal_output +8384,12393434,"TERMINAL",0,0,"Step 2269, loss: 1.2291311025619507, step time: 17.8530216217041ms\r\n",,terminal_output +8385,12393498,"TERMINAL",0,0,"Step 2270, loss: 1.2272136211395264, step time: 17.59171485900879ms\r\n",,terminal_output +8386,12393564,"TERMINAL",0,0,"Step 2271, loss: 1.2792081832885742, step time: 18.046140670776367ms\r\n",,terminal_output 
+8387,12393636,"TERMINAL",0,0,"Step 2272, loss: 1.2357637882232666, step time: 17.828702926635742ms\r\n",,terminal_output +8388,12393753,"TERMINAL",0,0,"Step 2273, loss: 1.221953272819519, step time: 17.355918884277344ms\r\nStep 2274, loss: 1.4061334133148193, step time: 18.088579177856445ms\r\n",,terminal_output +8389,12393880,"TERMINAL",0,0,"Step 2275, loss: 1.28325617313385, step time: 18.769502639770508ms\r\nStep 2276, loss: 1.379478096961975, step time: 17.63772964477539ms\r\n",,terminal_output +8390,12393943,"TERMINAL",0,0,"Step 2277, loss: 1.4490532875061035, step time: 18.22495460510254ms\r\n",,terminal_output +8391,12394008,"TERMINAL",0,0,"Step 2278, loss: 1.2310471534729004, step time: 19.085168838500977ms\r\n",,terminal_output +8392,12394072,"TERMINAL",0,0,"Step 2279, loss: 1.223641037940979, step time: 17.63749122619629ms\r\n",,terminal_output +8393,12394136,"TERMINAL",0,0,"Step 2280, loss: 1.2174673080444336, step time: 17.795801162719727ms\r\n",,terminal_output +8394,12394199,"TERMINAL",0,0,"Step 2281, loss: 1.294796347618103, step time: 17.502546310424805ms\r\n",,terminal_output +8395,12394264,"TERMINAL",0,0,"Step 2282, loss: 1.4811335802078247, step time: 17.396211624145508ms\r\n",,terminal_output +8396,12394331,"TERMINAL",0,0,"Step 2283, loss: 1.212098479270935, step time: 18.088102340698242ms\r\n",,terminal_output +8397,12394390,"TERMINAL",0,0,"Step 2284, loss: 1.2685794830322266, step time: 17.504453659057617ms\r\n",,terminal_output +8398,12394453,"TERMINAL",0,0,"Step 2285, loss: 1.4426265954971313, step time: 17.620563507080078ms\r\n",,terminal_output +8399,12394516,"TERMINAL",0,0,"Step 2286, loss: 1.2728590965270996, step time: 17.949581146240234ms\r\n",,terminal_output +8400,12394580,"TERMINAL",0,0,"Step 2287, loss: 1.2624030113220215, step time: 17.282485961914062ms\r\n",,terminal_output +8401,12394644,"TERMINAL",0,0,"Step 2288, loss: 2.015443801879883, step time: 17.705678939819336ms\r\n",,terminal_output +8402,12394719,"TERMINAL",0,0,"Step 2289, loss: 1.3601678609848022, step time: 17.78554916381836ms\r\n",,terminal_output +8403,12394772,"TERMINAL",0,0,"Step 2290, loss: 1.2045100927352905, step time: 17.653465270996094ms\r\n",,terminal_output +8404,12394839,"TERMINAL",0,0,"Step 2291, loss: 1.2280535697937012, step time: 17.586946487426758ms\r\n",,terminal_output +8405,12394937,"TERMINAL",0,0,"Step 2292, loss: 1.2523516416549683, step time: 18.040180206298828ms\r\n",,terminal_output +8406,12394990,"TERMINAL",0,0,"Step 2293, loss: 1.2246642112731934, step time: 18.103837966918945ms\r\n",,terminal_output +8407,12395095,"TERMINAL",0,0,"Step 2294, loss: 1.338982343673706, step time: 17.566442489624023ms\r\nStep 2295, loss: 1.2246748208999634, step time: 18.130064010620117ms\r\n",,terminal_output +8408,12395156,"TERMINAL",0,0,"Step 2296, loss: 1.2568559646606445, step time: 17.559528350830078ms\r\n",,terminal_output +8409,12395223,"TERMINAL",0,0,"Step 2297, loss: 1.2613778114318848, step time: 17.25482940673828ms\r\n",,terminal_output +8410,12395282,"TERMINAL",0,0,"Step 2298, loss: 1.243928074836731, step time: 17.882823944091797ms\r\n",,terminal_output +8411,12395342,"TERMINAL",0,0,"Step 2299, loss: 1.2585450410842896, step time: 18.120765686035156ms\r\n",,terminal_output +8412,12395409,"TERMINAL",0,0,"Step 2300, loss: 1.4065831899642944, step time: 17.885208129882812ms\r\n",,terminal_output +8413,12395505,"TERMINAL",0,0,"Step 2301, loss: 1.2447928190231323, step time: 18.32294464111328ms\r\n",,terminal_output +8414,12395612,"TERMINAL",0,0,"Step 2302, loss: 
1.234413981437683, step time: 17.807960510253906ms\r\nStep 2303, loss: 1.3061894178390503, step time: 17.68970489501953ms\r\n",,terminal_output +8415,12395673,"TERMINAL",0,0,"Step 2304, loss: 1.7742924690246582, step time: 18.125057220458984ms\r\n",,terminal_output +8416,12395738,"TERMINAL",0,0,"Step 2305, loss: 1.2193418741226196, step time: 17.571687698364258ms\r\n",,terminal_output +8417,12395789,"TERMINAL",0,0,"Step 2306, loss: 1.2268273830413818, step time: 17.479896545410156ms\r\n",,terminal_output +8418,12395885,"TERMINAL",0,0,"Step 2307, loss: 1.213208794593811, step time: 17.54140853881836ms\r\n",,terminal_output +8419,12395992,"TERMINAL",0,0,"Step 2308, loss: 1.2095777988433838, step time: 17.742633819580078ms\r\nStep 2309, loss: 1.2726290225982666, step time: 17.42410659790039ms\r\n",,terminal_output +8420,12396053,"TERMINAL",0,0,"Step 2310, loss: 1.2084057331085205, step time: 18.261432647705078ms\r\n",,terminal_output +8421,12396113,"TERMINAL",0,0,"Step 2311, loss: 1.2340474128723145, step time: 17.54927635192871ms\r\n",,terminal_output +8422,12396174,"TERMINAL",0,0,"Step 2312, loss: 1.2035462856292725, step time: 17.28081703186035ms\r\n",,terminal_output +8423,12396235,"TERMINAL",0,0,"Step 2313, loss: 1.2213414907455444, step time: 17.75836944580078ms\r\n",,terminal_output +8424,12396298,"TERMINAL",0,0,"Step 2314, loss: 1.288938045501709, step time: 17.612218856811523ms\r\n",,terminal_output +8425,12396359,"TERMINAL",0,0,"Step 2315, loss: 1.2021299600601196, step time: 17.522335052490234ms\r\n",,terminal_output +8426,12396420,"TERMINAL",0,0,"Step 2316, loss: 1.1939946413040161, step time: 17.902135848999023ms\r\n",,terminal_output +8427,12396512,"TERMINAL",0,0,"Step 2317, loss: 1.1908628940582275, step time: 17.101526260375977ms\r\n",,terminal_output +8428,12396563,"TERMINAL",0,0,"Step 2318, loss: 1.19979727268219, step time: 17.211198806762695ms\r\n",,terminal_output +8429,12396656,"TERMINAL",0,0,"Step 2319, loss: 1.1795272827148438, step time: 17.55523681640625ms\r\n",,terminal_output +8430,12396708,"TERMINAL",0,0,"Step 2320, loss: 1.2134116888046265, step time: 17.492294311523438ms\r\n",,terminal_output +8431,12396768,"TERMINAL",0,0,"Step 2321, loss: 1.215440034866333, step time: 17.367124557495117ms\r\n",,terminal_output +8432,12396828,"TERMINAL",0,0,"Step 2322, loss: 1.1992734670639038, step time: 22.141695022583008ms\r\n",,terminal_output +8433,12396889,"TERMINAL",0,0,"Step 2323, loss: 1.2157000303268433, step time: 18.451929092407227ms\r\n",,terminal_output +8434,12396950,"TERMINAL",0,0,"Step 2324, loss: 2.3969390392303467, step time: 17.526626586914062ms\r\n",,terminal_output +8435,12397012,"TERMINAL",0,0,"Step 2325, loss: 1.1964012384414673, step time: 18.133163452148438ms\r\n",,terminal_output +8436,12397073,"TERMINAL",0,0,"Step 2326, loss: 1.1895610094070435, step time: 17.741918563842773ms\r\n",,terminal_output +8437,12397133,"TERMINAL",0,0,"Step 2327, loss: 1.1983146667480469, step time: 17.605066299438477ms\r\n",,terminal_output +8438,12397196,"TERMINAL",0,0,"Step 2328, loss: 1.1960992813110352, step time: 18.225908279418945ms\r\n",,terminal_output +8439,12397265,"TERMINAL",0,0,"Step 2329, loss: 1.1830912828445435, step time: 17.328739166259766ms\r\n",,terminal_output +8440,12397319,"TERMINAL",0,0,"Step 2330, loss: 1.272593379020691, step time: 17.522811889648438ms\r\n",,terminal_output +8441,12397413,"TERMINAL",0,0,"Step 2331, loss: 1.4509060382843018, step time: 17.781734466552734ms\r\n",,terminal_output +8442,12397526,"TERMINAL",0,0,"Step 2332, loss: 
1.1999502182006836, step time: 17.4715518951416ms\r\nStep 2333, loss: 1.599431037902832, step time: 17.383813858032227ms\r\n",,terminal_output +8443,12397632,"TERMINAL",0,0,"Step 2334, loss: 1.3949586153030396, step time: 18.157958984375ms\r\nStep 2335, loss: 1.2146623134613037, step time: 17.342567443847656ms\r\n",,terminal_output +8444,12397768,"TERMINAL",0,0,"Step 2336, loss: 1.2160382270812988, step time: 17.31419563293457ms\r\nStep 2337, loss: 1.191399335861206, step time: 17.8070068359375ms\r\n",,terminal_output +8445,12397830,"TERMINAL",0,0,"Step 2338, loss: 1.2532758712768555, step time: 18.023252487182617ms\r\n",,terminal_output +8446,12397972,"TERMINAL",0,0,"Step 2339, loss: 1.2783104181289673, step time: 17.429828643798828ms\r\n",,terminal_output +8447,12398023,"TERMINAL",0,0,"Step 2340, loss: 1.1826634407043457, step time: 17.933368682861328ms\r\nStep 2341, loss: 1.2168450355529785, step time: 17.424583435058594ms\r\n",,terminal_output +8448,12398170,"TERMINAL",0,0,"Step 2342, loss: 1.7888157367706299, step time: 17.345190048217773ms\r\nStep 2343, loss: 1.2100635766983032, step time: 18.285036087036133ms\r\n",,terminal_output +8449,12398222,"TERMINAL",0,0,"Step 2344, loss: 1.225911021232605, step time: 17.60578155517578ms\r\n",,terminal_output +8450,12398393,"TERMINAL",0,0,"Step 2345, loss: 1.2303965091705322, step time: 17.377853393554688ms\r\nStep 2346, loss: 1.1793383359909058, step time: 18.335819244384766ms\r\nStep 2347, loss: 1.1981916427612305, step time: 17.25602149963379ms\r\n",,terminal_output +8451,12398460,"TERMINAL",0,0,"Step 2348, loss: 1.1833207607269287, step time: 17.3492431640625ms\r\n",,terminal_output +8452,12398517,"TERMINAL",0,0,"Step 2349, loss: 1.197593331336975, step time: 17.77172088623047ms\r\n",,terminal_output +8453,12398593,"TERMINAL",0,0,"Step 2350, loss: 1.2317427396774292, step time: 17.817258834838867ms\r\n",,terminal_output +8454,12398645,"TERMINAL",0,0,"Step 2351, loss: 2.0163614749908447, step time: 17.62843132019043ms\r\n",,terminal_output +8455,12398773,"TERMINAL",0,0,"Step 2352, loss: 1.1800003051757812, step time: 18.138885498046875ms\r\nStep 2353, loss: 1.1969327926635742, step time: 17.48490333557129ms\r\n",,terminal_output +8456,12398836,"TERMINAL",0,0,"Step 2354, loss: 1.2007625102996826, step time: 18.57781410217285ms\r\n",,terminal_output +8457,12398941,"TERMINAL",0,0,"Step 2355, loss: 1.1992695331573486, step time: 18.141984939575195ms\r\n",,terminal_output +8458,12398995,"TERMINAL",0,0,"Step 2356, loss: 1.5751243829727173, step time: 18.07379722595215ms\r\n",,terminal_output +8459,12399101,"TERMINAL",0,0,"Step 2357, loss: 1.3016446828842163, step time: 17.4558162689209ms\r\nStep 2358, loss: 1.5438474416732788, step time: 18.21112632751465ms\r\n",,terminal_output +8460,12399162,"TERMINAL",0,0,"Step 2359, loss: 1.7631516456604004, step time: 17.64965057373047ms\r\n",,terminal_output +8461,12399224,"TERMINAL",0,0,"Step 2360, loss: 1.2231061458587646, step time: 17.462968826293945ms\r\n",,terminal_output +8462,12399285,"TERMINAL",0,0,"Step 2361, loss: 1.195873737335205, step time: 18.050193786621094ms\r\n",,terminal_output +8463,12399346,"TERMINAL",0,0,"Step 2362, loss: 1.1724236011505127, step time: 17.681598663330078ms\r\n",,terminal_output +8464,12399408,"TERMINAL",0,0,"Step 2363, loss: 1.3442325592041016, step time: 17.685413360595703ms\r\n",,terminal_output +8465,12399473,"TERMINAL",0,0,"Step 2364, loss: 1.1919891834259033, step time: 18.15176010131836ms\r\n",,terminal_output +8466,12399568,"TERMINAL",0,0,"Step 2365, loss: 
1.1802784204483032, step time: 17.429113388061523ms\r\n",,terminal_output +8467,12399621,"TERMINAL",0,0,"Step 2366, loss: 1.176827073097229, step time: 17.348766326904297ms\r\n",,terminal_output +8468,12399728,"TERMINAL",0,0,"Step 2367, loss: 2.0081703662872314, step time: 18.186092376708984ms\r\nStep 2368, loss: 1.1859039068222046, step time: 18.578290939331055ms\r\n",,terminal_output +8469,12399790,"TERMINAL",0,0,"Step 2369, loss: 1.197725772857666, step time: 17.615079879760742ms\r\n",,terminal_output +8470,12399894,"TERMINAL",0,0,"Step 2370, loss: 1.1880321502685547, step time: 18.044710159301758ms\r\n",,terminal_output +8471,12399955,"TERMINAL",0,0,"Step 2371, loss: 1.1927660703659058, step time: 17.446041107177734ms\r\n",,terminal_output +8472,12400016,"TERMINAL",0,0,"Step 2372, loss: 1.1933342218399048, step time: 17.638444900512695ms\r\n",,terminal_output +8473,12400121,"TERMINAL",0,0,"Step 2373, loss: 1.292476773262024, step time: 17.69232749938965ms\r\nStep 2374, loss: 1.1796891689300537, step time: 17.394542694091797ms\r\n",,terminal_output +8474,12400185,"TERMINAL",0,0,"Step 2375, loss: 1.400928258895874, step time: 17.35687255859375ms\r\n",,terminal_output +8475,12400244,"TERMINAL",0,0,"Step 2376, loss: 1.2030696868896484, step time: 18.110036849975586ms\r\n",,terminal_output +8476,12400305,"TERMINAL",0,0,"Step 2377, loss: 1.202937364578247, step time: 17.30036735534668ms\r\n",,terminal_output +8477,12400366,"TERMINAL",0,0,"Step 2378, loss: 1.2115851640701294, step time: 17.23957061767578ms\r\n",,terminal_output +8478,12400428,"TERMINAL",0,0,"Step 2379, loss: 1.2852623462677002, step time: 18.03421974182129ms\r\n",,terminal_output +8479,12400534,"TERMINAL",0,0,"Step 2380, loss: 1.204197883605957, step time: 17.78888702392578ms\r\n",,terminal_output +8480,12400583,"TERMINAL",0,0,"Step 2381, loss: 1.16305410861969, step time: 17.589807510375977ms\r\n",,terminal_output +8481,12400691,"TERMINAL",0,0,"Step 2382, loss: 1.2213263511657715, step time: 17.922639846801758ms\r\nStep 2383, loss: 1.1907254457473755, step time: 17.382144927978516ms\r\n",,terminal_output +8482,12400810,"TERMINAL",0,0,"Step 2384, loss: 1.2635421752929688, step time: 17.139196395874023ms\r\nStep 2385, loss: 1.842943549156189, step time: 17.658710479736328ms\r\n",,terminal_output +8483,12400905,"TERMINAL",0,0,"Step 2386, loss: 1.2168700695037842, step time: 17.1966552734375ms\r\n",,terminal_output +8484,12400968,"TERMINAL",0,0,"Step 2387, loss: 1.1908695697784424, step time: 17.28534698486328ms\r\n",,terminal_output +8485,12401031,"TERMINAL",0,0,"Step 2388, loss: 1.343092441558838, step time: 17.88949966430664ms\r\n",,terminal_output +8486,12401093,"TERMINAL",0,0,"Step 2389, loss: 1.1968902349472046, step time: 19.49620246887207ms\r\n",,terminal_output +8487,12401156,"TERMINAL",0,0,"Step 2390, loss: 1.2044909000396729, step time: 18.4633731842041ms\r\n",,terminal_output +8488,12401460,"TERMINAL",0,0,"Step 2391, loss: 1.4813110828399658, step time: 331.8302631378174ms\r\n",,terminal_output +8489,12401568,"TERMINAL",0,0,"Step 2392, loss: 1.2080475091934204, step time: 24.966001510620117ms\r\n",,terminal_output +8490,12401620,"TERMINAL",0,0,"Step 2393, loss: 1.160476803779602, step time: 19.97971534729004ms\r\n",,terminal_output +8491,12401727,"TERMINAL",0,0,"Step 2394, loss: 1.241193413734436, step time: 19.07205581665039ms\r\nStep 2395, loss: 1.2087032794952393, step time: 17.97175407409668ms\r\n",,terminal_output +8492,12401794,"TERMINAL",0,0,"Step 2396, loss: 1.167702317237854, step time: 
17.68040657043457ms\r\n",,terminal_output +8493,12401856,"TERMINAL",0,0,"Step 2397, loss: 1.1929292678833008, step time: 18.007755279541016ms\r\n",,terminal_output +8494,12401920,"TERMINAL",0,0,"Step 2398, loss: 1.1611003875732422, step time: 17.599105834960938ms\r\n",,terminal_output +8495,12401986,"TERMINAL",0,0,"Step 2399, loss: 1.1442033052444458, step time: 17.35997200012207ms\r\n",,terminal_output +8496,12402048,"TERMINAL",0,0,"Step 2400, loss: 1.172295331954956, step time: 18.275976181030273ms\r\n",,terminal_output +8497,12402114,"TERMINAL",0,0,"Step 2401, loss: 1.1742275953292847, step time: 17.392873764038086ms\r\n",,terminal_output +8498,12402178,"TERMINAL",0,0,"Step 2402, loss: 1.1562440395355225, step time: 17.610549926757812ms\r\n",,terminal_output +8499,12402241,"TERMINAL",0,0,"Step 2403, loss: 2.078303337097168, step time: 18.13793182373047ms\r\n",,terminal_output +8500,12402307,"TERMINAL",0,0,"Step 2404, loss: 1.2391659021377563, step time: 17.64965057373047ms\r\n",,terminal_output +8501,12402367,"TERMINAL",0,0,"Step 2405, loss: 1.1701853275299072, step time: 17.574310302734375ms\r\n",,terminal_output +8502,12402432,"TERMINAL",0,0,"Step 2406, loss: 1.3129167556762695, step time: 17.84515380859375ms\r\n",,terminal_output +8503,12402578,"TERMINAL",0,0,"Step 2407, loss: 1.1692359447479248, step time: 17.482519149780273ms\r\nStep 2408, loss: 1.194347620010376, step time: 17.05026626586914ms\r\n",,terminal_output +8504,12402634,"TERMINAL",0,0,"Step 2409, loss: 1.1954792737960815, step time: 18.15962791442871ms\r\n",,terminal_output +8505,12402741,"TERMINAL",0,0,"Step 2410, loss: 1.1555246114730835, step time: 17.745018005371094ms\r\nStep 2411, loss: 1.1784887313842773, step time: 17.427682876586914ms\r\n",,terminal_output +8506,12402807,"TERMINAL",0,0,"Step 2412, loss: 1.1589709520339966, step time: 18.040895462036133ms\r\n",,terminal_output +8507,12402870,"TERMINAL",0,0,"Step 2413, loss: 1.1463799476623535, step time: 17.40741729736328ms\r\n",,terminal_output +8508,12402932,"TERMINAL",0,0,"Step 2414, loss: 1.1759593486785889, step time: 17.49396324157715ms\r\n",,terminal_output +8509,12402997,"TERMINAL",0,0,"Step 2415, loss: 1.152075171470642, step time: 18.069982528686523ms\r\n",,terminal_output +8510,12403066,"TERMINAL",0,0,"Step 2416, loss: 1.1450765132904053, step time: 17.574787139892578ms\r\n",,terminal_output +8511,12403124,"TERMINAL",0,0,"Step 2417, loss: 1.1826850175857544, step time: 17.301559448242188ms\r\n",,terminal_output +8512,12403194,"TERMINAL",0,0,"Step 2418, loss: 1.1523900032043457, step time: 17.879009246826172ms\r\n",,terminal_output +8513,12403258,"TERMINAL",0,0,"Step 2419, loss: 1.206724762916565, step time: 17.480850219726562ms\r\n",,terminal_output +8514,12403321,"TERMINAL",0,0,"Step 2420, loss: 1.1491061449050903, step time: 17.107248306274414ms\r\n",,terminal_output +8515,12403388,"TERMINAL",0,0,"Step 2421, loss: 1.5423387289047241, step time: 17.548561096191406ms\r\n",,terminal_output +8516,12403448,"TERMINAL",0,0,"Step 2422, loss: 1.1422662734985352, step time: 17.215251922607422ms\r\n",,terminal_output +8517,12403512,"TERMINAL",0,0,"Step 2423, loss: 1.4108548164367676, step time: 17.25482940673828ms\r\n",,terminal_output +8518,12403574,"TERMINAL",0,0,"Step 2424, loss: 1.1473993062973022, step time: 17.943143844604492ms\r\n",,terminal_output +8519,12403642,"TERMINAL",0,0,"Step 2425, loss: 1.1963779926300049, step time: 17.496109008789062ms\r\n",,terminal_output +8520,12403698,"TERMINAL",0,0,"Step 2426, loss: 1.128868818283081, step time: 
17.4100399017334ms\r\n",,terminal_output +8521,12403759,"TERMINAL",0,0,"Step 2427, loss: 1.145516037940979, step time: 17.97342300415039ms\r\n",,terminal_output +8522,12403811,"TERMINAL",0,0,"Step 2428, loss: 1.1605863571166992, step time: 18.285512924194336ms\r\n",,terminal_output +8523,12403909,"TERMINAL",0,0,"Step 2429, loss: 1.1361762285232544, step time: 17.868757247924805ms\r\n",,terminal_output +8524,12403972,"TERMINAL",0,0,"Step 2430, loss: 1.2092643976211548, step time: 18.481731414794922ms\r\n",,terminal_output +8525,12404036,"TERMINAL",0,0,"Step 2431, loss: 1.1367167234420776, step time: 17.32325553894043ms\r\n",,terminal_output +8526,12404097,"TERMINAL",0,0,"Step 2432, loss: 1.1953924894332886, step time: 17.409324645996094ms\r\n",,terminal_output +8527,12404156,"TERMINAL",0,0,"Step 2433, loss: 1.119476318359375, step time: 17.99631118774414ms\r\n",,terminal_output +8528,12404218,"TERMINAL",0,0,"Step 2434, loss: 1.1543500423431396, step time: 17.633914947509766ms\r\n",,terminal_output +8529,12404278,"TERMINAL",0,0,"Step 2435, loss: 1.6577050685882568, step time: 17.385005950927734ms\r\n",,terminal_output +8530,12404343,"TERMINAL",0,0,"Step 2436, loss: 1.1556813716888428, step time: 18.07093620300293ms\r\n",,terminal_output +8531,12404448,"TERMINAL",0,0,"Step 2437, loss: 2.0050549507141113, step time: 16.97540283203125ms\r\nStep 2438, loss: 1.1310184001922607, step time: 16.976118087768555ms\r\n",,terminal_output +8532,12404514,"TERMINAL",0,0,"Step 2439, loss: 1.364357590675354, step time: 17.711639404296875ms\r\n",,terminal_output +8533,12404571,"TERMINAL",0,0,"Step 2440, loss: 1.1618993282318115, step time: 17.741680145263672ms\r\n",,terminal_output +8534,12404634,"TERMINAL",0,0,"Step 2441, loss: 1.1300733089447021, step time: 17.477035522460938ms\r\n",,terminal_output +8535,12404722,"TERMINAL",0,0,"Step 2442, loss: 1.241353988647461, step time: 18.251895904541016ms\r\n",,terminal_output +8536,12404769,"TERMINAL",0,0,"Step 2443, loss: 1.1273828744888306, step time: 17.499685287475586ms\r\n",,terminal_output +8537,12404834,"TERMINAL",0,0,"Step 2444, loss: 1.1904882192611694, step time: 17.237424850463867ms\r\n",,terminal_output +8538,12404897,"TERMINAL",0,0,"Step 2445, loss: 1.1359206438064575, step time: 18.770933151245117ms\r\n",,terminal_output +8539,12404957,"TERMINAL",0,0,"Step 2446, loss: 1.5158040523529053, step time: 18.152475357055664ms\r\n",,terminal_output +8540,12405053,"TERMINAL",0,0,"Step 2447, loss: 1.190993309020996, step time: 20.03622055053711ms\r\n",,terminal_output +8541,12405106,"TERMINAL",0,0,"Step 2448, loss: 1.4154044389724731, step time: 18.250226974487305ms\r\n",,terminal_output +8542,12405215,"TERMINAL",0,0,"Step 2449, loss: 1.1612833738327026, step time: 17.536401748657227ms\r\nStep 2450, loss: 1.1507936716079712, step time: 17.511844635009766ms\r\n",,terminal_output +8543,12405281,"TERMINAL",0,0,"Step 2451, loss: 1.1719292402267456, step time: 18.07713508605957ms\r\n",,terminal_output +8544,12405350,"TERMINAL",0,0,"Step 2452, loss: 1.489656686782837, step time: 17.592430114746094ms\r\n",,terminal_output +8545,12405404,"TERMINAL",0,0,"Step 2453, loss: 1.3094803094863892, step time: 17.462730407714844ms\r\n",,terminal_output +8546,12405514,"TERMINAL",0,0,"Step 2454, loss: 1.1437407732009888, step time: 18.134593963623047ms\r\n",,terminal_output +8547,12405569,"TERMINAL",0,0,"Step 2455, loss: 1.1964234113693237, step time: 17.351388931274414ms\r\n",,terminal_output +8548,12405672,"TERMINAL",0,0,"Step 2456, loss: 1.1307612657546997, step time: 
17.519474029541016ms\r\nStep 2457, loss: 1.2264405488967896, step time: 18.011093139648438ms\r\n",,terminal_output +8549,12405730,"TERMINAL",0,0,"Step 2458, loss: 1.1991084814071655, step time: 17.856359481811523ms\r\n",,terminal_output +8550,12405858,"TERMINAL",0,0,"Step 2459, loss: 1.1418178081512451, step time: 17.41313934326172ms\r\nStep 2460, loss: 1.1168996095657349, step time: 18.192052841186523ms\r\n",,terminal_output +8551,12405921,"TERMINAL",0,0,"Step 2461, loss: 1.444836974143982, step time: 17.30513572692871ms\r\n",,terminal_output +8552,12405985,"TERMINAL",0,0,"Step 2462, loss: 1.2918084859848022, step time: 17.29869842529297ms\r\n",,terminal_output +8553,12406048,"TERMINAL",0,0,"Step 2463, loss: 1.5599942207336426, step time: 17.93956756591797ms\r\n",,terminal_output +8554,12406110,"TERMINAL",0,0,"Step 2464, loss: 1.1289063692092896, step time: 17.434120178222656ms\r\n",,terminal_output +8555,12406170,"TERMINAL",0,0,"Step 2465, loss: 1.1284338235855103, step time: 17.16780662536621ms\r\n",,terminal_output +8556,12406230,"TERMINAL",0,0,"Step 2466, loss: 1.1348025798797607, step time: 18.024206161499023ms\r\n",,terminal_output +8557,12406292,"TERMINAL",0,0,"Step 2467, loss: 1.1662408113479614, step time: 16.990184783935547ms\r\n",,terminal_output +8558,12406356,"TERMINAL",0,0,"Step 2468, loss: 1.1208449602127075, step time: 17.220258712768555ms\r\n",,terminal_output +8559,12406424,"TERMINAL",0,0,"Step 2469, loss: 1.1174184083938599, step time: 17.650604248046875ms\r\n",,terminal_output +8560,12406481,"TERMINAL",0,0,"Step 2470, loss: 1.1446369886398315, step time: 17.63176918029785ms\r\n",,terminal_output +8561,12406575,"TERMINAL",0,0,"Step 2471, loss: 1.1174252033233643, step time: 17.184019088745117ms\r\n",,terminal_output +8562,12406627,"TERMINAL",0,0,"Step 2472, loss: 1.11581552028656, step time: 17.875194549560547ms\r\n",,terminal_output +8563,12406733,"TERMINAL",0,0,"Step 2473, loss: 1.1053539514541626, step time: 17.00735092163086ms\r\nStep 2474, loss: 1.172888159751892, step time: 17.374277114868164ms\r\n",,terminal_output +8564,12406800,"TERMINAL",0,0,"Step 2475, loss: 1.5317176580429077, step time: 20.254135131835938ms\r\n",,terminal_output +8565,12406861,"TERMINAL",0,0,"Step 2476, loss: 1.1076515913009644, step time: 19.494295120239258ms\r\n",,terminal_output +8566,12406927,"TERMINAL",0,0,"Step 2477, loss: 1.1250829696655273, step time: 17.949342727661133ms\r\n",,terminal_output +8567,12407056,"TERMINAL",0,0,"Step 2478, loss: 1.1061569452285767, step time: 18.32747459411621ms\r\n",,terminal_output +8568,12407121,"TERMINAL",0,0,"Step 2479, loss: 1.3292570114135742, step time: 17.685651779174805ms\r\nStep 2480, loss: 1.128013014793396, step time: 17.389297485351562ms\r\n",,terminal_output +8569,12407260,"TERMINAL",0,0,"Step 2481, loss: 1.1355036497116089, step time: 18.216848373413086ms\r\n",,terminal_output +8570,12407316,"TERMINAL",0,0,"Step 2482, loss: 1.1135995388031006, step time: 17.718791961669922ms\r\nStep 2483, loss: 1.2720789909362793, step time: 17.59815216064453ms\r\n",,terminal_output +8571,12407457,"TERMINAL",0,0,"Step 2484, loss: 1.1308152675628662, step time: 18.134355545043945ms\r\n",,terminal_output +8572,12407512,"TERMINAL",0,0,"Step 2485, loss: 1.1136667728424072, step time: 17.47918128967285ms\r\nStep 2486, loss: 1.1215572357177734, step time: 17.421483993530273ms\r\n",,terminal_output +8573,12407588,"TERMINAL",0,0,"Step 2487, loss: 1.109931230545044, step time: 17.84062385559082ms\r\n",,terminal_output +8574,12407653,"TERMINAL",0,0,"Step 2488, 
loss: 1.1081128120422363, step time: 17.388105392456055ms\r\n",,terminal_output +8575,12407754,"TERMINAL",0,0,"Step 2489, loss: 1.109451413154602, step time: 17.43006706237793ms\r\n",,terminal_output +8576,12407814,"TERMINAL",0,0,"Step 2490, loss: 1.109117865562439, step time: 18.13673973083496ms\r\nStep 2491, loss: 1.1559087038040161, step time: 17.28534698486328ms\r\n",,terminal_output +8577,12407945,"TERMINAL",0,0,"Step 2492, loss: 1.0920714139938354, step time: 17.699003219604492ms\r\n",,terminal_output +8578,12408086,"TERMINAL",0,0,"Step 2493, loss: 1.3522891998291016, step time: 17.894268035888672ms\r\nStep 2494, loss: 1.0968754291534424, step time: 17.37380027770996ms\r\n",,terminal_output +8579,12408142,"TERMINAL",0,0,"Step 2495, loss: 1.1036137342453003, step time: 17.444849014282227ms\r\nStep 2496, loss: 1.1452704668045044, step time: 17.849445343017578ms\r\n",,terminal_output +8580,12408284,"TERMINAL",0,0,"Step 2497, loss: 1.082803726196289, step time: 17.162084579467773ms\r\n",,terminal_output +8581,12408339,"TERMINAL",0,0,"Step 2498, loss: 1.391903281211853, step time: 17.227888107299805ms\r\nStep 2499, loss: 1.2265684604644775, step time: 18.014907836914062ms\r\n",,terminal_output +8582,12411016,"TERMINAL",0,0,"Step 2500, loss: 1.1073002815246582, step time: 24.69611167907715ms\r\n",,terminal_output +8583,12411080,"TERMINAL",0,0,"Step 2501, loss: 1.1312764883041382, step time: 25.195598602294922ms\r\n",,terminal_output +8584,12411188,"TERMINAL",0,0,"Step 2502, loss: 1.1109539270401, step time: 20.00141143798828ms\r\n",,terminal_output +8585,12411241,"TERMINAL",0,0,"Step 2503, loss: 1.129234790802002, step time: 19.74654197692871ms\r\n",,terminal_output +8586,12411349,"TERMINAL",0,0,"Step 2504, loss: 1.0692938566207886, step time: 19.108295440673828ms\r\nStep 2505, loss: 1.079296350479126, step time: 19.390344619750977ms\r\n",,terminal_output +8587,12411443,"TERMINAL",0,0,"Step 2506, loss: 1.0988179445266724, step time: 18.49365234375ms\r\n",,terminal_output +8588,12411497,"TERMINAL",0,0,"Step 2507, loss: 1.0912084579467773, step time: 18.6920166015625ms\r\n",,terminal_output +8589,12411592,"TERMINAL",0,0,"Step 2508, loss: 1.0810679197311401, step time: 17.633438110351562ms\r\n",,terminal_output +8590,12411760,"TERMINAL",0,0,"Step 2509, loss: 1.1146584749221802, step time: 18.50414276123047ms\r\nStep 2510, loss: 1.107714056968689, step time: 21.305084228515625ms\r\n",,terminal_output +8591,12411811,"TERMINAL",0,0,"Step 2511, loss: 1.155220627784729, step time: 27.52065658569336ms\r\nStep 2512, loss: 1.0981742143630981, step time: 23.390769958496094ms\r\n",,terminal_output +8592,12411934,"TERMINAL",0,0,"Step 2513, loss: 1.651943564414978, step time: 26.6420841217041ms\r\nStep 2514, loss: 1.0805306434631348, step time: 24.93143081665039ms\r\n",,terminal_output +8593,12412045,"TERMINAL",0,0,"Step 2515, loss: 1.4597018957138062, step time: 25.647401809692383ms\r\n",,terminal_output +8594,12412098,"TERMINAL",0,0,"Step 2516, loss: 1.2009763717651367, step time: 25.845766067504883ms\r\n",,terminal_output +8595,12412206,"TERMINAL",0,0,"Step 2517, loss: 1.1654043197631836, step time: 26.317358016967773ms\r\nStep 2518, loss: 1.5422799587249756, step time: 25.100231170654297ms\r\n",,terminal_output +8596,12412269,"TERMINAL",0,0,"Step 2519, loss: 1.077750325202942, step time: 21.633386611938477ms\r\n",,terminal_output +8597,12412352,"TERMINAL",0,0,"Step 2520, loss: 1.1001243591308594, step time: 18.384456634521484ms\r\n",,terminal_output +8598,12412432,"TERMINAL",0,0,"Step 2521, loss: 
1.092535138130188, step time: 17.7767276763916ms\r\n",,terminal_output +8599,12412493,"TERMINAL",0,0,"Step 2522, loss: 1.0765691995620728, step time: 29.189348220825195ms\r\n",,terminal_output +8600,12412555,"TERMINAL",0,0,"Step 2523, loss: 1.2594398260116577, step time: 21.179676055908203ms\r\n",,terminal_output +8601,12412618,"TERMINAL",0,0,"Step 2524, loss: 1.0829801559448242, step time: 17.5933837890625ms\r\n",,terminal_output +8602,12412683,"TERMINAL",0,0,"Step 2525, loss: 1.0840750932693481, step time: 18.190383911132812ms\r\n",,terminal_output +8603,12412742,"TERMINAL",0,0,"Step 2526, loss: 1.495468258857727, step time: 17.23623275756836ms\r\n",,terminal_output +8604,12412808,"TERMINAL",0,0,"Step 2527, loss: 1.187528371810913, step time: 17.648935317993164ms\r\n",,terminal_output +8605,12412869,"TERMINAL",0,0,"Step 2528, loss: 1.0846604108810425, step time: 18.44477653503418ms\r\n",,terminal_output +8606,12412932,"TERMINAL",0,0,"Step 2529, loss: 1.1021933555603027, step time: 19.540786743164062ms\r\n",,terminal_output +8607,12412994,"TERMINAL",0,0,"Step 2530, loss: 1.0930843353271484, step time: 18.637895584106445ms\r\n",,terminal_output +8608,12413058,"TERMINAL",0,0,"Step 2531, loss: 1.2708995342254639, step time: 18.419742584228516ms\r\n",,terminal_output +8609,12413124,"TERMINAL",0,0,"Step 2532, loss: 1.225351095199585, step time: 17.594337463378906ms\r\n",,terminal_output +8610,12413198,"TERMINAL",0,0,"Step 2533, loss: 1.1042137145996094, step time: 17.670154571533203ms\r\n",,terminal_output +8611,12413260,"TERMINAL",0,0,"Step 2534, loss: 1.0806443691253662, step time: 18.213272094726562ms\r\n",,terminal_output +8612,12413366,"TERMINAL",0,0,"Step 2535, loss: 1.0785365104675293, step time: 17.710447311401367ms\r\nStep 2536, loss: 1.3234691619873047, step time: 17.327547073364258ms\r\n",,terminal_output +8613,12413483,"TERMINAL",0,0,"Step 2537, loss: 1.0805033445358276, step time: 18.18394660949707ms\r\nStep 2538, loss: 1.1057161092758179, step time: 17.289161682128906ms\r\n",,terminal_output +8614,12413577,"TERMINAL",0,0,"Step 2539, loss: 1.0624346733093262, step time: 17.67134666442871ms\r\n",,terminal_output +8615,12413683,"TERMINAL",0,0,"Step 2540, loss: 1.0771652460098267, step time: 17.697572708129883ms\r\nStep 2541, loss: 1.1363022327423096, step time: 17.78388023376465ms\r\n",,terminal_output +8616,12413751,"TERMINAL",0,0,"Step 2542, loss: 1.0696157217025757, step time: 17.432451248168945ms\r\n",,terminal_output +8617,12413809,"TERMINAL",0,0,"Step 2543, loss: 1.0622559785842896, step time: 18.065214157104492ms\r\n",,terminal_output +8618,12413872,"TERMINAL",0,0,"Step 2544, loss: 1.086026906967163, step time: 18.59450340270996ms\r\n",,terminal_output +8619,12413941,"TERMINAL",0,0,"Step 2545, loss: 1.068956971168518, step time: 18.186330795288086ms\r\n",,terminal_output +8620,12414002,"TERMINAL",0,0,"Step 2546, loss: 1.1006428003311157, step time: 17.99321174621582ms\r\n",,terminal_output +8621,12414068,"TERMINAL",0,0,"Step 2547, loss: 1.1409080028533936, step time: 17.935752868652344ms\r\n",,terminal_output +8622,12414132,"TERMINAL",0,0,"Step 2548, loss: 1.0716452598571777, step time: 17.248868942260742ms\r\n",,terminal_output +8623,12414198,"TERMINAL",0,0,"Step 2549, loss: 1.0851354598999023, step time: 18.19443702697754ms\r\n",,terminal_output +8624,12414261,"TERMINAL",0,0,"Step 2550, loss: 1.0869319438934326, step time: 17.500877380371094ms\r\n",,terminal_output +8625,12414373,"TERMINAL",0,0,"Step 2551, loss: 1.0912482738494873, step time: 17.36927032470703ms\r\nStep 
2552, loss: 1.0899949073791504, step time: 17.99154281616211ms\r\n",,terminal_output +8626,12414437,"TERMINAL",0,0,"Step 2553, loss: 1.0610007047653198, step time: 17.510175704956055ms\r\n",,terminal_output +8627,12414503,"TERMINAL",0,0,"Step 2554, loss: 1.1445266008377075, step time: 23.122549057006836ms\r\n",,terminal_output +8628,12414566,"TERMINAL",0,0,"Step 2555, loss: 1.0638080835342407, step time: 19.829750061035156ms\r\n",,terminal_output +8629,12414630,"TERMINAL",0,0,"Step 2556, loss: 1.8706003427505493, step time: 17.332792282104492ms\r\n",,terminal_output +8630,12414695,"TERMINAL",0,0,"Step 2557, loss: 1.1153944730758667, step time: 17.435789108276367ms\r\n",,terminal_output +8631,12414760,"TERMINAL",0,0,"Step 2558, loss: 1.070207953453064, step time: 17.45772361755371ms\r\n",,terminal_output +8632,12414857,"TERMINAL",0,0,"Step 2559, loss: 1.075053095817566, step time: 17.264604568481445ms\r\n",,terminal_output +8633,12414919,"TERMINAL",0,0,"Step 2560, loss: 1.3081490993499756, step time: 17.645597457885742ms\r\n",,terminal_output +8634,12414981,"TERMINAL",0,0,"Step 2561, loss: 1.063960313796997, step time: 18.117189407348633ms\r\n",,terminal_output +8635,12415044,"TERMINAL",0,0,"Step 2562, loss: 1.057342767715454, step time: 17.239809036254883ms\r\n",,terminal_output +8636,12415108,"TERMINAL",0,0,"Step 2563, loss: 1.0802029371261597, step time: 17.378807067871094ms\r\n",,terminal_output +8637,12415174,"TERMINAL",0,0,"Step 2564, loss: 1.0798784494400024, step time: 18.14126968383789ms\r\n",,terminal_output +8638,12415238,"TERMINAL",0,0,"Step 2565, loss: 1.055511713027954, step time: 17.629623413085938ms\r\n",,terminal_output +8639,12415300,"TERMINAL",0,0,"Step 2566, loss: 1.0276682376861572, step time: 17.317771911621094ms\r\n",,terminal_output +8640,12415363,"TERMINAL",0,0,"Step 2567, loss: 1.0940848588943481, step time: 17.908573150634766ms\r\n",,terminal_output +8641,12415426,"TERMINAL",0,0,"Step 2568, loss: 1.1064509153366089, step time: 17.345905303955078ms\r\n",,terminal_output +8642,12415486,"TERMINAL",0,0,"Step 2569, loss: 2.046887159347534, step time: 17.32182502746582ms\r\n",,terminal_output +8643,12415594,"TERMINAL",0,0,"Step 2570, loss: 1.4044554233551025, step time: 17.73238182067871ms\r\nStep 2571, loss: 1.1988619565963745, step time: 17.478466033935547ms\r\n",,terminal_output +8644,12415654,"TERMINAL",0,0,"Step 2572, loss: 1.1526930332183838, step time: 17.262697219848633ms\r\n",,terminal_output +8645,12415723,"TERMINAL",0,0,"Step 2573, loss: 1.0737502574920654, step time: 18.021583557128906ms\r\n",,terminal_output +8646,12415775,"TERMINAL",0,0,"Step 2574, loss: 1.0517487525939941, step time: 17.314672470092773ms\r\n",,terminal_output +8647,12415841,"TERMINAL",0,0,"Step 2575, loss: 1.0932759046554565, step time: 17.42243766784668ms\r\n",,terminal_output +8648,12415964,"TERMINAL",0,0,"Step 2576, loss: 1.0444923639297485, step time: 17.482757568359375ms\r\nStep 2577, loss: 1.0544055700302124, step time: 17.725467681884766ms\r\n",,terminal_output +8649,12416029,"TERMINAL",0,0,"Step 2578, loss: 1.0459294319152832, step time: 17.00115203857422ms\r\n",,terminal_output +8650,12416098,"TERMINAL",0,0,"Step 2579, loss: 1.0511993169784546, step time: 18.038511276245117ms\r\n",,terminal_output +8651,12416156,"TERMINAL",0,0,"Step 2580, loss: 1.0424269437789917, step time: 17.05002784729004ms\r\n",,terminal_output +8652,12416226,"TERMINAL",0,0,"Step 2581, loss: 1.0432134866714478, step time: 17.39501953125ms\r\n",,terminal_output +8653,12416522,"TERMINAL",0,0,"Step 2582, loss: 
1.070966362953186, step time: 299.6180057525635ms\r\n",,terminal_output +8654,12416588,"TERMINAL",0,0,"Step 2583, loss: 1.20882248878479, step time: 25.0093936920166ms\r\n",,terminal_output +8655,12416658,"TERMINAL",0,0,"Step 2584, loss: 1.0525537729263306, step time: 19.837617874145508ms\r\n",,terminal_output +8656,12416728,"TERMINAL",0,0,"Step 2585, loss: 1.044970989227295, step time: 19.08087730407715ms\r\n",,terminal_output +8657,12416780,"TERMINAL",0,0,"Step 2586, loss: 1.0526732206344604, step time: 17.754316329956055ms\r\n",,terminal_output +8658,12416879,"TERMINAL",0,0,"Step 2587, loss: 1.0310419797897339, step time: 19.266128540039062ms\r\n",,terminal_output +8659,12416945,"TERMINAL",0,0,"Step 2588, loss: 1.1974284648895264, step time: 18.0819034576416ms\r\n",,terminal_output +8660,12417006,"TERMINAL",0,0,"Step 2589, loss: 1.0555245876312256, step time: 17.52781867980957ms\r\n",,terminal_output +8661,12417113,"TERMINAL",0,0,"Step 2590, loss: 1.0773968696594238, step time: 17.480134963989258ms\r\nStep 2591, loss: 1.0288643836975098, step time: 18.100500106811523ms\r\n",,terminal_output +8662,12417178,"TERMINAL",0,0,"Step 2592, loss: 1.04879629611969, step time: 17.521142959594727ms\r\n",,terminal_output +8663,12417240,"TERMINAL",0,0,"Step 2593, loss: 1.110229253768921, step time: 17.355918884277344ms\r\n",,terminal_output +8664,12417305,"TERMINAL",0,0,"Step 2594, loss: 1.058640956878662, step time: 17.92311668395996ms\r\n",,terminal_output +8665,12417368,"TERMINAL",0,0,"Step 2595, loss: 1.0563346147537231, step time: 17.43769645690918ms\r\n",,terminal_output +8666,12417466,"TERMINAL",0,0,"Step 2596, loss: 1.031929612159729, step time: 17.437219619750977ms\r\n",,terminal_output +8667,12417546,"TERMINAL",0,0,"Step 2597, loss: 1.0352098941802979, step time: 18.213510513305664ms\r\nStep 2598, loss: 1.0284591913223267, step time: 17.409086227416992ms\r\n",,terminal_output +8668,12417655,"TERMINAL",0,0,"Step 2599, loss: 1.479037880897522, step time: 17.26531982421875ms\r\n",,terminal_output +8669,12417708,"TERMINAL",0,0,"Step 2600, loss: 1.0149340629577637, step time: 19.022464752197266ms\r\n",,terminal_output +8670,12417815,"TERMINAL",0,0,"Step 2601, loss: 1.0446231365203857, step time: 17.93980598449707ms\r\nStep 2602, loss: 1.0528922080993652, step time: 17.418384552001953ms\r\n",,terminal_output +8671,12417912,"TERMINAL",0,0,"Step 2603, loss: 1.223267912864685, step time: 17.968416213989258ms\r\n",,terminal_output +8672,12418026,"TERMINAL",0,0,"Step 2604, loss: 1.0306730270385742, step time: 17.306804656982422ms\r\nStep 2605, loss: 1.022081732749939, step time: 17.600297927856445ms\r\n",,terminal_output +8673,12418135,"TERMINAL",0,0,"Step 2606, loss: 1.0631108283996582, step time: 17.772674560546875ms\r\nStep 2607, loss: 1.1365495920181274, step time: 17.984867095947266ms\r\n",,terminal_output +8674,12418197,"TERMINAL",0,0,"Step 2608, loss: 1.0492281913757324, step time: 17.37046241760254ms\r\n",,terminal_output +8675,12418294,"TERMINAL",0,0,"Step 2609, loss: 1.050317406654358, step time: 18.388748168945312ms\r\n",,terminal_output +8676,12418355,"TERMINAL",0,0,"Step 2610, loss: 1.1541228294372559, step time: 17.765283584594727ms\r\n",,terminal_output +8677,12418417,"TERMINAL",0,0,"Step 2611, loss: 1.1829628944396973, step time: 17.536163330078125ms\r\n",,terminal_output +8678,12418479,"TERMINAL",0,0,"Step 2612, loss: 1.0674368143081665, step time: 18.767118453979492ms\r\n",,terminal_output +8679,12418595,"TERMINAL",0,0,"Step 2613, loss: 1.0386260747909546, step time: 
17.668724060058594ms\r\nStep 2614, loss: 1.034480094909668, step time: 17.25029945373535ms\r\n",,terminal_output +8680,12418661,"TERMINAL",0,0,"Step 2615, loss: 1.021653413772583, step time: 18.21446418762207ms\r\n",,terminal_output +8681,12418711,"TERMINAL",0,0,"Step 2616, loss: 1.0570313930511475, step time: 17.534971237182617ms\r\n",,terminal_output +8682,12418763,"TERMINAL",0,0,"Step 2617, loss: 1.0186851024627686, step time: 17.7462100982666ms\r\n",,terminal_output +8683,12418856,"TERMINAL",0,0,"Step 2618, loss: 1.7325479984283447, step time: 17.80080795288086ms\r\n",,terminal_output +8684,12418971,"TERMINAL",0,0,"Step 2619, loss: 1.1194953918457031, step time: 19.310474395751953ms\r\nStep 2620, loss: 1.0228376388549805, step time: 17.734289169311523ms\r\n",,terminal_output +8685,12419026,"TERMINAL",0,0,"Step 2621, loss: 1.0465309619903564, step time: 18.287181854248047ms\r\n",,terminal_output +8686,12419146,"TERMINAL",0,0,"Step 2622, loss: 1.0847835540771484, step time: 17.32659339904785ms\r\nStep 2623, loss: 1.0110304355621338, step time: 17.232179641723633ms\r\n",,terminal_output +8687,12419273,"TERMINAL",0,0,"Step 2624, loss: 1.0408947467803955, step time: 17.50969886779785ms\r\nStep 2625, loss: 1.04184889793396, step time: 17.650604248046875ms\r\n",,terminal_output +8688,12419366,"TERMINAL",0,0,"Step 2626, loss: 1.1006399393081665, step time: 17.183542251586914ms\r\n",,terminal_output +8689,12419417,"TERMINAL",0,0,"Step 2627, loss: 1.0371533632278442, step time: 18.124818801879883ms\r\n",,terminal_output +8690,12419562,"TERMINAL",0,0,"Step 2628, loss: 1.0382463932037354, step time: 17.177820205688477ms\r\nStep 2629, loss: 1.015669345855713, step time: 17.290830612182617ms\r\n",,terminal_output +8691,12419614,"TERMINAL",0,0,"Step 2630, loss: 1.00846529006958, step time: 17.769813537597656ms\r\n",,terminal_output +8692,12419729,"TERMINAL",0,0,"Step 2631, loss: 1.0611437559127808, step time: 17.676353454589844ms\r\nStep 2632, loss: 1.0106762647628784, step time: 17.02713966369629ms\r\n",,terminal_output +8693,12419781,"TERMINAL",0,0,"Step 2633, loss: 1.025653600692749, step time: 18.13650131225586ms\r\n",,terminal_output +8694,12419877,"TERMINAL",0,0,"Step 2634, loss: 1.0066598653793335, step time: 16.978740692138672ms\r\n",,terminal_output +8695,12419975,"TERMINAL",0,0,"Step 2635, loss: 1.0453466176986694, step time: 17.287731170654297ms\r\nStep 2636, loss: 1.0581262111663818, step time: 17.78888702392578ms\r\n",,terminal_output +8696,12420075,"TERMINAL",0,0,"Step 2637, loss: 1.244755744934082, step time: 17.73548126220703ms\r\n",,terminal_output +8697,12420127,"TERMINAL",0,0,"Step 2638, loss: 1.1168603897094727, step time: 17.267704010009766ms\r\n",,terminal_output +8698,12420231,"TERMINAL",0,0,"Step 2639, loss: 1.0171473026275635, step time: 17.915010452270508ms\r\nStep 2640, loss: 1.0357598066329956, step time: 17.242431640625ms\r\n",,terminal_output +8699,12420292,"TERMINAL",0,0,"Step 2641, loss: 1.7004566192626953, step time: 17.35830307006836ms\r\n",,terminal_output +8700,12420350,"TERMINAL",0,0,"Step 2642, loss: 1.0181970596313477, step time: 17.75074005126953ms\r\n",,terminal_output +8701,12420415,"TERMINAL",0,0,"Step 2643, loss: 1.0479363203048706, step time: 17.637968063354492ms\r\n",,terminal_output +8702,12420519,"TERMINAL",0,0,"Step 2644, loss: 1.025980830192566, step time: 17.017126083374023ms\r\n",,terminal_output +8703,12420572,"TERMINAL",0,0,"Step 2645, loss: 0.9995766282081604, step time: 17.670631408691406ms\r\n",,terminal_output 
+8704,12420676,"TERMINAL",0,0,"Step 2646, loss: 0.9949168562889099, step time: 17.116308212280273ms\r\nStep 2647, loss: 1.3721851110458374, step time: 17.531156539916992ms\r\n",,terminal_output +8705,12420796,"TERMINAL",0,0,"Step 2648, loss: 1.0140568017959595, step time: 17.54474639892578ms\r\nStep 2649, loss: 1.0048511028289795, step time: 19.086360931396484ms\r\n",,terminal_output +8706,12420860,"TERMINAL",0,0,"Step 2650, loss: 1.7093863487243652, step time: 17.24529266357422ms\r\n",,terminal_output +8707,12420963,"TERMINAL",0,0,"Step 2651, loss: 1.0237141847610474, step time: 17.88616180419922ms\r\n",,terminal_output +8708,12421024,"TERMINAL",0,0,"Step 2652, loss: 1.0284433364868164, step time: 17.053604125976562ms\r\n",,terminal_output +8709,12421084,"TERMINAL",0,0,"Step 2653, loss: 1.019483208656311, step time: 17.322540283203125ms\r\n",,terminal_output +8710,12421143,"TERMINAL",0,0,"Step 2654, loss: 1.0488873720169067, step time: 17.653226852416992ms\r\n",,terminal_output +8711,12421206,"TERMINAL",0,0,"Step 2655, loss: 1.1012076139450073, step time: 17.23647117614746ms\r\n",,terminal_output +8712,12421265,"TERMINAL",0,0,"Step 2656, loss: 1.1461594104766846, step time: 17.33875274658203ms\r\n",,terminal_output +8713,12421372,"TERMINAL",0,0,"Step 2657, loss: 1.0090980529785156, step time: 18.241405487060547ms\r\nStep 2658, loss: 1.306196689605713, step time: 17.214298248291016ms\r\n",,terminal_output +8714,12421434,"TERMINAL",0,0,"Step 2659, loss: 1.0358468294143677, step time: 17.40860939025879ms\r\n",,terminal_output +8715,12421497,"TERMINAL",0,0,"Step 2660, loss: 1.0458319187164307, step time: 17.7919864654541ms\r\n",,terminal_output +8716,12421560,"TERMINAL",0,0,"Step 2661, loss: 1.0907038450241089, step time: 17.432689666748047ms\r\n",,terminal_output +8717,12421620,"TERMINAL",0,0,"Step 2662, loss: 0.9943606853485107, step time: 17.08054542541504ms\r\n",,terminal_output +8718,12421746,"TERMINAL",0,0,"Step 2663, loss: 1.0529488325119019, step time: 18.187999725341797ms\r\nStep 2664, loss: 1.0250025987625122, step time: 17.0137882232666ms\r\n",,terminal_output +8719,12421810,"TERMINAL",0,0,"Step 2665, loss: 1.0092763900756836, step time: 17.304182052612305ms\r\n",,terminal_output +8720,12421874,"TERMINAL",0,0,"Step 2666, loss: 0.9961357712745667, step time: 17.629384994506836ms\r\n",,terminal_output +8721,12421936,"TERMINAL",0,0,"Step 2667, loss: 1.0124239921569824, step time: 17.51112937927246ms\r\n",,terminal_output +8722,12422005,"TERMINAL",0,0,"Step 2668, loss: 1.0320590734481812, step time: 17.38882064819336ms\r\n",,terminal_output +8723,12422064,"TERMINAL",0,0,"Step 2669, loss: 1.0038065910339355, step time: 18.02349090576172ms\r\n",,terminal_output +8724,12422129,"TERMINAL",0,0,"Step 2670, loss: 1.0843559503555298, step time: 17.37499237060547ms\r\n",,terminal_output +8725,12422239,"TERMINAL",0,0,"Step 2671, loss: 0.9836573600769043, step time: 17.472267150878906ms\r\n",,terminal_output +8726,12422273,"TERMINAL",0,0,"Step 2672, loss: 0.9959774613380432, step time: 17.720460891723633ms\r\n",,terminal_output +8727,12422324,"TERMINAL",0,0,"Step 2673, loss: 2.227456569671631, step time: 17.442941665649414ms\r\n",,terminal_output +8728,12422385,"TERMINAL",0,0,"Step 2674, loss: 0.9819285869598389, step time: 17.340421676635742ms\r\n",,terminal_output +8729,12422446,"TERMINAL",0,0,"Step 2675, loss: 1.0200320482254028, step time: 18.006324768066406ms\r\n",,terminal_output +8730,12422509,"TERMINAL",0,0,"Step 2676, loss: 1.2705636024475098, step time: 
17.127037048339844ms\r\n",,terminal_output +8731,12422568,"TERMINAL",0,0,"Step 2677, loss: 0.993523359298706, step time: 17.537593841552734ms\r\n",,terminal_output +8732,12422639,"TERMINAL",0,0,"Step 2678, loss: 0.9947813749313354, step time: 17.5020694732666ms\r\n",,terminal_output +8733,12422717,"TERMINAL",0,0,"Step 2679, loss: 1.0266809463500977, step time: 17.479658126831055ms\r\n",,terminal_output +8734,12422771,"TERMINAL",0,0,"Step 2680, loss: 1.013495683670044, step time: 17.157316207885742ms\r\n",,terminal_output +8735,12422854,"TERMINAL",0,0,"Step 2681, loss: 1.0033351182937622, step time: 18.131732940673828ms\r\n",,terminal_output +8736,12422914,"TERMINAL",0,0,"Step 2682, loss: 0.9982730150222778, step time: 17.154932022094727ms\r\n",,terminal_output +8737,12422974,"TERMINAL",0,0,"Step 2683, loss: 0.9783347845077515, step time: 17.565250396728516ms\r\n",,terminal_output +8738,12423036,"TERMINAL",0,0,"Step 2684, loss: 0.9791362881660461, step time: 17.817974090576172ms\r\n",,terminal_output +8739,12423099,"TERMINAL",0,0,"Step 2685, loss: 1.3817377090454102, step time: 17.816781997680664ms\r\n",,terminal_output +8740,12423159,"TERMINAL",0,0,"Step 2686, loss: 1.009456753730774, step time: 17.301082611083984ms\r\n",,terminal_output +8741,12423221,"TERMINAL",0,0,"Step 2687, loss: 1.0070403814315796, step time: 19.1497802734375ms\r\n",,terminal_output +8742,12423282,"TERMINAL",0,0,"Step 2688, loss: 1.013232707977295, step time: 17.406463623046875ms\r\n",,terminal_output +8743,12423396,"TERMINAL",0,0,"Step 2689, loss: 1.0052759647369385, step time: 17.767906188964844ms\r\nStep 2690, loss: 1.01160728931427, step time: 18.124103546142578ms\r\n",,terminal_output +8744,12423489,"TERMINAL",0,0,"Step 2691, loss: 0.9942573308944702, step time: 18.47219467163086ms\r\n",,terminal_output +8745,12423539,"TERMINAL",0,0,"Step 2692, loss: 0.9957294464111328, step time: 17.53687858581543ms\r\n",,terminal_output +8746,12423649,"TERMINAL",0,0,"Step 2693, loss: 1.0831242799758911, step time: 18.418312072753906ms\r\nStep 2694, loss: 1.1591142416000366, step time: 17.418622970581055ms\r\n",,terminal_output +8747,12423717,"TERMINAL",0,0,"Step 2695, loss: 0.9787966012954712, step time: 17.577409744262695ms\r\n",,terminal_output +8748,12423812,"TERMINAL",0,0,"Step 2696, loss: 1.00022554397583, step time: 18.07260513305664ms\r\n",,terminal_output +8749,12423863,"TERMINAL",0,0,"Step 2697, loss: 1.006208896636963, step time: 17.980575561523438ms\r\n",,terminal_output +8750,12423969,"TERMINAL",0,0,"Step 2698, loss: 0.9930241703987122, step time: 19.060373306274414ms\r\nStep 2699, loss: 1.5133031606674194, step time: 18.722057342529297ms\r\n",,terminal_output +8751,12424062,"TERMINAL",0,0,"Step 2700, loss: 0.9883109331130981, step time: 17.39501953125ms\r\n",,terminal_output +8752,12424113,"TERMINAL",0,0,"Step 2701, loss: 1.0404863357543945, step time: 17.649412155151367ms\r\n",,terminal_output +8753,12424205,"TERMINAL",0,0,"Step 2702, loss: 0.970649242401123, step time: 17.797470092773438ms\r\n",,terminal_output +8754,12424524,"TERMINAL",0,0,"Step 2703, loss: 1.0284202098846436, step time: 296.065092086792ms\r\nStep 2704, loss: 1.0040814876556396, step time: 25.043725967407227ms\r\n",,terminal_output +8755,12424615,"TERMINAL",0,0,"Step 2705, loss: 1.0391844511032104, step time: 19.89269256591797ms\r\n",,terminal_output +8756,12424668,"TERMINAL",0,0,"Step 2706, loss: 1.0346390008926392, step time: 18.407344818115234ms\r\n",,terminal_output +8757,12424776,"TERMINAL",0,0,"Step 2707, loss: 0.9892398715019226, step 
time: 17.97795295715332ms\r\nStep 2708, loss: 1.1379104852676392, step time: 19.118309020996094ms\r\n",,terminal_output +8758,12424869,"TERMINAL",0,0,"Step 2709, loss: 0.9788236021995544, step time: 18.521547317504883ms\r\n",,terminal_output +8759,12424925,"TERMINAL",0,0,"Step 2710, loss: 0.9997800588607788, step time: 17.3187255859375ms\r\n",,terminal_output +8760,12425019,"TERMINAL",0,0,"Step 2711, loss: 0.9563693404197693, step time: 17.742633819580078ms\r\n",,terminal_output +8761,12425070,"TERMINAL",0,0,"Step 2712, loss: 0.9634289145469666, step time: 17.386674880981445ms\r\n",,terminal_output +8762,12425123,"TERMINAL",0,0,"Step 2713, loss: 0.9562653303146362, step time: 17.293453216552734ms\r\n",,terminal_output +8763,12425229,"TERMINAL",0,0,"Step 2714, loss: 0.984774649143219, step time: 17.157793045043945ms\r\nStep 2715, loss: 0.9874159097671509, step time: 17.805099487304688ms\r\n",,terminal_output +8764,12425354,"TERMINAL",0,0,"Step 2716, loss: 0.9629781246185303, step time: 17.203330993652344ms\r\nStep 2717, loss: 0.9895915985107422, step time: 17.6236629486084ms\r\n",,terminal_output +8765,12425414,"TERMINAL",0,0,"Step 2718, loss: 0.9817484617233276, step time: 17.389774322509766ms\r\n",,terminal_output +8766,12425478,"TERMINAL",0,0,"Step 2719, loss: 1.8414462804794312, step time: 17.42863655090332ms\r\n",,terminal_output +8767,12425543,"TERMINAL",0,0,"Step 2720, loss: 1.0542101860046387, step time: 17.321109771728516ms\r\n",,terminal_output +8768,12425607,"TERMINAL",0,0,"Step 2721, loss: 0.9759794473648071, step time: 17.751693725585938ms\r\n",,terminal_output +8769,12425668,"TERMINAL",0,0,"Step 2722, loss: 0.9674623608589172, step time: 17.186641693115234ms\r\n",,terminal_output +8770,12425734,"TERMINAL",0,0,"Step 2723, loss: 1.0473980903625488, step time: 17.48061180114746ms\r\n",,terminal_output +8771,12425857,"TERMINAL",0,0,"Step 2724, loss: 0.9557493329048157, step time: 17.40431785583496ms\r\nStep 2725, loss: 0.992158830165863, step time: 17.44389533996582ms\r\n",,terminal_output +8772,12425985,"TERMINAL",0,0,"Step 2726, loss: 0.9859790802001953, step time: 17.28057861328125ms\r\nStep 2727, loss: 1.1891648769378662, step time: 18.042802810668945ms\r\n",,terminal_output +8773,12426048,"TERMINAL",0,0,"Step 2728, loss: 0.9821884632110596, step time: 17.491817474365234ms\r\n",,terminal_output +8774,12426112,"TERMINAL",0,0,"Step 2729, loss: 0.9820268154144287, step time: 17.912626266479492ms\r\n",,terminal_output +8775,12426180,"TERMINAL",0,0,"Step 2730, loss: 0.9825838804244995, step time: 17.662763595581055ms\r\n",,terminal_output +8776,12426238,"TERMINAL",0,0,"Step 2731, loss: 0.9838352799415588, step time: 17.305374145507812ms\r\n",,terminal_output +8777,12426309,"TERMINAL",0,0,"Step 2732, loss: 0.9860727787017822, step time: 17.110586166381836ms\r\n",,terminal_output +8778,12426370,"TERMINAL",0,0,"Step 2733, loss: 1.1098058223724365, step time: 17.928361892700195ms\r\n",,terminal_output +8779,12426469,"TERMINAL",0,0,"Step 2734, loss: 0.9554578065872192, step time: 17.3037052154541ms\r\n",,terminal_output +8780,12426514,"TERMINAL",0,0,"Step 2735, loss: 1.2635188102722168, step time: 17.46964454650879ms\r\n",,terminal_output +8781,12426618,"TERMINAL",0,0,"Step 2736, loss: 0.940122127532959, step time: 17.755746841430664ms\r\nStep 2737, loss: 0.9977895021438599, step time: 17.728567123413086ms\r\n",,terminal_output +8782,12426712,"TERMINAL",0,0,"Step 2738, loss: 0.9912504553794861, step time: 17.34614372253418ms\r\n",,terminal_output +8783,12426763,"TERMINAL",0,0,"Step 
2739, loss: 0.95362389087677, step time: 17.83132553100586ms\r\n",,terminal_output +8784,12426908,"TERMINAL",0,0,"Step 2740, loss: 0.9516004323959351, step time: 18.265247344970703ms\r\nStep 2741, loss: 0.95135098695755, step time: 18.900632858276367ms\r\n",,terminal_output +8785,12426960,"TERMINAL",0,0,"Step 2742, loss: 1.0022814273834229, step time: 18.126249313354492ms\r\n",,terminal_output +8786,12427065,"TERMINAL",0,0,"Step 2743, loss: 0.9890190958976746, step time: 17.95363426208496ms\r\nStep 2744, loss: 1.290846347808838, step time: 17.608165740966797ms\r\n",,terminal_output +8787,12427159,"TERMINAL",0,0,"Step 2745, loss: 1.0424880981445312, step time: 17.825841903686523ms\r\n",,terminal_output +8788,12427210,"TERMINAL",0,0,"Step 2746, loss: 0.9575249552726746, step time: 17.530441284179688ms\r\n",,terminal_output +8789,12427317,"TERMINAL",0,0,"Step 2747, loss: 0.942352294921875, step time: 17.653226852416992ms\r\nStep 2748, loss: 1.7799550294876099, step time: 17.547130584716797ms\r\n",,terminal_output +8790,12427413,"TERMINAL",0,0,"Step 2749, loss: 0.9798762798309326, step time: 17.63010025024414ms\r\n",,terminal_output +8791,12427465,"TERMINAL",0,0,"Step 2750, loss: 0.950106143951416, step time: 17.050504684448242ms\r\n",,terminal_output +8792,12427568,"TERMINAL",0,0,"Step 2751, loss: 0.9575136303901672, step time: 18.087387084960938ms\r\nStep 2752, loss: 0.9444260597229004, step time: 17.177343368530273ms\r\n",,terminal_output +8793,12427662,"TERMINAL",0,0,"Step 2753, loss: 0.9846349954605103, step time: 17.502546310424805ms\r\n",,terminal_output +8794,12427715,"TERMINAL",0,0,"Step 2754, loss: 0.9549999237060547, step time: 17.644166946411133ms\r\n",,terminal_output +8795,12427768,"TERMINAL",0,0,"Step 2755, loss: 0.9889299869537354, step time: 17.303466796875ms\r\n",,terminal_output +8796,12427865,"TERMINAL",0,0,"Step 2756, loss: 0.9655653834342957, step time: 17.265796661376953ms\r\n",,terminal_output +8797,12427940,"TERMINAL",0,0,"Step 2757, loss: 0.945864737033844, step time: 17.95792579650879ms\r\n",,terminal_output +8798,12427993,"TERMINAL",0,0,"Step 2758, loss: 0.9602180123329163, step time: 17.400264739990234ms\r\n",,terminal_output +8799,12428044,"TERMINAL",0,0,"Step 2759, loss: 0.9637877941131592, step time: 17.578125ms\r\n",,terminal_output +8800,12428149,"TERMINAL",0,0,"Step 2760, loss: 0.945978045463562, step time: 17.669200897216797ms\r\nStep 2761, loss: 0.9892336130142212, step time: 17.525672912597656ms\r\n",,terminal_output +8801,12428244,"TERMINAL",0,0,"Step 2762, loss: 0.9413585662841797, step time: 17.2426700592041ms\r\n",,terminal_output +8802,12428301,"TERMINAL",0,0,"Step 2763, loss: 0.9379169940948486, step time: 17.792224884033203ms\r\n",,terminal_output +8803,12428356,"TERMINAL",0,0,"Step 2764, loss: 0.9571956396102905, step time: 17.571687698364258ms\r\n",,terminal_output +8804,12428447,"TERMINAL",0,0,"Step 2765, loss: 0.9414076805114746, step time: 17.241954803466797ms\r\n",,terminal_output +8805,12428497,"TERMINAL",0,0,"Step 2766, loss: 1.0766786336898804, step time: 17.64535903930664ms\r\n",,terminal_output +8806,12428550,"TERMINAL",0,0,"Step 2767, loss: 0.9393184185028076, step time: 17.485857009887695ms\r\n",,terminal_output +8807,12428648,"TERMINAL",0,0,"Step 2768, loss: 0.9404231309890747, step time: 17.29273796081543ms\r\nStep 2769, loss: 1.2595077753067017, step time: 17.773866653442383ms\r\n",,terminal_output +8808,12428773,"TERMINAL",0,0,"Step 2770, loss: 0.945274829864502, step time: 17.199039459228516ms\r\nStep 2771, loss: 
0.9718354940414429, step time: 17.323017120361328ms\r\n",,terminal_output +8809,12428845,"TERMINAL",0,0,"Step 2772, loss: 0.9755735397338867, step time: 17.75050163269043ms\r\n",,terminal_output +8810,12428901,"TERMINAL",0,0,"Step 2773, loss: 1.032515287399292, step time: 17.560243606567383ms\r\n",,terminal_output +8811,12428963,"TERMINAL",0,0,"Step 2774, loss: 0.9717956781387329, step time: 18.291234970092773ms\r\n",,terminal_output +8812,12429028,"TERMINAL",0,0,"Step 2775, loss: 0.9304009079933167, step time: 18.288850784301758ms\r\n",,terminal_output +8813,12429093,"TERMINAL",0,0,"Step 2776, loss: 0.9699116349220276, step time: 17.55213737487793ms\r\n",,terminal_output +8814,12429228,"TERMINAL",0,0,"Step 2777, loss: 0.9351369142532349, step time: 17.859697341918945ms\r\nStep 2778, loss: 1.0049023628234863, step time: 17.67873764038086ms\r\n",,terminal_output +8815,12429290,"TERMINAL",0,0,"Step 2779, loss: 1.0560346841812134, step time: 17.57502555847168ms\r\n",,terminal_output +8816,12429352,"TERMINAL",0,0,"Step 2780, loss: 0.9412361979484558, step time: 17.66228675842285ms\r\n",,terminal_output +8817,12429421,"TERMINAL",0,0,"Step 2781, loss: 0.9413841366767883, step time: 18.22352409362793ms\r\n",,terminal_output +8818,12429480,"TERMINAL",0,0,"Step 2782, loss: 1.0097129344940186, step time: 18.553733825683594ms\r\n",,terminal_output +8819,12429549,"TERMINAL",0,0,"Step 2783, loss: 0.9507375359535217, step time: 17.949819564819336ms\r\n",,terminal_output +8820,12429613,"TERMINAL",0,0,"Step 2784, loss: 0.9467148184776306, step time: 17.86661148071289ms\r\n",,terminal_output +8821,12429678,"TERMINAL",0,0,"Step 2785, loss: 0.9213660359382629, step time: 17.45891571044922ms\r\n",,terminal_output +8822,12429743,"TERMINAL",0,0,"Step 2786, loss: 0.9282547235488892, step time: 17.638206481933594ms\r\n",,terminal_output +8823,12429809,"TERMINAL",0,0,"Step 2787, loss: 0.922845721244812, step time: 17.713308334350586ms\r\n",,terminal_output +8824,12429870,"TERMINAL",0,0,"Step 2788, loss: 0.9393779635429382, step time: 17.604589462280273ms\r\n",,terminal_output +8825,12429933,"TERMINAL",0,0,"Step 2789, loss: 0.9476966261863708, step time: 17.75336265563965ms\r\n",,terminal_output +8826,12430009,"TERMINAL",0,0,"Step 2790, loss: 0.9167813062667847, step time: 17.90165901184082ms\r\n",,terminal_output +8827,12430071,"TERMINAL",0,0,"Step 2791, loss: 0.9140170216560364, step time: 17.876863479614258ms\r\n",,terminal_output +8828,12430133,"TERMINAL",0,0,"Step 2792, loss: 0.9524109959602356, step time: 17.33708381652832ms\r\n",,terminal_output +8829,12430196,"TERMINAL",0,0,"Step 2793, loss: 0.9149685502052307, step time: 17.971515655517578ms\r\n",,terminal_output +8830,12430259,"TERMINAL",0,0,"Step 2794, loss: 0.9150663614273071, step time: 17.353057861328125ms\r\n",,terminal_output +8831,12430327,"TERMINAL",0,0,"Step 2795, loss: 0.8966777920722961, step time: 17.323017120361328ms\r\n",,terminal_output +8832,12430383,"TERMINAL",0,0,"Step 2796, loss: 0.913902997970581, step time: 17.430543899536133ms\r\n",,terminal_output +8833,12430488,"TERMINAL",0,0,"Step 2797, loss: 0.9743791222572327, step time: 17.490386962890625ms\r\nStep 2798, loss: 0.9537357687950134, step time: 17.3952579498291ms\r\n",,terminal_output +8834,12430559,"TERMINAL",0,0,"Step 2799, loss: 0.9367599487304688, step time: 17.890214920043945ms\r\n",,terminal_output +8835,12430622,"TERMINAL",0,0,"Step 2800, loss: 1.4003263711929321, step time: 17.277956008911133ms\r\n",,terminal_output +8836,12430685,"TERMINAL",0,0,"Step 2801, loss: 
1.0470224618911743, step time: 17.86947250366211ms\r\n",,terminal_output +8837,12430752,"TERMINAL",0,0,"Step 2802, loss: 1.041654348373413, step time: 17.997026443481445ms\r\n",,terminal_output +8838,12430818,"TERMINAL",0,0,"Step 2803, loss: 0.9285067915916443, step time: 17.731428146362305ms\r\n",,terminal_output +8839,12430880,"TERMINAL",0,0,"Step 2804, loss: 0.9210830926895142, step time: 17.67134666442871ms\r\n",,terminal_output +8840,12430943,"TERMINAL",0,0,"Step 2805, loss: 0.9872313141822815, step time: 17.972469329833984ms\r\n",,terminal_output +8841,12431008,"TERMINAL",0,0,"Step 2806, loss: 0.9953128695487976, step time: 17.540931701660156ms\r\n",,terminal_output +8842,12431069,"TERMINAL",0,0,"Step 2807, loss: 0.9091702699661255, step time: 19.49930191040039ms\r\n",,terminal_output +8843,12431133,"TERMINAL",0,0,"Step 2808, loss: 0.9426342844963074, step time: 18.722057342529297ms\r\n",,terminal_output +8844,12431195,"TERMINAL",0,0,"Step 2809, loss: 0.907955527305603, step time: 18.070459365844727ms\r\n",,terminal_output +8845,12431254,"TERMINAL",0,0,"Step 2810, loss: 0.8989658355712891, step time: 17.542600631713867ms\r\n",,terminal_output +8846,12431375,"TERMINAL",0,0,"Step 2811, loss: 0.9140142798423767, step time: 18.046140670776367ms\r\nStep 2812, loss: 1.0783504247665405, step time: 17.681360244750977ms\r\n",,terminal_output +8847,12431439,"TERMINAL",0,0,"Step 2813, loss: 0.9310740232467651, step time: 17.728090286254883ms\r\n",,terminal_output +8848,12431508,"TERMINAL",0,0,"Step 2814, loss: 0.9193982481956482, step time: 17.542362213134766ms\r\n",,terminal_output +8849,12431567,"TERMINAL",0,0,"Step 2815, loss: 1.4234362840652466, step time: 17.331838607788086ms\r\n",,terminal_output +8850,12431663,"TERMINAL",0,0,"Step 2816, loss: 0.9141407608985901, step time: 17.369747161865234ms\r\n",,terminal_output +8851,12431716,"TERMINAL",0,0,"Step 2817, loss: 0.9227811098098755, step time: 17.99321174621582ms\r\n",,terminal_output +8852,12431768,"TERMINAL",0,0,"Step 2818, loss: 0.9086954593658447, step time: 20.437955856323242ms\r\n",,terminal_output +8853,12431868,"TERMINAL",0,0,"Step 2819, loss: 0.9089304208755493, step time: 17.792940139770508ms\r\n",,terminal_output +8854,12431976,"TERMINAL",0,0,"Step 2820, loss: 0.9118900895118713, step time: 17.667293548583984ms\r\nStep 2821, loss: 1.0739641189575195, step time: 17.42839813232422ms\r\n",,terminal_output +8855,12432039,"TERMINAL",0,0,"Step 2822, loss: 0.9206361174583435, step time: 19.428491592407227ms\r\n",,terminal_output +8856,12432101,"TERMINAL",0,0,"Step 2823, loss: 0.9108383655548096, step time: 18.4173583984375ms\r\n",,terminal_output +8857,12432164,"TERMINAL",0,0,"Step 2824, loss: 0.9061914682388306, step time: 17.378807067871094ms\r\n",,terminal_output +8858,12432225,"TERMINAL",0,0,"Step 2825, loss: 1.0199356079101562, step time: 21.595001220703125ms\r\n",,terminal_output +8859,12432287,"TERMINAL",0,0,"Step 2826, loss: 0.8983405828475952, step time: 19.44708824157715ms\r\n",,terminal_output +8860,12432348,"TERMINAL",0,0,"Step 2827, loss: 0.9866552352905273, step time: 18.203020095825195ms\r\n",,terminal_output +8861,12432411,"TERMINAL",0,0,"Step 2828, loss: 1.0208104848861694, step time: 19.41394805908203ms\r\n",,terminal_output +8862,12432474,"TERMINAL",0,0,"Step 2829, loss: 0.9023295640945435, step time: 18.652915954589844ms\r\n",,terminal_output +8863,12432534,"TERMINAL",0,0,"Step 2830, loss: 0.8987720012664795, step time: 17.582178115844727ms\r\n",,terminal_output +8864,12432599,"TERMINAL",0,0,"Step 2831, loss: 
0.9033589363098145, step time: 17.836332321166992ms\r\n",,terminal_output +8865,12432664,"TERMINAL",0,0,"Step 2832, loss: 0.8973348736763, step time: 17.854928970336914ms\r\n",,terminal_output +8866,12432729,"TERMINAL",0,0,"Step 2833, loss: 1.4112370014190674, step time: 17.46654510498047ms\r\n",,terminal_output +8867,12432782,"TERMINAL",0,0,"Step 2834, loss: 0.8857424855232239, step time: 17.496109008789062ms\r\n",,terminal_output +8868,12432878,"TERMINAL",0,0,"Step 2835, loss: 0.904382050037384, step time: 18.033266067504883ms\r\n",,terminal_output +8869,12432940,"TERMINAL",0,0,"Step 2836, loss: 0.8886148929595947, step time: 17.959117889404297ms\r\n",,terminal_output +8870,12433002,"TERMINAL",0,0,"Step 2837, loss: 1.7371379137039185, step time: 18.299579620361328ms\r\n",,terminal_output +8871,12433063,"TERMINAL",0,0,"Step 2838, loss: 0.8890159130096436, step time: 17.7152156829834ms\r\n",,terminal_output +8872,12433126,"TERMINAL",0,0,"Step 2839, loss: 0.9186498522758484, step time: 17.69566535949707ms\r\n",,terminal_output +8873,12433233,"TERMINAL",0,0,"Step 2840, loss: 0.923595666885376, step time: 17.296552658081055ms\r\nStep 2841, loss: 1.0118224620819092, step time: 17.74764060974121ms\r\n",,terminal_output +8874,12433349,"TERMINAL",0,0,"Step 2842, loss: 0.9312754273414612, step time: 17.084121704101562ms\r\nStep 2843, loss: 1.2147672176361084, step time: 17.804384231567383ms\r\n",,terminal_output +8875,12433413,"TERMINAL",0,0,"Step 2844, loss: 0.9119166731834412, step time: 17.32468605041504ms\r\n",,terminal_output +8876,12433492,"TERMINAL",0,0,"Step 2845, loss: 1.2563279867172241, step time: 17.118453979492188ms\r\n",,terminal_output +8877,12433554,"TERMINAL",0,0,"Step 2846, loss: 0.9085460305213928, step time: 17.205238342285156ms\r\n",,terminal_output +8878,12433666,"TERMINAL",0,0,"Step 2847, loss: 0.8960003852844238, step time: 17.777204513549805ms\r\nStep 2848, loss: 0.9181288480758667, step time: 17.348051071166992ms\r\n",,terminal_output +8879,12433762,"TERMINAL",0,0,"Step 2849, loss: 0.9376095533370972, step time: 17.61031150817871ms\r\n",,terminal_output +8880,12433830,"TERMINAL",0,0,"Step 2850, loss: 0.9743022322654724, step time: 18.017053604125977ms\r\n",,terminal_output +8881,12433985,"TERMINAL",0,0,"Step 2851, loss: 0.9055765271186829, step time: 17.577409744262695ms\r\n",,terminal_output +8882,12434047,"TERMINAL",0,0,"Step 2852, loss: 0.9832655191421509, step time: 17.7614688873291ms\r\nStep 2853, loss: 0.9151949286460876, step time: 18.94974708557129ms\r\nStep 2854, loss: 0.8967407941818237, step time: 17.62223243713379ms\r\n",,terminal_output +8883,12434110,"TERMINAL",0,0,"Step 2855, loss: 0.8846573233604431, step time: 17.51422882080078ms\r\n",,terminal_output +8884,12434173,"TERMINAL",0,0,"Step 2856, loss: 0.901608407497406, step time: 17.51089096069336ms\r\n",,terminal_output +8885,12434236,"TERMINAL",0,0,"Step 2857, loss: 1.0348650217056274, step time: 17.839670181274414ms\r\n",,terminal_output +8886,12434329,"TERMINAL",0,0,"Step 2858, loss: 0.9087278842926025, step time: 17.30036735534668ms\r\n",,terminal_output +8887,12434390,"TERMINAL",0,0,"Step 2859, loss: 0.895134687423706, step time: 17.85564422607422ms\r\n",,terminal_output +8888,12434503,"TERMINAL",0,0,"Step 2860, loss: 0.8974598050117493, step time: 17.805099487304688ms\r\nStep 2861, loss: 1.1219124794006348, step time: 17.786026000976562ms\r\n",,terminal_output +8889,12434563,"TERMINAL",0,0,"Step 2862, loss: 0.8958275318145752, step time: 17.528772354125977ms\r\n",,terminal_output 
+8890,12434624,"TERMINAL",0,0,"Step 2863, loss: 1.0612642765045166, step time: 17.467498779296875ms\r\n",,terminal_output +8891,12434684,"TERMINAL",0,0,"Step 2864, loss: 0.9638949036598206, step time: 17.244338989257812ms\r\n",,terminal_output +8892,12434745,"TERMINAL",0,0,"Step 2865, loss: 0.9024931788444519, step time: 18.034934997558594ms\r\n",,terminal_output +8893,12434848,"TERMINAL",0,0,"Step 2866, loss: 0.9444332718849182, step time: 17.42863655090332ms\r\n",,terminal_output +8894,12434910,"TERMINAL",0,0,"Step 2867, loss: 0.962113618850708, step time: 17.754554748535156ms\r\n",,terminal_output +8895,12434973,"TERMINAL",0,0,"Step 2868, loss: 0.9282876253128052, step time: 17.778635025024414ms\r\n",,terminal_output +8896,12435037,"TERMINAL",0,0,"Step 2869, loss: 0.8909265398979187, step time: 17.736434936523438ms\r\n",,terminal_output +8897,12435101,"TERMINAL",0,0,"Step 2870, loss: 0.9059036374092102, step time: 17.508268356323242ms\r\n",,terminal_output +8898,12435153,"TERMINAL",0,0,"Step 2871, loss: 0.9427759647369385, step time: 18.116474151611328ms\r\n",,terminal_output +8899,12435257,"TERMINAL",0,0,"Step 2872, loss: 0.9049775004386902, step time: 17.46821403503418ms\r\nStep 2873, loss: 0.8906242847442627, step time: 17.902851104736328ms\r\n",,terminal_output +8900,12435386,"TERMINAL",0,0,"Step 2874, loss: 0.8947702646255493, step time: 17.687082290649414ms\r\nStep 2875, loss: 0.8684954047203064, step time: 17.458200454711914ms\r\n",,terminal_output +8901,12435509,"TERMINAL",0,0,"Step 2876, loss: 0.9012208580970764, step time: 17.46535301208496ms\r\nStep 2877, loss: 1.6020839214324951, step time: 18.100738525390625ms\r\n",,terminal_output +8902,12435575,"TERMINAL",0,0,"Step 2878, loss: 0.894561231136322, step time: 17.45128631591797ms\r\n",,terminal_output +8903,12435634,"TERMINAL",0,0,"Step 2879, loss: 0.9468452334403992, step time: 17.978429794311523ms\r\n",,terminal_output +8904,12435696,"TERMINAL",0,0,"Step 2880, loss: 1.3824610710144043, step time: 17.73667335510254ms\r\n",,terminal_output +8905,12435823,"TERMINAL",0,0,"Step 2881, loss: 0.8790562748908997, step time: 17.38905906677246ms\r\nStep 2882, loss: 0.9032405018806458, step time: 17.45152473449707ms\r\n",,terminal_output +8906,12435889,"TERMINAL",0,0,"Step 2883, loss: 0.8730911612510681, step time: 17.798185348510742ms\r\n",,terminal_output +8907,12435953,"TERMINAL",0,0,"Step 2884, loss: 0.8980988264083862, step time: 17.399311065673828ms\r\n",,terminal_output +8908,12436014,"TERMINAL",0,0,"Step 2885, loss: 0.8788546919822693, step time: 17.706632614135742ms\r\n",,terminal_output +8909,12436078,"TERMINAL",0,0,"Step 2886, loss: 0.8833661675453186, step time: 17.65608787536621ms\r\n",,terminal_output +8910,12436140,"TERMINAL",0,0,"Step 2887, loss: 0.8796710968017578, step time: 17.32921600341797ms\r\n",,terminal_output +8911,12436203,"TERMINAL",0,0,"Step 2888, loss: 0.9269954562187195, step time: 17.229318618774414ms\r\n",,terminal_output +8912,12436541,"TERMINAL",0,0,"Step 2889, loss: 0.9034699201583862, step time: 336.70687675476074ms\r\n",,terminal_output +8913,12436609,"TERMINAL",0,0,"Step 2890, loss: 0.8919618129730225, step time: 25.09903907775879ms\r\n",,terminal_output +8914,12436673,"TERMINAL",0,0,"Step 2891, loss: 0.9852052927017212, step time: 20.278215408325195ms\r\n",,terminal_output +8915,12436741,"TERMINAL",0,0,"Step 2892, loss: 0.8988160490989685, step time: 18.48435401916504ms\r\n",,terminal_output +8916,12436866,"TERMINAL",0,0,"Step 2893, loss: 0.891446590423584, step time: 18.094539642333984ms\r\nStep 
2894, loss: 0.8785429000854492, step time: 18.489360809326172ms\r\n",,terminal_output +8917,12436956,"TERMINAL",0,0,"Step 2895, loss: 0.9151846766471863, step time: 18.103361129760742ms\r\n",,terminal_output +8918,12437008,"TERMINAL",0,0,"Step 2896, loss: 0.8631590008735657, step time: 17.513275146484375ms\r\n",,terminal_output +8919,12437075,"TERMINAL",0,0,"Step 2897, loss: 0.8565736413002014, step time: 17.834901809692383ms\r\n",,terminal_output +8920,12437136,"TERMINAL",0,0,"Step 2898, loss: 0.8839073181152344, step time: 17.73238182067871ms\r\n",,terminal_output +8921,12437209,"TERMINAL",0,0,"Step 2899, loss: 0.8551111817359924, step time: 18.07713508605957ms\r\n",,terminal_output +8922,12437317,"TERMINAL",0,0,"Step 2900, loss: 0.8777234554290771, step time: 17.406940460205078ms\r\nStep 2901, loss: 1.276871919631958, step time: 18.080711364746094ms\r\n",,terminal_output +8923,12437380,"TERMINAL",0,0,"Step 2902, loss: 0.8713765144348145, step time: 17.61794090270996ms\r\n",,terminal_output +8924,12437449,"TERMINAL",0,0,"Step 2903, loss: 0.900939404964447, step time: 17.619609832763672ms\r\n",,terminal_output +8925,12437509,"TERMINAL",0,0,"Step 2904, loss: 0.870836615562439, step time: 17.627239227294922ms\r\n",,terminal_output +8926,12437571,"TERMINAL",0,0,"Step 2905, loss: 0.8679755330085754, step time: 17.332792282104492ms\r\n",,terminal_output +8927,12437635,"TERMINAL",0,0,"Step 2906, loss: 0.8849374055862427, step time: 17.21024513244629ms\r\n",,terminal_output +8928,12437708,"TERMINAL",0,0,"Step 2907, loss: 1.328971028327942, step time: 17.752885818481445ms\r\n",,terminal_output +8929,12437763,"TERMINAL",0,0,"Step 2908, loss: 1.0304135084152222, step time: 16.978740692138672ms\r\n",,terminal_output +8930,12437829,"TERMINAL",0,0,"Step 2909, loss: 0.8578675985336304, step time: 17.312049865722656ms\r\n",,terminal_output +8931,12437890,"TERMINAL",0,0,"Step 2910, loss: 0.8708962202072144, step time: 17.642498016357422ms\r\n",,terminal_output +8932,12437955,"TERMINAL",0,0,"Step 2911, loss: 0.8999071717262268, step time: 17.33565330505371ms\r\n",,terminal_output +8933,12438103,"TERMINAL",0,0,"Step 2912, loss: 0.8748328685760498, step time: 17.38119125366211ms\r\nStep 2913, loss: 1.1907212734222412, step time: 18.80669593811035ms\r\n",,terminal_output +8934,12438169,"TERMINAL",0,0,"Step 2914, loss: 0.8682526350021362, step time: 17.353534698486328ms\r\n",,terminal_output +8935,12438232,"TERMINAL",0,0,"Step 2915, loss: 0.8597549200057983, step time: 17.503738403320312ms\r\n",,terminal_output +8936,12438297,"TERMINAL",0,0,"Step 2916, loss: 1.174391269683838, step time: 17.78125762939453ms\r\n",,terminal_output +8937,12438361,"TERMINAL",0,0,"Step 2917, loss: 0.920044481754303, step time: 17.530441284179688ms\r\n",,terminal_output +8938,12438424,"TERMINAL",0,0,"Step 2918, loss: 0.8708925843238831, step time: 17.275571823120117ms\r\n",,terminal_output +8939,12438495,"TERMINAL",0,0,"Step 2919, loss: 0.8787393569946289, step time: 17.833948135375977ms\r\n",,terminal_output +8940,12438557,"TERMINAL",0,0,"Step 2920, loss: 0.8484177589416504, step time: 17.083168029785156ms\r\n",,terminal_output +8941,12438655,"TERMINAL",0,0,"Step 2921, loss: 0.9298020005226135, step time: 17.397642135620117ms\r\nStep 2922, loss: 0.8627092242240906, step time: 17.410755157470703ms\r\n",,terminal_output +8942,12438711,"TERMINAL",0,0,"Step 2923, loss: 0.9310205578804016, step time: 17.387866973876953ms\r\n",,terminal_output +8943,12438768,"TERMINAL",0,0,"Step 2924, loss: 0.8426164388656616, step time: 
17.035245895385742ms\r\n",,terminal_output +8944,12438900,"TERMINAL",0,0,"Step 2925, loss: 0.8529980778694153, step time: 17.785072326660156ms\r\nStep 2926, loss: 0.8585878014564514, step time: 17.20905303955078ms\r\n",,terminal_output +8945,12438963,"TERMINAL",0,0,"Step 2927, loss: 0.864342212677002, step time: 19.346237182617188ms\r\n",,terminal_output +8946,12439026,"TERMINAL",0,0,"Step 2928, loss: 0.8587469458580017, step time: 17.83013343811035ms\r\n",,terminal_output +8947,12439091,"TERMINAL",0,0,"Step 2929, loss: 0.9372100830078125, step time: 17.317771911621094ms\r\n",,terminal_output +8948,12439155,"TERMINAL",0,0,"Step 2930, loss: 0.8464034795761108, step time: 17.378807067871094ms\r\n",,terminal_output +8949,12439218,"TERMINAL",0,0,"Step 2931, loss: 0.8421271443367004, step time: 18.24164390563965ms\r\n",,terminal_output +8950,12439287,"TERMINAL",0,0,"Step 2932, loss: 0.882346510887146, step time: 17.493724822998047ms\r\n",,terminal_output +8951,12439347,"TERMINAL",0,0,"Step 2933, loss: 0.8341352939605713, step time: 17.823457717895508ms\r\n",,terminal_output +8952,12439414,"TERMINAL",0,0,"Step 2934, loss: 0.889587938785553, step time: 17.641544342041016ms\r\n",,terminal_output +8953,12439478,"TERMINAL",0,0,"Step 2935, loss: 0.8380404710769653, step time: 17.383575439453125ms\r\n",,terminal_output +8954,12439541,"TERMINAL",0,0,"Step 2936, loss: 0.8744531869888306, step time: 17.089128494262695ms\r\n",,terminal_output +8955,12439607,"TERMINAL",0,0,"Step 2937, loss: 0.8671919107437134, step time: 17.876863479614258ms\r\n",,terminal_output +8956,12439669,"TERMINAL",0,0,"Step 2938, loss: 0.8997387886047363, step time: 17.072439193725586ms\r\n",,terminal_output +8957,12439735,"TERMINAL",0,0,"Step 2939, loss: 0.8489466309547424, step time: 17.567157745361328ms\r\n",,terminal_output +8958,12439791,"TERMINAL",0,0,"Step 2940, loss: 0.9028115272521973, step time: 17.602920532226562ms\r\n",,terminal_output +8959,12439850,"TERMINAL",0,0,"Step 2941, loss: 0.8476864695549011, step time: 17.62986183166504ms\r\n",,terminal_output +8960,12439920,"TERMINAL",0,0,"Step 2942, loss: 0.8390682339668274, step time: 17.211437225341797ms\r\n",,terminal_output +8961,12439985,"TERMINAL",0,0,"Step 2943, loss: 0.8581759929656982, step time: 17.951488494873047ms\r\n",,terminal_output +8962,12440046,"TERMINAL",0,0,"Step 2944, loss: 0.8475777506828308, step time: 17.188549041748047ms\r\n",,terminal_output +8963,12440112,"TERMINAL",0,0,"Step 2945, loss: 0.9018814563751221, step time: 17.619848251342773ms\r\n",,terminal_output +8964,12440173,"TERMINAL",0,0,"Step 2946, loss: 0.8557964563369751, step time: 17.887592315673828ms\r\n",,terminal_output +8965,12440237,"TERMINAL",0,0,"Step 2947, loss: 1.0496677160263062, step time: 17.713308334350586ms\r\n",,terminal_output +8966,12440314,"TERMINAL",0,0,"Step 2948, loss: 0.8379666209220886, step time: 17.600059509277344ms\r\n",,terminal_output +8967,12440379,"TERMINAL",0,0,"Step 2949, loss: 1.0217318534851074, step time: 31.867504119873047ms\r\n",,terminal_output +8968,12440432,"TERMINAL",0,0,"Step 2950, loss: 0.9536195993423462, step time: 17.487287521362305ms\r\n",,terminal_output +8969,12440528,"TERMINAL",0,0,"Step 2951, loss: 0.8603991270065308, step time: 18.06807518005371ms\r\n",,terminal_output +8970,12440582,"TERMINAL",0,0,"Step 2952, loss: 0.9177143573760986, step time: 18.01609992980957ms\r\n",,terminal_output +8971,12440687,"TERMINAL",0,0,"Step 2953, loss: 0.8368696570396423, step time: 17.444610595703125ms\r\nStep 2954, loss: 0.8335176706314087, step time: 
17.26388931274414ms\r\n",,terminal_output +8972,12440751,"TERMINAL",0,0,"Step 2955, loss: 0.8852279186248779, step time: 18.149852752685547ms\r\n",,terminal_output +8973,12440876,"TERMINAL",0,0,"Step 2956, loss: 1.0782028436660767, step time: 17.63772964477539ms\r\nStep 2957, loss: 0.8674063086509705, step time: 17.73357391357422ms\r\n",,terminal_output +8974,12440979,"TERMINAL",0,0,"Step 2958, loss: 0.8283566832542419, step time: 17.819643020629883ms\r\n",,terminal_output +8975,12441033,"TERMINAL",0,0,"Step 2959, loss: 0.8658210635185242, step time: 17.26222038269043ms\r\n",,terminal_output +8976,12441139,"TERMINAL",0,0,"Step 2960, loss: 0.8475369811058044, step time: 17.46082305908203ms\r\nStep 2961, loss: 0.8644816279411316, step time: 17.856597900390625ms\r\n",,terminal_output +8977,12441202,"TERMINAL",0,0,"Step 2962, loss: 0.8282126188278198, step time: 17.505168914794922ms\r\n",,terminal_output +8978,12441265,"TERMINAL",0,0,"Step 2963, loss: 1.2363680601119995, step time: 17.760753631591797ms\r\n",,terminal_output +8979,12441329,"TERMINAL",0,0,"Step 2964, loss: 0.8487050533294678, step time: 18.081188201904297ms\r\n",,terminal_output +8980,12441392,"TERMINAL",0,0,"Step 2965, loss: 0.8571843504905701, step time: 17.168045043945312ms\r\n",,terminal_output +8981,12441457,"TERMINAL",0,0,"Step 2966, loss: 0.8586033582687378, step time: 17.182111740112305ms\r\n",,terminal_output +8982,12441519,"TERMINAL",0,0,"Step 2967, loss: 1.0117745399475098, step time: 18.310070037841797ms\r\n",,terminal_output +8983,12441582,"TERMINAL",0,0,"Step 2968, loss: 0.8320257067680359, step time: 17.186641693115234ms\r\n",,terminal_output +8984,12441644,"TERMINAL",0,0,"Step 2969, loss: 0.826644241809845, step time: 18.067598342895508ms\r\n",,terminal_output +8985,12441718,"TERMINAL",0,0,"Step 2970, loss: 0.8703672885894775, step time: 24.365663528442383ms\r\n",,terminal_output +8986,12441772,"TERMINAL",0,0,"Step 2971, loss: 1.1082751750946045, step time: 24.74498748779297ms\r\n",,terminal_output +8987,12441837,"TERMINAL",0,0,"Step 2972, loss: 0.8475077152252197, step time: 24.260997772216797ms\r\n",,terminal_output +8988,12441941,"TERMINAL",0,0,"Step 2973, loss: 0.8245114088058472, step time: 25.438785552978516ms\r\n",,terminal_output +8989,12442004,"TERMINAL",0,0,"Step 2974, loss: 0.8557788133621216, step time: 25.00772476196289ms\r\n",,terminal_output +8990,12442058,"TERMINAL",0,0,"Step 2975, loss: 0.8811825513839722, step time: 25.70819854736328ms\r\n",,terminal_output +8991,12442123,"TERMINAL",0,0,"Step 2976, loss: 0.8701484799385071, step time: 25.555133819580078ms\r\n",,terminal_output +8992,12442188,"TERMINAL",0,0,"Step 2977, loss: 1.7942014932632446, step time: 24.95574951171875ms\r\n",,terminal_output +8993,12442250,"TERMINAL",0,0,"Step 2978, loss: 0.8542850613594055, step time: 22.17245101928711ms\r\n",,terminal_output +8994,12442316,"TERMINAL",0,0,"Step 2979, loss: 0.8899618983268738, step time: 19.960641860961914ms\r\n",,terminal_output +8995,12442382,"TERMINAL",0,0,"Step 2980, loss: 0.8407869338989258, step time: 18.09096336364746ms\r\n",,terminal_output +8996,12442445,"TERMINAL",0,0,"Step 2981, loss: 0.8719552159309387, step time: 18.014907836914062ms\r\n",,terminal_output +8997,12442510,"TERMINAL",0,0,"Step 2982, loss: 0.8337448835372925, step time: 21.630525588989258ms\r\n",,terminal_output +8998,12442602,"TERMINAL",0,0,"Step 2983, loss: 0.8485339879989624, step time: 19.07181739807129ms\r\n",,terminal_output +8999,12442654,"TERMINAL",0,0,"Step 2984, loss: 0.9059914350509644, step time: 
17.29416847229004ms\r\n",,terminal_output +9000,12442705,"TERMINAL",0,0,"Step 2985, loss: 0.8676141500473022, step time: 18.146514892578125ms\r\n",,terminal_output +9001,12442811,"TERMINAL",0,0,"Step 2986, loss: 0.8398300409317017, step time: 17.57073402404785ms\r\nStep 2987, loss: 0.8372301459312439, step time: 18.07570457458496ms\r\n",,terminal_output +9002,12442878,"TERMINAL",0,0,"Step 2988, loss: 0.8495950698852539, step time: 17.604827880859375ms\r\n",,terminal_output +9003,12442938,"TERMINAL",0,0,"Step 2989, loss: 0.889206051826477, step time: 17.298460006713867ms\r\n",,terminal_output +9004,12442999,"TERMINAL",0,0,"Step 2990, loss: 1.0622227191925049, step time: 17.479419708251953ms\r\n",,terminal_output +9005,12443092,"TERMINAL",0,0,"Step 2991, loss: 1.2134943008422852, step time: 17.667770385742188ms\r\n",,terminal_output +9006,12443143,"TERMINAL",0,0,"Step 2992, loss: 0.8160448670387268, step time: 17.605066299438477ms\r\n",,terminal_output +9007,12443238,"TERMINAL",0,0,"Step 2993, loss: 0.8131226301193237, step time: 17.76599884033203ms\r\n",,terminal_output +9008,12443290,"TERMINAL",0,0,"Step 2994, loss: 0.8705707788467407, step time: 17.786741256713867ms\r\n",,terminal_output +9009,12443343,"TERMINAL",0,0,"Step 2995, loss: 0.841334879398346, step time: 17.39192008972168ms\r\n",,terminal_output +9010,12443451,"TERMINAL",0,0,"Step 2996, loss: 0.7939363121986389, step time: 17.347097396850586ms\r\nStep 2997, loss: 0.8174118995666504, step time: 17.733335494995117ms\r\n",,terminal_output +9011,12443513,"TERMINAL",0,0,"Step 2998, loss: 1.1525896787643433, step time: 17.20881462097168ms\r\n",,terminal_output +9012,12443576,"TERMINAL",0,0,"Step 2999, loss: 0.8131028413772583, step time: 17.50349998474121ms\r\n",,terminal_output +9013,12446266,"TERMINAL",0,0,"Step 3000, loss: 0.850567638874054, step time: 25.977373123168945ms\r\n",,terminal_output +9014,12446404,"TERMINAL",0,0,"Step 3001, loss: 0.8848626613616943, step time: 25.01082420349121ms\r\nStep 3002, loss: 0.8090106248855591, step time: 20.087718963623047ms\r\n",,terminal_output +9015,12446467,"TERMINAL",0,0,"Step 3003, loss: 0.8478677868843079, step time: 19.638776779174805ms\r\n",,terminal_output +9016,12446530,"TERMINAL",0,0,"Step 3004, loss: 1.8445703983306885, step time: 18.667936325073242ms\r\n",,terminal_output +9017,12446596,"TERMINAL",0,0,"Step 3005, loss: 0.8960731029510498, step time: 18.746376037597656ms\r\n",,terminal_output +9018,12446658,"TERMINAL",0,0,"Step 3006, loss: 0.8247637748718262, step time: 18.154144287109375ms\r\n",,terminal_output +9019,12446731,"TERMINAL",0,0,"Step 3007, loss: 0.8225172162055969, step time: 19.323348999023438ms\r\n",,terminal_output +9020,12446784,"TERMINAL",0,0,"Step 3008, loss: 0.8857836723327637, step time: 18.311023712158203ms\r\n",,terminal_output +9021,12446881,"TERMINAL",0,0,"Step 3009, loss: 0.8787814974784851, step time: 20.737409591674805ms\r\n",,terminal_output +9022,12446944,"TERMINAL",0,0,"Step 3010, loss: 0.819672167301178, step time: 18.957138061523438ms\r\n",,terminal_output +9023,12447007,"TERMINAL",0,0,"Step 3011, loss: 0.812210738658905, step time: 18.6312198638916ms\r\n",,terminal_output +9024,12447071,"TERMINAL",0,0,"Step 3012, loss: 0.8225824236869812, step time: 18.018722534179688ms\r\n",,terminal_output +9025,12447139,"TERMINAL",0,0,"Step 3013, loss: 0.8486559391021729, step time: 18.810033798217773ms\r\n",,terminal_output +9026,12447202,"TERMINAL",0,0,"Step 3014, loss: 0.9301778078079224, step time: 17.93503761291504ms\r\n",,terminal_output 
+9027,12447268,"TERMINAL",0,0,"Step 3015, loss: 0.8390700221061707, step time: 18.13030242919922ms\r\n",,terminal_output +9028,12447332,"TERMINAL",0,0,"Step 3016, loss: 1.1916884183883667, step time: 18.100500106811523ms\r\n",,terminal_output +9029,12447441,"TERMINAL",0,0,"Step 3017, loss: 0.9435974359512329, step time: 18.210649490356445ms\r\nStep 3018, loss: 0.852699339389801, step time: 17.95363426208496ms\r\n",,terminal_output +9030,12447535,"TERMINAL",0,0,"Step 3019, loss: 0.8515487909317017, step time: 18.56827735900879ms\r\n",,terminal_output +9031,12447589,"TERMINAL",0,0,"Step 3020, loss: 0.8214482665061951, step time: 17.823219299316406ms\r\n",,terminal_output +9032,12447700,"TERMINAL",0,0,"Step 3021, loss: 0.8474302291870117, step time: 18.17011833190918ms\r\nStep 3022, loss: 0.8032711148262024, step time: 18.193960189819336ms\r\n",,terminal_output +9033,12447815,"TERMINAL",0,0,"Step 3023, loss: 0.7985700368881226, step time: 18.186092376708984ms\r\nStep 3024, loss: 1.0279778242111206, step time: 17.943382263183594ms\r\n",,terminal_output +9034,12447886,"TERMINAL",0,0,"Step 3025, loss: 1.6211779117584229, step time: 18.645524978637695ms\r\n",,terminal_output +9035,12447980,"TERMINAL",0,0,"Step 3026, loss: 0.8200723528862, step time: 18.05257797241211ms\r\n",,terminal_output +9036,12448089,"TERMINAL",0,0,"Step 3027, loss: 0.9211355447769165, step time: 18.208026885986328ms\r\nStep 3028, loss: 0.9335607290267944, step time: 18.32127571105957ms\r\n",,terminal_output +9037,12448150,"TERMINAL",0,0,"Step 3029, loss: 0.8623762726783752, step time: 18.254518508911133ms\r\n",,terminal_output +9038,12448220,"TERMINAL",0,0,"Step 3030, loss: 1.0697376728057861, step time: 20.178556442260742ms\r\n",,terminal_output +9039,12448276,"TERMINAL",0,0,"Step 3031, loss: 0.8634429574012756, step time: 19.025564193725586ms\r\n",,terminal_output +9040,12448338,"TERMINAL",0,0,"Step 3032, loss: 0.8211304545402527, step time: 17.792463302612305ms\r\n",,terminal_output +9041,12448400,"TERMINAL",0,0,"Step 3033, loss: 0.8766558170318604, step time: 18.16582679748535ms\r\n",,terminal_output +9042,12448495,"TERMINAL",0,0,"Step 3034, loss: 0.9092847108840942, step time: 18.309354782104492ms\r\n",,terminal_output +9043,12448548,"TERMINAL",0,0,"Step 3035, loss: 0.8078023791313171, step time: 18.265247344970703ms\r\n",,terminal_output +9044,12448636,"TERMINAL",0,0,"Step 3036, loss: 0.8172566294670105, step time: 19.277334213256836ms\r\n",,terminal_output +9045,12448689,"TERMINAL",0,0,"Step 3037, loss: 0.8347296118736267, step time: 19.030332565307617ms\r\n",,terminal_output +9046,12448799,"TERMINAL",0,0,"Step 3038, loss: 1.0485427379608154, step time: 18.256425857543945ms\r\nStep 3039, loss: 0.8177950978279114, step time: 18.788576126098633ms\r\n",,terminal_output +9047,12448867,"TERMINAL",0,0,"Step 3040, loss: 0.8417245745658875, step time: 18.778324127197266ms\r\n",,terminal_output +9048,12448929,"TERMINAL",0,0,"Step 3041, loss: 0.8764176964759827, step time: 18.28312873840332ms\r\n",,terminal_output +9049,12448995,"TERMINAL",0,0,"Step 3042, loss: 0.7931830286979675, step time: 18.968820571899414ms\r\n",,terminal_output +9050,12449059,"TERMINAL",0,0,"Step 3043, loss: 0.7969344258308411, step time: 19.521713256835938ms\r\n",,terminal_output +9051,12449124,"TERMINAL",0,0,"Step 3044, loss: 0.8297790288925171, step time: 18.220186233520508ms\r\n",,terminal_output +9052,12449189,"TERMINAL",0,0,"Step 3045, loss: 0.8070472478866577, step time: 18.340349197387695ms\r\n",,terminal_output 
+9053,12449250,"TERMINAL",0,0,"Step 3046, loss: 0.805249810218811, step time: 18.553972244262695ms\r\n",,terminal_output +9054,12449313,"TERMINAL",0,0,"Step 3047, loss: 0.9424448013305664, step time: 18.196582794189453ms\r\n",,terminal_output +9055,12449378,"TERMINAL",0,0,"Step 3048, loss: 0.8041410446166992, step time: 18.320322036743164ms\r\n",,terminal_output +9056,12449443,"TERMINAL",0,0,"Step 3049, loss: 2.180765390396118, step time: 18.644094467163086ms\r\n",,terminal_output +9057,12449503,"TERMINAL",0,0,"Step 3050, loss: 0.8075015544891357, step time: 18.171310424804688ms\r\n",,terminal_output +9058,12449572,"TERMINAL",0,0,"Step 3051, loss: 0.8155456185340881, step time: 18.41878890991211ms\r\n",,terminal_output +9059,12449637,"TERMINAL",0,0,"Step 3052, loss: 0.8487077951431274, step time: 18.743515014648438ms\r\n",,terminal_output +9060,12449700,"TERMINAL",0,0,"Step 3053, loss: 1.388755440711975, step time: 18.344879150390625ms\r\n",,terminal_output +9061,12449767,"TERMINAL",0,0,"Step 3054, loss: 0.7923840880393982, step time: 18.399953842163086ms\r\n",,terminal_output +9062,12449833,"TERMINAL",0,0,"Step 3055, loss: 0.8269585967063904, step time: 18.52869987487793ms\r\n",,terminal_output +9063,12449896,"TERMINAL",0,0,"Step 3056, loss: 0.8047820329666138, step time: 18.038511276245117ms\r\n",,terminal_output +9064,12450009,"TERMINAL",0,0,"Step 3057, loss: 1.091185450553894, step time: 18.308162689208984ms\r\nStep 3058, loss: 1.583943247795105, step time: 18.230676651000977ms\r\n",,terminal_output +9065,12450080,"TERMINAL",0,0,"Step 3059, loss: 0.8542832732200623, step time: 18.114089965820312ms\r\n",,terminal_output +9066,12450148,"TERMINAL",0,0,"Step 3060, loss: 0.8109424114227295, step time: 18.234729766845703ms\r\n",,terminal_output +9067,12450211,"TERMINAL",0,0,"Step 3061, loss: 0.8398407101631165, step time: 18.753767013549805ms\r\n",,terminal_output +9068,12450274,"TERMINAL",0,0,"Step 3062, loss: 0.8179522156715393, step time: 18.286943435668945ms\r\n",,terminal_output +9069,12450379,"TERMINAL",0,0,"Step 3063, loss: 0.8168396353721619, step time: 18.100500106811523ms\r\n",,terminal_output +9070,12450423,"TERMINAL",0,0,"Step 3064, loss: 0.8179104924201965, step time: 21.69036865234375ms\r\n",,terminal_output +9071,12450475,"TERMINAL",0,0,"Step 3065, loss: 0.807386577129364, step time: 18.340110778808594ms\r\n",,terminal_output +9072,12450595,"TERMINAL",0,0,"Step 3066, loss: 0.8145003318786621, step time: 18.04184913635254ms\r\nStep 3067, loss: 0.8222499489784241, step time: 18.658161163330078ms\r\n",,terminal_output +9073,12450655,"TERMINAL",0,0,"Step 3068, loss: 0.9794687032699585, step time: 18.160581588745117ms\r\n",,terminal_output +9074,12450731,"TERMINAL",0,0,"Step 3069, loss: 0.801228940486908, step time: 18.33486557006836ms\r\n",,terminal_output +9075,12450785,"TERMINAL",0,0,"Step 3070, loss: 0.8044925332069397, step time: 18.28622817993164ms\r\n",,terminal_output +9076,12450883,"TERMINAL",0,0,"Step 3071, loss: 0.8330696821212769, step time: 18.16105842590332ms\r\n",,terminal_output +9077,12450990,"TERMINAL",0,0,"Step 3072, loss: 0.8395012021064758, step time: 18.2342529296875ms\r\nStep 3073, loss: 0.7902594208717346, step time: 18.55182647705078ms\r\n",,terminal_output +9078,12451056,"TERMINAL",0,0,"Step 3074, loss: 0.7849982380867004, step time: 18.20230484008789ms\r\n",,terminal_output +9079,12451120,"TERMINAL",0,0,"Step 3075, loss: 0.8063144683837891, step time: 18.425464630126953ms\r\n",,terminal_output +9080,12451183,"TERMINAL",0,0,"Step 3076, loss: 
0.8127094507217407, step time: 18.463134765625ms\r\n",,terminal_output +9081,12451246,"TERMINAL",0,0,"Step 3077, loss: 0.8270286917686462, step time: 18.39280128479004ms\r\n",,terminal_output +9082,12451315,"TERMINAL",0,0,"Step 3078, loss: 0.8395557403564453, step time: 18.134117126464844ms\r\n",,terminal_output +9083,12451374,"TERMINAL",0,0,"Step 3079, loss: 0.7740931510925293, step time: 18.729209899902344ms\r\n",,terminal_output +9084,12451470,"TERMINAL",0,0,"Step 3080, loss: 0.7830696105957031, step time: 18.369436264038086ms\r\n",,terminal_output +9085,12451579,"TERMINAL",0,0,"Step 3081, loss: 1.020735740661621, step time: 18.204450607299805ms\r\nStep 3082, loss: 0.7692108750343323, step time: 18.311023712158203ms\r\n",,terminal_output +9086,12451640,"TERMINAL",0,0,"Step 3083, loss: 0.795935869216919, step time: 18.494606018066406ms\r\n",,terminal_output +9087,12451706,"TERMINAL",0,0,"Step 3084, loss: 0.7822022438049316, step time: 18.16868782043457ms\r\n",,terminal_output +9088,12451819,"TERMINAL",0,0,"Step 3085, loss: 1.056321382522583, step time: 18.718719482421875ms\r\nStep 3086, loss: 0.7691999673843384, step time: 18.208742141723633ms\r\n",,terminal_output +9089,12451911,"TERMINAL",0,0,"Step 3087, loss: 1.2736176252365112, step time: 18.77450942993164ms\r\n",,terminal_output +9090,12451965,"TERMINAL",0,0,"Step 3088, loss: 0.8231275677680969, step time: 18.297910690307617ms\r\n",,terminal_output +9091,12452111,"TERMINAL",0,0,"Step 3089, loss: 0.778695821762085, step time: 18.543004989624023ms\r\nStep 3090, loss: 0.9841760396957397, step time: 18.241167068481445ms\r\n",,terminal_output +9092,12452164,"TERMINAL",0,0,"Step 3091, loss: 0.7656779885292053, step time: 18.962383270263672ms\r\n",,terminal_output +9093,12452270,"TERMINAL",0,0,"Step 3092, loss: 0.7719867825508118, step time: 18.22972297668457ms\r\nStep 3093, loss: 0.8792111873626709, step time: 18.434762954711914ms\r\n",,terminal_output +9094,12452395,"TERMINAL",0,0,"Step 3094, loss: 0.8037866950035095, step time: 18.84293556213379ms\r\nStep 3095, loss: 0.793001651763916, step time: 18.04208755493164ms\r\n",,terminal_output +9095,12452455,"TERMINAL",0,0,"Step 3096, loss: 0.9148339033126831, step time: 18.032550811767578ms\r\n",,terminal_output +9096,12452523,"TERMINAL",0,0,"Step 3097, loss: 0.8087784647941589, step time: 18.62645149230957ms\r\n",,terminal_output +9097,12452587,"TERMINAL",0,0,"Step 3098, loss: 0.7819738984107971, step time: 17.752885818481445ms\r\n",,terminal_output +9098,12452651,"TERMINAL",0,0,"Step 3099, loss: 0.8053283095359802, step time: 18.289804458618164ms\r\n",,terminal_output +9099,12452716,"TERMINAL",0,0,"Step 3100, loss: 0.7815624475479126, step time: 18.304109573364258ms\r\n",,terminal_output +9100,12452783,"TERMINAL",0,0,"Step 3101, loss: 0.8744439482688904, step time: 18.342971801757812ms\r\n",,terminal_output +9101,12452843,"TERMINAL",0,0,"Step 3102, loss: 0.8103324770927429, step time: 18.06640625ms\r\n",,terminal_output +9102,12452907,"TERMINAL",0,0,"Step 3103, loss: 1.245047926902771, step time: 18.5244083404541ms\r\n",,terminal_output +9103,12452971,"TERMINAL",0,0,"Step 3104, loss: 0.7899085879325867, step time: 18.12434196472168ms\r\n",,terminal_output +9104,12453035,"TERMINAL",0,0,"Step 3105, loss: 0.7777484059333801, step time: 17.91858673095703ms\r\n",,terminal_output +9105,12453395,"TERMINAL",0,0,"Step 3106, loss: 0.8158565759658813, step time: 292.28901863098145ms\r\nStep 3107, loss: 0.810300886631012, step time: 25.82859992980957ms\r\n",,terminal_output 
+9106,12453461,"TERMINAL",0,0,"Step 3108, loss: 0.7730057835578918, step time: 20.554780960083008ms\r\n",,terminal_output +9107,12453564,"TERMINAL",0,0,"Step 3109, loss: 0.8454558253288269, step time: 19.657135009765625ms\r\n",,terminal_output +9108,12453666,"TERMINAL",0,0,"Step 3110, loss: 0.7939194440841675, step time: 18.64790916442871ms\r\nStep 3111, loss: 0.7768331170082092, step time: 18.85700225830078ms\r\n",,terminal_output +9109,12453720,"TERMINAL",0,0,"Step 3112, loss: 0.7720035910606384, step time: 18.85080337524414ms\r\n",,terminal_output +9110,12453783,"TERMINAL",0,0,"Step 3113, loss: 0.7636235952377319, step time: 18.473148345947266ms\r\n",,terminal_output +9111,12453844,"TERMINAL",0,0,"Step 3114, loss: 0.7994507551193237, step time: 18.281936645507812ms\r\n",,terminal_output +9112,12453909,"TERMINAL",0,0,"Step 3115, loss: 0.7678641080856323, step time: 19.057750701904297ms\r\n",,terminal_output +9113,12453973,"TERMINAL",0,0,"Step 3116, loss: 1.7145434617996216, step time: 18.43738555908203ms\r\n",,terminal_output +9114,12454036,"TERMINAL",0,0,"Step 3117, loss: 1.0640279054641724, step time: 19.78325843811035ms\r\n",,terminal_output +9115,12454100,"TERMINAL",0,0,"Step 3118, loss: 0.887392520904541, step time: 19.163846969604492ms\r\n",,terminal_output +9116,12454165,"TERMINAL",0,0,"Step 3119, loss: 1.8286516666412354, step time: 18.46623420715332ms\r\n",,terminal_output +9117,12454231,"TERMINAL",0,0,"Step 3120, loss: 0.7879194021224976, step time: 18.57161521911621ms\r\n",,terminal_output +9118,12454300,"TERMINAL",0,0,"Step 3121, loss: 0.7980645298957825, step time: 18.79596710205078ms\r\n",,terminal_output +9119,12454358,"TERMINAL",0,0,"Step 3122, loss: 1.0233700275421143, step time: 18.320560455322266ms\r\n",,terminal_output +9120,12454422,"TERMINAL",0,0,"Step 3123, loss: 0.7764005064964294, step time: 18.625736236572266ms\r\n",,terminal_output +9121,12454529,"TERMINAL",0,0,"Step 3124, loss: 0.7920283079147339, step time: 18.80049705505371ms\r\n",,terminal_output +9122,12454581,"TERMINAL",0,0,"Step 3125, loss: 0.9365432858467102, step time: 18.29671859741211ms\r\n",,terminal_output +9123,12454688,"TERMINAL",0,0,"Step 3126, loss: 0.7900577187538147, step time: 18.09549331665039ms\r\nStep 3127, loss: 0.777693510055542, step time: 18.7225341796875ms\r\n",,terminal_output +9124,12454764,"TERMINAL",0,0,"Step 3128, loss: 0.7960734963417053, step time: 18.18394660949707ms\r\n",,terminal_output +9125,12454827,"TERMINAL",0,0,"Step 3129, loss: 0.9187926650047302, step time: 18.236398696899414ms\r\n",,terminal_output +9126,12454888,"TERMINAL",0,0,"Step 3130, loss: 0.77674400806427, step time: 18.583297729492188ms\r\n",,terminal_output +9127,12454950,"TERMINAL",0,0,"Step 3131, loss: 0.7756539583206177, step time: 18.227338790893555ms\r\n",,terminal_output +9128,12455013,"TERMINAL",0,0,"Step 3132, loss: 1.162819743156433, step time: 18.2039737701416ms\r\n",,terminal_output +9129,12455078,"TERMINAL",0,0,"Step 3133, loss: 0.7737531661987305, step time: 18.664836883544922ms\r\n",,terminal_output +9130,12455140,"TERMINAL",0,0,"Step 3134, loss: 0.795357346534729, step time: 18.202781677246094ms\r\n",,terminal_output +9131,12455203,"TERMINAL",0,0,"Step 3135, loss: 0.7619643211364746, step time: 18.172502517700195ms\r\n",,terminal_output +9132,12455265,"TERMINAL",0,0,"Step 3136, loss: 0.7790812849998474, step time: 18.645286560058594ms\r\n",,terminal_output +9133,12455328,"TERMINAL",0,0,"Step 3137, loss: 0.7892406582832336, step time: 19.66118812561035ms\r\n",,terminal_output 
+9134,12455391,"TERMINAL",0,0,"Step 3138, loss: 0.813014805316925, step time: 18.11528205871582ms\r\n",,terminal_output +9135,12455453,"TERMINAL",0,0,"Step 3139, loss: 0.837084174156189, step time: 19.6688175201416ms\r\n",,terminal_output +9136,12455518,"TERMINAL",0,0,"Step 3140, loss: 0.776798665523529, step time: 18.21589469909668ms\r\n",,terminal_output +9137,12455580,"TERMINAL",0,0,"Step 3141, loss: 0.7774462699890137, step time: 18.249988555908203ms\r\n",,terminal_output +9138,12455646,"TERMINAL",0,0,"Step 3142, loss: 0.7764366269111633, step time: 18.467426300048828ms\r\n",,terminal_output +9139,12455712,"TERMINAL",0,0,"Step 3143, loss: 0.7629446983337402, step time: 18.3103084564209ms\r\n",,terminal_output +9140,12455773,"TERMINAL",0,0,"Step 3144, loss: 0.9132166504859924, step time: 18.24784278869629ms\r\n",,terminal_output +9141,12455839,"TERMINAL",0,0,"Step 3145, loss: 0.8076940774917603, step time: 18.56398582458496ms\r\n",,terminal_output +9142,12455907,"TERMINAL",0,0,"Step 3146, loss: 0.75987309217453, step time: 18.19443702697754ms\r\n",,terminal_output +9143,12455976,"TERMINAL",0,0,"Step 3147, loss: 0.7596914172172546, step time: 18.319368362426758ms\r\n",,terminal_output +9144,12456037,"TERMINAL",0,0,"Step 3148, loss: 0.789766252040863, step time: 18.555164337158203ms\r\n",,terminal_output +9145,12456100,"TERMINAL",0,0,"Step 3149, loss: 0.7585653066635132, step time: 18.12124252319336ms\r\n",,terminal_output +9146,12456161,"TERMINAL",0,0,"Step 3150, loss: 0.7931028008460999, step time: 18.15509796142578ms\r\n",,terminal_output +9147,12456225,"TERMINAL",0,0,"Step 3151, loss: 0.9485695362091064, step time: 18.605709075927734ms\r\n",,terminal_output +9148,12456285,"TERMINAL",0,0,"Step 3152, loss: 0.7922922968864441, step time: 18.067121505737305ms\r\n",,terminal_output +9149,12456347,"TERMINAL",0,0,"Step 3153, loss: 0.7664362788200378, step time: 18.202781677246094ms\r\n",,terminal_output +9150,12456443,"TERMINAL",0,0,"Step 3154, loss: 0.9788705706596375, step time: 18.510818481445312ms\r\n",,terminal_output +9151,12456496,"TERMINAL",0,0,"Step 3155, loss: 0.7790473699569702, step time: 17.892837524414062ms\r\n",,terminal_output +9152,12456591,"TERMINAL",0,0,"Step 3156, loss: 0.744101345539093, step time: 18.140792846679688ms\r\n",,terminal_output +9153,12456671,"TERMINAL",0,0,"Step 3157, loss: 0.7508472800254822, step time: 18.518686294555664ms\r\nStep 3158, loss: 0.8106594085693359, step time: 17.86208152770996ms\r\n",,terminal_output +9154,12456737,"TERMINAL",0,0,"Step 3159, loss: 0.8100281357765198, step time: 18.212080001831055ms\r\n",,terminal_output +9155,12456837,"TERMINAL",0,0,"Step 3160, loss: 0.7518429756164551, step time: 21.95882797241211ms\r\n",,terminal_output +9156,12456900,"TERMINAL",0,0,"Step 3161, loss: 0.7768243551254272, step time: 19.45328712463379ms\r\n",,terminal_output +9157,12456962,"TERMINAL",0,0,"Step 3162, loss: 0.7743523120880127, step time: 18.570423126220703ms\r\n",,terminal_output +9158,12457023,"TERMINAL",0,0,"Step 3163, loss: 1.204934000968933, step time: 18.871545791625977ms\r\n",,terminal_output +9159,12457131,"TERMINAL",0,0,"Step 3164, loss: 0.7933077216148376, step time: 18.140077590942383ms\r\nStep 3165, loss: 0.7403247356414795, step time: 18.301010131835938ms\r\n",,terminal_output +9160,12457193,"TERMINAL",0,0,"Step 3166, loss: 0.7391353845596313, step time: 18.490076065063477ms\r\n",,terminal_output +9161,12457256,"TERMINAL",0,0,"Step 3167, loss: 0.955938458442688, step time: 18.233537673950195ms\r\n",,terminal_output 
+9162,12457319,"TERMINAL",0,0,"Step 3168, loss: 0.8028301000595093, step time: 18.25714111328125ms\r\n",,terminal_output +9163,12457382,"TERMINAL",0,0,"Step 3169, loss: 0.7593178153038025, step time: 18.655776977539062ms\r\n",,terminal_output +9164,12457461,"TERMINAL",0,0,"Step 3170, loss: 0.9453123807907104, step time: 18.13817024230957ms\r\n",,terminal_output +9165,12457510,"TERMINAL",0,0,"Step 3171, loss: 0.7548125982284546, step time: 18.262624740600586ms\r\n",,terminal_output +9166,12457605,"TERMINAL",0,0,"Step 3172, loss: 0.7885466814041138, step time: 18.512725830078125ms\r\n",,terminal_output +9167,12457667,"TERMINAL",0,0,"Step 3173, loss: 0.7460389137268066, step time: 18.282651901245117ms\r\n",,terminal_output +9168,12457765,"TERMINAL",0,0,"Step 3174, loss: 0.7404679656028748, step time: 18.109798431396484ms\r\nStep 3175, loss: 0.7783029079437256, step time: 18.42474937438965ms\r\n",,terminal_output +9169,12457865,"TERMINAL",0,0,"Step 3176, loss: 0.7403014302253723, step time: 18.100261688232422ms\r\n",,terminal_output +9170,12457927,"TERMINAL",0,0,"Step 3177, loss: 0.7542654871940613, step time: 18.442869186401367ms\r\n",,terminal_output +9171,12457988,"TERMINAL",0,0,"Step 3178, loss: 0.7293797731399536, step time: 18.503189086914062ms\r\n",,terminal_output +9172,12458097,"TERMINAL",0,0,"Step 3179, loss: 0.7428099513053894, step time: 18.494129180908203ms\r\nStep 3180, loss: 0.7800072431564331, step time: 19.014596939086914ms\r\n",,terminal_output +9173,12458215,"TERMINAL",0,0,"Step 3181, loss: 0.7458032369613647, step time: 18.63551139831543ms\r\nStep 3182, loss: 0.772988498210907, step time: 18.207550048828125ms\r\n",,terminal_output +9174,12458279,"TERMINAL",0,0,"Step 3183, loss: 0.7381616234779358, step time: 18.112897872924805ms\r\n",,terminal_output +9175,12458378,"TERMINAL",0,0,"Step 3184, loss: 0.8242717385292053, step time: 18.78833770751953ms\r\n",,terminal_output +9176,12458430,"TERMINAL",0,0,"Step 3185, loss: 0.9601048231124878, step time: 18.15485954284668ms\r\n",,terminal_output +9177,12458533,"TERMINAL",0,0,"Step 3186, loss: 0.7336779236793518, step time: 18.033504486083984ms\r\nStep 3187, loss: 0.7434549927711487, step time: 18.646717071533203ms\r\n",,terminal_output +9178,12458660,"TERMINAL",0,0,"Step 3188, loss: 0.8119211196899414, step time: 17.87853240966797ms\r\nStep 3189, loss: 0.8213369846343994, step time: 18.202781677246094ms\r\n",,terminal_output +9179,12458724,"TERMINAL",0,0,"Step 3190, loss: 0.7450001239776611, step time: 18.71347427368164ms\r\n",,terminal_output +9180,12458787,"TERMINAL",0,0,"Step 3191, loss: 0.7376208901405334, step time: 18.032550811767578ms\r\n",,terminal_output +9181,12458917,"TERMINAL",0,0,"Step 3192, loss: 0.7728272080421448, step time: 17.95053482055664ms\r\nStep 3193, loss: 0.7352964282035828, step time: 19.021272659301758ms\r\n",,terminal_output +9182,12459027,"TERMINAL",0,0,"Step 3194, loss: 0.7373220324516296, step time: 18.110990524291992ms\r\n",,terminal_output +9183,12459078,"TERMINAL",0,0,"Step 3195, loss: 0.860075831413269, step time: 19.805192947387695ms\r\n",,terminal_output +9184,12459182,"TERMINAL",0,0,"Step 3196, loss: 0.7477606534957886, step time: 18.923282623291016ms\r\nStep 3197, loss: 0.7187005281448364, step time: 18.538951873779297ms\r\n",,terminal_output +9185,12459302,"TERMINAL",0,0,"Step 3198, loss: 0.727311372756958, step time: 18.42212677001953ms\r\nStep 3199, loss: 0.7547690272331238, step time: 18.729448318481445ms\r\n",,terminal_output +9186,12459399,"TERMINAL",0,0,"Step 3200, loss: 
1.1758404970169067, step time: 18.393754959106445ms\r\n",,terminal_output +9187,12459462,"TERMINAL",0,0,"Step 3201, loss: 0.7196024060249329, step time: 18.33367347717285ms\r\n",,terminal_output +9188,12459528,"TERMINAL",0,0,"Step 3202, loss: 0.7325133681297302, step time: 18.735885620117188ms\r\n",,terminal_output +9189,12459584,"TERMINAL",0,0,"Step 3203, loss: 0.7221993803977966, step time: 18.549203872680664ms\r\n",,terminal_output +9190,12459688,"TERMINAL",0,0,"Step 3204, loss: 0.7269914150238037, step time: 18.33653450012207ms\r\nStep 3205, loss: 0.7910768389701843, step time: 18.694639205932617ms\r\n",,terminal_output +9191,12459814,"TERMINAL",0,0,"Step 3206, loss: 0.7758175730705261, step time: 18.059492111206055ms\r\nStep 3207, loss: 0.7352136969566345, step time: 18.198013305664062ms\r\n",,terminal_output +9192,12459878,"TERMINAL",0,0,"Step 3208, loss: 0.7230603098869324, step time: 18.231630325317383ms\r\n",,terminal_output +9193,12460012,"TERMINAL",0,0,"Step 3209, loss: 0.7530298829078674, step time: 18.33319664001465ms\r\nStep 3210, loss: 0.7591447830200195, step time: 18.494129180908203ms\r\n",,terminal_output +9194,12460073,"TERMINAL",0,0,"Step 3211, loss: 0.8865545392036438, step time: 18.940210342407227ms\r\n",,terminal_output +9195,12460165,"TERMINAL",0,0,"Step 3212, loss: 0.7307533621788025, step time: 18.369436264038086ms\r\n",,terminal_output +9196,12460216,"TERMINAL",0,0,"Step 3213, loss: 0.7144466042518616, step time: 18.250703811645508ms\r\n",,terminal_output +9197,12460313,"TERMINAL",0,0,"Step 3214, loss: 0.7068611979484558, step time: 18.665790557861328ms\r\n",,terminal_output +9198,12460391,"TERMINAL",0,0,"Step 3215, loss: 1.3784961700439453, step time: 18.110990524291992ms\r\nStep 3216, loss: 0.7286797761917114, step time: 18.064260482788086ms\r\n",,terminal_output +9199,12460487,"TERMINAL",0,0,"Step 3217, loss: 0.7624641060829163, step time: 18.665790557861328ms\r\n",,terminal_output +9200,12460550,"TERMINAL",0,0,"Step 3218, loss: 0.841212809085846, step time: 18.457412719726562ms\r\n",,terminal_output +9201,12460611,"TERMINAL",0,0,"Step 3219, loss: 0.7271928787231445, step time: 18.370628356933594ms\r\n",,terminal_output +9202,12460672,"TERMINAL",0,0,"Step 3220, loss: 0.7204583287239075, step time: 18.51630210876465ms\r\n",,terminal_output +9203,12460773,"TERMINAL",0,0,"Step 3221, loss: 0.9226012229919434, step time: 18.084287643432617ms\r\nStep 3222, loss: 0.7195271253585815, step time: 18.863677978515625ms\r\n",,terminal_output +9204,12460838,"TERMINAL",0,0,"Step 3223, loss: 0.7532616853713989, step time: 18.47696304321289ms\r\n",,terminal_output +9205,12461012,"TERMINAL",0,0,"Step 3224, loss: 0.733243465423584, step time: 18.1732177734375ms\r\n",,terminal_output +9206,12461095,"TERMINAL",0,0,"Step 3225, loss: 0.8445701599121094, step time: 17.75503158569336ms\r\nStep 3226, loss: 0.72199547290802, step time: 18.594741821289062ms\r\nStep 3227, loss: 0.7078302502632141, step time: 20.93505859375ms\r\n",,terminal_output +9207,12461194,"TERMINAL",0,0,"Step 3228, loss: 0.7258476614952087, step time: 18.971920013427734ms\r\n",,terminal_output +9208,12461255,"TERMINAL",0,0,"Step 3229, loss: 0.7200080156326294, step time: 18.876075744628906ms\r\n",,terminal_output +9209,12461320,"TERMINAL",0,0,"Step 3230, loss: 0.7336091995239258, step time: 18.111228942871094ms\r\n",,terminal_output +9210,12461380,"TERMINAL",0,0,"Step 3231, loss: 0.7264801859855652, step time: 17.832040786743164ms\r\n",,terminal_output +9211,12461441,"TERMINAL",0,0,"Step 3232, loss: 
0.7095871567726135, step time: 18.513917922973633ms\r\n",,terminal_output +9212,12461502,"TERMINAL",0,0,"Step 3233, loss: 0.8943198323249817, step time: 17.823219299316406ms\r\n",,terminal_output +9213,12461564,"TERMINAL",0,0,"Step 3234, loss: 0.7147888541221619, step time: 17.907381057739258ms\r\n",,terminal_output +9214,12461626,"TERMINAL",0,0,"Step 3235, loss: 0.768257200717926, step time: 18.373489379882812ms\r\n",,terminal_output +9215,12461735,"TERMINAL",0,0,"Step 3236, loss: 0.7217652201652527, step time: 18.006324768066406ms\r\nStep 3237, loss: 0.7335465550422668, step time: 17.680883407592773ms\r\n",,terminal_output +9216,12461842,"TERMINAL",0,0,"Step 3238, loss: 0.7030541300773621, step time: 18.663644790649414ms\r\n",,terminal_output +9217,12461894,"TERMINAL",0,0,"Step 3239, loss: 0.7255879640579224, step time: 17.86065101623535ms\r\n",,terminal_output +9218,12461989,"TERMINAL",0,0,"Step 3240, loss: 0.808052122592926, step time: 18.04971694946289ms\r\n",,terminal_output +9219,12462299,"TERMINAL",0,0,"Step 3241, loss: 0.9586153626441956, step time: 297.0895767211914ms\r\nStep 3242, loss: 0.7935583591461182, step time: 25.649309158325195ms\r\n",,terminal_output +9220,12462365,"TERMINAL",0,0,"Step 3243, loss: 0.7048426270484924, step time: 20.687580108642578ms\r\n",,terminal_output +9221,12462429,"TERMINAL",0,0,"Step 3244, loss: 0.7528800368309021, step time: 19.576311111450195ms\r\n",,terminal_output +9222,12462493,"TERMINAL",0,0,"Step 3245, loss: 0.7683957815170288, step time: 18.177509307861328ms\r\n",,terminal_output +9223,12462558,"TERMINAL",0,0,"Step 3246, loss: 0.7068027853965759, step time: 18.426179885864258ms\r\n",,terminal_output +9224,12462619,"TERMINAL",0,0,"Step 3247, loss: 1.010276198387146, step time: 18.75758171081543ms\r\n",,terminal_output +9225,12462683,"TERMINAL",0,0,"Step 3248, loss: 0.7122265696525574, step time: 18.34273338317871ms\r\n",,terminal_output +9226,12462748,"TERMINAL",0,0,"Step 3249, loss: 0.7754254341125488, step time: 18.092632293701172ms\r\n",,terminal_output +9227,12462848,"TERMINAL",0,0,"Step 3250, loss: 0.7226694226264954, step time: 19.787311553955078ms\r\n",,terminal_output +9228,12462910,"TERMINAL",0,0,"Step 3251, loss: 0.6962166428565979, step time: 18.174171447753906ms\r\n",,terminal_output +9229,12463016,"TERMINAL",0,0,"Step 3252, loss: 0.8159881830215454, step time: 18.352508544921875ms\r\nStep 3253, loss: 0.723599374294281, step time: 18.717050552368164ms\r\n",,terminal_output +9230,12463133,"TERMINAL",0,0,"Step 3254, loss: 0.7379250526428223, step time: 18.793344497680664ms\r\nStep 3255, loss: 0.721422553062439, step time: 18.164396286010742ms\r\n",,terminal_output +9231,12463197,"TERMINAL",0,0,"Step 3256, loss: 0.7351098656654358, step time: 18.823862075805664ms\r\n",,terminal_output +9232,12463289,"TERMINAL",0,0,"Step 3257, loss: 0.7189483642578125, step time: 17.780065536499023ms\r\n",,terminal_output +9233,12463357,"TERMINAL",0,0,"Step 3258, loss: 0.7091448307037354, step time: 17.938613891601562ms\r\n",,terminal_output +9234,12463414,"TERMINAL",0,0,"Step 3259, loss: 0.76624596118927, step time: 18.36419105529785ms\r\n",,terminal_output +9235,12463524,"TERMINAL",0,0,"Step 3260, loss: 0.7024484872817993, step time: 18.3255672454834ms\r\nStep 3261, loss: 0.9492724537849426, step time: 18.047094345092773ms\r\n",,terminal_output +9236,12463640,"TERMINAL",0,0,"Step 3262, loss: 0.726364254951477, step time: 18.944263458251953ms\r\nStep 3263, loss: 0.704882800579071, step time: 18.170595169067383ms\r\n",,terminal_output 
+9237,12463706,"TERMINAL",0,0,"Step 3264, loss: 0.8105280995368958, step time: 18.28789710998535ms\r\n",,terminal_output +9238,12463774,"TERMINAL",0,0,"Step 3265, loss: 0.7281477451324463, step time: 18.552303314208984ms\r\n",,terminal_output +9239,12463873,"TERMINAL",0,0,"Step 3266, loss: 0.6944226026535034, step time: 18.307924270629883ms\r\n",,terminal_output +9240,12463935,"TERMINAL",0,0,"Step 3267, loss: 1.6674270629882812, step time: 18.152475357055664ms\r\n",,terminal_output +9241,12463996,"TERMINAL",0,0,"Step 3268, loss: 0.8379749655723572, step time: 18.772363662719727ms\r\n",,terminal_output +9242,12464062,"TERMINAL",0,0,"Step 3269, loss: 0.7152090668678284, step time: 18.155336380004883ms\r\n",,terminal_output +9243,12464118,"TERMINAL",0,0,"Step 3270, loss: 0.7619292736053467, step time: 19.48070526123047ms\r\n",,terminal_output +9244,12464182,"TERMINAL",0,0,"Step 3271, loss: 0.7038714289665222, step time: 18.921375274658203ms\r\n",,terminal_output +9245,12464244,"TERMINAL",0,0,"Step 3272, loss: 0.7086149454116821, step time: 18.195152282714844ms\r\n",,terminal_output +9246,12464307,"TERMINAL",0,0,"Step 3273, loss: 1.0420693159103394, step time: 17.771005630493164ms\r\n",,terminal_output +9247,12464368,"TERMINAL",0,0,"Step 3274, loss: 1.7313463687896729, step time: 18.9058780670166ms\r\n",,terminal_output +9248,12464429,"TERMINAL",0,0,"Step 3275, loss: 0.7528182864189148, step time: 17.694950103759766ms\r\n",,terminal_output +9249,12464494,"TERMINAL",0,0,"Step 3276, loss: 0.7364839911460876, step time: 18.132686614990234ms\r\n",,terminal_output +9250,12464555,"TERMINAL",0,0,"Step 3277, loss: 0.7568260431289673, step time: 18.139123916625977ms\r\n",,terminal_output +9251,12464623,"TERMINAL",0,0,"Step 3278, loss: 0.7711244821548462, step time: 17.89379119873047ms\r\n",,terminal_output +9252,12464684,"TERMINAL",0,0,"Step 3279, loss: 0.726152241230011, step time: 17.84539222717285ms\r\n",,terminal_output +9253,12464755,"TERMINAL",0,0,"Step 3280, loss: 0.7564980387687683, step time: 18.7380313873291ms\r\n",,terminal_output +9254,12464819,"TERMINAL",0,0,"Step 3281, loss: 0.9331837892532349, step time: 17.77362823486328ms\r\n",,terminal_output +9255,12464882,"TERMINAL",0,0,"Step 3282, loss: 0.9025630950927734, step time: 18.21303367614746ms\r\n",,terminal_output +9256,12464941,"TERMINAL",0,0,"Step 3283, loss: 1.6326128244400024, step time: 18.59259605407715ms\r\n",,terminal_output +9257,12465005,"TERMINAL",0,0,"Step 3284, loss: 0.8296145796775818, step time: 19.45638656616211ms\r\n",,terminal_output +9258,12465069,"TERMINAL",0,0,"Step 3285, loss: 0.7266654968261719, step time: 18.15319061279297ms\r\n",,terminal_output +9259,12465124,"TERMINAL",0,0,"Step 3286, loss: 0.7303250432014465, step time: 18.736600875854492ms\r\n",,terminal_output +9260,12465189,"TERMINAL",0,0,"Step 3287, loss: 0.7278887629508972, step time: 17.745494842529297ms\r\n",,terminal_output +9261,12465249,"TERMINAL",0,0,"Step 3288, loss: 1.0271918773651123, step time: 18.094301223754883ms\r\n",,terminal_output +9262,12465343,"TERMINAL",0,0,"Step 3289, loss: 0.883409321308136, step time: 18.201351165771484ms\r\n",,terminal_output +9263,12465399,"TERMINAL",0,0,"Step 3290, loss: 0.7184712886810303, step time: 17.92001724243164ms\r\n",,terminal_output +9264,12465504,"TERMINAL",0,0,"Step 3291, loss: 0.7332683205604553, step time: 17.577409744262695ms\r\nStep 3292, loss: 0.7880446314811707, step time: 19.500017166137695ms\r\n",,terminal_output +9265,12465566,"TERMINAL",0,0,"Step 3293, loss: 0.7677649259567261, step time: 
17.965078353881836ms\r\n",,terminal_output +9266,12465629,"TERMINAL",0,0,"Step 3294, loss: 0.7629014849662781, step time: 17.748355865478516ms\r\n",,terminal_output +9267,12465690,"TERMINAL",0,0,"Step 3295, loss: 0.7430033683776855, step time: 18.5849666595459ms\r\n",,terminal_output +9268,12465754,"TERMINAL",0,0,"Step 3296, loss: 2.132173776626587, step time: 18.202781677246094ms\r\n",,terminal_output +9269,12465820,"TERMINAL",0,0,"Step 3297, loss: 0.77342289686203, step time: 17.81749725341797ms\r\n",,terminal_output +9270,12465924,"TERMINAL",0,0,"Step 3298, loss: 0.8431922197341919, step time: 19.117116928100586ms\r\n",,terminal_output +9271,12465977,"TERMINAL",0,0,"Step 3299, loss: 0.7028087973594666, step time: 18.294095993041992ms\r\n",,terminal_output +9272,12466083,"TERMINAL",0,0,"Step 3300, loss: 0.7668644189834595, step time: 18.433332443237305ms\r\nStep 3301, loss: 0.7049422860145569, step time: 18.68438720703125ms\r\n",,terminal_output +9273,12466176,"TERMINAL",0,0,"Step 3302, loss: 0.7168764472007751, step time: 17.94600486755371ms\r\n",,terminal_output +9274,12466226,"TERMINAL",0,0,"Step 3303, loss: 0.7214182615280151, step time: 17.966747283935547ms\r\n",,terminal_output +9275,12466332,"TERMINAL",0,0,"Step 3304, loss: 0.7313080430030823, step time: 18.658876419067383ms\r\nStep 3305, loss: 0.7055577039718628, step time: 18.059968948364258ms\r\n",,terminal_output +9276,12466440,"TERMINAL",0,0,"Step 3306, loss: 1.3044047355651855, step time: 17.7459716796875ms\r\n",,terminal_output +9277,12466491,"TERMINAL",0,0,"Step 3307, loss: 0.7139526009559631, step time: 18.384456634521484ms\r\n",,terminal_output +9278,12466599,"TERMINAL",0,0,"Step 3308, loss: 0.770224392414093, step time: 17.77505874633789ms\r\nStep 3309, loss: 0.7319220304489136, step time: 17.808198928833008ms\r\n",,terminal_output +9279,12466662,"TERMINAL",0,0,"Step 3310, loss: 0.8936522006988525, step time: 18.324851989746094ms\r\n",,terminal_output +9280,12466726,"TERMINAL",0,0,"Step 3311, loss: 0.7023299932479858, step time: 17.835378646850586ms\r\n",,terminal_output +9281,12466788,"TERMINAL",0,0,"Step 3312, loss: 0.7410528659820557, step time: 17.85898208618164ms\r\n",,terminal_output +9282,12466851,"TERMINAL",0,0,"Step 3313, loss: 0.7124667763710022, step time: 21.656513214111328ms\r\n",,terminal_output +9283,12466911,"TERMINAL",0,0,"Step 3314, loss: 0.7161973118782043, step time: 18.469810485839844ms\r\n",,terminal_output +9284,12466977,"TERMINAL",0,0,"Step 3315, loss: 0.7652620673179626, step time: 18.047094345092773ms\r\n",,terminal_output +9285,12467038,"TERMINAL",0,0,"Step 3316, loss: 0.7178583741188049, step time: 18.33176612854004ms\r\n",,terminal_output +9286,12467101,"TERMINAL",0,0,"Step 3317, loss: 0.6871594190597534, step time: 17.84801483154297ms\r\n",,terminal_output +9287,12467166,"TERMINAL",0,0,"Step 3318, loss: 0.6784730553627014, step time: 18.028736114501953ms\r\n",,terminal_output +9288,12467229,"TERMINAL",0,0,"Step 3319, loss: 0.6887625455856323, step time: 18.147945404052734ms\r\n",,terminal_output +9289,12467294,"TERMINAL",0,0,"Step 3320, loss: 0.7444949746131897, step time: 18.004179000854492ms\r\n",,terminal_output +9290,12467355,"TERMINAL",0,0,"Step 3321, loss: 0.7468280792236328, step time: 17.80080795288086ms\r\n",,terminal_output +9291,12467418,"TERMINAL",0,0,"Step 3322, loss: 0.7276806235313416, step time: 18.29361915588379ms\r\n",,terminal_output +9292,12467525,"TERMINAL",0,0,"Step 3323, loss: 0.8522881269454956, step time: 17.626047134399414ms\r\n",,terminal_output 
+9293,12467576,"TERMINAL",0,0,"Step 3324, loss: 0.7204300761222839, step time: 17.81487464904785ms\r\n",,terminal_output +9294,12467680,"TERMINAL",0,0,"Step 3325, loss: 0.6757293939590454, step time: 18.366575241088867ms\r\nStep 3326, loss: 0.6888552904129028, step time: 17.826318740844727ms\r\n",,terminal_output +9295,12467806,"TERMINAL",0,0,"Step 3327, loss: 0.6675882935523987, step time: 18.03755760192871ms\r\nStep 3328, loss: 1.0132266283035278, step time: 18.316984176635742ms\r\n",,terminal_output +9296,12467877,"TERMINAL",0,0,"Step 3329, loss: 0.7038977742195129, step time: 17.845869064331055ms\r\n",,terminal_output +9297,12467934,"TERMINAL",0,0,"Step 3330, loss: 0.7340322732925415, step time: 18.064260482788086ms\r\n",,terminal_output +9298,12467997,"TERMINAL",0,0,"Step 3331, loss: 0.6773838400840759, step time: 18.47052574157715ms\r\n",,terminal_output +9299,12468062,"TERMINAL",0,0,"Step 3332, loss: 0.6805254220962524, step time: 18.299102783203125ms\r\n",,terminal_output +9300,12468128,"TERMINAL",0,0,"Step 3333, loss: 0.718692421913147, step time: 17.9750919342041ms\r\n",,terminal_output +9301,12468228,"TERMINAL",0,0,"Step 3334, loss: 0.8609626293182373, step time: 21.745920181274414ms\r\n",,terminal_output +9302,12468281,"TERMINAL",0,0,"Step 3335, loss: 0.6801620721817017, step time: 17.986297607421875ms\r\n",,terminal_output +9303,12468342,"TERMINAL",0,0,"Step 3336, loss: 1.1618335247039795, step time: 18.047332763671875ms\r\n",,terminal_output +9304,12468406,"TERMINAL",0,0,"Step 3337, loss: 0.6941530704498291, step time: 18.30887794494629ms\r\n",,terminal_output +9305,12468465,"TERMINAL",0,0,"Step 3338, loss: 0.693045973777771, step time: 17.96865463256836ms\r\n",,terminal_output +9306,12468527,"TERMINAL",0,0,"Step 3339, loss: 0.6899024248123169, step time: 17.946720123291016ms\r\n",,terminal_output +9307,12468594,"TERMINAL",0,0,"Step 3340, loss: 0.6782724857330322, step time: 18.83244514465332ms\r\n",,terminal_output +9308,12468664,"TERMINAL",0,0,"Step 3341, loss: 0.6792365908622742, step time: 17.885684967041016ms\r\n",,terminal_output +9309,12468732,"TERMINAL",0,0,"Step 3342, loss: 0.6882842183113098, step time: 18.045902252197266ms\r\n",,terminal_output +9310,12468769,"TERMINAL",0,0,"Step 3343, loss: 1.2197561264038086, step time: 18.395423889160156ms\r\n",,terminal_output +9311,12468835,"TERMINAL",0,0,"Step 3344, loss: 0.6806923747062683, step time: 18.15319061279297ms\r\n",,terminal_output +9312,12468958,"TERMINAL",0,0,"Step 3345, loss: 0.7751818299293518, step time: 17.784833908081055ms\r\nStep 3346, loss: 0.6877865195274353, step time: 18.896102905273438ms\r\n",,terminal_output +9313,12469021,"TERMINAL",0,0,"Step 3347, loss: 0.7268890142440796, step time: 17.78268814086914ms\r\n",,terminal_output +9314,12469149,"TERMINAL",0,0,"Step 3348, loss: 1.692473292350769, step time: 19.484996795654297ms\r\nStep 3349, loss: 0.9512479901313782, step time: 18.822193145751953ms\r\n",,terminal_output +9315,12469255,"TERMINAL",0,0,"Step 3350, loss: 0.897285521030426, step time: 18.065929412841797ms\r\n",,terminal_output +9316,12469307,"TERMINAL",0,0,"Step 3351, loss: 0.6999131441116333, step time: 17.864704132080078ms\r\n",,terminal_output +9317,12469411,"TERMINAL",0,0,"Step 3352, loss: 0.7104360461235046, step time: 18.96834373474121ms\r\nStep 3353, loss: 0.7359337210655212, step time: 17.95673370361328ms\r\n",,terminal_output +9318,12469507,"TERMINAL",0,0,"Step 3354, loss: 0.7359165549278259, step time: 18.221616744995117ms\r\n",,terminal_output +9319,12469560,"TERMINAL",0,0,"Step 
3355, loss: 0.7405615448951721, step time: 18.296480178833008ms\r\n",,terminal_output +9320,12469666,"TERMINAL",0,0,"Step 3356, loss: 0.8309345245361328, step time: 18.23115348815918ms\r\nStep 3357, loss: 0.688188374042511, step time: 18.07570457458496ms\r\n",,terminal_output +9321,12469729,"TERMINAL",0,0,"Step 3358, loss: 0.7245261669158936, step time: 18.597841262817383ms\r\n",,terminal_output +9322,12469790,"TERMINAL",0,0,"Step 3359, loss: 0.7387321591377258, step time: 18.285512924194336ms\r\n",,terminal_output +9323,12469854,"TERMINAL",0,0,"Step 3360, loss: 0.6981122493743896, step time: 18.21613311767578ms\r\n",,terminal_output +9324,12469917,"TERMINAL",0,0,"Step 3361, loss: 0.6809089779853821, step time: 18.425941467285156ms\r\n",,terminal_output +9325,12470283,"TERMINAL",0,0,"Step 3362, loss: 0.682659924030304, step time: 326.30348205566406ms\r\n",,terminal_output +9326,12470335,"TERMINAL",0,0,"Step 3363, loss: 0.8005244731903076, step time: 25.889158248901367ms\r\n",,terminal_output +9327,12470424,"TERMINAL",0,0,"Step 3364, loss: 0.6751247644424438, step time: 20.700931549072266ms\r\n",,terminal_output +9328,12470475,"TERMINAL",0,0,"Step 3365, loss: 0.726291835308075, step time: 19.077539443969727ms\r\n",,terminal_output +9329,12470585,"TERMINAL",0,0,"Step 3366, loss: 0.6610589623451233, step time: 18.750667572021484ms\r\nStep 3367, loss: 0.7053085565567017, step time: 18.202543258666992ms\r\n",,terminal_output +9330,12470678,"TERMINAL",0,0,"Step 3368, loss: 0.6734586954116821, step time: 18.541336059570312ms\r\n",,terminal_output +9331,12470769,"TERMINAL",0,0,"Step 3369, loss: 0.8759310245513916, step time: 18.1734561920166ms\r\nStep 3370, loss: 0.654590904712677, step time: 18.543243408203125ms\r\n",,terminal_output +9332,12470865,"TERMINAL",0,0,"Step 3371, loss: 0.6821420788764954, step time: 18.460512161254883ms\r\n",,terminal_output +9333,12470928,"TERMINAL",0,0,"Step 3372, loss: 0.8066024780273438, step time: 18.407583236694336ms\r\n",,terminal_output +9334,12470992,"TERMINAL",0,0,"Step 3373, loss: 0.7306004762649536, step time: 18.137693405151367ms\r\n",,terminal_output +9335,12471053,"TERMINAL",0,0,"Step 3374, loss: 0.7020735740661621, step time: 18.726348876953125ms\r\n",,terminal_output +9336,12471112,"TERMINAL",0,0,"Step 3375, loss: 0.6745165586471558, step time: 18.06354522705078ms\r\n",,terminal_output +9337,12471172,"TERMINAL",0,0,"Step 3376, loss: 0.7655800580978394, step time: 18.308639526367188ms\r\n",,terminal_output +9338,12471238,"TERMINAL",0,0,"Step 3377, loss: 0.6453057527542114, step time: 18.39423179626465ms\r\n",,terminal_output +9339,12471366,"TERMINAL",0,0,"Step 3378, loss: 0.7795218229293823, step time: 18.095731735229492ms\r\n",,terminal_output +9340,12471415,"TERMINAL",0,0,"Step 3379, loss: 0.6735126376152039, step time: 18.62931251525879ms\r\nStep 3380, loss: 0.6819379329681396, step time: 18.718481063842773ms\r\n",,terminal_output +9341,12471541,"TERMINAL",0,0,"Step 3381, loss: 0.660805344581604, step time: 18.09978485107422ms\r\nStep 3382, loss: 0.7031335234642029, step time: 18.41568946838379ms\r\n",,terminal_output +9342,12471596,"TERMINAL",0,0,"Step 3383, loss: 0.685788631439209, step time: 18.16248893737793ms\r\n",,terminal_output +9343,12471707,"TERMINAL",0,0,"Step 3384, loss: 0.6650276184082031, step time: 23.177385330200195ms\r\n",,terminal_output +9344,12471800,"TERMINAL",0,0,"Step 3385, loss: 0.6685788035392761, step time: 24.816513061523438ms\r\nStep 3386, loss: 0.6505571603775024, step time: 25.32219886779785ms\r\n",,terminal_output 
+9345,12471893,"TERMINAL",0,0,"Step 3387, loss: 0.6752606630325317, step time: 26.061534881591797ms\r\n",,terminal_output +9346,12471990,"TERMINAL",0,0,"Step 3388, loss: 0.6956521272659302, step time: 26.1533260345459ms\r\n",,terminal_output +9347,12472070,"TERMINAL",0,0,"Step 3389, loss: 0.6511753797531128, step time: 26.00264549255371ms\r\nStep 3390, loss: 0.6505884528160095, step time: 26.08776092529297ms\r\n",,terminal_output +9348,12472137,"TERMINAL",0,0,"Step 3391, loss: 0.7148522138595581, step time: 26.28350257873535ms\r\n",,terminal_output +9349,12472203,"TERMINAL",0,0,"Step 3392, loss: 0.6454046368598938, step time: 25.52008628845215ms\r\n",,terminal_output +9350,12472269,"TERMINAL",0,0,"Step 3393, loss: 0.8006756901741028, step time: 20.18427848815918ms\r\n",,terminal_output +9351,12472333,"TERMINAL",0,0,"Step 3394, loss: 0.6509225964546204, step time: 18.69511604309082ms\r\n",,terminal_output +9352,12472431,"TERMINAL",0,0,"Step 3395, loss: 0.6609110832214355, step time: 18.6007022857666ms\r\n",,terminal_output +9353,12472484,"TERMINAL",0,0,"Step 3396, loss: 0.7661804556846619, step time: 18.4786319732666ms\r\n",,terminal_output +9354,12472590,"TERMINAL",0,0,"Step 3397, loss: 0.6746051907539368, step time: 18.190860748291016ms\r\nStep 3398, loss: 0.658288300037384, step time: 18.45264434814453ms\r\n",,terminal_output +9355,12472684,"TERMINAL",0,0,"Step 3399, loss: 0.6709529757499695, step time: 18.00704002380371ms\r\n",,terminal_output +9356,12472781,"TERMINAL",0,0,"Step 3400, loss: 0.652317225933075, step time: 18.297433853149414ms\r\nStep 3401, loss: 0.6787147521972656, step time: 18.439054489135742ms\r\n",,terminal_output +9357,12472874,"TERMINAL",0,0,"Step 3402, loss: 0.6334774494171143, step time: 19.048213958740234ms\r\n",,terminal_output +9358,12472929,"TERMINAL",0,0,"Step 3403, loss: 0.6780474781990051, step time: 17.917871475219727ms\r\n",,terminal_output +9359,12473025,"TERMINAL",0,0,"Step 3404, loss: 0.6614920496940613, step time: 18.58973503112793ms\r\n",,terminal_output +9360,12473100,"TERMINAL",0,0,"Step 3405, loss: 0.6378105878829956, step time: 17.96245574951172ms\r\nStep 3406, loss: 0.6948789358139038, step time: 18.226146697998047ms\r\n",,terminal_output +9361,12473163,"TERMINAL",0,0,"Step 3407, loss: 0.6512437462806702, step time: 18.390893936157227ms\r\n",,terminal_output +9362,12473227,"TERMINAL",0,0,"Step 3408, loss: 0.6782195568084717, step time: 18.19467544555664ms\r\n",,terminal_output +9363,12473292,"TERMINAL",0,0,"Step 3409, loss: 0.6437628865242004, step time: 17.97795295715332ms\r\n",,terminal_output +9364,12473356,"TERMINAL",0,0,"Step 3410, loss: 0.6443634033203125, step time: 18.239736557006836ms\r\n",,terminal_output +9365,12473420,"TERMINAL",0,0,"Step 3411, loss: 0.6538420915603638, step time: 17.724275588989258ms\r\n",,terminal_output +9366,12473483,"TERMINAL",0,0,"Step 3412, loss: 0.6838521361351013, step time: 17.940044403076172ms\r\n",,terminal_output +9367,12473546,"TERMINAL",0,0,"Step 3413, loss: 0.6290802955627441, step time: 18.074512481689453ms\r\n",,terminal_output +9368,12473664,"TERMINAL",0,0,"Step 3414, loss: 0.6428261995315552, step time: 18.102169036865234ms\r\n",,terminal_output +9369,12473675,"TERMINAL",0,0,"Step 3415, loss: 0.6491519212722778, step time: 17.9440975189209ms\r\n",,terminal_output +9370,12473803,"TERMINAL",0,0,"Step 3416, loss: 0.637328028678894, step time: 18.178701400756836ms\r\nStep 3417, loss: 0.6365069150924683, step time: 17.935752868652344ms\r\n",,terminal_output +9371,12473868,"TERMINAL",0,0,"Step 3418, 
loss: 0.6354169249534607, step time: 18.09406280517578ms\r\n",,terminal_output +9372,12473928,"TERMINAL",0,0,"Step 3419, loss: 0.6363373398780823, step time: 18.364906311035156ms\r\n",,terminal_output +9373,12473995,"TERMINAL",0,0,"Step 3420, loss: 0.6365015506744385, step time: 18.425941467285156ms\r\n",,terminal_output +9374,12474060,"TERMINAL",0,0,"Step 3421, loss: 0.6757134199142456, step time: 18.192768096923828ms\r\n",,terminal_output +9375,12474123,"TERMINAL",0,0,"Step 3422, loss: 0.932314932346344, step time: 19.895076751708984ms\r\n",,terminal_output +9376,12474187,"TERMINAL",0,0,"Step 3423, loss: 0.6829216480255127, step time: 18.524169921875ms\r\n",,terminal_output +9377,12474250,"TERMINAL",0,0,"Step 3424, loss: 0.8293502926826477, step time: 18.290281295776367ms\r\n",,terminal_output +9378,12474316,"TERMINAL",0,0,"Step 3425, loss: 0.6379381418228149, step time: 18.555879592895508ms\r\n",,terminal_output +9379,12474382,"TERMINAL",0,0,"Step 3426, loss: 0.6366522312164307, step time: 18.439292907714844ms\r\n",,terminal_output +9380,12474446,"TERMINAL",0,0,"Step 3427, loss: 0.638522744178772, step time: 18.129587173461914ms\r\n",,terminal_output +9381,12474509,"TERMINAL",0,0,"Step 3428, loss: 0.6351145505905151, step time: 18.5396671295166ms\r\n",,terminal_output +9382,12474574,"TERMINAL",0,0,"Step 3429, loss: 0.6679768562316895, step time: 18.22185516357422ms\r\n",,terminal_output +9383,12474638,"TERMINAL",0,0,"Step 3430, loss: 0.6231021881103516, step time: 19.49787139892578ms\r\n",,terminal_output +9384,12474702,"TERMINAL",0,0,"Step 3431, loss: 0.641098141670227, step time: 18.51820945739746ms\r\n",,terminal_output +9385,12474767,"TERMINAL",0,0,"Step 3432, loss: 0.6383892297744751, step time: 18.430709838867188ms\r\n",,terminal_output +9386,12474829,"TERMINAL",0,0,"Step 3433, loss: 0.6680991053581238, step time: 18.354415893554688ms\r\n",,terminal_output +9387,12474957,"TERMINAL",0,0,"Step 3434, loss: 0.6921835541725159, step time: 18.72110366821289ms\r\nStep 3435, loss: 0.6943373084068298, step time: 18.175125122070312ms\r\n",,terminal_output +9388,12475021,"TERMINAL",0,0,"Step 3436, loss: 0.6469936370849609, step time: 18.352270126342773ms\r\n",,terminal_output +9389,12475085,"TERMINAL",0,0,"Step 3437, loss: 0.6214350461959839, step time: 18.977642059326172ms\r\n",,terminal_output +9390,12475148,"TERMINAL",0,0,"Step 3438, loss: 0.6349973082542419, step time: 18.049001693725586ms\r\n",,terminal_output +9391,12475213,"TERMINAL",0,0,"Step 3439, loss: 0.689900815486908, step time: 18.537044525146484ms\r\n",,terminal_output +9392,12475279,"TERMINAL",0,0,"Step 3440, loss: 0.8163467049598694, step time: 18.613338470458984ms\r\n",,terminal_output +9393,12475343,"TERMINAL",0,0,"Step 3441, loss: 0.6241861581802368, step time: 17.958641052246094ms\r\n",,terminal_output +9394,12475406,"TERMINAL",0,0,"Step 3442, loss: 0.6401978135108948, step time: 18.35179328918457ms\r\n",,terminal_output +9395,12475473,"TERMINAL",0,0,"Step 3443, loss: 0.6567491888999939, step time: 18.838882446289062ms\r\n",,terminal_output +9396,12475536,"TERMINAL",0,0,"Step 3444, loss: 0.6514127254486084, step time: 18.9056396484375ms\r\n",,terminal_output +9397,12475600,"TERMINAL",0,0,"Step 3445, loss: 0.7878701686859131, step time: 17.98081398010254ms\r\n",,terminal_output +9398,12475665,"TERMINAL",0,0,"Step 3446, loss: 0.6220337152481079, step time: 18.140316009521484ms\r\n",,terminal_output +9399,12475737,"TERMINAL",0,0,"Step 3447, loss: 0.6412348747253418, step time: 17.538785934448242ms\r\n",,terminal_output 
+9400,12475791,"TERMINAL",0,0,"Step 3448, loss: 0.6341937184333801, step time: 17.893552780151367ms\r\n",,terminal_output +9401,12475859,"TERMINAL",0,0,"Step 3449, loss: 0.6314793825149536, step time: 18.187761306762695ms\r\n",,terminal_output +9402,12475919,"TERMINAL",0,0,"Step 3450, loss: 0.6715268492698669, step time: 17.86637306213379ms\r\n",,terminal_output +9403,12475983,"TERMINAL",0,0,"Step 3451, loss: 0.6551046371459961, step time: 18.14746856689453ms\r\n",,terminal_output +9404,12476048,"TERMINAL",0,0,"Step 3452, loss: 0.641600489616394, step time: 18.379688262939453ms\r\n",,terminal_output +9405,12476109,"TERMINAL",0,0,"Step 3453, loss: 0.6373332738876343, step time: 18.04351806640625ms\r\n",,terminal_output +9406,12476173,"TERMINAL",0,0,"Step 3454, loss: 0.613273561000824, step time: 18.418312072753906ms\r\n",,terminal_output +9407,12476237,"TERMINAL",0,0,"Step 3455, loss: 0.6604581475257874, step time: 18.314838409423828ms\r\n",,terminal_output +9408,12476299,"TERMINAL",0,0,"Step 3456, loss: 0.6410080790519714, step time: 17.97175407409668ms\r\n",,terminal_output +9409,12476363,"TERMINAL",0,0,"Step 3457, loss: 0.7153897285461426, step time: 17.557859420776367ms\r\n",,terminal_output +9410,12476437,"TERMINAL",0,0,"Step 3458, loss: 0.7710679769515991, step time: 17.96269416809082ms\r\n",,terminal_output +9411,12476493,"TERMINAL",0,0,"Step 3459, loss: 0.6172492504119873, step time: 17.46845245361328ms\r\n",,terminal_output +9412,12476555,"TERMINAL",0,0,"Step 3460, loss: 0.8299505710601807, step time: 17.413854598999023ms\r\n",,terminal_output +9413,12476617,"TERMINAL",0,0,"Step 3461, loss: 0.6278221607208252, step time: 17.566680908203125ms\r\n",,terminal_output +9414,12476681,"TERMINAL",0,0,"Step 3462, loss: 1.035122275352478, step time: 17.356395721435547ms\r\n",,terminal_output +9415,12476745,"TERMINAL",0,0,"Step 3463, loss: 0.6492611169815063, step time: 17.24720001220703ms\r\n",,terminal_output +9416,12476846,"TERMINAL",0,0,"Step 3464, loss: 1.0799002647399902, step time: 25.691986083984375ms\r\n",,terminal_output +9417,12476907,"TERMINAL",0,0,"Step 3465, loss: 0.6265944838523865, step time: 18.30434799194336ms\r\n",,terminal_output +9418,12476969,"TERMINAL",0,0,"Step 3466, loss: 0.6526379585266113, step time: 17.56882667541504ms\r\n",,terminal_output +9419,12477029,"TERMINAL",0,0,"Step 3467, loss: 0.6465537548065186, step time: 18.065214157104492ms\r\n",,terminal_output +9420,12477136,"TERMINAL",0,0,"Step 3468, loss: 0.6444334983825684, step time: 17.313480377197266ms\r\nStep 3469, loss: 0.792404294013977, step time: 17.484664916992188ms\r\n",,terminal_output +9421,12477199,"TERMINAL",0,0,"Step 3470, loss: 0.6229084730148315, step time: 17.63463020324707ms\r\n",,terminal_output +9422,12477260,"TERMINAL",0,0,"Step 3471, loss: 0.6770672798156738, step time: 17.221450805664062ms\r\n",,terminal_output +9423,12477323,"TERMINAL",0,0,"Step 3472, loss: 0.6455245018005371, step time: 17.235755920410156ms\r\n",,terminal_output +9424,12477385,"TERMINAL",0,0,"Step 3473, loss: 0.6259572505950928, step time: 17.732620239257812ms\r\n",,terminal_output +9425,12477449,"TERMINAL",0,0,"Step 3474, loss: 0.626731276512146, step time: 17.25459098815918ms\r\n",,terminal_output +9426,12477511,"TERMINAL",0,0,"Step 3475, loss: 0.6368069052696228, step time: 17.27747917175293ms\r\n",,terminal_output +9427,12477643,"TERMINAL",0,0,"Step 3476, loss: 0.6330866813659668, step time: 17.380714416503906ms\r\nStep 3477, loss: 0.63707435131073, step time: 18.137693405151367ms\r\n",,terminal_output 
+9428,12477709,"TERMINAL",0,0,"Step 3478, loss: 0.6304335594177246, step time: 17.30799674987793ms\r\n",,terminal_output +9429,12477772,"TERMINAL",0,0,"Step 3479, loss: 0.6151230335235596, step time: 17.579317092895508ms\r\n",,terminal_output +9430,12477874,"TERMINAL",0,0,"Step 3480, loss: 0.6663818359375, step time: 17.079591751098633ms\r\n",,terminal_output +9431,12477936,"TERMINAL",0,0,"Step 3481, loss: 0.6197932958602905, step time: 17.09604263305664ms\r\n",,terminal_output +9432,12477986,"TERMINAL",0,0,"Step 3482, loss: 1.3809689283370972, step time: 17.902851104736328ms\r\n",,terminal_output +9433,12478093,"TERMINAL",0,0,"Step 3483, loss: 0.8860543370246887, step time: 17.315387725830078ms\r\nStep 3484, loss: 0.6361433863639832, step time: 17.158031463623047ms\r\n",,terminal_output +9434,12478228,"TERMINAL",0,0,"Step 3485, loss: 0.641486644744873, step time: 17.562389373779297ms\r\nStep 3486, loss: 0.6294899582862854, step time: 29.31690216064453ms\r\n",,terminal_output +9435,12478289,"TERMINAL",0,0,"Step 3487, loss: 0.6455631256103516, step time: 18.94521713256836ms\r\n",,terminal_output +9436,12478406,"TERMINAL",0,0,"Step 3488, loss: 0.6208667159080505, step time: 18.05257797241211ms\r\nStep 3489, loss: 1.3835389614105225, step time: 17.49873161315918ms\r\n",,terminal_output +9437,12478476,"TERMINAL",0,0,"Step 3490, loss: 0.736669659614563, step time: 17.38762855529785ms\r\n",,terminal_output +9438,12478538,"TERMINAL",0,0,"Step 3491, loss: 0.7193517684936523, step time: 17.734527587890625ms\r\n",,terminal_output +9439,12478651,"TERMINAL",0,0,"Step 3492, loss: 1.0319304466247559, step time: 17.360687255859375ms\r\n",,terminal_output +9440,12478663,"TERMINAL",0,0,"Step 3493, loss: 0.648921549320221, step time: 17.47727394104004ms\r\n",,terminal_output +9441,12478761,"TERMINAL",0,0,"Step 3494, loss: 0.651159405708313, step time: 17.848968505859375ms\r\n",,terminal_output +9442,12478814,"TERMINAL",0,0,"Step 3495, loss: 0.620039701461792, step time: 17.429113388061523ms\r\n",,terminal_output +9443,12478920,"TERMINAL",0,0,"Step 3496, loss: 0.9383485913276672, step time: 17.25316047668457ms\r\nStep 3497, loss: 1.8422802686691284, step time: 17.78125762939453ms\r\n",,terminal_output +9444,12479015,"TERMINAL",0,0,"Step 3498, loss: 0.6523244380950928, step time: 17.28653907775879ms\r\n",,terminal_output +9445,12479076,"TERMINAL",0,0,"Step 3499, loss: 0.6843200922012329, step time: 17.856121063232422ms\r\n",,terminal_output +9446,12481809,"TERMINAL",0,0,"Step 3500, loss: 0.6523938775062561, step time: 28.954029083251953ms\r\nStep 3501, loss: 0.6835646629333496, step time: 25.665998458862305ms\r\n",,terminal_output +9447,12481918,"TERMINAL",0,0,"Step 3502, loss: 0.7111265063285828, step time: 20.188331604003906ms\r\n",,terminal_output +9448,12481971,"TERMINAL",0,0,"Step 3503, loss: 0.630577027797699, step time: 19.662857055664062ms\r\n",,terminal_output +9449,12482078,"TERMINAL",0,0,"Step 3504, loss: 0.651500403881073, step time: 19.310712814331055ms\r\nStep 3505, loss: 0.6533471345901489, step time: 18.551111221313477ms\r\n",,terminal_output +9450,12482174,"TERMINAL",0,0,"Step 3506, loss: 0.65799880027771, step time: 18.22519302368164ms\r\n",,terminal_output +9451,12482228,"TERMINAL",0,0,"Step 3507, loss: 0.6368314623832703, step time: 19.194602966308594ms\r\n",,terminal_output +9452,12482335,"TERMINAL",0,0,"Step 3508, loss: 0.6441951394081116, step time: 18.544673919677734ms\r\nStep 3509, loss: 0.6267582178115845, step time: 18.646240234375ms\r\n",,terminal_output 
+9453,12482401,"TERMINAL",0,0,"Step 3510, loss: 0.9303170442581177, step time: 18.784761428833008ms\r\n",,terminal_output +9454,12482484,"TERMINAL",0,0,"Step 3511, loss: 0.6483383774757385, step time: 18.862009048461914ms\r\n",,terminal_output +9455,12482543,"TERMINAL",0,0,"Step 3512, loss: 0.6324969530105591, step time: 18.437862396240234ms\r\n",,terminal_output +9456,12482605,"TERMINAL",0,0,"Step 3513, loss: 0.6511791944503784, step time: 18.650531768798828ms\r\n",,terminal_output +9457,12482669,"TERMINAL",0,0,"Step 3514, loss: 0.6216821074485779, step time: 18.192052841186523ms\r\n",,terminal_output +9458,12482773,"TERMINAL",0,0,"Step 3515, loss: 0.6362032890319824, step time: 18.199682235717773ms\r\n",,terminal_output +9459,12482785,"TERMINAL",0,0,"Step 3516, loss: 0.6544057726860046, step time: 17.640352249145508ms\r\n",,terminal_output +9460,12482883,"TERMINAL",0,0,"Step 3517, loss: 0.6326199173927307, step time: 17.474889755249023ms\r\n",,terminal_output +9461,12482946,"TERMINAL",0,0,"Step 3518, loss: 0.6035759449005127, step time: 18.50414276123047ms\r\n",,terminal_output +9462,12483010,"TERMINAL",0,0,"Step 3519, loss: 0.8492881059646606, step time: 18.65530014038086ms\r\n",,terminal_output +9463,12483073,"TERMINAL",0,0,"Step 3520, loss: 0.6156255602836609, step time: 17.478466033935547ms\r\n",,terminal_output +9464,12483139,"TERMINAL",0,0,"Step 3521, loss: 0.6137776374816895, step time: 17.262935638427734ms\r\n",,terminal_output +9465,12483200,"TERMINAL",0,0,"Step 3522, loss: 0.601787805557251, step time: 17.74287223815918ms\r\n",,terminal_output +9466,12483264,"TERMINAL",0,0,"Step 3523, loss: 1.0409537553787231, step time: 17.58098602294922ms\r\n",,terminal_output +9467,12483370,"TERMINAL",0,0,"Step 3524, loss: 0.6083652377128601, step time: 17.386436462402344ms\r\nStep 3525, loss: 0.6274896860122681, step time: 18.1121826171875ms\r\n",,terminal_output +9468,12483483,"TERMINAL",0,0,"Step 3526, loss: 0.6626960039138794, step time: 17.131567001342773ms\r\nStep 3527, loss: 0.6155005097389221, step time: 17.185688018798828ms\r\n",,terminal_output +9469,12483548,"TERMINAL",0,0,"Step 3528, loss: 0.6481289863586426, step time: 17.5778865814209ms\r\n",,terminal_output +9470,12483663,"TERMINAL",0,0,"Step 3529, loss: 0.6675240397453308, step time: 17.493009567260742ms\r\n",,terminal_output +9471,12483677,"TERMINAL",0,0,"Step 3530, loss: 0.6497655510902405, step time: 17.41313934326172ms\r\n",,terminal_output +9472,12483744,"TERMINAL",0,0,"Step 3531, loss: 0.6110954284667969, step time: 17.844200134277344ms\r\n",,terminal_output +9473,12483807,"TERMINAL",0,0,"Step 3532, loss: 0.6214988827705383, step time: 17.404794692993164ms\r\n",,terminal_output +9474,12483869,"TERMINAL",0,0,"Step 3533, loss: 0.6048597693443298, step time: 17.132997512817383ms\r\n",,terminal_output +9475,12483937,"TERMINAL",0,0,"Step 3534, loss: 0.63303142786026, step time: 17.539024353027344ms\r\n",,terminal_output +9476,12484030,"TERMINAL",0,0,"Step 3535, loss: 0.6085760593414307, step time: 17.247676849365234ms\r\n",,terminal_output +9477,12484137,"TERMINAL",0,0,"Step 3536, loss: 0.6127177476882935, step time: 17.390012741088867ms\r\nStep 3537, loss: 0.6986814737319946, step time: 19.31285858154297ms\r\n",,terminal_output +9478,12484201,"TERMINAL",0,0,"Step 3538, loss: 0.656207799911499, step time: 17.841815948486328ms\r\n",,terminal_output +9479,12484264,"TERMINAL",0,0,"Step 3539, loss: 0.6217910647392273, step time: 17.587900161743164ms\r\n",,terminal_output +9480,12484328,"TERMINAL",0,0,"Step 3540, loss: 
0.6413928866386414, step time: 17.490863800048828ms\r\n",,terminal_output +9481,12484392,"TERMINAL",0,0,"Step 3541, loss: 0.5879388451576233, step time: 17.5015926361084ms\r\n",,terminal_output +9482,12484467,"TERMINAL",0,0,"Step 3542, loss: 0.6163845658302307, step time: 17.549753189086914ms\r\n",,terminal_output +9483,12484532,"TERMINAL",0,0,"Step 3543, loss: 0.5877510905265808, step time: 17.79627799987793ms\r\n",,terminal_output +9484,12484595,"TERMINAL",0,0,"Step 3544, loss: 0.6080242991447449, step time: 17.114639282226562ms\r\n",,terminal_output +9485,12484648,"TERMINAL",0,0,"Step 3545, loss: 0.6469610929489136, step time: 17.2727108001709ms\r\n",,terminal_output +9486,12484758,"TERMINAL",0,0,"Step 3546, loss: 0.6382428407669067, step time: 17.28963851928711ms\r\nStep 3547, loss: 0.585652232170105, step time: 17.398595809936523ms\r\n",,terminal_output +9487,12484824,"TERMINAL",0,0,"Step 3548, loss: 0.6028010845184326, step time: 17.3490047454834ms\r\n",,terminal_output +9488,12484889,"TERMINAL",0,0,"Step 3549, loss: 0.601521372795105, step time: 17.508506774902344ms\r\n",,terminal_output +9489,12484983,"TERMINAL",0,0,"Step 3550, loss: 0.5857531428337097, step time: 17.467498779296875ms\r\n",,terminal_output +9490,12485036,"TERMINAL",0,0,"Step 3551, loss: 0.6097555160522461, step time: 17.316579818725586ms\r\n",,terminal_output +9491,12485140,"TERMINAL",0,0,"Step 3552, loss: 0.6585080027580261, step time: 17.43006706237793ms\r\nStep 3553, loss: 0.5774202346801758, step time: 17.47751235961914ms\r\n",,terminal_output +9492,12485232,"TERMINAL",0,0,"Step 3554, loss: 0.5783625841140747, step time: 17.460346221923828ms\r\n",,terminal_output +9493,12485283,"TERMINAL",0,0,"Step 3555, loss: 0.7239235043525696, step time: 17.84205436706543ms\r\n",,terminal_output +9494,12485376,"TERMINAL",0,0,"Step 3556, loss: 0.5939116477966309, step time: 17.609357833862305ms\r\n",,terminal_output +9495,12485429,"TERMINAL",0,0,"Step 3557, loss: 0.5883265733718872, step time: 17.47298240661621ms\r\n",,terminal_output +9496,12485534,"TERMINAL",0,0,"Step 3558, loss: 0.6499732136726379, step time: 17.672300338745117ms\r\nStep 3559, loss: 0.8561148047447205, step time: 17.236709594726562ms\r\n",,terminal_output +9497,12485632,"TERMINAL",0,0,"Step 3560, loss: 0.5994701981544495, step time: 17.60697364807129ms\r\n",,terminal_output +9498,12485684,"TERMINAL",0,0,"Step 3561, loss: 0.59061199426651, step time: 17.645597457885742ms\r\n",,terminal_output +9499,12485779,"TERMINAL",0,0,"Step 3562, loss: 0.5824797749519348, step time: 17.530202865600586ms\r\nStep 3563, loss: 0.5715084075927734, step time: 17.200231552124023ms\r\n",,terminal_output +9500,12485840,"TERMINAL",0,0,"Step 3564, loss: 0.5905726552009583, step time: 17.653703689575195ms\r\n",,terminal_output +9501,12485908,"TERMINAL",0,0,"Step 3565, loss: 0.6116973161697388, step time: 17.412185668945312ms\r\n",,terminal_output +9502,12485972,"TERMINAL",0,0,"Step 3566, loss: 0.5839132070541382, step time: 17.912864685058594ms\r\n",,terminal_output +9503,12486035,"TERMINAL",0,0,"Step 3567, loss: 1.1984796524047852, step time: 17.737627029418945ms\r\n",,terminal_output +9504,12486096,"TERMINAL",0,0,"Step 3568, loss: 0.5665317177772522, step time: 17.733097076416016ms\r\n",,terminal_output +9505,12486164,"TERMINAL",0,0,"Step 3569, loss: 0.5674810409545898, step time: 17.78268814086914ms\r\n",,terminal_output +9506,12486234,"TERMINAL",0,0,"Step 3570, loss: 0.5789042711257935, step time: 17.971038818359375ms\r\n",,terminal_output +9507,12486297,"TERMINAL",0,0,"Step 
3571, loss: 1.0574201345443726, step time: 17.548799514770508ms\r\n",,terminal_output +9508,12486360,"TERMINAL",0,0,"Step 3572, loss: 0.575903594493866, step time: 17.704248428344727ms\r\n",,terminal_output +9509,12486425,"TERMINAL",0,0,"Step 3573, loss: 1.0636001825332642, step time: 17.86494255065918ms\r\n",,terminal_output +9510,12486485,"TERMINAL",0,0,"Step 3574, loss: 0.7024598717689514, step time: 17.71068572998047ms\r\n",,terminal_output +9511,12486548,"TERMINAL",0,0,"Step 3575, loss: 0.6057440042495728, step time: 17.611980438232422ms\r\n",,terminal_output +9512,12486612,"TERMINAL",0,0,"Step 3576, loss: 1.1936612129211426, step time: 17.539262771606445ms\r\n",,terminal_output +9513,12486671,"TERMINAL",0,0,"Step 3577, loss: 0.6776776909828186, step time: 17.483234405517578ms\r\n",,terminal_output +9514,12486743,"TERMINAL",0,0,"Step 3578, loss: 0.6122671961784363, step time: 17.43149757385254ms\r\n",,terminal_output +9515,12486811,"TERMINAL",0,0,"Step 3579, loss: 0.6271798014640808, step time: 17.763137817382812ms\r\n",,terminal_output +9516,12486872,"TERMINAL",0,0,"Step 3580, loss: 0.593533456325531, step time: 19.7141170501709ms\r\n",,terminal_output +9517,12486936,"TERMINAL",0,0,"Step 3581, loss: 0.5970282554626465, step time: 18.196582794189453ms\r\n",,terminal_output +9518,12486999,"TERMINAL",0,0,"Step 3582, loss: 0.6119205355644226, step time: 17.968416213989258ms\r\n",,terminal_output +9519,12487095,"TERMINAL",0,0,"Step 3583, loss: 0.6072080135345459, step time: 17.76909828186035ms\r\n",,terminal_output +9520,12487148,"TERMINAL",0,0,"Step 3584, loss: 0.6152064204216003, step time: 17.590045928955078ms\r\n",,terminal_output +9521,12487258,"TERMINAL",0,0,"Step 3585, loss: 0.641463041305542, step time: 17.965078353881836ms\r\nStep 3586, loss: 0.5784457921981812, step time: 17.9598331451416ms\r\n",,terminal_output +9522,12487353,"TERMINAL",0,0,"Step 3587, loss: 0.632764458656311, step time: 17.772197723388672ms\r\n",,terminal_output +9523,12487405,"TERMINAL",0,0,"Step 3588, loss: 0.5833030343055725, step time: 17.642736434936523ms\r\n",,terminal_output +9524,12487508,"TERMINAL",0,0,"Step 3589, loss: 0.6225164532661438, step time: 17.847061157226562ms\r\nStep 3590, loss: 0.5862085223197937, step time: 17.6694393157959ms\r\n",,terminal_output +9525,12487569,"TERMINAL",0,0,"Step 3591, loss: 0.6137374639511108, step time: 18.175601959228516ms\r\n",,terminal_output +9526,12487630,"TERMINAL",0,0,"Step 3592, loss: 0.6161337494850159, step time: 17.462730407714844ms\r\n",,terminal_output +9527,12487691,"TERMINAL",0,0,"Step 3593, loss: 0.6127504110336304, step time: 17.73381233215332ms\r\n",,terminal_output +9528,12487755,"TERMINAL",0,0,"Step 3594, loss: 0.6136842966079712, step time: 17.792463302612305ms\r\n",,terminal_output +9529,12487822,"TERMINAL",0,0,"Step 3595, loss: 0.587609589099884, step time: 18.5086727142334ms\r\n",,terminal_output +9530,12487891,"TERMINAL",0,0,"Step 3596, loss: 0.7284563779830933, step time: 17.56763458251953ms\r\n",,terminal_output +9531,12487953,"TERMINAL",0,0,"Step 3597, loss: 0.5950003266334534, step time: 17.844676971435547ms\r\n",,terminal_output +9532,12488015,"TERMINAL",0,0,"Step 3598, loss: 0.5808073878288269, step time: 17.67730712890625ms\r\n",,terminal_output +9533,12488108,"TERMINAL",0,0,"Step 3599, loss: 0.5719313025474548, step time: 17.80080795288086ms\r\n",,terminal_output +9534,12488159,"TERMINAL",0,0,"Step 3600, loss: 1.3298567533493042, step time: 17.617225646972656ms\r\n",,terminal_output +9535,12488252,"TERMINAL",0,0,"Step 3601, loss: 
0.5727757811546326, step time: 17.687320709228516ms\r\n",,terminal_output +9536,12488304,"TERMINAL",0,0,"Step 3602, loss: 0.5737887024879456, step time: 17.668962478637695ms\r\n",,terminal_output +9537,12488370,"TERMINAL",0,0,"Step 3603, loss: 0.5823802351951599, step time: 17.869949340820312ms\r\n",,terminal_output +9538,12488466,"TERMINAL",0,0,"Step 3604, loss: 0.7820863723754883, step time: 17.58122444152832ms\r\nStep 3605, loss: 0.5798802971839905, step time: 17.709970474243164ms\r\n",,terminal_output +9539,12488758,"TERMINAL",0,0,"Step 3606, loss: 0.8039788007736206, step time: 297.18947410583496ms\r\n",,terminal_output +9540,12488826,"TERMINAL",0,0,"Step 3607, loss: 0.6059080362319946, step time: 25.454044342041016ms\r\n",,terminal_output +9541,12488888,"TERMINAL",0,0,"Step 3608, loss: 0.5821679830551147, step time: 20.02263069152832ms\r\n",,terminal_output +9542,12488951,"TERMINAL",0,0,"Step 3609, loss: 0.6208439469337463, step time: 18.818140029907227ms\r\n",,terminal_output +9543,12489027,"TERMINAL",0,0,"Step 3610, loss: 0.5678666234016418, step time: 18.063783645629883ms\r\n",,terminal_output +9544,12489092,"TERMINAL",0,0,"Step 3611, loss: 0.5839845538139343, step time: 17.71855354309082ms\r\n",,terminal_output +9545,12489153,"TERMINAL",0,0,"Step 3612, loss: 0.5912941694259644, step time: 19.159317016601562ms\r\n",,terminal_output +9546,12489216,"TERMINAL",0,0,"Step 3613, loss: 0.6987298130989075, step time: 18.0206298828125ms\r\n",,terminal_output +9547,12489277,"TERMINAL",0,0,"Step 3614, loss: 0.6661478877067566, step time: 18.019914627075195ms\r\n",,terminal_output +9548,12489339,"TERMINAL",0,0,"Step 3615, loss: 0.5658619403839111, step time: 18.09406280517578ms\r\n",,terminal_output +9549,12489400,"TERMINAL",0,0,"Step 3616, loss: 0.5770571231842041, step time: 17.848491668701172ms\r\n",,terminal_output +9550,12489465,"TERMINAL",0,0,"Step 3617, loss: 0.7274704575538635, step time: 17.84038543701172ms\r\n",,terminal_output +9551,12489562,"TERMINAL",0,0,"Step 3618, loss: 0.5837101936340332, step time: 18.12005043029785ms\r\n",,terminal_output +9552,12489612,"TERMINAL",0,0,"Step 3619, loss: 0.5721889734268188, step time: 17.686843872070312ms\r\n",,terminal_output +9553,12489676,"TERMINAL",0,0,"Step 3620, loss: 0.57725590467453, step time: 17.74001121520996ms\r\n",,terminal_output +9554,12489738,"TERMINAL",0,0,"Step 3621, loss: 0.5956520438194275, step time: 17.89236068725586ms\r\n",,terminal_output +9555,12489800,"TERMINAL",0,0,"Step 3622, loss: 0.5678858757019043, step time: 17.95029640197754ms\r\n",,terminal_output +9556,12489864,"TERMINAL",0,0,"Step 3623, loss: 0.5839600563049316, step time: 17.511606216430664ms\r\n",,terminal_output +9557,12489926,"TERMINAL",0,0,"Step 3624, loss: 0.5540775060653687, step time: 17.938852310180664ms\r\n",,terminal_output +9558,12489994,"TERMINAL",0,0,"Step 3625, loss: 0.5560829639434814, step time: 17.54617691040039ms\r\n",,terminal_output +9559,12490053,"TERMINAL",0,0,"Step 3626, loss: 0.6299954056739807, step time: 17.726898193359375ms\r\n",,terminal_output +9560,12490146,"TERMINAL",0,0,"Step 3627, loss: 0.5965279340744019, step time: 18.185138702392578ms\r\n",,terminal_output +9561,12490197,"TERMINAL",0,0,"Step 3628, loss: 0.5568459033966064, step time: 17.95053482055664ms\r\n",,terminal_output +9562,12490301,"TERMINAL",0,0,"Step 3629, loss: 0.5969396829605103, step time: 17.755746841430664ms\r\nStep 3630, loss: 0.5538972616195679, step time: 17.964839935302734ms\r\n",,terminal_output +9563,12490426,"TERMINAL",0,0,"Step 3631, loss: 
0.5646158456802368, step time: 17.65298843383789ms\r\nStep 3632, loss: 0.6085208058357239, step time: 17.844200134277344ms\r\n",,terminal_output +9564,12490520,"TERMINAL",0,0,"Step 3633, loss: 0.5594251751899719, step time: 18.16415786743164ms\r\n",,terminal_output +9565,12490573,"TERMINAL",0,0,"Step 3634, loss: 0.5825685262680054, step time: 17.922163009643555ms\r\n",,terminal_output +9566,12490666,"TERMINAL",0,0,"Step 3635, loss: 0.5474445223808289, step time: 17.374038696289062ms\r\n",,terminal_output +9567,12490718,"TERMINAL",0,0,"Step 3636, loss: 0.5644654631614685, step time: 17.999887466430664ms\r\n",,terminal_output +9568,12490804,"TERMINAL",0,0,"Step 3637, loss: 0.5578233003616333, step time: 17.90642738342285ms\r\nStep 3638, loss: 0.5999698042869568, step time: 17.464876174926758ms\r\n",,terminal_output +9569,12490899,"TERMINAL",0,0,"Step 3639, loss: 0.8263399004936218, step time: 18.0051326751709ms\r\n",,terminal_output +9570,12491004,"TERMINAL",0,0,"Step 3640, loss: 0.5559206008911133, step time: 17.70472526550293ms\r\nStep 3641, loss: 1.0631475448608398, step time: 17.851829528808594ms\r\n",,terminal_output +9571,12491074,"TERMINAL",0,0,"Step 3642, loss: 0.5776113271713257, step time: 19.37580108642578ms\r\n",,terminal_output +9572,12491128,"TERMINAL",0,0,"Step 3643, loss: 0.5581129193305969, step time: 19.29187774658203ms\r\n",,terminal_output +9573,12491226,"TERMINAL",0,0,"Step 3644, loss: 0.5607265830039978, step time: 18.12577247619629ms\r\n",,terminal_output +9574,12491276,"TERMINAL",0,0,"Step 3645, loss: 0.5553498864173889, step time: 18.046855926513672ms\r\n",,terminal_output +9575,12491380,"TERMINAL",0,0,"Step 3646, loss: 0.5450147986412048, step time: 17.81320571899414ms\r\nStep 3647, loss: 0.5527924299240112, step time: 17.540693283081055ms\r\n",,terminal_output +9576,12491480,"TERMINAL",0,0,"Step 3648, loss: 0.5462024807929993, step time: 18.029212951660156ms\r\n",,terminal_output +9577,12491530,"TERMINAL",0,0,"Step 3649, loss: 0.5895618200302124, step time: 17.771244049072266ms\r\n",,terminal_output +9578,12491622,"TERMINAL",0,0,"Step 3650, loss: 0.5729874968528748, step time: 17.531871795654297ms\r\n",,terminal_output +9579,12491675,"TERMINAL",0,0,"Step 3651, loss: 0.5460999011993408, step time: 18.302202224731445ms\r\n",,terminal_output +9580,12491728,"TERMINAL",0,0,"Step 3652, loss: 0.5927660465240479, step time: 17.679452896118164ms\r\n",,terminal_output +9581,12491781,"TERMINAL",0,0,"Step 3653, loss: 0.5425142049789429, step time: 17.780303955078125ms\r\n",,terminal_output +9582,12491878,"TERMINAL",0,0,"Step 3654, loss: 0.6468907594680786, step time: 17.876148223876953ms\r\n",,terminal_output +9583,12491957,"TERMINAL",0,0,"Step 3655, loss: 0.5828535556793213, step time: 17.689228057861328ms\r\nStep 3656, loss: 0.6726356148719788, step time: 19.227027893066406ms\r\n",,terminal_output +9584,12492021,"TERMINAL",0,0,"Step 3657, loss: 0.568627119064331, step time: 18.090486526489258ms\r\n",,terminal_output +9585,12492086,"TERMINAL",0,0,"Step 3658, loss: 0.6801186800003052, step time: 17.974376678466797ms\r\n",,terminal_output +9586,12492150,"TERMINAL",0,0,"Step 3659, loss: 0.883320152759552, step time: 17.626285552978516ms\r\n",,terminal_output +9587,12492213,"TERMINAL",0,0,"Step 3660, loss: 0.5378109812736511, step time: 17.625093460083008ms\r\n",,terminal_output +9588,12492278,"TERMINAL",0,0,"Step 3661, loss: 1.4063222408294678, step time: 17.91214942932129ms\r\n",,terminal_output +9589,12492342,"TERMINAL",0,0,"Step 3662, loss: 1.1181395053863525, step time: 
17.596721649169922ms\r\n",,terminal_output +9590,12492405,"TERMINAL",0,0,"Step 3663, loss: 0.5548064708709717, step time: 18.232345581054688ms\r\n",,terminal_output +9591,12492470,"TERMINAL",0,0,"Step 3664, loss: 0.7649160027503967, step time: 17.635345458984375ms\r\n",,terminal_output +9592,12492575,"TERMINAL",0,0,"Step 3665, loss: 0.5502346754074097, step time: 17.56882667541504ms\r\n",,terminal_output +9593,12492628,"TERMINAL",0,0,"Step 3666, loss: 0.5897209644317627, step time: 17.755508422851562ms\r\n",,terminal_output +9594,12492737,"TERMINAL",0,0,"Step 3667, loss: 0.5724310278892517, step time: 17.638444900512695ms\r\nStep 3668, loss: 0.5865145325660706, step time: 17.719268798828125ms\r\n",,terminal_output +9595,12492788,"TERMINAL",0,0,"Step 3669, loss: 0.5746397972106934, step time: 17.89402961730957ms\r\n",,terminal_output +9596,12492886,"TERMINAL",0,0,"Step 3670, loss: 0.6112074851989746, step time: 17.537593841552734ms\r\n",,terminal_output +9597,12492942,"TERMINAL",0,0,"Step 3671, loss: 0.5888944864273071, step time: 17.6849365234375ms\r\n",,terminal_output +9598,12493047,"TERMINAL",0,0,"Step 3672, loss: 0.5899592041969299, step time: 18.129825592041016ms\r\nStep 3673, loss: 0.5836943984031677, step time: 17.515897750854492ms\r\n",,terminal_output +9599,12493111,"TERMINAL",0,0,"Step 3674, loss: 0.5583611130714417, step time: 17.718791961669922ms\r\n",,terminal_output +9600,12493172,"TERMINAL",0,0,"Step 3675, loss: 0.6084800958633423, step time: 17.83156394958496ms\r\n",,terminal_output +9601,12493235,"TERMINAL",0,0,"Step 3676, loss: 0.5495439171791077, step time: 17.361164093017578ms\r\n",,terminal_output +9602,12493305,"TERMINAL",0,0,"Step 3677, loss: 0.5627337098121643, step time: 17.36283302307129ms\r\n",,terminal_output +9603,12493363,"TERMINAL",0,0,"Step 3678, loss: 0.55470871925354, step time: 17.57192611694336ms\r\n",,terminal_output +9604,12493431,"TERMINAL",0,0,"Step 3679, loss: 0.6519596576690674, step time: 17.49134063720703ms\r\n",,terminal_output +9605,12493495,"TERMINAL",0,0,"Step 3680, loss: 0.5553207397460938, step time: 17.456769943237305ms\r\n",,terminal_output +9606,12493560,"TERMINAL",0,0,"Step 3681, loss: 0.5411044955253601, step time: 18.402099609375ms\r\n",,terminal_output +9607,12493627,"TERMINAL",0,0,"Step 3682, loss: 0.5372430682182312, step time: 17.46225357055664ms\r\n",,terminal_output +9608,12493679,"TERMINAL",0,0,"Step 3683, loss: 0.5398059487342834, step time: 17.21501350402832ms\r\n",,terminal_output +9609,12493745,"TERMINAL",0,0,"Step 3684, loss: 0.5357926487922668, step time: 17.694950103759766ms\r\n",,terminal_output +9610,12493869,"TERMINAL",0,0,"Step 3685, loss: 0.5426528453826904, step time: 17.551660537719727ms\r\nStep 3686, loss: 0.6222529411315918, step time: 17.642736434936523ms\r\n",,terminal_output +9611,12493940,"TERMINAL",0,0,"Step 3687, loss: 0.5693287253379822, step time: 17.870426177978516ms\r\n",,terminal_output +9612,12494004,"TERMINAL",0,0,"Step 3688, loss: 0.5392071604728699, step time: 17.716646194458008ms\r\n",,terminal_output +9613,12494100,"TERMINAL",0,0,"Step 3689, loss: 0.5421180725097656, step time: 17.420530319213867ms\r\n",,terminal_output +9614,12494153,"TERMINAL",0,0,"Step 3690, loss: 0.5704549551010132, step time: 17.81153678894043ms\r\n",,terminal_output +9615,12494260,"TERMINAL",0,0,"Step 3691, loss: 0.582339882850647, step time: 18.723726272583008ms\r\nStep 3692, loss: 0.5278633832931519, step time: 17.688512802124023ms\r\n",,terminal_output +9616,12494323,"TERMINAL",0,0,"Step 3693, loss: 0.5770503878593445, 
step time: 18.160104751586914ms\r\n",,terminal_output +9617,12494388,"TERMINAL",0,0,"Step 3694, loss: 0.5404582023620605, step time: 17.55213737487793ms\r\n",,terminal_output +9618,12494483,"TERMINAL",0,0,"Step 3695, loss: 0.6577937602996826, step time: 17.27294921875ms\r\n",,terminal_output +9619,12494575,"TERMINAL",0,0,"Step 3696, loss: 0.5286301374435425, step time: 17.62557029724121ms\r\nStep 3697, loss: 0.5283125638961792, step time: 17.113924026489258ms\r\n",,terminal_output +9620,12494635,"TERMINAL",0,0,"Step 3698, loss: 0.5332752466201782, step time: 17.09294319152832ms\r\n",,terminal_output +9621,12494694,"TERMINAL",0,0,"Step 3699, loss: 0.6084367036819458, step time: 17.8678035736084ms\r\n",,terminal_output +9622,12494757,"TERMINAL",0,0,"Step 3700, loss: 0.5396841764450073, step time: 17.143726348876953ms\r\n",,terminal_output +9623,12494823,"TERMINAL",0,0,"Step 3701, loss: 0.9755365252494812, step time: 17.433643341064453ms\r\n",,terminal_output +9624,12494924,"TERMINAL",0,0,"Step 3702, loss: 0.5964109897613525, step time: 17.627716064453125ms\r\n",,terminal_output +9625,12494976,"TERMINAL",0,0,"Step 3703, loss: 0.5446330308914185, step time: 17.45462417602539ms\r\n",,terminal_output +9626,12495031,"TERMINAL",0,0,"Step 3704, loss: 0.5861592888832092, step time: 17.35401153564453ms\r\n",,terminal_output +9627,12495141,"TERMINAL",0,0,"Step 3705, loss: 0.5333245992660522, step time: 17.91548728942871ms\r\nStep 3706, loss: 0.5368571281433105, step time: 17.578601837158203ms\r\n",,terminal_output +9628,12495205,"TERMINAL",0,0,"Step 3707, loss: 0.5291024446487427, step time: 17.322063446044922ms\r\n",,terminal_output +9629,12495268,"TERMINAL",0,0,"Step 3708, loss: 0.5289568901062012, step time: 17.616987228393555ms\r\n",,terminal_output +9630,12495369,"TERMINAL",0,0,"Step 3709, loss: 0.7575088143348694, step time: 17.90142059326172ms\r\n",,terminal_output +9631,12495421,"TERMINAL",0,0,"Step 3710, loss: 0.5436604022979736, step time: 17.872095108032227ms\r\n",,terminal_output +9632,12495525,"TERMINAL",0,0,"Step 3711, loss: 0.9160717725753784, step time: 17.925262451171875ms\r\nStep 3712, loss: 0.533249020576477, step time: 17.584800720214844ms\r\n",,terminal_output +9633,12495622,"TERMINAL",0,0,"Step 3713, loss: 0.558140754699707, step time: 17.5936222076416ms\r\n",,terminal_output +9634,12495678,"TERMINAL",0,0,"Step 3714, loss: 0.5812147259712219, step time: 17.716407775878906ms\r\n",,terminal_output +9635,12495783,"TERMINAL",0,0,"Step 3715, loss: 0.5624394416809082, step time: 17.15826988220215ms\r\nStep 3716, loss: 0.9961888194084167, step time: 17.601728439331055ms\r\n",,terminal_output +9636,12495848,"TERMINAL",0,0,"Step 3717, loss: 0.5328889489173889, step time: 17.926454544067383ms\r\n",,terminal_output +9637,12495906,"TERMINAL",0,0,"Step 3718, loss: 0.526031494140625, step time: 17.24982261657715ms\r\n",,terminal_output +9638,12495969,"TERMINAL",0,0,"Step 3719, loss: 0.634738564491272, step time: 17.58098602294922ms\r\n",,terminal_output +9639,12496038,"TERMINAL",0,0,"Step 3720, loss: 0.715407133102417, step time: 18.082857131958008ms\r\n",,terminal_output +9640,12496093,"TERMINAL",0,0,"Step 3721, loss: 0.5885801315307617, step time: 17.385005950927734ms\r\n",,terminal_output +9641,12496186,"TERMINAL",0,0,"Step 3722, loss: 0.7939266562461853, step time: 17.562150955200195ms\r\n",,terminal_output +9642,12496239,"TERMINAL",0,0,"Step 3723, loss: 0.5332267880439758, step time: 18.07570457458496ms\r\n",,terminal_output +9643,12496334,"TERMINAL",0,0,"Step 3724, loss: 
0.5610411763191223, step time: 17.322301864624023ms\r\n",,terminal_output +9644,12496410,"TERMINAL",0,0,"Step 3725, loss: 0.5505979061126709, step time: 17.279863357543945ms\r\nStep 3726, loss: 0.5467742681503296, step time: 17.688751220703125ms\r\n",,terminal_output +9645,12496474,"TERMINAL",0,0,"Step 3727, loss: 0.5661014914512634, step time: 17.2121524810791ms\r\n",,terminal_output +9646,12496542,"TERMINAL",0,0,"Step 3728, loss: 0.5554651618003845, step time: 17.237186431884766ms\r\n",,terminal_output +9647,12496603,"TERMINAL",0,0,"Step 3729, loss: 0.5415982007980347, step time: 17.552852630615234ms\r\n",,terminal_output +9648,12496669,"TERMINAL",0,0,"Step 3730, loss: 0.5474806427955627, step time: 17.380952835083008ms\r\n",,terminal_output +9649,12496736,"TERMINAL",0,0,"Step 3731, loss: 0.5433210730552673, step time: 17.649412155151367ms\r\n",,terminal_output +9650,12496788,"TERMINAL",0,0,"Step 3732, loss: 0.5466967821121216, step time: 17.32611656188965ms\r\n",,terminal_output +9651,12496882,"TERMINAL",0,0,"Step 3733, loss: 0.596850574016571, step time: 19.69146728515625ms\r\n",,terminal_output +9652,12496989,"TERMINAL",0,0,"Step 3734, loss: 0.5352545380592346, step time: 17.790555953979492ms\r\nStep 3735, loss: 0.5169112086296082, step time: 17.769336700439453ms\r\n",,terminal_output +9653,12497049,"TERMINAL",0,0,"Step 3736, loss: 0.8762649297714233, step time: 17.58432388305664ms\r\n",,terminal_output +9654,12497109,"TERMINAL",0,0,"Step 3737, loss: 0.5834501385688782, step time: 17.44818687438965ms\r\n",,terminal_output +9655,12497175,"TERMINAL",0,0,"Step 3738, loss: 0.5245290994644165, step time: 17.61937141418457ms\r\n",,terminal_output +9656,12497237,"TERMINAL",0,0,"Step 3739, loss: 0.5490208864212036, step time: 17.51255989074707ms\r\n",,terminal_output +9657,12497303,"TERMINAL",0,0,"Step 3740, loss: 0.5283023118972778, step time: 17.27914810180664ms\r\n",,terminal_output +9658,12497625,"TERMINAL",0,0,"Step 3741, loss: 0.5269209146499634, step time: 297.9698181152344ms\r\n",,terminal_output +9659,12497678,"TERMINAL",0,0,"Step 3742, loss: 0.5125319361686707, step time: 24.939298629760742ms\r\n",,terminal_output +9660,12497797,"TERMINAL",0,0,"Step 3743, loss: 0.5860445499420166, step time: 20.191192626953125ms\r\nStep 3744, loss: 0.5445102453231812, step time: 18.512487411499023ms\r\n",,terminal_output +9661,12497925,"TERMINAL",0,0,"Step 3745, loss: 0.515649676322937, step time: 17.684459686279297ms\r\nStep 3746, loss: 0.5136951804161072, step time: 17.713069915771484ms\r\n",,terminal_output +9662,12497987,"TERMINAL",0,0,"Step 3747, loss: 0.5273823738098145, step time: 19.063949584960938ms\r\n",,terminal_output +9663,12498055,"TERMINAL",0,0,"Step 3748, loss: 0.5112739205360413, step time: 17.620086669921875ms\r\n",,terminal_output +9664,12498117,"TERMINAL",0,0,"Step 3749, loss: 0.5131238698959351, step time: 17.459869384765625ms\r\n",,terminal_output +9665,12498217,"TERMINAL",0,0,"Step 3750, loss: 0.5196529626846313, step time: 17.737150192260742ms\r\n",,terminal_output +9666,12498263,"TERMINAL",0,0,"Step 3751, loss: 0.6627670526504517, step time: 17.31705665588379ms\r\n",,terminal_output +9667,12498367,"TERMINAL",0,0,"Step 3752, loss: 0.5096257328987122, step time: 17.468929290771484ms\r\nStep 3753, loss: 0.5428776741027832, step time: 18.007278442382812ms\r\n",,terminal_output +9668,12498429,"TERMINAL",0,0,"Step 3754, loss: 0.5746314525604248, step time: 17.381668090820312ms\r\n",,terminal_output +9669,12498493,"TERMINAL",0,0,"Step 3755, loss: 0.5319900512695312, step time: 
17.276287078857422ms\r\n",,terminal_output +9670,12498559,"TERMINAL",0,0,"Step 3756, loss: 0.5133414268493652, step time: 17.74001121520996ms\r\n",,terminal_output +9671,12498633,"TERMINAL",0,0,"Step 3757, loss: 0.5206695199012756, step time: 17.4863338470459ms\r\n",,terminal_output +9672,12498685,"TERMINAL",0,0,"Step 3758, loss: 0.5169530510902405, step time: 17.441749572753906ms\r\n",,terminal_output +9673,12498748,"TERMINAL",0,0,"Step 3759, loss: 0.5180447697639465, step time: 17.99917221069336ms\r\n",,terminal_output +9674,12498813,"TERMINAL",0,0,"Step 3760, loss: 0.5129595994949341, step time: 17.55070686340332ms\r\n",,terminal_output +9675,12498872,"TERMINAL",0,0,"Step 3761, loss: 0.516817569732666, step time: 17.413616180419922ms\r\n",,terminal_output +9676,12498937,"TERMINAL",0,0,"Step 3762, loss: 0.5133233070373535, step time: 17.69852638244629ms\r\n",,terminal_output +9677,12498998,"TERMINAL",0,0,"Step 3763, loss: 0.5644176006317139, step time: 17.486572265625ms\r\n",,terminal_output +9678,12499062,"TERMINAL",0,0,"Step 3764, loss: 0.5529967546463013, step time: 17.81940460205078ms\r\n",,terminal_output +9679,12499126,"TERMINAL",0,0,"Step 3765, loss: 0.5650733709335327, step time: 17.771005630493164ms\r\n",,terminal_output +9680,12499195,"TERMINAL",0,0,"Step 3766, loss: 0.8939201831817627, step time: 18.62955093383789ms\r\n",,terminal_output +9681,12499266,"TERMINAL",0,0,"Step 3767, loss: 0.721724808216095, step time: 17.958879470825195ms\r\n",,terminal_output +9682,12499326,"TERMINAL",0,0,"Step 3768, loss: 0.5217231512069702, step time: 18.00251007080078ms\r\n",,terminal_output +9683,12499387,"TERMINAL",0,0,"Step 3769, loss: 0.5061210989952087, step time: 17.579317092895508ms\r\n",,terminal_output +9684,12499449,"TERMINAL",0,0,"Step 3770, loss: 0.5125647187232971, step time: 17.660856246948242ms\r\n",,terminal_output +9685,12499514,"TERMINAL",0,0,"Step 3771, loss: 2.0207202434539795, step time: 17.94600486755371ms\r\n",,terminal_output +9686,12499576,"TERMINAL",0,0,"Step 3772, loss: 0.5145351886749268, step time: 17.45748519897461ms\r\n",,terminal_output +9687,12499640,"TERMINAL",0,0,"Step 3773, loss: 0.5293834209442139, step time: 17.54140853881836ms\r\n",,terminal_output +9688,12499701,"TERMINAL",0,0,"Step 3774, loss: 0.5359043478965759, step time: 17.684221267700195ms\r\n",,terminal_output +9689,12499765,"TERMINAL",0,0,"Step 3775, loss: 0.6546307802200317, step time: 17.529010772705078ms\r\n",,terminal_output +9690,12499830,"TERMINAL",0,0,"Step 3776, loss: 0.7989087700843811, step time: 17.394304275512695ms\r\n",,terminal_output +9691,12499891,"TERMINAL",0,0,"Step 3777, loss: 0.522495448589325, step time: 18.19443702697754ms\r\n",,terminal_output +9692,12499953,"TERMINAL",0,0,"Step 3778, loss: 0.5713529586791992, step time: 17.38715171813965ms\r\n",,terminal_output +9693,12500016,"TERMINAL",0,0,"Step 3779, loss: 0.5163617134094238, step time: 18.253326416015625ms\r\n",,terminal_output +9694,12500113,"TERMINAL",0,0,"Step 3780, loss: 0.5251612663269043, step time: 17.832279205322266ms\r\n",,terminal_output +9695,12500165,"TERMINAL",0,0,"Step 3781, loss: 0.5573549270629883, step time: 17.71068572998047ms\r\n",,terminal_output +9696,12500271,"TERMINAL",0,0,"Step 3782, loss: 0.5699422955513, step time: 17.641305923461914ms\r\nStep 3783, loss: 0.5124651193618774, step time: 17.9440975189209ms\r\n",,terminal_output +9697,12500333,"TERMINAL",0,0,"Step 3784, loss: 0.5476704835891724, step time: 17.582416534423828ms\r\n",,terminal_output +9698,12500425,"TERMINAL",0,0,"Step 3785, loss: 
0.5518194437026978, step time: 17.451047897338867ms\r\n",,terminal_output +9699,12500534,"TERMINAL",0,0,"Step 3786, loss: 1.0956692695617676, step time: 17.894983291625977ms\r\nStep 3787, loss: 0.5112495422363281, step time: 17.748117446899414ms\r\n",,terminal_output +9700,12500597,"TERMINAL",0,0,"Step 3788, loss: 0.5671960711479187, step time: 17.44866371154785ms\r\n",,terminal_output +9701,12500726,"TERMINAL",0,0,"Step 3789, loss: 0.5114923119544983, step time: 18.425941467285156ms\r\nStep 3790, loss: 0.5151346921920776, step time: 17.89402961730957ms\r\n",,terminal_output +9702,12500779,"TERMINAL",0,0,"Step 3791, loss: 0.5760080814361572, step time: 17.664670944213867ms\r\n",,terminal_output +9703,12500846,"TERMINAL",0,0,"Step 3792, loss: 0.8570379018783569, step time: 17.62676239013672ms\r\n",,terminal_output +9704,12500920,"TERMINAL",0,0,"Step 3793, loss: 0.6304656267166138, step time: 17.733335494995117ms\r\n",,terminal_output +9705,12500993,"TERMINAL",0,0,"Step 3794, loss: 0.5109800696372986, step time: 17.667055130004883ms\r\n",,terminal_output +9706,12501097,"TERMINAL",0,0,"Step 3795, loss: 0.5111283659934998, step time: 18.011093139648438ms\r\nStep 3796, loss: 0.6455307006835938, step time: 17.313480377197266ms\r\n",,terminal_output +9707,12501158,"TERMINAL",0,0,"Step 3797, loss: 0.5295989513397217, step time: 17.84038543701172ms\r\n",,terminal_output +9708,12501221,"TERMINAL",0,0,"Step 3798, loss: 0.5240065455436707, step time: 17.681121826171875ms\r\n",,terminal_output +9709,12501293,"TERMINAL",0,0,"Step 3799, loss: 0.505134105682373, step time: 20.502090454101562ms\r\n",,terminal_output +9710,12501357,"TERMINAL",0,0,"Step 3800, loss: 0.49903440475463867, step time: 18.245458602905273ms\r\n",,terminal_output +9711,12501449,"TERMINAL",0,0,"Step 3801, loss: 0.4996604323387146, step time: 18.224000930786133ms\r\n",,terminal_output +9712,12501503,"TERMINAL",0,0,"Step 3802, loss: 0.5200759768486023, step time: 17.891407012939453ms\r\n",,terminal_output +9713,12501606,"TERMINAL",0,0,"Step 3803, loss: 0.629906177520752, step time: 17.8067684173584ms\r\nStep 3804, loss: 0.5513630509376526, step time: 18.00084114074707ms\r\n",,terminal_output +9714,12501699,"TERMINAL",0,0,"Step 3805, loss: 0.5157216191291809, step time: 21.817684173583984ms\r\n",,terminal_output +9715,12501807,"TERMINAL",0,0,"Step 3806, loss: 0.7372964024543762, step time: 23.597002029418945ms\r\nStep 3807, loss: 0.5624346137046814, step time: 25.50983428955078ms\r\n",,terminal_output +9716,12501872,"TERMINAL",0,0,"Step 3808, loss: 0.5176101326942444, step time: 25.48050880432129ms\r\n",,terminal_output +9717,12501973,"TERMINAL",0,0,"Step 3809, loss: 0.5005306601524353, step time: 25.5279541015625ms\r\n",,terminal_output +9718,12502026,"TERMINAL",0,0,"Step 3810, loss: 0.5037626624107361, step time: 25.713205337524414ms\r\n",,terminal_output +9719,12502120,"TERMINAL",0,0,"Step 3811, loss: 0.6041675209999084, step time: 25.664329528808594ms\r\n",,terminal_output +9720,12502171,"TERMINAL",0,0,"Step 3812, loss: 0.5606538653373718, step time: 25.838136672973633ms\r\n",,terminal_output +9721,12502279,"TERMINAL",0,0,"Step 3813, loss: 0.5225651264190674, step time: 25.81310272216797ms\r\nStep 3814, loss: 0.5313342809677124, step time: 20.625829696655273ms\r\n",,terminal_output +9722,12502342,"TERMINAL",0,0,"Step 3815, loss: 0.49755313992500305, step time: 18.76688003540039ms\r\n",,terminal_output +9723,12502406,"TERMINAL",0,0,"Step 3816, loss: 0.5651124119758606, step time: 18.225431442260742ms\r\n",,terminal_output 
+9724,12502468,"TERMINAL",0,0,"Step 3817, loss: 0.5081019997596741, step time: 17.994403839111328ms\r\n",,terminal_output +9725,12502530,"TERMINAL",0,0,"Step 3818, loss: 0.49703115224838257, step time: 17.892837524414062ms\r\n",,terminal_output +9726,12502591,"TERMINAL",0,0,"Step 3819, loss: 0.5624579787254333, step time: 18.360376358032227ms\r\n",,terminal_output +9727,12502651,"TERMINAL",0,0,"Step 3820, loss: 0.5219689011573792, step time: 17.87257194519043ms\r\n",,terminal_output +9728,12502723,"TERMINAL",0,0,"Step 3821, loss: 0.9551877379417419, step time: 17.804384231567383ms\r\n",,terminal_output +9729,12502775,"TERMINAL",0,0,"Step 3822, loss: 0.47848692536354065, step time: 17.901897430419922ms\r\n",,terminal_output +9730,12502844,"TERMINAL",0,0,"Step 3823, loss: 0.4933544397354126, step time: 17.789602279663086ms\r\n",,terminal_output +9731,12502904,"TERMINAL",0,0,"Step 3824, loss: 0.5101677179336548, step time: 17.80414581298828ms\r\n",,terminal_output +9732,12502966,"TERMINAL",0,0,"Step 3825, loss: 0.8351713418960571, step time: 18.155574798583984ms\r\n",,terminal_output +9733,12503028,"TERMINAL",0,0,"Step 3826, loss: 0.5006747841835022, step time: 17.7614688873291ms\r\n",,terminal_output +9734,12503090,"TERMINAL",0,0,"Step 3827, loss: 0.5228747129440308, step time: 17.862319946289062ms\r\n",,terminal_output +9735,12503151,"TERMINAL",0,0,"Step 3828, loss: 0.5020275115966797, step time: 18.333911895751953ms\r\n",,terminal_output +9736,12503217,"TERMINAL",0,0,"Step 3829, loss: 0.5004339218139648, step time: 17.602920532226562ms\r\n",,terminal_output +9737,12503279,"TERMINAL",0,0,"Step 3830, loss: 0.48609575629234314, step time: 17.50636100769043ms\r\n",,terminal_output +9738,12503343,"TERMINAL",0,0,"Step 3831, loss: 0.49320632219314575, step time: 18.36371421813965ms\r\n",,terminal_output +9739,12503408,"TERMINAL",0,0,"Step 3832, loss: 0.5206774473190308, step time: 17.76885986328125ms\r\n",,terminal_output +9740,12503471,"TERMINAL",0,0,"Step 3833, loss: 0.48483216762542725, step time: 17.490863800048828ms\r\n",,terminal_output +9741,12503534,"TERMINAL",0,0,"Step 3834, loss: 0.48650503158569336, step time: 18.02659034729004ms\r\n",,terminal_output +9742,12503647,"TERMINAL",0,0,"Step 3835, loss: 0.5467296838760376, step time: 17.491817474365234ms\r\n",,terminal_output +9743,12503672,"TERMINAL",0,0,"Step 3836, loss: 0.48900824785232544, step time: 17.838716506958008ms\r\n",,terminal_output +9744,12503786,"TERMINAL",0,0,"Step 3837, loss: 0.4986163079738617, step time: 18.29051971435547ms\r\nStep 3838, loss: 0.4972028434276581, step time: 17.72618293762207ms\r\n",,terminal_output +9745,12503853,"TERMINAL",0,0,"Step 3839, loss: 0.47634318470954895, step time: 17.702341079711914ms\r\n",,terminal_output +9746,12503943,"TERMINAL",0,0,"Step 3840, loss: 0.5255839228630066, step time: 18.01323890686035ms\r\n",,terminal_output +9747,12504051,"TERMINAL",0,0,"Step 3841, loss: 0.4897177219390869, step time: 17.617225646972656ms\r\nStep 3842, loss: 0.597702145576477, step time: 17.596721649169922ms\r\n",,terminal_output +9748,12504113,"TERMINAL",0,0,"Step 3843, loss: 0.63397616147995, step time: 18.05591583251953ms\r\n",,terminal_output +9749,12504179,"TERMINAL",0,0,"Step 3844, loss: 0.5171335339546204, step time: 17.54283905029297ms\r\n",,terminal_output +9750,12504243,"TERMINAL",0,0,"Step 3845, loss: 0.4844931364059448, step time: 18.63551139831543ms\r\n",,terminal_output +9751,12504356,"TERMINAL",0,0,"Step 3846, loss: 0.5256843566894531, step time: 18.227577209472656ms\r\nStep 3847, loss: 
0.5214373469352722, step time: 17.54307746887207ms\r\n",,terminal_output +9752,12504422,"TERMINAL",0,0,"Step 3848, loss: 0.5102081894874573, step time: 17.35711097717285ms\r\n",,terminal_output +9753,12504553,"TERMINAL",0,0,"Step 3849, loss: 0.47841721773147583, step time: 18.023252487182617ms\r\nStep 3850, loss: 0.505364716053009, step time: 18.014907836914062ms\r\n",,terminal_output +9754,12504618,"TERMINAL",0,0,"Step 3851, loss: 0.5066282749176025, step time: 17.433643341064453ms\r\n",,terminal_output +9755,12504680,"TERMINAL",0,0,"Step 3852, loss: 0.5296849608421326, step time: 17.96698570251465ms\r\n",,terminal_output +9756,12504746,"TERMINAL",0,0,"Step 3853, loss: 0.5118847489356995, step time: 17.432212829589844ms\r\n",,terminal_output +9757,12504798,"TERMINAL",0,0,"Step 3854, loss: 0.48855361342430115, step time: 17.487764358520508ms\r\n",,terminal_output +9758,12504892,"TERMINAL",0,0,"Step 3855, loss: 0.5187848210334778, step time: 18.02515983581543ms\r\n",,terminal_output +9759,12504944,"TERMINAL",0,0,"Step 3856, loss: 0.5138198137283325, step time: 17.658233642578125ms\r\n",,terminal_output +9760,12505036,"TERMINAL",0,0,"Step 3857, loss: 0.5727316737174988, step time: 17.909765243530273ms\r\n",,terminal_output +9761,12505089,"TERMINAL",0,0,"Step 3858, loss: 0.5342170596122742, step time: 17.941713333129883ms\r\n",,terminal_output +9762,12505195,"TERMINAL",0,0,"Step 3859, loss: 0.4762214422225952, step time: 17.735958099365234ms\r\nStep 3860, loss: 0.5026840567588806, step time: 17.67563819885254ms\r\n",,terminal_output +9763,12505289,"TERMINAL",0,0,"Step 3861, loss: 0.47590431571006775, step time: 18.14889907836914ms\r\n",,terminal_output +9764,12505572,"TERMINAL",0,0,"Step 3862, loss: 0.4896043837070465, step time: 324.3100643157959ms\r\n",,terminal_output +9765,12505638,"TERMINAL",0,0,"Step 3863, loss: 0.47364798188209534, step time: 24.770498275756836ms\r\n",,terminal_output +9766,12505709,"TERMINAL",0,0,"Step 3864, loss: 0.4816991090774536, step time: 19.356250762939453ms\r\n",,terminal_output +9767,12505773,"TERMINAL",0,0,"Step 3865, loss: 0.502058744430542, step time: 18.443584442138672ms\r\n",,terminal_output +9768,12505830,"TERMINAL",0,0,"Step 3866, loss: 0.47954219579696655, step time: 18.011808395385742ms\r\n",,terminal_output +9769,12505893,"TERMINAL",0,0,"Step 3867, loss: 0.474213570356369, step time: 17.930984497070312ms\r\n",,terminal_output +9770,12505956,"TERMINAL",0,0,"Step 3868, loss: 0.47593146562576294, step time: 18.24331283569336ms\r\n",,terminal_output +9771,12506052,"TERMINAL",0,0,"Step 3869, loss: 0.5906273126602173, step time: 18.003225326538086ms\r\n",,terminal_output +9772,12506103,"TERMINAL",0,0,"Step 3870, loss: 0.5022305846214294, step time: 17.290115356445312ms\r\n",,terminal_output +9773,12506195,"TERMINAL",0,0,"Step 3871, loss: 0.8571324348449707, step time: 17.619848251342773ms\r\n",,terminal_output +9774,12506246,"TERMINAL",0,0,"Step 3872, loss: 0.49592503905296326, step time: 17.367124557495117ms\r\n",,terminal_output +9775,12506298,"TERMINAL",0,0,"Step 3873, loss: 0.5081104040145874, step time: 17.54903793334961ms\r\n",,terminal_output +9776,12506402,"TERMINAL",0,0,"Step 3874, loss: 0.503541886806488, step time: 17.57025718688965ms\r\nStep 3875, loss: 0.472197026014328, step time: 17.394542694091797ms\r\n",,terminal_output +9777,12506496,"TERMINAL",0,0,"Step 3876, loss: 0.5028272867202759, step time: 17.662763595581055ms\r\n",,terminal_output +9778,12506547,"TERMINAL",0,0,"Step 3877, loss: 1.5643084049224854, step time: 
17.7919864654541ms\r\n",,terminal_output +9779,12506640,"TERMINAL",0,0,"Step 3878, loss: 0.5074616074562073, step time: 17.77791976928711ms\r\n",,terminal_output +9780,12506694,"TERMINAL",0,0,"Step 3879, loss: 0.5292625427246094, step time: 17.310619354248047ms\r\n",,terminal_output +9781,12506799,"TERMINAL",0,0,"Step 3880, loss: 0.5499780774116516, step time: 17.846107482910156ms\r\nStep 3881, loss: 0.5111914873123169, step time: 17.558813095092773ms\r\n",,terminal_output +9782,12506863,"TERMINAL",0,0,"Step 3882, loss: 0.47295165061950684, step time: 19.282102584838867ms\r\n",,terminal_output +9783,12506921,"TERMINAL",0,0,"Step 3883, loss: 0.5506457090377808, step time: 18.016815185546875ms\r\n",,terminal_output +9784,12506988,"TERMINAL",0,0,"Step 3884, loss: 0.48918038606643677, step time: 17.541885375976562ms\r\n",,terminal_output +9785,12507042,"TERMINAL",0,0,"Step 3885, loss: 0.507321834564209, step time: 17.688512802124023ms\r\n",,terminal_output +9786,12507135,"TERMINAL",0,0,"Step 3886, loss: 0.4795459806919098, step time: 18.53775978088379ms\r\n",,terminal_output +9787,12507188,"TERMINAL",0,0,"Step 3887, loss: 0.4774737060070038, step time: 17.64965057373047ms\r\n",,terminal_output +9788,12507298,"TERMINAL",0,0,"Step 3888, loss: 1.5789740085601807, step time: 17.361164093017578ms\r\nStep 3889, loss: 0.6177122592926025, step time: 17.96436309814453ms\r\n",,terminal_output +9789,12507359,"TERMINAL",0,0,"Step 3890, loss: 0.5003854036331177, step time: 17.777442932128906ms\r\n",,terminal_output +9790,12507422,"TERMINAL",0,0,"Step 3891, loss: 0.49368754029273987, step time: 17.62104034423828ms\r\n",,terminal_output +9791,12507481,"TERMINAL",0,0,"Step 3892, loss: 0.48318663239479065, step time: 17.667055130004883ms\r\n",,terminal_output +9792,12507547,"TERMINAL",0,0,"Step 3893, loss: 0.4864007532596588, step time: 17.595291137695312ms\r\n",,terminal_output +9793,12507640,"TERMINAL",0,0,"Step 3894, loss: 0.47878527641296387, step time: 17.2879695892334ms\r\n",,terminal_output +9794,12507694,"TERMINAL",0,0,"Step 3895, loss: 0.4860522449016571, step time: 17.679452896118164ms\r\n",,terminal_output +9795,12507801,"TERMINAL",0,0,"Step 3896, loss: 0.4833972752094269, step time: 17.517566680908203ms\r\nStep 3897, loss: 1.8399686813354492, step time: 17.404556274414062ms\r\n",,terminal_output +9796,12507867,"TERMINAL",0,0,"Step 3898, loss: 0.6404615044593811, step time: 17.790794372558594ms\r\n",,terminal_output +9797,12507929,"TERMINAL",0,0,"Step 3899, loss: 0.4892677664756775, step time: 17.620563507080078ms\r\n",,terminal_output +9798,12507990,"TERMINAL",0,0,"Step 3900, loss: 0.7826300263404846, step time: 17.283916473388672ms\r\n",,terminal_output +9799,12508051,"TERMINAL",0,0,"Step 3901, loss: 0.4863031506538391, step time: 17.893314361572266ms\r\n",,terminal_output +9800,12508113,"TERMINAL",0,0,"Step 3902, loss: 0.5184105634689331, step time: 17.49587059020996ms\r\n",,terminal_output +9801,12508217,"TERMINAL",0,0,"Step 3903, loss: 0.4743037223815918, step time: 17.611980438232422ms\r\n",,terminal_output +9802,12508258,"TERMINAL",0,0,"Step 3904, loss: 0.511167585849762, step time: 17.531633377075195ms\r\n",,terminal_output +9803,12508356,"TERMINAL",0,0,"Step 3905, loss: 0.792900800704956, step time: 17.42386817932129ms\r\n",,terminal_output +9804,12508429,"TERMINAL",0,0,"Step 3906, loss: 0.5948659181594849, step time: 17.30966567993164ms\r\nStep 3907, loss: 0.5013391375541687, step time: 17.76409149169922ms\r\n",,terminal_output +9805,12508500,"TERMINAL",0,0,"Step 3908, loss: 
0.5107985734939575, step time: 17.293214797973633ms\r\n",,terminal_output +9806,12508559,"TERMINAL",0,0,"Step 3909, loss: 0.48671308159828186, step time: 17.3490047454834ms\r\n",,terminal_output +9807,12508673,"TERMINAL",0,0,"Step 3910, loss: 0.48753780126571655, step time: 17.786502838134766ms\r\n",,terminal_output +9808,12508681,"TERMINAL",0,0,"Step 3911, loss: 0.8511398434638977, step time: 17.446041107177734ms\r\n",,terminal_output +9809,12508825,"TERMINAL",0,0,"Step 3912, loss: 0.48955923318862915, step time: 17.026185989379883ms\r\nStep 3913, loss: 0.4848446249961853, step time: 17.849206924438477ms\r\n",,terminal_output +9810,12508921,"TERMINAL",0,0,"Step 3914, loss: 0.535470724105835, step time: 18.743276596069336ms\r\n",,terminal_output +9811,12508983,"TERMINAL",0,0,"Step 3915, loss: 0.49294477701187134, step time: 17.81296730041504ms\r\n",,terminal_output +9812,12509046,"TERMINAL",0,0,"Step 3916, loss: 0.4625548720359802, step time: 17.934560775756836ms\r\n",,terminal_output +9813,12509148,"TERMINAL",0,0,"Step 3917, loss: 0.516485869884491, step time: 17.426490783691406ms\r\nStep 3918, loss: 0.47126659750938416, step time: 17.363548278808594ms\r\n",,terminal_output +9814,12509244,"TERMINAL",0,0,"Step 3919, loss: 0.4761417508125305, step time: 18.3260440826416ms\r\n",,terminal_output +9815,12509295,"TERMINAL",0,0,"Step 3920, loss: 0.48781901597976685, step time: 18.0666446685791ms\r\n",,terminal_output +9816,12509400,"TERMINAL",0,0,"Step 3921, loss: 0.5496413111686707, step time: 17.940521240234375ms\r\nStep 3922, loss: 0.46772530674934387, step time: 17.984390258789062ms\r\n",,terminal_output +9817,12509495,"TERMINAL",0,0,"Step 3923, loss: 0.4625478982925415, step time: 17.61770248413086ms\r\n",,terminal_output +9818,12509546,"TERMINAL",0,0,"Step 3924, loss: 0.4549483358860016, step time: 17.23027229309082ms\r\n",,terminal_output +9819,12509653,"TERMINAL",0,0,"Step 3925, loss: 0.50238037109375, step time: 17.82393455505371ms\r\nStep 3926, loss: 0.4648951292037964, step time: 17.517566680908203ms\r\n",,terminal_output +9820,12509781,"TERMINAL",0,0,"Step 3927, loss: 0.4599802792072296, step time: 17.332792282104492ms\r\nStep 3928, loss: 0.45135366916656494, step time: 17.748117446899414ms\r\n",,terminal_output +9821,12509842,"TERMINAL",0,0,"Step 3929, loss: 0.4664369523525238, step time: 17.4100399017334ms\r\n",,terminal_output +9822,12509903,"TERMINAL",0,0,"Step 3930, loss: 0.5245561003684998, step time: 17.5783634185791ms\r\n",,terminal_output +9823,12509970,"TERMINAL",0,0,"Step 3931, loss: 0.7670914530754089, step time: 17.702341079711914ms\r\n",,terminal_output +9824,12510032,"TERMINAL",0,0,"Step 3932, loss: 0.5362075567245483, step time: 17.576217651367188ms\r\n",,terminal_output +9825,12510097,"TERMINAL",0,0,"Step 3933, loss: 0.46484482288360596, step time: 17.471790313720703ms\r\n",,terminal_output +9826,12510159,"TERMINAL",0,0,"Step 3934, loss: 0.5130138397216797, step time: 17.45319366455078ms\r\n",,terminal_output +9827,12510220,"TERMINAL",0,0,"Step 3935, loss: 0.46731048822402954, step time: 17.423391342163086ms\r\n",,terminal_output +9828,12510286,"TERMINAL",0,0,"Step 3936, loss: 0.45106109976768494, step time: 17.667770385742188ms\r\n",,terminal_output +9829,12510346,"TERMINAL",0,0,"Step 3937, loss: 0.4559725821018219, step time: 18.03731918334961ms\r\n",,terminal_output +9830,12510423,"TERMINAL",0,0,"Step 3938, loss: 0.5638471245765686, step time: 19.918441772460938ms\r\n",,terminal_output +9831,12510474,"TERMINAL",0,0,"Step 3939, loss: 0.4761486351490021, step time: 
17.913341522216797ms\r\n",,terminal_output +9832,12510539,"TERMINAL",0,0,"Step 3940, loss: 0.45766904950141907, step time: 17.740488052368164ms\r\n",,terminal_output +9833,12510647,"TERMINAL",0,0,"Step 3941, loss: 1.1530693769454956, step time: 17.749786376953125ms\r\n",,terminal_output +9834,12510679,"TERMINAL",0,0,"Step 3942, loss: 0.476127028465271, step time: 17.354488372802734ms\r\n",,terminal_output +9835,12510738,"TERMINAL",0,0,"Step 3943, loss: 0.5115242600440979, step time: 18.040895462036133ms\r\n",,terminal_output +9836,12510805,"TERMINAL",0,0,"Step 3944, loss: 0.486471951007843, step time: 17.474889755249023ms\r\n",,terminal_output +9837,12510867,"TERMINAL",0,0,"Step 3945, loss: 0.9266186952590942, step time: 17.296791076660156ms\r\n",,terminal_output +9838,12510919,"TERMINAL",0,0,"Step 3946, loss: 0.4707529842853546, step time: 18.015384674072266ms\r\n",,terminal_output +9839,12510983,"TERMINAL",0,0,"Step 3947, loss: 0.46161526441574097, step time: 18.27239990234375ms\r\n",,terminal_output +9840,12511049,"TERMINAL",0,0,"Step 3948, loss: 0.4880183935165405, step time: 17.223358154296875ms\r\n",,terminal_output +9841,12511114,"TERMINAL",0,0,"Step 3949, loss: 0.49094539880752563, step time: 17.675399780273438ms\r\n",,terminal_output +9842,12511178,"TERMINAL",0,0,"Step 3950, loss: 0.46965572237968445, step time: 17.278432846069336ms\r\n",,terminal_output +9843,12511236,"TERMINAL",0,0,"Step 3951, loss: 0.48439890146255493, step time: 17.57526397705078ms\r\n",,terminal_output +9844,12511364,"TERMINAL",0,0,"Step 3952, loss: 0.48166894912719727, step time: 17.730236053466797ms\r\nStep 3953, loss: 0.5280522108078003, step time: 17.232894897460938ms\r\n",,terminal_output +9845,12511460,"TERMINAL",0,0,"Step 3954, loss: 0.4697203040122986, step time: 17.370939254760742ms\r\n",,terminal_output +9846,12511525,"TERMINAL",0,0,"Step 3955, loss: 0.4622354209423065, step time: 17.7001953125ms\r\n",,terminal_output +9847,12511588,"TERMINAL",0,0,"Step 3956, loss: 0.44874107837677, step time: 17.339468002319336ms\r\n",,terminal_output +9848,12511640,"TERMINAL",0,0,"Step 3957, loss: 0.4523545503616333, step time: 17.501115798950195ms\r\n",,terminal_output +9849,12511746,"TERMINAL",0,0,"Step 3958, loss: 0.4713846743106842, step time: 17.71831512451172ms\r\nStep 3959, loss: 0.44756370782852173, step time: 17.45128631591797ms\r\n",,terminal_output +9850,12511813,"TERMINAL",0,0,"Step 3960, loss: 0.4667411744594574, step time: 17.482757568359375ms\r\n",,terminal_output +9851,12511924,"TERMINAL",0,0,"Step 3961, loss: 0.4592478573322296, step time: 17.726659774780273ms\r\n",,terminal_output +9852,12511999,"TERMINAL",0,0,"Step 3962, loss: 0.4913868308067322, step time: 17.748594284057617ms\r\nStep 3963, loss: 0.4551180601119995, step time: 17.719507217407227ms\r\n",,terminal_output +9853,12512101,"TERMINAL",0,0,"Step 3964, loss: 0.46917104721069336, step time: 17.622947692871094ms\r\n",,terminal_output +9854,12512150,"TERMINAL",0,0,"Step 3965, loss: 0.6334706544876099, step time: 17.331838607788086ms\r\n",,terminal_output +9855,12512255,"TERMINAL",0,0,"Step 3966, loss: 0.47003883123397827, step time: 17.38429069519043ms\r\nStep 3967, loss: 0.4551662504673004, step time: 17.948389053344727ms\r\n",,terminal_output +9856,12512350,"TERMINAL",0,0,"Step 3968, loss: 0.47470036149024963, step time: 17.316818237304688ms\r\n",,terminal_output +9857,12512453,"TERMINAL",0,0,"Step 3969, loss: 0.4683530032634735, step time: 17.406940460205078ms\r\nStep 3970, loss: 0.612378716468811, step time: 
17.835617065429688ms\r\n",,terminal_output +9858,12512570,"TERMINAL",0,0,"Step 3971, loss: 0.4545617997646332, step time: 17.669200897216797ms\r\nStep 3972, loss: 0.5556722283363342, step time: 17.354249954223633ms\r\n",,terminal_output +9859,12512704,"TERMINAL",0,0,"Step 3973, loss: 0.4786590039730072, step time: 17.820358276367188ms\r\nStep 3974, loss: 0.4517137408256531, step time: 17.449378967285156ms\r\n",,terminal_output +9860,12512769,"TERMINAL",0,0,"Step 3975, loss: 0.42933791875839233, step time: 17.386913299560547ms\r\n",,terminal_output +9861,12512837,"TERMINAL",0,0,"Step 3976, loss: 0.45065149664878845, step time: 17.772674560546875ms\r\n",,terminal_output +9862,12512903,"TERMINAL",0,0,"Step 3977, loss: 0.46045032143592834, step time: 17.282962799072266ms\r\n",,terminal_output +9863,12512969,"TERMINAL",0,0,"Step 3978, loss: 0.4644969403743744, step time: 17.32635498046875ms\r\n",,terminal_output +9864,12513037,"TERMINAL",0,0,"Step 3979, loss: 0.43861377239227295, step time: 17.724275588989258ms\r\n",,terminal_output +9865,12513108,"TERMINAL",0,0,"Step 3980, loss: 0.4526980221271515, step time: 17.637968063354492ms\r\n",,terminal_output +9866,12513174,"TERMINAL",0,0,"Step 3981, loss: 0.4327860474586487, step time: 17.392873764038086ms\r\n",,terminal_output +9867,12513239,"TERMINAL",0,0,"Step 3982, loss: 0.47863462567329407, step time: 17.83609390258789ms\r\n",,terminal_output +9868,12513337,"TERMINAL",0,0,"Step 3983, loss: 0.45071762800216675, step time: 17.639875411987305ms\r\nStep 3984, loss: 0.5763452053070068, step time: 17.392396926879883ms\r\n",,terminal_output +9869,12513404,"TERMINAL",0,0,"Step 3985, loss: 0.4288410246372223, step time: 17.627954483032227ms\r\n",,terminal_output +9870,12513469,"TERMINAL",0,0,"Step 3986, loss: 0.44987136125564575, step time: 17.554044723510742ms\r\n",,terminal_output +9871,12513541,"TERMINAL",0,0,"Step 3987, loss: 0.4333173930644989, step time: 17.29297637939453ms\r\n",,terminal_output +9872,12513613,"TERMINAL",0,0,"Step 3988, loss: 0.4658542573451996, step time: 17.780303955078125ms\r\n",,terminal_output +9873,12513660,"TERMINAL",0,0,"Step 3989, loss: 0.4331316351890564, step time: 17.276525497436523ms\r\n",,terminal_output +9874,12513789,"TERMINAL",0,0,"Step 3990, loss: 0.5398355722427368, step time: 30.98583221435547ms\r\nStep 3991, loss: 0.43625691533088684, step time: 18.49842071533203ms\r\n",,terminal_output +9875,12513858,"TERMINAL",0,0,"Step 3992, loss: 0.4450013041496277, step time: 17.84491539001465ms\r\n",,terminal_output +9876,12513923,"TERMINAL",0,0,"Step 3993, loss: 0.44174715876579285, step time: 17.369747161865234ms\r\n",,terminal_output +9877,12513989,"TERMINAL",0,0,"Step 3994, loss: 0.6884430646896362, step time: 17.945051193237305ms\r\n",,terminal_output +9878,12514056,"TERMINAL",0,0,"Step 3995, loss: 0.42844972014427185, step time: 17.38762855529785ms\r\n",,terminal_output +9879,12514123,"TERMINAL",0,0,"Step 3996, loss: 0.9580062627792358, step time: 17.37666130065918ms\r\n",,terminal_output +9880,12514189,"TERMINAL",0,0,"Step 3997, loss: 0.44475147128105164, step time: 17.840862274169922ms\r\n",,terminal_output +9881,12514261,"TERMINAL",0,0,"Step 3998, loss: 0.4302358627319336, step time: 18.89324188232422ms\r\n",,terminal_output +9882,12514325,"TERMINAL",0,0,"Step 3999, loss: 0.45012733340263367, step time: 18.31650733947754ms\r\n",,terminal_output +9883,12517183,"TERMINAL",0,0,"Step 4000, loss: 0.43872037529945374, step time: 28.75065803527832ms\r\nStep 4001, loss: 0.4492608606815338, step time: 
25.17867088317871ms\r\n",,terminal_output +9884,12517236,"TERMINAL",0,0,"Step 4002, loss: 0.46129652857780457, step time: 20.734310150146484ms\r\n",,terminal_output +9885,12517332,"TERMINAL",0,0,"Step 4003, loss: 0.4648560881614685, step time: 19.418716430664062ms\r\n",,terminal_output +9886,12517437,"TERMINAL",0,0,"Step 4004, loss: 0.4437858462333679, step time: 18.898487091064453ms\r\nStep 4005, loss: 0.45133256912231445, step time: 18.944263458251953ms\r\n",,terminal_output +9887,12517508,"TERMINAL",0,0,"Step 4006, loss: 0.43053510785102844, step time: 18.57900619506836ms\r\n",,terminal_output +9888,12517578,"TERMINAL",0,0,"Step 4007, loss: 0.41922488808631897, step time: 18.53322982788086ms\r\n",,terminal_output +9889,12517645,"TERMINAL",0,0,"Step 4008, loss: 0.4358307421207428, step time: 17.53377914428711ms\r\n",,terminal_output +9890,12517760,"TERMINAL",0,0,"Step 4009, loss: 0.4314057230949402, step time: 17.397642135620117ms\r\nStep 4010, loss: 0.563033401966095, step time: 17.180204391479492ms\r\n",,terminal_output +9891,12517827,"TERMINAL",0,0,"Step 4011, loss: 0.4296819865703583, step time: 18.117904663085938ms\r\n",,terminal_output +9892,12517892,"TERMINAL",0,0,"Step 4012, loss: 0.43280622363090515, step time: 17.063379287719727ms\r\n",,terminal_output +9893,12517955,"TERMINAL",0,0,"Step 4013, loss: 0.42329132556915283, step time: 17.745494842529297ms\r\n",,terminal_output +9894,12518023,"TERMINAL",0,0,"Step 4014, loss: 0.4400646388530731, step time: 17.551898956298828ms\r\n",,terminal_output +9895,12518089,"TERMINAL",0,0,"Step 4015, loss: 0.46615076065063477, step time: 17.61794090270996ms\r\n",,terminal_output +9896,12518154,"TERMINAL",0,0,"Step 4016, loss: 0.42946571111679077, step time: 17.167091369628906ms\r\n",,terminal_output +9897,12518229,"TERMINAL",0,0,"Step 4017, loss: 0.5444297194480896, step time: 17.865657806396484ms\r\n",,terminal_output +9898,12518293,"TERMINAL",0,0,"Step 4018, loss: 0.4421769976615906, step time: 16.897201538085938ms\r\n",,terminal_output +9899,12518365,"TERMINAL",0,0,"Step 4019, loss: 0.4394099712371826, step time: 22.27187156677246ms\r\n",,terminal_output +9900,12518438,"TERMINAL",0,0,"Step 4020, loss: 0.4622306227684021, step time: 17.43006706237793ms\r\n",,terminal_output +9901,12518484,"TERMINAL",0,0,"Step 4021, loss: 0.45732852816581726, step time: 17.54903793334961ms\r\n",,terminal_output +9902,12518546,"TERMINAL",0,0,"Step 4022, loss: 0.4317288398742676, step time: 17.245054244995117ms\r\n",,terminal_output +9903,12518653,"TERMINAL",0,0,"Step 4023, loss: 0.43996551632881165, step time: 18.0056095123291ms\r\nStep 4024, loss: 0.4275314509868622, step time: 17.337560653686523ms\r\n",,terminal_output +9904,12518772,"TERMINAL",0,0,"Step 4025, loss: 0.554362416267395, step time: 17.713069915771484ms\r\nStep 4026, loss: 1.5727273225784302, step time: 17.480850219726562ms\r\n",,terminal_output +9905,12518901,"TERMINAL",0,0,"Step 4027, loss: 0.5799418687820435, step time: 17.49277114868164ms\r\nStep 4028, loss: 1.1577930450439453, step time: 17.476558685302734ms\r\n",,terminal_output +9906,12518971,"TERMINAL",0,0,"Step 4029, loss: 0.4350793659687042, step time: 17.921924591064453ms\r\n",,terminal_output +9907,12519037,"TERMINAL",0,0,"Step 4030, loss: 0.44370758533477783, step time: 17.40431785583496ms\r\n",,terminal_output +9908,12519104,"TERMINAL",0,0,"Step 4031, loss: 0.4501814842224121, step time: 17.369985580444336ms\r\n",,terminal_output +9909,12519169,"TERMINAL",0,0,"Step 4032, loss: 0.4891679883003235, step time: 
17.68970489501953ms\r\n",,terminal_output +9910,12519239,"TERMINAL",0,0,"Step 4033, loss: 0.4636751413345337, step time: 17.17996597290039ms\r\n",,terminal_output +9911,12519305,"TERMINAL",0,0,"Step 4034, loss: 0.43437641859054565, step time: 18.680810928344727ms\r\n",,terminal_output +9912,12519374,"TERMINAL",0,0,"Step 4035, loss: 0.4746631979942322, step time: 18.35918426513672ms\r\n",,terminal_output +9913,12519445,"TERMINAL",0,0,"Step 4036, loss: 0.4909677505493164, step time: 17.425537109375ms\r\n",,terminal_output +9914,12519517,"TERMINAL",0,0,"Step 4037, loss: 0.4339216649532318, step time: 17.846107482910156ms\r\n",,terminal_output +9915,12519566,"TERMINAL",0,0,"Step 4038, loss: 0.6318186521530151, step time: 17.627716064453125ms\r\n",,terminal_output +9916,12519617,"TERMINAL",0,0,"Step 4039, loss: 0.4605858623981476, step time: 17.5631046295166ms\r\n",,terminal_output +9917,12519745,"TERMINAL",0,0,"Step 4040, loss: 0.670056164264679, step time: 17.559528350830078ms\r\nStep 4041, loss: 0.4734265208244324, step time: 18.27216148376465ms\r\n",,terminal_output +9918,12519805,"TERMINAL",0,0,"Step 4042, loss: 0.5151941180229187, step time: 17.537832260131836ms\r\n",,terminal_output +9919,12519893,"TERMINAL",0,0,"Step 4043, loss: 0.44840022921562195, step time: 17.675399780273438ms\r\n",,terminal_output +9920,12519949,"TERMINAL",0,0,"Step 4044, loss: 0.44556811451911926, step time: 17.963886260986328ms\r\n",,terminal_output +9921,12520055,"TERMINAL",0,0,"Step 4045, loss: 0.4813152253627777, step time: 17.416954040527344ms\r\nStep 4046, loss: 0.4228169322013855, step time: 17.5473690032959ms\r\n",,terminal_output +9922,12520151,"TERMINAL",0,0,"Step 4047, loss: 0.4364224374294281, step time: 17.980098724365234ms\r\n",,terminal_output +9923,12520204,"TERMINAL",0,0,"Step 4048, loss: 0.7005646228790283, step time: 17.231225967407227ms\r\n",,terminal_output +9924,12520256,"TERMINAL",0,0,"Step 4049, loss: 0.4253663718700409, step time: 17.324447631835938ms\r\n",,terminal_output +9925,12520369,"TERMINAL",0,0,"Step 4050, loss: 0.4290497601032257, step time: 17.943620681762695ms\r\nStep 4051, loss: 0.4296317398548126, step time: 17.449617385864258ms\r\n",,terminal_output +9926,12520464,"TERMINAL",0,0,"Step 4052, loss: 0.4392671287059784, step time: 17.227649688720703ms\r\n",,terminal_output +9927,12520568,"TERMINAL",0,0,"Step 4053, loss: 0.42550498247146606, step time: 18.157005310058594ms\r\nStep 4054, loss: 0.6690137386322021, step time: 17.26841926574707ms\r\n",,terminal_output +9928,12520634,"TERMINAL",0,0,"Step 4055, loss: 0.4512556195259094, step time: 17.760753631591797ms\r\n",,terminal_output +9929,12520699,"TERMINAL",0,0,"Step 4056, loss: 0.43187278509140015, step time: 17.863750457763672ms\r\n",,terminal_output +9930,12520767,"TERMINAL",0,0,"Step 4057, loss: 1.839219093322754, step time: 17.578601837158203ms\r\n",,terminal_output +9931,12520833,"TERMINAL",0,0,"Step 4058, loss: 0.4849301278591156, step time: 17.716169357299805ms\r\n",,terminal_output +9932,12520901,"TERMINAL",0,0,"Step 4059, loss: 0.4366891086101532, step time: 18.000364303588867ms\r\n",,terminal_output +9933,12520967,"TERMINAL",0,0,"Step 4060, loss: 0.4615394175052643, step time: 17.32945442199707ms\r\n",,terminal_output +9934,12521033,"TERMINAL",0,0,"Step 4061, loss: 0.8869538307189941, step time: 17.868757247924805ms\r\n",,terminal_output +9935,12521100,"TERMINAL",0,0,"Step 4062, loss: 0.4627637267112732, step time: 18.03731918334961ms\r\n",,terminal_output +9936,12521198,"TERMINAL",0,0,"Step 4063, loss: 
0.4975549876689911, step time: 17.708539962768555ms\r\nStep 4064, loss: 0.43081942200660706, step time: 17.663240432739258ms\r\n",,terminal_output +9937,12521263,"TERMINAL",0,0,"Step 4065, loss: 0.5224701166152954, step time: 17.973661422729492ms\r\n",,terminal_output +9938,12521330,"TERMINAL",0,0,"Step 4066, loss: 0.5058913826942444, step time: 17.337322235107422ms\r\n",,terminal_output +9939,12521394,"TERMINAL",0,0,"Step 4067, loss: 0.4575176537036896, step time: 17.859220504760742ms\r\n",,terminal_output +9940,12521460,"TERMINAL",0,0,"Step 4068, loss: 0.4431230127811432, step time: 17.8682804107666ms\r\n",,terminal_output +9941,12521525,"TERMINAL",0,0,"Step 4069, loss: 0.5620326995849609, step time: 17.33231544494629ms\r\n",,terminal_output +9942,12521591,"TERMINAL",0,0,"Step 4070, loss: 0.4522801339626312, step time: 17.4863338470459ms\r\n",,terminal_output +9943,12521657,"TERMINAL",0,0,"Step 4071, loss: 0.5047569274902344, step time: 18.146276473999023ms\r\n",,terminal_output +9944,12521728,"TERMINAL",0,0,"Step 4072, loss: 0.43146783113479614, step time: 17.397403717041016ms\r\n",,terminal_output +9945,12521781,"TERMINAL",0,0,"Step 4073, loss: 0.4533654749393463, step time: 17.675399780273438ms\r\n",,terminal_output +9946,12521849,"TERMINAL",0,0,"Step 4074, loss: 0.43569377064704895, step time: 17.844200134277344ms\r\n",,terminal_output +9947,12521915,"TERMINAL",0,0,"Step 4075, loss: 0.41297048330307007, step time: 17.405271530151367ms\r\n",,terminal_output +9948,12521981,"TERMINAL",0,0,"Step 4076, loss: 0.4353754222393036, step time: 17.163991928100586ms\r\n",,terminal_output +9949,12522048,"TERMINAL",0,0,"Step 4077, loss: 0.43356746435165405, step time: 17.836809158325195ms\r\n",,terminal_output +9950,12522114,"TERMINAL",0,0,"Step 4078, loss: 0.41989442706108093, step time: 17.38262176513672ms\r\n",,terminal_output +9951,12522181,"TERMINAL",0,0,"Step 4079, loss: 0.4219387471675873, step time: 17.530441284179688ms\r\n",,terminal_output +9952,12522279,"TERMINAL",0,0,"Step 4080, loss: 1.0894012451171875, step time: 17.856359481811523ms\r\nStep 4081, loss: 0.412762850522995, step time: 17.699480056762695ms\r\n",,terminal_output +9953,12522344,"TERMINAL",0,0,"Step 4082, loss: 0.4208854138851166, step time: 17.487764358520508ms\r\n",,terminal_output +9954,12522410,"TERMINAL",0,0,"Step 4083, loss: 0.4506154954433441, step time: 17.963171005249023ms\r\n",,terminal_output +9955,12522475,"TERMINAL",0,0,"Step 4084, loss: 0.47348257899284363, step time: 17.290592193603516ms\r\n",,terminal_output +9956,12522541,"TERMINAL",0,0,"Step 4085, loss: 0.4688534438610077, step time: 17.77052879333496ms\r\n",,terminal_output +9957,12522613,"TERMINAL",0,0,"Step 4086, loss: 0.44883888959884644, step time: 17.822980880737305ms\r\n",,terminal_output +9958,12522679,"TERMINAL",0,0,"Step 4087, loss: 0.47886407375335693, step time: 19.03820037841797ms\r\n",,terminal_output +9959,12522792,"TERMINAL",0,0,"Step 4088, loss: 0.419831246137619, step time: 17.724275588989258ms\r\nStep 4089, loss: 0.4202868342399597, step time: 18.13817024230957ms\r\n",,terminal_output +9960,12522919,"TERMINAL",0,0,"Step 4090, loss: 0.41324561834335327, step time: 17.440319061279297ms\r\nStep 4091, loss: 0.4014841318130493, step time: 17.739534378051758ms\r\n",,terminal_output +9961,12523014,"TERMINAL",0,0,"Step 4092, loss: 0.4184302091598511, step time: 18.065690994262695ms\r\n",,terminal_output +9962,12523117,"TERMINAL",0,0,"Step 4093, loss: 0.47275519371032715, step time: 17.683744430541992ms\r\nStep 4094, loss: 0.41066959500312805, 
step time: 17.694473266601562ms\r\n",,terminal_output +9963,12523189,"TERMINAL",0,0,"Step 4095, loss: 0.40326282382011414, step time: 19.367218017578125ms\r\n",,terminal_output +9964,12523261,"TERMINAL",0,0,"Step 4096, loss: 0.45057031512260437, step time: 18.029212951660156ms\r\n",,terminal_output +9965,12523333,"TERMINAL",0,0,"Step 4097, loss: 0.4168725311756134, step time: 17.77172088623047ms\r\n",,terminal_output +9966,12523428,"TERMINAL",0,0,"Step 4098, loss: 0.4628649055957794, step time: 18.022775650024414ms\r\nStep 4099, loss: 0.41506096720695496, step time: 17.293691635131836ms\r\n",,terminal_output +9967,12523494,"TERMINAL",0,0,"Step 4100, loss: 0.3946588933467865, step time: 17.66681671142578ms\r\n",,terminal_output +9968,12523559,"TERMINAL",0,0,"Step 4101, loss: 0.40955406427383423, step time: 18.0203914642334ms\r\n",,terminal_output +9969,12523624,"TERMINAL",0,0,"Step 4102, loss: 0.4072399139404297, step time: 17.490625381469727ms\r\n",,terminal_output +9970,12523693,"TERMINAL",0,0,"Step 4103, loss: 0.4218464195728302, step time: 17.879009246826172ms\r\n",,terminal_output +9971,12523765,"TERMINAL",0,0,"Step 4104, loss: 0.440224289894104, step time: 17.888784408569336ms\r\n",,terminal_output +9972,12523829,"TERMINAL",0,0,"Step 4105, loss: 0.4239710569381714, step time: 17.609834671020508ms\r\n",,terminal_output +9973,12523893,"TERMINAL",0,0,"Step 4106, loss: 0.4158596992492676, step time: 17.39501953125ms\r\n",,terminal_output +9974,12523999,"TERMINAL",0,0,"Step 4107, loss: 0.40628042817115784, step time: 17.98248291015625ms\r\nStep 4108, loss: 0.3952210545539856, step time: 17.21930503845215ms\r\n",,terminal_output +9975,12524094,"TERMINAL",0,0,"Step 4109, loss: 0.452506959438324, step time: 17.84801483154297ms\r\n",,terminal_output +9976,12524197,"TERMINAL",0,0,"Step 4110, loss: 0.4176771938800812, step time: 17.882823944091797ms\r\nStep 4111, loss: 0.4173959493637085, step time: 17.509937286376953ms\r\n",,terminal_output +9977,12524269,"TERMINAL",0,0,"Step 4112, loss: 0.40424057841300964, step time: 17.374038696289062ms\r\n",,terminal_output +9978,12524341,"TERMINAL",0,0,"Step 4113, loss: 0.40842366218566895, step time: 19.382476806640625ms\r\n",,terminal_output +9979,12524413,"TERMINAL",0,0,"Step 4114, loss: 0.5372037291526794, step time: 17.573833465576172ms\r\n",,terminal_output +9980,12524520,"TERMINAL",0,0,"Step 4115, loss: 0.39266353845596313, step time: 17.79627799987793ms\r\nStep 4116, loss: 0.432341605424881, step time: 18.40066909790039ms\r\n",,terminal_output +9981,12524633,"TERMINAL",0,0,"Step 4117, loss: 1.8915998935699463, step time: 17.78864860534668ms\r\nStep 4118, loss: 0.42123767733573914, step time: 17.573118209838867ms\r\n",,terminal_output +9982,12524778,"TERMINAL",0,0,"Step 4119, loss: 0.4661077558994293, step time: 17.858505249023438ms\r\nStep 4120, loss: 0.3968602120876312, step time: 17.423152923583984ms\r\n",,terminal_output +9983,12524845,"TERMINAL",0,0,"Step 4121, loss: 0.40435829758644104, step time: 17.694711685180664ms\r\n",,terminal_output +9984,12524920,"TERMINAL",0,0,"Step 4122, loss: 0.4321497976779938, step time: 17.89402961730957ms\r\n",,terminal_output +9985,12524981,"TERMINAL",0,0,"Step 4123, loss: 0.9032256007194519, step time: 17.60697364807129ms\r\n",,terminal_output +9986,12525045,"TERMINAL",0,0,"Step 4124, loss: 0.4056660234928131, step time: 18.362760543823242ms\r\n",,terminal_output +9987,12525404,"TERMINAL",0,0,"Step 4125, loss: 0.4179798662662506, step time: 304.67772483825684ms\r\nStep 4126, loss: 0.40694549679756165, step 
time: 25.004148483276367ms\r\n",,terminal_output +9988,12525524,"TERMINAL",0,0,"Step 4127, loss: 0.4339996576309204, step time: 20.35832405090332ms\r\nStep 4128, loss: 0.39188748598098755, step time: 18.982410430908203ms\r\n",,terminal_output +9989,12525620,"TERMINAL",0,0,"Step 4129, loss: 0.41961294412612915, step time: 18.166065216064453ms\r\n",,terminal_output +9990,12525728,"TERMINAL",0,0,"Step 4130, loss: 0.43266671895980835, step time: 18.016576766967773ms\r\nStep 4131, loss: 0.45813730359077454, step time: 18.338918685913086ms\r\n",,terminal_output +9991,12525784,"TERMINAL",0,0,"Step 4132, loss: 0.40054839849472046, step time: 17.596006393432617ms\r\n",,terminal_output +9992,12525878,"TERMINAL",0,0,"Step 4133, loss: 0.390920490026474, step time: 17.796039581298828ms\r\n",,terminal_output +9993,12525985,"TERMINAL",0,0,"Step 4134, loss: 0.4093519151210785, step time: 17.765045166015625ms\r\nStep 4135, loss: 0.4417061507701874, step time: 17.70496368408203ms\r\n",,terminal_output +9994,12526103,"TERMINAL",0,0,"Step 4136, loss: 0.40321338176727295, step time: 18.7990665435791ms\r\nStep 4137, loss: 0.4170215129852295, step time: 18.36848258972168ms\r\n",,terminal_output +9995,12526231,"TERMINAL",0,0,"Step 4138, loss: 0.44093602895736694, step time: 17.445802688598633ms\r\nStep 4139, loss: 0.44736865162849426, step time: 17.678022384643555ms\r\n",,terminal_output +9996,12526325,"TERMINAL",0,0,"Step 4140, loss: 0.40169981122016907, step time: 17.96746253967285ms\r\n",,terminal_output +9997,12526435,"TERMINAL",0,0,"Step 4141, loss: 0.4468500018119812, step time: 17.46225357055664ms\r\nStep 4142, loss: 0.40481463074684143, step time: 17.462491989135742ms\r\n",,terminal_output +9998,12526501,"TERMINAL",0,0,"Step 4143, loss: 0.41513311862945557, step time: 17.98105239868164ms\r\n",,terminal_output +9999,12526618,"TERMINAL",0,0,"Step 4144, loss: 0.4617738127708435, step time: 17.230749130249023ms\r\nStep 4145, loss: 0.40204113721847534, step time: 17.78125762939453ms\r\n",,terminal_output +10000,12526756,"TERMINAL",0,0,"Step 4146, loss: 0.3824499249458313, step time: 17.948150634765625ms\r\nStep 4147, loss: 0.3944189250469208, step time: 17.599821090698242ms\r\n",,terminal_output +10001,12526811,"TERMINAL",0,0,"Step 4148, loss: 0.42876532673835754, step time: 25.900840759277344ms\r\n",,terminal_output +10002,12526941,"TERMINAL",0,0,"Step 4149, loss: 0.3866567015647888, step time: 18.843650817871094ms\r\nStep 4150, loss: 0.37995707988739014, step time: 17.52495765686035ms\r\n",,terminal_output +10003,12527006,"TERMINAL",0,0,"Step 4151, loss: 0.38426336646080017, step time: 17.881155014038086ms\r\n",,terminal_output +10004,12527077,"TERMINAL",0,0,"Step 4152, loss: 0.4104127585887909, step time: 18.132686614990234ms\r\n",,terminal_output +10005,12527150,"TERMINAL",0,0,"Step 4153, loss: 0.467949777841568, step time: 17.65727996826172ms\r\n",,terminal_output +10006,12527221,"TERMINAL",0,0,"Step 4154, loss: 0.414065420627594, step time: 17.62986183166504ms\r\n",,terminal_output +10007,12527293,"TERMINAL",0,0,"Step 4155, loss: 0.3776034712791443, step time: 18.17607879638672ms\r\n",,terminal_output +10008,12527340,"TERMINAL",0,0,"Step 4156, loss: 0.400240421295166, step time: 17.215251922607422ms\r\n",,terminal_output +10009,12527449,"TERMINAL",0,0,"Step 4157, loss: 0.39385291934013367, step time: 17.644882202148438ms\r\nStep 4158, loss: 0.39992237091064453, step time: 18.033504486083984ms\r\n",,terminal_output +10010,12527576,"TERMINAL",0,0,"Step 4159, loss: 0.3789478540420532, step time: 
17.643213272094727ms\r\nStep 4160, loss: 0.4080585837364197, step time: 17.592430114746094ms\r\n",,terminal_output +10011,12527672,"TERMINAL",0,0,"Step 4161, loss: 0.3811773955821991, step time: 18.121957778930664ms\r\n",,terminal_output +10012,12527778,"TERMINAL",0,0,"Step 4162, loss: 0.38848239183425903, step time: 17.23647117614746ms\r\nStep 4163, loss: 0.4153452217578888, step time: 18.111228942871094ms\r\n",,terminal_output +10013,12527846,"TERMINAL",0,0,"Step 4164, loss: 0.4294741153717041, step time: 17.972707748413086ms\r\n",,terminal_output +10014,12527912,"TERMINAL",0,0,"Step 4165, loss: 0.49959510564804077, step time: 17.08388328552246ms\r\n",,terminal_output +10015,12527977,"TERMINAL",0,0,"Step 4166, loss: 0.3939529061317444, step time: 17.146825790405273ms\r\n",,terminal_output +10016,12528043,"TERMINAL",0,0,"Step 4167, loss: 0.4066638946533203, step time: 17.949819564819336ms\r\n",,terminal_output +10017,12528109,"TERMINAL",0,0,"Step 4168, loss: 0.4239635765552521, step time: 17.46392250061035ms\r\n",,terminal_output +10018,12528175,"TERMINAL",0,0,"Step 4169, loss: 0.4028860330581665, step time: 17.375469207763672ms\r\n",,terminal_output +10019,12528242,"TERMINAL",0,0,"Step 4170, loss: 0.5851848721504211, step time: 17.763853073120117ms\r\n",,terminal_output +10020,12528341,"TERMINAL",0,0,"Step 4171, loss: 0.3730749189853668, step time: 17.579317092895508ms\r\nStep 4172, loss: 0.40155547857284546, step time: 17.457962036132812ms\r\n",,terminal_output +10021,12528407,"TERMINAL",0,0,"Step 4173, loss: 0.4182925522327423, step time: 18.03445816040039ms\r\n",,terminal_output +10022,12528474,"TERMINAL",0,0,"Step 4174, loss: 0.3828583359718323, step time: 17.234325408935547ms\r\n",,terminal_output +10023,12528541,"TERMINAL",0,0,"Step 4175, loss: 1.3107414245605469, step time: 17.55666732788086ms\r\n",,terminal_output +10024,12528614,"TERMINAL",0,0,"Step 4176, loss: 0.6200284957885742, step time: 17.603635787963867ms\r\n",,terminal_output +10025,12528661,"TERMINAL",0,0,"Step 4177, loss: 0.5831300020217896, step time: 17.24696159362793ms\r\n",,terminal_output +10026,12528726,"TERMINAL",0,0,"Step 4178, loss: 0.4411971867084503, step time: 17.11297035217285ms\r\n",,terminal_output +10027,12528793,"TERMINAL",0,0,"Step 4179, loss: 0.4088217616081238, step time: 17.77052879333496ms\r\n",,terminal_output +10028,12528860,"TERMINAL",0,0,"Step 4180, loss: 0.5689710974693298, step time: 17.042160034179688ms\r\n",,terminal_output +10029,12528925,"TERMINAL",0,0,"Step 4181, loss: 0.38971900939941406, step time: 17.482280731201172ms\r\n",,terminal_output +10030,12528991,"TERMINAL",0,0,"Step 4182, loss: 0.46227386593818665, step time: 18.019437789916992ms\r\n",,terminal_output +10031,12529056,"TERMINAL",0,0,"Step 4183, loss: 0.39441365003585815, step time: 17.454862594604492ms\r\n",,terminal_output +10032,12529121,"TERMINAL",0,0,"Step 4184, loss: 0.5276151299476624, step time: 17.114639282226562ms\r\n",,terminal_output +10033,12529186,"TERMINAL",0,0,"Step 4185, loss: 0.3999396562576294, step time: 18.016815185546875ms\r\n",,terminal_output +10034,12529249,"TERMINAL",0,0,"Step 4186, loss: 0.39629292488098145, step time: 17.396211624145508ms\r\n",,terminal_output +10035,12529314,"TERMINAL",0,0,"Step 4187, loss: 0.39523524045944214, step time: 18.95737648010254ms\r\n",,terminal_output +10036,12529422,"TERMINAL",0,0,"Step 4188, loss: 0.49772870540618896, step time: 18.138647079467773ms\r\nStep 4189, loss: 0.38687750697135925, step time: 18.033981323242188ms\r\n",,terminal_output 
+10037,12529490,"TERMINAL",0,0,"Step 4190, loss: 0.39166873693466187, step time: 17.802715301513672ms\r\n",,terminal_output +10038,12529556,"TERMINAL",0,0,"Step 4191, loss: 0.42019888758659363, step time: 18.073081970214844ms\r\n",,terminal_output +10039,12529622,"TERMINAL",0,0,"Step 4192, loss: 0.4159165322780609, step time: 17.615795135498047ms\r\n",,terminal_output +10040,12529687,"TERMINAL",0,0,"Step 4193, loss: 0.49409255385398865, step time: 17.540931701660156ms\r\n",,terminal_output +10041,12529810,"TERMINAL",0,0,"Step 4194, loss: 0.43259289860725403, step time: 17.821788787841797ms\r\nStep 4195, loss: 0.5956458449363708, step time: 17.385482788085938ms\r\n",,terminal_output +10042,12529871,"TERMINAL",0,0,"Step 4196, loss: 0.391620397567749, step time: 17.23480224609375ms\r\n",,terminal_output +10043,12529932,"TERMINAL",0,0,"Step 4197, loss: 1.4304664134979248, step time: 17.932415008544922ms\r\n",,terminal_output +10044,12530028,"TERMINAL",0,0,"Step 4198, loss: 0.4298824667930603, step time: 17.220020294189453ms\r\n",,terminal_output +10045,12530091,"TERMINAL",0,0,"Step 4199, loss: 0.38717561960220337, step time: 18.764734268188477ms\r\n",,terminal_output +10046,12530144,"TERMINAL",0,0,"Step 4200, loss: 0.40131881833076477, step time: 17.60554313659668ms\r\n",,terminal_output +10047,12530250,"TERMINAL",0,0,"Step 4201, loss: 0.4005872905254364, step time: 17.38762855529785ms\r\nStep 4202, loss: 0.3900701701641083, step time: 17.646312713623047ms\r\n",,terminal_output +10048,12530316,"TERMINAL",0,0,"Step 4203, loss: 0.3959106504917145, step time: 18.112659454345703ms\r\n",,terminal_output +10049,12530382,"TERMINAL",0,0,"Step 4204, loss: 0.43065014481544495, step time: 17.363786697387695ms\r\n",,terminal_output +10050,12530436,"TERMINAL",0,0,"Step 4205, loss: 0.3869365155696869, step time: 17.61174201965332ms\r\n",,terminal_output +10051,12530502,"TERMINAL",0,0,"Step 4206, loss: 0.43333372473716736, step time: 18.135786056518555ms\r\n",,terminal_output +10052,12530567,"TERMINAL",0,0,"Step 4207, loss: 0.38527458906173706, step time: 17.66490936279297ms\r\n",,terminal_output +10053,12530633,"TERMINAL",0,0,"Step 4208, loss: 1.3453208208084106, step time: 17.574548721313477ms\r\n",,terminal_output +10054,12530699,"TERMINAL",0,0,"Step 4209, loss: 0.4009964168071747, step time: 18.25237274169922ms\r\n",,terminal_output +10055,12530766,"TERMINAL",0,0,"Step 4210, loss: 0.38875389099121094, step time: 17.12965965270996ms\r\n",,terminal_output +10056,12530831,"TERMINAL",0,0,"Step 4211, loss: 0.4170868992805481, step time: 17.412662506103516ms\r\n",,terminal_output +10057,12530897,"TERMINAL",0,0,"Step 4212, loss: 0.4098030924797058, step time: 17.82965660095215ms\r\n",,terminal_output +10058,12530963,"TERMINAL",0,0,"Step 4213, loss: 0.4451367259025574, step time: 17.175674438476562ms\r\n",,terminal_output +10059,12531029,"TERMINAL",0,0,"Step 4214, loss: 0.4596688747406006, step time: 17.45295524597168ms\r\n",,terminal_output +10060,12531096,"TERMINAL",0,0,"Step 4215, loss: 0.3904966711997986, step time: 17.95220375061035ms\r\n",,terminal_output +10061,12531163,"TERMINAL",0,0,"Step 4216, loss: 0.43710437417030334, step time: 16.974687576293945ms\r\n",,terminal_output +10062,12531229,"TERMINAL",0,0,"Step 4217, loss: 0.3775072991847992, step time: 17.305612564086914ms\r\n",,terminal_output +10063,12531324,"TERMINAL",0,0,"Step 4218, loss: 0.37360072135925293, step time: 17.489910125732422ms\r\nStep 4219, loss: 0.39513301849365234, step time: 17.85588264465332ms\r\n",,terminal_output 
+10064,12531388,"TERMINAL",0,0,"Step 4220, loss: 0.3841243088245392, step time: 17.38572120666504ms\r\n",,terminal_output +10065,12531454,"TERMINAL",0,0,"Step 4221, loss: 0.37679871916770935, step time: 17.76719093322754ms\r\n",,terminal_output +10066,12531518,"TERMINAL",0,0,"Step 4222, loss: 1.6637173891067505, step time: 17.27008819580078ms\r\n",,terminal_output +10067,12531582,"TERMINAL",0,0,"Step 4223, loss: 0.3729419410228729, step time: 17.301082611083984ms\r\n",,terminal_output +10068,12531649,"TERMINAL",0,0,"Step 4224, loss: 0.39543572068214417, step time: 18.62192153930664ms\r\n",,terminal_output +10069,12531716,"TERMINAL",0,0,"Step 4225, loss: 0.38525086641311646, step time: 23.159503936767578ms\r\n",,terminal_output +10070,12531781,"TERMINAL",0,0,"Step 4226, loss: 0.38669994473457336, step time: 24.649381637573242ms\r\n",,terminal_output +10071,12531846,"TERMINAL",0,0,"Step 4227, loss: 0.4094327390193939, step time: 25.374412536621094ms\r\n",,terminal_output +10072,12531911,"TERMINAL",0,0,"Step 4228, loss: 0.544869065284729, step time: 24.215221405029297ms\r\n",,terminal_output +10073,12531976,"TERMINAL",0,0,"Step 4229, loss: 0.5107325911521912, step time: 25.506019592285156ms\r\n",,terminal_output +10074,12532118,"TERMINAL",0,0,"Step 4230, loss: 0.7038261294364929, step time: 25.877952575683594ms\r\nStep 4231, loss: 0.38979339599609375, step time: 25.142669677734375ms\r\n",,terminal_output +10075,12532185,"TERMINAL",0,0,"Step 4232, loss: 0.3961845934391022, step time: 25.031328201293945ms\r\n",,terminal_output +10076,12532251,"TERMINAL",0,0,"Step 4233, loss: 0.3798200488090515, step time: 22.70364761352539ms\r\n",,terminal_output +10077,12532317,"TERMINAL",0,0,"Step 4234, loss: 0.4244937300682068, step time: 19.2568302154541ms\r\n",,terminal_output +10078,12532392,"TERMINAL",0,0,"Step 4235, loss: 0.41289108991622925, step time: 18.12911033630371ms\r\n",,terminal_output +10079,12532449,"TERMINAL",0,0,"Step 4236, loss: 0.3723524808883667, step time: 18.047332763671875ms\r\n",,terminal_output +10080,12532558,"TERMINAL",0,0,"Step 4237, loss: 0.4425422251224518, step time: 17.3490047454834ms\r\nStep 4238, loss: 0.5641791224479675, step time: 17.721891403198242ms\r\n",,terminal_output +10081,12532655,"TERMINAL",0,0,"Step 4239, loss: 0.45717233419418335, step time: 18.032550811767578ms\r\n",,terminal_output +10082,12532708,"TERMINAL",0,0,"Step 4240, loss: 0.46416810154914856, step time: 17.36736297607422ms\r\n",,terminal_output +10083,12532813,"TERMINAL",0,0,"Step 4241, loss: 0.3887636959552765, step time: 17.617464065551758ms\r\nStep 4242, loss: 0.38960856199264526, step time: 17.828702926635742ms\r\n",,terminal_output +10084,12532909,"TERMINAL",0,0,"Step 4243, loss: 1.050268530845642, step time: 17.60125160217285ms\r\n",,terminal_output +10085,12532971,"TERMINAL",0,0,"Step 4244, loss: 0.3842370808124542, step time: 17.369747161865234ms\r\n",,terminal_output +10086,12533033,"TERMINAL",0,0,"Step 4245, loss: 0.47637128829956055, step time: 17.95506477355957ms\r\n",,terminal_output +10087,12533097,"TERMINAL",0,0,"Step 4246, loss: 0.3983449935913086, step time: 17.26698875427246ms\r\n",,terminal_output +10088,12533161,"TERMINAL",0,0,"Step 4247, loss: 0.42124560475349426, step time: 17.58718490600586ms\r\n",,terminal_output +10089,12533224,"TERMINAL",0,0,"Step 4248, loss: 1.5444416999816895, step time: 17.88020133972168ms\r\n",,terminal_output +10090,12533323,"TERMINAL",0,0,"Step 4249, loss: 0.39635878801345825, step time: 17.244338989257812ms\r\nStep 4250, loss: 0.37442073225975037, step 
time: 17.310142517089844ms\r\n",,terminal_output +10091,12533425,"TERMINAL",0,0,"Step 4251, loss: 0.3841531276702881, step time: 18.143177032470703ms\r\n",,terminal_output +10092,12533479,"TERMINAL",0,0,"Step 4252, loss: 0.3933289647102356, step time: 17.36736297607422ms\r\n",,terminal_output +10093,12533531,"TERMINAL",0,0,"Step 4253, loss: 0.4321526288986206, step time: 17.327070236206055ms\r\n",,terminal_output +10094,12533640,"TERMINAL",0,0,"Step 4254, loss: 0.38908258080482483, step time: 17.86661148071289ms\r\nStep 4255, loss: 0.38416898250579834, step time: 17.33851432800293ms\r\n",,terminal_output +10095,12533765,"TERMINAL",0,0,"Step 4256, loss: 0.4369804859161377, step time: 17.44818687438965ms\r\nStep 4257, loss: 0.37924250960350037, step time: 18.039226531982422ms\r\n",,terminal_output +10096,12533894,"TERMINAL",0,0,"Step 4258, loss: 0.3841896057128906, step time: 17.40288734436035ms\r\nStep 4259, loss: 0.40154868364334106, step time: 18.826723098754883ms\r\n",,terminal_output +10097,12533963,"TERMINAL",0,0,"Step 4260, loss: 0.3876505494117737, step time: 17.912864685058594ms\r\n",,terminal_output +10098,12534024,"TERMINAL",0,0,"Step 4261, loss: 0.38834425806999207, step time: 17.508983612060547ms\r\n",,terminal_output +10099,12534087,"TERMINAL",0,0,"Step 4262, loss: 0.3929530084133148, step time: 17.753124237060547ms\r\n",,terminal_output +10100,12534152,"TERMINAL",0,0,"Step 4263, loss: 0.37805840373039246, step time: 17.680644989013672ms\r\n",,terminal_output +10101,12534215,"TERMINAL",0,0,"Step 4264, loss: 0.3761991262435913, step time: 17.239809036254883ms\r\n",,terminal_output +10102,12534311,"TERMINAL",0,0,"Step 4265, loss: 0.3608344793319702, step time: 17.575979232788086ms\r\n",,terminal_output +10103,12534373,"TERMINAL",0,0,"Step 4266, loss: 0.37316977977752686, step time: 19.003629684448242ms\r\n",,terminal_output +10104,12534434,"TERMINAL",0,0,"Step 4267, loss: 0.4153344929218292, step time: 17.74764060974121ms\r\n",,terminal_output +10105,12534498,"TERMINAL",0,0,"Step 4268, loss: 0.39506444334983826, step time: 17.410755157470703ms\r\n",,terminal_output +10106,12534557,"TERMINAL",0,0,"Step 4269, loss: 0.38648563623428345, step time: 18.174171447753906ms\r\n",,terminal_output +10107,12534623,"TERMINAL",0,0,"Step 4270, loss: 0.6128065586090088, step time: 17.570018768310547ms\r\n",,terminal_output +10108,12534685,"TERMINAL",0,0,"Step 4271, loss: 0.39776524901390076, step time: 17.390966415405273ms\r\n",,terminal_output +10109,12534790,"TERMINAL",0,0,"Step 4272, loss: 0.7090855240821838, step time: 18.006086349487305ms\r\nStep 4273, loss: 0.3904110789299011, step time: 17.207622528076172ms\r\n",,terminal_output +10110,12534858,"TERMINAL",0,0,"Step 4274, loss: 0.40244269371032715, step time: 17.44365692138672ms\r\n",,terminal_output +10111,12534953,"TERMINAL",0,0,"Step 4275, loss: 0.4222319722175598, step time: 17.928123474121094ms\r\n",,terminal_output +10112,12535004,"TERMINAL",0,0,"Step 4276, loss: 0.38891246914863586, step time: 17.448902130126953ms\r\n",,terminal_output +10113,12535056,"TERMINAL",0,0,"Step 4277, loss: 0.38192278146743774, step time: 17.555713653564453ms\r\n",,terminal_output +10114,12535167,"TERMINAL",0,0,"Step 4278, loss: 0.3693772852420807, step time: 17.803192138671875ms\r\nStep 4279, loss: 0.44240644574165344, step time: 17.283201217651367ms\r\n",,terminal_output +10115,12535308,"TERMINAL",0,0,"Step 4280, loss: 0.3634501099586487, step time: 17.49444007873535ms\r\nStep 4281, loss: 0.3573331832885742, step time: 
17.99774169921875ms\r\n",,terminal_output +10116,12535373,"TERMINAL",0,0,"Step 4282, loss: 0.4991653561592102, step time: 17.40241050720215ms\r\n",,terminal_output +10117,12535462,"TERMINAL",0,0,"Step 4283, loss: 0.35770732164382935, step time: 17.480850219726562ms\r\n",,terminal_output +10118,12535866,"TERMINAL",0,0,"Step 4284, loss: 0.35268211364746094, step time: 295.85719108581543ms\r\nStep 4285, loss: 0.42774370312690735, step time: 25.106430053710938ms\r\nStep 4286, loss: 0.3663550019264221, step time: 20.305395126342773ms\r\n",,terminal_output +10119,12535927,"TERMINAL",0,0,"Step 4287, loss: 0.3549250364303589, step time: 18.99433135986328ms\r\n",,terminal_output +10120,12535995,"TERMINAL",0,0,"Step 4288, loss: 0.3626345098018646, step time: 17.796039581298828ms\r\n",,terminal_output +10121,12536091,"TERMINAL",0,0,"Step 4289, loss: 0.37179914116859436, step time: 17.86661148071289ms\r\n",,terminal_output +10122,12536144,"TERMINAL",0,0,"Step 4290, loss: 0.3452754318714142, step time: 18.152952194213867ms\r\n",,terminal_output +10123,12536198,"TERMINAL",0,0,"Step 4291, loss: 0.37395909428596497, step time: 17.61150360107422ms\r\n",,terminal_output +10124,12536306,"TERMINAL",0,0,"Step 4292, loss: 0.3849221169948578, step time: 17.564773559570312ms\r\nStep 4293, loss: 0.34958383440971375, step time: 18.210887908935547ms\r\n",,terminal_output +10125,12536401,"TERMINAL",0,0,"Step 4294, loss: 0.41256439685821533, step time: 17.417430877685547ms\r\n",,terminal_output +10126,12536568,"TERMINAL",0,0,"Step 4295, loss: 0.38064929842948914, step time: 17.966508865356445ms\r\nStep 4296, loss: 0.3676804006099701, step time: 18.239736557006836ms\r\n^CException ignored in: \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lib/__init__.py"", line 128, in _xla_gc_callback\r\n def _xla_gc_callback(*args):\r\nKeyboardInterrupt: \r\nStep 4297, loss: 0.3560146689414978, step time: 32.59420394897461ms\r\n",,terminal_output +10127,12536646,"TERMINAL",0,0,"Step 4298, loss: 0.35039186477661133, step time: 17.614364624023438ms\r\n",,terminal_output +10128,12536770,"TERMINAL",0,0,"Step 4299, loss: 0.46582987904548645, step time: 18.05591583251953ms\r\n^CTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 241, in <module>\r\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 341, in __format__\r\n return format(self._value[()], format_spec)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 641, in _value\r\n npy_value, did_copy = self._single_device_array_to_np_array_did_copy()\r\nKeyboardInterrupt\r\n",,terminal_output +10129,12536938,"TERMINAL",0,0,"^CException ignored in atexit callback: .teardown_atexit at 0x151a700c6680>\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 94, in teardown_atexit\r\n conn.teardown(hooks.exit_code)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 226, in teardown\r\n self._router.join()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/interface/router.py"", line 75, in join\r\n self._thread.join()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1096, in join\r\n self._wait_for_tstate_lock()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1116, in _wait_for_tstate_lock\r\n if lock.acquire(block, timeout):\r\nKeyboardInterrupt: \r\n",,terminal_output +10130,12537150,"TERMINAL",0,0,"^CException ignored in: .remove at 0x151aceb1a710>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n",,terminal_output +10131,12537216,"TERMINAL",0,0," def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +10132,12538668,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +10133,12547264,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +10134,12547266,"scripts_horeka/train_dynamics.sh",1624,0,"",shellscript,selection_mouse +10135,12547750,"scripts_horeka/train_dynamics.sh",1651,0,"",shellscript,selection_mouse +10136,12548259,"scripts_horeka/train_dynamics.sh",1647,0,"",shellscript,selection_mouse +10137,12549474,"scripts_horeka/train_dynamics.sh",1619,0,"",shellscript,selection_mouse +10138,12549643,"scripts_horeka/train_dynamics.sh",1606,14,"dyna_num_heads",shellscript,selection_mouse +10139,12550547,"scripts_horeka/train_dynamics.sh",1473,0,"",shellscript,selection_mouse +10140,12550663,"scripts_horeka/train_dynamics.sh",1455,18,"num_latent_actions",shellscript,selection_mouse +10141,12550818,"scripts_horeka/train_dynamics.sh",1449,29," --num_latent_actions=6 \\n",shellscript,selection_mouse +10142,12551409,"scripts_horeka/train_dynamics.sh",1449,29,"",shellscript,content +10143,12553287,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +10144,12553490,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+10145,12553647,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3458179\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0408\r\nSLURM_JOB_START_TIME=1751296933\r\nSLURM_STEP_NODELIST=hkn0408\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300533\r\nSLURM_PMI2_SRUN_PORT=38655\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306855\r\nSLURM_PTY_PORT=36241\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e12.hkn0408\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0408\r\nSLURM_SRUN_COMM_PORT=42547\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306855\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0408\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=42547\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0408\r\n",,terminal_output +10146,12556439,"TERMINAL",0,0,"2025-06-30 17:53:20.852128: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751298800.865467 3484254 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751298800.869726 3484254 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n",,terminal_output +10147,12556492,"TERMINAL",0,0,"W0000 00:00:1751298800.882121 3484254 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751298800.882139 3484254 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751298800.882142 3484254 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751298800.882144 3484254 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +10148,12561112,"TERMINAL",0,0,"W0000 00:00:1751298805.510746 3484254 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +10149,12561830,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +10150,12562649,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +10151,12563375,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_175327-tdctixii\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-tiny-overfit-big-lr-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/tdctixii\r\n",,terminal_output +10152,12564799,"TERMINAL",0,0,"2025-06-30 17:53:29.149722: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10153,12572578,"train_dynamics.py",0,0,"",python,tab +10154,12573347,"genie.py",0,0,"",python,tab +10155,12576716,"genie.py",2560,0,"",python,selection_mouse +10156,12576717,"genie.py",2559,0,"",python,selection_command +10157,12576810,"genie.py",2560,0,"",python,selection_mouse +10158,12576811,"genie.py",2559,0,"",python,selection_command +10159,12577274,"genie.py",2481,0,"",python,selection_mouse +10160,12577418,"genie.py",2478,5,"zeros",python,selection_mouse +10161,12577511,"TERMINAL",0,0,"2025-06-30 17:53:41.929643: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10162,12577988,"genie.py",2476,0,"",python,selection_mouse +10163,12578122,"genie.py",2474,3,"jnp",python,selection_mouse +10164,12578659,"genie.py",2480,0,"",python,selection_mouse +10165,12578808,"genie.py",2478,5,"zeros",python,selection_mouse +10166,12579407,"genie.py",2475,0,"",python,selection_mouse +10167,12579577,"genie.py",2474,3,"jnp",python,selection_mouse +10168,12580087,"genie.py",2481,0,"",python,selection_mouse +10169,12580225,"genie.py",2478,5,"zeros",python,selection_mouse +10170,12580945,"genie.py",2475,0,"",python,selection_mouse +10171,12581109,"genie.py",2474,3,"jnp",python,selection_mouse +10172,12581711,"genie.py",2560,0,"",python,selection_mouse +10173,12581714,"genie.py",2559,0,"",python,selection_command +10174,12581832,"genie.py",2560,0,"",python,selection_mouse +10175,12581847,"genie.py",2559,0,"",python,selection_command +10176,12582967,"genie.py",2511,0,"",python,selection_mouse +10177,12583628,"genie.py",2523,0,"",python,selection_mouse +10178,12583743,"genie.py",2518,16,"latent_patch_dim",python,selection_mouse +10179,12584406,"genie.py",2515,0,"",python,selection_mouse +10180,12584544,"genie.py",2513,4,"self",python,selection_mouse +10181,12584777,"genie.py",2513,21,"self.latent_patch_dim",python,selection_mouse +10182,12585242,"genie.py",2523,0,"",python,selection_mouse +10183,12585243,"genie.py",2518,16,"latent_patch_dim",python,selection_mouse +10184,12585502,"genie.py",2517,17,".latent_patch_dim",python,selection_mouse +10185,12585532,"genie.py",2513,21,"self.latent_patch_dim",python,selection_mouse +10186,12585957,"genie.py",2514,0,"",python,selection_mouse +10187,12585958,"genie.py",2513,4,"self",python,selection_mouse +10188,12586186,"genie.py",2513,5,"self.",python,selection_mouse +10189,12586199,"genie.py",2513,21,"self.latent_patch_dim",python,selection_mouse +10190,12586568,"genie.py",2521,0,"",python,selection_mouse +10191,12593062,"TERMINAL",0,0,"2025-06-30 17:53:57.452500: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10192,12599901,"TERMINAL",0,0,"2025-06-30 17:54:04.329815: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10193,12618644,"TERMINAL",0,0,"batch shape: (1, 16, 90, 160, 3)\r\n",,terminal_output +10194,12629903,"TERMINAL",0,0,"2025-06-30 17:54:34.328657: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 17:54:34.329243: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 17:54:34.329358: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 17:54:34.329996: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 17:54:34.331038: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +10195,12633259,"genie.py",0,0,"",python,tab +10196,12633261,"genie.py",2560,0,"",python,selection_mouse +10197,12633280,"genie.py",2559,0,"",python,selection_command +10198,12644709,"train_dynamics.py",0,0,"",python,tab +10199,12648362,"train_dynamics.py",2742,0,"",python,selection_mouse +10200,12648520,"train_dynamics.py",2737,8,"select_p",python,selection_mouse +10201,12649348,"train_dynamics.py",2749,0,"",python,selection_mouse +10202,12651547,"train_dynamics.py",2744,0,"",python,selection_mouse +10203,12651699,"train_dynamics.py",2737,8,"select_p",python,selection_mouse +10204,12651857,"train_dynamics.py",2729,46," select_p=select_probs.max(-1).mean(),\n",python,selection_mouse +10205,12652088,"train_dynamics.py",2729,115," select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n",python,selection_mouse +10206,12674925,"TERMINAL",0,0,"Step 0, loss: 8.909699440002441, step time: 55995.774269104004ms\r\n",,terminal_output +10207,12675033,"TERMINAL",0,0,"Step 1, loss: 8.718293190002441, step time: 28.285980224609375ms\r\n",,terminal_output +10208,12675099,"TERMINAL",0,0,"Step 2, loss: 8.347654342651367, step time: 21.987199783325195ms\r\n",,terminal_output +10209,12675162,"TERMINAL",0,0,"Step 3, loss: 8.447279930114746, step time: 19.145488739013672ms\r\n",,terminal_output +10210,12675214,"TERMINAL",0,0,"Step 4, loss: 7.9867167472839355, step time: 18.896818161010742ms\r\n",,terminal_output +10211,12675280,"TERMINAL",0,0,"Step 5, loss: 7.829794406890869, step time: 19.077539443969727ms\r\n",,terminal_output +10212,12675341,"TERMINAL",0,0,"Step 6, loss: 7.859234809875488, step time: 17.750263214111328ms\r\n",,terminal_output +10213,12675406,"TERMINAL",0,0,"Step 7, loss: 7.548120975494385, step time: 18.583059310913086ms\r\n",,terminal_output +10214,12675717,"TERMINAL",0,0,"Step 8, loss: 7.427960395812988, step time: 336.9109630584717ms\r\n",,terminal_output +10215,12675849,"TERMINAL",0,0,"Step 9, loss: 7.490304470062256, step time: 25.653362274169922ms\r\nStep 10, loss: 7.2540459632873535, step time: 19.99187469482422ms\r\n",,terminal_output +10216,12675951,"TERMINAL",0,0,"Step 11, loss: 7.244624137878418, step time: 20.21169662475586ms\r\n",,terminal_output +10217,12676013,"TERMINAL",0,0,"Step 12, loss: 7.09035587310791, step time: 18.7075138092041ms\r\n",,terminal_output +10218,12676127,"TERMINAL",0,0,"Step 13, loss: 6.97866153717041, step time: 
17.61174201965332ms\r\nStep 14, loss: 6.969475269317627, step time: 18.118619918823242ms\r\n",,terminal_output +10219,12676234,"TERMINAL",0,0,"Step 15, loss: 6.91555118560791, step time: 17.583608627319336ms\r\nStep 16, loss: 6.798232078552246, step time: 17.46201515197754ms\r\n",,terminal_output +10220,12676298,"TERMINAL",0,0,"Step 17, loss: 6.738493919372559, step time: 17.962932586669922ms\r\n",,terminal_output +10221,12676362,"TERMINAL",0,0,"Step 18, loss: 6.92435359954834, step time: 17.38762855529785ms\r\n",,terminal_output +10222,12676425,"TERMINAL",0,0,"Step 19, loss: 6.621379852294922, step time: 17.467498779296875ms\r\n",,terminal_output +10223,12676488,"TERMINAL",0,0,"Step 20, loss: 6.606697082519531, step time: 17.93527603149414ms\r\n",,terminal_output +10224,12676550,"TERMINAL",0,0,"Step 21, loss: 6.544436454772949, step time: 17.784833908081055ms\r\n",,terminal_output +10225,12676612,"TERMINAL",0,0,"Step 22, loss: 6.5061140060424805, step time: 18.534421920776367ms\r\n",,terminal_output +10226,12676758,"TERMINAL",0,0,"Step 23, loss: 6.438405990600586, step time: 18.338441848754883ms\r\n",,terminal_output +10227,12676821,"TERMINAL",0,0,"Step 24, loss: 6.389014720916748, step time: 17.542362213134766ms\r\nStep 25, loss: 6.341236591339111, step time: 17.655611038208008ms\r\n",,terminal_output +10228,12676875,"TERMINAL",0,0,"Step 26, loss: 6.309741973876953, step time: 19.756555557250977ms\r\n",,terminal_output +10229,12676972,"TERMINAL",0,0,"Step 27, loss: 6.273733139038086, step time: 19.011259078979492ms\r\n",,terminal_output +10230,12677045,"TERMINAL",0,0,"Step 28, loss: 6.283977508544922, step time: 17.700910568237305ms\r\nStep 29, loss: 6.205024719238281, step time: 18.34559440612793ms\r\n",,terminal_output +10231,12677141,"TERMINAL",0,0,"Step 30, loss: 6.18203067779541, step time: 17.497539520263672ms\r\n",,terminal_output +10232,12677194,"TERMINAL",0,0,"Step 31, loss: 6.179567813873291, step time: 17.743349075317383ms\r\n",,terminal_output +10233,12677325,"TERMINAL",0,0,"Step 32, loss: 6.168574810028076, step time: 18.02515983581543ms\r\nStep 33, loss: 6.058864593505859, step time: 17.603158950805664ms\r\n",,terminal_output +10234,12677432,"TERMINAL",0,0,"Step 34, loss: 6.059278964996338, step time: 17.434120178222656ms\r\nStep 35, loss: 6.00441837310791, step time: 18.187284469604492ms\r\n",,terminal_output +10235,12677496,"TERMINAL",0,0,"Step 36, loss: 6.001573085784912, step time: 19.676923751831055ms\r\n",,terminal_output +10236,12677559,"TERMINAL",0,0,"Step 37, loss: 5.95756721496582, step time: 17.581701278686523ms\r\n",,terminal_output +10237,12677624,"TERMINAL",0,0,"Step 38, loss: 5.961080074310303, step time: 18.058300018310547ms\r\n",,terminal_output +10238,12677688,"TERMINAL",0,0,"Step 39, loss: 5.896195411682129, step time: 17.710447311401367ms\r\n",,terminal_output +10239,12677750,"TERMINAL",0,0,"Step 40, loss: 5.865736961364746, step time: 17.54021644592285ms\r\n",,terminal_output +10240,12677821,"TERMINAL",0,0,"Step 41, loss: 5.850093841552734, step time: 18.0816650390625ms\r\n",,terminal_output +10241,12677878,"TERMINAL",0,0,"Step 42, loss: 5.809415817260742, step time: 17.41647720336914ms\r\n",,terminal_output +10242,12677943,"TERMINAL",0,0,"Step 43, loss: 5.837826728820801, step time: 17.814159393310547ms\r\n",,terminal_output +10243,12678005,"TERMINAL",0,0,"Step 44, loss: 5.7816033363342285, step time: 19.07491683959961ms\r\n",,terminal_output +10244,12678068,"TERMINAL",0,0,"Step 45, loss: 5.744215488433838, step time: 
17.76576042175293ms\r\n",,terminal_output +10245,12678128,"TERMINAL",0,0,"Step 46, loss: 5.706871509552002, step time: 17.446279525756836ms\r\n",,terminal_output +10246,12678190,"TERMINAL",0,0,"Step 47, loss: 5.679916858673096, step time: 18.169641494750977ms\r\n",,terminal_output +10247,12678250,"TERMINAL",0,0,"Step 48, loss: 5.6694560050964355, step time: 17.486572265625ms\r\n",,terminal_output +10248,12678311,"TERMINAL",0,0,"Step 49, loss: 5.622962474822998, step time: 17.50493049621582ms\r\n",,terminal_output +10249,12678375,"TERMINAL",0,0,"Step 50, loss: 5.64215087890625, step time: 18.021106719970703ms\r\n",,terminal_output +10250,12678438,"TERMINAL",0,0,"Step 51, loss: 5.574595928192139, step time: 17.68970489501953ms\r\n",,terminal_output +10251,12678554,"TERMINAL",0,0,"Step 52, loss: 5.644159317016602, step time: 17.431020736694336ms\r\nStep 53, loss: 5.607558250427246, step time: 18.057584762573242ms\r\n",,terminal_output +10252,12678703,"TERMINAL",0,0,"Step 54, loss: 5.527812480926514, step time: 17.393112182617188ms\r\nStep 55, loss: 5.585357666015625, step time: 17.762184143066406ms\r\n",,terminal_output +10253,12678808,"TERMINAL",0,0,"Step 56, loss: 5.478453159332275, step time: 18.027067184448242ms\r\n",,terminal_output +10254,12678871,"TERMINAL",0,0,"Step 57, loss: 5.491310119628906, step time: 17.575502395629883ms\r\nStep 58, loss: 5.444187641143799, step time: 17.530202865600586ms\r\n",,terminal_output +10255,12678930,"TERMINAL",0,0,"Step 59, loss: 5.420383453369141, step time: 18.151521682739258ms\r\n",,terminal_output +10256,12678992,"TERMINAL",0,0,"Step 60, loss: 5.4350738525390625, step time: 17.58861541748047ms\r\n",,terminal_output +10257,12679055,"TERMINAL",0,0,"Step 61, loss: 5.42104434967041, step time: 18.57900619506836ms\r\n",,terminal_output +10258,12679145,"TERMINAL",0,0,"Step 62, loss: 5.416147708892822, step time: 18.444061279296875ms\r\n",,terminal_output +10259,12679208,"TERMINAL",0,0,"Step 63, loss: 5.307103633880615, step time: 17.664194107055664ms\r\n",,terminal_output +10260,12679270,"TERMINAL",0,0,"Step 64, loss: 5.3246636390686035, step time: 17.497539520263672ms\r\n",,terminal_output +10261,12679321,"TERMINAL",0,0,"Step 65, loss: 5.297624111175537, step time: 17.902374267578125ms\r\n",,terminal_output +10262,12679465,"TERMINAL",0,0,"Step 66, loss: 5.294092178344727, step time: 17.343997955322266ms\r\nStep 67, loss: 5.245370864868164, step time: 17.401933670043945ms\r\n",,terminal_output +10263,12679516,"TERMINAL",0,0,"Step 68, loss: 5.2288994789123535, step time: 17.75979995727539ms\r\n",,terminal_output +10264,12679619,"TERMINAL",0,0,"Step 69, loss: 5.203762054443359, step time: 17.39811897277832ms\r\nStep 70, loss: 5.177609443664551, step time: 17.430782318115234ms\r\n",,terminal_output +10265,12679711,"TERMINAL",0,0,"Step 71, loss: 5.14149808883667, step time: 17.894268035888672ms\r\n",,terminal_output +10266,12679763,"TERMINAL",0,0,"Step 72, loss: 5.160760402679443, step time: 17.482757568359375ms\r\n",,terminal_output +10267,12679869,"TERMINAL",0,0,"Step 73, loss: 5.102837085723877, step time: 18.67508888244629ms\r\nStep 74, loss: 5.109338760375977, step time: 18.25404167175293ms\r\n",,terminal_output +10268,12679970,"TERMINAL",0,0,"Step 75, loss: 5.1594014167785645, step time: 17.3037052154541ms\r\n",,terminal_output +10269,12680031,"TERMINAL",0,0,"Step 76, loss: 5.056812763214111, step time: 17.263412475585938ms\r\n",,terminal_output +10270,12680095,"TERMINAL",0,0,"Step 77, loss: 5.097204208374023, step time: 
17.730712890625ms\r\n",,terminal_output +10271,12680147,"TERMINAL",0,0,"Step 78, loss: 5.024117946624756, step time: 17.20595359802246ms\r\n",,terminal_output +10272,12680253,"TERMINAL",0,0,"Step 79, loss: 4.994380950927734, step time: 17.372608184814453ms\r\nStep 80, loss: 5.043018341064453, step time: 17.75050163269043ms\r\n",,terminal_output +10273,12680363,"TERMINAL",0,0,"Step 81, loss: 4.993098735809326, step time: 17.18926429748535ms\r\nStep 82, loss: 4.9943108558654785, step time: 17.203807830810547ms\r\n",,terminal_output +10274,12680427,"TERMINAL",0,0,"Step 83, loss: 4.945209980010986, step time: 17.678499221801758ms\r\n",,terminal_output +10275,12680522,"TERMINAL",0,0,"Step 84, loss: 4.904895305633545, step time: 17.207622528076172ms\r\n",,terminal_output +10276,12680574,"TERMINAL",0,0,"Step 85, loss: 4.882058620452881, step time: 17.516136169433594ms\r\n",,terminal_output +10277,12680719,"TERMINAL",0,0,"Step 86, loss: 5.078086853027344, step time: 17.680644989013672ms\r\nStep 87, loss: 4.942941665649414, step time: 17.142534255981445ms\r\n",,terminal_output +10278,12680770,"TERMINAL",0,0,"Step 88, loss: 4.877025604248047, step time: 17.128944396972656ms\r\n",,terminal_output +10279,12680878,"TERMINAL",0,0,"Step 89, loss: 4.8621320724487305, step time: 17.84682273864746ms\r\nStep 90, loss: 4.821526050567627, step time: 17.223358154296875ms\r\n",,terminal_output +10280,12680931,"TERMINAL",0,0,"Step 91, loss: 4.782381057739258, step time: 17.17662811279297ms\r\n",,terminal_output +10281,12681032,"TERMINAL",0,0,"Step 92, loss: 4.844040393829346, step time: 17.737150192260742ms\r\n",,terminal_output +10282,12681085,"TERMINAL",0,0,"Step 93, loss: 4.95897102355957, step time: 17.029285430908203ms\r\n",,terminal_output +10283,12681190,"TERMINAL",0,0,"Step 94, loss: 4.77754020690918, step time: 17.167329788208008ms\r\nStep 95, loss: 4.716156005859375, step time: 17.664670944213867ms\r\n",,terminal_output +10284,12681287,"TERMINAL",0,0,"Step 96, loss: 4.740158557891846, step time: 17.23003387451172ms\r\n",,terminal_output +10285,12681339,"TERMINAL",0,0,"Step 97, loss: 4.685649394989014, step time: 17.40550994873047ms\r\n",,terminal_output +10286,12681446,"TERMINAL",0,0,"Step 98, loss: 4.687989711761475, step time: 17.97199249267578ms\r\nStep 99, loss: 4.749385833740234, step time: 17.348289489746094ms\r\n",,terminal_output +10287,12681508,"TERMINAL",0,0,"Step 100, loss: 4.67085075378418, step time: 17.42720603942871ms\r\n",,terminal_output +10288,12681570,"TERMINAL",0,0,"Step 101, loss: 4.685153007507324, step time: 18.15485954284668ms\r\n",,terminal_output +10289,12681631,"TERMINAL",0,0,"Step 102, loss: 4.6317291259765625, step time: 17.856836318969727ms\r\n",,terminal_output +10290,12681692,"TERMINAL",0,0,"Step 103, loss: 4.7683258056640625, step time: 17.444372177124023ms\r\n",,terminal_output +10291,12681752,"TERMINAL",0,0,"Step 104, loss: 4.6252336502075195, step time: 18.0203914642334ms\r\n",,terminal_output +10292,12681813,"TERMINAL",0,0,"Step 105, loss: 4.698330879211426, step time: 17.424583435058594ms\r\n",,terminal_output +10293,12681875,"TERMINAL",0,0,"Step 106, loss: 4.613829135894775, step time: 17.51852035522461ms\r\n",,terminal_output +10294,12681971,"TERMINAL",0,0,"Step 107, loss: 4.7080206871032715, step time: 18.072128295898438ms\r\n",,terminal_output +10295,12682033,"TERMINAL",0,0,"Step 108, loss: 4.761252403259277, step time: 18.415451049804688ms\r\n",,terminal_output +10296,12682096,"TERMINAL",0,0,"Step 109, loss: 4.682215690612793, step time: 
17.44842529296875ms\r\n",,terminal_output +10297,12682156,"TERMINAL",0,0,"Step 110, loss: 4.612472057342529, step time: 18.153667449951172ms\r\n",,terminal_output +10298,12682220,"TERMINAL",0,0,"Step 111, loss: 4.65864896774292, step time: 17.667293548583984ms\r\n",,terminal_output +10299,12682296,"TERMINAL",0,0,"Step 112, loss: 4.539667129516602, step time: 18.427610397338867ms\r\n",,terminal_output +10300,12682348,"TERMINAL",0,0,"Step 113, loss: 4.5633544921875, step time: 18.167734146118164ms\r\n",,terminal_output +10301,12682412,"TERMINAL",0,0,"Step 114, loss: 4.562738418579102, step time: 17.623424530029297ms\r\n",,terminal_output +10302,12682473,"TERMINAL",0,0,"Step 115, loss: 4.575807094573975, step time: 17.583847045898438ms\r\n",,terminal_output +10303,12682535,"TERMINAL",0,0,"Step 116, loss: 4.575873851776123, step time: 18.0971622467041ms\r\n",,terminal_output +10304,12682596,"TERMINAL",0,0,"Step 117, loss: 4.4810261726379395, step time: 17.59052276611328ms\r\n",,terminal_output +10305,12682700,"TERMINAL",0,0,"Step 118, loss: 4.4699554443359375, step time: 17.54164695739746ms\r\nStep 119, loss: 4.510281085968018, step time: 18.389225006103516ms\r\n",,terminal_output +10306,12682818,"TERMINAL",0,0,"Step 120, loss: 4.426855564117432, step time: 18.71800422668457ms\r\nStep 121, loss: 4.50761079788208, step time: 17.824649810791016ms\r\n",,terminal_output +10307,12682879,"TERMINAL",0,0,"Step 122, loss: 4.544761657714844, step time: 18.41449737548828ms\r\n",,terminal_output +10308,12682944,"TERMINAL",0,0,"Step 123, loss: 4.393248558044434, step time: 17.818212509155273ms\r\n",,terminal_output +10309,12683009,"TERMINAL",0,0,"Step 124, loss: 4.392709732055664, step time: 17.659425735473633ms\r\n",,terminal_output +10310,12683074,"TERMINAL",0,0,"Step 125, loss: 4.36198616027832, step time: 18.081188201904297ms\r\n",,terminal_output +10311,12683385,"TERMINAL",0,0,"Step 126, loss: 4.359892845153809, step time: 313.6465549468994ms\r\n",,terminal_output +10312,12683454,"TERMINAL",0,0,"Step 127, loss: 4.362560272216797, step time: 24.836063385009766ms\r\n",,terminal_output +10313,12683516,"TERMINAL",0,0,"Step 128, loss: 4.308724880218506, step time: 19.945621490478516ms\r\n",,terminal_output +10314,12683642,"TERMINAL",0,0,"Step 129, loss: 4.4736328125, step time: 18.58663558959961ms\r\nStep 130, loss: 4.451694011688232, step time: 17.835140228271484ms\r\n",,terminal_output +10315,12683705,"TERMINAL",0,0,"Step 131, loss: 4.323732376098633, step time: 18.412351608276367ms\r\n",,terminal_output +10316,12683765,"TERMINAL",0,0,"Step 132, loss: 4.332326889038086, step time: 17.84801483154297ms\r\n",,terminal_output +10317,12683835,"TERMINAL",0,0,"Step 133, loss: 4.475522518157959, step time: 17.917633056640625ms\r\n",,terminal_output +10318,12683931,"TERMINAL",0,0,"Step 134, loss: 4.274587154388428, step time: 18.468379974365234ms\r\n",,terminal_output +10319,12683997,"TERMINAL",0,0,"Step 135, loss: 4.277841091156006, step time: 18.233537673950195ms\r\n",,terminal_output +10320,12684050,"TERMINAL",0,0,"Step 136, loss: 4.285419464111328, step time: 17.79484748840332ms\r\n",,terminal_output +10321,12684155,"TERMINAL",0,0,"Step 137, loss: 4.330131530761719, step time: 17.99154281616211ms\r\nStep 138, loss: 4.50730562210083, step time: 18.10932159423828ms\r\n",,terminal_output +10322,12684217,"TERMINAL",0,0,"Step 139, loss: 4.242671489715576, step time: 17.798662185668945ms\r\n",,terminal_output +10323,12684280,"TERMINAL",0,0,"Step 140, loss: 4.259637355804443, step time: 
17.75336265563965ms\r\n",,terminal_output +10324,12684392,"TERMINAL",0,0,"Step 141, loss: 4.211062431335449, step time: 17.95482635498047ms\r\nStep 142, loss: 4.296913146972656, step time: 17.455339431762695ms\r\n",,terminal_output +10325,12684455,"TERMINAL",0,0,"Step 143, loss: 4.203567981719971, step time: 17.722606658935547ms\r\n",,terminal_output +10326,12684518,"TERMINAL",0,0,"Step 144, loss: 4.202986717224121, step time: 17.77362823486328ms\r\n",,terminal_output +10327,12684580,"TERMINAL",0,0,"Step 145, loss: 4.217236518859863, step time: 17.83156394958496ms\r\n",,terminal_output +10328,12684645,"TERMINAL",0,0,"Step 146, loss: 4.214910507202148, step time: 17.626523971557617ms\r\n",,terminal_output +10329,12684710,"TERMINAL",0,0,"Step 147, loss: 4.199376583099365, step time: 18.23115348815918ms\r\n",,terminal_output +10330,12684774,"TERMINAL",0,0,"Step 148, loss: 4.164237976074219, step time: 17.761707305908203ms\r\n",,terminal_output +10331,12684838,"TERMINAL",0,0,"Step 149, loss: 4.209935188293457, step time: 19.82855796813965ms\r\n",,terminal_output +10332,12684896,"TERMINAL",0,0,"Step 150, loss: 4.138351917266846, step time: 18.157482147216797ms\r\n",,terminal_output +10333,12684959,"TERMINAL",0,0,"Step 151, loss: 4.1342973709106445, step time: 17.822265625ms\r\n",,terminal_output +10334,12685028,"TERMINAL",0,0,"Step 152, loss: 4.102519989013672, step time: 17.539262771606445ms\r\n",,terminal_output +10335,12685090,"TERMINAL",0,0,"Step 153, loss: 4.177949905395508, step time: 18.645763397216797ms\r\n",,terminal_output +10336,12685161,"TERMINAL",0,0,"Step 154, loss: 4.082982540130615, step time: 17.952442169189453ms\r\n",,terminal_output +10337,12685223,"TERMINAL",0,0,"Step 155, loss: 4.197655200958252, step time: 17.804622650146484ms\r\n",,terminal_output +10338,12685287,"TERMINAL",0,0,"Step 156, loss: 4.128884315490723, step time: 18.053531646728516ms\r\n",,terminal_output +10339,12685352,"TERMINAL",0,0,"Step 157, loss: 4.177336692810059, step time: 17.696142196655273ms\r\n",,terminal_output +10340,12685419,"TERMINAL",0,0,"Step 158, loss: 4.111761569976807, step time: 17.46082305908203ms\r\n",,terminal_output +10341,12685480,"TERMINAL",0,0,"Step 159, loss: 4.1626505851745605, step time: 17.888307571411133ms\r\n",,terminal_output +10342,12685542,"TERMINAL",0,0,"Step 160, loss: 4.06549596786499, step time: 17.398357391357422ms\r\n",,terminal_output +10343,12685606,"TERMINAL",0,0,"Step 161, loss: 4.196542739868164, step time: 17.600059509277344ms\r\n",,terminal_output +10344,12685715,"TERMINAL",0,0,"Step 162, loss: 4.064416408538818, step time: 17.62080192565918ms\r\nStep 163, loss: 4.079616546630859, step time: 17.516613006591797ms\r\n",,terminal_output +10345,12685778,"TERMINAL",0,0,"Step 164, loss: 4.016226768493652, step time: 17.2731876373291ms\r\n",,terminal_output +10346,12685893,"TERMINAL",0,0,"Step 165, loss: 4.378942012786865, step time: 17.78888702392578ms\r\nStep 166, loss: 4.157599449157715, step time: 17.410993576049805ms\r\n",,terminal_output +10347,12685991,"TERMINAL",0,0,"Step 167, loss: 4.047636985778809, step time: 17.719507217407227ms\r\n",,terminal_output +10348,12686098,"TERMINAL",0,0,"Step 168, loss: 4.07844352722168, step time: 18.58997344970703ms\r\nStep 169, loss: 4.035508155822754, step time: 17.262697219848633ms\r\n",,terminal_output +10349,12686205,"TERMINAL",0,0,"Step 170, loss: 4.005664348602295, step time: 17.49420166015625ms\r\nStep 171, loss: 3.997448205947876, step time: 17.575502395629883ms\r\n",,terminal_output 
+10350,12686269,"TERMINAL",0,0,"Step 172, loss: 3.9906692504882812, step time: 17.354488372802734ms\r\n",,terminal_output +10351,12686330,"TERMINAL",0,0,"Step 173, loss: 4.225956916809082, step time: 17.26984977722168ms\r\n",,terminal_output +10352,12686397,"TERMINAL",0,0,"Step 174, loss: 3.9666106700897217, step time: 17.599105834960938ms\r\n",,terminal_output +10353,12686460,"TERMINAL",0,0,"Step 175, loss: 3.9917688369750977, step time: 17.614126205444336ms\r\n",,terminal_output +10354,12686520,"TERMINAL",0,0,"Step 176, loss: 3.939692735671997, step time: 17.495393753051758ms\r\n",,terminal_output +10355,12686614,"TERMINAL",0,0,"Step 177, loss: 3.970026731491089, step time: 17.879962921142578ms\r\n",,terminal_output +10356,12686667,"TERMINAL",0,0,"Step 178, loss: 4.062187194824219, step time: 17.47751235961914ms\r\n",,terminal_output +10357,12686780,"TERMINAL",0,0,"Step 179, loss: 3.9491360187530518, step time: 17.457962036132812ms\r\nStep 180, loss: 3.912917375564575, step time: 17.992258071899414ms\r\n",,terminal_output +10358,12686843,"TERMINAL",0,0,"Step 181, loss: 4.057607173919678, step time: 22.79949188232422ms\r\n",,terminal_output +10359,12686895,"TERMINAL",0,0,"Step 182, loss: 3.9182851314544678, step time: 17.937660217285156ms\r\n",,terminal_output +10360,12686958,"TERMINAL",0,0,"Step 183, loss: 4.213393211364746, step time: 17.827987670898438ms\r\n",,terminal_output +10361,12687021,"TERMINAL",0,0,"Step 184, loss: 3.910740852355957, step time: 17.532825469970703ms\r\n",,terminal_output +10362,12687087,"TERMINAL",0,0,"Step 185, loss: 3.977078676223755, step time: 18.32723617553711ms\r\n",,terminal_output +10363,12687152,"TERMINAL",0,0,"Step 186, loss: 3.9620397090911865, step time: 17.766714096069336ms\r\n",,terminal_output +10364,12687215,"TERMINAL",0,0,"Step 187, loss: 4.225456714630127, step time: 17.615079879760742ms\r\n",,terminal_output +10365,12687277,"TERMINAL",0,0,"Step 188, loss: 3.928316116333008, step time: 17.547607421875ms\r\n",,terminal_output +10366,12687340,"TERMINAL",0,0,"Step 189, loss: 3.8743667602539062, step time: 17.760276794433594ms\r\n",,terminal_output +10367,12687404,"TERMINAL",0,0,"Step 190, loss: 3.9376211166381836, step time: 17.30799674987793ms\r\n",,terminal_output +10368,12687466,"TERMINAL",0,0,"Step 191, loss: 3.8893306255340576, step time: 17.360687255859375ms\r\n",,terminal_output +10369,12687535,"TERMINAL",0,0,"Step 192, loss: 3.8714563846588135, step time: 17.68350601196289ms\r\n",,terminal_output +10370,12687600,"TERMINAL",0,0,"Step 193, loss: 3.885575771331787, step time: 17.277240753173828ms\r\n",,terminal_output +10371,12687663,"TERMINAL",0,0,"Step 194, loss: 3.9196033477783203, step time: 17.264604568481445ms\r\n",,terminal_output +10372,12687727,"TERMINAL",0,0,"Step 195, loss: 3.8708760738372803, step time: 17.652034759521484ms\r\n",,terminal_output +10373,12687837,"TERMINAL",0,0,"Step 196, loss: 3.960435390472412, step time: 17.472028732299805ms\r\nStep 197, loss: 3.8674209117889404, step time: 17.43316650390625ms\r\n",,terminal_output +10374,12687898,"TERMINAL",0,0,"Step 198, loss: 3.849144697189331, step time: 17.924070358276367ms\r\n",,terminal_output +10375,12687962,"TERMINAL",0,0,"Step 199, loss: 3.822087287902832, step time: 17.230510711669922ms\r\n",,terminal_output +10376,12688025,"TERMINAL",0,0,"Step 200, loss: 4.039886951446533, step time: 17.41933822631836ms\r\n",,terminal_output +10377,12688089,"TERMINAL",0,0,"Step 201, loss: 3.8115732669830322, step time: 17.78578758239746ms\r\n",,terminal_output 
+10378,12688155,"TERMINAL",0,0,"Step 202, loss: 3.8493802547454834, step time: 17.52638816833496ms\r\n",,terminal_output +10379,12688229,"TERMINAL",0,0,"Step 203, loss: 3.8309929370880127, step time: 17.53520965576172ms\r\n",,terminal_output +10380,12688283,"TERMINAL",0,0,"Step 204, loss: 3.793884754180908, step time: 17.810344696044922ms\r\n",,terminal_output +10381,12688342,"TERMINAL",0,0,"Step 205, loss: 3.814124822616577, step time: 17.528772354125977ms\r\n",,terminal_output +10382,12688406,"TERMINAL",0,0,"Step 206, loss: 3.813607931137085, step time: 17.592668533325195ms\r\n",,terminal_output +10383,12688469,"TERMINAL",0,0,"Step 207, loss: 3.785747766494751, step time: 17.733097076416016ms\r\n",,terminal_output +10384,12688533,"TERMINAL",0,0,"Step 208, loss: 3.8460285663604736, step time: 17.493247985839844ms\r\n",,terminal_output +10385,12688637,"TERMINAL",0,0,"Step 209, loss: 3.8351237773895264, step time: 17.43936538696289ms\r\n",,terminal_output +10386,12688648,"TERMINAL",0,0,"Step 210, loss: 3.868271589279175, step time: 17.83132553100586ms\r\n",,terminal_output +10387,12688748,"TERMINAL",0,0,"Step 211, loss: 3.9975802898406982, step time: 17.43626594543457ms\r\n",,terminal_output +10388,12688847,"TERMINAL",0,0,"Step 212, loss: 3.7707247734069824, step time: 17.265796661376953ms\r\nStep 213, loss: 3.7460272312164307, step time: 17.59195327758789ms\r\n",,terminal_output +10389,12688938,"TERMINAL",0,0,"Step 214, loss: 3.7380292415618896, step time: 17.33708381652832ms\r\n",,terminal_output +10390,12689049,"TERMINAL",0,0,"Step 215, loss: 3.8150203227996826, step time: 17.722368240356445ms\r\nStep 216, loss: 3.751368761062622, step time: 17.866849899291992ms\r\n",,terminal_output +10391,12689109,"TERMINAL",0,0,"Step 217, loss: 3.780510902404785, step time: 20.203828811645508ms\r\n",,terminal_output +10392,12689222,"TERMINAL",0,0,"Step 218, loss: 3.7310078144073486, step time: 17.930269241333008ms\r\nStep 219, loss: 3.7361488342285156, step time: 17.82369613647461ms\r\n",,terminal_output +10393,12689285,"TERMINAL",0,0,"Step 220, loss: 3.7441697120666504, step time: 17.741918563842773ms\r\n",,terminal_output +10394,12689349,"TERMINAL",0,0,"Step 221, loss: 3.740966320037842, step time: 17.592906951904297ms\r\n",,terminal_output +10395,12689464,"TERMINAL",0,0,"Step 222, loss: 3.7945196628570557, step time: 17.858266830444336ms\r\nStep 223, loss: 3.709740161895752, step time: 17.641305923461914ms\r\n",,terminal_output +10396,12689525,"TERMINAL",0,0,"Step 224, loss: 3.740666151046753, step time: 17.604589462280273ms\r\n",,terminal_output +10397,12689625,"TERMINAL",0,0,"Step 225, loss: 3.748166084289551, step time: 17.945051193237305ms\r\n",,terminal_output +10398,12689674,"TERMINAL",0,0,"Step 226, loss: 3.6889235973358154, step time: 17.63439178466797ms\r\n",,terminal_output +10399,12689781,"TERMINAL",0,0,"Step 227, loss: 3.738034248352051, step time: 17.560720443725586ms\r\nStep 228, loss: 3.649634599685669, step time: 17.814159393310547ms\r\n",,terminal_output +10400,12689849,"TERMINAL",0,0,"Step 229, loss: 3.6830825805664062, step time: 18.867969512939453ms\r\n",,terminal_output +10401,12689902,"TERMINAL",0,0,"Step 230, loss: 3.7142670154571533, step time: 18.0361270904541ms\r\n",,terminal_output +10402,12689998,"TERMINAL",0,0,"Step 231, loss: 3.843500852584839, step time: 18.05400848388672ms\r\n",,terminal_output +10403,12690060,"TERMINAL",0,0,"Step 232, loss: 3.6520869731903076, step time: 17.55690574645996ms\r\n",,terminal_output +10404,12690168,"TERMINAL",0,0,"Step 233, loss: 
3.708725929260254, step time: 17.620325088500977ms\r\nStep 234, loss: 3.6473302841186523, step time: 17.851591110229492ms\r\n",,terminal_output +10405,12690220,"TERMINAL",0,0,"Step 235, loss: 3.640800714492798, step time: 17.447233200073242ms\r\n",,terminal_output +10406,12690283,"TERMINAL",0,0,"Step 236, loss: 3.630981206893921, step time: 17.392396926879883ms\r\n",,terminal_output +10407,12690347,"TERMINAL",0,0,"Step 237, loss: 3.7818570137023926, step time: 17.916440963745117ms\r\n",,terminal_output +10408,12690444,"TERMINAL",0,0,"Step 238, loss: 3.696143388748169, step time: 17.53687858581543ms\r\n",,terminal_output +10409,12690551,"TERMINAL",0,0,"Step 239, loss: 3.638120651245117, step time: 17.745494842529297ms\r\nStep 240, loss: 3.662997245788574, step time: 17.90904998779297ms\r\n",,terminal_output +10410,12690616,"TERMINAL",0,0,"Step 241, loss: 3.6412220001220703, step time: 17.96889305114746ms\r\n",,terminal_output +10411,12690680,"TERMINAL",0,0,"Step 242, loss: 3.63079571723938, step time: 17.383337020874023ms\r\n",,terminal_output +10412,12690744,"TERMINAL",0,0,"Step 243, loss: 3.6356303691864014, step time: 18.243074417114258ms\r\n",,terminal_output +10413,12690810,"TERMINAL",0,0,"Step 244, loss: 3.604811191558838, step time: 17.38882064819336ms\r\n",,terminal_output +10414,12690876,"TERMINAL",0,0,"Step 245, loss: 3.6062676906585693, step time: 17.68779754638672ms\r\n",,terminal_output +10415,12690938,"TERMINAL",0,0,"Step 246, loss: 3.6918156147003174, step time: 17.618656158447266ms\r\n",,terminal_output +10416,12691001,"TERMINAL",0,0,"Step 247, loss: 3.603846311569214, step time: 17.4410343170166ms\r\n",,terminal_output +10417,12691065,"TERMINAL",0,0,"Step 248, loss: 3.6470420360565186, step time: 17.49420166015625ms\r\n",,terminal_output +10418,12691193,"TERMINAL",0,0,"Step 249, loss: 3.6104252338409424, step time: 17.719268798828125ms\r\nStep 250, loss: 3.6427814960479736, step time: 28.545379638671875ms\r\n",,terminal_output +10419,12691300,"TERMINAL",0,0,"Step 251, loss: 3.6583311557769775, step time: 21.193981170654297ms\r\nStep 252, loss: 3.5840086936950684, step time: 17.744064331054688ms\r\n",,terminal_output +10420,12691417,"TERMINAL",0,0,"Step 253, loss: 3.5620079040527344, step time: 17.391681671142578ms\r\nStep 254, loss: 3.5948727130889893, step time: 17.427444458007812ms\r\n",,terminal_output +10421,12691514,"TERMINAL",0,0,"Step 255, loss: 3.6091463565826416, step time: 17.63606071472168ms\r\n",,terminal_output +10422,12691569,"TERMINAL",0,0,"Step 256, loss: 3.631713390350342, step time: 17.567157745361328ms\r\n",,terminal_output +10423,12691679,"TERMINAL",0,0,"Step 257, loss: 3.6440391540527344, step time: 17.537593841552734ms\r\nStep 258, loss: 3.5522260665893555, step time: 17.566919326782227ms\r\n",,terminal_output +10424,12691774,"TERMINAL",0,0,"Step 259, loss: 3.556635856628418, step time: 17.3490047454834ms\r\n",,terminal_output +10425,12691882,"TERMINAL",0,0,"Step 260, loss: 3.540414333343506, step time: 17.525911331176758ms\r\nStep 261, loss: 3.7328264713287354, step time: 17.96865463256836ms\r\n",,terminal_output +10426,12691947,"TERMINAL",0,0,"Step 262, loss: 3.789276599884033, step time: 17.245769500732422ms\r\n",,terminal_output +10427,12692045,"TERMINAL",0,0,"Step 263, loss: 3.524683713912964, step time: 17.410993576049805ms\r\n",,terminal_output +10428,12692109,"TERMINAL",0,0,"Step 264, loss: 3.557978630065918, step time: 17.5018310546875ms\r\nStep 265, loss: 3.5499956607818604, step time: 17.216920852661133ms\r\n",,terminal_output 
+10429,12692175,"TERMINAL",0,0,"Step 266, loss: 3.585178852081299, step time: 17.245054244995117ms\r\n",,terminal_output +10430,12692298,"TERMINAL",0,0,"Step 267, loss: 3.7596523761749268, step time: 17.46964454650879ms\r\nStep 268, loss: 3.528548002243042, step time: 17.192602157592773ms\r\n",,terminal_output +10431,12692361,"TERMINAL",0,0,"Step 269, loss: 3.4934237003326416, step time: 17.32158660888672ms\r\n",,terminal_output +10432,12692426,"TERMINAL",0,0,"Step 270, loss: 3.526414632797241, step time: 17.819881439208984ms\r\n",,terminal_output +10433,12692493,"TERMINAL",0,0,"Step 271, loss: 3.507356882095337, step time: 17.449140548706055ms\r\n",,terminal_output +10434,12692566,"TERMINAL",0,0,"Step 272, loss: 3.500800848007202, step time: 17.459630966186523ms\r\n",,terminal_output +10435,12692633,"TERMINAL",0,0,"Step 273, loss: 3.523533821105957, step time: 17.63916015625ms\r\n",,terminal_output +10436,12692695,"TERMINAL",0,0,"Step 274, loss: 3.5207676887512207, step time: 17.516374588012695ms\r\n",,terminal_output +10437,12692760,"TERMINAL",0,0,"Step 275, loss: 3.5117852687835693, step time: 17.484664916992188ms\r\n",,terminal_output +10438,12692868,"TERMINAL",0,0,"Step 276, loss: 3.6310508251190186, step time: 17.780780792236328ms\r\nStep 277, loss: 3.6603734493255615, step time: 17.48943328857422ms\r\n",,terminal_output +10439,12692938,"TERMINAL",0,0,"Step 278, loss: 3.5044455528259277, step time: 17.39954948425293ms\r\n",,terminal_output +10440,12693009,"TERMINAL",0,0,"Step 279, loss: 3.585279941558838, step time: 17.848491668701172ms\r\n",,terminal_output +10441,12693073,"TERMINAL",0,0,"Step 280, loss: 3.5408058166503906, step time: 17.528057098388672ms\r\n",,terminal_output +10442,12693141,"TERMINAL",0,0,"Step 281, loss: 3.504241943359375, step time: 17.492055892944336ms\r\n",,terminal_output +10443,12693215,"TERMINAL",0,0,"Step 282, loss: 3.571410894393921, step time: 17.78721809387207ms\r\n",,terminal_output +10444,12693281,"TERMINAL",0,0,"Step 283, loss: 3.4977543354034424, step time: 17.452716827392578ms\r\n",,terminal_output +10445,12693333,"TERMINAL",0,0,"Step 284, loss: 3.8899805545806885, step time: 17.5626277923584ms\r\n",,terminal_output +10446,12693389,"TERMINAL",0,0,"Step 285, loss: 3.465823173522949, step time: 18.90087127685547ms\r\n",,terminal_output +10447,12693497,"TERMINAL",0,0,"Step 286, loss: 3.5262086391448975, step time: 20.02882957458496ms\r\nStep 287, loss: 3.4822278022766113, step time: 19.207477569580078ms\r\n",,terminal_output +10448,12693627,"TERMINAL",0,0,"Step 288, loss: 3.5129430294036865, step time: 17.798662185668945ms\r\nStep 289, loss: 3.5560238361358643, step time: 17.608165740966797ms\r\n",,terminal_output +10449,12693681,"TERMINAL",0,0,"Step 290, loss: 3.461211681365967, step time: 17.560482025146484ms\r\n",,terminal_output +10450,12693745,"TERMINAL",0,0,"Step 291, loss: 3.453012228012085, step time: 17.88473129272461ms\r\n",,terminal_output +10451,12693860,"TERMINAL",0,0,"Step 292, loss: 3.5092008113861084, step time: 17.54903793334961ms\r\n",,terminal_output +10452,12693873,"TERMINAL",0,0,"Step 293, loss: 3.5527114868164062, step time: 17.490625381469727ms\r\n",,terminal_output +10453,12693975,"TERMINAL",0,0,"Step 294, loss: 3.493783950805664, step time: 17.72332191467285ms\r\n",,terminal_output +10454,12694036,"TERMINAL",0,0,"Step 295, loss: 3.45388126373291, step time: 17.64965057373047ms\r\n",,terminal_output +10455,12694089,"TERMINAL",0,0,"Step 296, loss: 3.575984001159668, step time: 17.442703247070312ms\r\n",,terminal_output 
+10456,12694196,"TERMINAL",0,0,"Step 297, loss: 3.448336124420166, step time: 17.69089698791504ms\r\nStep 298, loss: 3.442906618118286, step time: 17.456531524658203ms\r\n",,terminal_output +10457,12694259,"TERMINAL",0,0,"Step 299, loss: 3.425269365310669, step time: 17.534732818603516ms\r\n",,terminal_output +10458,12694323,"TERMINAL",0,0,"Step 300, loss: 3.4301772117614746, step time: 17.66800880432129ms\r\n",,terminal_output +10459,12694399,"TERMINAL",0,0,"Step 301, loss: 3.4647281169891357, step time: 17.46082305908203ms\r\n",,terminal_output +10460,12694455,"TERMINAL",0,0,"Step 302, loss: 3.4070498943328857, step time: 17.377138137817383ms\r\n",,terminal_output +10461,12694521,"TERMINAL",0,0,"Step 303, loss: 3.4421029090881348, step time: 17.843246459960938ms\r\n",,terminal_output +10462,12694581,"TERMINAL",0,0,"Step 304, loss: 3.436889886856079, step time: 17.282485961914062ms\r\n",,terminal_output +10463,12694649,"TERMINAL",0,0,"Step 305, loss: 3.488879680633545, step time: 17.566919326782227ms\r\n",,terminal_output +10464,12694936,"TERMINAL",0,0,"Step 306, loss: 3.417158365249634, step time: 307.07454681396484ms\r\n",,terminal_output +10465,12695002,"TERMINAL",0,0,"Step 307, loss: 3.476766347885132, step time: 24.595260620117188ms\r\n",,terminal_output +10466,12695062,"TERMINAL",0,0,"Step 308, loss: 3.380316734313965, step time: 19.715309143066406ms\r\n",,terminal_output +10467,12695126,"TERMINAL",0,0,"Step 309, loss: 3.4652442932128906, step time: 18.407106399536133ms\r\n",,terminal_output +10468,12695188,"TERMINAL",0,0,"Step 310, loss: 3.3699848651885986, step time: 17.781496047973633ms\r\n",,terminal_output +10469,12695252,"TERMINAL",0,0,"Step 311, loss: 3.402975082397461, step time: 17.587900161743164ms\r\n",,terminal_output +10470,12695315,"TERMINAL",0,0,"Step 312, loss: 3.3677732944488525, step time: 17.75836944580078ms\r\n",,terminal_output +10471,12695385,"TERMINAL",0,0,"Step 313, loss: 3.494318962097168, step time: 17.625093460083008ms\r\n",,terminal_output +10472,12695442,"TERMINAL",0,0,"Step 314, loss: 3.59484601020813, step time: 17.386198043823242ms\r\n",,terminal_output +10473,12695504,"TERMINAL",0,0,"Step 315, loss: 3.4408938884735107, step time: 17.917871475219727ms\r\n",,terminal_output +10474,12695569,"TERMINAL",0,0,"Step 316, loss: 3.4199111461639404, step time: 17.462968826293945ms\r\n",,terminal_output +10475,12695636,"TERMINAL",0,0,"Step 317, loss: 3.3863818645477295, step time: 17.415523529052734ms\r\n",,terminal_output +10476,12695698,"TERMINAL",0,0,"Step 318, loss: 3.4541244506835938, step time: 17.75360107421875ms\r\n",,terminal_output +10477,12695817,"TERMINAL",0,0,"Step 319, loss: 3.376756191253662, step time: 17.323017120361328ms\r\nStep 320, loss: 3.648071527481079, step time: 17.34137535095215ms\r\n",,terminal_output +10478,12695908,"TERMINAL",0,0,"Step 321, loss: 3.4158504009246826, step time: 17.85564422607422ms\r\n",,terminal_output +10479,12695961,"TERMINAL",0,0,"Step 322, loss: 3.373858690261841, step time: 17.3797607421875ms\r\n",,terminal_output +10480,12696066,"TERMINAL",0,0,"Step 323, loss: 3.35699462890625, step time: 17.501354217529297ms\r\nStep 324, loss: 3.3604094982147217, step time: 17.715930938720703ms\r\n",,terminal_output +10481,12696161,"TERMINAL",0,0,"Step 325, loss: 3.359633445739746, step time: 17.50326156616211ms\r\n",,terminal_output +10482,12696214,"TERMINAL",0,0,"Step 326, loss: 3.7289366722106934, step time: 17.427682876586914ms\r\n",,terminal_output +10483,12696321,"TERMINAL",0,0,"Step 327, loss: 3.346776247024536, step 
time: 17.823457717895508ms\r\nStep 328, loss: 3.327735185623169, step time: 17.301321029663086ms\r\n",,terminal_output +10484,12696414,"TERMINAL",0,0,"Step 329, loss: 3.4313440322875977, step time: 17.37070083618164ms\r\n",,terminal_output +10485,12696468,"TERMINAL",0,0,"Step 330, loss: 3.5393688678741455, step time: 17.84038543701172ms\r\n",,terminal_output +10486,12696578,"TERMINAL",0,0,"Step 331, loss: 3.3344600200653076, step time: 17.34447479248047ms\r\nStep 332, loss: 3.351377010345459, step time: 17.429590225219727ms\r\n",,terminal_output +10487,12696682,"TERMINAL",0,0,"Step 333, loss: 3.3551957607269287, step time: 17.813444137573242ms\r\n",,terminal_output +10488,12696755,"TERMINAL",0,0,"Step 334, loss: 3.35365629196167, step time: 17.59481430053711ms\r\nStep 335, loss: 3.376988172531128, step time: 18.377304077148438ms\r\n",,terminal_output +10489,12696826,"TERMINAL",0,0,"Step 336, loss: 3.3206470012664795, step time: 18.95904541015625ms\r\n",,terminal_output +10490,12696882,"TERMINAL",0,0,"Step 337, loss: 3.467625856399536, step time: 17.925500869750977ms\r\n",,terminal_output +10491,12696943,"TERMINAL",0,0,"Step 338, loss: 3.324364423751831, step time: 17.515897750854492ms\r\n",,terminal_output +10492,12697007,"TERMINAL",0,0,"Step 339, loss: 3.356827735900879, step time: 17.820119857788086ms\r\n",,terminal_output +10493,12697069,"TERMINAL",0,0,"Step 340, loss: 3.447638511657715, step time: 17.403841018676758ms\r\n",,terminal_output +10494,12697164,"TERMINAL",0,0,"Step 341, loss: 3.351808547973633, step time: 17.2879695892334ms\r\n",,terminal_output +10495,12697217,"TERMINAL",0,0,"Step 342, loss: 3.317807197570801, step time: 17.551898956298828ms\r\n",,terminal_output +10496,12697329,"TERMINAL",0,0,"Step 343, loss: 3.33280348777771, step time: 17.240524291992188ms\r\nStep 344, loss: 3.319537878036499, step time: 17.115354537963867ms\r\n",,terminal_output +10497,12697385,"TERMINAL",0,0,"Step 345, loss: 3.372982978820801, step time: 17.532825469970703ms\r\n",,terminal_output +10498,12697451,"TERMINAL",0,0,"Step 346, loss: 3.4646594524383545, step time: 17.05622673034668ms\r\n",,terminal_output +10499,12697512,"TERMINAL",0,0,"Step 347, loss: 3.2904279232025146, step time: 17.125368118286133ms\r\n",,terminal_output +10500,12697575,"TERMINAL",0,0,"Step 348, loss: 3.3559255599975586, step time: 17.492294311523438ms\r\n",,terminal_output +10501,12697640,"TERMINAL",0,0,"Step 349, loss: 3.3363630771636963, step time: 19.18339729309082ms\r\n",,terminal_output +10502,12697761,"TERMINAL",0,0,"Step 350, loss: 3.314103364944458, step time: 17.34185218811035ms\r\nStep 351, loss: 3.302969217300415, step time: 17.585277557373047ms\r\n",,terminal_output +10503,12697821,"TERMINAL",0,0,"Step 352, loss: 3.300781488418579, step time: 17.117977142333984ms\r\n",,terminal_output +10504,12697879,"TERMINAL",0,0,"Step 353, loss: 3.330855369567871, step time: 17.17853546142578ms\r\n",,terminal_output +10505,12697981,"TERMINAL",0,0,"Step 354, loss: 3.2811827659606934, step time: 17.541170120239258ms\r\n",,terminal_output +10506,12698033,"TERMINAL",0,0,"Step 355, loss: 3.463672161102295, step time: 17.27747917175293ms\r\n",,terminal_output +10507,12698136,"TERMINAL",0,0,"Step 356, loss: 3.3773350715637207, step time: 17.23027229309082ms\r\nStep 357, loss: 3.281174659729004, step time: 17.750978469848633ms\r\n",,terminal_output +10508,12698229,"TERMINAL",0,0,"Step 358, loss: 3.385127067565918, step time: 17.223834991455078ms\r\n",,terminal_output +10509,12698282,"TERMINAL",0,0,"Step 359, loss: 
3.321413516998291, step time: 17.235755920410156ms\r\n",,terminal_output +10510,12698346,"TERMINAL",0,0,"Step 360, loss: 3.3719842433929443, step time: 17.544269561767578ms\r\n",,terminal_output +10511,12698412,"TERMINAL",0,0,"Step 361, loss: 3.2804956436157227, step time: 17.26531982421875ms\r\n",,terminal_output +10512,12698481,"TERMINAL",0,0,"Step 362, loss: 3.269374370574951, step time: 17.338991165161133ms\r\n",,terminal_output +10513,12698540,"TERMINAL",0,0,"Step 363, loss: 3.2841694355010986, step time: 17.69399642944336ms\r\n",,terminal_output +10514,12698717,"TERMINAL",0,0,"Step 364, loss: 3.261289596557617, step time: 17.086505889892578ms\r\nStep 365, loss: 3.2614288330078125, step time: 17.550230026245117ms\r\nStep 366, loss: 3.2904279232025146, step time: 17.600059509277344ms\r\n",,terminal_output +10515,12698760,"TERMINAL",0,0,"Step 367, loss: 3.3045432567596436, step time: 17.23504066467285ms\r\n",,terminal_output +10516,12698824,"TERMINAL",0,0,"Step 368, loss: 3.2521555423736572, step time: 17.128705978393555ms\r\n",,terminal_output +10517,12698883,"TERMINAL",0,0,"Step 369, loss: 3.2590255737304688, step time: 18.929719924926758ms\r\n",,terminal_output +10518,12698945,"TERMINAL",0,0,"Step 370, loss: 3.6581976413726807, step time: 17.205238342285156ms\r\n",,terminal_output +10519,12699072,"TERMINAL",0,0,"Step 371, loss: 3.238367795944214, step time: 17.27771759033203ms\r\nStep 372, loss: 3.3340795040130615, step time: 17.515182495117188ms\r\n",,terminal_output +10520,12699135,"TERMINAL",0,0,"Step 373, loss: 3.370140314102173, step time: 17.35377311706543ms\r\n",,terminal_output +10521,12699201,"TERMINAL",0,0,"Step 374, loss: 3.548503875732422, step time: 17.05765724182129ms\r\n",,terminal_output +10522,12699266,"TERMINAL",0,0,"Step 375, loss: 3.5907113552093506, step time: 17.625093460083008ms\r\n",,terminal_output +10523,12699329,"TERMINAL",0,0,"Step 376, loss: 3.371299982070923, step time: 17.12799072265625ms\r\n",,terminal_output +10524,12699401,"TERMINAL",0,0,"Step 377, loss: 3.3053877353668213, step time: 17.368316650390625ms\r\n",,terminal_output +10525,12699466,"TERMINAL",0,0,"Step 378, loss: 3.2863657474517822, step time: 17.437219619750977ms\r\n",,terminal_output +10526,12699574,"TERMINAL",0,0,"Step 379, loss: 3.2739694118499756, step time: 17.294883728027344ms\r\nStep 380, loss: 3.269118547439575, step time: 17.373323440551758ms\r\n",,terminal_output +10527,12699636,"TERMINAL",0,0,"Step 381, loss: 3.3984923362731934, step time: 17.81606674194336ms\r\n",,terminal_output +10528,12699705,"TERMINAL",0,0,"Step 382, loss: 3.266827344894409, step time: 17.401456832885742ms\r\n",,terminal_output +10529,12699765,"TERMINAL",0,0,"Step 383, loss: 3.4757440090179443, step time: 17.36736297607422ms\r\n",,terminal_output +10530,12699892,"TERMINAL",0,0,"Step 384, loss: 3.274470567703247, step time: 17.635345458984375ms\r\nStep 385, loss: 3.261284351348877, step time: 18.645048141479492ms\r\n",,terminal_output +10531,12699957,"TERMINAL",0,0,"Step 386, loss: 3.2567765712738037, step time: 17.664670944213867ms\r\n",,terminal_output +10532,12700019,"TERMINAL",0,0,"Step 387, loss: 3.437080144882202, step time: 17.836332321166992ms\r\n",,terminal_output +10533,12700081,"TERMINAL",0,0,"Step 388, loss: 3.2529003620147705, step time: 17.41623878479004ms\r\n",,terminal_output +10534,12700142,"TERMINAL",0,0,"Step 389, loss: 3.2887187004089355, step time: 18.970489501953125ms\r\n",,terminal_output +10535,12700209,"TERMINAL",0,0,"Step 390, loss: 3.3027963638305664, step time: 
17.774581909179688ms\r\n",,terminal_output +10536,12700271,"TERMINAL",0,0,"Step 391, loss: 3.213216543197632, step time: 17.394542694091797ms\r\n",,terminal_output +10537,12700368,"TERMINAL",0,0,"Step 392, loss: 3.2383158206939697, step time: 17.36736297607422ms\r\n",,terminal_output +10538,12700433,"TERMINAL",0,0,"Step 393, loss: 3.2399990558624268, step time: 17.801761627197266ms\r\n",,terminal_output +10539,12700483,"TERMINAL",0,0,"Step 394, loss: 3.223008394241333, step time: 17.272233963012695ms\r\n",,terminal_output +10540,12700589,"TERMINAL",0,0,"Step 395, loss: 3.345351219177246, step time: 17.464637756347656ms\r\nStep 396, loss: 3.2235522270202637, step time: 17.73834228515625ms\r\n",,terminal_output +10541,12700713,"TERMINAL",0,0,"Step 397, loss: 3.2235922813415527, step time: 17.431259155273438ms\r\nStep 398, loss: 3.2154014110565186, step time: 17.449378967285156ms\r\n",,terminal_output +10542,12700771,"TERMINAL",0,0,"Step 399, loss: 3.203162670135498, step time: 17.825841903686523ms\r\n",,terminal_output +10543,12700898,"TERMINAL",0,0,"Step 400, loss: 3.196885347366333, step time: 17.560482025146484ms\r\nStep 401, loss: 3.184805154800415, step time: 17.405986785888672ms\r\n",,terminal_output +10544,12700993,"TERMINAL",0,0,"Step 402, loss: 3.207977294921875, step time: 17.625093460083008ms\r\n",,terminal_output +10545,12701167,"TERMINAL",0,0,"Step 403, loss: 3.2365708351135254, step time: 17.398595809936523ms\r\n",,terminal_output +10546,12701231,"TERMINAL",0,0,"Step 404, loss: 3.192842483520508, step time: 17.603158950805664ms\r\nStep 405, loss: 3.2761168479919434, step time: 18.00704002380371ms\r\nStep 406, loss: 3.3134753704071045, step time: 17.293930053710938ms\r\n",,terminal_output +10547,12701293,"TERMINAL",0,0,"Step 407, loss: 3.2059309482574463, step time: 17.37070083618164ms\r\n",,terminal_output +10548,12701356,"TERMINAL",0,0,"Step 408, loss: 3.2162585258483887, step time: 17.615079879760742ms\r\n",,terminal_output +10549,12701451,"TERMINAL",0,0,"Step 409, loss: 3.21170973777771, step time: 17.514705657958984ms\r\n",,terminal_output +10550,12701561,"TERMINAL",0,0,"Step 410, loss: 3.1852519512176514, step time: 17.337322235107422ms\r\nStep 411, loss: 3.441063165664673, step time: 19.133329391479492ms\r\n",,terminal_output +10551,12701673,"TERMINAL",0,0,"Step 412, loss: 3.1831846237182617, step time: 17.652273178100586ms\r\nStep 413, loss: 3.575326919555664, step time: 17.676830291748047ms\r\n",,terminal_output +10552,12701736,"TERMINAL",0,0,"Step 414, loss: 3.1765224933624268, step time: 17.790555953979492ms\r\n",,terminal_output +10553,12701861,"TERMINAL",0,0,"Step 415, loss: 3.1758275032043457, step time: 17.59791374206543ms\r\nStep 416, loss: 3.194380044937134, step time: 17.39215850830078ms\r\n",,terminal_output +10554,12701925,"TERMINAL",0,0,"Step 417, loss: 3.3547701835632324, step time: 17.891645431518555ms\r\n",,terminal_output +10555,12701987,"TERMINAL",0,0,"Step 418, loss: 3.2962472438812256, step time: 17.499446868896484ms\r\n",,terminal_output +10556,12702054,"TERMINAL",0,0,"Step 419, loss: 3.17195200920105, step time: 17.4863338470459ms\r\n",,terminal_output +10557,12702116,"TERMINAL",0,0,"Step 420, loss: 3.1777472496032715, step time: 17.749547958374023ms\r\n",,terminal_output +10558,12702178,"TERMINAL",0,0,"Step 421, loss: 3.2368662357330322, step time: 17.447948455810547ms\r\n",,terminal_output +10559,12702240,"TERMINAL",0,0,"Step 422, loss: 3.241424798965454, step time: 17.39048957824707ms\r\n",,terminal_output +10560,12702360,"TERMINAL",0,0,"Step 
423, loss: 3.2881152629852295, step time: 17.768383026123047ms\r\nStep 424, loss: 3.2271666526794434, step time: 17.354249954223633ms\r\n",,terminal_output +10561,12702423,"TERMINAL",0,0,"Step 425, loss: 3.1852681636810303, step time: 17.25149154663086ms\r\n",,terminal_output +10562,12702485,"TERMINAL",0,0,"Step 426, loss: 3.274242877960205, step time: 17.464876174926758ms\r\n",,terminal_output +10563,12702549,"TERMINAL",0,0,"Step 427, loss: 3.2018349170684814, step time: 19.67644691467285ms\r\n",,terminal_output +10564,12702613,"TERMINAL",0,0,"Step 428, loss: 3.1768598556518555, step time: 17.270803451538086ms\r\n",,terminal_output +10565,12702677,"TERMINAL",0,0,"Step 429, loss: 3.1635098457336426, step time: 17.745494842529297ms\r\n",,terminal_output +10566,12702740,"TERMINAL",0,0,"Step 430, loss: 3.143899917602539, step time: 17.17662811279297ms\r\n",,terminal_output +10567,12702804,"TERMINAL",0,0,"Step 431, loss: 3.152442455291748, step time: 17.33088493347168ms\r\n",,terminal_output +10568,12702858,"TERMINAL",0,0,"Step 432, loss: 3.1508991718292236, step time: 17.75527000427246ms\r\n",,terminal_output +10569,12702955,"TERMINAL",0,0,"Step 433, loss: 3.191544771194458, step time: 17.379283905029297ms\r\n",,terminal_output +10570,12703017,"TERMINAL",0,0,"Step 434, loss: 3.1360089778900146, step time: 17.184972763061523ms\r\n",,terminal_output +10571,12703083,"TERMINAL",0,0,"Step 435, loss: 3.144620418548584, step time: 17.836332321166992ms\r\n",,terminal_output +10572,12703144,"TERMINAL",0,0,"Step 436, loss: 3.264134407043457, step time: 17.35973358154297ms\r\n",,terminal_output +10573,12703209,"TERMINAL",0,0,"Step 437, loss: 3.1487557888031006, step time: 17.422199249267578ms\r\n",,terminal_output +10574,12703261,"TERMINAL",0,0,"Step 438, loss: 3.148315906524658, step time: 17.732858657836914ms\r\n",,terminal_output +10575,12703313,"TERMINAL",0,0,"Step 439, loss: 3.146226644515991, step time: 17.397165298461914ms\r\n",,terminal_output +10576,12703417,"TERMINAL",0,0,"Step 440, loss: 3.5801894664764404, step time: 17.16446876525879ms\r\nStep 441, loss: 3.1228220462799072, step time: 17.66061782836914ms\r\n",,terminal_output +10577,12703479,"TERMINAL",0,0,"Step 442, loss: 3.138826608657837, step time: 17.076492309570312ms\r\n",,terminal_output +10578,12703540,"TERMINAL",0,0,"Step 443, loss: 3.406341552734375, step time: 17.2426700592041ms\r\n",,terminal_output +10579,12703656,"TERMINAL",0,0,"Step 444, loss: 3.1593525409698486, step time: 17.544984817504883ms\r\n",,terminal_output +10580,12703665,"TERMINAL",0,0,"Step 445, loss: 3.1217148303985596, step time: 17.253637313842773ms\r\n",,terminal_output +10581,12703763,"TERMINAL",0,0,"Step 446, loss: 3.140899419784546, step time: 17.115116119384766ms\r\n",,terminal_output +10582,12703869,"TERMINAL",0,0,"Step 447, loss: 3.106581449508667, step time: 17.668724060058594ms\r\nStep 448, loss: 3.127079486846924, step time: 17.171144485473633ms\r\n",,terminal_output +10583,12703933,"TERMINAL",0,0,"Step 449, loss: 3.1036717891693115, step time: 17.183780670166016ms\r\n",,terminal_output +10584,12703991,"TERMINAL",0,0,"Step 450, loss: 3.0896310806274414, step time: 17.507553100585938ms\r\n",,terminal_output +10585,12704052,"TERMINAL",0,0,"Step 451, loss: 3.1933908462524414, step time: 17.24529266357422ms\r\n",,terminal_output +10586,12704112,"TERMINAL",0,0,"Step 452, loss: 3.084228038787842, step time: 17.209291458129883ms\r\n",,terminal_output +10587,12704177,"TERMINAL",0,0,"Step 453, loss: 3.112327814102173, step time: 
17.536640167236328ms\r\n",,terminal_output +10588,12704241,"TERMINAL",0,0,"Step 454, loss: 3.1890931129455566, step time: 17.120361328125ms\r\n",,terminal_output +10589,12704303,"TERMINAL",0,0,"Step 455, loss: 3.1123199462890625, step time: 17.47298240661621ms\r\n",,terminal_output +10590,12704367,"TERMINAL",0,0,"Step 456, loss: 3.161430597305298, step time: 17.825603485107422ms\r\n",,terminal_output +10591,12704428,"TERMINAL",0,0,"Step 457, loss: 3.2054269313812256, step time: 17.36617088317871ms\r\n",,terminal_output +10592,12704492,"TERMINAL",0,0,"Step 458, loss: 3.1149442195892334, step time: 17.24100112915039ms\r\n",,terminal_output +10593,12704885,"TERMINAL",0,0,"Step 459, loss: 3.085963010787964, step time: 322.47352600097656ms\r\nStep 460, loss: 3.0715439319610596, step time: 26.358366012573242ms\r\n",,terminal_output +10594,12704938,"TERMINAL",0,0,"Step 461, loss: 3.134066581726074, step time: 19.651412963867188ms\r\n",,terminal_output +10595,12705002,"TERMINAL",0,0,"Step 462, loss: 3.0900607109069824, step time: 18.87369155883789ms\r\n",,terminal_output +10596,12705063,"TERMINAL",0,0,"Step 463, loss: 3.0763535499572754, step time: 18.248558044433594ms\r\n",,terminal_output +10597,12705125,"TERMINAL",0,0,"Step 464, loss: 3.078829526901245, step time: 17.752885818481445ms\r\n",,terminal_output +10598,12705252,"TERMINAL",0,0,"Step 465, loss: 3.148851156234741, step time: 18.16391944885254ms\r\nStep 466, loss: 3.06630277633667, step time: 17.330646514892578ms\r\n",,terminal_output +10599,12705314,"TERMINAL",0,0,"Step 467, loss: 3.0730245113372803, step time: 17.456769943237305ms\r\n",,terminal_output +10600,12705380,"TERMINAL",0,0,"Step 468, loss: 3.10479474067688, step time: 17.91977882385254ms\r\n",,terminal_output +10601,12705439,"TERMINAL",0,0,"Step 469, loss: 3.112694025039673, step time: 17.529726028442383ms\r\n",,terminal_output +10602,12705566,"TERMINAL",0,0,"Step 470, loss: 3.0700843334198, step time: 17.534255981445312ms\r\nStep 471, loss: 3.0294315814971924, step time: 17.92311668395996ms\r\n",,terminal_output +10603,12705669,"TERMINAL",0,0,"Step 472, loss: 3.0892934799194336, step time: 17.40407943725586ms\r\n",,terminal_output +10604,12705721,"TERMINAL",0,0,"Step 473, loss: 3.0557026863098145, step time: 17.399311065673828ms\r\n",,terminal_output +10605,12705771,"TERMINAL",0,0,"Step 474, loss: 3.0422110557556152, step time: 17.835378646850586ms\r\n",,terminal_output +10606,12705880,"TERMINAL",0,0,"Step 475, loss: 3.2988600730895996, step time: 17.41766929626465ms\r\nStep 476, loss: 3.1154589653015137, step time: 17.333269119262695ms\r\n",,terminal_output +10607,12705974,"TERMINAL",0,0,"Step 477, loss: 3.0620100498199463, step time: 17.90642738342285ms\r\n",,terminal_output +10608,12706027,"TERMINAL",0,0,"Step 478, loss: 3.148902177810669, step time: 17.284631729125977ms\r\n",,terminal_output +10609,12706132,"TERMINAL",0,0,"Step 479, loss: 3.171234369277954, step time: 17.455577850341797ms\r\nStep 480, loss: 3.057253122329712, step time: 17.79007911682129ms\r\n",,terminal_output +10610,12706227,"TERMINAL",0,0,"Step 481, loss: 3.185427665710449, step time: 17.4410343170166ms\r\n",,terminal_output +10611,12706290,"TERMINAL",0,0,"Step 482, loss: 3.1109771728515625, step time: 17.256736755371094ms\r\n",,terminal_output +10612,12706353,"TERMINAL",0,0,"Step 483, loss: 3.0607943534851074, step time: 17.877578735351562ms\r\n",,terminal_output +10613,12706416,"TERMINAL",0,0,"Step 484, loss: 3.065007209777832, step time: 17.287492752075195ms\r\n",,terminal_output 
+10614,12706523,"TERMINAL",0,0,"Step 485, loss: 3.142970323562622, step time: 17.26508140563965ms\r\nStep 486, loss: 3.0923335552215576, step time: 17.670631408691406ms\r\n",,terminal_output +10615,12706584,"TERMINAL",0,0,"Step 487, loss: 3.052971839904785, step time: 17.213821411132812ms\r\n",,terminal_output +10616,12706644,"TERMINAL",0,0,"Step 488, loss: 3.0570497512817383, step time: 17.052650451660156ms\r\n",,terminal_output +10617,12706704,"TERMINAL",0,0,"Step 489, loss: 3.074643611907959, step time: 17.772436141967773ms\r\n",,terminal_output +10618,12706765,"TERMINAL",0,0,"Step 490, loss: 3.147033452987671, step time: 17.1205997467041ms\r\n",,terminal_output +10619,12706913,"TERMINAL",0,0,"Step 491, loss: 3.027735948562622, step time: 19.15740966796875ms\r\nStep 492, loss: 3.0289552211761475, step time: 18.253087997436523ms\r\n",,terminal_output +10620,12706963,"TERMINAL",0,0,"Step 493, loss: 3.0088562965393066, step time: 17.2727108001709ms\r\n",,terminal_output +10621,12707027,"TERMINAL",0,0,"Step 494, loss: 3.031829595565796, step time: 17.206430435180664ms\r\n",,terminal_output +10622,12707089,"TERMINAL",0,0,"Step 495, loss: 3.0336670875549316, step time: 17.697572708129883ms\r\n",,terminal_output +10623,12707191,"TERMINAL",0,0,"Step 496, loss: 3.0249574184417725, step time: 17.028093338012695ms\r\nStep 497, loss: 3.0716769695281982, step time: 17.152786254882812ms\r\n",,terminal_output +10624,12707254,"TERMINAL",0,0,"Step 498, loss: 3.0362627506256104, step time: 17.664194107055664ms\r\n",,terminal_output +10625,12707354,"TERMINAL",0,0,"Step 499, loss: 3.0346333980560303, step time: 17.135143280029297ms\r\n",,terminal_output +10626,12710582,"TERMINAL",0,0,"Step 500, loss: 2.9986801147460938, step time: 37.06550598144531ms\r\n",,terminal_output +10627,12710626,"TERMINAL",0,0,"Step 501, loss: 3.0283944606781006, step time: 26.72266960144043ms\r\n",,terminal_output +10628,12710764,"TERMINAL",0,0,"Step 502, loss: 3.098792552947998, step time: 21.46148681640625ms\r\n",,terminal_output +10629,12710818,"TERMINAL",0,0,"Step 503, loss: 3.2994282245635986, step time: 20.08652687072754ms\r\nStep 504, loss: 3.0133533477783203, step time: 19.420862197875977ms\r\n",,terminal_output +10630,12711099,"TERMINAL",0,0,"Step 505, loss: 3.0381526947021484, step time: 293.8039302825928ms\r\n",,terminal_output +10631,12711230,"TERMINAL",0,0,"Step 506, loss: 2.986041307449341, step time: 26.016712188720703ms\r\n",,terminal_output +10632,12711392,"TERMINAL",0,0,"Step 507, loss: 3.046104669570923, step time: 21.24953269958496ms\r\nStep 508, loss: 3.0407423973083496, step time: 19.90532875061035ms\r\n",,terminal_output +10633,12711498,"TERMINAL",0,0,"Step 509, loss: 3.135976552963257, step time: 19.45328712463379ms\r\nStep 510, loss: 3.0162713527679443, step time: 19.096851348876953ms\r\nStep 511, loss: 3.015451192855835, step time: 19.233226776123047ms\r\n",,terminal_output +10634,12711549,"TERMINAL",0,0,"Step 512, loss: 2.9766600131988525, step time: 18.868684768676758ms\r\n",,terminal_output +10635,12711695,"TERMINAL",0,0,"Step 513, loss: 2.973224639892578, step time: 18.625736236572266ms\r\nStep 514, loss: 2.991211414337158, step time: 19.21224594116211ms\r\n",,terminal_output +10636,12711747,"TERMINAL",0,0,"Step 515, loss: 3.2099525928497314, step time: 24.84869956970215ms\r\n",,terminal_output +10637,12711859,"TERMINAL",0,0,"Step 516, loss: 2.9793739318847656, step time: 25.61163902282715ms\r\nStep 517, loss: 3.3095152378082275, step time: 25.838136672973633ms\r\n",,terminal_output 
+10638,12711965,"TERMINAL",0,0,"Step 518, loss: 3.0397815704345703, step time: 26.443958282470703ms\r\n",,terminal_output +10639,12712022,"TERMINAL",0,0,"Step 519, loss: 2.9674017429351807, step time: 26.128053665161133ms\r\n",,terminal_output +10640,12712109,"TERMINAL",0,0,"Step 520, loss: 3.0145819187164307, step time: 27.222156524658203ms\r\n",,terminal_output +10641,12712168,"TERMINAL",0,0,"Step 521, loss: 2.9791221618652344, step time: 27.962684631347656ms\r\n",,terminal_output +10642,12712229,"TERMINAL",0,0,"Step 522, loss: 2.9877281188964844, step time: 26.855945587158203ms\r\n",,terminal_output +10643,12712293,"TERMINAL",0,0,"Step 523, loss: 2.9884607791900635, step time: 24.956941604614258ms\r\n",,terminal_output +10644,12712353,"TERMINAL",0,0,"Step 524, loss: 3.010178327560425, step time: 19.50860023498535ms\r\n",,terminal_output +10645,12712456,"TERMINAL",0,0,"Step 525, loss: 3.128098249435425, step time: 18.149852752685547ms\r\nStep 526, loss: 3.146497964859009, step time: 19.359827041625977ms\r\n",,terminal_output +10646,12712519,"TERMINAL",0,0,"Step 527, loss: 2.9844253063201904, step time: 18.689393997192383ms\r\n",,terminal_output +10647,12712580,"TERMINAL",0,0,"Step 528, loss: 2.969705104827881, step time: 17.505645751953125ms\r\n",,terminal_output +10648,12712643,"TERMINAL",0,0,"Step 529, loss: 2.992933750152588, step time: 17.81630516052246ms\r\n",,terminal_output +10649,12712770,"TERMINAL",0,0,"Step 530, loss: 2.9870221614837646, step time: 17.5473690032959ms\r\nStep 531, loss: 2.98295521736145, step time: 17.640352249145508ms\r\n",,terminal_output +10650,12712835,"TERMINAL",0,0,"Step 532, loss: 3.01664137840271, step time: 19.06275749206543ms\r\n",,terminal_output +10651,12712895,"TERMINAL",0,0,"Step 533, loss: 2.9641873836517334, step time: 17.690181732177734ms\r\n",,terminal_output +10652,12712959,"TERMINAL",0,0,"Step 534, loss: 2.939521551132202, step time: 17.5173282623291ms\r\n",,terminal_output +10653,12713030,"TERMINAL",0,0,"Step 535, loss: 3.0878825187683105, step time: 17.64059066772461ms\r\n",,terminal_output +10654,12713085,"TERMINAL",0,0,"Step 536, loss: 2.9492828845977783, step time: 17.2119140625ms\r\n",,terminal_output +10655,12713147,"TERMINAL",0,0,"Step 537, loss: 2.957699775695801, step time: 17.30489730834961ms\r\n",,terminal_output +10656,12713211,"TERMINAL",0,0,"Step 538, loss: 2.9557807445526123, step time: 17.30060577392578ms\r\n",,terminal_output +10657,12713274,"TERMINAL",0,0,"Step 539, loss: 2.9504497051239014, step time: 17.4863338470459ms\r\n",,terminal_output +10658,12713401,"TERMINAL",0,0,"Step 540, loss: 2.9402496814727783, step time: 17.346858978271484ms\r\nStep 541, loss: 2.9675867557525635, step time: 17.745256423950195ms\r\n",,terminal_output +10659,12713468,"TERMINAL",0,0,"Step 542, loss: 2.9379031658172607, step time: 17.373323440551758ms\r\n",,terminal_output +10660,12713534,"TERMINAL",0,0,"Step 543, loss: 2.9613964557647705, step time: 17.117977142333984ms\r\n",,terminal_output +10661,12713644,"TERMINAL",0,0,"Step 544, loss: 2.936643362045288, step time: 17.30203628540039ms\r\n",,terminal_output +10662,12713648,"TERMINAL",0,0,"Step 545, loss: 2.9290406703948975, step time: 17.148256301879883ms\r\n",,terminal_output +10663,12713726,"TERMINAL",0,0,"Step 546, loss: 2.9321188926696777, step time: 17.191648483276367ms\r\n",,terminal_output +10664,12713834,"TERMINAL",0,0,"Step 547, loss: 3.138364315032959, step time: 17.4407958984375ms\r\nStep 548, loss: 2.930605411529541, step time: 16.9222354888916ms\r\n",,terminal_output 
+10665,12713898,"TERMINAL",0,0,"Step 549, loss: 3.011319875717163, step time: 17.233848571777344ms\r\n",,terminal_output +10666,12714049,"TERMINAL",0,0,"Step 550, loss: 3.1100776195526123, step time: 17.65918731689453ms\r\n",,terminal_output +10667,12714095,"TERMINAL",0,0,"Step 551, loss: 2.9112069606781006, step time: 17.390727996826172ms\r\nStep 552, loss: 2.968890905380249, step time: 17.347097396850586ms\r\n",,terminal_output +10668,12714189,"TERMINAL",0,0,"Step 553, loss: 2.9054949283599854, step time: 17.80223846435547ms\r\n",,terminal_output +10669,12714241,"TERMINAL",0,0,"Step 554, loss: 3.0413331985473633, step time: 17.2731876373291ms\r\n",,terminal_output +10670,12714346,"TERMINAL",0,0,"Step 555, loss: 2.9009134769439697, step time: 17.446517944335938ms\r\nStep 556, loss: 2.9501986503601074, step time: 17.597198486328125ms\r\n",,terminal_output +10671,12714462,"TERMINAL",0,0,"Step 557, loss: 2.929802417755127, step time: 17.377138137817383ms\r\nStep 558, loss: 2.941357374191284, step time: 17.21978187561035ms\r\n",,terminal_output +10672,12714551,"TERMINAL",0,0,"Step 559, loss: 2.9480504989624023, step time: 17.512798309326172ms\r\n",,terminal_output +10673,12714602,"TERMINAL",0,0,"Step 560, loss: 2.890130043029785, step time: 17.194032669067383ms\r\n",,terminal_output +10674,12714698,"TERMINAL",0,0,"Step 561, loss: 2.978198528289795, step time: 17.223119735717773ms\r\n",,terminal_output +10675,12714750,"TERMINAL",0,0,"Step 562, loss: 2.907970905303955, step time: 17.516374588012695ms\r\n",,terminal_output +10676,12714845,"TERMINAL",0,0,"Step 563, loss: 2.9205520153045654, step time: 17.347335815429688ms\r\nStep 564, loss: 2.925255537033081, step time: 17.102718353271484ms\r\n",,terminal_output +10677,12714902,"TERMINAL",0,0,"Step 565, loss: 2.899961233139038, step time: 18.871307373046875ms\r\n",,terminal_output +10678,12714966,"TERMINAL",0,0,"Step 566, loss: 2.8951499462127686, step time: 17.904043197631836ms\r\n",,terminal_output +10679,12715032,"TERMINAL",0,0,"Step 567, loss: 2.8807268142700195, step time: 17.582416534423828ms\r\n",,terminal_output +10680,12715101,"TERMINAL",0,0,"Step 568, loss: 2.92166805267334, step time: 17.62986183166504ms\r\n",,terminal_output +10681,12715165,"TERMINAL",0,0,"Step 569, loss: 2.8964521884918213, step time: 17.293453216552734ms\r\n",,terminal_output +10682,12715223,"TERMINAL",0,0,"Step 570, loss: 2.933234453201294, step time: 19.946575164794922ms\r\n",,terminal_output +10683,12715288,"TERMINAL",0,0,"Step 571, loss: 2.898621082305908, step time: 17.74144172668457ms\r\n",,terminal_output +10684,12715379,"TERMINAL",0,0,"Step 572, loss: 2.8857500553131104, step time: 17.147541046142578ms\r\n",,terminal_output +10685,12715430,"TERMINAL",0,0,"Step 573, loss: 2.9928579330444336, step time: 17.340421676635742ms\r\n",,terminal_output +10686,12715541,"TERMINAL",0,0,"Step 574, loss: 2.8778367042541504, step time: 17.53854751586914ms\r\nStep 575, loss: 2.865795850753784, step time: 17.48514175415039ms\r\n",,terminal_output +10687,12715606,"TERMINAL",0,0,"Step 576, loss: 3.172584056854248, step time: 17.235994338989258ms\r\n",,terminal_output +10688,12715677,"TERMINAL",0,0,"Step 577, loss: 2.879143476486206, step time: 17.658710479736328ms\r\n",,terminal_output +10689,12715744,"TERMINAL",0,0,"Step 578, loss: 2.9551784992218018, step time: 17.294883728027344ms\r\n",,terminal_output +10690,12715795,"TERMINAL",0,0,"Step 579, loss: 2.8630294799804688, step time: 17.567157745361328ms\r\n",,terminal_output +10691,12715847,"TERMINAL",0,0,"Step 580, loss: 
2.856224536895752, step time: 17.560243606567383ms\r\n",,terminal_output +10692,12715943,"TERMINAL",0,0,"Step 581, loss: 3.0163941383361816, step time: 17.45295524597168ms\r\n",,terminal_output +10693,12716054,"TERMINAL",0,0,"Step 582, loss: 2.8762080669403076, step time: 17.280101776123047ms\r\nStep 583, loss: 2.8700902462005615, step time: 17.988204956054688ms\r\n",,terminal_output +10694,12716113,"TERMINAL",0,0,"Step 584, loss: 2.880967140197754, step time: 17.84205436706543ms\r\n",,terminal_output +10695,12716171,"TERMINAL",0,0,"Step 585, loss: 2.9848060607910156, step time: 17.462968826293945ms\r\n",,terminal_output +10696,12716232,"TERMINAL",0,0,"Step 586, loss: 2.8802621364593506, step time: 17.589092254638672ms\r\n",,terminal_output +10697,12716294,"TERMINAL",0,0,"Step 587, loss: 2.8707165718078613, step time: 17.15254783630371ms\r\n",,terminal_output +10698,12716355,"TERMINAL",0,0,"Step 588, loss: 2.866241455078125, step time: 17.022132873535156ms\r\n",,terminal_output +10699,12716468,"TERMINAL",0,0,"Step 589, loss: 2.952641487121582, step time: 17.611980438232422ms\r\nStep 590, loss: 2.8907864093780518, step time: 16.965627670288086ms\r\n",,terminal_output +10700,12716530,"TERMINAL",0,0,"Step 591, loss: 2.848808526992798, step time: 17.004966735839844ms\r\n",,terminal_output +10701,12716592,"TERMINAL",0,0,"Step 592, loss: 2.8595621585845947, step time: 17.264842987060547ms\r\n",,terminal_output +10702,12716652,"TERMINAL",0,0,"Step 593, loss: 2.9064695835113525, step time: 17.1201229095459ms\r\n",,terminal_output +10703,12716714,"TERMINAL",0,0,"Step 594, loss: 3.039114475250244, step time: 17.1658992767334ms\r\n",,terminal_output +10704,12716776,"TERMINAL",0,0,"Step 595, loss: 2.87772798538208, step time: 17.618179321289062ms\r\n",,terminal_output +10705,12716838,"TERMINAL",0,0,"Step 596, loss: 2.9120354652404785, step time: 19.216537475585938ms\r\n",,terminal_output +10706,12716905,"TERMINAL",0,0,"Step 597, loss: 2.8475868701934814, step time: 17.963171005249023ms\r\n",,terminal_output +10707,12716964,"TERMINAL",0,0,"Step 598, loss: 2.955308437347412, step time: 17.629146575927734ms\r\n",,terminal_output +10708,12717027,"TERMINAL",0,0,"Step 599, loss: 2.967778205871582, step time: 17.413616180419922ms\r\n",,terminal_output +10709,12717090,"TERMINAL",0,0,"Step 600, loss: 2.8446149826049805, step time: 17.270326614379883ms\r\n",,terminal_output +10710,12717152,"TERMINAL",0,0,"Step 601, loss: 2.864501476287842, step time: 17.67587661743164ms\r\n",,terminal_output +10711,12717218,"TERMINAL",0,0,"Step 602, loss: 2.8670554161071777, step time: 17.24076271057129ms\r\n",,terminal_output +10712,12717281,"TERMINAL",0,0,"Step 603, loss: 2.959082841873169, step time: 17.293930053710938ms\r\n",,terminal_output +10713,12717375,"TERMINAL",0,0,"Step 604, loss: 2.826820135116577, step time: 17.501115798950195ms\r\n",,terminal_output +10714,12717426,"TERMINAL",0,0,"Step 605, loss: 2.8156957626342773, step time: 17.302274703979492ms\r\n",,terminal_output +10715,12717531,"TERMINAL",0,0,"Step 606, loss: 2.8324952125549316, step time: 17.18878746032715ms\r\nStep 607, loss: 2.882648468017578, step time: 17.69399642944336ms\r\n",,terminal_output +10716,12717629,"TERMINAL",0,0,"Step 608, loss: 2.865516424179077, step time: 17.267227172851562ms\r\n",,terminal_output +10717,12717679,"TERMINAL",0,0,"Step 609, loss: 2.9904391765594482, step time: 17.32182502746582ms\r\n",,terminal_output +10718,12717783,"TERMINAL",0,0,"Step 610, loss: 2.83661150932312, step time: 17.511844635009766ms\r\nStep 611, loss: 
2.8195033073425293, step time: 17.196178436279297ms\r\n",,terminal_output +10719,12717846,"TERMINAL",0,0,"Step 612, loss: 2.808467388153076, step time: 17.182588577270508ms\r\n",,terminal_output +10720,12717910,"TERMINAL",0,0,"Step 613, loss: 2.869688034057617, step time: 17.461538314819336ms\r\n",,terminal_output +10721,12718010,"TERMINAL",0,0,"Step 614, loss: 2.8316056728363037, step time: 16.96324348449707ms\r\n",,terminal_output +10722,12718060,"TERMINAL",0,0,"Step 615, loss: 2.940131187438965, step time: 17.198801040649414ms\r\n",,terminal_output +10723,12718164,"TERMINAL",0,0,"Step 616, loss: 2.814579963684082, step time: 17.459630966186523ms\r\nStep 617, loss: 2.8077285289764404, step time: 17.12203025817871ms\r\n",,terminal_output +10724,12718230,"TERMINAL",0,0,"Step 618, loss: 2.8045480251312256, step time: 17.136096954345703ms\r\n",,terminal_output +10725,12718286,"TERMINAL",0,0,"Step 619, loss: 2.8043222427368164, step time: 20.191431045532227ms\r\n",,terminal_output +10726,12718349,"TERMINAL",0,0,"Step 620, loss: 2.8303298950195312, step time: 17.08817481994629ms\r\n",,terminal_output +10727,12718413,"TERMINAL",0,0,"Step 621, loss: 2.836270570755005, step time: 17.33112335205078ms\r\n",,terminal_output +10728,12718471,"TERMINAL",0,0,"Step 622, loss: 2.814030408859253, step time: 17.471790313720703ms\r\n",,terminal_output +10729,12718564,"TERMINAL",0,0,"Step 623, loss: 3.477308511734009, step time: 17.28987693786621ms\r\n",,terminal_output +10730,12718660,"TERMINAL",0,0,"Step 624, loss: 2.859282970428467, step time: 17.168283462524414ms\r\nStep 625, loss: 2.862908124923706, step time: 17.632246017456055ms\r\n",,terminal_output +10731,12718719,"TERMINAL",0,0,"Step 626, loss: 2.8127424716949463, step time: 17.11440086364746ms\r\n",,terminal_output +10732,12718780,"TERMINAL",0,0,"Step 627, loss: 2.8392996788024902, step time: 17.10343360900879ms\r\n",,terminal_output +10733,12718847,"TERMINAL",0,0,"Step 628, loss: 2.784489393234253, step time: 17.439603805541992ms\r\n",,terminal_output +10734,12718944,"TERMINAL",0,0,"Step 629, loss: 2.9271044731140137, step time: 17.052888870239258ms\r\n",,terminal_output +10735,12719006,"TERMINAL",0,0,"Step 630, loss: 2.8011980056762695, step time: 16.95418357849121ms\r\n",,terminal_output +10736,12719066,"TERMINAL",0,0,"Step 631, loss: 2.826672315597534, step time: 17.429113388061523ms\r\n",,terminal_output +10737,12719173,"TERMINAL",0,0,"Step 632, loss: 2.7941384315490723, step time: 17.017841339111328ms\r\nStep 633, loss: 2.8947951793670654, step time: 17.081260681152344ms\r\n",,terminal_output +10738,12719238,"TERMINAL",0,0,"Step 634, loss: 2.770045280456543, step time: 17.21978187561035ms\r\n",,terminal_output +10739,12719298,"TERMINAL",0,0,"Step 635, loss: 2.751680850982666, step time: 17.066478729248047ms\r\n",,terminal_output +10740,12719358,"TERMINAL",0,0,"Step 636, loss: 2.7849032878875732, step time: 16.879558563232422ms\r\n",,terminal_output +10741,12719419,"TERMINAL",0,0,"Step 637, loss: 2.7849278450012207, step time: 17.49134063720703ms\r\n",,terminal_output +10742,12719481,"TERMINAL",0,0,"Step 638, loss: 2.825601577758789, step time: 17.145633697509766ms\r\n",,terminal_output +10743,12719544,"TERMINAL",0,0,"Step 639, loss: 2.769442558288574, step time: 17.115354537963867ms\r\n",,terminal_output +10744,12719604,"TERMINAL",0,0,"Step 640, loss: 2.8587722778320312, step time: 17.500638961791992ms\r\n",,terminal_output +10745,12719665,"TERMINAL",0,0,"Step 641, loss: 2.811560869216919, step time: 17.37356185913086ms\r\n",,terminal_output 
+10746,12719726,"TERMINAL",0,0,"Step 642, loss: 2.8488357067108154, step time: 17.191410064697266ms\r\n",,terminal_output +10747,12719787,"TERMINAL",0,0,"Step 643, loss: 2.7500267028808594, step time: 17.55809783935547ms\r\n",,terminal_output +10748,12719904,"TERMINAL",0,0,"Step 644, loss: 2.833298921585083, step time: 17.264842987060547ms\r\nStep 645, loss: 3.2472217082977295, step time: 17.622947692871094ms\r\n",,terminal_output +10749,12719967,"TERMINAL",0,0,"Step 646, loss: 2.818516492843628, step time: 18.513917922973633ms\r\n",,terminal_output +10750,12720071,"TERMINAL",0,0,"Step 647, loss: 2.9640591144561768, step time: 17.747879028320312ms\r\n",,terminal_output +10751,12720124,"TERMINAL",0,0,"Step 648, loss: 2.778484344482422, step time: 17.2574520111084ms\r\n",,terminal_output +10752,12720233,"TERMINAL",0,0,"Step 649, loss: 2.7706658840179443, step time: 17.782926559448242ms\r\nStep 650, loss: 2.761237859725952, step time: 17.083168029785156ms\r\n",,terminal_output +10753,12720302,"TERMINAL",0,0,"Step 651, loss: 2.7654640674591064, step time: 17.231225967407227ms\r\n",,terminal_output +10754,12720366,"TERMINAL",0,0,"Step 652, loss: 2.750103712081909, step time: 17.347097396850586ms\r\n",,terminal_output +10755,12720431,"TERMINAL",0,0,"Step 653, loss: 2.8321611881256104, step time: 17.490386962890625ms\r\n",,terminal_output +10756,12720537,"TERMINAL",0,0,"Step 654, loss: 2.7660818099975586, step time: 17.15850830078125ms\r\nStep 655, loss: 2.746788501739502, step time: 17.9746150970459ms\r\n",,terminal_output +10757,12720667,"TERMINAL",0,0,"Step 656, loss: 2.983238458633423, step time: 17.24720001220703ms\r\nStep 657, loss: 2.7767834663391113, step time: 17.411470413208008ms\r\n",,terminal_output +10758,12720734,"TERMINAL",0,0,"Step 658, loss: 2.762549638748169, step time: 17.554283142089844ms\r\n",,terminal_output +10759,12720790,"TERMINAL",0,0,"Step 659, loss: 2.77622127532959, step time: 17.56143569946289ms\r\n",,terminal_output +10760,12720853,"TERMINAL",0,0,"Step 660, loss: 2.8264338970184326, step time: 17.194509506225586ms\r\n",,terminal_output +10761,12720905,"TERMINAL",0,0,"Step 661, loss: 2.7565340995788574, step time: 17.70806312561035ms\r\n",,terminal_output +10762,12721002,"TERMINAL",0,0,"Step 662, loss: 2.7778449058532715, step time: 17.296552658081055ms\r\n",,terminal_output +10763,12721061,"TERMINAL",0,0,"Step 663, loss: 2.751634359359741, step time: 17.621517181396484ms\r\n",,terminal_output +10764,12721370,"TERMINAL",0,0,"Step 664, loss: 2.7416131496429443, step time: 299.8363971710205ms\r\n",,terminal_output +10765,12721423,"TERMINAL",0,0,"Step 665, loss: 2.7413289546966553, step time: 24.918317794799805ms\r\n",,terminal_output +10766,12721539,"TERMINAL",0,0,"Step 666, loss: 2.742568016052246, step time: 19.802331924438477ms\r\nStep 667, loss: 2.7167916297912598, step time: 18.53322982788086ms\r\n",,terminal_output +10767,12721602,"TERMINAL",0,0,"Step 668, loss: 3.0129401683807373, step time: 17.517566680908203ms\r\n",,terminal_output +10768,12721661,"TERMINAL",0,0,"Step 669, loss: 2.7606699466705322, step time: 17.556190490722656ms\r\n",,terminal_output +10769,12721727,"TERMINAL",0,0,"Step 670, loss: 3.098538398742676, step time: 17.96579360961914ms\r\n",,terminal_output +10770,12721789,"TERMINAL",0,0,"Step 671, loss: 3.2497806549072266, step time: 17.645597457885742ms\r\n",,terminal_output +10771,12721904,"TERMINAL",0,0,"Step 672, loss: 2.751133441925049, step time: 17.486572265625ms\r\nStep 673, loss: 2.942812204360962, step time: 
18.015623092651367ms\r\n",,terminal_output +10772,12721966,"TERMINAL",0,0,"Step 674, loss: 2.7448999881744385, step time: 17.36903190612793ms\r\n",,terminal_output +10773,12722091,"TERMINAL",0,0,"Step 675, loss: 2.8605613708496094, step time: 17.42267608642578ms\r\nStep 676, loss: 2.9555251598358154, step time: 17.937660217285156ms\r\n",,terminal_output +10774,12722156,"TERMINAL",0,0,"Step 677, loss: 2.746256113052368, step time: 17.444372177124023ms\r\n",,terminal_output +10775,12722219,"TERMINAL",0,0,"Step 678, loss: 2.7248425483703613, step time: 17.30942726135254ms\r\n",,terminal_output +10776,12722318,"TERMINAL",0,0,"Step 679, loss: 2.841245174407959, step time: 17.826557159423828ms\r\n",,terminal_output +10777,12722368,"TERMINAL",0,0,"Step 680, loss: 2.8156068325042725, step time: 17.110347747802734ms\r\n",,terminal_output +10778,12722473,"TERMINAL",0,0,"Step 681, loss: 2.789335250854492, step time: 17.370223999023438ms\r\nStep 682, loss: 2.7268404960632324, step time: 17.604589462280273ms\r\n",,terminal_output +10779,12722597,"TERMINAL",0,0,"Step 683, loss: 2.7308573722839355, step time: 17.415761947631836ms\r\nStep 684, loss: 2.740919589996338, step time: 17.068147659301758ms\r\n",,terminal_output +10780,12722688,"TERMINAL",0,0,"Step 685, loss: 2.763362169265747, step time: 17.58885383605957ms\r\n",,terminal_output +10781,12722739,"TERMINAL",0,0,"Step 686, loss: 2.7145836353302, step time: 16.968488693237305ms\r\n",,terminal_output +10782,12722842,"TERMINAL",0,0,"Step 687, loss: 2.709547758102417, step time: 17.35210418701172ms\r\nStep 688, loss: 2.7098212242126465, step time: 17.43793487548828ms\r\n",,terminal_output +10783,12722906,"TERMINAL",0,0,"Step 689, loss: 2.7116127014160156, step time: 17.174959182739258ms\r\n",,terminal_output +10784,12723031,"TERMINAL",0,0,"Step 690, loss: 2.726240396499634, step time: 17.174720764160156ms\r\nStep 691, loss: 2.7110464572906494, step time: 17.650604248046875ms\r\n",,terminal_output +10785,12723098,"TERMINAL",0,0,"Step 692, loss: 2.7154650688171387, step time: 17.10653305053711ms\r\n",,terminal_output +10786,12723159,"TERMINAL",0,0,"Step 693, loss: 2.776911735534668, step time: 17.266035079956055ms\r\n",,terminal_output +10787,12723257,"TERMINAL",0,0,"Step 694, loss: 2.739352226257324, step time: 17.438411712646484ms\r\n",,terminal_output +10788,12723306,"TERMINAL",0,0,"Step 695, loss: 2.6955068111419678, step time: 17.2121524810791ms\r\n",,terminal_output +10789,12723412,"TERMINAL",0,0,"Step 696, loss: 2.6906585693359375, step time: 17.13275909423828ms\r\nStep 697, loss: 2.7063534259796143, step time: 17.617225646972656ms\r\n",,terminal_output +10790,12723531,"TERMINAL",0,0,"Step 698, loss: 2.6928305625915527, step time: 17.151355743408203ms\r\nStep 699, loss: 2.735443592071533, step time: 17.244577407836914ms\r\n",,terminal_output +10791,12723643,"TERMINAL",0,0,"Step 700, loss: 2.678776741027832, step time: 17.456531524658203ms\r\n",,terminal_output +10792,12723656,"TERMINAL",0,0,"Step 701, loss: 2.758054494857788, step time: 18.413543701171875ms\r\n",,terminal_output +10793,12723751,"TERMINAL",0,0,"Step 702, loss: 2.7452545166015625, step time: 17.154455184936523ms\r\n",,terminal_output +10794,12723860,"TERMINAL",0,0,"Step 703, loss: 2.7014846801757812, step time: 17.596721649169922ms\r\nStep 704, loss: 2.6792852878570557, step time: 17.08531379699707ms\r\n",,terminal_output +10795,12723926,"TERMINAL",0,0,"Step 705, loss: 2.694226026535034, step time: 17.303943634033203ms\r\n",,terminal_output +10796,12723989,"TERMINAL",0,0,"Step 706, 
loss: 2.646610975265503, step time: 17.426252365112305ms\r\n",,terminal_output +10797,12724106,"TERMINAL",0,0,"Step 707, loss: 2.702486991882324, step time: 17.365217208862305ms\r\nStep 708, loss: 2.6876602172851562, step time: 16.974449157714844ms\r\n",,terminal_output +10798,12724175,"TERMINAL",0,0,"Step 709, loss: 2.693384885787964, step time: 17.643451690673828ms\r\n",,terminal_output +10799,12724237,"TERMINAL",0,0,"Step 710, loss: 2.6722757816314697, step time: 17.103910446166992ms\r\n",,terminal_output +10800,12724300,"TERMINAL",0,0,"Step 711, loss: 2.6763579845428467, step time: 17.21811294555664ms\r\n",,terminal_output +10801,12724359,"TERMINAL",0,0,"Step 712, loss: 3.0595595836639404, step time: 17.43769645690918ms\r\n",,terminal_output +10802,12724424,"TERMINAL",0,0,"Step 713, loss: 2.6604394912719727, step time: 17.213821411132812ms\r\n",,terminal_output +10803,12724484,"TERMINAL",0,0,"Step 714, loss: 2.8075344562530518, step time: 17.000198364257812ms\r\n",,terminal_output +10804,12724552,"TERMINAL",0,0,"Step 715, loss: 2.7107646465301514, step time: 18.059492111206055ms\r\n",,terminal_output +10805,12724609,"TERMINAL",0,0,"Step 716, loss: 2.672743082046509, step time: 17.052173614501953ms\r\n",,terminal_output +10806,12724722,"TERMINAL",0,0,"Step 717, loss: 2.671097993850708, step time: 17.243623733520508ms\r\nStep 718, loss: 2.697427272796631, step time: 17.67420768737793ms\r\n",,terminal_output +10807,12724787,"TERMINAL",0,0,"Step 719, loss: 2.717714309692383, step time: 17.445802688598633ms\r\n",,terminal_output +10808,12724915,"TERMINAL",0,0,"Step 720, loss: 2.7791693210601807, step time: 17.291545867919922ms\r\nStep 721, loss: 2.6560592651367188, step time: 17.755508422851562ms\r\n",,terminal_output +10809,12724982,"TERMINAL",0,0,"Step 722, loss: 2.6825575828552246, step time: 18.628358840942383ms\r\n",,terminal_output +10810,12725045,"TERMINAL",0,0,"Step 723, loss: 2.6578357219696045, step time: 17.748355865478516ms\r\n",,terminal_output +10811,12725110,"TERMINAL",0,0,"Step 724, loss: 2.6802258491516113, step time: 17.618417739868164ms\r\n",,terminal_output +10812,12725173,"TERMINAL",0,0,"Step 725, loss: 2.8359265327453613, step time: 17.513751983642578ms\r\n",,terminal_output +10813,12725238,"TERMINAL",0,0,"Step 726, loss: 2.740851640701294, step time: 17.313003540039062ms\r\n",,terminal_output +10814,12725304,"TERMINAL",0,0,"Step 727, loss: 2.6728198528289795, step time: 17.785310745239258ms\r\n",,terminal_output +10815,12725367,"TERMINAL",0,0,"Step 728, loss: 2.6675827503204346, step time: 17.24076271057129ms\r\n",,terminal_output +10816,12725436,"TERMINAL",0,0,"Step 729, loss: 2.6533238887786865, step time: 17.382144927978516ms\r\n",,terminal_output +10817,12725544,"TERMINAL",0,0,"Step 730, loss: 2.6753323078155518, step time: 17.64988899230957ms\r\nStep 731, loss: 2.6354293823242188, step time: 17.46511459350586ms\r\n",,terminal_output +10818,12725662,"TERMINAL",0,0,"Step 732, loss: 2.8136091232299805, step time: 17.29416847229004ms\r\nStep 733, loss: 2.687272071838379, step time: 17.833709716796875ms\r\n",,terminal_output +10819,12725726,"TERMINAL",0,0,"Step 734, loss: 2.7074406147003174, step time: 17.26698875427246ms\r\n",,terminal_output +10820,12725789,"TERMINAL",0,0,"Step 735, loss: 2.6395325660705566, step time: 17.273664474487305ms\r\n",,terminal_output +10821,12725854,"TERMINAL",0,0,"Step 736, loss: 2.634998321533203, step time: 18.53346824645996ms\r\n",,terminal_output +10822,12725909,"TERMINAL",0,0,"Step 737, loss: 2.6487722396850586, step time: 
17.433881759643555ms\r\n",,terminal_output +10823,12726046,"TERMINAL",0,0,"Step 738, loss: 2.7003610134124756, step time: 17.207860946655273ms\r\nStep 739, loss: 2.6391069889068604, step time: 17.70639419555664ms\r\n",,terminal_output +10824,12726108,"TERMINAL",0,0,"Step 740, loss: 2.625011682510376, step time: 17.197132110595703ms\r\n",,terminal_output +10825,12726173,"TERMINAL",0,0,"Step 741, loss: 2.624053478240967, step time: 17.25459098815918ms\r\n",,terminal_output +10826,12726239,"TERMINAL",0,0,"Step 742, loss: 2.6241884231567383, step time: 17.477035522460938ms\r\n",,terminal_output +10827,12726300,"TERMINAL",0,0,"Step 743, loss: 2.6373777389526367, step time: 17.247676849365234ms\r\n",,terminal_output +10828,12726365,"TERMINAL",0,0,"Step 744, loss: 2.602853536605835, step time: 17.014265060424805ms\r\n",,terminal_output +10829,12726428,"TERMINAL",0,0,"Step 745, loss: 2.6198768615722656, step time: 17.543554306030273ms\r\n",,terminal_output +10830,12726495,"TERMINAL",0,0,"Step 746, loss: 2.622877359390259, step time: 17.02594757080078ms\r\n",,terminal_output +10831,12726559,"TERMINAL",0,0,"Step 747, loss: 2.7831690311431885, step time: 21.12412452697754ms\r\n",,terminal_output +10832,12726624,"TERMINAL",0,0,"Step 748, loss: 2.6911473274230957, step time: 17.523527145385742ms\r\n",,terminal_output +10833,12726695,"TERMINAL",0,0,"Step 749, loss: 2.5977890491485596, step time: 17.227888107299805ms\r\n",,terminal_output +10834,12726766,"TERMINAL",0,0,"Step 750, loss: 2.6136815547943115, step time: 17.18425750732422ms\r\n",,terminal_output +10835,12726873,"TERMINAL",0,0,"Step 751, loss: 2.6142005920410156, step time: 17.672061920166016ms\r\nStep 752, loss: 2.631971597671509, step time: 18.807172775268555ms\r\n",,terminal_output +10836,12726937,"TERMINAL",0,0,"Step 753, loss: 2.6111671924591064, step time: 17.596960067749023ms\r\n",,terminal_output +10837,12726998,"TERMINAL",0,0,"Step 754, loss: 2.7588396072387695, step time: 17.583847045898438ms\r\n",,terminal_output +10838,12727059,"TERMINAL",0,0,"Step 755, loss: 2.5989999771118164, step time: 17.324447631835938ms\r\n",,terminal_output +10839,12727166,"TERMINAL",0,0,"Step 756, loss: 2.6223642826080322, step time: 17.038822174072266ms\r\nStep 757, loss: 2.621206760406494, step time: 17.672061920166016ms\r\n",,terminal_output +10840,12727239,"TERMINAL",0,0,"Step 758, loss: 2.5958621501922607, step time: 17.17996597290039ms\r\n",,terminal_output +10841,12727303,"TERMINAL",0,0,"Step 759, loss: 2.7290384769439697, step time: 17.18902587890625ms\r\n",,terminal_output +10842,12727371,"TERMINAL",0,0,"Step 760, loss: 2.5845930576324463, step time: 17.48180389404297ms\r\n",,terminal_output +10843,12727433,"TERMINAL",0,0,"Step 761, loss: 2.592150926589966, step time: 17.234325408935547ms\r\n",,terminal_output +10844,12727502,"TERMINAL",0,0,"Step 762, loss: 2.594653606414795, step time: 17.094850540161133ms\r\n",,terminal_output +10845,12727562,"TERMINAL",0,0,"Step 763, loss: 2.7724246978759766, step time: 17.676830291748047ms\r\n",,terminal_output +10846,12727622,"TERMINAL",0,0,"Step 764, loss: 2.592986822128296, step time: 17.009973526000977ms\r\n",,terminal_output +10847,12727683,"TERMINAL",0,0,"Step 765, loss: 2.602522134780884, step time: 17.190933227539062ms\r\n",,terminal_output +10848,12727790,"TERMINAL",0,0,"Step 766, loss: 2.5838799476623535, step time: 17.647981643676758ms\r\nStep 767, loss: 2.7152037620544434, step time: 17.178058624267578ms\r\n",,terminal_output +10849,12727853,"TERMINAL",0,0,"Step 768, loss: 2.5838558673858643, step 
time: 17.198562622070312ms\r\n",,terminal_output +10850,12727918,"TERMINAL",0,0,"Step 769, loss: 2.623695135116577, step time: 17.603397369384766ms\r\n",,terminal_output +10851,12727980,"TERMINAL",0,0,"Step 770, loss: 2.598454475402832, step time: 17.11273193359375ms\r\n",,terminal_output +10852,12728043,"TERMINAL",0,0,"Step 771, loss: 2.584885358810425, step time: 17.207622528076172ms\r\n",,terminal_output +10853,12728165,"train_dynamics.py",2628,0,"",python,selection_mouse +10854,12728184,"TERMINAL",0,0,"Step 772, loss: 2.559954881668091, step time: 17.512083053588867ms\r\nStep 773, loss: 2.5783331394195557, step time: 17.286062240600586ms\r\n",,terminal_output +10855,12728298,"TERMINAL",0,0,"Step 774, loss: 2.703261375427246, step time: 17.10200309753418ms\r\nStep 775, loss: 2.668161153793335, step time: 17.852067947387695ms\r\n",,terminal_output +10856,12728388,"TERMINAL",0,0,"Step 776, loss: 2.5854990482330322, step time: 16.991853713989258ms\r\n",,terminal_output +10857,12728440,"TERMINAL",0,0,"Step 777, loss: 2.6405770778656006, step time: 17.444372177124023ms\r\n",,terminal_output +10858,12728532,"TERMINAL",0,0,"Step 778, loss: 2.5646090507507324, step time: 17.390966415405273ms\r\n",,terminal_output +10859,12728639,"TERMINAL",0,0,"Step 779, loss: 2.662022829055786, step time: 17.194747924804688ms\r\nStep 780, loss: 2.5727522373199463, step time: 17.221689224243164ms\r\n",,terminal_output +10860,12728694,"train_dynamics.py",2596,0,"",python,selection_mouse +10861,12728706,"train_dynamics.py",2595,0,"",python,selection_command +10862,12728760,"TERMINAL",0,0,"Step 781, loss: 2.577601194381714, step time: 17.808914184570312ms\r\nStep 782, loss: 2.5553693771362305, step time: 17.212390899658203ms\r\n",,terminal_output +10863,12728813,"TERMINAL",0,0,"Step 783, loss: 2.649003028869629, step time: 17.480850219726562ms\r\n",,terminal_output +10864,12728921,"TERMINAL",0,0,"Step 784, loss: 2.566685438156128, step time: 17.84205436706543ms\r\n",,terminal_output +10865,12728985,"TERMINAL",0,0,"Step 785, loss: 2.573040723800659, step time: 17.461061477661133ms\r\nStep 786, loss: 2.544152021408081, step time: 17.302989959716797ms\r\n",,terminal_output +10866,12729086,"TERMINAL",0,0,"Step 787, loss: 2.5831828117370605, step time: 17.88806915283203ms\r\n",,terminal_output +10867,12729142,"TERMINAL",0,0,"Step 788, loss: 2.5742857456207275, step time: 17.238855361938477ms\r\n",,terminal_output +10868,12729204,"TERMINAL",0,0,"Step 789, loss: 2.719480037689209, step time: 17.406225204467773ms\r\n",,terminal_output +10869,12729250,"train_dynamics.py",2590,0,"",python,selection_mouse +10870,12729315,"TERMINAL",0,0,"Step 790, loss: 2.5477523803710938, step time: 17.616987228393555ms\r\nStep 791, loss: 2.549170732498169, step time: 17.41933822631836ms\r\n",,terminal_output +10871,12729379,"TERMINAL",0,0,"Step 792, loss: 2.8019461631774902, step time: 17.3187255859375ms\r\n",,terminal_output +10872,12729446,"TERMINAL",0,0,"Step 793, loss: 2.5429835319519043, step time: 17.832040786743164ms\r\n",,terminal_output +10873,12729504,"TERMINAL",0,0,"Step 794, loss: 2.546445846557617, step time: 17.41313934326172ms\r\n",,terminal_output +10874,12729572,"TERMINAL",0,0,"Step 795, loss: 2.5503334999084473, step time: 17.552852630615234ms\r\n",,terminal_output +10875,12729636,"TERMINAL",0,0,"Step 796, loss: 2.5341336727142334, step time: 18.122196197509766ms\r\n",,terminal_output +10876,12729701,"TERMINAL",0,0,"Step 797, loss: 2.5270602703094482, step time: 17.40288734436035ms\r\n",,terminal_output 
+10877,12729768,"TERMINAL",0,0,"Step 798, loss: 2.5547473430633545, step time: 17.295360565185547ms\r\n",,terminal_output +10878,12729821,"train_dynamics.py",2652,0,"",python,selection_mouse +10879,12730078,"TERMINAL",0,0,"Step 799, loss: 2.560844898223877, step time: 329.0987014770508ms\r\n",,terminal_output +10880,12730140,"TERMINAL",0,0,"Step 800, loss: 2.6974940299987793, step time: 25.46381950378418ms\r\n",,terminal_output +10881,12730202,"TERMINAL",0,0,"Step 801, loss: 2.544755220413208, step time: 20.57480812072754ms\r\n",,terminal_output +10882,12730266,"TERMINAL",0,0,"Step 802, loss: 3.4272243976593018, step time: 19.085407257080078ms\r\n",,terminal_output +10883,12730329,"TERMINAL",0,0,"Step 803, loss: 2.536870241165161, step time: 18.105030059814453ms\r\n",,terminal_output +10884,12730350,"train_dynamics.py",2620,0,"",python,selection_mouse +10885,12730403,"TERMINAL",0,0,"Step 804, loss: 2.5619091987609863, step time: 17.7767276763916ms\r\n",,terminal_output +10886,12730496,"train_dynamics.py",2605,18,"cross_entropy_loss",python,selection_mouse +10887,12730601,"TERMINAL",0,0,"Step 805, loss: 2.6528751850128174, step time: 18.17464828491211ms\r\nStep 806, loss: 2.7937662601470947, step time: 17.567157745361328ms\r\nStep 807, loss: 2.5305471420288086, step time: 17.620563507080078ms\r\n",,terminal_output +10888,12730705,"TERMINAL",0,0,"Step 808, loss: 2.566378355026245, step time: 17.78268814086914ms\r\nStep 809, loss: 2.5634822845458984, step time: 17.388582229614258ms\r\n",,terminal_output +10889,12730766,"TERMINAL",0,0,"Step 810, loss: 2.549121141433716, step time: 17.352819442749023ms\r\n",,terminal_output +10890,12730830,"TERMINAL",0,0,"Step 811, loss: 2.5520987510681152, step time: 17.862558364868164ms\r\n",,terminal_output +10891,12730931,"TERMINAL",0,0,"Step 812, loss: 2.5200512409210205, step time: 17.19832420349121ms\r\n",,terminal_output +10892,12730984,"TERMINAL",0,0,"Step 813, loss: 2.515629529953003, step time: 17.435789108276367ms\r\n",,terminal_output +10893,12731057,"TERMINAL",0,0,"Step 814, loss: 2.5794312953948975, step time: 17.719030380249023ms\r\n",,terminal_output +10894,12731123,"TERMINAL",0,0,"Step 815, loss: 2.5499935150146484, step time: 17.7764892578125ms\r\n",,terminal_output +10895,12731136,"train_dynamics.py",2752,0,"",python,selection_mouse +10896,12731176,"TERMINAL",0,0,"Step 816, loss: 2.577542781829834, step time: 17.436981201171875ms\r\n",,terminal_output +10897,12731228,"TERMINAL",0,0,"Step 817, loss: 2.5398192405700684, step time: 17.935514450073242ms\r\n",,terminal_output +10898,12731281,"TERMINAL",0,0,"Step 818, loss: 2.510617256164551, step time: 17.314910888671875ms\r\n",,terminal_output +10899,12731402,"TERMINAL",0,0,"Step 819, loss: 2.628406047821045, step time: 17.46988296508789ms\r\nStep 820, loss: 2.5596327781677246, step time: 17.873525619506836ms\r\n",,terminal_output +10900,12731454,"TERMINAL",0,0,"Step 821, loss: 2.50378155708313, step time: 17.446041107177734ms\r\n",,terminal_output +10901,12731548,"TERMINAL",0,0,"Step 822, loss: 2.499960422515869, step time: 17.343759536743164ms\r\n",,terminal_output +10902,12731606,"TERMINAL",0,0,"Step 823, loss: 2.5036497116088867, step time: 17.940759658813477ms\r\n",,terminal_output +10903,12731714,"TERMINAL",0,0,"Step 824, loss: 2.5226168632507324, step time: 17.333507537841797ms\r\nStep 825, loss: 2.504878520965576, step time: 17.678260803222656ms\r\n",,terminal_output +10904,12731750,"train_dynamics.py",2692,0,"",python,selection_mouse +10905,12731776,"TERMINAL",0,0,"Step 826, loss: 
2.510341167449951, step time: 18.691062927246094ms\r\n",,terminal_output +10906,12731853,"TERMINAL",0,0,"Step 827, loss: 2.5129499435424805, step time: 17.418384552001953ms\r\n",,terminal_output +10907,12731880,"train_dynamics.py",2689,7,"outputs",python,selection_mouse +10908,12731908,"TERMINAL",0,0,"Step 828, loss: 2.507504463195801, step time: 17.392396926879883ms\r\n",,terminal_output +10909,12731966,"TERMINAL",0,0,"Step 829, loss: 2.696652412414551, step time: 17.914772033691406ms\r\n",,terminal_output +10910,12732058,"TERMINAL",0,0,"Step 830, loss: 2.538978099822998, step time: 17.35210418701172ms\r\n",,terminal_output +10911,12732166,"TERMINAL",0,0,"Step 831, loss: 2.4995288848876953, step time: 17.415285110473633ms\r\nStep 832, loss: 2.5038251876831055, step time: 17.610549926757812ms\r\n",,terminal_output +10912,12732236,"TERMINAL",0,0,"Step 833, loss: 2.526118278503418, step time: 17.51565933227539ms\r\n",,terminal_output +10913,12732297,"TERMINAL",0,0,"Step 834, loss: 2.5136754512786865, step time: 17.133235931396484ms\r\n",,terminal_output +10914,12732361,"TERMINAL",0,0,"Step 835, loss: 2.4974617958068848, step time: 17.804861068725586ms\r\n",,terminal_output +10915,12732413,"train_dynamics.py",2658,0,"",python,selection_mouse +10916,12732437,"TERMINAL",0,0,"Step 836, loss: 2.5125420093536377, step time: 17.279624938964844ms\r\n",,terminal_output +10917,12732483,"TERMINAL",0,0,"Step 837, loss: 2.5014870166778564, step time: 17.403602600097656ms\r\n",,terminal_output +10918,12732535,"TERMINAL",0,0,"Step 838, loss: 2.632115364074707, step time: 17.657995223999023ms\r\n",,terminal_output +10919,12732579,"train_dynamics.py",2641,21,"masked_token_accuracy",python,selection_mouse +10920,12732649,"TERMINAL",0,0,"Step 839, loss: 2.5568859577178955, step time: 17.317533493041992ms\r\nStep 840, loss: 2.483748435974121, step time: 17.21358299255371ms\r\n",,terminal_output +10921,12732725,"TERMINAL",0,0,"Step 841, loss: 2.474390745162964, step time: 17.7004337310791ms\r\n",,terminal_output +10922,12732834,"TERMINAL",0,0,"Step 842, loss: 2.7065932750701904, step time: 17.152786254882812ms\r\nStep 843, loss: 2.4914450645446777, step time: 17.241477966308594ms\r\n",,terminal_output +10923,12732902,"TERMINAL",0,0,"Step 844, loss: 2.5500988960266113, step time: 17.774581909179688ms\r\n",,terminal_output +10924,12732958,"TERMINAL",0,0,"Step 845, loss: 2.6442604064941406, step time: 17.817974090576172ms\r\n",,terminal_output +10925,12733095,"TERMINAL",0,0,"Step 846, loss: 2.582460880279541, step time: 17.638683319091797ms\r\nStep 847, loss: 2.4887948036193848, step time: 17.979860305786133ms\r\n",,terminal_output +10926,12733131,"train_dynamics.py",2620,0,"",python,selection_mouse +10927,12733160,"TERMINAL",0,0,"Step 848, loss: 2.4919440746307373, step time: 17.378568649291992ms\r\n",,terminal_output +10928,12733236,"TERMINAL",0,0,"Step 849, loss: 2.522386312484741, step time: 18.523454666137695ms\r\n",,terminal_output +10929,12733293,"train_dynamics.py",2605,18,"cross_entropy_loss",python,selection_mouse +10930,12733317,"TERMINAL",0,0,"Step 850, loss: 2.4655795097351074, step time: 17.756938934326172ms\r\n",,terminal_output +10931,12733361,"TERMINAL",0,0,"Step 851, loss: 2.54685640335083, step time: 17.164230346679688ms\r\n",,terminal_output +10932,12733414,"TERMINAL",0,0,"Step 852, loss: 2.8935022354125977, step time: 17.09890365600586ms\r\n",,terminal_output +10933,12733534,"TERMINAL",0,0,"Step 853, loss: 2.4763612747192383, step time: 17.693042755126953ms\r\nStep 854, loss: 2.476349353790283, 
step time: 16.957759857177734ms\r\n",,terminal_output +10934,12733603,"TERMINAL",0,0,"Step 855, loss: 2.621344804763794, step time: 17.0896053314209ms\r\n",,terminal_output +10935,12733656,"TERMINAL",0,0,"Step 856, loss: 2.4745705127716064, step time: 17.506837844848633ms\r\n",,terminal_output +10936,12733722,"TERMINAL",0,0,"Step 857, loss: 2.506976366043091, step time: 17.194747924804688ms\r\n",,terminal_output +10937,12733785,"TERMINAL",0,0,"Step 858, loss: 2.469454765319824, step time: 17.035245895385742ms\r\n",,terminal_output +10938,12733856,"TERMINAL",0,0,"Step 859, loss: 2.4730591773986816, step time: 17.55237579345703ms\r\n",,terminal_output +10939,12733910,"TERMINAL",0,0,"Step 860, loss: 2.458489179611206, step time: 17.084836959838867ms\r\n",,terminal_output +10940,12734004,"TERMINAL",0,0,"Step 861, loss: 2.453538656234741, step time: 17.07911491394043ms\r\n",,terminal_output +10941,12734057,"TERMINAL",0,0,"Step 862, loss: 2.449434757232666, step time: 17.49587059020996ms\r\n",,terminal_output +10942,12734110,"TERMINAL",0,0,"Step 863, loss: 2.537539482116699, step time: 17.139196395874023ms\r\n",,terminal_output +10943,12734218,"TERMINAL",0,0,"Step 864, loss: 2.4579436779022217, step time: 17.00115203857422ms\r\nStep 865, loss: 2.4683852195739746, step time: 17.493009567260742ms\r\n",,terminal_output +10944,12734314,"TERMINAL",0,0,"Step 866, loss: 2.583247184753418, step time: 16.961336135864258ms\r\n",,terminal_output +10945,12734424,"TERMINAL",0,0,"Step 867, loss: 2.479597806930542, step time: 17.054319381713867ms\r\nStep 868, loss: 2.513808250427246, step time: 17.35997200012207ms\r\n",,terminal_output +10946,12734532,"TERMINAL",0,0,"Step 869, loss: 2.5305676460266113, step time: 17.059326171875ms\r\nStep 870, loss: 2.462038516998291, step time: 16.93105697631836ms\r\n",,terminal_output +10947,12734596,"TERMINAL",0,0,"Step 871, loss: 2.514760971069336, step time: 17.479419708251953ms\r\n",,terminal_output +10948,12734658,"TERMINAL",0,0,"Step 872, loss: 2.447812557220459, step time: 18.723726272583008ms\r\n",,terminal_output +10949,12734722,"TERMINAL",0,0,"Step 873, loss: 2.453623056411743, step time: 17.603158950805664ms\r\n",,terminal_output +10950,12734784,"TERMINAL",0,0,"Step 874, loss: 2.471684455871582, step time: 17.490863800048828ms\r\n",,terminal_output +10951,12734847,"TERMINAL",0,0,"Step 875, loss: 2.4594931602478027, step time: 17.050743103027344ms\r\n",,terminal_output +10952,12734910,"TERMINAL",0,0,"Step 876, loss: 2.4914519786834717, step time: 16.900062561035156ms\r\n",,terminal_output +10953,12734973,"TERMINAL",0,0,"Step 877, loss: 2.460130214691162, step time: 18.366098403930664ms\r\n",,terminal_output +10954,12735037,"TERMINAL",0,0,"Step 878, loss: 2.445840358734131, step time: 17.605304718017578ms\r\n",,terminal_output +10955,12735096,"TERMINAL",0,0,"Step 879, loss: 2.435840368270874, step time: 17.27747917175293ms\r\n",,terminal_output +10956,12735159,"TERMINAL",0,0,"Step 880, loss: 2.507463216781616, step time: 17.44365692138672ms\r\n",,terminal_output +10957,12735223,"TERMINAL",0,0,"Step 881, loss: 2.4150338172912598, step time: 17.116069793701172ms\r\n",,terminal_output +10958,12735365,"TERMINAL",0,0,"Step 882, loss: 2.428614854812622, step time: 17.02141761779785ms\r\nStep 883, loss: 2.442134141921997, step time: 17.47417449951172ms\r\n",,terminal_output +10959,12735428,"TERMINAL",0,0,"Step 884, loss: 2.4368960857391357, step time: 16.884565353393555ms\r\n",,terminal_output +10960,12735494,"TERMINAL",0,0,"Step 885, loss: 2.4226839542388916, step time: 
17.081022262573242ms\r\n",,terminal_output +10961,12735554,"TERMINAL",0,0,"Step 886, loss: 2.4311470985412598, step time: 17.519712448120117ms\r\n",,terminal_output +10962,12735619,"TERMINAL",0,0,"Step 887, loss: 2.4173312187194824, step time: 17.084836959838867ms\r\n",,terminal_output +10963,12735680,"TERMINAL",0,0,"Step 888, loss: 2.4297406673431396, step time: 17.03357696533203ms\r\n",,terminal_output +10964,12735785,"TERMINAL",0,0,"Step 889, loss: 2.8524303436279297, step time: 17.547130584716797ms\r\nStep 890, loss: 2.466686248779297, step time: 17.145872116088867ms\r\n",,terminal_output +10965,12735846,"TERMINAL",0,0,"Step 891, loss: 3.339053153991699, step time: 17.42267608642578ms\r\n",,terminal_output +10966,12735911,"TERMINAL",0,0,"Step 892, loss: 2.4046473503112793, step time: 17.674684524536133ms\r\n",,terminal_output +10967,12735974,"TERMINAL",0,0,"Step 893, loss: 2.4182236194610596, step time: 17.226219177246094ms\r\n",,terminal_output +10968,12736036,"TERMINAL",0,0,"Step 894, loss: 2.419630289077759, step time: 17.11750030517578ms\r\n",,terminal_output +10969,12736097,"TERMINAL",0,0,"Step 895, loss: 2.668215036392212, step time: 17.728567123413086ms\r\n",,terminal_output +10970,12736195,"TERMINAL",0,0,"Step 896, loss: 2.413515567779541, step time: 17.177343368530273ms\r\n",,terminal_output +10971,12736302,"TERMINAL",0,0,"Step 897, loss: 2.5805087089538574, step time: 17.17996597290039ms\r\nStep 898, loss: 2.4114928245544434, step time: 17.54140853881836ms\r\n",,terminal_output +10972,12736358,"TERMINAL",0,0,"Step 899, loss: 2.441099166870117, step time: 17.177581787109375ms\r\n",,terminal_output +10973,12736448,"TERMINAL",0,0,"Step 900, loss: 2.413583517074585, step time: 17.19379425048828ms\r\n",,terminal_output +10974,12736502,"TERMINAL",0,0,"Step 901, loss: 2.4282562732696533, step time: 17.714738845825195ms\r\n",,terminal_output +10975,12736607,"TERMINAL",0,0,"Step 902, loss: 2.431511163711548, step time: 17.13705062866211ms\r\nStep 903, loss: 2.388143301010132, step time: 17.26698875427246ms\r\n",,terminal_output +10976,12736667,"TERMINAL",0,0,"Step 904, loss: 2.5393569469451904, step time: 17.87734031677246ms\r\n",,terminal_output +10977,12736729,"TERMINAL",0,0,"Step 905, loss: 2.4258835315704346, step time: 17.14944839477539ms\r\n",,terminal_output +10978,12736799,"TERMINAL",0,0,"Step 906, loss: 2.410358428955078, step time: 17.165184020996094ms\r\n",,terminal_output +10979,12736866,"TERMINAL",0,0,"Step 907, loss: 2.413785219192505, step time: 19.600629806518555ms\r\n",,terminal_output +10980,12736905,"TERMINAL",0,0,"Step 908, loss: 2.5079479217529297, step time: 17.431974411010742ms\r\n",,terminal_output +10981,12736998,"TERMINAL",0,0,"Step 909, loss: 2.600118398666382, step time: 17.249345779418945ms\r\n",,terminal_output +10982,12737103,"TERMINAL",0,0,"Step 910, loss: 2.3960297107696533, step time: 17.615318298339844ms\r\nStep 911, loss: 2.4043962955474854, step time: 17.222881317138672ms\r\n",,terminal_output +10983,12737162,"TERMINAL",0,0,"Step 912, loss: 2.4218947887420654, step time: 17.158031463623047ms\r\n",,terminal_output +10984,12737226,"TERMINAL",0,0,"Step 913, loss: 2.394655466079712, step time: 17.79317855834961ms\r\n",,terminal_output +10985,12737294,"TERMINAL",0,0,"Step 914, loss: 2.4185678958892822, step time: 17.09127426147461ms\r\n",,terminal_output +10986,12737381,"TERMINAL",0,0,"Step 915, loss: 2.3880550861358643, step time: 17.223834991455078ms\r\n",,terminal_output +10987,12737438,"TERMINAL",0,0,"Step 916, loss: 2.4276914596557617, step time: 
17.529010772705078ms\r\n",,terminal_output +10988,12737542,"TERMINAL",0,0,"Step 917, loss: 2.619856834411621, step time: 17.272233963012695ms\r\nStep 918, loss: 2.457625389099121, step time: 17.171859741210938ms\r\n",,terminal_output +10989,12737654,"TERMINAL",0,0,"Step 919, loss: 2.984959602355957, step time: 27.02808380126953ms\r\n",,terminal_output +10990,12737918,"TERMINAL",0,0,"Step 920, loss: 2.3982532024383545, step time: 306.0276508331299ms\r\n",,terminal_output +10991,12738026,"TERMINAL",0,0,"Step 921, loss: 2.4909212589263916, step time: 24.22785758972168ms\r\n",,terminal_output +10992,12738079,"TERMINAL",0,0,"Step 922, loss: 2.457193613052368, step time: 19.359111785888672ms\r\n",,terminal_output +10993,12738185,"TERMINAL",0,0,"Step 923, loss: 2.3967835903167725, step time: 18.125534057617188ms\r\nStep 924, loss: 2.393186330795288, step time: 17.29893684387207ms\r\n",,terminal_output +10994,12738252,"TERMINAL",0,0,"Step 925, loss: 2.385629892349243, step time: 17.22407341003418ms\r\n",,terminal_output +10995,12738315,"TERMINAL",0,0,"Step 926, loss: 2.4104366302490234, step time: 17.340660095214844ms\r\n",,terminal_output +10996,12738378,"TERMINAL",0,0,"Step 927, loss: 2.409785747528076, step time: 17.107248306274414ms\r\n",,terminal_output +10997,12738442,"TERMINAL",0,0,"Step 928, loss: 2.381120204925537, step time: 17.09294319152832ms\r\n",,terminal_output +10998,12738505,"TERMINAL",0,0,"Step 929, loss: 2.4090070724487305, step time: 18.38231086730957ms\r\n",,terminal_output +10999,12738569,"TERMINAL",0,0,"Step 930, loss: 2.482074499130249, step time: 17.21334457397461ms\r\n",,terminal_output +11000,12738683,"TERMINAL",0,0,"Step 931, loss: 2.5439701080322266, step time: 17.29583740234375ms\r\nStep 932, loss: 2.4128637313842773, step time: 17.552614212036133ms\r\n",,terminal_output +11001,12738748,"TERMINAL",0,0,"Step 933, loss: 3.235999345779419, step time: 17.352819442749023ms\r\n",,terminal_output +11002,12738829,"TERMINAL",0,0,"Step 934, loss: 2.390050172805786, step time: 17.076492309570312ms\r\n",,terminal_output +11003,12738872,"TERMINAL",0,0,"Step 935, loss: 2.4051334857940674, step time: 17.55213737487793ms\r\n",,terminal_output +11004,12738965,"TERMINAL",0,0,"Step 936, loss: 2.4580788612365723, step time: 17.174959182739258ms\r\n",,terminal_output +11005,12739017,"TERMINAL",0,0,"Step 937, loss: 2.377117395401001, step time: 17.286062240600586ms\r\n",,terminal_output +11006,12739122,"TERMINAL",0,0,"Step 938, loss: 2.3764379024505615, step time: 17.647743225097656ms\r\nStep 939, loss: 2.337657928466797, step time: 17.251014709472656ms\r\n",,terminal_output +11007,12739218,"TERMINAL",0,0,"Step 940, loss: 2.378068447113037, step time: 17.142057418823242ms\r\n",,terminal_output +11008,12739269,"TERMINAL",0,0,"Step 941, loss: 2.736077308654785, step time: 17.609357833862305ms\r\n",,terminal_output +11009,12739378,"TERMINAL",0,0,"Step 942, loss: 2.3727474212646484, step time: 17.171859741210938ms\r\nStep 943, loss: 2.354219436645508, step time: 17.168045043945312ms\r\n",,terminal_output +11010,12739440,"TERMINAL",0,0,"Step 944, loss: 2.395423412322998, step time: 17.436504364013672ms\r\n",,terminal_output +11011,12739502,"TERMINAL",0,0,"Step 945, loss: 2.545206069946289, step time: 17.325162887573242ms\r\n",,terminal_output +11012,12739565,"TERMINAL",0,0,"Step 946, loss: 2.556637763977051, step time: 17.145156860351562ms\r\n",,terminal_output +11013,12739629,"TERMINAL",0,0,"Step 947, loss: 2.44284987449646, step time: 17.522335052490234ms\r\n",,terminal_output 
+11014,12739692,"TERMINAL",0,0,"Step 948, loss: 2.3653130531311035, step time: 17.20714569091797ms\r\n",,terminal_output +11015,12739816,"TERMINAL",0,0,"Step 949, loss: 2.3629841804504395, step time: 17.25172996520996ms\r\nStep 950, loss: 2.403949737548828, step time: 17.46344566345215ms\r\n",,terminal_output +11016,12739869,"TERMINAL",0,0,"Step 951, loss: 2.5996973514556885, step time: 17.254352569580078ms\r\n",,terminal_output +11017,12739935,"TERMINAL",0,0,"Step 952, loss: 2.375453233718872, step time: 22.251605987548828ms\r\n",,terminal_output +11018,12739996,"TERMINAL",0,0,"Step 953, loss: 2.4993832111358643, step time: 20.586490631103516ms\r\n",,terminal_output +11019,12740062,"TERMINAL",0,0,"Step 954, loss: 2.3718578815460205, step time: 17.786502838134766ms\r\n",,terminal_output +11020,12740127,"TERMINAL",0,0,"Step 955, loss: 2.3573546409606934, step time: 17.431020736694336ms\r\n",,terminal_output +11021,12740192,"TERMINAL",0,0,"Step 956, loss: 2.6006109714508057, step time: 17.52185821533203ms\r\n",,terminal_output +11022,12740254,"TERMINAL",0,0,"Step 957, loss: 2.3900513648986816, step time: 17.302513122558594ms\r\n",,terminal_output +11023,12740318,"TERMINAL",0,0,"Step 958, loss: 2.38326358795166, step time: 17.14634895324707ms\r\n",,terminal_output +11024,12740387,"TERMINAL",0,0,"Step 959, loss: 2.35119891166687, step time: 17.533063888549805ms\r\n",,terminal_output +11025,12740438,"TERMINAL",0,0,"Step 960, loss: 2.4407715797424316, step time: 17.26388931274414ms\r\n",,terminal_output +11026,12740532,"TERMINAL",0,0,"Step 961, loss: 2.3521196842193604, step time: 17.162322998046875ms\r\n",,terminal_output +11027,12740637,"TERMINAL",0,0,"Step 962, loss: 2.363922119140625, step time: 17.34781265258789ms\r\nStep 963, loss: 2.3644890785217285, step time: 17.302274703979492ms\r\n",,terminal_output +11028,12740742,"TERMINAL",0,0,"Step 964, loss: 2.3483686447143555, step time: 17.1356201171875ms\r\nStep 965, loss: 2.35815167427063, step time: 17.509937286376953ms\r\n",,terminal_output +11029,12740883,"TERMINAL",0,0,"Step 966, loss: 2.555351495742798, step time: 17.203092575073242ms\r\nStep 967, loss: 2.386258363723755, step time: 17.2271728515625ms\r\n",,terminal_output +11030,12740980,"TERMINAL",0,0,"Step 968, loss: 2.340963125228882, step time: 17.44222640991211ms\r\n",,terminal_output +11031,12741027,"TERMINAL",0,0,"Step 969, loss: 2.5462183952331543, step time: 17.340660095214844ms\r\n",,terminal_output +11032,12741078,"TERMINAL",0,0,"Step 970, loss: 2.4498450756073, step time: 17.19355583190918ms\r\n",,terminal_output +11033,12741184,"TERMINAL",0,0,"Step 971, loss: 2.3594863414764404, step time: 17.644643783569336ms\r\nStep 972, loss: 2.367307424545288, step time: 17.1816349029541ms\r\n",,terminal_output +11034,12741246,"TERMINAL",0,0,"Step 973, loss: 2.369511365890503, step time: 17.48347282409668ms\r\n",,terminal_output +11035,12741306,"TERMINAL",0,0,"Step 974, loss: 2.3286309242248535, step time: 17.666339874267578ms\r\n",,terminal_output +11036,12741367,"TERMINAL",0,0,"Step 975, loss: 2.3463923931121826, step time: 17.418861389160156ms\r\n",,terminal_output +11037,12741432,"TERMINAL",0,0,"Step 976, loss: 2.4603497982025146, step time: 17.32015609741211ms\r\n",,terminal_output +11038,12741524,"TERMINAL",0,0,"Step 977, loss: 2.351219415664673, step time: 17.64512062072754ms\r\n",,terminal_output +11039,12741577,"TERMINAL",0,0,"Step 978, loss: 2.3321642875671387, step time: 17.20881462097168ms\r\n",,terminal_output +11040,12741724,"TERMINAL",0,0,"Step 979, loss: 
2.542832612991333, step time: 17.3799991607666ms\r\nStep 980, loss: 2.3972065448760986, step time: 25.058984756469727ms\r\n",,terminal_output +11041,12741777,"TERMINAL",0,0,"Step 981, loss: 2.3433420658111572, step time: 35.181283950805664ms\r\n",,terminal_output +11042,12741891,"TERMINAL",0,0,"Step 982, loss: 2.339155673980713, step time: 23.0104923248291ms\r\nStep 983, loss: 2.324523687362671, step time: 24.90973472595215ms\r\n",,terminal_output +11043,12741958,"TERMINAL",0,0,"Step 984, loss: 2.4079599380493164, step time: 24.825096130371094ms\r\n",,terminal_output +11044,12742025,"TERMINAL",0,0,"Step 985, loss: 2.34381365776062, step time: 25.16007423400879ms\r\n",,terminal_output +11045,12742092,"TERMINAL",0,0,"Step 986, loss: 2.3581085205078125, step time: 24.42479133605957ms\r\n",,terminal_output +11046,12742158,"TERMINAL",0,0,"Step 987, loss: 2.5354299545288086, step time: 25.0704288482666ms\r\n",,terminal_output +11047,12742222,"TERMINAL",0,0,"Step 988, loss: 2.447924852371216, step time: 23.625612258911133ms\r\n",,terminal_output +11048,12742286,"TERMINAL",0,0,"Step 989, loss: 2.338510274887085, step time: 19.598722457885742ms\r\n",,terminal_output +11049,12742350,"TERMINAL",0,0,"Step 990, loss: 2.9838078022003174, step time: 17.81463623046875ms\r\n",,terminal_output +11050,12742417,"TERMINAL",0,0,"Step 991, loss: 2.33132266998291, step time: 17.70162582397461ms\r\n",,terminal_output +11051,12742478,"TERMINAL",0,0,"Step 992, loss: 2.3695034980773926, step time: 17.609596252441406ms\r\n",,terminal_output +11052,12742539,"TERMINAL",0,0,"Step 993, loss: 2.318692922592163, step time: 17.53520965576172ms\r\n",,terminal_output +11053,12742600,"TERMINAL",0,0,"Step 994, loss: 2.334357261657715, step time: 17.349958419799805ms\r\n",,terminal_output +11054,12742661,"TERMINAL",0,0,"Step 995, loss: 2.3185665607452393, step time: 17.642974853515625ms\r\n",,terminal_output +11055,12742721,"TERMINAL",0,0,"Step 996, loss: 2.326364517211914, step time: 17.204999923706055ms\r\n",,terminal_output +11056,12742782,"TERMINAL",0,0,"Step 997, loss: 2.3192479610443115, step time: 17.345190048217773ms\r\n",,terminal_output +11057,12742845,"TERMINAL",0,0,"Step 998, loss: 2.3253653049468994, step time: 17.690420150756836ms\r\n",,terminal_output +11058,12742951,"TERMINAL",0,0,"Step 999, loss: 2.3335628509521484, step time: 17.363786697387695ms\r\n",,terminal_output +11059,12745783,"TERMINAL",0,0,"Step 1000, loss: 2.3381569385528564, step time: 26.880741119384766ms\r\n",,terminal_output +11060,12745868,"TERMINAL",0,0,"Step 1001, loss: 2.3047256469726562, step time: 25.000810623168945ms\r\n",,terminal_output +11061,12746002,"TERMINAL",0,0,"Step 1002, loss: 2.3138742446899414, step time: 20.71404457092285ms\r\nStep 1003, loss: 2.3145294189453125, step time: 20.40839195251465ms\r\n",,terminal_output +11062,12746067,"TERMINAL",0,0,"Step 1004, loss: 2.403388738632202, step time: 19.120216369628906ms\r\n",,terminal_output +11063,12746130,"TERMINAL",0,0,"Step 1005, loss: 2.3353748321533203, step time: 19.274234771728516ms\r\n",,terminal_output +11064,12746242,"TERMINAL",0,0,"Step 1006, loss: 2.319547653198242, step time: 19.268035888671875ms\r\nStep 1007, loss: 2.287912607192993, step time: 19.35434341430664ms\r\n",,terminal_output +11065,12746306,"TERMINAL",0,0,"Step 1008, loss: 2.2983734607696533, step time: 18.76044273376465ms\r\n",,terminal_output +11066,12746370,"TERMINAL",0,0,"Step 1009, loss: 2.2897887229919434, step time: 19.386768341064453ms\r\n",,terminal_output +11067,12746464,"TERMINAL",0,0,"Step 1010, 
loss: 2.299206495285034, step time: 18.868684768676758ms\r\n",,terminal_output +11068,12746574,"TERMINAL",0,0,"Step 1011, loss: 2.359449863433838, step time: 18.958091735839844ms\r\nStep 1012, loss: 2.2900636196136475, step time: 19.064664840698242ms\r\n",,terminal_output +11069,12746643,"TERMINAL",0,0,"Step 1013, loss: 2.276996374130249, step time: 18.94354820251465ms\r\n",,terminal_output +11070,12746704,"TERMINAL",0,0,"Step 1014, loss: 2.2975847721099854, step time: 18.774747848510742ms\r\n",,terminal_output +11071,12746768,"TERMINAL",0,0,"Step 1015, loss: 2.2869741916656494, step time: 19.33908462524414ms\r\n",,terminal_output +11072,12746830,"TERMINAL",0,0,"Step 1016, loss: 2.272282600402832, step time: 29.45113182067871ms\r\n",,terminal_output +11073,12746954,"TERMINAL",0,0,"Step 1017, loss: 2.288295030593872, step time: 19.70529556274414ms\r\nStep 1018, loss: 2.358670711517334, step time: 19.181013107299805ms\r\n",,terminal_output +11074,12747055,"TERMINAL",0,0,"Step 1019, loss: 2.4337480068206787, step time: 19.00959014892578ms\r\n",,terminal_output +11075,12747115,"TERMINAL",0,0,"Step 1020, loss: 2.7358336448669434, step time: 23.079872131347656ms\r\n",,terminal_output +11076,12747176,"TERMINAL",0,0,"Step 1021, loss: 2.3042054176330566, step time: 19.214391708374023ms\r\n",,terminal_output +11077,12747236,"TERMINAL",0,0,"Step 1022, loss: 2.4069502353668213, step time: 18.622398376464844ms\r\n",,terminal_output +11078,12747297,"TERMINAL",0,0,"Step 1023, loss: 2.570761203765869, step time: 18.886566162109375ms\r\n",,terminal_output +11079,12747360,"TERMINAL",0,0,"Step 1024, loss: 2.309499740600586, step time: 18.99862289428711ms\r\n",,terminal_output +11080,12747419,"TERMINAL",0,0,"Step 1025, loss: 2.2708184719085693, step time: 18.99099349975586ms\r\n",,terminal_output +11081,12747511,"TERMINAL",0,0,"Step 1026, loss: 2.2752580642700195, step time: 18.69821548461914ms\r\n",,terminal_output +11082,12747620,"TERMINAL",0,0,"Step 1027, loss: 2.2914187908172607, step time: 19.149065017700195ms\r\nStep 1028, loss: 2.466712236404419, step time: 18.695592880249023ms\r\n",,terminal_output +11083,12747682,"TERMINAL",0,0,"Step 1029, loss: 2.284726858139038, step time: 18.965959548950195ms\r\n",,terminal_output +11084,12747744,"TERMINAL",0,0,"Step 1030, loss: 2.4563021659851074, step time: 19.03843879699707ms\r\n",,terminal_output +11085,12747814,"TERMINAL",0,0,"Step 1031, loss: 2.281183958053589, step time: 18.952608108520508ms\r\n",,terminal_output +11086,12747867,"TERMINAL",0,0,"Step 1032, loss: 2.2722959518432617, step time: 18.688201904296875ms\r\n",,terminal_output +11087,12747939,"TERMINAL",0,0,"Step 1033, loss: 2.3013408184051514, step time: 19.016027450561523ms\r\n",,terminal_output +11088,12748003,"TERMINAL",0,0,"Step 1034, loss: 2.273477554321289, step time: 18.709182739257812ms\r\n",,terminal_output +11089,12748064,"TERMINAL",0,0,"Step 1035, loss: 2.7950708866119385, step time: 18.92828941345215ms\r\n",,terminal_output +11090,12748133,"TERMINAL",0,0,"Step 1036, loss: 2.2601137161254883, step time: 18.9211368560791ms\r\n",,terminal_output +11091,12748193,"TERMINAL",0,0,"Step 1037, loss: 2.446054458618164, step time: 18.849611282348633ms\r\n",,terminal_output +11092,12748289,"TERMINAL",0,0,"Step 1038, loss: 2.2644176483154297, step time: 18.605470657348633ms\r\n",,terminal_output +11093,12748343,"TERMINAL",0,0,"Step 1039, loss: 2.266934394836426, step time: 19.240617752075195ms\r\n",,terminal_output +11094,12748409,"TERMINAL",0,0,"Step 1040, loss: 2.261916399002075, step time: 
18.686294555664062ms\r\n",,terminal_output +11095,12748461,"TERMINAL",0,0,"Step 1041, loss: 2.2570157051086426, step time: 18.860340118408203ms\r\n",,terminal_output +11096,12748569,"TERMINAL",0,0,"Step 1042, loss: 2.262580633163452, step time: 19.042015075683594ms\r\nStep 1043, loss: 2.2905356884002686, step time: 18.912315368652344ms\r\n",,terminal_output +11097,12748635,"TERMINAL",0,0,"Step 1044, loss: 2.2795186042785645, step time: 18.644332885742188ms\r\n",,terminal_output +11098,12748689,"TERMINAL",0,0,"Step 1045, loss: 2.2706379890441895, step time: 18.990039825439453ms\r\n",,terminal_output +11099,12748784,"TERMINAL",0,0,"Step 1046, loss: 2.278201103210449, step time: 18.62931251525879ms\r\n",,terminal_output +11100,12748847,"TERMINAL",0,0,"Step 1047, loss: 2.253450393676758, step time: 18.764019012451172ms\r\n",,terminal_output +11101,12748909,"TERMINAL",0,0,"Step 1048, loss: 2.247875928878784, step time: 18.87059211730957ms\r\n",,terminal_output +11102,12748972,"TERMINAL",0,0,"Step 1049, loss: 2.281886339187622, step time: 18.816709518432617ms\r\n",,terminal_output +11103,12749035,"TERMINAL",0,0,"Step 1050, loss: 2.252222776412964, step time: 18.54681968688965ms\r\n",,terminal_output +11104,12749097,"TERMINAL",0,0,"Step 1051, loss: 2.322404384613037, step time: 19.09661293029785ms\r\n",,terminal_output +11105,12749202,"TERMINAL",0,0,"Step 1052, loss: 2.2465860843658447, step time: 18.476486206054688ms\r\nStep 1053, loss: 2.2706000804901123, step time: 18.614768981933594ms\r\n",,terminal_output +11106,12749262,"TERMINAL",0,0,"Step 1054, loss: 2.6322267055511475, step time: 18.707752227783203ms\r\n",,terminal_output +11107,12749323,"TERMINAL",0,0,"Step 1055, loss: 2.366704225540161, step time: 18.661975860595703ms\r\n",,terminal_output +11108,12749387,"TERMINAL",0,0,"Step 1056, loss: 2.3521721363067627, step time: 18.504619598388672ms\r\n",,terminal_output +11109,12749448,"TERMINAL",0,0,"Step 1057, loss: 2.2760231494903564, step time: 18.851518630981445ms\r\n",,terminal_output +11110,12749509,"TERMINAL",0,0,"Step 1058, loss: 2.277864694595337, step time: 18.584251403808594ms\r\n",,terminal_output +11111,12749611,"TERMINAL",0,0,"Step 1059, loss: 2.3728575706481934, step time: 18.503904342651367ms\r\n",,terminal_output +11112,12749669,"TERMINAL",0,0,"Step 1060, loss: 2.552882194519043, step time: 18.874645233154297ms\r\n",,terminal_output +11113,12749731,"TERMINAL",0,0,"Step 1061, loss: 2.2653110027313232, step time: 18.757104873657227ms\r\n",,terminal_output +11114,12749807,"TERMINAL",0,0,"Step 1062, loss: 2.319842576980591, step time: 18.689393997192383ms\r\n",,terminal_output +11115,12749890,"TERMINAL",0,0,"Step 1063, loss: 2.2505762577056885, step time: 19.040346145629883ms\r\nStep 1064, loss: 2.2817609310150146, step time: 18.740177154541016ms\r\n",,terminal_output +11116,12749995,"TERMINAL",0,0,"Step 1065, loss: 2.2555253505706787, step time: 18.57781410217285ms\r\n",,terminal_output +11117,12750057,"TERMINAL",0,0,"Step 1066, loss: 2.2267262935638428, step time: 19.237995147705078ms\r\n",,terminal_output +11118,12750169,"TERMINAL",0,0,"Step 1067, loss: 2.249286413192749, step time: 19.579410552978516ms\r\nStep 1068, loss: 2.2410614490509033, step time: 18.93162727355957ms\r\n",,terminal_output +11119,12750232,"TERMINAL",0,0,"Step 1069, loss: 2.242892026901245, step time: 19.022464752197266ms\r\n",,terminal_output +11120,12750338,"TERMINAL",0,0,"Step 1070, loss: 2.618776798248291, step time: 18.57161521911621ms\r\nStep 1071, loss: 2.2326722145080566, step time: 
18.652915954589844ms\r\n",,terminal_output +11121,12750432,"TERMINAL",0,0,"Step 1072, loss: 2.29561710357666, step time: 18.808603286743164ms\r\n",,terminal_output +11122,12750483,"TERMINAL",0,0,"Step 1073, loss: 2.237043619155884, step time: 21.149635314941406ms\r\n",,terminal_output +11123,12750575,"TERMINAL",0,0,"Step 1074, loss: 2.2401585578918457, step time: 18.67365837097168ms\r\n",,terminal_output +11124,12750626,"TERMINAL",0,0,"Step 1075, loss: 2.2245066165924072, step time: 18.89801025390625ms\r\n",,terminal_output +11125,12750729,"TERMINAL",0,0,"Step 1076, loss: 2.2226099967956543, step time: 18.477439880371094ms\r\nStep 1077, loss: 2.2274959087371826, step time: 18.635034561157227ms\r\n",,terminal_output +11126,12750822,"TERMINAL",0,0,"Step 1078, loss: 2.347820281982422, step time: 18.78523826599121ms\r\n",,terminal_output +11127,12750872,"TERMINAL",0,0,"Step 1079, loss: 2.2666022777557373, step time: 18.58806610107422ms\r\n",,terminal_output +11128,12750979,"TERMINAL",0,0,"Step 1080, loss: 2.223522186279297, step time: 18.70417594909668ms\r\nStep 1081, loss: 2.273740291595459, step time: 19.04296875ms\r\n",,terminal_output +11129,12751106,"TERMINAL",0,0,"Step 1082, loss: 2.2328941822052, step time: 18.731117248535156ms\r\nStep 1083, loss: 2.221238136291504, step time: 18.811464309692383ms\r\n",,terminal_output +11130,12751201,"TERMINAL",0,0,"Step 1084, loss: 2.5139172077178955, step time: 18.92995834350586ms\r\n",,terminal_output +11131,12751253,"TERMINAL",0,0,"Step 1085, loss: 2.233656167984009, step time: 18.783092498779297ms\r\n",,terminal_output +11132,12751357,"TERMINAL",0,0,"Step 1086, loss: 2.208498001098633, step time: 18.75615119934082ms\r\nStep 1087, loss: 2.760406017303467, step time: 19.002437591552734ms\r\n",,terminal_output +11133,12751418,"TERMINAL",0,0,"Step 1088, loss: 2.243770122528076, step time: 18.558263778686523ms\r\n",,terminal_output +11134,12751483,"TERMINAL",0,0,"Step 1089, loss: 2.21783709526062, step time: 18.767356872558594ms\r\n",,terminal_output +11135,12751546,"TERMINAL",0,0,"Step 1090, loss: 2.2309885025024414, step time: 18.887758255004883ms\r\n",,terminal_output +11136,12751610,"TERMINAL",0,0,"Step 1091, loss: 2.2202556133270264, step time: 18.722057342529297ms\r\n",,terminal_output +11137,12751683,"TERMINAL",0,0,"Step 1092, loss: 2.208395004272461, step time: 18.665313720703125ms\r\n",,terminal_output +11138,12751742,"TERMINAL",0,0,"Step 1093, loss: 2.211848020553589, step time: 19.57559585571289ms\r\n",,terminal_output +11139,12751835,"TERMINAL",0,0,"Step 1094, loss: 2.972702741622925, step time: 18.767595291137695ms\r\n",,terminal_output +11140,12751891,"TERMINAL",0,0,"Step 1095, loss: 2.217564344406128, step time: 18.806934356689453ms\r\n",,terminal_output +11141,12751996,"TERMINAL",0,0,"Step 1096, loss: 2.2188720703125, step time: 18.997907638549805ms\r\nStep 1097, loss: 2.207148790359497, step time: 18.840789794921875ms\r\n",,terminal_output +11142,12752059,"TERMINAL",0,0,"Step 1098, loss: 2.4226832389831543, step time: 18.60499382019043ms\r\n",,terminal_output +11143,12752122,"TERMINAL",0,0,"Step 1099, loss: 2.1909422874450684, step time: 19.14834976196289ms\r\n",,terminal_output +11144,12752184,"TERMINAL",0,0,"Step 1100, loss: 2.200113534927368, step time: 18.73636245727539ms\r\n",,terminal_output +11145,12752245,"TERMINAL",0,0,"Step 1101, loss: 2.3002259731292725, step time: 18.761634826660156ms\r\n",,terminal_output +11146,12752319,"TERMINAL",0,0,"Step 1102, loss: 2.243957996368408, step time: 
18.920183181762695ms\r\n",,terminal_output +11147,12752426,"TERMINAL",0,0,"Step 1103, loss: 2.2075746059417725, step time: 18.818378448486328ms\r\n",,terminal_output +11148,12752498,"TERMINAL",0,0,"Step 1104, loss: 2.1923084259033203, step time: 18.680810928344727ms\r\nStep 1105, loss: 2.2531321048736572, step time: 19.21844482421875ms\r\n",,terminal_output +11149,12752563,"TERMINAL",0,0,"Step 1106, loss: 2.199138641357422, step time: 18.648624420166016ms\r\n",,terminal_output +11150,12752635,"TERMINAL",0,0,"Step 1107, loss: 2.1966724395751953, step time: 19.046783447265625ms\r\n",,terminal_output +11151,12752690,"TERMINAL",0,0,"Step 1108, loss: 2.2280681133270264, step time: 18.87655258178711ms\r\n",,terminal_output +11152,12752752,"TERMINAL",0,0,"Step 1109, loss: 2.1901087760925293, step time: 18.75448226928711ms\r\n",,terminal_output +11153,12752866,"TERMINAL",0,0,"Step 1110, loss: 2.2703752517700195, step time: 18.72849464416504ms\r\n",,terminal_output +11154,12752879,"TERMINAL",0,0,"Step 1111, loss: 2.184553384780884, step time: 19.109010696411133ms\r\n",,terminal_output +11155,12752972,"TERMINAL",0,0,"Step 1112, loss: 2.1821517944335938, step time: 18.62502098083496ms\r\n",,terminal_output +11156,12753147,"TERMINAL",0,0,"Step 1113, loss: 2.1874396800994873, step time: 18.763065338134766ms\r\nStep 1114, loss: 2.464348316192627, step time: 18.852949142456055ms\r\n",,terminal_output +11157,12753211,"TERMINAL",0,0,"Step 1115, loss: 2.2148189544677734, step time: 18.738269805908203ms\r\nStep 1116, loss: 2.1932201385498047, step time: 18.706560134887695ms\r\n",,terminal_output +11158,12753270,"TERMINAL",0,0,"Step 1117, loss: 2.2457668781280518, step time: 19.01412010192871ms\r\n",,terminal_output +11159,12753329,"TERMINAL",0,0,"Step 1118, loss: 2.208475112915039, step time: 18.5546875ms\r\n",,terminal_output +11160,12753392,"TERMINAL",0,0,"Step 1119, loss: 2.1837310791015625, step time: 18.69964599609375ms\r\n",,terminal_output +11161,12753489,"TERMINAL",0,0,"Step 1120, loss: 2.564183473587036, step time: 18.941164016723633ms\r\n",,terminal_output +11162,12753540,"TERMINAL",0,0,"Step 1121, loss: 2.183297634124756, step time: 18.872499465942383ms\r\n",,terminal_output +11163,12753644,"TERMINAL",0,0,"Step 1122, loss: 2.313441514968872, step time: 18.627166748046875ms\r\nStep 1123, loss: 2.2220988273620605, step time: 19.191980361938477ms\r\n",,terminal_output +11164,12753715,"TERMINAL",0,0,"Step 1124, loss: 2.197404146194458, step time: 21.400928497314453ms\r\n",,terminal_output +11165,12753833,"TERMINAL",0,0,"Step 1125, loss: 2.1448733806610107, step time: 18.807172775268555ms\r\nStep 1126, loss: 2.2634670734405518, step time: 19.124507904052734ms\r\n",,terminal_output +11166,12753935,"TERMINAL",0,0,"Step 1127, loss: 2.226943016052246, step time: 18.715858459472656ms\r\n",,terminal_output +11167,12753996,"TERMINAL",0,0,"Step 1128, loss: 2.165938377380371, step time: 18.573760986328125ms\r\n",,terminal_output +11168,12754057,"TERMINAL",0,0,"Step 1129, loss: 2.283677339553833, step time: 19.021034240722656ms\r\n",,terminal_output +11169,12754118,"TERMINAL",0,0,"Step 1130, loss: 2.1693332195281982, step time: 18.568754196166992ms\r\n",,terminal_output +11170,12754223,"TERMINAL",0,0,"Step 1131, loss: 2.190481424331665, step time: 18.863439559936523ms\r\nStep 1132, loss: 2.156221628189087, step time: 18.848657608032227ms\r\n",,terminal_output +11171,12754319,"TERMINAL",0,0,"Step 1133, loss: 2.178338050842285, step time: 18.2955265045166ms\r\n",,terminal_output 
+11172,12754366,"TERMINAL",0,0,"Step 1134, loss: 2.187467575073242, step time: 18.68891716003418ms\r\n",,terminal_output +11173,12754472,"TERMINAL",0,0,"Step 1135, loss: 2.4839043617248535, step time: 18.561363220214844ms\r\nStep 1136, loss: 2.1668918132781982, step time: 18.661022186279297ms\r\n",,terminal_output +11174,12754538,"TERMINAL",0,0,"Step 1137, loss: 2.160487413406372, step time: 18.221378326416016ms\r\n",,terminal_output +11175,12754600,"TERMINAL",0,0,"Step 1138, loss: 2.1732075214385986, step time: 18.887042999267578ms\r\n",,terminal_output +11176,12754663,"TERMINAL",0,0,"Step 1139, loss: 2.1629526615142822, step time: 18.416166305541992ms\r\n",,terminal_output +11177,12754726,"TERMINAL",0,0,"Step 1140, loss: 2.1672675609588623, step time: 18.66769790649414ms\r\n",,terminal_output +11178,12754795,"TERMINAL",0,0,"Step 1141, loss: 2.159604072570801, step time: 18.59760284423828ms\r\n",,terminal_output +11179,12754856,"TERMINAL",0,0,"Step 1142, loss: 2.168959379196167, step time: 18.642663955688477ms\r\n",,terminal_output +11180,12754910,"TERMINAL",0,0,"Step 1143, loss: 2.180339813232422, step time: 18.29075813293457ms\r\n",,terminal_output +11181,12754976,"TERMINAL",0,0,"Step 1144, loss: 2.188857316970825, step time: 18.76354217529297ms\r\n",,terminal_output +11182,12755038,"TERMINAL",0,0,"Step 1145, loss: 2.180337429046631, step time: 19.310474395751953ms\r\n",,terminal_output +11183,12755101,"TERMINAL",0,0,"Step 1146, loss: 2.1701877117156982, step time: 19.330501556396484ms\r\n",,terminal_output +11184,12755163,"TERMINAL",0,0,"Step 1147, loss: 2.180654287338257, step time: 18.777132034301758ms\r\n",,terminal_output +11185,12755227,"TERMINAL",0,0,"Step 1148, loss: 2.16194748878479, step time: 18.872737884521484ms\r\n",,terminal_output +11186,12755290,"TERMINAL",0,0,"Step 1149, loss: 2.4411113262176514, step time: 18.535137176513672ms\r\n",,terminal_output +11187,12755398,"TERMINAL",0,0,"Step 1150, loss: 2.2006595134735107, step time: 19.023656845092773ms\r\n",,terminal_output +11188,12755449,"TERMINAL",0,0,"Step 1151, loss: 2.145359992980957, step time: 18.0816650390625ms\r\n",,terminal_output +11189,12755515,"TERMINAL",0,0,"Step 1152, loss: 2.148434638977051, step time: 18.612384796142578ms\r\n",,terminal_output +11190,12755570,"TERMINAL",0,0,"Step 1153, loss: 2.2661805152893066, step time: 18.47052574157715ms\r\n",,terminal_output +11191,12755630,"TERMINAL",0,0,"Step 1154, loss: 2.141906976699829, step time: 18.53156089782715ms\r\n",,terminal_output +11192,12755734,"TERMINAL",0,0,"Step 1155, loss: 2.1567881107330322, step time: 18.21589469909668ms\r\nStep 1156, loss: 2.1726062297821045, step time: 18.90850067138672ms\r\n",,terminal_output +11193,12755808,"TERMINAL",0,0,"Step 1157, loss: 2.361621141433716, step time: 18.109560012817383ms\r\n",,terminal_output +11194,12755862,"TERMINAL",0,0,"Step 1158, loss: 2.1628811359405518, step time: 18.38994026184082ms\r\n",,terminal_output +11195,12755989,"TERMINAL",0,0,"Step 1159, loss: 2.1759285926818848, step time: 18.449783325195312ms\r\nStep 1160, loss: 2.1428346633911133, step time: 18.762826919555664ms\r\n",,terminal_output +11196,12756084,"TERMINAL",0,0,"Step 1161, loss: 2.1491305828094482, step time: 18.465757369995117ms\r\n",,terminal_output +11197,12756145,"TERMINAL",0,0,"Step 1162, loss: 2.14345121383667, step time: 18.973350524902344ms\r\n",,terminal_output +11198,12756250,"TERMINAL",0,0,"Step 1163, loss: 2.135535955429077, step time: 18.26000213623047ms\r\nStep 1164, loss: 2.1354105472564697, step time: 
18.57447624206543ms\r\n",,terminal_output +11199,12756367,"TERMINAL",0,0,"Step 1165, loss: 2.1318347454071045, step time: 18.361806869506836ms\r\nStep 1166, loss: 2.144516706466675, step time: 18.41902732849121ms\r\n",,terminal_output +11200,12756434,"TERMINAL",0,0,"Step 1167, loss: 2.1265182495117188, step time: 18.04327964782715ms\r\n",,terminal_output +11201,12756802,"TERMINAL",0,0,"Step 1168, loss: 2.1224231719970703, step time: 302.2174835205078ms\r\nStep 1169, loss: 2.132593870162964, step time: 32.76395797729492ms\r\n",,terminal_output +11202,12756908,"TERMINAL",0,0,"Step 1170, loss: 2.1286306381225586, step time: 22.855281829833984ms\r\n",,terminal_output +11203,12756966,"TERMINAL",0,0,"Step 1171, loss: 2.1521291732788086, step time: 20.13850212097168ms\r\n",,terminal_output +11204,12757027,"TERMINAL",0,0,"Step 1172, loss: 2.111985683441162, step time: 19.206523895263672ms\r\n",,terminal_output +11205,12757087,"TERMINAL",0,0,"Step 1173, loss: 2.1389057636260986, step time: 18.479585647583008ms\r\n",,terminal_output +11206,12757155,"TERMINAL",0,0,"Step 1174, loss: 2.119419813156128, step time: 19.116640090942383ms\r\n",,terminal_output +11207,12757214,"TERMINAL",0,0,"Step 1175, loss: 3.078709602355957, step time: 18.416166305541992ms\r\n",,terminal_output +11208,12757319,"TERMINAL",0,0,"Step 1176, loss: 2.1353583335876465, step time: 18.621444702148438ms\r\nStep 1177, loss: 2.1217970848083496, step time: 18.587350845336914ms\r\n",,terminal_output +11209,12757412,"TERMINAL",0,0,"Step 1178, loss: 2.148664951324463, step time: 18.655061721801758ms\r\n",,terminal_output +11210,12757463,"TERMINAL",0,0,"Step 1179, loss: 2.402489185333252, step time: 18.178939819335938ms\r\n",,terminal_output +11211,12757554,"TERMINAL",0,0,"Step 1180, loss: 2.1079037189483643, step time: 18.871307373046875ms\r\n",,terminal_output +11212,12757605,"TERMINAL",0,0,"Step 1181, loss: 2.109936237335205, step time: 18.1884765625ms\r\n",,terminal_output +11213,12757656,"TERMINAL",0,0,"Step 1182, loss: 2.2484798431396484, step time: 18.50128173828125ms\r\n",,terminal_output +11214,12757761,"TERMINAL",0,0,"Step 1183, loss: 2.1295831203460693, step time: 18.585681915283203ms\r\nStep 1184, loss: 2.1212656497955322, step time: 18.541574478149414ms\r\n",,terminal_output +11215,12757892,"TERMINAL",0,0,"Step 1185, loss: 2.1195623874664307, step time: 18.105506896972656ms\r\nStep 1186, loss: 2.1350367069244385, step time: 18.971920013427734ms\r\n",,terminal_output +11216,12757963,"TERMINAL",0,0,"Step 1187, loss: 2.1037368774414062, step time: 18.030405044555664ms\r\n",,terminal_output +11217,12758017,"TERMINAL",0,0,"Step 1188, loss: 2.118344306945801, step time: 18.54729652404785ms\r\n",,terminal_output +11218,12758080,"TERMINAL",0,0,"Step 1189, loss: 2.1113195419311523, step time: 18.485546112060547ms\r\n",,terminal_output +11219,12758141,"TERMINAL",0,0,"Step 1190, loss: 2.102046012878418, step time: 18.90707015991211ms\r\n",,terminal_output +11220,12758201,"TERMINAL",0,0,"Step 1191, loss: 2.1374034881591797, step time: 18.372058868408203ms\r\n",,terminal_output +11221,12758264,"TERMINAL",0,0,"Step 1192, loss: 2.44649600982666, step time: 18.933773040771484ms\r\n",,terminal_output +11222,12758327,"TERMINAL",0,0,"Step 1193, loss: 2.1739282608032227, step time: 18.2955265045166ms\r\n",,terminal_output +11223,12758428,"TERMINAL",0,0,"Step 1194, loss: 2.088898181915283, step time: 18.643856048583984ms\r\n",,terminal_output +11224,12758491,"TERMINAL",0,0,"Step 1195, loss: 2.120002269744873, step time: 
18.66888999938965ms\r\n",,terminal_output +11225,12758636,"TERMINAL",0,0,"Step 1196, loss: 2.9710192680358887, step time: 18.6460018157959ms\r\nStep 1197, loss: 2.0918684005737305, step time: 18.326282501220703ms\r\n",,terminal_output +11226,12758647,"TERMINAL",0,0,"Step 1198, loss: 2.090871572494507, step time: 18.951892852783203ms\r\n",,terminal_output +11227,12758748,"TERMINAL",0,0,"Step 1199, loss: 2.2108163833618164, step time: 18.411874771118164ms\r\n",,terminal_output +11228,12758818,"TERMINAL",0,0,"Step 1200, loss: 2.1117281913757324, step time: 18.622875213623047ms\r\n",,terminal_output +11229,12758869,"TERMINAL",0,0,"Step 1201, loss: 2.104104995727539, step time: 18.59140396118164ms\r\n",,terminal_output +11230,12758933,"TERMINAL",0,0,"Step 1202, loss: 2.0899856090545654, step time: 18.527984619140625ms\r\n",,terminal_output +11231,12758992,"TERMINAL",0,0,"Step 1203, loss: 2.0881690979003906, step time: 19.040346145629883ms\r\n",,terminal_output +11232,12759054,"TERMINAL",0,0,"Step 1204, loss: 2.0969653129577637, step time: 18.70584487915039ms\r\n",,terminal_output +11233,12759115,"TERMINAL",0,0,"Step 1205, loss: 2.0782876014709473, step time: 18.898963928222656ms\r\n",,terminal_output +11234,12759174,"TERMINAL",0,0,"Step 1206, loss: 2.1154491901397705, step time: 18.657207489013672ms\r\n",,terminal_output +11235,12759241,"TERMINAL",0,0,"Step 1207, loss: 2.090463161468506, step time: 18.679380416870117ms\r\n",,terminal_output +11236,12759306,"TERMINAL",0,0,"Step 1208, loss: 2.100343942642212, step time: 18.492460250854492ms\r\n",,terminal_output +11237,12759364,"TERMINAL",0,0,"Step 1209, loss: 2.1155896186828613, step time: 18.769025802612305ms\r\n",,terminal_output +11238,12759427,"TERMINAL",0,0,"Step 1210, loss: 2.1055426597595215, step time: 18.9666748046875ms\r\n",,terminal_output +11239,12759490,"TERMINAL",0,0,"Step 1211, loss: 2.0824947357177734, step time: 18.93329620361328ms\r\n",,terminal_output +11240,12759549,"TERMINAL",0,0,"Step 1212, loss: 2.069267988204956, step time: 18.495798110961914ms\r\n",,terminal_output +11241,12759619,"TERMINAL",0,0,"Step 1213, loss: 2.130350351333618, step time: 19.23537254333496ms\r\n",,terminal_output +11242,12759674,"TERMINAL",0,0,"Step 1214, loss: 2.0819668769836426, step time: 18.39470863342285ms\r\n",,terminal_output +11243,12759735,"TERMINAL",0,0,"Step 1215, loss: 2.1105458736419678, step time: 18.751144409179688ms\r\n",,terminal_output +11244,12759809,"TERMINAL",0,0,"Step 1216, loss: 2.0980093479156494, step time: 18.857240676879883ms\r\n",,terminal_output +11245,12759861,"TERMINAL",0,0,"Step 1217, loss: 2.082695484161377, step time: 18.264055252075195ms\r\n",,terminal_output +11246,12759924,"TERMINAL",0,0,"Step 1218, loss: 2.1102874279022217, step time: 18.582820892333984ms\r\n",,terminal_output +11247,12759983,"TERMINAL",0,0,"Step 1219, loss: 2.102776527404785, step time: 19.176006317138672ms\r\n",,terminal_output +11248,12760075,"TERMINAL",0,0,"Step 1220, loss: 2.07114577293396, step time: 18.801212310791016ms\r\n",,terminal_output +11249,12760126,"TERMINAL",0,0,"Step 1221, loss: 2.0725536346435547, step time: 21.404743194580078ms\r\n",,terminal_output +11250,12760218,"TERMINAL",0,0,"Step 1222, loss: 2.2318360805511475, step time: 19.783496856689453ms\r\n",,terminal_output +11251,12760330,"TERMINAL",0,0,"Step 1223, loss: 2.0552937984466553, step time: 18.94402503967285ms\r\nStep 1224, loss: 2.0762546062469482, step time: 18.001079559326172ms\r\n",,terminal_output +11252,12760393,"TERMINAL",0,0,"Step 1225, loss: 
2.0580556392669678, step time: 19.34337615966797ms\r\n",,terminal_output +11253,12760454,"TERMINAL",0,0,"Step 1226, loss: 2.059671640396118, step time: 18.590927124023438ms\r\n",,terminal_output +11254,12760517,"TERMINAL",0,0,"Step 1227, loss: 2.0663607120513916, step time: 18.351078033447266ms\r\n",,terminal_output +11255,12760581,"TERMINAL",0,0,"Step 1228, loss: 2.111372470855713, step time: 19.143104553222656ms\r\n",,terminal_output +11256,12760639,"TERMINAL",0,0,"Step 1229, loss: 2.0641660690307617, step time: 18.918752670288086ms\r\n",,terminal_output +11257,12760753,"TERMINAL",0,0,"Step 1230, loss: 2.052488327026367, step time: 18.602609634399414ms\r\nStep 1231, loss: 2.445032835006714, step time: 19.245147705078125ms\r\n",,terminal_output +11258,12760825,"TERMINAL",0,0,"Step 1232, loss: 2.040370225906372, step time: 18.581628799438477ms\r\n",,terminal_output +11259,12760878,"TERMINAL",0,0,"Step 1233, loss: 2.2023563385009766, step time: 18.88728141784668ms\r\n",,terminal_output +11260,12760945,"TERMINAL",0,0,"Step 1234, loss: 2.0598533153533936, step time: 18.473148345947266ms\r\n",,terminal_output +11261,12761006,"TERMINAL",0,0,"Step 1235, loss: 2.0573885440826416, step time: 18.941879272460938ms\r\n",,terminal_output +11262,12761070,"TERMINAL",0,0,"Step 1236, loss: 2.103379249572754, step time: 18.877029418945312ms\r\n",,terminal_output +11263,12761133,"TERMINAL",0,0,"Step 1237, loss: 2.0673680305480957, step time: 18.754959106445312ms\r\n",,terminal_output +11264,12761196,"TERMINAL",0,0,"Step 1238, loss: 2.0626020431518555, step time: 18.61429214477539ms\r\n",,terminal_output +11265,12761269,"TERMINAL",0,0,"Step 1239, loss: 2.1007683277130127, step time: 18.824100494384766ms\r\n",,terminal_output +11266,12761334,"TERMINAL",0,0,"Step 1240, loss: 2.229418992996216, step time: 18.923521041870117ms\r\n",,terminal_output +11267,12761402,"TERMINAL",0,0,"Step 1241, loss: 2.0907931327819824, step time: 18.741846084594727ms\r\n",,terminal_output +11268,12761467,"TERMINAL",0,0,"Step 1242, loss: 2.0674283504486084, step time: 18.541812896728516ms\r\n",,terminal_output +11269,12761529,"TERMINAL",0,0,"Step 1243, loss: 2.0494542121887207, step time: 19.119739532470703ms\r\n",,terminal_output +11270,12761595,"TERMINAL",0,0,"Step 1244, loss: 2.7990758419036865, step time: 18.150806427001953ms\r\n",,terminal_output +11271,12761656,"TERMINAL",0,0,"Step 1245, loss: 2.0791213512420654, step time: 18.993377685546875ms\r\n",,terminal_output +11272,12761719,"TERMINAL",0,0,"Step 1246, loss: 2.060063123703003, step time: 18.866300582885742ms\r\n",,terminal_output +11273,12761786,"TERMINAL",0,0,"Step 1247, loss: 2.294869899749756, step time: 18.38064193725586ms\r\n",,terminal_output +11274,12761891,"TERMINAL",0,0,"Step 1248, loss: 2.104671001434326, step time: 18.525362014770508ms\r\nStep 1249, loss: 2.165252447128296, step time: 19.150495529174805ms\r\n",,terminal_output +11275,12761954,"TERMINAL",0,0,"Step 1250, loss: 2.2813963890075684, step time: 18.618106842041016ms\r\n",,terminal_output +11276,12762056,"TERMINAL",0,0,"Step 1251, loss: 2.1027984619140625, step time: 18.856525421142578ms\r\n",,terminal_output +11277,12762106,"TERMINAL",0,0,"Step 1252, loss: 2.130364418029785, step time: 19.006013870239258ms\r\n",,terminal_output +11278,12762210,"TERMINAL",0,0,"Step 1253, loss: 2.090595245361328, step time: 18.784761428833008ms\r\nStep 1254, loss: 2.0228826999664307, step time: 18.02229881286621ms\r\n",,terminal_output +11279,12762303,"TERMINAL",0,0,"Step 1255, loss: 2.0543673038482666, step time: 
19.150972366333008ms\r\n",,terminal_output +11280,12762353,"TERMINAL",0,0,"Step 1256, loss: 2.0887792110443115, step time: 19.64545249938965ms\r\n",,terminal_output +11281,12762445,"TERMINAL",0,0,"Step 1257, loss: 2.1300570964813232, step time: 18.40972900390625ms\r\n",,terminal_output +11282,12762498,"TERMINAL",0,0,"Step 1258, loss: 2.079676628112793, step time: 18.90873908996582ms\r\n",,terminal_output +11283,12762603,"TERMINAL",0,0,"Step 1259, loss: 2.0344204902648926, step time: 18.77593994140625ms\r\nStep 1260, loss: 2.4137442111968994, step time: 18.609046936035156ms\r\n",,terminal_output +11284,12762674,"TERMINAL",0,0,"Step 1261, loss: 2.042973756790161, step time: 19.07968521118164ms\r\n",,terminal_output +11285,12762781,"TERMINAL",0,0,"Step 1262, loss: 2.1768200397491455, step time: 18.42188835144043ms\r\nStep 1263, loss: 2.0726804733276367, step time: 18.979787826538086ms\r\n",,terminal_output +11286,12762907,"TERMINAL",0,0,"Step 1264, loss: 2.055572986602783, step time: 18.378496170043945ms\r\nStep 1265, loss: 2.0806617736816406, step time: 18.998146057128906ms\r\n",,terminal_output +11287,12762972,"TERMINAL",0,0,"Step 1266, loss: 2.1425037384033203, step time: 18.782615661621094ms\r\n",,terminal_output +11288,12763037,"TERMINAL",0,0,"Step 1267, loss: 2.1061816215515137, step time: 18.811941146850586ms\r\n",,terminal_output +11289,12763134,"TERMINAL",0,0,"Step 1268, loss: 2.078256607055664, step time: 18.998384475708008ms\r\n",,terminal_output +11290,12763191,"TERMINAL",0,0,"Step 1269, loss: 2.0680127143859863, step time: 18.674373626708984ms\r\n",,terminal_output +11291,12763253,"TERMINAL",0,0,"Step 1270, loss: 2.0260863304138184, step time: 19.01698112487793ms\r\n",,terminal_output +11292,12763357,"TERMINAL",0,0,"Step 1271, loss: 2.0430448055267334, step time: 18.87226104736328ms\r\nStep 1272, loss: 2.114325761795044, step time: 18.552303314208984ms\r\n",,terminal_output +11293,12763417,"TERMINAL",0,0,"Step 1273, loss: 2.0387070178985596, step time: 19.14048194885254ms\r\n",,terminal_output +11294,12763478,"TERMINAL",0,0,"Step 1274, loss: 2.0327022075653076, step time: 18.10741424560547ms\r\n",,terminal_output +11295,12763540,"TERMINAL",0,0,"Step 1275, loss: 2.0224435329437256, step time: 18.8143253326416ms\r\n",,terminal_output +11296,12763650,"TERMINAL",0,0,"Step 1276, loss: 2.0744504928588867, step time: 18.729686737060547ms\r\n",,terminal_output +11297,12763659,"TERMINAL",0,0,"Step 1277, loss: 2.037841558456421, step time: 18.424034118652344ms\r\n",,terminal_output +11298,12763755,"TERMINAL",0,0,"Step 1278, loss: 2.0402448177337646, step time: 18.526792526245117ms\r\n",,terminal_output +11299,12763861,"TERMINAL",0,0,"Step 1279, loss: 2.0299668312072754, step time: 19.214391708374023ms\r\nStep 1280, loss: 2.0681817531585693, step time: 18.58043670654297ms\r\n",,terminal_output +11300,12763925,"TERMINAL",0,0,"Step 1281, loss: 2.0402095317840576, step time: 18.49222183227539ms\r\n",,terminal_output +11301,12763983,"TERMINAL",0,0,"Step 1282, loss: 2.040320634841919, step time: 18.589258193969727ms\r\n",,terminal_output +11302,12764046,"TERMINAL",0,0,"Step 1283, loss: 2.0201973915100098, step time: 18.624544143676758ms\r\n",,terminal_output +11303,12764108,"TERMINAL",0,0,"Step 1284, loss: 2.026705741882324, step time: 17.673254013061523ms\r\n",,terminal_output +11304,12764215,"TERMINAL",0,0,"Step 1285, loss: 1.9960862398147583, step time: 19.02031898498535ms\r\n",,terminal_output +11305,12764267,"TERMINAL",0,0,"Step 1286, loss: 2.0559656620025635, step time: 
19.65498924255371ms\r\n",,terminal_output +11306,12764411,"TERMINAL",0,0,"Step 1287, loss: 2.072159767150879, step time: 18.294095993041992ms\r\nStep 1288, loss: 2.037788152694702, step time: 18.675565719604492ms\r\n",,terminal_output +11307,12764463,"TERMINAL",0,0,"Step 1289, loss: 1.9841822385787964, step time: 18.57924461364746ms\r\n",,terminal_output +11308,12764559,"TERMINAL",0,0,"Step 1290, loss: 2.0246737003326416, step time: 18.210411071777344ms\r\n",,terminal_output +11309,12764611,"TERMINAL",0,0,"Step 1291, loss: 2.0020854473114014, step time: 18.916606903076172ms\r\n",,terminal_output +11310,12764662,"TERMINAL",0,0,"Step 1292, loss: 1.9922226667404175, step time: 18.338918685913086ms\r\n",,terminal_output +11311,12764823,"TERMINAL",0,0,"Step 1293, loss: 2.1020665168762207, step time: 18.54562759399414ms\r\nStep 1294, loss: 1.9906328916549683, step time: 18.003463745117188ms\r\n",,terminal_output +11312,12764895,"TERMINAL",0,0,"Step 1295, loss: 2.020110607147217, step time: 18.726587295532227ms\r\nStep 1296, loss: 2.1230218410491943, step time: 18.339872360229492ms\r\n",,terminal_output +11313,12764991,"TERMINAL",0,0,"Step 1297, loss: 1.9979444742202759, step time: 18.48602294921875ms\r\n",,terminal_output +11314,12765055,"TERMINAL",0,0,"Step 1298, loss: 2.542527914047241, step time: 19.797325134277344ms\r\n",,terminal_output +11315,12765116,"TERMINAL",0,0,"Step 1299, loss: 2.1506271362304688, step time: 20.641803741455078ms\r\n",,terminal_output +11316,12765220,"TERMINAL",0,0,"Step 1300, loss: 1.998291015625, step time: 20.4925537109375ms\r\nStep 1301, loss: 1.9857304096221924, step time: 19.54174041748047ms\r\n",,terminal_output +11317,12765284,"TERMINAL",0,0,"Step 1302, loss: 1.9927419424057007, step time: 19.167661666870117ms\r\n",,terminal_output +11318,12765344,"TERMINAL",0,0,"Step 1303, loss: 1.9914674758911133, step time: 19.544124603271484ms\r\n",,terminal_output +11319,12765406,"TERMINAL",0,0,"Step 1304, loss: 2.0042195320129395, step time: 18.459796905517578ms\r\n",,terminal_output +11320,12765467,"TERMINAL",0,0,"Step 1305, loss: 1.9991060495376587, step time: 18.885135650634766ms\r\n",,terminal_output +11321,12765529,"TERMINAL",0,0,"Step 1306, loss: 2.000349283218384, step time: 19.08707618713379ms\r\n",,terminal_output +11322,12765589,"TERMINAL",0,0,"Step 1307, loss: 1.9917571544647217, step time: 18.201112747192383ms\r\n",,terminal_output +11323,12765651,"TERMINAL",0,0,"Step 1308, loss: 2.009218215942383, step time: 18.543720245361328ms\r\n",,terminal_output +11324,12765716,"TERMINAL",0,0,"Step 1309, loss: 1.9803836345672607, step time: 19.155263900756836ms\r\n",,terminal_output +11325,12765780,"TERMINAL",0,0,"Step 1310, loss: 2.002067804336548, step time: 18.514394760131836ms\r\n",,terminal_output +11326,12765846,"TERMINAL",0,0,"Step 1311, loss: 1.9784311056137085, step time: 18.694400787353516ms\r\n",,terminal_output +11327,12765945,"TERMINAL",0,0,"Step 1312, loss: 2.0172252655029297, step time: 18.906116485595703ms\r\n",,terminal_output +11328,12766006,"TERMINAL",0,0,"Step 1313, loss: 2.051313877105713, step time: 18.9669132232666ms\r\n",,terminal_output +11329,12766069,"TERMINAL",0,0,"Step 1314, loss: 2.0190298557281494, step time: 18.030405044555664ms\r\n",,terminal_output +11330,12766126,"TERMINAL",0,0,"Step 1315, loss: 1.9850386381149292, step time: 19.08707618713379ms\r\n",,terminal_output +11331,12766187,"TERMINAL",0,0,"Step 1316, loss: 1.9815679788589478, step time: 18.541812896728516ms\r\n",,terminal_output +11332,12766246,"TERMINAL",0,0,"Step 1317, loss: 
1.987366795539856, step time: 18.877744674682617ms\r\n",,terminal_output +11333,12766351,"TERMINAL",0,0,"Step 1318, loss: 1.9879010915756226, step time: 18.917322158813477ms\r\nStep 1319, loss: 1.971440076828003, step time: 18.772363662719727ms\r\n",,terminal_output +11334,12766445,"TERMINAL",0,0,"Step 1320, loss: 2.005528688430786, step time: 18.477439880371094ms\r\n",,terminal_output +11335,12766496,"TERMINAL",0,0,"Step 1321, loss: 1.9888525009155273, step time: 19.024133682250977ms\r\n",,terminal_output +11336,12766642,"TERMINAL",0,0,"Step 1322, loss: 2.000094413757324, step time: 18.271207809448242ms\r\nStep 1323, loss: 1.9922517538070679, step time: 18.07093620300293ms\r\n",,terminal_output +11337,12766693,"TERMINAL",0,0,"Step 1324, loss: 1.966746211051941, step time: 18.149852752685547ms\r\n",,terminal_output +11338,12766797,"TERMINAL",0,0,"Step 1325, loss: 1.9494044780731201, step time: 17.495155334472656ms\r\nStep 1326, loss: 1.958966851234436, step time: 17.202138900756836ms\r\n",,terminal_output +11339,12767091,"TERMINAL",0,0,"Step 1327, loss: 2.396625518798828, step time: 293.9932346343994ms\r\n",,terminal_output +11340,12767157,"TERMINAL",0,0,"Step 1328, loss: 1.9657543897628784, step time: 24.81675148010254ms\r\n",,terminal_output +11341,12767220,"TERMINAL",0,0,"Step 1329, loss: 1.9576020240783691, step time: 19.911766052246094ms\r\n",,terminal_output +11342,12767284,"TERMINAL",0,0,"Step 1330, loss: 1.9644612073898315, step time: 18.60666275024414ms\r\n",,terminal_output +11343,12767347,"TERMINAL",0,0,"Step 1331, loss: 2.909241199493408, step time: 17.93956756591797ms\r\n",,terminal_output +11344,12767409,"TERMINAL",0,0,"Step 1332, loss: 1.9802742004394531, step time: 17.439603805541992ms\r\n",,terminal_output +11345,12767472,"TERMINAL",0,0,"Step 1333, loss: 1.9754407405853271, step time: 18.095016479492188ms\r\n",,terminal_output +11346,12767536,"TERMINAL",0,0,"Step 1334, loss: 1.9474995136260986, step time: 17.337799072265625ms\r\n",,terminal_output +11347,12767635,"TERMINAL",0,0,"Step 1335, loss: 1.9469990730285645, step time: 17.516374588012695ms\r\n",,terminal_output +11348,12767686,"TERMINAL",0,0,"Step 1336, loss: 2.0302462577819824, step time: 17.897367477416992ms\r\n",,terminal_output +11349,12767789,"TERMINAL",0,0,"Step 1337, loss: 1.9729044437408447, step time: 17.546892166137695ms\r\nStep 1338, loss: 1.9682409763336182, step time: 20.223140716552734ms\r\n",,terminal_output +11350,12767914,"TERMINAL",0,0,"Step 1339, loss: 1.9352748394012451, step time: 17.881155014038086ms\r\nStep 1340, loss: 2.1323421001434326, step time: 17.1964168548584ms\r\n",,terminal_output +11351,12767977,"TERMINAL",0,0,"Step 1341, loss: 1.98231840133667, step time: 17.354249954223633ms\r\n",,terminal_output +11352,12768078,"TERMINAL",0,0,"Step 1342, loss: 2.0399396419525146, step time: 17.804861068725586ms\r\n",,terminal_output +11353,12768129,"TERMINAL",0,0,"Step 1343, loss: 2.018035650253296, step time: 17.468929290771484ms\r\n",,terminal_output +11354,12768234,"TERMINAL",0,0,"Step 1344, loss: 2.1170897483825684, step time: 17.238616943359375ms\r\nStep 1345, loss: 1.9614776372909546, step time: 18.050193786621094ms\r\n",,terminal_output +11355,12768295,"TERMINAL",0,0,"Step 1346, loss: 1.9809188842773438, step time: 17.322778701782227ms\r\n",,terminal_output +11356,12768395,"TERMINAL",0,0,"Step 1347, loss: 1.951357364654541, step time: 17.54140853881836ms\r\n",,terminal_output +11357,12768447,"TERMINAL",0,0,"Step 1348, loss: 1.9842925071716309, step time: 
17.80533790588379ms\r\n",,terminal_output +11358,12768500,"TERMINAL",0,0,"Step 1349, loss: 2.080648899078369, step time: 17.62986183166504ms\r\n",,terminal_output +11359,12768663,"TERMINAL",0,0,"Step 1350, loss: 2.0907115936279297, step time: 17.395496368408203ms\r\nStep 1351, loss: 2.214397668838501, step time: 17.989397048950195ms\r\nStep 1352, loss: 1.9807546138763428, step time: 17.309904098510742ms\r\n",,terminal_output +11360,12768762,"TERMINAL",0,0,"Step 1353, loss: 2.1106300354003906, step time: 17.627716064453125ms\r\n",,terminal_output +11361,12768868,"TERMINAL",0,0,"Step 1354, loss: 1.9942662715911865, step time: 17.896652221679688ms\r\nStep 1355, loss: 1.9584184885025024, step time: 17.553091049194336ms\r\n",,terminal_output +11362,12768932,"TERMINAL",0,0,"Step 1356, loss: 1.9522634744644165, step time: 17.28963851928711ms\r\n",,terminal_output +11363,12768989,"TERMINAL",0,0,"Step 1357, loss: 1.981553316116333, step time: 18.106698989868164ms\r\n",,terminal_output +11364,12769051,"TERMINAL",0,0,"Step 1358, loss: 1.961098074913025, step time: 17.451047897338867ms\r\n",,terminal_output +11365,12769111,"TERMINAL",0,0,"Step 1359, loss: 1.9720641374588013, step time: 17.682790756225586ms\r\n",,terminal_output +11366,12769173,"TERMINAL",0,0,"Step 1360, loss: 1.9803075790405273, step time: 17.771005630493164ms\r\n",,terminal_output +11367,12769233,"TERMINAL",0,0,"Step 1361, loss: 1.9891639947891235, step time: 17.56763458251953ms\r\n",,terminal_output +11368,12769294,"TERMINAL",0,0,"Step 1362, loss: 1.9317439794540405, step time: 17.363786697387695ms\r\n",,terminal_output +11369,12769355,"TERMINAL",0,0,"Step 1363, loss: 1.9448944330215454, step time: 17.874956130981445ms\r\n",,terminal_output +11370,12769448,"TERMINAL",0,0,"Step 1364, loss: 1.9696426391601562, step time: 17.385005950927734ms\r\n",,terminal_output +11371,12769552,"TERMINAL",0,0,"Step 1365, loss: 1.9371658563613892, step time: 17.664432525634766ms\r\nStep 1366, loss: 1.9352400302886963, step time: 17.92311668395996ms\r\n",,terminal_output +11372,12769614,"TERMINAL",0,0,"Step 1367, loss: 2.0501091480255127, step time: 17.674684524536133ms\r\n",,terminal_output +11373,12769674,"TERMINAL",0,0,"Step 1368, loss: 1.9446606636047363, step time: 17.4863338470459ms\r\n",,terminal_output +11374,12769736,"TERMINAL",0,0,"Step 1369, loss: 2.0254135131835938, step time: 17.99631118774414ms\r\n",,terminal_output +11375,12769854,"TERMINAL",0,0,"Step 1370, loss: 1.9186593294143677, step time: 17.15254783630371ms\r\nStep 1371, loss: 1.9214516878128052, step time: 17.39501953125ms\r\n",,terminal_output +11376,12769911,"TERMINAL",0,0,"Step 1372, loss: 3.000871419906616, step time: 17.592668533325195ms\r\n",,terminal_output +11377,12770015,"TERMINAL",0,0,"Step 1373, loss: 1.9346321821212769, step time: 17.32158660888672ms\r\n",,terminal_output +11378,12770075,"TERMINAL",0,0,"Step 1374, loss: 1.9242336750030518, step time: 17.115116119384766ms\r\n",,terminal_output +11379,12770136,"TERMINAL",0,0,"Step 1375, loss: 1.9258300065994263, step time: 19.172191619873047ms\r\n",,terminal_output +11380,12770198,"TERMINAL",0,0,"Step 1376, loss: 1.892662525177002, step time: 17.83919334411621ms\r\n",,terminal_output +11381,12770258,"TERMINAL",0,0,"Step 1377, loss: 1.9165148735046387, step time: 17.743349075317383ms\r\n",,terminal_output +11382,12770318,"TERMINAL",0,0,"Step 1378, loss: 1.978232502937317, step time: 17.847537994384766ms\r\n",,terminal_output +11383,12770384,"TERMINAL",0,0,"Step 1379, loss: 1.924405813217163, step time: 
17.53687858581543ms\r\n",,terminal_output +11384,12770436,"TERMINAL",0,0,"Step 1380, loss: 2.587307929992676, step time: 17.17400550842285ms\r\n",,terminal_output +11385,12770541,"TERMINAL",0,0,"Step 1381, loss: 2.781235933303833, step time: 17.82393455505371ms\r\nStep 1382, loss: 1.9653810262680054, step time: 17.13418960571289ms\r\n",,terminal_output +11386,12770635,"TERMINAL",0,0,"Step 1383, loss: 2.1522738933563232, step time: 17.258405685424805ms\r\n",,terminal_output +11387,12770687,"TERMINAL",0,0,"Step 1384, loss: 1.9550474882125854, step time: 17.600059509277344ms\r\n",,terminal_output +11388,12770791,"TERMINAL",0,0,"Step 1385, loss: 2.1330997943878174, step time: 17.5478458404541ms\r\nStep 1386, loss: 1.9253493547439575, step time: 17.075538635253906ms\r\n",,terminal_output +11389,12770909,"TERMINAL",0,0,"Step 1387, loss: 1.926666259765625, step time: 17.782211303710938ms\r\nStep 1388, loss: 1.9362190961837769, step time: 17.052888870239258ms\r\n",,terminal_output +11390,12770973,"TERMINAL",0,0,"Step 1389, loss: 1.9607181549072266, step time: 17.231464385986328ms\r\n",,terminal_output +11391,12771100,"TERMINAL",0,0,"Step 1390, loss: 1.9732258319854736, step time: 17.738819122314453ms\r\nStep 1391, loss: 2.000032424926758, step time: 18.317222595214844ms\r\n",,terminal_output +11392,12771162,"TERMINAL",0,0,"Step 1392, loss: 1.9280307292938232, step time: 17.35210418701172ms\r\n",,terminal_output +11393,12771225,"TERMINAL",0,0,"Step 1393, loss: 1.9787975549697876, step time: 18.04637908935547ms\r\n",,terminal_output +11394,12771288,"TERMINAL",0,0,"Step 1394, loss: 1.899673342704773, step time: 17.375946044921875ms\r\n",,terminal_output +11395,12771353,"TERMINAL",0,0,"Step 1395, loss: 2.0898725986480713, step time: 18.021106719970703ms\r\n",,terminal_output +11396,12771416,"TERMINAL",0,0,"Step 1396, loss: 2.00581431388855, step time: 18.17488670349121ms\r\n",,terminal_output +11397,12771484,"TERMINAL",0,0,"Step 1397, loss: 1.932634711265564, step time: 17.713069915771484ms\r\n",,terminal_output +11398,12771546,"TERMINAL",0,0,"Step 1398, loss: 2.0064258575439453, step time: 17.438650131225586ms\r\n",,terminal_output +11399,12771638,"TERMINAL",0,0,"Step 1399, loss: 1.9266096353530884, step time: 18.108129501342773ms\r\n",,terminal_output +11400,12771689,"TERMINAL",0,0,"Step 1400, loss: 1.9211534261703491, step time: 20.91383934020996ms\r\n",,terminal_output +11401,12771793,"TERMINAL",0,0,"Step 1401, loss: 1.905639410018921, step time: 24.8563289642334ms\r\nStep 1402, loss: 1.9071764945983887, step time: 24.367094039916992ms\r\n",,terminal_output +11402,12771867,"TERMINAL",0,0,"Step 1403, loss: 1.9265503883361816, step time: 25.30193328857422ms\r\n",,terminal_output +11403,12771929,"TERMINAL",0,0,"Step 1404, loss: 2.0988597869873047, step time: 24.358034133911133ms\r\n",,terminal_output +11404,12771993,"TERMINAL",0,0,"Step 1405, loss: 1.9146537780761719, step time: 25.59494972229004ms\r\n",,terminal_output +11405,12772062,"TERMINAL",0,0,"Step 1406, loss: 1.9054611921310425, step time: 24.86276626586914ms\r\n",,terminal_output +11406,12772124,"TERMINAL",0,0,"Step 1407, loss: 1.902313232421875, step time: 25.38013458251953ms\r\n",,terminal_output +11407,12772252,"TERMINAL",0,0,"Step 1408, loss: 1.92042076587677, step time: 25.24542808532715ms\r\nStep 1409, loss: 1.9662867784500122, step time: 20.637035369873047ms\r\n",,terminal_output +11408,12772317,"TERMINAL",0,0,"Step 1410, loss: 1.9040007591247559, step time: 18.37468147277832ms\r\n",,terminal_output 
+11409,12772379,"TERMINAL",0,0,"Step 1411, loss: 2.016533851623535, step time: 18.45550537109375ms\r\n",,terminal_output +11410,12772472,"TERMINAL",0,0,"Step 1412, loss: 1.9556390047073364, step time: 17.42243766784668ms\r\n",,terminal_output +11411,12772524,"TERMINAL",0,0,"Step 1413, loss: 1.8836735486984253, step time: 17.5628662109375ms\r\n",,terminal_output +11412,12772618,"TERMINAL",0,0,"Step 1414, loss: 2.062502384185791, step time: 17.807960510253906ms\r\n",,terminal_output +11413,12772673,"TERMINAL",0,0,"Step 1415, loss: 1.9172613620758057, step time: 17.481565475463867ms\r\n",,terminal_output +11414,12772721,"TERMINAL",0,0,"Step 1416, loss: 1.8867570161819458, step time: 17.229557037353516ms\r\n",,terminal_output +11415,12772825,"TERMINAL",0,0,"Step 1417, loss: 1.8915172815322876, step time: 17.932653427124023ms\r\nStep 1418, loss: 1.9043444395065308, step time: 17.453908920288086ms\r\n",,terminal_output +11416,12772889,"TERMINAL",0,0,"Step 1419, loss: 2.317709445953369, step time: 17.52305030822754ms\r\n",,terminal_output +11417,12772948,"TERMINAL",0,0,"Step 1420, loss: 1.8830221891403198, step time: 17.710208892822266ms\r\n",,terminal_output +11418,12773014,"TERMINAL",0,0,"Step 1421, loss: 2.4666807651519775, step time: 17.531394958496094ms\r\n",,terminal_output +11419,12773082,"TERMINAL",0,0,"Step 1422, loss: 1.929221510887146, step time: 17.206430435180664ms\r\n",,terminal_output +11420,12773139,"TERMINAL",0,0,"Step 1423, loss: 1.890805959701538, step time: 17.85564422607422ms\r\n",,terminal_output +11421,12773200,"TERMINAL",0,0,"Step 1424, loss: 2.006319999694824, step time: 17.23790168762207ms\r\n",,terminal_output +11422,12773296,"TERMINAL",0,0,"Step 1425, loss: 1.8866052627563477, step time: 17.70782470703125ms\r\n",,terminal_output +11423,12773345,"TERMINAL",0,0,"Step 1426, loss: 1.8719030618667603, step time: 17.905712127685547ms\r\n",,terminal_output +11424,12773410,"TERMINAL",0,0,"Step 1427, loss: 1.9202970266342163, step time: 17.394304275512695ms\r\n",,terminal_output +11425,12773472,"TERMINAL",0,0,"Step 1428, loss: 1.9877455234527588, step time: 17.20142364501953ms\r\n",,terminal_output +11426,12773533,"TERMINAL",0,0,"Step 1429, loss: 2.446531295776367, step time: 17.879962921142578ms\r\n",,terminal_output +11427,12773690,"TERMINAL",0,0,"Step 1430, loss: 1.9028736352920532, step time: 17.27151870727539ms\r\nStep 1431, loss: 1.927193284034729, step time: 17.392873764038086ms\r\nStep 1432, loss: 1.8877005577087402, step time: 17.630577087402344ms\r\n",,terminal_output +11428,12773784,"TERMINAL",0,0,"Step 1433, loss: 1.8878436088562012, step time: 17.401456832885742ms\r\n",,terminal_output +11429,12773879,"TERMINAL",0,0,"Step 1434, loss: 1.8798478841781616, step time: 17.172574996948242ms\r\nStep 1435, loss: 1.9331607818603516, step time: 17.91834831237793ms\r\n",,terminal_output +11430,12773973,"TERMINAL",0,0,"Step 1436, loss: 1.8803507089614868, step time: 17.30060577392578ms\r\n",,terminal_output +11431,12774079,"TERMINAL",0,0,"Step 1437, loss: 1.9251006841659546, step time: 17.581939697265625ms\r\nStep 1438, loss: 1.8926255702972412, step time: 19.62137222290039ms\r\n",,terminal_output +11432,12774189,"TERMINAL",0,0,"Step 1439, loss: 1.8723598718643188, step time: 18.06950569152832ms\r\nStep 1440, loss: 1.9185781478881836, step time: 17.35854148864746ms\r\n",,terminal_output +11433,12774251,"TERMINAL",0,0,"Step 1441, loss: 1.8697339296340942, step time: 17.87257194519043ms\r\n",,terminal_output +11434,12774313,"TERMINAL",0,0,"Step 1442, loss: 1.8866652250289917, 
step time: 17.25029945373535ms\r\n",,terminal_output +11435,12774439,"TERMINAL",0,0,"Step 1443, loss: 1.8616561889648438, step time: 17.41337776184082ms\r\nStep 1444, loss: 1.9842814207077026, step time: 17.598628997802734ms\r\n",,terminal_output +11436,12774538,"TERMINAL",0,0,"Step 1445, loss: 1.9736907482147217, step time: 17.75646209716797ms\r\n",,terminal_output +11437,12774589,"TERMINAL",0,0,"Step 1446, loss: 1.8908346891403198, step time: 17.31395721435547ms\r\n",,terminal_output +11438,12774640,"TERMINAL",0,0,"Step 1447, loss: 1.9154919385910034, step time: 17.94576644897461ms\r\n",,terminal_output +11439,12774786,"TERMINAL",0,0,"Step 1448, loss: 1.882267713546753, step time: 17.297744750976562ms\r\nStep 1449, loss: 1.8828563690185547, step time: 17.595291137695312ms\r\n",,terminal_output +11440,12774877,"TERMINAL",0,0,"Step 1450, loss: 1.8958110809326172, step time: 17.928361892700195ms\r\nStep 1451, loss: 1.8752732276916504, step time: 17.514944076538086ms\r\n",,terminal_output +11441,12774968,"TERMINAL",0,0,"Step 1452, loss: 1.9273420572280884, step time: 17.34447479248047ms\r\n",,terminal_output +11442,12775073,"TERMINAL",0,0,"Step 1453, loss: 1.8740975856781006, step time: 18.039226531982422ms\r\nStep 1454, loss: 1.8662359714508057, step time: 17.369985580444336ms\r\n",,terminal_output +11443,12775192,"TERMINAL",0,0,"Step 1455, loss: 1.9238605499267578, step time: 19.162416458129883ms\r\nStep 1456, loss: 1.8705296516418457, step time: 18.4023380279541ms\r\n",,terminal_output +11444,12775255,"TERMINAL",0,0,"Step 1457, loss: 1.8670865297317505, step time: 17.84992218017578ms\r\n",,terminal_output +11445,12775319,"TERMINAL",0,0,"Step 1458, loss: 1.8755383491516113, step time: 17.44866371154785ms\r\n",,terminal_output +11446,12775380,"TERMINAL",0,0,"Step 1459, loss: 1.8585599660873413, step time: 18.1887149810791ms\r\n",,terminal_output +11447,12775444,"TERMINAL",0,0,"Step 1460, loss: 1.8600834608078003, step time: 17.527341842651367ms\r\n",,terminal_output +11448,12775547,"TERMINAL",0,0,"Step 1461, loss: 1.889396071434021, step time: 18.637895584106445ms\r\n",,terminal_output +11449,12775850,"TERMINAL",0,0,"Step 1462, loss: 1.8487199544906616, step time: 336.43364906311035ms\r\n",,terminal_output +11450,12775977,"TERMINAL",0,0,"Step 1463, loss: 1.836112380027771, step time: 25.32649040222168ms\r\nStep 1464, loss: 1.9216505289077759, step time: 19.997358322143555ms\r\n",,terminal_output +11451,12776103,"TERMINAL",0,0,"Step 1465, loss: 1.852829933166504, step time: 19.104957580566406ms\r\nStep 1466, loss: 1.8564597368240356, step time: 18.09215545654297ms\r\n",,terminal_output +11452,12776167,"TERMINAL",0,0,"Step 1467, loss: 1.8715124130249023, step time: 17.912864685058594ms\r\n",,terminal_output +11453,12776233,"TERMINAL",0,0,"Step 1468, loss: 1.9841768741607666, step time: 18.023967742919922ms\r\n",,terminal_output +11454,12776295,"TERMINAL",0,0,"Step 1469, loss: 1.8187603950500488, step time: 17.764568328857422ms\r\n",,terminal_output +11455,12776359,"TERMINAL",0,0,"Step 1470, loss: 1.833369255065918, step time: 17.400503158569336ms\r\n",,terminal_output +11456,12776419,"TERMINAL",0,0,"Step 1471, loss: 1.8449615240097046, step time: 17.976760864257812ms\r\n",,terminal_output +11457,12776529,"TERMINAL",0,0,"Step 1472, loss: 1.9354532957077026, step time: 33.158063888549805ms\r\n",,terminal_output +11458,12776581,"TERMINAL",0,0,"Step 1473, loss: 2.0279219150543213, step time: 17.870664596557617ms\r\n",,terminal_output +11459,12776687,"TERMINAL",0,0,"Step 1474, loss: 
1.8246991634368896, step time: 17.725229263305664ms\r\nStep 1475, loss: 2.1454899311065674, step time: 17.553091049194336ms\r\n",,terminal_output +11460,12776806,"TERMINAL",0,0,"Step 1476, loss: 1.8291000127792358, step time: 17.31085777282715ms\r\nStep 1477, loss: 1.9083023071289062, step time: 18.446922302246094ms\r\n",,terminal_output +11461,12776869,"TERMINAL",0,0,"Step 1478, loss: 1.849157452583313, step time: 18.623828887939453ms\r\n",,terminal_output +11462,12776936,"TERMINAL",0,0,"Step 1479, loss: 1.8364115953445435, step time: 17.919063568115234ms\r\n",,terminal_output +11463,12777025,"TERMINAL",0,0,"Step 1480, loss: 1.897942304611206, step time: 18.08023452758789ms\r\n",,terminal_output +11464,12777076,"TERMINAL",0,0,"Step 1481, loss: 2.25258469581604, step time: 17.772197723388672ms\r\n",,terminal_output +11465,12777181,"TERMINAL",0,0,"Step 1482, loss: 1.874877691268921, step time: 17.494916915893555ms\r\nStep 1483, loss: 1.8588203191757202, step time: 18.268585205078125ms\r\n",,terminal_output +11466,12777243,"TERMINAL",0,0,"Step 1484, loss: 1.830420732498169, step time: 17.707347869873047ms\r\n",,terminal_output +11467,12777305,"TERMINAL",0,0,"Step 1485, loss: 1.8295972347259521, step time: 17.65155792236328ms\r\n",,terminal_output +11468,12777401,"TERMINAL",0,0,"Step 1486, loss: 1.8465015888214111, step time: 17.822980880737305ms\r\n",,terminal_output +11469,12777452,"TERMINAL",0,0,"Step 1487, loss: 1.9483507871627808, step time: 17.561912536621094ms\r\n",,terminal_output +11470,12777632,"TERMINAL",0,0,"Step 1488, loss: 1.8252156972885132, step time: 17.447233200073242ms\r\nStep 1489, loss: 1.8159074783325195, step time: 18.158674240112305ms\r\n",,terminal_output +11471,12777710,"TERMINAL",0,0,"Step 1490, loss: 1.8393936157226562, step time: 17.217397689819336ms\r\nStep 1491, loss: 1.931267261505127, step time: 17.583131790161133ms\r\n",,terminal_output +11472,12777773,"TERMINAL",0,0,"Step 1492, loss: 1.819352149963379, step time: 17.62700080871582ms\r\n",,terminal_output +11473,12777869,"TERMINAL",0,0,"Step 1493, loss: 1.8258095979690552, step time: 17.594337463378906ms\r\nStep 1494, loss: 1.8213785886764526, step time: 17.392873764038086ms\r\n",,terminal_output +11474,12777965,"TERMINAL",0,0,"Step 1495, loss: 1.8197394609451294, step time: 17.92430877685547ms\r\n",,terminal_output +11475,12778028,"TERMINAL",0,0,"Step 1496, loss: 1.8115042448043823, step time: 17.22860336303711ms\r\n",,terminal_output +11476,12778091,"TERMINAL",0,0,"Step 1497, loss: 1.8433958292007446, step time: 17.445802688598633ms\r\n",,terminal_output +11477,12778153,"TERMINAL",0,0,"Step 1498, loss: 2.2627832889556885, step time: 17.81177520751953ms\r\n",,terminal_output +11478,12778247,"TERMINAL",0,0,"Step 1499, loss: 1.8592604398727417, step time: 17.43149757385254ms\r\n",,terminal_output +11479,12780776,"TERMINAL",0,0,"Step 1500, loss: 2.022221326828003, step time: 29.61277961730957ms\r\nStep 1501, loss: 2.3967971801757812, step time: 25.210857391357422ms\r\n",,terminal_output +11480,12780909,"TERMINAL",0,0,"Step 1502, loss: 1.8196204900741577, step time: 20.645618438720703ms\r\nStep 1503, loss: 1.7958781719207764, step time: 20.289897918701172ms\r\n",,terminal_output +11481,12780972,"TERMINAL",0,0,"Step 1504, loss: 1.8300373554229736, step time: 19.12999153137207ms\r\n",,terminal_output +11482,12781278,"TERMINAL",0,0,"Step 1505, loss: 1.8134151697158813, step time: 296.1993217468262ms\r\n",,terminal_output +11483,12781351,"TERMINAL",0,0,"Step 1506, loss: 1.8245354890823364, step time: 
25.83456039428711ms\r\n",,terminal_output +11484,12781424,"TERMINAL",0,0,"Step 1507, loss: 1.8135560750961304, step time: 21.114349365234375ms\r\n",,terminal_output +11485,12781481,"TERMINAL",0,0,"Step 1508, loss: 2.0946764945983887, step time: 18.610715866088867ms\r\n",,terminal_output +11486,12781625,"TERMINAL",0,0,"Step 1509, loss: 1.8123186826705933, step time: 18.25714111328125ms\r\nStep 1510, loss: 1.8398181200027466, step time: 17.5478458404541ms\r\n",,terminal_output +11487,12781676,"TERMINAL",0,0,"Step 1511, loss: 1.814285397529602, step time: 19.84095573425293ms\r\n",,terminal_output +11488,12781772,"TERMINAL",0,0,"Step 1512, loss: 1.8264034986495972, step time: 18.651723861694336ms\r\n",,terminal_output +11489,12781855,"TERMINAL",0,0,"Step 1513, loss: 1.8317933082580566, step time: 18.058061599731445ms\r\nStep 1514, loss: 1.7916343212127686, step time: 17.803430557250977ms\r\n",,terminal_output +11490,12781906,"TERMINAL",0,0,"Step 1515, loss: 1.8225440979003906, step time: 17.84682273864746ms\r\n",,terminal_output +11491,12782015,"TERMINAL",0,0,"Step 1516, loss: 1.804258108139038, step time: 18.795251846313477ms\r\n",,terminal_output +11492,12782074,"TERMINAL",0,0,"Step 1517, loss: 1.8071331977844238, step time: 19.597291946411133ms\r\n",,terminal_output +11493,12782141,"TERMINAL",0,0,"Step 1518, loss: 1.888279676437378, step time: 18.88871192932129ms\r\n",,terminal_output +11494,12782202,"TERMINAL",0,0,"Step 1519, loss: 1.9369657039642334, step time: 19.066810607910156ms\r\n",,terminal_output +11495,12782307,"TERMINAL",0,0,"Step 1520, loss: 1.7943257093429565, step time: 19.031286239624023ms\r\nStep 1521, loss: 1.8083186149597168, step time: 18.969297409057617ms\r\n",,terminal_output +11496,12782430,"TERMINAL",0,0,"Step 1522, loss: 1.7984142303466797, step time: 18.62359046936035ms\r\nStep 1523, loss: 1.790100336074829, step time: 18.814802169799805ms\r\n",,terminal_output +11497,12782484,"TERMINAL",0,0,"Step 1524, loss: 1.7896850109100342, step time: 18.583059310913086ms\r\n",,terminal_output +11498,12782583,"TERMINAL",0,0,"Step 1525, loss: 1.915489912033081, step time: 17.589330673217773ms\r\n",,terminal_output +11499,12782634,"TERMINAL",0,0,"Step 1526, loss: 1.8545145988464355, step time: 19.030332565307617ms\r\n",,terminal_output +11500,12782741,"TERMINAL",0,0,"Step 1527, loss: 1.7896367311477661, step time: 18.743038177490234ms\r\nStep 1528, loss: 1.8037703037261963, step time: 17.46368408203125ms\r\n",,terminal_output +11501,12782866,"TERMINAL",0,0,"Step 1529, loss: 1.7905298471450806, step time: 17.935752868652344ms\r\nStep 1530, loss: 1.851035237312317, step time: 17.23766326904297ms\r\n",,terminal_output +11502,12782937,"TERMINAL",0,0,"Step 1531, loss: 1.9429796934127808, step time: 17.59648323059082ms\r\n",,terminal_output +11503,12782997,"TERMINAL",0,0,"Step 1532, loss: 1.878017544746399, step time: 17.458438873291016ms\r\n",,terminal_output +11504,12783116,"TERMINAL",0,0,"Step 1533, loss: 1.7883472442626953, step time: 17.43340492248535ms\r\nStep 1534, loss: 1.801914930343628, step time: 17.070770263671875ms\r\n",,terminal_output +11505,12783179,"TERMINAL",0,0,"Step 1535, loss: 1.827498435974121, step time: 19.387006759643555ms\r\n",,terminal_output +11506,12783274,"TERMINAL",0,0,"Step 1536, loss: 1.7752856016159058, step time: 18.742799758911133ms\r\n",,terminal_output +11507,12783326,"TERMINAL",0,0,"Step 1537, loss: 1.8632783889770508, step time: 18.983125686645508ms\r\n",,terminal_output +11508,12783433,"TERMINAL",0,0,"Step 1538, loss: 1.8248780965805054, step 
time: 19.01555061340332ms\r\nStep 1539, loss: 1.7808300256729126, step time: 17.787694931030273ms\r\n",,terminal_output +11509,12783537,"TERMINAL",0,0,"Step 1540, loss: 1.80280601978302, step time: 17.308950424194336ms\r\n",,terminal_output +11510,12783600,"TERMINAL",0,0,"Step 1541, loss: 1.782529354095459, step time: 17.94886589050293ms\r\n",,terminal_output +11511,12783685,"TERMINAL",0,0,"Step 1542, loss: 1.964821457862854, step time: 17.54021644592285ms\r\nStep 1543, loss: 1.8079134225845337, step time: 18.02372932434082ms\r\n",,terminal_output +11512,12783780,"TERMINAL",0,0,"Step 1544, loss: 2.3567259311676025, step time: 17.636537551879883ms\r\n",,terminal_output +11513,12783890,"TERMINAL",0,0,"Step 1545, loss: 1.873538613319397, step time: 17.579078674316406ms\r\nStep 1546, loss: 1.785962700843811, step time: 17.221689224243164ms\r\n",,terminal_output +11514,12783960,"TERMINAL",0,0,"Step 1547, loss: 1.7588059902191162, step time: 18.045902252197266ms\r\n",,terminal_output +11515,12784014,"TERMINAL",0,0,"Step 1548, loss: 1.7776726484298706, step time: 17.342567443847656ms\r\n",,terminal_output +11516,12784122,"TERMINAL",0,0,"Step 1549, loss: 1.8031420707702637, step time: 17.928361892700195ms\r\nStep 1550, loss: 1.868054747581482, step time: 17.52305030822754ms\r\n",,terminal_output +11517,12784185,"TERMINAL",0,0,"Step 1551, loss: 1.7851258516311646, step time: 17.586469650268555ms\r\n",,terminal_output +11518,12784252,"TERMINAL",0,0,"Step 1552, loss: 1.778322696685791, step time: 17.279624938964844ms\r\n",,terminal_output +11519,12784320,"TERMINAL",0,0,"Step 1553, loss: 1.778220772743225, step time: 18.071651458740234ms\r\n",,terminal_output +11520,12784381,"TERMINAL",0,0,"Step 1554, loss: 1.7775425910949707, step time: 17.276525497436523ms\r\n",,terminal_output +11521,12784450,"TERMINAL",0,0,"Step 1555, loss: 1.749221920967102, step time: 17.62104034423828ms\r\n",,terminal_output +11522,12784523,"TERMINAL",0,0,"Step 1556, loss: 2.280580520629883, step time: 17.816543579101562ms\r\n",,terminal_output +11523,12784593,"TERMINAL",0,0,"Step 1557, loss: 1.8372817039489746, step time: 17.618894577026367ms\r\n",,terminal_output +11524,12784703,"TERMINAL",0,0,"Step 1558, loss: 1.7733311653137207, step time: 17.241239547729492ms\r\nStep 1559, loss: 1.7948698997497559, step time: 18.111467361450195ms\r\n",,terminal_output +11525,12784773,"TERMINAL",0,0,"Step 1560, loss: 1.7665785551071167, step time: 17.231464385986328ms\r\n",,terminal_output +11526,12784887,"TERMINAL",0,0,"Step 1561, loss: 1.7859458923339844, step time: 17.726898193359375ms\r\nStep 1562, loss: 2.2706594467163086, step time: 17.61341094970703ms\r\n",,terminal_output +11527,12785001,"TERMINAL",0,0,"Step 1563, loss: 1.7983052730560303, step time: 17.661571502685547ms\r\nStep 1564, loss: 2.2930095195770264, step time: 17.336368560791016ms\r\n",,terminal_output +11528,12785071,"TERMINAL",0,0,"Step 1565, loss: 1.7687898874282837, step time: 17.81630516052246ms\r\n",,terminal_output +11529,12785140,"TERMINAL",0,0,"Step 1566, loss: 1.7787816524505615, step time: 17.316102981567383ms\r\n",,terminal_output +11530,12785202,"TERMINAL",0,0,"Step 1567, loss: 1.8415805101394653, step time: 19.55556869506836ms\r\n",,terminal_output +11531,12785264,"TERMINAL",0,0,"Step 1568, loss: 1.907413125038147, step time: 18.03421974182129ms\r\n",,terminal_output +11532,12785330,"TERMINAL",0,0,"Step 1569, loss: 1.8582102060317993, step time: 17.796754837036133ms\r\n",,terminal_output +11533,12785393,"TERMINAL",0,0,"Step 1570, loss: 1.7824275493621826, 
step time: 17.367124557495117ms\r\n",,terminal_output +11534,12785462,"TERMINAL",0,0,"Step 1571, loss: 1.7854208946228027, step time: 18.03278923034668ms\r\n",,terminal_output +11535,12785532,"TERMINAL",0,0,"Step 1572, loss: 2.0825273990631104, step time: 17.20428466796875ms\r\n",,terminal_output +11536,12785596,"TERMINAL",0,0,"Step 1573, loss: 1.7758183479309082, step time: 17.650127410888672ms\r\n",,terminal_output +11537,12785666,"TERMINAL",0,0,"Step 1574, loss: 1.7647989988327026, step time: 17.563343048095703ms\r\n",,terminal_output +11538,12785718,"TERMINAL",0,0,"Step 1575, loss: 1.7654832601547241, step time: 17.470598220825195ms\r\n",,terminal_output +11539,12785772,"TERMINAL",0,0,"Step 1576, loss: 1.7841070890426636, step time: 17.351388931274414ms\r\n",,terminal_output +11540,12785895,"TERMINAL",0,0,"Step 1577, loss: 1.8264416456222534, step time: 18.108129501342773ms\r\nStep 1578, loss: 1.778178095817566, step time: 17.377376556396484ms\r\n",,terminal_output +11541,12785966,"TERMINAL",0,0,"Step 1579, loss: 1.8201334476470947, step time: 17.645835876464844ms\r\n",,terminal_output +11542,12786025,"TERMINAL",0,0,"Step 1580, loss: 1.893143653869629, step time: 17.624855041503906ms\r\n",,terminal_output +11543,12786095,"TERMINAL",0,0,"Step 1581, loss: 1.7666107416152954, step time: 17.680883407592773ms\r\n",,terminal_output +11544,12786160,"TERMINAL",0,0,"Step 1582, loss: 1.74834406375885, step time: 17.283201217651367ms\r\n",,terminal_output +11545,12786229,"TERMINAL",0,0,"Step 1583, loss: 1.7852833271026611, step time: 17.779111862182617ms\r\n",,terminal_output +11546,12786288,"TERMINAL",0,0,"Step 1584, loss: 1.760607361793518, step time: 17.29416847229004ms\r\n",,terminal_output +11547,12786349,"TERMINAL",0,0,"Step 1585, loss: 1.914902687072754, step time: 17.51565933227539ms\r\n",,terminal_output +11548,12786419,"TERMINAL",0,0,"Step 1586, loss: 1.748259425163269, step time: 17.659902572631836ms\r\n",,terminal_output +11549,12786543,"TERMINAL",0,0,"Step 1587, loss: 1.8011753559112549, step time: 17.50349998474121ms\r\nStep 1588, loss: 1.7733807563781738, step time: 17.3337459564209ms\r\n",,terminal_output +11550,12786728,"TERMINAL",0,0,"Step 1589, loss: 1.7412081956863403, step time: 17.75383949279785ms\r\nStep 1590, loss: 1.8068279027938843, step time: 17.160892486572266ms\r\nStep 1591, loss: 1.7939765453338623, step time: 17.447948455810547ms\r\n",,terminal_output +11551,12786786,"TERMINAL",0,0,"Step 1592, loss: 1.7533468008041382, step time: 17.40574836730957ms\r\n",,terminal_output +11552,12786921,"TERMINAL",0,0,"Step 1593, loss: 1.756459355354309, step time: 18.524646759033203ms\r\nStep 1594, loss: 1.87770676612854, step time: 18.126487731933594ms\r\n",,terminal_output +11553,12786974,"TERMINAL",0,0,"Step 1595, loss: 1.7386103868484497, step time: 17.97962188720703ms\r\n",,terminal_output +11554,12787101,"TERMINAL",0,0,"Step 1596, loss: 1.7382290363311768, step time: 17.20285415649414ms\r\nStep 1597, loss: 1.743029236793518, step time: 17.28057861328125ms\r\n",,terminal_output +11555,12787233,"TERMINAL",0,0,"Step 1598, loss: 1.7288745641708374, step time: 17.49134063720703ms\r\nStep 1599, loss: 1.8613113164901733, step time: 17.28677749633789ms\r\n",,terminal_output +11556,12787290,"TERMINAL",0,0,"Step 1600, loss: 1.7137507200241089, step time: 17.125606536865234ms\r\n",,terminal_output +11557,12787339,"TERMINAL",0,0,"Step 1601, loss: 1.806563377380371, step time: 17.574548721313477ms\r\n",,terminal_output +11558,12787447,"TERMINAL",0,0,"Step 1602, loss: 1.7469385862350464, 
step time: 17.20142364501953ms\r\nStep 1603, loss: 1.7449456453323364, step time: 17.28534698486328ms\r\n",,terminal_output +11559,12787554,"TERMINAL",0,0,"Step 1604, loss: 1.7352100610733032, step time: 17.346858978271484ms\r\n",,terminal_output +11560,12787653,"TERMINAL",0,0,"Step 1605, loss: 1.7301663160324097, step time: 17.475128173828125ms\r\nStep 1606, loss: 1.8792431354522705, step time: 17.241716384887695ms\r\n",,terminal_output +11561,12787706,"TERMINAL",0,0,"Step 1607, loss: 1.723662257194519, step time: 18.077611923217773ms\r\n",,terminal_output +11562,12787812,"TERMINAL",0,0,"Step 1608, loss: 1.885951042175293, step time: 17.23194122314453ms\r\nStep 1609, loss: 1.7336022853851318, step time: 17.440319061279297ms\r\n",,terminal_output +11563,12787880,"TERMINAL",0,0,"Step 1610, loss: 1.781610369682312, step time: 17.810821533203125ms\r\n",,terminal_output +11564,12787946,"TERMINAL",0,0,"Step 1611, loss: 1.7929376363754272, step time: 17.5478458404541ms\r\n",,terminal_output +11565,12788010,"TERMINAL",0,0,"Step 1612, loss: 1.7288016080856323, step time: 17.27581024169922ms\r\n",,terminal_output +11566,12788126,"TERMINAL",0,0,"Step 1613, loss: 1.7137930393218994, step time: 17.81487464904785ms\r\nStep 1614, loss: 1.7208963632583618, step time: 17.207860946655273ms\r\n",,terminal_output +11567,12788218,"TERMINAL",0,0,"Step 1615, loss: 1.7149064540863037, step time: 17.419815063476562ms\r\n",,terminal_output +11568,12788284,"TERMINAL",0,0,"Step 1616, loss: 1.8363068103790283, step time: 19.153356552124023ms\r\n",,terminal_output +11569,12788335,"TERMINAL",0,0,"Step 1617, loss: 1.9212604761123657, step time: 17.495393753051758ms\r\n",,terminal_output +11570,12788464,"TERMINAL",0,0,"Step 1618, loss: 2.027085542678833, step time: 17.207860946655273ms\r\nStep 1619, loss: 1.9628870487213135, step time: 17.771005630493164ms\r\n",,terminal_output +11571,12788597,"TERMINAL",0,0,"Step 1620, loss: 1.8079754114151, step time: 17.217159271240234ms\r\nStep 1621, loss: 1.7312251329421997, step time: 17.494678497314453ms\r\n",,terminal_output +11572,12788688,"TERMINAL",0,0,"Step 1622, loss: 1.8246808052062988, step time: 17.568588256835938ms\r\nStep 1623, loss: 1.7298784255981445, step time: 17.351150512695312ms\r\n",,terminal_output +11573,12788788,"TERMINAL",0,0,"Step 1624, loss: 1.7219116687774658, step time: 17.219066619873047ms\r\n",,terminal_output +11574,12788900,"TERMINAL",0,0,"Step 1625, loss: 1.7407262325286865, step time: 17.767667770385742ms\r\nStep 1626, loss: 1.761559009552002, step time: 18.39423179626465ms\r\n",,terminal_output +11575,12789012,"TERMINAL",0,0,"Step 1627, loss: 1.7195154428482056, step time: 17.635822296142578ms\r\nStep 1628, loss: 1.7083073854446411, step time: 17.647743225097656ms\r\n",,terminal_output +11576,12789138,"TERMINAL",0,0,"Step 1629, loss: 1.9913159608840942, step time: 17.498254776000977ms\r\n",,terminal_output +11577,12789195,"TERMINAL",0,0,"Step 1630, loss: 1.738339900970459, step time: 17.50659942626953ms\r\nStep 1631, loss: 1.8853553533554077, step time: 17.90452003479004ms\r\n",,terminal_output +11578,12789293,"TERMINAL",0,0,"Step 1632, loss: 1.73467218875885, step time: 17.400026321411133ms\r\n",,terminal_output +11579,12789402,"TERMINAL",0,0,"Step 1633, loss: 1.7187141180038452, step time: 17.719268798828125ms\r\nStep 1634, loss: 1.7349430322647095, step time: 17.64678955078125ms\r\n",,terminal_output +11580,12789469,"TERMINAL",0,0,"Step 1635, loss: 1.7269736528396606, step time: 17.406463623046875ms\r\n",,terminal_output 
+11581,12789534,"TERMINAL",0,0,"Step 1636, loss: 1.7115428447723389, step time: 17.32349395751953ms\r\n",,terminal_output +11582,12789642,"TERMINAL",0,0,"Step 1637, loss: 1.7234156131744385, step time: 17.889022827148438ms\r\nStep 1638, loss: 1.7482094764709473, step time: 17.36903190612793ms\r\n",,terminal_output +11583,12789706,"TERMINAL",0,0,"Step 1639, loss: 1.7093738317489624, step time: 17.446517944335938ms\r\n",,terminal_output +11584,12789765,"TERMINAL",0,0,"Step 1640, loss: 1.7055771350860596, step time: 17.64369010925293ms\r\n",,terminal_output +11585,12789885,"TERMINAL",0,0,"Step 1641, loss: 1.9798378944396973, step time: 17.401695251464844ms\r\nStep 1642, loss: 1.7053767442703247, step time: 17.356395721435547ms\r\n",,terminal_output +11586,12790012,"TERMINAL",0,0,"Step 1643, loss: 1.6953864097595215, step time: 17.859458923339844ms\r\nStep 1644, loss: 1.6722973585128784, step time: 17.396926879882812ms\r\n",,terminal_output +11587,12790098,"TERMINAL",0,0,"Step 1645, loss: 1.7717629671096802, step time: 17.53687858581543ms\r\n",,terminal_output +11588,12790156,"TERMINAL",0,0,"Step 1646, loss: 1.6881335973739624, step time: 17.632007598876953ms\r\n",,terminal_output +11589,12790266,"TERMINAL",0,0,"Step 1647, loss: 1.7019586563110352, step time: 18.890380859375ms\r\nStep 1648, loss: 1.6881763935089111, step time: 17.78101921081543ms\r\n",,terminal_output +11590,12790335,"TERMINAL",0,0,"Step 1649, loss: 1.7764745950698853, step time: 17.879247665405273ms\r\n",,terminal_output +11591,12790395,"TERMINAL",0,0,"Step 1650, loss: 1.685001015663147, step time: 17.623424530029297ms\r\n",,terminal_output +11592,12790456,"TERMINAL",0,0,"Step 1651, loss: 1.7316031455993652, step time: 17.511367797851562ms\r\n",,terminal_output +11593,12790532,"TERMINAL",0,0,"Step 1652, loss: 1.6903363466262817, step time: 17.587661743164062ms\r\n",,terminal_output +11594,12790588,"TERMINAL",0,0,"Step 1653, loss: 1.788646936416626, step time: 17.549753189086914ms\r\n",,terminal_output +11595,12790648,"TERMINAL",0,0,"Step 1654, loss: 1.6994696855545044, step time: 17.261981964111328ms\r\n",,terminal_output +11596,12790724,"TERMINAL",0,0,"Step 1655, loss: 1.682896614074707, step time: 18.001079559326172ms\r\n",,terminal_output +11597,12790812,"TERMINAL",0,0,"Step 1656, loss: 1.6975783109664917, step time: 17.41194725036621ms\r\n",,terminal_output +11598,12790897,"TERMINAL",0,0,"Step 1657, loss: 1.6645392179489136, step time: 17.536163330078125ms\r\nStep 1658, loss: 1.6982916593551636, step time: 17.64845848083496ms\r\n",,terminal_output +11599,12791022,"TERMINAL",0,0,"Step 1659, loss: 1.679592251777649, step time: 17.466068267822266ms\r\nStep 1660, loss: 2.0635738372802734, step time: 17.397403717041016ms\r\n",,terminal_output +11600,12791120,"TERMINAL",0,0,"Step 1661, loss: 1.7669057846069336, step time: 18.125295639038086ms\r\n",,terminal_output +11601,12791170,"TERMINAL",0,0,"Step 1662, loss: 1.696531057357788, step time: 17.432451248168945ms\r\n",,terminal_output +11602,12791220,"TERMINAL",0,0,"Step 1663, loss: 1.7522461414337158, step time: 17.545223236083984ms\r\n",,terminal_output +11603,12791580,"TERMINAL",0,0,"Step 1664, loss: 1.7000333070755005, step time: 297.0271110534668ms\r\nStep 1665, loss: 1.7703872919082642, step time: 25.36153793334961ms\r\n",,terminal_output +11604,12791660,"TERMINAL",0,0,"Step 1666, loss: 1.6617718935012817, step time: 19.921064376831055ms\r\n",,terminal_output +11605,12791748,"TERMINAL",0,0,"Step 1667, loss: 1.6963802576065063, step time: 
18.72730255126953ms\r\n",,terminal_output +11606,12791810,"TERMINAL",0,0,"Step 1668, loss: 1.6667150259017944, step time: 17.777442932128906ms\r\n",,terminal_output +11607,12791984,"TERMINAL",0,0,"Step 1669, loss: 1.6682814359664917, step time: 17.66681671142578ms\r\nStep 1670, loss: 1.6513525247573853, step time: 17.97318458557129ms\r\nStep 1671, loss: 1.6657230854034424, step time: 17.74120330810547ms\r\n",,terminal_output +11608,12792065,"TERMINAL",0,0,"Step 1672, loss: 1.6650418043136597, step time: 17.63129234313965ms\r\n",,terminal_output +11609,12792110,"TERMINAL",0,0,"Step 1673, loss: 1.686801552772522, step time: 18.129825592041016ms\r\n",,terminal_output +11610,12792195,"TERMINAL",0,0,"Step 1674, loss: 1.6832185983657837, step time: 17.435073852539062ms\r\n",,terminal_output +11611,12792244,"TERMINAL",0,0,"Step 1675, loss: 1.7114351987838745, step time: 17.614364624023438ms\r\n",,terminal_output +11612,12792351,"TERMINAL",0,0,"Step 1676, loss: 1.6665188074111938, step time: 17.821073532104492ms\r\nStep 1677, loss: 1.7231543064117432, step time: 17.53520965576172ms\r\n",,terminal_output +11613,12792413,"TERMINAL",0,0,"Step 1678, loss: 1.6477807760238647, step time: 17.508506774902344ms\r\n",,terminal_output +11614,12792476,"TERMINAL",0,0,"Step 1679, loss: 1.6962076425552368, step time: 17.95792579650879ms\r\n",,terminal_output +11615,12792534,"TERMINAL",0,0,"Step 1680, loss: 1.660443902015686, step time: 17.515897750854492ms\r\n",,terminal_output +11616,12792596,"TERMINAL",0,0,"Step 1681, loss: 1.758439064025879, step time: 17.5931453704834ms\r\n",,terminal_output +11617,12792703,"TERMINAL",0,0,"Step 1682, loss: 1.681945562362671, step time: 17.811059951782227ms\r\nStep 1683, loss: 1.6513904333114624, step time: 17.52185821533203ms\r\n",,terminal_output +11618,12792796,"TERMINAL",0,0,"Step 1684, loss: 1.6754738092422485, step time: 17.304182052612305ms\r\n",,terminal_output +11619,12792895,"TERMINAL",0,0,"Step 1685, loss: 1.649022102355957, step time: 17.948627471923828ms\r\nStep 1686, loss: 1.6924504041671753, step time: 17.543554306030273ms\r\n",,terminal_output +11620,12792990,"TERMINAL",0,0,"Step 1687, loss: 1.6452131271362305, step time: 17.811059951782227ms\r\n",,terminal_output +11621,12793098,"TERMINAL",0,0,"Step 1688, loss: 1.6764336824417114, step time: 17.98844337463379ms\r\nStep 1689, loss: 1.6393688917160034, step time: 17.59028434753418ms\r\n",,terminal_output +11622,12793160,"TERMINAL",0,0,"Step 1690, loss: 1.746119737625122, step time: 17.311573028564453ms\r\n",,terminal_output +11623,12793234,"TERMINAL",0,0,"Step 1691, loss: 1.645434021949768, step time: 17.74001121520996ms\r\n",,terminal_output +11624,12793285,"TERMINAL",0,0,"Step 1692, loss: 1.651488184928894, step time: 17.235994338989258ms\r\n",,terminal_output +11625,12793347,"TERMINAL",0,0,"Step 1693, loss: 1.6787996292114258, step time: 17.3032283782959ms\r\n",,terminal_output +11626,12793421,"TERMINAL",0,0,"Step 1694, loss: 1.6250563859939575, step time: 17.70472526550293ms\r\n",,terminal_output +11627,12793480,"TERMINAL",0,0,"Step 1695, loss: 1.6629738807678223, step time: 17.377853393554688ms\r\n",,terminal_output +11628,12793593,"TERMINAL",0,0,"Step 1696, loss: 1.6451914310455322, step time: 17.103195190429688ms\r\nStep 1697, loss: 1.670711874961853, step time: 17.7304744720459ms\r\n",,terminal_output +11629,12793650,"TERMINAL",0,0,"Step 1698, loss: 1.6359533071517944, step time: 17.2121524810791ms\r\n",,terminal_output +11630,12793702,"TERMINAL",0,0,"Step 1699, loss: 1.7180402278900146, step time: 
17.278432846069336ms\r\n",,terminal_output +11631,12793798,"TERMINAL",0,0,"Step 1700, loss: 1.651125192642212, step time: 17.5173282623291ms\r\n",,terminal_output +11632,12793907,"TERMINAL",0,0,"Step 1701, loss: 1.6350868940353394, step time: 18.38827133178711ms\r\nStep 1702, loss: 1.6477445363998413, step time: 17.40574836730957ms\r\n",,terminal_output +11633,12793971,"TERMINAL",0,0,"Step 1703, loss: 1.62379789352417, step time: 17.841339111328125ms\r\n",,terminal_output +11634,12794031,"TERMINAL",0,0,"Step 1704, loss: 1.6389888525009155, step time: 17.22884178161621ms\r\n",,terminal_output +11635,12794142,"TERMINAL",0,0,"Step 1705, loss: 1.7352099418640137, step time: 17.3337459564209ms\r\nStep 1706, loss: 1.6270155906677246, step time: 17.490625381469727ms\r\n",,terminal_output +11636,12794207,"TERMINAL",0,0,"Step 1707, loss: 1.6361380815505981, step time: 17.17662811279297ms\r\n",,terminal_output +11637,12794270,"TERMINAL",0,0,"Step 1708, loss: 1.6146297454833984, step time: 17.124652862548828ms\r\n",,terminal_output +11638,12794336,"TERMINAL",0,0,"Step 1709, loss: 1.6182353496551514, step time: 17.562150955200195ms\r\n",,terminal_output +11639,12794401,"TERMINAL",0,0,"Step 1710, loss: 1.631135106086731, step time: 17.165184020996094ms\r\n",,terminal_output +11640,12794465,"TERMINAL",0,0,"Step 1711, loss: 2.0898468494415283, step time: 17.248153686523438ms\r\n",,terminal_output +11641,12794529,"TERMINAL",0,0,"Step 1712, loss: 1.6337273120880127, step time: 17.507553100585938ms\r\n",,terminal_output +11642,12794592,"TERMINAL",0,0,"Step 1713, loss: 1.6224312782287598, step time: 17.359256744384766ms\r\n",,terminal_output +11643,12794657,"TERMINAL",0,0,"Step 1714, loss: 1.6452276706695557, step time: 17.23313331604004ms\r\n",,terminal_output +11644,12794722,"TERMINAL",0,0,"Step 1715, loss: 1.6452698707580566, step time: 17.722129821777344ms\r\n",,terminal_output +11645,12794783,"TERMINAL",0,0,"Step 1716, loss: 1.6113080978393555, step time: 17.011404037475586ms\r\n",,terminal_output +11646,12794846,"TERMINAL",0,0,"Step 1717, loss: 1.6239346265792847, step time: 17.246484756469727ms\r\n",,terminal_output +11647,12794905,"TERMINAL",0,0,"Step 1718, loss: 1.720353126525879, step time: 17.584562301635742ms\r\n",,terminal_output +11648,12794968,"TERMINAL",0,0,"Step 1719, loss: 1.782148838043213, step time: 17.182350158691406ms\r\n",,terminal_output +11649,12795031,"TERMINAL",0,0,"Step 1720, loss: 1.724379301071167, step time: 17.244577407836914ms\r\n",,terminal_output +11650,12795093,"TERMINAL",0,0,"Step 1721, loss: 1.6300526857376099, step time: 17.725229263305664ms\r\n",,terminal_output +11651,12795156,"TERMINAL",0,0,"Step 1722, loss: 1.636051893234253, step time: 17.1661376953125ms\r\n",,terminal_output +11652,12795219,"TERMINAL",0,0,"Step 1723, loss: 1.6657681465148926, step time: 18.923044204711914ms\r\n",,terminal_output +11653,12795283,"TERMINAL",0,0,"Step 1724, loss: 1.6331225633621216, step time: 19.25206184387207ms\r\n",,terminal_output +11654,12795375,"TERMINAL",0,0,"Step 1725, loss: 1.6078976392745972, step time: 19.675254821777344ms\r\n",,terminal_output +11655,12795431,"TERMINAL",0,0,"Step 1726, loss: 1.6214853525161743, step time: 17.3947811126709ms\r\n",,terminal_output +11656,12795537,"TERMINAL",0,0,"Step 1727, loss: 1.6471495628356934, step time: 17.795562744140625ms\r\nStep 1728, loss: 1.6802369356155396, step time: 17.266511917114258ms\r\n",,terminal_output +11657,12795600,"TERMINAL",0,0,"Step 1729, loss: 1.6414456367492676, step time: 
17.470836639404297ms\r\n",,terminal_output +11658,12795705,"TERMINAL",0,0,"Step 1730, loss: 1.6350582838058472, step time: 17.63296127319336ms\r\n",,terminal_output +11659,12795756,"TERMINAL",0,0,"Step 1731, loss: 1.682080864906311, step time: 17.284393310546875ms\r\n",,terminal_output +11660,12795866,"TERMINAL",0,0,"Step 1732, loss: 1.6391431093215942, step time: 17.229080200195312ms\r\nStep 1733, loss: 1.95513117313385, step time: 17.62104034423828ms\r\n",,terminal_output +11661,12795934,"TERMINAL",0,0,"Step 1734, loss: 1.6953891515731812, step time: 17.23170280456543ms\r\n",,terminal_output +11662,12795997,"TERMINAL",0,0,"Step 1735, loss: 1.599429726600647, step time: 17.35973358154297ms\r\n",,terminal_output +11663,12796057,"TERMINAL",0,0,"Step 1736, loss: 1.607752799987793, step time: 17.560243606567383ms\r\n",,terminal_output +11664,12796121,"TERMINAL",0,0,"Step 1737, loss: 1.713295817375183, step time: 17.21358299255371ms\r\n",,terminal_output +11665,12796182,"TERMINAL",0,0,"Step 1738, loss: 1.6119917631149292, step time: 17.1511173248291ms\r\n",,terminal_output +11666,12796244,"TERMINAL",0,0,"Step 1739, loss: 1.6261039972305298, step time: 17.722368240356445ms\r\n",,terminal_output +11667,12796304,"TERMINAL",0,0,"Step 1740, loss: 1.6071858406066895, step time: 17.037153244018555ms\r\n",,terminal_output +11668,12796368,"TERMINAL",0,0,"Step 1741, loss: 1.629026174545288, step time: 17.279386520385742ms\r\n",,terminal_output +11669,12796429,"TERMINAL",0,0,"Step 1742, loss: 1.7553848028182983, step time: 17.470836639404297ms\r\n",,terminal_output +11670,12796493,"TERMINAL",0,0,"Step 1743, loss: 1.6513292789459229, step time: 17.178058624267578ms\r\n",,terminal_output +11671,12796559,"TERMINAL",0,0,"Step 1744, loss: 1.7917662858963013, step time: 17.138004302978516ms\r\n",,terminal_output +11672,12796623,"TERMINAL",0,0,"Step 1745, loss: 1.625593900680542, step time: 17.796993255615234ms\r\n",,terminal_output +11673,12796689,"TERMINAL",0,0,"Step 1746, loss: 1.627763271331787, step time: 17.367124557495117ms\r\n",,terminal_output +11674,12796748,"TERMINAL",0,0,"Step 1747, loss: 1.597595453262329, step time: 17.267942428588867ms\r\n",,terminal_output +11675,12796820,"TERMINAL",0,0,"Step 1748, loss: 1.6138371229171753, step time: 17.459630966186523ms\r\n",,terminal_output +11676,12796872,"TERMINAL",0,0,"Step 1749, loss: 1.617215871810913, step time: 19.497156143188477ms\r\n",,terminal_output +11677,12796966,"TERMINAL",0,0,"Step 1750, loss: 1.6345605850219727, step time: 17.63629913330078ms\r\n",,terminal_output +11678,12797019,"TERMINAL",0,0,"Step 1751, loss: 1.6439244747161865, step time: 17.819881439208984ms\r\n",,terminal_output +11679,12797069,"TERMINAL",0,0,"Step 1752, loss: 1.6267201900482178, step time: 17.27151870727539ms\r\n",,terminal_output +11680,12797172,"TERMINAL",0,0,"Step 1753, loss: 1.7068414688110352, step time: 17.284393310546875ms\r\nStep 1754, loss: 1.6107393503189087, step time: 17.614126205444336ms\r\n",,terminal_output +11681,12797265,"TERMINAL",0,0,"Step 1755, loss: 1.5887645483016968, step time: 17.484426498413086ms\r\n",,terminal_output +11682,12797318,"TERMINAL",0,0,"Step 1756, loss: 1.573825716972351, step time: 17.4100399017334ms\r\n",,terminal_output +11683,12797487,"TERMINAL",0,0,"Step 1757, loss: 1.647837519645691, step time: 17.798900604248047ms\r\nStep 1758, loss: 1.601921796798706, step time: 17.248153686523438ms\r\nStep 1759, loss: 1.611412525177002, step time: 17.443418502807617ms\r\n",,terminal_output +11684,12797554,"TERMINAL",0,0,"Step 1760, loss: 
1.6648805141448975, step time: 17.781734466552734ms\r\n",,terminal_output +11685,12797617,"TERMINAL",0,0,"Step 1761, loss: 1.6146513223648071, step time: 17.419099807739258ms\r\n",,terminal_output +11686,12797678,"TERMINAL",0,0,"Step 1762, loss: 1.6162980794906616, step time: 17.422914505004883ms\r\n",,terminal_output +11687,12797740,"TERMINAL",0,0,"Step 1763, loss: 1.6083359718322754, step time: 17.72451400756836ms\r\n",,terminal_output +11688,12797804,"TERMINAL",0,0,"Step 1764, loss: 1.8396761417388916, step time: 17.31729507446289ms\r\n",,terminal_output +11689,12797866,"TERMINAL",0,0,"Step 1765, loss: 1.5933847427368164, step time: 17.676591873168945ms\r\n",,terminal_output +11690,12797990,"TERMINAL",0,0,"Step 1766, loss: 1.876869797706604, step time: 17.738819122314453ms\r\nStep 1767, loss: 1.712103247642517, step time: 17.504453659057617ms\r\n",,terminal_output +11691,12798054,"TERMINAL",0,0,"Step 1768, loss: 1.673413634300232, step time: 17.243623733520508ms\r\n",,terminal_output +11692,12798117,"TERMINAL",0,0,"Step 1769, loss: 1.586330771446228, step time: 17.828941345214844ms\r\n",,terminal_output +11693,12798186,"TERMINAL",0,0,"Step 1770, loss: 1.575774908065796, step time: 17.579317092895508ms\r\n",,terminal_output +11694,12798252,"TERMINAL",0,0,"Step 1771, loss: 1.64138662815094, step time: 17.486572265625ms\r\n",,terminal_output +11695,12798313,"TERMINAL",0,0,"Step 1772, loss: 1.644583821296692, step time: 17.661571502685547ms\r\n",,terminal_output +11696,12798376,"TERMINAL",0,0,"Step 1773, loss: 1.7080092430114746, step time: 17.45462417602539ms\r\n",,terminal_output +11697,12798469,"TERMINAL",0,0,"Step 1774, loss: 1.5994133949279785, step time: 17.208337783813477ms\r\n",,terminal_output +11698,12798520,"TERMINAL",0,0,"Step 1775, loss: 1.6670989990234375, step time: 17.812252044677734ms\r\n",,terminal_output +11699,12798631,"TERMINAL",0,0,"Step 1776, loss: 1.960821509361267, step time: 17.29559898376465ms\r\nStep 1777, loss: 1.6431938409805298, step time: 17.47417449951172ms\r\n",,terminal_output +11700,12798684,"TERMINAL",0,0,"Step 1778, loss: 1.5829119682312012, step time: 17.812728881835938ms\r\n",,terminal_output +11701,12798748,"TERMINAL",0,0,"Step 1779, loss: 1.5950664281845093, step time: 17.31276512145996ms\r\n",,terminal_output +11702,12798819,"TERMINAL",0,0,"Step 1780, loss: 1.6968841552734375, step time: 17.130374908447266ms\r\n",,terminal_output +11703,12798873,"TERMINAL",0,0,"Step 1781, loss: 1.6353116035461426, step time: 17.583370208740234ms\r\n",,terminal_output +11704,12798937,"TERMINAL",0,0,"Step 1782, loss: 1.5912948846817017, step time: 17.090559005737305ms\r\n",,terminal_output +11705,12798997,"TERMINAL",0,0,"Step 1783, loss: 1.5934146642684937, step time: 17.214059829711914ms\r\n",,terminal_output +11706,12799057,"TERMINAL",0,0,"Step 1784, loss: 1.588787317276001, step time: 17.478227615356445ms\r\n",,terminal_output +11707,12799119,"TERMINAL",0,0,"Step 1785, loss: 1.59375, step time: 17.420053482055664ms\r\n",,terminal_output +11708,12799215,"TERMINAL",0,0,"Step 1786, loss: 1.7270516157150269, step time: 17.309904098510742ms\r\n",,terminal_output +11709,12799267,"TERMINAL",0,0,"Step 1787, loss: 1.8391038179397583, step time: 18.064260482788086ms\r\n",,terminal_output +11710,12799373,"TERMINAL",0,0,"Step 1788, loss: 1.6062763929367065, step time: 17.342805862426758ms\r\nStep 1789, loss: 1.625484585762024, step time: 17.424583435058594ms\r\n",,terminal_output +11711,12799437,"TERMINAL",0,0,"Step 1790, loss: 1.5803133249282837, step time: 
17.84515380859375ms\r\n",,terminal_output +11712,12799497,"TERMINAL",0,0,"Step 1791, loss: 1.584574580192566, step time: 17.46964454650879ms\r\n",,terminal_output +11713,12799560,"TERMINAL",0,0,"Step 1792, loss: 1.6069543361663818, step time: 17.249107360839844ms\r\n",,terminal_output +11714,12799624,"TERMINAL",0,0,"Step 1793, loss: 1.5625741481781006, step time: 17.910480499267578ms\r\n",,terminal_output +11715,12799687,"TERMINAL",0,0,"Step 1794, loss: 1.5629829168319702, step time: 17.38595962524414ms\r\n",,terminal_output +11716,12799749,"TERMINAL",0,0,"Step 1795, loss: 1.5711634159088135, step time: 17.516613006591797ms\r\n",,terminal_output +11717,12799824,"TERMINAL",0,0,"Step 1796, loss: 1.5676770210266113, step time: 17.734289169311523ms\r\n",,terminal_output +11718,12799876,"TERMINAL",0,0,"Step 1797, loss: 1.554165244102478, step time: 17.48180389404297ms\r\n",,terminal_output +11719,12799941,"TERMINAL",0,0,"Step 1798, loss: 1.5777032375335693, step time: 17.33875274658203ms\r\n",,terminal_output +11720,12800243,"TERMINAL",0,0,"Step 1799, loss: 1.5567574501037598, step time: 304.20374870300293ms\r\n",,terminal_output +11721,12800311,"TERMINAL",0,0,"Step 1800, loss: 1.5970637798309326, step time: 24.67489242553711ms\r\n",,terminal_output +11722,12800385,"TERMINAL",0,0,"Step 1801, loss: 1.6378123760223389, step time: 20.80821990966797ms\r\n",,terminal_output +11723,12800438,"TERMINAL",0,0,"Step 1802, loss: 1.5750230550765991, step time: 19.34361457824707ms\r\n",,terminal_output +11724,12800534,"TERMINAL",0,0,"Step 1803, loss: 1.793875813484192, step time: 18.42784881591797ms\r\n",,terminal_output +11725,12800599,"TERMINAL",0,0,"Step 1804, loss: 1.5870020389556885, step time: 17.940282821655273ms\r\n",,terminal_output +11726,12800654,"TERMINAL",0,0,"Step 1805, loss: 1.566513180732727, step time: 18.458127975463867ms\r\n",,terminal_output +11727,12800762,"TERMINAL",0,0,"Step 1806, loss: 1.544219970703125, step time: 17.874717712402344ms\r\nStep 1807, loss: 1.5493978261947632, step time: 17.551422119140625ms\r\n",,terminal_output +11728,12800826,"TERMINAL",0,0,"Step 1808, loss: 1.5838088989257812, step time: 17.926692962646484ms\r\n",,terminal_output +11729,12800881,"TERMINAL",0,0,"Step 1809, loss: 1.579769253730774, step time: 17.52781867980957ms\r\n",,terminal_output +11730,12800975,"TERMINAL",0,0,"Step 1810, loss: 1.5525989532470703, step time: 17.367839813232422ms\r\n",,terminal_output +11731,12801037,"TERMINAL",0,0,"Step 1811, loss: 1.5489581823349, step time: 17.968416213989258ms\r\n",,terminal_output +11732,12801151,"TERMINAL",0,0,"Step 1812, loss: 1.5461328029632568, step time: 17.290830612182617ms\r\nStep 1813, loss: 1.5730490684509277, step time: 17.53711700439453ms\r\n",,terminal_output +11733,12801205,"TERMINAL",0,0,"Step 1814, loss: 1.5597482919692993, step time: 17.8375244140625ms\r\n",,terminal_output +11734,12801316,"TERMINAL",0,0,"Step 1815, loss: 1.5497255325317383, step time: 17.460107803344727ms\r\nStep 1816, loss: 1.6208456754684448, step time: 17.77052879333496ms\r\n",,terminal_output +11735,12801379,"TERMINAL",0,0,"Step 1817, loss: 1.607588291168213, step time: 18.011093139648438ms\r\n",,terminal_output +11736,12801439,"TERMINAL",0,0,"Step 1818, loss: 1.5750315189361572, step time: 17.426729202270508ms\r\n",,terminal_output +11737,12801502,"TERMINAL",0,0,"Step 1819, loss: 1.5364644527435303, step time: 17.587661743164062ms\r\n",,terminal_output +11738,12801567,"TERMINAL",0,0,"Step 1820, loss: 1.5194450616836548, step time: 18.14746856689453ms\r\n",,terminal_output 
+11739,12801632,"TERMINAL",0,0,"Step 1821, loss: 1.557236909866333, step time: 17.637968063354492ms\r\n",,terminal_output +11740,12801700,"TERMINAL",0,0,"Step 1822, loss: 1.5384680032730103, step time: 23.416996002197266ms\r\n",,terminal_output +11741,12801847,"TERMINAL",0,0,"Step 1823, loss: 1.5990370512008667, step time: 36.36741638183594ms\r\nStep 1824, loss: 1.641434669494629, step time: 23.250102996826172ms\r\n",,terminal_output +11742,12801900,"TERMINAL",0,0,"Step 1825, loss: 1.6792223453521729, step time: 25.156497955322266ms\r\n",,terminal_output +11743,12801968,"TERMINAL",0,0,"Step 1826, loss: 1.5628635883331299, step time: 25.433063507080078ms\r\n",,terminal_output +11744,12802033,"TERMINAL",0,0,"Step 1827, loss: 1.528434157371521, step time: 25.159835815429688ms\r\n",,terminal_output +11745,12802105,"TERMINAL",0,0,"Step 1828, loss: 1.5672117471694946, step time: 24.701356887817383ms\r\n",,terminal_output +11746,12802173,"TERMINAL",0,0,"Step 1829, loss: 1.5440856218338013, step time: 29.27398681640625ms\r\n",,terminal_output +11747,12802274,"TERMINAL",0,0,"Step 1830, loss: 1.6158020496368408, step time: 23.98681640625ms\r\n",,terminal_output +11748,12802328,"TERMINAL",0,0,"Step 1831, loss: 1.5259454250335693, step time: 19.710540771484375ms\r\n",,terminal_output +11749,12802434,"TERMINAL",0,0,"Step 1832, loss: 1.5356649160385132, step time: 18.585920333862305ms\r\nStep 1833, loss: 1.5217537879943848, step time: 17.8525447845459ms\r\n",,terminal_output +11750,12802499,"TERMINAL",0,0,"Step 1834, loss: 1.6650731563568115, step time: 17.595291137695312ms\r\n",,terminal_output +11751,12802563,"TERMINAL",0,0,"Step 1835, loss: 1.5485992431640625, step time: 18.114566802978516ms\r\n",,terminal_output +11752,12802625,"TERMINAL",0,0,"Step 1836, loss: 1.5510393381118774, step time: 17.493486404418945ms\r\n",,terminal_output +11753,12802690,"TERMINAL",0,0,"Step 1837, loss: 1.753188133239746, step time: 17.899513244628906ms\r\n",,terminal_output +11754,12802755,"TERMINAL",0,0,"Step 1838, loss: 1.556076169013977, step time: 17.72022247314453ms\r\n",,terminal_output +11755,12802819,"TERMINAL",0,0,"Step 1839, loss: 1.5502055883407593, step time: 17.53091812133789ms\r\n",,terminal_output +11756,12802881,"TERMINAL",0,0,"Step 1840, loss: 1.5332332849502563, step time: 17.44675636291504ms\r\n",,terminal_output +11757,12802943,"TERMINAL",0,0,"Step 1841, loss: 2.0846076011657715, step time: 18.050193786621094ms\r\n",,terminal_output +11758,12803005,"TERMINAL",0,0,"Step 1842, loss: 1.5379712581634521, step time: 17.34781265258789ms\r\n",,terminal_output +11759,12803066,"TERMINAL",0,0,"Step 1843, loss: 1.5418790578842163, step time: 17.328262329101562ms\r\n",,terminal_output +11760,12803126,"TERMINAL",0,0,"Step 1844, loss: 1.6033947467803955, step time: 17.73381233215332ms\r\n",,terminal_output +11761,12803186,"TERMINAL",0,0,"Step 1845, loss: 1.5193856954574585, step time: 17.37236976623535ms\r\n",,terminal_output +11762,12803247,"TERMINAL",0,0,"Step 1846, loss: 1.5295623540878296, step time: 17.191648483276367ms\r\n",,terminal_output +11763,12803308,"TERMINAL",0,0,"Step 1847, loss: 1.5712138414382935, step time: 18.036842346191406ms\r\n",,terminal_output +11764,12803405,"TERMINAL",0,0,"Step 1848, loss: 1.5278180837631226, step time: 17.167091369628906ms\r\n",,terminal_output +11765,12803467,"TERMINAL",0,0,"Step 1849, loss: 1.583421230316162, step time: 17.700672149658203ms\r\n",,terminal_output +11766,12803572,"TERMINAL",0,0,"Step 1850, loss: 1.5543004274368286, step time: 17.725467681884766ms\r\nStep 
1851, loss: 1.5026956796646118, step time: 17.291784286499023ms\r\n",,terminal_output +11767,12803633,"TERMINAL",0,0,"Step 1852, loss: 1.4972172975540161, step time: 17.332077026367188ms\r\n",,terminal_output +11768,12803751,"TERMINAL",0,0,"Step 1853, loss: 1.5425978899002075, step time: 17.808914184570312ms\r\n",,terminal_output +11769,12803878,"TERMINAL",0,0,"Step 1854, loss: 1.5389684438705444, step time: 17.2426700592041ms\r\nStep 1855, loss: 1.571571946144104, step time: 17.376184463500977ms\r\nStep 1856, loss: 1.4917045831680298, step time: 17.75383949279785ms\r\n",,terminal_output +11770,12803930,"TERMINAL",0,0,"Step 1857, loss: 1.5231201648712158, step time: 17.342805862426758ms\r\n",,terminal_output +11771,12804023,"TERMINAL",0,0,"Step 1858, loss: 1.5716676712036133, step time: 17.333269119262695ms\r\n",,terminal_output +11772,12804077,"TERMINAL",0,0,"Step 1859, loss: 1.6924961805343628, step time: 17.86184310913086ms\r\n",,terminal_output +11773,12804184,"TERMINAL",0,0,"Step 1860, loss: 1.5775800943374634, step time: 17.194271087646484ms\r\nStep 1861, loss: 1.5919244289398193, step time: 17.294883728027344ms\r\n",,terminal_output +11774,12804278,"TERMINAL",0,0,"Step 1862, loss: 1.4797145128250122, step time: 17.67277717590332ms\r\n",,terminal_output +11775,12804331,"TERMINAL",0,0,"Step 1863, loss: 1.5087674856185913, step time: 17.355680465698242ms\r\n",,terminal_output +11776,12804437,"TERMINAL",0,0,"Step 1864, loss: 1.7879546880722046, step time: 17.139196395874023ms\r\nStep 1865, loss: 1.5074598789215088, step time: 17.836332321166992ms\r\n",,terminal_output +11777,12804502,"TERMINAL",0,0,"Step 1866, loss: 1.5068663358688354, step time: 17.211437225341797ms\r\n",,terminal_output +11778,12804564,"TERMINAL",0,0,"Step 1867, loss: 1.503327488899231, step time: 17.238378524780273ms\r\n",,terminal_output +11779,12804630,"TERMINAL",0,0,"Step 1868, loss: 1.491411805152893, step time: 17.67706871032715ms\r\n",,terminal_output +11780,12804692,"TERMINAL",0,0,"Step 1869, loss: 1.9198991060256958, step time: 17.26222038269043ms\r\n",,terminal_output +11781,12804756,"TERMINAL",0,0,"Step 1870, loss: 1.4957501888275146, step time: 17.335176467895508ms\r\n",,terminal_output +11782,12804830,"TERMINAL",0,0,"Step 1871, loss: 1.513588309288025, step time: 18.01300048828125ms\r\n",,terminal_output +11783,12804884,"TERMINAL",0,0,"Step 1872, loss: 1.4838956594467163, step time: 17.1661376953125ms\r\n",,terminal_output +11784,12804950,"TERMINAL",0,0,"Step 1873, loss: 1.489841103553772, step time: 17.50349998474121ms\r\n",,terminal_output +11785,12805013,"TERMINAL",0,0,"Step 1874, loss: 1.502976417541504, step time: 17.97175407409668ms\r\n",,terminal_output +11786,12805076,"TERMINAL",0,0,"Step 1875, loss: 1.4904643297195435, step time: 17.61341094970703ms\r\n",,terminal_output +11787,12805142,"TERMINAL",0,0,"Step 1876, loss: 1.5441160202026367, step time: 17.399072647094727ms\r\n",,terminal_output +11788,12805246,"TERMINAL",0,0,"Step 1877, loss: 1.6397626399993896, step time: 17.981529235839844ms\r\nStep 1878, loss: 1.5975284576416016, step time: 18.856287002563477ms\r\n",,terminal_output +11789,12805340,"TERMINAL",0,0,"Step 1879, loss: 1.4784408807754517, step time: 17.896175384521484ms\r\n",,terminal_output +11790,12805407,"TERMINAL",0,0,"Step 1880, loss: 1.498336672782898, step time: 17.805099487304688ms\r\n",,terminal_output +11791,12805461,"TERMINAL",0,0,"Step 1881, loss: 1.4817980527877808, step time: 17.39811897277832ms\r\n",,terminal_output +11792,12805568,"TERMINAL",0,0,"Step 1882, loss: 
1.484596848487854, step time: 17.1968936920166ms\r\nStep 1883, loss: 1.4952892065048218, step time: 17.84491539001465ms\r\n",,terminal_output +11793,12805687,"TERMINAL",0,0,"Step 1884, loss: 1.5137168169021606, step time: 17.159461975097656ms\r\nStep 1885, loss: 1.4979338645935059, step time: 17.333269119262695ms\r\n",,terminal_output +11794,12805751,"TERMINAL",0,0,"Step 1886, loss: 1.5279185771942139, step time: 17.961502075195312ms\r\n",,terminal_output +11795,12805816,"TERMINAL",0,0,"Step 1887, loss: 1.485001564025879, step time: 17.2574520111084ms\r\n",,terminal_output +11796,12805881,"TERMINAL",0,0,"Step 1888, loss: 1.496199131011963, step time: 17.59958267211914ms\r\n",,terminal_output +11797,12805946,"TERMINAL",0,0,"Step 1889, loss: 1.5386382341384888, step time: 17.745256423950195ms\r\n",,terminal_output +11798,12806013,"TERMINAL",0,0,"Step 1890, loss: 1.4903583526611328, step time: 17.27128028869629ms\r\n",,terminal_output +11799,12806071,"TERMINAL",0,0,"Step 1891, loss: 1.531195044517517, step time: 17.264842987060547ms\r\n",,terminal_output +11800,12806134,"TERMINAL",0,0,"Step 1892, loss: 1.4840288162231445, step time: 18.342018127441406ms\r\n",,terminal_output +11801,12806199,"TERMINAL",0,0,"Step 1893, loss: 1.5021458864212036, step time: 17.261266708374023ms\r\n",,terminal_output +11802,12806260,"TERMINAL",0,0,"Step 1894, loss: 1.4657840728759766, step time: 17.17352867126465ms\r\n",,terminal_output +11803,12806324,"TERMINAL",0,0,"Step 1895, loss: 1.5805339813232422, step time: 17.824172973632812ms\r\n",,terminal_output +11804,12806389,"TERMINAL",0,0,"Step 1896, loss: 1.487716555595398, step time: 17.08245277404785ms\r\n",,terminal_output +11805,12806453,"TERMINAL",0,0,"Step 1897, loss: 1.4729219675064087, step time: 17.397165298461914ms\r\n",,terminal_output +11806,12806518,"TERMINAL",0,0,"Step 1898, loss: 1.492751955986023, step time: 17.66800880432129ms\r\n",,terminal_output +11807,12806583,"TERMINAL",0,0,"Step 1899, loss: 1.4688838720321655, step time: 17.50016212463379ms\r\n",,terminal_output +11808,12806646,"TERMINAL",0,0,"Step 1900, loss: 1.4997038841247559, step time: 17.210721969604492ms\r\n",,terminal_output +11809,12806712,"TERMINAL",0,0,"Step 1901, loss: 1.483138918876648, step time: 17.681121826171875ms\r\n",,terminal_output +11810,12806776,"TERMINAL",0,0,"Step 1902, loss: 1.4825609922409058, step time: 17.40288734436035ms\r\n",,terminal_output +11811,12806839,"TERMINAL",0,0,"Step 1903, loss: 1.5063817501068115, step time: 18.396615982055664ms\r\n",,terminal_output +11812,12806903,"TERMINAL",0,0,"Step 1904, loss: 1.4902671575546265, step time: 18.7685489654541ms\r\n",,terminal_output +11813,12806967,"TERMINAL",0,0,"Step 1905, loss: 1.4513825178146362, step time: 17.747879028320312ms\r\n",,terminal_output +11814,12807033,"TERMINAL",0,0,"Step 1906, loss: 1.5194915533065796, step time: 17.621994018554688ms\r\n",,terminal_output +11815,12807097,"TERMINAL",0,0,"Step 1907, loss: 1.4695546627044678, step time: 18.056869506835938ms\r\n",,terminal_output +11816,12807152,"TERMINAL",0,0,"Step 1908, loss: 1.4675283432006836, step time: 17.473459243774414ms\r\n",,terminal_output +11817,12807217,"TERMINAL",0,0,"Step 1909, loss: 1.465131402015686, step time: 17.52924919128418ms\r\n",,terminal_output +11818,12807277,"TERMINAL",0,0,"Step 1910, loss: 1.465604305267334, step time: 17.859697341918945ms\r\n",,terminal_output +11819,12807385,"TERMINAL",0,0,"Step 1911, loss: 1.8141039609909058, step time: 17.596960067749023ms\r\nStep 1912, loss: 1.4924545288085938, step time: 
17.58408546447754ms\r\n",,terminal_output +11820,12807447,"TERMINAL",0,0,"Step 1913, loss: 1.452370285987854, step time: 17.938613891601562ms\r\n",,terminal_output +11821,12807512,"TERMINAL",0,0,"Step 1914, loss: 1.588560938835144, step time: 17.43626594543457ms\r\n",,terminal_output +11822,12807570,"TERMINAL",0,0,"Step 1915, loss: 2.063981294631958, step time: 17.530441284179688ms\r\n",,terminal_output +11823,12807668,"TERMINAL",0,0,"Step 1916, loss: 1.4807370901107788, step time: 18.071413040161133ms\r\n",,terminal_output +11824,12807720,"TERMINAL",0,0,"Step 1917, loss: 1.4853742122650146, step time: 17.548799514770508ms\r\n",,terminal_output +11825,12807836,"TERMINAL",0,0,"Step 1918, loss: 1.4649364948272705, step time: 17.36140251159668ms\r\nStep 1919, loss: 1.4737098217010498, step time: 17.968416213989258ms\r\n",,terminal_output +11826,12808218,"TERMINAL",0,0,"Step 1920, loss: 1.46479070186615, step time: 333.77718925476074ms\r\nStep 1921, loss: 1.4637134075164795, step time: 25.127172470092773ms\r\n",,terminal_output +11827,12808314,"TERMINAL",0,0,"Step 1922, loss: 1.5628963708877563, step time: 19.938230514526367ms\r\n",,terminal_output +11828,12808374,"TERMINAL",0,0,"Step 1923, loss: 1.5076185464859009, step time: 18.67198944091797ms\r\n",,terminal_output +11829,12808439,"TERMINAL",0,0,"Step 1924, loss: 1.5119062662124634, step time: 17.81630516052246ms\r\n",,terminal_output +11830,12808503,"TERMINAL",0,0,"Step 1925, loss: 1.4818769693374634, step time: 17.788171768188477ms\r\n",,terminal_output +11831,12808555,"TERMINAL",0,0,"Step 1926, loss: 1.4685696363449097, step time: 17.754077911376953ms\r\n",,terminal_output +11832,12808663,"TERMINAL",0,0,"Step 1927, loss: 1.4714537858963013, step time: 17.59195327758789ms\r\nStep 1928, loss: 1.5514870882034302, step time: 17.3952579498291ms\r\n",,terminal_output +11833,12808755,"TERMINAL",0,0,"Step 1929, loss: 1.4600774049758911, step time: 17.881155014038086ms\r\n",,terminal_output +11834,12808861,"TERMINAL",0,0,"Step 1930, loss: 1.449378252029419, step time: 18.674373626708984ms\r\nStep 1931, loss: 1.5164886713027954, step time: 17.64202117919922ms\r\n",,terminal_output +11835,12808923,"TERMINAL",0,0,"Step 1932, loss: 1.5733650922775269, step time: 17.693758010864258ms\r\n",,terminal_output +11836,12808985,"TERMINAL",0,0,"Step 1933, loss: 1.5830646753311157, step time: 17.54593849182129ms\r\n",,terminal_output +11837,12809046,"TERMINAL",0,0,"Step 1934, loss: 1.4657683372497559, step time: 17.656564712524414ms\r\n",,terminal_output +11838,12809109,"TERMINAL",0,0,"Step 1935, loss: 1.4928150177001953, step time: 17.938852310180664ms\r\n",,terminal_output +11839,12809202,"TERMINAL",0,0,"Step 1936, loss: 1.436194658279419, step time: 33.490896224975586ms\r\n",,terminal_output +11840,12809253,"TERMINAL",0,0,"Step 1937, loss: 1.4643460512161255, step time: 18.496274948120117ms\r\n",,terminal_output +11841,12809393,"TERMINAL",0,0,"Step 1938, loss: 1.4784427881240845, step time: 18.053531646728516ms\r\nStep 1939, loss: 1.4755356311798096, step time: 17.643451690673828ms\r\n",,terminal_output +11842,12809499,"TERMINAL",0,0,"Step 1940, loss: 1.4891780614852905, step time: 17.58599281311035ms\r\nStep 1941, loss: 1.4506874084472656, step time: 18.16248893737793ms\r\n",,terminal_output +11843,12809593,"TERMINAL",0,0,"Step 1942, loss: 1.4468703269958496, step time: 17.52758026123047ms\r\n",,terminal_output +11844,12809644,"TERMINAL",0,0,"Step 1943, loss: 1.4683434963226318, step time: 17.636775970458984ms\r\n",,terminal_output 
+11845,12809747,"TERMINAL",0,0,"Step 1944, loss: 1.4409271478652954, step time: 17.817974090576172ms\r\nStep 1945, loss: 1.4420710802078247, step time: 17.620325088500977ms\r\n",,terminal_output +11846,12809810,"TERMINAL",0,0,"Step 1946, loss: 1.4325954914093018, step time: 17.514705657958984ms\r\n",,terminal_output +11847,12809873,"TERMINAL",0,0,"Step 1947, loss: 1.440051555633545, step time: 17.851591110229492ms\r\n",,terminal_output +11848,12809933,"TERMINAL",0,0,"Step 1948, loss: 1.4436819553375244, step time: 17.47918128967285ms\r\n",,terminal_output +11849,12810028,"TERMINAL",0,0,"Step 1949, loss: 1.5541088581085205, step time: 17.59791374206543ms\r\n",,terminal_output +11850,12810079,"TERMINAL",0,0,"Step 1950, loss: 1.4733607769012451, step time: 17.745256423950195ms\r\n",,terminal_output +11851,12810171,"TERMINAL",0,0,"Step 1951, loss: 1.4264551401138306, step time: 17.60268211364746ms\r\n",,terminal_output +11852,12810223,"TERMINAL",0,0,"Step 1952, loss: 1.4150030612945557, step time: 17.531156539916992ms\r\n",,terminal_output +11853,12810274,"TERMINAL",0,0,"Step 1953, loss: 1.427333950996399, step time: 18.198013305664062ms\r\n",,terminal_output +11854,12810383,"TERMINAL",0,0,"Step 1954, loss: 1.4399840831756592, step time: 18.263578414916992ms\r\nStep 1955, loss: 1.4271113872528076, step time: 17.972469329833984ms\r\n",,terminal_output +11855,12810478,"TERMINAL",0,0,"Step 1956, loss: 1.6509535312652588, step time: 17.899513244628906ms\r\n",,terminal_output +11856,12810591,"TERMINAL",0,0,"Step 1957, loss: 1.4768682718276978, step time: 17.736434936523438ms\r\nStep 1958, loss: 1.433388113975525, step time: 17.453670501708984ms\r\n",,terminal_output +11857,12810700,"TERMINAL",0,0,"Step 1959, loss: 1.408435583114624, step time: 17.81463623046875ms\r\nStep 1960, loss: 1.405888319015503, step time: 17.534732818603516ms\r\n",,terminal_output +11858,12810763,"TERMINAL",0,0,"Step 1961, loss: 2.041738986968994, step time: 17.670392990112305ms\r\n",,terminal_output +11859,12810837,"TERMINAL",0,0,"Step 1962, loss: 1.5252259969711304, step time: 17.744779586791992ms\r\n",,terminal_output +11860,12810890,"TERMINAL",0,0,"Step 1963, loss: 1.5641807317733765, step time: 17.686843872070312ms\r\n",,terminal_output +11861,12810962,"TERMINAL",0,0,"Step 1964, loss: 1.4404271841049194, step time: 17.441272735595703ms\r\n",,terminal_output +11862,12811022,"TERMINAL",0,0,"Step 1965, loss: 1.4282482862472534, step time: 17.832040786743164ms\r\n",,terminal_output +11863,12811099,"TERMINAL",0,0,"Step 1966, loss: 1.4217548370361328, step time: 17.49277114868164ms\r\n",,terminal_output +11864,12811205,"TERMINAL",0,0,"Step 1967, loss: 1.4315831661224365, step time: 17.50493049621582ms\r\nStep 1968, loss: 1.432714581489563, step time: 17.752647399902344ms\r\n",,terminal_output +11865,12811269,"TERMINAL",0,0,"Step 1969, loss: 1.44264817237854, step time: 17.505645751953125ms\r\n",,terminal_output +11866,12811334,"TERMINAL",0,0,"Step 1970, loss: 1.454316258430481, step time: 17.488956451416016ms\r\n",,terminal_output +11867,12811397,"TERMINAL",0,0,"Step 1971, loss: 1.422383427619934, step time: 17.70329475402832ms\r\n",,terminal_output +11868,12811462,"TERMINAL",0,0,"Step 1972, loss: 1.4195729494094849, step time: 17.35854148864746ms\r\n",,terminal_output +11869,12811526,"TERMINAL",0,0,"Step 1973, loss: 1.4116376638412476, step time: 17.500638961791992ms\r\n",,terminal_output +11870,12811588,"TERMINAL",0,0,"Step 1974, loss: 1.442280650138855, step time: 17.56572723388672ms\r\n",,terminal_output 
+11871,12811649,"TERMINAL",0,0,"Step 1975, loss: 2.188683271408081, step time: 17.786026000976562ms\r\n",,terminal_output +11872,12811711,"TERMINAL",0,0,"Step 1976, loss: 1.428005337715149, step time: 17.41194725036621ms\r\n",,terminal_output +11873,12811779,"TERMINAL",0,0,"Step 1977, loss: 2.3554933071136475, step time: 17.739295959472656ms\r\n",,terminal_output +11874,12811883,"TERMINAL",0,0,"Step 1978, loss: 1.5029281377792358, step time: 17.35520362854004ms\r\nStep 1979, loss: 1.4401218891143799, step time: 17.52161979675293ms\r\n",,terminal_output +11875,12811982,"TERMINAL",0,0,"Step 1980, loss: 1.5181796550750732, step time: 17.622947692871094ms\r\n",,terminal_output +11876,12812089,"TERMINAL",0,0,"Step 1981, loss: 1.5685005187988281, step time: 17.580509185791016ms\r\nStep 1982, loss: 1.4733539819717407, step time: 17.36760139465332ms\r\n",,terminal_output +11877,12812152,"TERMINAL",0,0,"Step 1983, loss: 1.4172383546829224, step time: 17.83156394958496ms\r\n",,terminal_output +11878,12812216,"TERMINAL",0,0,"Step 1984, loss: 1.4169074296951294, step time: 17.388105392456055ms\r\n",,terminal_output +11879,12812279,"TERMINAL",0,0,"Step 1985, loss: 1.484939694404602, step time: 17.635583877563477ms\r\n",,terminal_output +11880,12812342,"TERMINAL",0,0,"Step 1986, loss: 1.4233813285827637, step time: 17.647266387939453ms\r\n",,terminal_output +11881,12812405,"TERMINAL",0,0,"Step 1987, loss: 1.4467719793319702, step time: 17.473936080932617ms\r\n",,terminal_output +11882,12812471,"TERMINAL",0,0,"Step 1988, loss: 1.5112237930297852, step time: 17.35091209411621ms\r\n",,terminal_output +11883,12812544,"TERMINAL",0,0,"Step 1989, loss: 1.4299631118774414, step time: 17.716646194458008ms\r\n",,terminal_output +11884,12812596,"TERMINAL",0,0,"Step 1990, loss: 1.403244137763977, step time: 17.40121841430664ms\r\n",,terminal_output +11885,12812663,"TERMINAL",0,0,"Step 1991, loss: 1.438736081123352, step time: 17.771482467651367ms\r\n",,terminal_output +11886,12812725,"TERMINAL",0,0,"Step 1992, loss: 1.4062403440475464, step time: 17.751455307006836ms\r\n",,terminal_output +11887,12812793,"TERMINAL",0,0,"Step 1993, loss: 1.4160574674606323, step time: 17.57192611694336ms\r\n",,terminal_output +11888,12812856,"TERMINAL",0,0,"Step 1994, loss: 1.4460058212280273, step time: 17.446517944335938ms\r\n",,terminal_output +11889,12812920,"TERMINAL",0,0,"Step 1995, loss: 1.4040305614471436, step time: 17.78244972229004ms\r\n",,terminal_output +11890,12812983,"TERMINAL",0,0,"Step 1996, loss: 1.6898759603500366, step time: 17.522811889648438ms\r\n",,terminal_output +11891,12813062,"TERMINAL",0,0,"Step 1997, loss: 1.4325151443481445, step time: 17.564773559570312ms\r\n",,terminal_output +11892,12813112,"TERMINAL",0,0,"Step 1998, loss: 1.4146488904953003, step time: 17.598628997802734ms\r\n",,terminal_output +11893,12813164,"TERMINAL",0,0,"Step 1999, loss: 1.4325056076049805, step time: 17.603158950805664ms\r\n",,terminal_output +11894,12815898,"TERMINAL",0,0,"Step 2000, loss: 1.9346003532409668, step time: 29.473066329956055ms\r\n",,terminal_output +11895,12816007,"TERMINAL",0,0,"Step 2001, loss: 1.4879850149154663, step time: 25.03824234008789ms\r\n",,terminal_output +11896,12816060,"TERMINAL",0,0,"Step 2002, loss: 1.4224193096160889, step time: 20.721912384033203ms\r\n",,terminal_output +11897,12816167,"TERMINAL",0,0,"Step 2003, loss: 1.6354082822799683, step time: 20.774364471435547ms\r\nStep 2004, loss: 1.4091814756393433, step time: 19.260406494140625ms\r\n",,terminal_output 
+11898,12816230,"TERMINAL",0,0,"Step 2005, loss: 1.4015296697616577, step time: 19.34075355529785ms\r\n",,terminal_output +11899,12816302,"TERMINAL",0,0,"Step 2006, loss: 1.4117352962493896, step time: 19.078731536865234ms\r\n",,terminal_output +11900,12816363,"TERMINAL",0,0,"Step 2007, loss: 1.4038692712783813, step time: 19.623279571533203ms\r\n",,terminal_output +11901,12816430,"TERMINAL",0,0,"Step 2008, loss: 1.499242901802063, step time: 18.60332489013672ms\r\n",,terminal_output +11902,12816492,"TERMINAL",0,0,"Step 2009, loss: 1.3966716527938843, step time: 19.646406173706055ms\r\n",,terminal_output +11903,12816561,"TERMINAL",0,0,"Step 2010, loss: 1.3853368759155273, step time: 19.435405731201172ms\r\n",,terminal_output +11904,12816627,"TERMINAL",0,0,"Step 2011, loss: 1.3961970806121826, step time: 19.586801528930664ms\r\n",,terminal_output +11905,12816689,"TERMINAL",0,0,"Step 2012, loss: 1.417436957359314, step time: 18.959760665893555ms\r\n",,terminal_output +11906,12816784,"TERMINAL",0,0,"Step 2013, loss: 1.446713924407959, step time: 19.014596939086914ms\r\n",,terminal_output +11907,12816838,"TERMINAL",0,0,"Step 2014, loss: 1.4485613107681274, step time: 26.64637565612793ms\r\n",,terminal_output +11908,12816892,"TERMINAL",0,0,"Step 2015, loss: 1.4017269611358643, step time: 20.226716995239258ms\r\n",,terminal_output +11909,12816988,"TERMINAL",0,0,"Step 2016, loss: 1.3724863529205322, step time: 18.98980140686035ms\r\n",,terminal_output +11910,12817070,"TERMINAL",0,0,"Step 2017, loss: 1.5061731338500977, step time: 19.07968521118164ms\r\nStep 2018, loss: 1.4362413883209229, step time: 19.000768661499023ms\r\n",,terminal_output +11911,12817205,"TERMINAL",0,0,"Step 2019, loss: 1.38774573802948, step time: 19.029617309570312ms\r\nStep 2020, loss: 1.3825551271438599, step time: 18.70274543762207ms\r\n",,terminal_output +11912,12817298,"TERMINAL",0,0,"Step 2021, loss: 1.384101390838623, step time: 19.308090209960938ms\r\n",,terminal_output +11913,12817364,"TERMINAL",0,0,"Step 2022, loss: 1.466249942779541, step time: 18.651962280273438ms\r\n",,terminal_output +11914,12817423,"TERMINAL",0,0,"Step 2023, loss: 1.421072006225586, step time: 19.09780502319336ms\r\n",,terminal_output +11915,12817488,"TERMINAL",0,0,"Step 2024, loss: 1.5613389015197754, step time: 18.85080337524414ms\r\n",,terminal_output +11916,12817549,"TERMINAL",0,0,"Step 2025, loss: 1.7984997034072876, step time: 18.86296272277832ms\r\n",,terminal_output +11917,12817614,"TERMINAL",0,0,"Step 2026, loss: 1.3793219327926636, step time: 18.752336502075195ms\r\n",,terminal_output +11918,12817680,"TERMINAL",0,0,"Step 2027, loss: 1.6146713495254517, step time: 19.10853385925293ms\r\n",,terminal_output +11919,12817742,"TERMINAL",0,0,"Step 2028, loss: 1.5643583536148071, step time: 18.571853637695312ms\r\n",,terminal_output +11920,12817804,"TERMINAL",0,0,"Step 2029, loss: 1.4069136381149292, step time: 18.693923950195312ms\r\n",,terminal_output +11921,12817909,"TERMINAL",0,0,"Step 2030, loss: 1.3944593667984009, step time: 22.954225540161133ms\r\nStep 2031, loss: 1.5215134620666504, step time: 18.911123275756836ms\r\n",,terminal_output +11922,12818006,"TERMINAL",0,0,"Step 2032, loss: 1.4200276136398315, step time: 18.49222183227539ms\r\n",,terminal_output +11923,12818121,"TERMINAL",0,0,"Step 2033, loss: 1.424425482749939, step time: 19.028902053833008ms\r\nStep 2034, loss: 1.4409735202789307, step time: 18.732070922851562ms\r\n",,terminal_output +11924,12818182,"TERMINAL",0,0,"Step 2035, loss: 1.4226375818252563, step time: 
18.87202262878418ms\r\n",,terminal_output +11925,12818248,"TERMINAL",0,0,"Step 2036, loss: 1.4421724081039429, step time: 18.77570152282715ms\r\n",,terminal_output +11926,12818310,"TERMINAL",0,0,"Step 2037, loss: 1.6221891641616821, step time: 18.711328506469727ms\r\n",,terminal_output +11927,12818376,"TERMINAL",0,0,"Step 2038, loss: 1.3908287286758423, step time: 18.61262321472168ms\r\n",,terminal_output +11928,12818483,"TERMINAL",0,0,"Step 2039, loss: 1.6308828592300415, step time: 19.00768280029297ms\r\nStep 2040, loss: 1.3933733701705933, step time: 18.617868423461914ms\r\n",,terminal_output +11929,12818549,"TERMINAL",0,0,"Step 2041, loss: 1.394451379776001, step time: 18.96071434020996ms\r\n",,terminal_output +11930,12818678,"TERMINAL",0,0,"Step 2042, loss: 1.4245376586914062, step time: 18.787145614624023ms\r\nStep 2043, loss: 1.409900188446045, step time: 18.788576126098633ms\r\n",,terminal_output +11931,12818735,"TERMINAL",0,0,"Step 2044, loss: 1.3882381916046143, step time: 18.49365234375ms\r\n",,terminal_output +11932,12818806,"TERMINAL",0,0,"Step 2045, loss: 1.4004251956939697, step time: 19.283771514892578ms\r\n",,terminal_output +11933,12818871,"TERMINAL",0,0,"Step 2046, loss: 1.3911681175231934, step time: 18.8751220703125ms\r\n",,terminal_output +11934,12818926,"TERMINAL",0,0,"Step 2047, loss: 1.3705488443374634, step time: 18.750429153442383ms\r\n",,terminal_output +11935,12819021,"TERMINAL",0,0,"Step 2048, loss: 1.3917179107666016, step time: 18.89944076538086ms\r\n",,terminal_output +11936,12819073,"TERMINAL",0,0,"Step 2049, loss: 1.3677434921264648, step time: 18.679380416870117ms\r\n",,terminal_output +11937,12819245,"TERMINAL",0,0,"Step 2050, loss: 1.4042110443115234, step time: 18.342018127441406ms\r\nStep 2051, loss: 1.3725184202194214, step time: 18.791675567626953ms\r\nStep 2052, loss: 1.373321294784546, step time: 18.2497501373291ms\r\n",,terminal_output +11938,12819313,"TERMINAL",0,0,"Step 2053, loss: 1.575629472732544, step time: 18.312931060791016ms\r\n",,terminal_output +11939,12819374,"TERMINAL",0,0,"Step 2054, loss: 1.4362879991531372, step time: 18.552780151367188ms\r\n",,terminal_output +11940,12819438,"TERMINAL",0,0,"Step 2055, loss: 1.3684941530227661, step time: 18.5849666595459ms\r\n",,terminal_output +11941,12819505,"TERMINAL",0,0,"Step 2056, loss: 1.3757036924362183, step time: 18.298625946044922ms\r\n",,terminal_output +11942,12819570,"TERMINAL",0,0,"Step 2057, loss: 1.401928186416626, step time: 18.78070831298828ms\r\n",,terminal_output +11943,12819633,"TERMINAL",0,0,"Step 2058, loss: 1.3471606969833374, step time: 18.229007720947266ms\r\n",,terminal_output +11944,12819696,"TERMINAL",0,0,"Step 2059, loss: 1.3617608547210693, step time: 18.746614456176758ms\r\n",,terminal_output +11945,12819781,"TERMINAL",0,0,"Step 2060, loss: 1.3445185422897339, step time: 18.572330474853516ms\r\n",,terminal_output +11946,12819887,"TERMINAL",0,0,"Step 2061, loss: 1.3688803911209106, step time: 18.579721450805664ms\r\nStep 2062, loss: 1.4630850553512573, step time: 18.252134323120117ms\r\n",,terminal_output +11947,12819958,"TERMINAL",0,0,"Step 2063, loss: 1.347203016281128, step time: 18.726587295532227ms\r\n",,terminal_output +11948,12820017,"TERMINAL",0,0,"Step 2064, loss: 1.3586763143539429, step time: 18.27836036682129ms\r\n",,terminal_output +11949,12820111,"TERMINAL",0,0,"Step 2065, loss: 1.4755948781967163, step time: 18.583297729492188ms\r\n",,terminal_output +11950,12820163,"TERMINAL",0,0,"Step 2066, loss: 1.4671467542648315, step time: 
18.538236618041992ms\r\n",,terminal_output +11951,12820214,"TERMINAL",0,0,"Step 2067, loss: 1.842976689338684, step time: 18.529415130615234ms\r\n",,terminal_output +11952,12820358,"TERMINAL",0,0,"Step 2068, loss: 1.3803340196609497, step time: 18.311500549316406ms\r\nStep 2069, loss: 1.3791569471359253, step time: 21.98004722595215ms\r\n",,terminal_output +11953,12820410,"TERMINAL",0,0,"Step 2070, loss: 1.347291111946106, step time: 18.810749053955078ms\r\n",,terminal_output +11954,12820503,"TERMINAL",0,0,"Step 2071, loss: 1.4710404872894287, step time: 19.17886734008789ms\r\n",,terminal_output +11955,12820553,"TERMINAL",0,0,"Step 2072, loss: 2.128089427947998, step time: 18.574237823486328ms\r\n",,terminal_output +11956,12820609,"TERMINAL",0,0,"Step 2073, loss: 1.3793853521347046, step time: 18.66769790649414ms\r\n",,terminal_output +11957,12820716,"TERMINAL",0,0,"Step 2074, loss: 1.3558799028396606, step time: 18.433332443237305ms\r\nStep 2075, loss: 1.3954100608825684, step time: 18.94378662109375ms\r\n",,terminal_output +11958,12820777,"TERMINAL",0,0,"Step 2076, loss: 1.3578218221664429, step time: 18.388032913208008ms\r\n",,terminal_output +11959,12820846,"TERMINAL",0,0,"Step 2077, loss: 1.3736789226531982, step time: 18.558263778686523ms\r\n",,terminal_output +11960,12820900,"TERMINAL",0,0,"Step 2078, loss: 1.4349406957626343, step time: 18.557310104370117ms\r\n",,terminal_output +11961,12820995,"TERMINAL",0,0,"Step 2079, loss: 1.3659820556640625, step time: 18.651723861694336ms\r\n",,terminal_output +11962,12821047,"TERMINAL",0,0,"Step 2080, loss: 2.276493787765503, step time: 18.3866024017334ms\r\n",,terminal_output +11963,12821156,"TERMINAL",0,0,"Step 2081, loss: 1.3399873971939087, step time: 19.382476806640625ms\r\nStep 2082, loss: 1.3533337116241455, step time: 18.61405372619629ms\r\n",,terminal_output +11964,12821251,"TERMINAL",0,0,"Step 2083, loss: 1.4456526041030884, step time: 18.689393997192383ms\r\n",,terminal_output +11965,12821305,"TERMINAL",0,0,"Step 2084, loss: 1.3869736194610596, step time: 18.67198944091797ms\r\n",,terminal_output +11966,12821414,"TERMINAL",0,0,"Step 2085, loss: 1.3491270542144775, step time: 18.637418746948242ms\r\nStep 2086, loss: 1.3688212633132935, step time: 18.435239791870117ms\r\n",,terminal_output +11967,12821475,"TERMINAL",0,0,"Step 2087, loss: 1.3506968021392822, step time: 18.79405975341797ms\r\n",,terminal_output +11968,12821543,"TERMINAL",0,0,"Step 2088, loss: 1.356231451034546, step time: 18.25881004333496ms\r\n",,terminal_output +11969,12821605,"TERMINAL",0,0,"Step 2089, loss: 1.4116244316101074, step time: 18.501996994018555ms\r\n",,terminal_output +11970,12821669,"TERMINAL",0,0,"Step 2090, loss: 1.3756582736968994, step time: 18.540620803833008ms\r\n",,terminal_output +11971,12821734,"TERMINAL",0,0,"Step 2091, loss: 1.8360974788665771, step time: 18.57614517211914ms\r\n",,terminal_output +11972,12821795,"TERMINAL",0,0,"Step 2092, loss: 1.3885154724121094, step time: 18.399953842163086ms\r\n",,terminal_output +11973,12821858,"TERMINAL",0,0,"Step 2093, loss: 1.3788975477218628, step time: 18.84770393371582ms\r\n",,terminal_output +11974,12821922,"TERMINAL",0,0,"Step 2094, loss: 1.3519712686538696, step time: 18.443822860717773ms\r\n",,terminal_output +11975,12821983,"TERMINAL",0,0,"Step 2095, loss: 1.33591628074646, step time: 18.545150756835938ms\r\n",,terminal_output +11976,12822046,"TERMINAL",0,0,"Step 2096, loss: 1.3758955001831055, step time: 18.85843276977539ms\r\n",,terminal_output +11977,12822107,"TERMINAL",0,0,"Step 2097, 
loss: 1.3529837131500244, step time: 18.62812042236328ms\r\n",,terminal_output +11978,12822170,"TERMINAL",0,0,"Step 2098, loss: 1.3504546880722046, step time: 18.398523330688477ms\r\n",,terminal_output +11979,12822234,"TERMINAL",0,0,"Step 2099, loss: 1.3822344541549683, step time: 18.86725425720215ms\r\n",,terminal_output +11980,12822335,"TERMINAL",0,0,"Step 2100, loss: 1.3512487411499023, step time: 18.561840057373047ms\r\n",,terminal_output +11981,12822387,"TERMINAL",0,0,"Step 2101, loss: 1.3349019289016724, step time: 18.89944076538086ms\r\n",,terminal_output +11982,12822494,"TERMINAL",0,0,"Step 2102, loss: 1.383386492729187, step time: 18.776893615722656ms\r\nStep 2103, loss: 1.333329677581787, step time: 18.938302993774414ms\r\n",,terminal_output +11983,12822567,"TERMINAL",0,0,"Step 2104, loss: 1.3355833292007446, step time: 18.634319305419922ms\r\n",,terminal_output +11984,12822624,"TERMINAL",0,0,"Step 2105, loss: 1.3559095859527588, step time: 18.826961517333984ms\r\n",,terminal_output +11985,12822686,"TERMINAL",0,0,"Step 2106, loss: 1.3520913124084473, step time: 18.405675888061523ms\r\n",,terminal_output +11986,12822752,"TERMINAL",0,0,"Step 2107, loss: 1.3299468755722046, step time: 18.572330474853516ms\r\n",,terminal_output +11987,12822821,"TERMINAL",0,0,"Step 2108, loss: 1.3627252578735352, step time: 18.918991088867188ms\r\n",,terminal_output +11988,12822872,"TERMINAL",0,0,"Step 2109, loss: 1.6086677312850952, step time: 18.622636795043945ms\r\n",,terminal_output +11989,12822936,"TERMINAL",0,0,"Step 2110, loss: 1.3442940711975098, step time: 18.97263526916504ms\r\n",,terminal_output +11990,12823063,"TERMINAL",0,0,"Step 2111, loss: 1.3268812894821167, step time: 19.527196884155273ms\r\nStep 2112, loss: 1.3903814554214478, step time: 18.640995025634766ms\r\n",,terminal_output +11991,12823128,"TERMINAL",0,0,"Step 2113, loss: 1.4074625968933105, step time: 18.815994262695312ms\r\n",,terminal_output +11992,12823199,"TERMINAL",0,0,"Step 2114, loss: 1.3314224481582642, step time: 18.808364868164062ms\r\n",,terminal_output +11993,12823261,"TERMINAL",0,0,"Step 2115, loss: 1.313434362411499, step time: 18.709659576416016ms\r\n",,terminal_output +11994,12823358,"TERMINAL",0,0,"Step 2116, loss: 1.3409830331802368, step time: 18.459796905517578ms\r\n",,terminal_output +11995,12823411,"TERMINAL",0,0,"Step 2117, loss: 1.3130637407302856, step time: 18.92709732055664ms\r\n",,terminal_output +11996,12823519,"TERMINAL",0,0,"Step 2118, loss: 1.7430081367492676, step time: 18.446683883666992ms\r\nStep 2119, loss: 1.2990522384643555, step time: 18.748044967651367ms\r\n",,terminal_output +11997,12823633,"TERMINAL",0,0,"Step 2120, loss: 1.3079363107681274, step time: 18.52703094482422ms\r\nStep 2121, loss: 1.9440373182296753, step time: 18.62335205078125ms\r\n",,terminal_output +11998,12823736,"TERMINAL",0,0,"Step 2122, loss: 1.3260306119918823, step time: 18.419504165649414ms\r\n",,terminal_output +11999,12823791,"TERMINAL",0,0,"Step 2123, loss: 1.3207863569259644, step time: 18.805980682373047ms\r\n",,terminal_output +12000,12823895,"TERMINAL",0,0,"Step 2124, loss: 1.6601463556289673, step time: 18.256425857543945ms\r\nStep 2125, loss: 1.3237158060073853, step time: 18.527746200561523ms\r\n",,terminal_output +12001,12823958,"TERMINAL",0,0,"Step 2126, loss: 1.4557899236679077, step time: 18.61262321472168ms\r\n",,terminal_output +12002,12824018,"TERMINAL",0,0,"Step 2127, loss: 1.3370250463485718, step time: 18.56827735900879ms\r\n",,terminal_output +12003,12824082,"TERMINAL",0,0,"Step 2128, loss: 
1.3230316638946533, step time: 18.253564834594727ms\r\n",,terminal_output +12004,12824142,"TERMINAL",0,0,"Step 2129, loss: 1.3381540775299072, step time: 18.829822540283203ms\r\n",,terminal_output +12005,12824207,"TERMINAL",0,0,"Step 2130, loss: 1.3449156284332275, step time: 18.24665069580078ms\r\n",,terminal_output +12006,12824268,"TERMINAL",0,0,"Step 2131, loss: 1.3999309539794922, step time: 18.607616424560547ms\r\n",,terminal_output +12007,12824331,"TERMINAL",0,0,"Step 2132, loss: 1.3632550239562988, step time: 18.578529357910156ms\r\n",,terminal_output +12008,12824396,"TERMINAL",0,0,"Step 2133, loss: 1.3426275253295898, step time: 18.65077018737793ms\r\n",,terminal_output +12009,12824461,"TERMINAL",0,0,"Step 2134, loss: 1.3244268894195557, step time: 18.253803253173828ms\r\n",,terminal_output +12010,12824523,"TERMINAL",0,0,"Step 2135, loss: 1.3071955442428589, step time: 19.193649291992188ms\r\n",,terminal_output +12011,12824585,"TERMINAL",0,0,"Step 2136, loss: 1.3090986013412476, step time: 18.37158203125ms\r\n",,terminal_output +12012,12824655,"TERMINAL",0,0,"Step 2137, loss: 1.3471726179122925, step time: 18.75162124633789ms\r\n",,terminal_output +12013,12824715,"TERMINAL",0,0,"Step 2138, loss: 1.3075847625732422, step time: 18.76688003540039ms\r\n",,terminal_output +12014,12824786,"TERMINAL",0,0,"Step 2139, loss: 1.8505326509475708, step time: 18.717288970947266ms\r\n",,terminal_output +12015,12824903,"TERMINAL",0,0,"Step 2140, loss: 1.3011473417282104, step time: 18.42474937438965ms\r\nStep 2141, loss: 1.307906150817871, step time: 18.902301788330078ms\r\n",,terminal_output +12016,12824998,"TERMINAL",0,0,"Step 2142, loss: 1.4422287940979004, step time: 18.63837242126465ms\r\n",,terminal_output +12017,12825049,"TERMINAL",0,0,"Step 2143, loss: 1.5374155044555664, step time: 19.58179473876953ms\r\n",,terminal_output +12018,12825142,"TERMINAL",0,0,"Step 2144, loss: 1.523100733757019, step time: 19.03390884399414ms\r\n",,terminal_output +12019,12825196,"TERMINAL",0,0,"Step 2145, loss: 1.3196547031402588, step time: 18.88728141784668ms\r\n",,terminal_output +12020,12825301,"TERMINAL",0,0,"Step 2146, loss: 1.2936207056045532, step time: 18.529415130615234ms\r\nStep 2147, loss: 1.4981826543807983, step time: 18.78213882446289ms\r\n",,terminal_output +12021,12825363,"TERMINAL",0,0,"Step 2148, loss: 1.3619214296340942, step time: 19.52052116394043ms\r\n",,terminal_output +12022,12825421,"TERMINAL",0,0,"Step 2149, loss: 1.334753155708313, step time: 18.837451934814453ms\r\n",,terminal_output +12023,12825486,"TERMINAL",0,0,"Step 2150, loss: 1.3184887170791626, step time: 18.70441436767578ms\r\n",,terminal_output +12024,12825562,"TERMINAL",0,0,"Step 2151, loss: 1.4303134679794312, step time: 18.54085922241211ms\r\n",,terminal_output +12025,12825622,"TERMINAL",0,0,"Step 2152, loss: 1.623073935508728, step time: 18.36538314819336ms\r\n",,terminal_output +12026,12825693,"TERMINAL",0,0,"Step 2153, loss: 1.2978843450546265, step time: 18.748760223388672ms\r\n",,terminal_output +12027,12825822,"TERMINAL",0,0,"Step 2154, loss: 1.5174436569213867, step time: 18.368244171142578ms\r\nStep 2155, loss: 1.321915864944458, step time: 18.72730255126953ms\r\n",,terminal_output +12028,12825874,"TERMINAL",0,0,"Step 2156, loss: 1.540778636932373, step time: 18.94521713256836ms\r\n",,terminal_output +12029,12825939,"TERMINAL",0,0,"Step 2157, loss: 1.3140350580215454, step time: 18.772602081298828ms\r\n",,terminal_output +12030,12825999,"TERMINAL",0,0,"Step 2158, loss: 1.3268741369247437, step time: 
18.644332885742188ms\r\n",,terminal_output +12031,12826063,"TERMINAL",0,0,"Step 2159, loss: 1.3036859035491943, step time: 19.19078826904297ms\r\n",,terminal_output +12032,12826124,"TERMINAL",0,0,"Step 2160, loss: 1.3016120195388794, step time: 18.63837242126465ms\r\n",,terminal_output +12033,12826232,"TERMINAL",0,0,"Step 2161, loss: 1.3115184307098389, step time: 20.013809204101562ms\r\n",,terminal_output +12034,12826283,"TERMINAL",0,0,"Step 2162, loss: 1.290439248085022, step time: 19.071578979492188ms\r\n",,terminal_output +12035,12826378,"TERMINAL",0,0,"Step 2163, loss: 1.4618971347808838, step time: 18.6922550201416ms\r\n",,terminal_output +12036,12826430,"TERMINAL",0,0,"Step 2164, loss: 1.3533211946487427, step time: 18.588542938232422ms\r\n",,terminal_output +12037,12826483,"TERMINAL",0,0,"Step 2165, loss: 1.3129404783248901, step time: 18.95284652709961ms\r\n",,terminal_output +12038,12826539,"TERMINAL",0,0,"Step 2166, loss: 1.335118293762207, step time: 18.582820892333984ms\r\n",,terminal_output +12039,12826602,"TERMINAL",0,0,"Step 2167, loss: 1.3155196905136108, step time: 18.671035766601562ms\r\n",,terminal_output +12040,12826903,"TERMINAL",0,0,"Step 2168, loss: 1.5195485353469849, step time: 315.6464099884033ms\r\n",,terminal_output +12041,12826969,"TERMINAL",0,0,"Step 2169, loss: 1.3110771179199219, step time: 26.308298110961914ms\r\n",,terminal_output +12042,12827098,"TERMINAL",0,0,"Step 2170, loss: 1.3036309480667114, step time: 21.32892608642578ms\r\nStep 2171, loss: 1.3279438018798828, step time: 19.814252853393555ms\r\n",,terminal_output +12043,12827162,"TERMINAL",0,0,"Step 2172, loss: 1.2876250743865967, step time: 18.944978713989258ms\r\n",,terminal_output +12044,12827226,"TERMINAL",0,0,"Step 2173, loss: 1.3417888879776, step time: 18.738508224487305ms\r\n",,terminal_output +12045,12827318,"TERMINAL",0,0,"Step 2174, loss: 1.2870084047317505, step time: 19.12975311279297ms\r\n",,terminal_output +12046,12827369,"TERMINAL",0,0,"Step 2175, loss: 1.2969704866409302, step time: 18.611907958984375ms\r\n",,terminal_output +12047,12827463,"TERMINAL",0,0,"Step 2176, loss: 1.282275676727295, step time: 18.51034164428711ms\r\n",,terminal_output +12048,12827515,"TERMINAL",0,0,"Step 2177, loss: 1.3042714595794678, step time: 18.846511840820312ms\r\n",,terminal_output +12049,12827578,"TERMINAL",0,0,"Step 2178, loss: 1.4297467470169067, step time: 18.404722213745117ms\r\n",,terminal_output +12050,12827643,"TERMINAL",0,0,"Step 2179, loss: 1.3018118143081665, step time: 18.550395965576172ms\r\n",,terminal_output +12051,12827709,"TERMINAL",0,0,"Step 2180, loss: 1.456557273864746, step time: 18.88132095336914ms\r\n",,terminal_output +12052,12827773,"TERMINAL",0,0,"Step 2181, loss: 1.283263921737671, step time: 18.546581268310547ms\r\n",,terminal_output +12053,12827869,"TERMINAL",0,0,"Step 2182, loss: 1.3026821613311768, step time: 18.452882766723633ms\r\nStep 2183, loss: 1.273237943649292, step time: 18.98789405822754ms\r\n",,terminal_output +12054,12827967,"TERMINAL",0,0,"Step 2184, loss: 1.2809776067733765, step time: 18.477916717529297ms\r\n",,terminal_output +12055,12828030,"TERMINAL",0,0,"Step 2185, loss: 1.2882839441299438, step time: 18.640995025634766ms\r\n",,terminal_output +12056,12828092,"TERMINAL",0,0,"Step 2186, loss: 1.2844241857528687, step time: 19.077301025390625ms\r\n",,terminal_output +12057,12828155,"TERMINAL",0,0,"Step 2187, loss: 1.3721815347671509, step time: 18.514394760131836ms\r\n",,terminal_output +12058,12828265,"TERMINAL",0,0,"Step 2188, loss: 
1.319244623184204, step time: 18.6614990234375ms\r\nStep 2189, loss: 1.3406234979629517, step time: 18.97406578063965ms\r\n",,terminal_output +12059,12828330,"TERMINAL",0,0,"Step 2190, loss: 1.271928310394287, step time: 18.51487159729004ms\r\n",,terminal_output +12060,12828390,"TERMINAL",0,0,"Step 2191, loss: 1.280198574066162, step time: 18.557310104370117ms\r\n",,terminal_output +12061,12828454,"TERMINAL",0,0,"Step 2192, loss: 1.3420403003692627, step time: 18.897056579589844ms\r\n",,terminal_output +12062,12828516,"TERMINAL",0,0,"Step 2193, loss: 1.2524566650390625, step time: 18.450498580932617ms\r\n",,terminal_output +12063,12828592,"TERMINAL",0,0,"Step 2194, loss: 1.5286900997161865, step time: 18.42498779296875ms\r\n",,terminal_output +12064,12828645,"TERMINAL",0,0,"Step 2195, loss: 1.6235562562942505, step time: 18.865585327148438ms\r\n",,terminal_output +12065,12828712,"TERMINAL",0,0,"Step 2196, loss: 1.2608433961868286, step time: 18.488645553588867ms\r\n",,terminal_output +12066,12828775,"TERMINAL",0,0,"Step 2197, loss: 1.358688473701477, step time: 18.59307289123535ms\r\n",,terminal_output +12067,12828886,"TERMINAL",0,0,"Step 2198, loss: 1.2608338594436646, step time: 18.88728141784668ms\r\nStep 2199, loss: 1.2632020711898804, step time: 18.540620803833008ms\r\n",,terminal_output +12068,12828949,"TERMINAL",0,0,"Step 2200, loss: 1.2816821336746216, step time: 18.71490478515625ms\r\n",,terminal_output +12069,12829015,"TERMINAL",0,0,"Step 2201, loss: 1.2723966836929321, step time: 19.960403442382812ms\r\n",,terminal_output +12070,12829080,"TERMINAL",0,0,"Step 2202, loss: 1.4690009355545044, step time: 18.612146377563477ms\r\n",,terminal_output +12071,12829141,"TERMINAL",0,0,"Step 2203, loss: 1.2736247777938843, step time: 18.673419952392578ms\r\n",,terminal_output +12072,12829207,"TERMINAL",0,0,"Step 2204, loss: 1.2845720052719116, step time: 18.911123275756836ms\r\n",,terminal_output +12073,12829267,"TERMINAL",0,0,"Step 2205, loss: 1.2684602737426758, step time: 18.233537673950195ms\r\n",,terminal_output +12074,12829388,"TERMINAL",0,0,"Step 2206, loss: 1.2723188400268555, step time: 18.38970184326172ms\r\nStep 2207, loss: 1.3101829290390015, step time: 18.788814544677734ms\r\n",,terminal_output +12075,12829451,"TERMINAL",0,0,"Step 2208, loss: 1.269514560699463, step time: 18.29695701599121ms\r\n",,terminal_output +12076,12829514,"TERMINAL",0,0,"Step 2209, loss: 1.4818663597106934, step time: 18.339157104492188ms\r\n",,terminal_output +12077,12829577,"TERMINAL",0,0,"Step 2210, loss: 1.2831659317016602, step time: 18.924951553344727ms\r\n",,terminal_output +12078,12829645,"TERMINAL",0,0,"Step 2211, loss: 1.2543939352035522, step time: 18.49508285522461ms\r\n",,terminal_output +12079,12829707,"TERMINAL",0,0,"Step 2212, loss: 1.28541898727417, step time: 18.419981002807617ms\r\n",,terminal_output +12080,12829770,"TERMINAL",0,0,"Step 2213, loss: 1.2820072174072266, step time: 18.83244514465332ms\r\n",,terminal_output +12081,12829842,"TERMINAL",0,0,"Step 2214, loss: 1.2605584859848022, step time: 18.4326171875ms\r\n",,terminal_output +12082,12829895,"TERMINAL",0,0,"Step 2215, loss: 1.3011053800582886, step time: 18.525123596191406ms\r\n",,terminal_output +12083,12829967,"TERMINAL",0,0,"Step 2216, loss: 2.3175292015075684, step time: 18.800020217895508ms\r\n",,terminal_output +12084,12830029,"TERMINAL",0,0,"Step 2217, loss: 1.6888154745101929, step time: 18.517494201660156ms\r\n",,terminal_output +12085,12830088,"TERMINAL",0,0,"Step 2218, loss: 1.4006452560424805, step time: 
18.440961837768555ms\r\n",,terminal_output +12086,12830152,"TERMINAL",0,0,"Step 2219, loss: 1.2683576345443726, step time: 18.828630447387695ms\r\n",,terminal_output +12087,12830218,"TERMINAL",0,0,"Step 2220, loss: 1.2918007373809814, step time: 18.419504165649414ms\r\n",,terminal_output +12088,12830312,"TERMINAL",0,0,"Step 2221, loss: 1.3710142374038696, step time: 18.271207809448242ms\r\n",,terminal_output +12089,12830364,"TERMINAL",0,0,"Step 2222, loss: 1.2463093996047974, step time: 20.284652709960938ms\r\n",,terminal_output +12090,12830427,"TERMINAL",0,0,"Step 2223, loss: 1.3008977174758911, step time: 18.93162727355957ms\r\n",,terminal_output +12091,12830553,"TERMINAL",0,0,"Step 2224, loss: 1.2588536739349365, step time: 18.5244083404541ms\r\nStep 2225, loss: 1.2497379779815674, step time: 18.957853317260742ms\r\n",,terminal_output +12092,12830610,"TERMINAL",0,0,"Step 2226, loss: 1.254611611366272, step time: 18.613576889038086ms\r\n",,terminal_output +12093,12830673,"TERMINAL",0,0,"Step 2227, loss: 1.2688755989074707, step time: 18.769264221191406ms\r\n",,terminal_output +12094,12830734,"TERMINAL",0,0,"Step 2228, loss: 1.385254979133606, step time: 18.968582153320312ms\r\n",,terminal_output +12095,12830795,"TERMINAL",0,0,"Step 2229, loss: 1.247185468673706, step time: 19.76943016052246ms\r\n",,terminal_output +12096,12830859,"TERMINAL",0,0,"Step 2230, loss: 1.840462565422058, step time: 18.59140396118164ms\r\n",,terminal_output +12097,12830910,"TERMINAL",0,0,"Step 2231, loss: 1.2602647542953491, step time: 18.9363956451416ms\r\n",,terminal_output +12098,12831008,"TERMINAL",0,0,"Step 2232, loss: 1.264717936515808, step time: 18.613576889038086ms\r\n",,terminal_output +12099,12831059,"TERMINAL",0,0,"Step 2233, loss: 1.2925056219100952, step time: 18.364906311035156ms\r\n",,terminal_output +12100,12831152,"TERMINAL",0,0,"Step 2234, loss: 2.2928733825683594, step time: 18.909692764282227ms\r\n",,terminal_output +12101,12831204,"TERMINAL",0,0,"Step 2235, loss: 1.3463475704193115, step time: 18.29671859741211ms\r\n",,terminal_output +12102,12831255,"TERMINAL",0,0,"Step 2236, loss: 1.4118175506591797, step time: 18.658161163330078ms\r\n",,terminal_output +12103,12831359,"TERMINAL",0,0,"Step 2237, loss: 1.2467594146728516, step time: 18.904924392700195ms\r\nStep 2238, loss: 1.2446327209472656, step time: 18.681764602661133ms\r\n",,terminal_output +12104,12831454,"TERMINAL",0,0,"Step 2239, loss: 1.2368313074111938, step time: 18.268585205078125ms\r\n",,terminal_output +12105,12831506,"TERMINAL",0,0,"Step 2240, loss: 1.267181396484375, step time: 19.006013870239258ms\r\n",,terminal_output +12106,12831613,"TERMINAL",0,0,"Step 2241, loss: 1.268234133720398, step time: 18.309593200683594ms\r\nStep 2242, loss: 1.2515463829040527, step time: 18.601417541503906ms\r\n",,terminal_output +12107,12831708,"TERMINAL",0,0,"Step 2243, loss: 1.686257243156433, step time: 23.138999938964844ms\r\n",,terminal_output +12108,12831759,"TERMINAL",0,0,"Step 2244, loss: 1.3195306062698364, step time: 28.017282485961914ms\r\n",,terminal_output +12109,12831871,"TERMINAL",0,0,"Step 2245, loss: 1.4187637567520142, step time: 24.835824966430664ms\r\nStep 2246, loss: 1.2779849767684937, step time: 26.322126388549805ms\r\n",,terminal_output +12110,12831982,"TERMINAL",0,0,"Step 2247, loss: 1.3085930347442627, step time: 25.293827056884766ms\r\n",,terminal_output +12111,12832036,"TERMINAL",0,0,"Step 2248, loss: 1.2489961385726929, step time: 25.846242904663086ms\r\n",,terminal_output +12112,12832141,"TERMINAL",0,0,"Step 
2249, loss: 1.2475144863128662, step time: 26.48305892944336ms\r\nStep 2250, loss: 1.2573511600494385, step time: 22.95207977294922ms\r\n",,terminal_output +12113,12832202,"TERMINAL",0,0,"Step 2251, loss: 1.2592440843582153, step time: 18.969297409057617ms\r\n",,terminal_output +12114,12832265,"TERMINAL",0,0,"Step 2252, loss: 1.2963279485702515, step time: 19.099950790405273ms\r\n",,terminal_output +12115,12832327,"TERMINAL",0,0,"Step 2253, loss: 1.2428807020187378, step time: 18.370628356933594ms\r\n",,terminal_output +12116,12832390,"TERMINAL",0,0,"Step 2254, loss: 1.2630127668380737, step time: 18.488168716430664ms\r\n",,terminal_output +12117,12832519,"TERMINAL",0,0,"Step 2255, loss: 1.2633569240570068, step time: 18.618106842041016ms\r\nStep 2256, loss: 1.3915290832519531, step time: 18.717527389526367ms\r\n",,terminal_output +12118,12832612,"TERMINAL",0,0,"Step 2257, loss: 1.7057406902313232, step time: 18.301010131835938ms\r\n",,terminal_output +12119,12832664,"TERMINAL",0,0,"Step 2258, loss: 1.254629135131836, step time: 19.023895263671875ms\r\n",,terminal_output +12120,12832811,"TERMINAL",0,0,"Step 2259, loss: 1.2823470830917358, step time: 18.294095993041992ms\r\nStep 2260, loss: 1.257771372795105, step time: 18.959999084472656ms\r\n",,terminal_output +12121,12832901,"TERMINAL",0,0,"Step 2261, loss: 1.2738715410232544, step time: 18.780946731567383ms\r\nStep 2262, loss: 1.2315410375595093, step time: 18.87369155883789ms\r\n",,terminal_output +12122,12832996,"TERMINAL",0,0,"Step 2263, loss: 1.2301888465881348, step time: 18.31960678100586ms\r\n",,terminal_output +12123,12833101,"TERMINAL",0,0,"Step 2264, loss: 1.267040729522705, step time: 19.025325775146484ms\r\nStep 2265, loss: 1.2361550331115723, step time: 18.219947814941406ms\r\n",,terminal_output +12124,12833164,"TERMINAL",0,0,"Step 2266, loss: 1.2393194437026978, step time: 18.63884925842285ms\r\n",,terminal_output +12125,12833233,"TERMINAL",0,0,"Step 2267, loss: 1.4119806289672852, step time: 18.61715316772461ms\r\n",,terminal_output +12126,12833286,"TERMINAL",0,0,"Step 2268, loss: 1.2363981008529663, step time: 18.68891716003418ms\r\n",,terminal_output +12127,12833347,"TERMINAL",0,0,"Step 2269, loss: 1.2289607524871826, step time: 18.187761306762695ms\r\n",,terminal_output +12128,12833411,"TERMINAL",0,0,"Step 2270, loss: 1.2271647453308105, step time: 18.898725509643555ms\r\n",,terminal_output +12129,12833502,"TERMINAL",0,0,"Step 2271, loss: 1.2790749073028564, step time: 18.14413070678711ms\r\n",,terminal_output +12130,12833554,"TERMINAL",0,0,"Step 2272, loss: 1.235882043838501, step time: 18.558025360107422ms\r\n",,terminal_output +12131,12833635,"TERMINAL",0,0,"Step 2273, loss: 1.2218775749206543, step time: 18.593549728393555ms\r\n",,terminal_output +12132,12833687,"TERMINAL",0,0,"Step 2274, loss: 1.4059431552886963, step time: 18.588781356811523ms\r\n",,terminal_output +12133,12833790,"TERMINAL",0,0,"Step 2275, loss: 1.283166766166687, step time: 18.13364028930664ms\r\nStep 2276, loss: 1.3794217109680176, step time: 18.635034561157227ms\r\n",,terminal_output +12134,12833862,"TERMINAL",0,0,"Step 2277, loss: 1.4491654634475708, step time: 17.91834831237793ms\r\n",,terminal_output +12135,12833925,"TERMINAL",0,0,"Step 2278, loss: 1.2308392524719238, step time: 18.350601196289062ms\r\n",,terminal_output +12136,12833986,"TERMINAL",0,0,"Step 2279, loss: 1.2234444618225098, step time: 18.373489379882812ms\r\n",,terminal_output +12137,12834047,"TERMINAL",0,0,"Step 2280, loss: 1.2173106670379639, step time: 
18.736839294433594ms\r\n",,terminal_output +12138,12834108,"TERMINAL",0,0,"Step 2281, loss: 1.2948777675628662, step time: 18.622398376464844ms\r\n",,terminal_output +12139,12834169,"TERMINAL",0,0,"Step 2282, loss: 1.4810142517089844, step time: 18.7530517578125ms\r\n",,terminal_output +12140,12834234,"TERMINAL",0,0,"Step 2283, loss: 1.2119989395141602, step time: 18.546104431152344ms\r\n",,terminal_output +12141,12834299,"TERMINAL",0,0,"Step 2284, loss: 1.2686089277267456, step time: 18.181800842285156ms\r\n",,terminal_output +12142,12834363,"TERMINAL",0,0,"Step 2285, loss: 1.4427738189697266, step time: 18.535852432250977ms\r\n",,terminal_output +12143,12834425,"TERMINAL",0,0,"Step 2286, loss: 1.2728182077407837, step time: 19.19412612915039ms\r\n",,terminal_output +12144,12834489,"TERMINAL",0,0,"Step 2287, loss: 1.2623461484909058, step time: 19.105195999145508ms\r\n",,terminal_output +12145,12834583,"TERMINAL",0,0,"Step 2288, loss: 2.015838623046875, step time: 18.880844116210938ms\r\n",,terminal_output +12146,12834635,"TERMINAL",0,0,"Step 2289, loss: 1.359947919845581, step time: 18.691062927246094ms\r\n",,terminal_output +12147,12834779,"TERMINAL",0,0,"Step 2290, loss: 1.2043497562408447, step time: 17.64082908630371ms\r\nStep 2291, loss: 1.2280024290084839, step time: 18.10431480407715ms\r\n",,terminal_output +12148,12834830,"TERMINAL",0,0,"Step 2292, loss: 1.252158522605896, step time: 17.481327056884766ms\r\n",,terminal_output +12149,12834882,"TERMINAL",0,0,"Step 2293, loss: 1.224634051322937, step time: 18.98050308227539ms\r\n",,terminal_output +12150,12834976,"TERMINAL",0,0,"Step 2294, loss: 1.3387084007263184, step time: 18.915414810180664ms\r\n",,terminal_output +12151,12835029,"TERMINAL",0,0,"Step 2295, loss: 1.224588394165039, step time: 17.949342727661133ms\r\n",,terminal_output +12152,12835136,"TERMINAL",0,0,"Step 2296, loss: 1.2566485404968262, step time: 20.236730575561523ms\r\nStep 2297, loss: 1.261184811592102, step time: 19.09661293029785ms\r\n",,terminal_output +12153,12835247,"TERMINAL",0,0,"Step 2298, loss: 1.2437571287155151, step time: 17.891407012939453ms\r\nStep 2299, loss: 1.2582769393920898, step time: 18.328428268432617ms\r\n",,terminal_output +12154,12835310,"TERMINAL",0,0,"Step 2300, loss: 1.4064630270004272, step time: 18.519878387451172ms\r\n",,terminal_output +12155,12835375,"TERMINAL",0,0,"Step 2301, loss: 1.2446796894073486, step time: 21.066904067993164ms\r\n",,terminal_output +12156,12835438,"TERMINAL",0,0,"Step 2302, loss: 1.234575629234314, step time: 18.314599990844727ms\r\n",,terminal_output +12157,12835504,"TERMINAL",0,0,"Step 2303, loss: 1.3060470819473267, step time: 18.162012100219727ms\r\n",,terminal_output +12158,12835569,"TERMINAL",0,0,"Step 2304, loss: 1.7739657163619995, step time: 17.684459686279297ms\r\n",,terminal_output +12159,12835662,"TERMINAL",0,0,"Step 2305, loss: 1.2192625999450684, step time: 17.79961585998535ms\r\n",,terminal_output +12160,12835713,"TERMINAL",0,0,"Step 2306, loss: 1.2267311811447144, step time: 18.044233322143555ms\r\n",,terminal_output +12161,12835817,"TERMINAL",0,0,"Step 2307, loss: 1.2131141424179077, step time: 17.614364624023438ms\r\nStep 2308, loss: 1.2095158100128174, step time: 17.46535301208496ms\r\n",,terminal_output +12162,12835882,"TERMINAL",0,0,"Step 2309, loss: 1.2724508047103882, step time: 18.698453903198242ms\r\n",,terminal_output +12163,12836005,"TERMINAL",0,0,"Step 2310, loss: 1.208298921585083, step time: 17.50493049621582ms\r\nStep 2311, loss: 1.2339465618133545, step time: 
17.59624481201172ms\r\n",,terminal_output +12164,12836068,"TERMINAL",0,0,"Step 2312, loss: 1.2033374309539795, step time: 17.63319969177246ms\r\n",,terminal_output +12165,12836130,"TERMINAL",0,0,"Step 2313, loss: 1.2213526964187622, step time: 17.452239990234375ms\r\n",,terminal_output +12166,12836222,"TERMINAL",0,0,"Step 2314, loss: 1.288852572441101, step time: 17.537593841552734ms\r\n",,terminal_output +12167,12836273,"TERMINAL",0,0,"Step 2315, loss: 1.2020024061203003, step time: 17.778396606445312ms\r\n",,terminal_output +12168,12836366,"TERMINAL",0,0,"Step 2316, loss: 1.1938860416412354, step time: 17.514467239379883ms\r\n",,terminal_output +12169,12836417,"TERMINAL",0,0,"Step 2317, loss: 1.1907975673675537, step time: 17.516374588012695ms\r\n",,terminal_output +12170,12836468,"TERMINAL",0,0,"Step 2318, loss: 1.1997270584106445, step time: 17.7004337310791ms\r\n",,terminal_output +12171,12836572,"TERMINAL",0,0,"Step 2319, loss: 1.1794254779815674, step time: 17.544984817504883ms\r\nStep 2320, loss: 1.213234782218933, step time: 17.42720603942871ms\r\n",,terminal_output +12172,12836633,"TERMINAL",0,0,"Step 2321, loss: 1.2153781652450562, step time: 17.797470092773438ms\r\n",,terminal_output +12173,12836693,"TERMINAL",0,0,"Step 2322, loss: 1.199036717414856, step time: 17.607927322387695ms\r\n",,terminal_output +12174,12836756,"TERMINAL",0,0,"Step 2323, loss: 1.2157363891601562, step time: 17.54593849182129ms\r\n",,terminal_output +12175,12836847,"TERMINAL",0,0,"Step 2324, loss: 2.3970956802368164, step time: 18.923282623291016ms\r\n",,terminal_output +12176,12836899,"TERMINAL",0,0,"Step 2325, loss: 1.1962090730667114, step time: 18.57137680053711ms\r\n",,terminal_output +12177,12836978,"TERMINAL",0,0,"Step 2326, loss: 1.1893776655197144, step time: 17.78721809387207ms\r\n",,terminal_output +12178,12837244,"TERMINAL",0,0,"Step 2327, loss: 1.1982487440109253, step time: 298.2296943664551ms\r\n",,terminal_output +12179,12837311,"TERMINAL",0,0,"Step 2328, loss: 1.195858120918274, step time: 25.146007537841797ms\r\n",,terminal_output +12180,12837373,"TERMINAL",0,0,"Step 2329, loss: 1.1829535961151123, step time: 20.405054092407227ms\r\n",,terminal_output +12181,12837466,"TERMINAL",0,0,"Step 2330, loss: 1.2723873853683472, step time: 18.54681968688965ms\r\n",,terminal_output +12182,12837517,"TERMINAL",0,0,"Step 2331, loss: 1.4506802558898926, step time: 18.065929412841797ms\r\n",,terminal_output +12183,12837623,"TERMINAL",0,0,"Step 2332, loss: 1.1999348402023315, step time: 17.667055130004883ms\r\nStep 2333, loss: 1.5992813110351562, step time: 18.10622215270996ms\r\n",,terminal_output +12184,12837719,"TERMINAL",0,0,"Step 2334, loss: 1.3946822881698608, step time: 17.70758628845215ms\r\n",,terminal_output +12185,12837769,"TERMINAL",0,0,"Step 2335, loss: 1.214855432510376, step time: 18.09215545654297ms\r\n",,terminal_output +12186,12837876,"TERMINAL",0,0,"Step 2336, loss: 1.2162084579467773, step time: 18.125295639038086ms\r\nStep 2337, loss: 1.1913448572158813, step time: 17.974376678466797ms\r\n",,terminal_output +12187,12837977,"TERMINAL",0,0,"Step 2338, loss: 1.2533094882965088, step time: 17.902135848999023ms\r\n",,terminal_output +12188,12838041,"TERMINAL",0,0,"Step 2339, loss: 1.2785166501998901, step time: 18.340110778808594ms\r\n",,terminal_output +12189,12838159,"TERMINAL",0,0,"Step 2340, loss: 1.1826530694961548, step time: 17.84992218017578ms\r\nStep 2341, loss: 1.2167423963546753, step time: 17.93956756591797ms\r\n",,terminal_output +12190,12838266,"TERMINAL",0,0,"Step 2342, 
loss: 1.7889329195022583, step time: 18.0666446685791ms\r\nStep 2343, loss: 1.2100471258163452, step time: 17.774105072021484ms\r\n",,terminal_output +12191,12838337,"TERMINAL",0,0,"Step 2344, loss: 1.2257939577102661, step time: 17.73977279663086ms\r\n",,terminal_output +12192,12838393,"TERMINAL",0,0,"Step 2345, loss: 1.2303471565246582, step time: 18.084049224853516ms\r\n",,terminal_output +12193,12838503,"TERMINAL",0,0,"Step 2346, loss: 1.1793421506881714, step time: 17.66490936279297ms\r\nStep 2347, loss: 1.1983767747879028, step time: 17.947673797607422ms\r\n",,terminal_output +12194,12838568,"TERMINAL",0,0,"Step 2348, loss: 1.1833264827728271, step time: 17.988920211791992ms\r\n",,terminal_output +12195,12838633,"TERMINAL",0,0,"Step 2349, loss: 1.1976242065429688, step time: 17.776966094970703ms\r\n",,terminal_output +12196,12838759,"TERMINAL",0,0,"Step 2350, loss: 1.231698751449585, step time: 17.614364624023438ms\r\nStep 2351, loss: 2.0156021118164062, step time: 17.95482635498047ms\r\n",,terminal_output +12197,12838832,"TERMINAL",0,0,"Step 2352, loss: 1.1801358461380005, step time: 17.61341094970703ms\r\n",,terminal_output +12198,12838883,"TERMINAL",0,0,"Step 2353, loss: 1.197206735610962, step time: 18.64910125732422ms\r\n",,terminal_output +12199,12838950,"TERMINAL",0,0,"Step 2354, loss: 1.2006933689117432, step time: 17.870187759399414ms\r\n",,terminal_output +12200,12839007,"TERMINAL",0,0,"Step 2355, loss: 1.1990324258804321, step time: 17.6694393157959ms\r\n",,terminal_output +12201,12839078,"TERMINAL",0,0,"Step 2356, loss: 1.5753792524337769, step time: 17.469167709350586ms\r\n",,terminal_output +12202,12839141,"TERMINAL",0,0,"Step 2357, loss: 1.3013951778411865, step time: 17.863750457763672ms\r\n",,terminal_output +12203,12839199,"TERMINAL",0,0,"Step 2358, loss: 1.544049859046936, step time: 17.522811889648438ms\r\n",,terminal_output +12204,12839261,"TERMINAL",0,0,"Step 2359, loss: 1.7635178565979004, step time: 17.606735229492188ms\r\n",,terminal_output +12205,12839322,"TERMINAL",0,0,"Step 2360, loss: 1.2233868837356567, step time: 17.763614654541016ms\r\n",,terminal_output +12206,12839384,"TERMINAL",0,0,"Step 2361, loss: 1.1958273649215698, step time: 17.470121383666992ms\r\n",,terminal_output +12207,12839449,"TERMINAL",0,0,"Step 2362, loss: 1.1722979545593262, step time: 17.45438575744629ms\r\n",,terminal_output +12208,12839588,"TERMINAL",0,0,"Step 2363, loss: 1.3442370891571045, step time: 17.89236068725586ms\r\nStep 2364, loss: 1.191846489906311, step time: 17.51232147216797ms\r\n",,terminal_output +12209,12839653,"TERMINAL",0,0,"Step 2365, loss: 1.1802324056625366, step time: 17.969369888305664ms\r\n",,terminal_output +12210,12839713,"TERMINAL",0,0,"Step 2366, loss: 1.1769541501998901, step time: 18.066883087158203ms\r\n",,terminal_output +12211,12839775,"TERMINAL",0,0,"Step 2367, loss: 2.0079355239868164, step time: 17.87400245666504ms\r\n",,terminal_output +12212,12839886,"TERMINAL",0,0,"Step 2368, loss: 1.1857879161834717, step time: 17.671585083007812ms\r\nStep 2369, loss: 1.1976250410079956, step time: 18.117904663085938ms\r\n",,terminal_output +12213,12839985,"TERMINAL",0,0,"Step 2370, loss: 1.1878876686096191, step time: 17.732858657836914ms\r\n",,terminal_output +12214,12840046,"TERMINAL",0,0,"Step 2371, loss: 1.1930383443832397, step time: 17.824649810791016ms\r\n",,terminal_output +12215,12840109,"TERMINAL",0,0,"Step 2372, loss: 1.1935129165649414, step time: 17.975568771362305ms\r\n",,terminal_output +12216,12840172,"TERMINAL",0,0,"Step 2373, loss: 
1.2925124168395996, step time: 17.732858657836914ms\r\n",,terminal_output +12217,12840236,"TERMINAL",0,0,"Step 2374, loss: 1.1796600818634033, step time: 17.64059066772461ms\r\n",,terminal_output +12218,12840303,"TERMINAL",0,0,"Step 2375, loss: 1.4009724855422974, step time: 17.975568771362305ms\r\n",,terminal_output +12219,12840363,"TERMINAL",0,0,"Step 2376, loss: 1.20302414894104, step time: 17.66228675842285ms\r\n",,terminal_output +12220,12840427,"TERMINAL",0,0,"Step 2377, loss: 1.2027692794799805, step time: 20.33400535583496ms\r\n",,terminal_output +12221,12840490,"TERMINAL",0,0,"Step 2378, loss: 1.2115938663482666, step time: 18.54562759399414ms\r\n",,terminal_output +12222,12840555,"TERMINAL",0,0,"Step 2379, loss: 1.2850191593170166, step time: 18.16248893737793ms\r\n",,terminal_output +12223,12840673,"TERMINAL",0,0,"Step 2380, loss: 1.2043451070785522, step time: 17.91524887084961ms\r\nStep 2381, loss: 1.1630845069885254, step time: 18.087387084960938ms\r\n",,terminal_output +12224,12840781,"TERMINAL",0,0,"Step 2382, loss: 1.2212326526641846, step time: 17.749309539794922ms\r\nStep 2383, loss: 1.1906993389129639, step time: 17.7462100982666ms\r\n",,terminal_output +12225,12840846,"TERMINAL",0,0,"Step 2384, loss: 1.2637088298797607, step time: 17.88949966430664ms\r\n",,terminal_output +12226,12840913,"TERMINAL",0,0,"Step 2385, loss: 1.8429911136627197, step time: 17.815828323364258ms\r\n",,terminal_output +12227,12840976,"TERMINAL",0,0,"Step 2386, loss: 1.2168132066726685, step time: 17.714977264404297ms\r\n",,terminal_output +12228,12841049,"TERMINAL",0,0,"Step 2387, loss: 1.190616250038147, step time: 18.17941665649414ms\r\n",,terminal_output +12229,12841111,"TERMINAL",0,0,"Step 2388, loss: 1.3430390357971191, step time: 17.60101318359375ms\r\n",,terminal_output +12230,12841172,"TERMINAL",0,0,"Step 2389, loss: 1.1970072984695435, step time: 17.776966094970703ms\r\n",,terminal_output +12231,12841282,"TERMINAL",0,0,"Step 2390, loss: 1.204503059387207, step time: 17.95792579650879ms\r\nStep 2391, loss: 1.4810410737991333, step time: 17.792224884033203ms\r\n",,terminal_output +12232,12841339,"TERMINAL",0,0,"Step 2392, loss: 1.2078208923339844, step time: 17.813920974731445ms\r\n",,terminal_output +12233,12841401,"TERMINAL",0,0,"Step 2393, loss: 1.1604204177856445, step time: 18.041610717773438ms\r\n",,terminal_output +12234,12841466,"TERMINAL",0,0,"Step 2394, loss: 1.2411779165267944, step time: 17.64059066772461ms\r\n",,terminal_output +12235,12841527,"TERMINAL",0,0,"Step 2395, loss: 1.2088124752044678, step time: 17.737865447998047ms\r\n",,terminal_output +12236,12841594,"TERMINAL",0,0,"Step 2396, loss: 1.1675080060958862, step time: 17.73667335510254ms\r\n",,terminal_output +12237,12841656,"TERMINAL",0,0,"Step 2397, loss: 1.192911148071289, step time: 17.555713653564453ms\r\n",,terminal_output +12238,12841720,"TERMINAL",0,0,"Step 2398, loss: 1.1610620021820068, step time: 17.53520965576172ms\r\n",,terminal_output +12239,12841790,"TERMINAL",0,0,"Step 2399, loss: 1.144058346748352, step time: 17.80867576599121ms\r\n",,terminal_output +12240,12841848,"TERMINAL",0,0,"Step 2400, loss: 1.1720879077911377, step time: 17.479896545410156ms\r\n",,terminal_output +12241,12841911,"TERMINAL",0,0,"Step 2401, loss: 1.1741255521774292, step time: 17.554759979248047ms\r\n",,terminal_output +12242,12841971,"TERMINAL",0,0,"Step 2402, loss: 1.1562227010726929, step time: 17.607450485229492ms\r\n",,terminal_output +12243,12842032,"TERMINAL",0,0,"Step 2403, loss: 2.0779566764831543, step time: 
17.679691314697266ms\r\n",,terminal_output +12244,12842094,"TERMINAL",0,0,"Step 2404, loss: 1.2389576435089111, step time: 17.53973960876465ms\r\n",,terminal_output +12245,12842156,"TERMINAL",0,0,"Step 2405, loss: 1.17007315158844, step time: 17.728567123413086ms\r\n",,terminal_output +12246,12842219,"TERMINAL",0,0,"Step 2406, loss: 1.3129006624221802, step time: 17.367839813232422ms\r\n",,terminal_output +12247,12842283,"TERMINAL",0,0,"Step 2407, loss: 1.1691248416900635, step time: 17.44818687438965ms\r\n",,terminal_output +12248,12842348,"TERMINAL",0,0,"Step 2408, loss: 1.1942875385284424, step time: 17.630577087402344ms\r\n",,terminal_output +12249,12842409,"TERMINAL",0,0,"Step 2409, loss: 1.1953288316726685, step time: 17.655372619628906ms\r\n",,terminal_output +12250,12842472,"TERMINAL",0,0,"Step 2410, loss: 1.1553038358688354, step time: 17.36736297607422ms\r\n",,terminal_output +12251,12842534,"TERMINAL",0,0,"Step 2411, loss: 1.1784652471542358, step time: 17.883777618408203ms\r\n",,terminal_output +12252,12842597,"TERMINAL",0,0,"Step 2412, loss: 1.1588863134384155, step time: 17.617225646972656ms\r\n",,terminal_output +12253,12842664,"TERMINAL",0,0,"Step 2413, loss: 1.146419882774353, step time: 17.930984497070312ms\r\n",,terminal_output +12254,12842726,"TERMINAL",0,0,"Step 2414, loss: 1.1758352518081665, step time: 17.812252044677734ms\r\n",,terminal_output +12255,12842789,"TERMINAL",0,0,"Step 2415, loss: 1.1519403457641602, step time: 17.613887786865234ms\r\n",,terminal_output +12256,12842899,"TERMINAL",0,0,"Step 2416, loss: 1.1449637413024902, step time: 17.462968826293945ms\r\nStep 2417, loss: 1.1826212406158447, step time: 17.71068572998047ms\r\n",,terminal_output +12257,12842994,"TERMINAL",0,0,"Step 2418, loss: 1.1524006128311157, step time: 17.431259155273438ms\r\n",,terminal_output +12258,12843103,"TERMINAL",0,0,"Step 2419, loss: 1.2066233158111572, step time: 17.597198486328125ms\r\nStep 2420, loss: 1.148913860321045, step time: 17.65751838684082ms\r\n",,terminal_output +12259,12843166,"TERMINAL",0,0,"Step 2421, loss: 1.5422245264053345, step time: 17.60268211364746ms\r\n",,terminal_output +12260,12843232,"TERMINAL",0,0,"Step 2422, loss: 1.142246127128601, step time: 17.38595962524414ms\r\n",,terminal_output +12261,12843293,"TERMINAL",0,0,"Step 2423, loss: 1.4106652736663818, step time: 17.852067947387695ms\r\n",,terminal_output +12262,12843356,"TERMINAL",0,0,"Step 2424, loss: 1.1473826169967651, step time: 17.418622970581055ms\r\n",,terminal_output +12263,12843419,"TERMINAL",0,0,"Step 2425, loss: 1.1962803602218628, step time: 20.605802536010742ms\r\n",,terminal_output +12264,12843486,"TERMINAL",0,0,"Step 2426, loss: 1.1288093328475952, step time: 17.68183708190918ms\r\n",,terminal_output +12265,12843550,"TERMINAL",0,0,"Step 2427, loss: 1.1455622911453247, step time: 17.874717712402344ms\r\n",,terminal_output +12266,12843657,"TERMINAL",0,0,"Step 2428, loss: 1.1604732275009155, step time: 17.6851749420166ms\r\nStep 2429, loss: 1.136123538017273, step time: 18.100500106811523ms\r\n",,terminal_output +12267,12843760,"TERMINAL",0,0,"Step 2430, loss: 1.2092974185943604, step time: 18.14579963684082ms\r\n",,terminal_output +12268,12843832,"TERMINAL",0,0,"Step 2431, loss: 1.1366735696792603, step time: 17.727136611938477ms\r\n",,terminal_output +12269,12843912,"TERMINAL",0,0,"Step 2432, loss: 1.1953397989273071, step time: 17.87734031677246ms\r\nStep 2433, loss: 1.1194243431091309, step time: 17.70949363708496ms\r\n",,terminal_output +12270,12843975,"TERMINAL",0,0,"Step 2434, 
loss: 1.154388427734375, step time: 17.616748809814453ms\r\n",,terminal_output +12271,12844042,"TERMINAL",0,0,"Step 2435, loss: 1.657551884651184, step time: 18.123865127563477ms\r\n",,terminal_output +12272,12844105,"TERMINAL",0,0,"Step 2436, loss: 1.1557666063308716, step time: 17.765045166015625ms\r\n",,terminal_output +12273,12844167,"TERMINAL",0,0,"Step 2437, loss: 2.005220651626587, step time: 17.733097076416016ms\r\n",,terminal_output +12274,12844231,"TERMINAL",0,0,"Step 2438, loss: 1.1308635473251343, step time: 18.03278923034668ms\r\n",,terminal_output +12275,12844293,"TERMINAL",0,0,"Step 2439, loss: 1.3645585775375366, step time: 17.751216888427734ms\r\n",,terminal_output +12276,12844358,"TERMINAL",0,0,"Step 2440, loss: 1.161795735359192, step time: 17.83466339111328ms\r\n",,terminal_output +12277,12844421,"TERMINAL",0,0,"Step 2441, loss: 1.129930853843689, step time: 18.15509796142578ms\r\n",,terminal_output +12278,12844494,"TERMINAL",0,0,"Step 2442, loss: 1.2412376403808594, step time: 17.749309539794922ms\r\n",,terminal_output +12279,12844559,"TERMINAL",0,0,"Step 2443, loss: 1.1272600889205933, step time: 17.84825325012207ms\r\n",,terminal_output +12280,12844630,"TERMINAL",0,0,"Step 2444, loss: 1.190414309501648, step time: 18.021106719970703ms\r\n",,terminal_output +12281,12844695,"TERMINAL",0,0,"Step 2445, loss: 1.1357890367507935, step time: 17.737150192260742ms\r\n",,terminal_output +12282,12844759,"TERMINAL",0,0,"Step 2446, loss: 1.5157899856567383, step time: 17.70305633544922ms\r\n",,terminal_output +12283,12844823,"TERMINAL",0,0,"Step 2447, loss: 1.1911100149154663, step time: 18.12458038330078ms\r\n",,terminal_output +12284,12844886,"TERMINAL",0,0,"Step 2448, loss: 1.4151241779327393, step time: 17.62843132019043ms\r\n",,terminal_output +12285,12844950,"TERMINAL",0,0,"Step 2449, loss: 1.1611541509628296, step time: 17.69876480102539ms\r\n",,terminal_output +12286,12845059,"TERMINAL",0,0,"Step 2450, loss: 1.150745153427124, step time: 18.01300048828125ms\r\nStep 2451, loss: 1.1717886924743652, step time: 17.901182174682617ms\r\n",,terminal_output +12287,12845174,"TERMINAL",0,0,"Step 2452, loss: 1.4891510009765625, step time: 17.773866653442383ms\r\nStep 2453, loss: 1.3095104694366455, step time: 18.021345138549805ms\r\n",,terminal_output +12288,12845272,"TERMINAL",0,0,"Step 2454, loss: 1.143723726272583, step time: 17.71688461303711ms\r\n",,terminal_output +12289,12845333,"TERMINAL",0,0,"Step 2455, loss: 1.1962013244628906, step time: 17.911434173583984ms\r\n",,terminal_output +12290,12845396,"TERMINAL",0,0,"Step 2456, loss: 1.1305806636810303, step time: 17.92168617248535ms\r\n",,terminal_output +12291,12845459,"TERMINAL",0,0,"Step 2457, loss: 1.2262459993362427, step time: 19.284725189208984ms\r\n",,terminal_output +12292,12845520,"TERMINAL",0,0,"Step 2458, loss: 1.1989939212799072, step time: 18.174171447753906ms\r\n",,terminal_output +12293,12845599,"TERMINAL",0,0,"Step 2459, loss: 1.1416996717453003, step time: 18.221139907836914ms\r\n",,terminal_output +12294,12845653,"TERMINAL",0,0,"Step 2460, loss: 1.1166692972183228, step time: 17.877817153930664ms\r\n",,terminal_output +12295,12845765,"TERMINAL",0,0,"Step 2461, loss: 1.4445509910583496, step time: 17.792701721191406ms\r\n",,terminal_output +12296,12846030,"TERMINAL",0,0,"Step 2462, loss: 1.2917633056640625, step time: 341.3572311401367ms\r\n",,terminal_output +12297,12846094,"TERMINAL",0,0,"Step 2463, loss: 1.5595781803131104, step time: 25.127172470092773ms\r\n",,terminal_output 
+12298,12846157,"TERMINAL",0,0,"Step 2464, loss: 1.1288748979568481, step time: 20.278215408325195ms\r\n",,terminal_output +12299,12846224,"TERMINAL",0,0,"Step 2465, loss: 1.1284070014953613, step time: 18.905162811279297ms\r\n",,terminal_output +12300,12846286,"TERMINAL",0,0,"Step 2466, loss: 1.1346677541732788, step time: 18.23139190673828ms\r\n",,terminal_output +12301,12846350,"TERMINAL",0,0,"Step 2467, loss: 1.166162371635437, step time: 18.135547637939453ms\r\n",,terminal_output +12302,12846414,"TERMINAL",0,0,"Step 2468, loss: 1.1206626892089844, step time: 18.24045181274414ms\r\n",,terminal_output +12303,12846476,"TERMINAL",0,0,"Step 2469, loss: 1.1173789501190186, step time: 17.9440975189209ms\r\n",,terminal_output +12304,12846540,"TERMINAL",0,0,"Step 2470, loss: 1.1444302797317505, step time: 17.93694496154785ms\r\n",,terminal_output +12305,12846604,"TERMINAL",0,0,"Step 2471, loss: 1.11739981174469, step time: 18.347740173339844ms\r\n",,terminal_output +12306,12846667,"TERMINAL",0,0,"Step 2472, loss: 1.1157770156860352, step time: 17.72308349609375ms\r\n",,terminal_output +12307,12846732,"TERMINAL",0,0,"Step 2473, loss: 1.1051442623138428, step time: 18.006324768066406ms\r\n",,terminal_output +12308,12846795,"TERMINAL",0,0,"Step 2474, loss: 1.1727399826049805, step time: 18.143892288208008ms\r\n",,terminal_output +12309,12846861,"TERMINAL",0,0,"Step 2475, loss: 1.5317890644073486, step time: 19.93250846862793ms\r\n",,terminal_output +12310,12846914,"TERMINAL",0,0,"Step 2476, loss: 1.1075553894042969, step time: 18.236160278320312ms\r\n",,terminal_output +12311,12846979,"TERMINAL",0,0,"Step 2477, loss: 1.124735951423645, step time: 18.235206604003906ms\r\n",,terminal_output +12312,12847044,"TERMINAL",0,0,"Step 2478, loss: 1.106002926826477, step time: 18.846750259399414ms\r\n",,terminal_output +12313,12847108,"TERMINAL",0,0,"Step 2479, loss: 1.3291056156158447, step time: 21.998882293701172ms\r\n",,terminal_output +12314,12847173,"TERMINAL",0,0,"Step 2480, loss: 1.1278855800628662, step time: 21.29507064819336ms\r\n",,terminal_output +12315,12847233,"TERMINAL",0,0,"Step 2481, loss: 1.135338306427002, step time: 24.50108528137207ms\r\n",,terminal_output +12316,12847302,"TERMINAL",0,0,"Step 2482, loss: 1.1134307384490967, step time: 23.805856704711914ms\r\n",,terminal_output +12317,12847365,"TERMINAL",0,0,"Step 2483, loss: 1.271790862083435, step time: 23.453950881958008ms\r\n",,terminal_output +12318,12847432,"TERMINAL",0,0,"Step 2484, loss: 1.1306612491607666, step time: 25.376319885253906ms\r\n",,terminal_output +12319,12847499,"TERMINAL",0,0,"Step 2485, loss: 1.1135834455490112, step time: 22.467374801635742ms\r\n",,terminal_output +12320,12847563,"TERMINAL",0,0,"Step 2486, loss: 1.12142813205719, step time: 23.506879806518555ms\r\n",,terminal_output +12321,12847636,"TERMINAL",0,0,"Step 2487, loss: 1.109820008277893, step time: 25.281906127929688ms\r\n",,terminal_output +12322,12847698,"TERMINAL",0,0,"Step 2488, loss: 1.1080573797225952, step time: 23.664236068725586ms\r\n",,terminal_output +12323,12847761,"TERMINAL",0,0,"Step 2489, loss: 1.1091231107711792, step time: 22.20630645751953ms\r\n",,terminal_output +12324,12847830,"TERMINAL",0,0,"Step 2490, loss: 1.1089931726455688, step time: 21.958589553833008ms\r\n",,terminal_output +12325,12847889,"TERMINAL",0,0,"Step 2491, loss: 1.1556671857833862, step time: 25.560617446899414ms\r\n",,terminal_output +12326,12847951,"TERMINAL",0,0,"Step 2492, loss: 1.0919482707977295, step time: 22.470474243164062ms\r\n",,terminal_output 
+12327,12848058,"TERMINAL",0,0,"Step 2493, loss: 1.3522590398788452, step time: 22.898435592651367ms\r\n",,terminal_output +12328,12848111,"TERMINAL",0,0,"Step 2494, loss: 1.0967603921890259, step time: 21.63076400756836ms\r\n",,terminal_output +12329,12848217,"TERMINAL",0,0,"Step 2495, loss: 1.1033103466033936, step time: 25.45762062072754ms\r\nStep 2496, loss: 1.1450138092041016, step time: 23.74410629272461ms\r\n",,terminal_output +12330,12848312,"TERMINAL",0,0,"Step 2497, loss: 1.0826826095581055, step time: 23.866653442382812ms\r\n",,terminal_output +12331,12848417,"TERMINAL",0,0,"Step 2498, loss: 1.392135739326477, step time: 22.342443466186523ms\r\nStep 2499, loss: 1.2265286445617676, step time: 22.5677490234375ms\r\n",,terminal_output +12332,12850979,"TERMINAL",0,0,"Step 2500, loss: 1.1071394681930542, step time: 36.13924980163574ms\r\n",,terminal_output +12333,12851086,"TERMINAL",0,0,"Step 2501, loss: 1.1311979293823242, step time: 28.622150421142578ms\r\n",,terminal_output +12334,12851316,"TERMINAL",0,0,"^C",,terminal_output +12335,12851478,"TERMINAL",0,0,"Exception ignored in: \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/lib/__init__.py"", line 128, in _xla_gc_callback\r\n def _xla_gc_callback(*args):\r\nKeyboardInterrupt: \r\nStep 2502, loss: 1.1110038757324219, step time: 331.9885730743408ms\r\n^CTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 241, in <module>\r\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 341, in __format__\r\n return format(self._value[()], format_spec)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 641, in _value\r\n npy_value, did_copy = self._single_device_array_to_np_array_did_copy()\r\nKeyboardInterrupt\r\n",,terminal_output +12336,12851680,"TERMINAL",0,0,"^CException ignored in atexit callback: .teardown_atexit at 0x1505f00ca680>\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 94, in teardown_atexit\r\n conn.teardown(hooks.exit_code)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 226, in teardown\r\n self._router.join()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/interface/router.py"", line 75, in join\r\n self._thread.join()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1096, in join\r\n self._wait_for_tstate_lock()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1116, in _wait_for_tstate_lock\r\n if lock.acquire(block, timeout):\r\nKeyboardInterrupt: \r\n",,terminal_output
+12337,12851871,"TERMINAL",0,0,"^CException ignored in: <function WeakKeyDictionary.__init__.<locals>.remove at 0x15064e986710>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +12338,12852042,"TERMINAL",0,0,"^CException ignored in: <function WeakKeyDictionary.__init__.<locals>.remove at 0x15064e986710>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +12339,12852237,"TERMINAL",0,0,"^C",,terminal_output +12340,12852374,"TERMINAL",0,0,"Exception ignored in atexit callback: \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3168, in clean_up\r\n clear_backends()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3158, in clear_backends\r\n pjit._infer_params_cached.cache_clear()\r\nKeyboardInterrupt: \r\n",,terminal_output +12341,12852468,"TERMINAL",0,0,"^C",,terminal_output +12342,12852678,"TERMINAL",0,0,"^C",,terminal_output +12343,12852789,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +12344,12852893,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +12345,12856497,"train_dynamics.py",0,0,"",python,tab +12346,12857650,"train_dynamics.py",2849,0,"",python,selection_mouse +12347,12857653,"train_dynamics.py",2848,0,"",python,selection_command +12348,12884951,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +12349,12886657,"scripts_horeka/train_dynamics.sh",1396,0,"",shellscript,selection_mouse +12350,12887221,"scripts_horeka/train_dynamics.sh",1570,0,"",shellscript,selection_mouse +12351,12887731,"scripts_horeka/train_dynamics.sh",1595,0,"",shellscript,selection_mouse +12352,12888263,"scripts_horeka/train_dynamics.sh",1622,0,"",shellscript,selection_mouse +12353,12888812,"scripts_horeka/train_dynamics.sh",1622,0,"\n    ",shellscript,content +12354,12889653,"scripts_horeka/train_dynamics.sh",1627,0,"-",shellscript,content +12355,12889653,"scripts_horeka/train_dynamics.sh",1628,0,"",shellscript,selection_keyboard +12356,12889814,"scripts_horeka/train_dynamics.sh",1628,0,"-",shellscript,content +12357,12889815,"scripts_horeka/train_dynamics.sh",1629,0,"",shellscript,selection_keyboard +12358,12890525,"scripts_horeka/train_dynamics.sh",1628,1,"",shellscript,content +12359,12890680,"scripts_horeka/train_dynamics.sh",1627,1,"",shellscript,content +12360,12890801,"scripts_horeka/train_dynamics.sh",1623,4,"",shellscript,content +12361,12890951,"scripts_horeka/train_dynamics.sh",1622,1,"",shellscript,content +12362,12891433,"scripts_horeka/train_dynamics.sh",1622,0," ",shellscript,content +12363,12891434,"scripts_horeka/train_dynamics.sh",1623,0,"",shellscript,selection_keyboard +12364,12891670,"scripts_horeka/train_dynamics.sh",1623,0,"\",shellscript,content +12365,12891671,"scripts_horeka/train_dynamics.sh",1624,0,"",shellscript,selection_keyboard +12366,12891883,"scripts_horeka/train_dynamics.sh",1624,0,"\n    ",shellscript,content
+12367,12892643,"scripts_horeka/train_dynamics.sh",1629,0,"-",shellscript,content +12368,12892644,"scripts_horeka/train_dynamics.sh",1630,0,"",shellscript,selection_keyboard +12369,12892792,"scripts_horeka/train_dynamics.sh",1630,0,"-",shellscript,content +12370,12892793,"scripts_horeka/train_dynamics.sh",1631,0,"",shellscript,selection_keyboard +12371,12893724,"scripts_horeka/train_dynamics.sh",1631,0,"m",shellscript,content +12372,12893727,"scripts_horeka/train_dynamics.sh",1632,0,"",shellscript,selection_keyboard +12373,12893792,"scripts_horeka/train_dynamics.sh",1632,0,"a",shellscript,content +12374,12893792,"scripts_horeka/train_dynamics.sh",1633,0,"",shellscript,selection_keyboard +12375,12893897,"scripts_horeka/train_dynamics.sh",1633,0,"s",shellscript,content +12376,12893898,"scripts_horeka/train_dynamics.sh",1634,0,"",shellscript,selection_keyboard +12377,12893957,"scripts_horeka/train_dynamics.sh",1634,0,"k",shellscript,content +12378,12893957,"scripts_horeka/train_dynamics.sh",1635,0,"",shellscript,selection_keyboard +12379,12894260,"scripts_horeka/train_dynamics.sh",1635,0,"_",shellscript,content +12380,12894261,"scripts_horeka/train_dynamics.sh",1636,0,"",shellscript,selection_keyboard +12381,12894501,"scripts_horeka/train_dynamics.sh",1636,0,"p",shellscript,content +12382,12894502,"scripts_horeka/train_dynamics.sh",1637,0,"",shellscript,selection_keyboard +12383,12894609,"scripts_horeka/train_dynamics.sh",1637,0,"r",shellscript,content +12384,12894610,"scripts_horeka/train_dynamics.sh",1638,0,"",shellscript,selection_keyboard +12385,12895024,"scripts_horeka/train_dynamics.sh",1638,0,"o",shellscript,content +12386,12895025,"scripts_horeka/train_dynamics.sh",1639,0,"",shellscript,selection_keyboard +12387,12895241,"scripts_horeka/train_dynamics.sh",1639,0,"b",shellscript,content +12388,12895241,"scripts_horeka/train_dynamics.sh",1640,0,"",shellscript,selection_keyboard +12389,12895801,"scripts_horeka/train_dynamics.sh",1640,0,"=",shellscript,content +12390,12895801,"scripts_horeka/train_dynamics.sh",1641,0,"",shellscript,selection_keyboard +12391,12896136,"scripts_horeka/train_dynamics.sh",1641,0,"0",shellscript,content +12392,12896137,"scripts_horeka/train_dynamics.sh",1642,0,"",shellscript,selection_keyboard +12393,12896657,"scripts_horeka/train_dynamics.sh",1642,0,".",shellscript,content +12394,12896658,"scripts_horeka/train_dynamics.sh",1643,0,"",shellscript,selection_keyboard +12395,12896873,"scripts_horeka/train_dynamics.sh",1643,0,"o",shellscript,content +12396,12896874,"scripts_horeka/train_dynamics.sh",1644,0,"",shellscript,selection_keyboard +12397,12897348,"scripts_horeka/train_dynamics.sh",1643,1,"",shellscript,content +12398,12897472,"scripts_horeka/train_dynamics.sh",1642,1,"",shellscript,content +12399,12898737,"scripts_horeka/train_dynamics.sh",1642,0,".",shellscript,content +12400,12898738,"scripts_horeka/train_dynamics.sh",1643,0,"",shellscript,selection_keyboard +12401,12899108,"scripts_horeka/train_dynamics.sh",1643,0,"0",shellscript,content +12402,12899108,"scripts_horeka/train_dynamics.sh",1644,0,"",shellscript,selection_keyboard +12403,12903962,"TERMINAL",0,0,"bash",,terminal_focus +12404,12903963,"TERMINAL",0,0,"srun",,terminal_focus +12405,12904155,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +12406,12910481,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +12407,12912611,"train_dynamics.py",0,0,"",python,tab +12408,12916615,"train_dynamics.py",1555,0,"",python,selection_mouse 
+12409,12916758,"train_dynamics.py",1548,7,"dropout",python,selection_mouse +12410,12917311,"train_dynamics.py",1578,0,"",python,selection_mouse +12411,12917480,"train_dynamics.py",1573,10,"mask_limit",python,selection_mouse +12412,12925851,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +12413,12926812,"scripts_horeka/train_dynamics.sh",1643,0,"",shellscript,selection_command +12414,12926999,"scripts_horeka/train_dynamics.sh",1642,0,"",shellscript,selection_command +12415,12927176,"scripts_horeka/train_dynamics.sh",1641,0,"",shellscript,selection_command +12416,12927341,"scripts_horeka/train_dynamics.sh",1640,0,"",shellscript,selection_command +12417,12927593,"scripts_horeka/train_dynamics.sh",1639,1,"",shellscript,content +12418,12927734,"scripts_horeka/train_dynamics.sh",1638,1,"",shellscript,content +12419,12927880,"scripts_horeka/train_dynamics.sh",1637,1,"",shellscript,content +12420,12928120,"scripts_horeka/train_dynamics.sh",1631,6,"",shellscript,content +12421,12928618,"scripts_horeka/train_dynamics.sh",1631,0,"mask_limit",shellscript,content +12422,12930667,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +12423,12931320,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +12424,12931445,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3458179\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0408\r\nSLURM_JOB_START_TIME=1751296933\r\nSLURM_STEP_NODELIST=hkn0408\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300533\r\nSLURM_PMI2_SRUN_PORT=38655\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306855\r\nSLURM_PTY_PORT=36241\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e12.hkn0408\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0408\r\nSLURM_SRUN_COMM_PORT=42547\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306855\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0408\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=42547\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0408\r\n",,terminal_output +12425,12933826,"TERMINAL",0,0,"2025-06-30 17:59:38.179278: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751299178.193073 3499639 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751299178.197629 
3499639 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n",,terminal_output +12426,12933885,"TERMINAL",0,0,"W0000 00:00:1751299178.210574 3499639 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751299178.210592 3499639 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751299178.210595 3499639 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751299178.210597 3499639 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +12427,12937033,"TERMINAL",0,0,"W0000 00:00:1751299181.461343 3499639 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +12428,12937435,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +12429,12938356,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +12430,12938950,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_175942-2424kvd8\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-tiny-overfit-big-lr-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/2424kvd8\r\n",,terminal_output +12431,12940374,"TERMINAL",0,0,"2025-06-30 17:59:44.802561: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12432,12953207,"TERMINAL",0,0,"2025-06-30 17:59:57.553793: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12433,12964405,"train_lam.py",0,0,"",python,tab +12434,12966017,"train_dynamics.py",0,0,"",python,tab +12435,12969072,"TERMINAL",0,0,"2025-06-30 18:00:13.499841: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12436,12975043,"train_dynamics.py",2756,0,"",python,selection_mouse +12437,12975817,"train_dynamics.py",2728,0,"",python,selection_mouse +12438,12975818,"train_dynamics.py",2727,0,"",python,selection_command +12439,12975988,"train_dynamics.py",2727,1,",",python,selection_mouse +12440,12975989,"train_dynamics.py",2728,0,"",python,selection_command +12441,12976047,"train_dynamics.py",2728,46,"\n select_p=select_probs.max(-1).mean(),",python,selection_mouse +12442,12976063,"train_dynamics.py",2728,37,"\n select_p=select_probs.max(-1",python,selection_mouse +12443,12976126,"train_dynamics.py",2688,40,"=outputs[""token_logits""].max(-1).mean(),",python,selection_mouse +12444,12976127,"train_dynamics.py",2680,48,"ct_logit=outputs[""token_logits""].max(-1).mean(),",python,selection_mouse +12445,12976127,"train_dynamics.py",2728,7,"\n ",python,selection_mouse +12446,12976132,"train_dynamics.py",2728,3,"\n ",python,selection_mouse +12447,12976149,"train_dynamics.py",2728,1,"\n",python,selection_mouse +12448,12976235,"TERMINAL",0,0,"2025-06-30 18:00:20.665016: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12449,12976819,"train_dynamics.py",2673,0,"",python,selection_mouse +12450,12976994,"train_dynamics.py",2668,8," ",python,selection_mouse +12451,12977169,"train_dynamics.py",2668,20," select_logit",python,selection_mouse +12452,12977231,"train_dynamics.py",2668,28," select_logit=outputs",python,selection_mouse +12453,12977231,"train_dynamics.py",2663,13,"acc,\n ",python,selection_mouse +12454,12977240,"train_dynamics.py",2667,9,"\n ",python,selection_mouse +12455,12977517,"train_dynamics.py",2668,60," select_logit=outputs[""token_logits""].max(-1).mean(),",python,selection_mouse +12456,12977581,"train_dynamics.py",2668,106," select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),",python,selection_mouse +12457,12990292,"train_dynamics.py",2823,0,"",python,selection_mouse +12458,12990446,"train_dynamics.py",2774,49,"\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12459,12990475,"train_dynamics.py",2772,51,"),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12460,12990495,"train_dynamics.py",2771,52,"(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12461,12990514,"train_dynamics.py",2767,56,"mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12462,12990529,"train_dynamics.py",2764,59,"1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12463,12990546,"train_dynamics.py",2701,122,"en_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12464,12990572,"train_dynamics.py",2697,126,"""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12465,12990587,"train_dynamics.py",2696,127,"[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse 
+12466,12990660,"train_dynamics.py",2759,64,"max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12467,12990679,"train_dynamics.py",2760,63,"ax(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12468,12990694,"train_dynamics.py",2761,62,"x(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12469,12990750,"train_dynamics.py",2762,61,"(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12470,12990807,"train_dynamics.py",2760,63,"ax(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12471,12990811,"train_dynamics.py",2757,66,"s.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12472,12990828,"train_dynamics.py",2753,70,"probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12473,12990873,"train_dynamics.py",2684,139,"ogit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12474,12990886,"train_dynamics.py",2681,142,"t_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12475,12990899,"train_dynamics.py",2678,145,"lect_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12476,12990913,"train_dynamics.py",2676,147,"select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12477,12990927,"train_dynamics.py",2675,148," select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12478,12990986,"train_dynamics.py",2674,149," select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12479,12991044,"train_dynamics.py",2675,148," select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12480,12991295,"train_dynamics.py",2640,183," masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_pr",python,selection_mouse +12481,12991917,"train_dynamics.py",2580,0,"",python,selection_mouse +12482,12992033,"train_dynamics.py",2577,4," ",python,selection_mouse +12483,12992201,"train_dynamics.py",2577,26," metrics = dict(\n ",python,selection_mouse +12484,12992216,"train_dynamics.py",2577,64," metrics = dict(\n cross_entropy_loss=ce_loss,\n ",python,selection_mouse +12485,12992273,"train_dynamics.py",2577,111," metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit",python,selection_mouse +12486,12992274,"train_dynamics.py",2577,168," metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p",python,selection_mouse +12487,12992276,"train_dynamics.py",2577,181," metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs",python,selection_mouse 
+12488,12992377,"train_dynamics.py",2577,182," metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.",python,selection_mouse +12489,12992396,"train_dynamics.py",2577,185," metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max",python,selection_mouse +12490,12992413,"train_dynamics.py",2577,198," metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n",python,selection_mouse +12491,12992428,"train_dynamics.py",2577,236," metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr",python,selection_mouse +12492,12992487,"train_dynamics.py",2577,249," metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs",python,selection_mouse +12493,12992718,"train_dynamics.py",2577,272," metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n )",python,selection_mouse +12494,12995645,"TERMINAL",0,0,"batch shape: (1, 16, 90, 160, 3)\r\n",,terminal_output +12495,13006695,"TERMINAL",0,0,"2025-06-30 18:00:51.122974: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:00:51.123563: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:00:51.123678: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:00:51.124312: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:00:51.125348: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +12496,13053537,"TERMINAL",0,0,"Step 0, loss: 8.689549446105957, step time: 57639.85276222229ms\r\n",,terminal_output +12497,13053679,"TERMINAL",0,0,"Step 1, loss: 8.55783462524414, step time: 28.937578201293945ms\r\nStep 2, loss: nan, step time: 22.460222244262695ms\r\n",,terminal_output +12498,13053818,"TERMINAL",0,0,"Step 3, loss: 8.484328269958496, step time: 20.624876022338867ms\r\nStep 4, loss: 8.051848411560059, step time: 20.745515823364258ms\r\n",,terminal_output +12499,13053875,"TERMINAL",0,0,"Step 5, loss: 7.87677526473999, step time: 21.01588249206543ms\r\n",,terminal_output +12500,13053945,"TERMINAL",0,0,"Step 6, loss: 7.976510047912598, step time: 28.328895568847656ms\r\n",,terminal_output +12501,13054045,"TERMINAL",0,0,"Step 7, loss: 7.64009428024292, step time: 23.944854736328125ms\r\n",,terminal_output +12502,13054134,"TERMINAL",0,0,"Step 8, loss: 7.523484230041504, step time: 22.38917350769043ms\r\n",,terminal_output +12503,13054212,"TERMINAL",0,0,"Step 9, loss: 7.702459812164307, step time: 21.413087844848633ms\r\nStep 10, loss: 7.3823561668396, step time: 20.579814910888672ms\r\n",,terminal_output +12504,13054270,"TERMINAL",0,0,"Step 11, loss: 7.428343296051025, step time: 20.121097564697266ms\r\n",,terminal_output +12505,13054365,"TERMINAL",0,0,"Step 12, loss: 7.15370512008667, step time: 19.87290382385254ms\r\n",,terminal_output +12506,13054418,"TERMINAL",0,0,"Step 13, loss: 7.074524402618408, step time: 19.4094181060791ms\r\n",,terminal_output +12507,13054511,"TERMINAL",0,0,"Step 14, loss: 7.104835033416748, step time: 20.330429077148438ms\r\n",,terminal_output +12508,13054562,"TERMINAL",0,0,"Step 15, loss: 7.108646869659424, step time: 20.364761352539062ms\r\n",,terminal_output +12509,13054666,"TERMINAL",0,0,"Step 16, loss: 6.894292831420898, step time: 27.843952178955078ms\r\nStep 17, loss: 6.810558319091797, step time: 25.731325149536133ms\r\n",,terminal_output +12510,13054745,"TERMINAL",0,0,"Step 18, loss: 7.083794116973877, step time: 22.03989028930664ms\r\n",,terminal_output +12511,13054855,"TERMINAL",0,0,"Step 19, loss: 6.705544948577881, step time: 19.7451114654541ms\r\nStep 20, loss: 6.761994361877441, step time: 19.38462257385254ms\r\n",,terminal_output +12512,13055301,"TERMINAL",0,0,"Step 21, loss: 6.602332592010498, step time: 346.7426300048828ms\r\nStep 22, loss: 6.569886207580566, step time: 26.810646057128906ms\r\n",,terminal_output +12513,13055354,"TERMINAL",0,0,"Step 23, loss: 6.525876045227051, step time: 24.57427978515625ms\r\n",,terminal_output +12514,13055452,"TERMINAL",0,0,"Step 24, loss: 6.504883289337158, step time: 23.523330688476562ms\r\n",,terminal_output +12515,13055503,"TERMINAL",0,0,"Step 25, loss: 6.411362171173096, step time: 20.768165588378906ms\r\n",,terminal_output +12516,13055609,"TERMINAL",0,0,"Step 26, loss: 6.395390510559082, step time: 19.929885864257812ms\r\nStep 27, loss: 6.355239391326904, step time: 20.798444747924805ms\r\n",,terminal_output +12517,13055672,"TERMINAL",0,0,"Step 28, loss: 6.494202136993408, step time: 19.96612548828125ms\r\n",,terminal_output +12518,13055737,"TERMINAL",0,0,"Step 29, loss: 6.271925926208496, step time: 21.17466926574707ms\r\n",,terminal_output +12519,13055801,"TERMINAL",0,0,"Step 30, loss: 6.248132705688477, step time: 29.83999252319336ms\r\n",,terminal_output +12520,13055883,"TERMINAL",0,0,"Step 31, loss: 6.271213054656982, step time: 
25.063037872314453ms\r\n",,terminal_output +12521,13055930,"TERMINAL",0,0,"Step 32, loss: 6.259011745452881, step time: 22.17698097229004ms\r\n",,terminal_output +12522,13055995,"TERMINAL",0,0,"Step 33, loss: 6.128746032714844, step time: 20.38741111755371ms\r\n",,terminal_output +12523,13056053,"TERMINAL",0,0,"Step 34, loss: 6.1155104637146, step time: 19.522428512573242ms\r\n",,terminal_output +12524,13056185,"TERMINAL",0,0,"Step 35, loss: 6.070591449737549, step time: 19.1190242767334ms\r\nStep 36, loss: 6.081422805786133, step time: 20.017147064208984ms\r\n",,terminal_output +12525,13056253,"TERMINAL",0,0,"Step 37, loss: 6.027641296386719, step time: 20.053863525390625ms\r\n",,terminal_output +12526,13056316,"TERMINAL",0,0,"Step 38, loss: 6.0136189460754395, step time: 19.797563552856445ms\r\n",,terminal_output +12527,13056412,"TERMINAL",0,0,"Step 39, loss: 5.952182292938232, step time: 20.123720169067383ms\r\n",,terminal_output +12528,13056463,"TERMINAL",0,0,"Step 40, loss: 5.892744064331055, step time: 20.7672119140625ms\r\n",,terminal_output +12529,13056609,"TERMINAL",0,0,"Step 41, loss: 5.907363414764404, step time: 28.022289276123047ms\r\nStep 42, loss: 5.851045608520508, step time: 28.453826904296875ms\r\n",,terminal_output +12530,13056674,"TERMINAL",0,0,"Step 43, loss: 5.885713577270508, step time: 23.765087127685547ms\r\n",,terminal_output +12531,13056739,"TERMINAL",0,0,"Step 44, loss: 5.826593399047852, step time: 23.53501319885254ms\r\n",,terminal_output +12532,13056787,"TERMINAL",0,0,"Step 45, loss: 5.784215927124023, step time: 28.898239135742188ms\r\n",,terminal_output +12533,13056886,"TERMINAL",0,0,"Step 46, loss: 5.74515438079834, step time: 26.83544158935547ms\r\n",,terminal_output +12534,13056962,"TERMINAL",0,0,"Step 47, loss: 5.684447765350342, step time: 22.115468978881836ms\r\n",,terminal_output +12535,13057048,"TERMINAL",0,0,"Step 48, loss: 5.704304218292236, step time: 20.7064151763916ms\r\nStep 49, loss: 5.633974552154541, step time: 20.182132720947266ms\r\n",,terminal_output +12536,13057111,"TERMINAL",0,0,"Step 50, loss: 5.69882345199585, step time: 29.192209243774414ms\r\n",,terminal_output +12537,13057215,"TERMINAL",0,0,"Step 51, loss: 5.58906888961792, step time: 28.42879295349121ms\r\n",,terminal_output +12538,13057277,"TERMINAL",0,0,"Step 52, loss: 5.7050089836120605, step time: 30.6093692779541ms\r\n",,terminal_output +12539,13057339,"TERMINAL",0,0,"Step 53, loss: 5.656703472137451, step time: 27.97985076904297ms\r\n",,terminal_output +12540,13057402,"TERMINAL",0,0,"Step 54, loss: 5.558479309082031, step time: 30.73596954345703ms\r\n",,terminal_output +12541,13057468,"TERMINAL",0,0,"Step 55, loss: 5.6658549308776855, step time: 27.997255325317383ms\r\n",,terminal_output +12542,13057564,"TERMINAL",0,0,"Step 56, loss: 5.493319034576416, step time: 29.447078704833984ms\r\n",,terminal_output +12543,13057669,"TERMINAL",0,0,"Step 57, loss: 5.552465915679932, step time: 28.528451919555664ms\r\nStep 58, loss: 5.446744918823242, step time: 26.310205459594727ms\r\n",,terminal_output +12544,13057771,"TERMINAL",0,0,"Step 59, loss: 5.418356895446777, step time: 21.50440216064453ms\r\n",,terminal_output +12545,13057829,"TERMINAL",0,0,"Step 60, loss: 5.505122184753418, step time: 19.96016502380371ms\r\n",,terminal_output +12546,13057946,"TERMINAL",0,0,"Step 61, loss: 5.4869208335876465, step time: 19.352197647094727ms\r\nStep 62, loss: 5.492381572723389, step time: 18.960952758789062ms\r\n",,terminal_output +12547,13058006,"TERMINAL",0,0,"Step 63, loss: 
5.320959091186523, step time: 18.996477127075195ms\r\n",,terminal_output +12548,13058068,"TERMINAL",0,0,"Step 64, loss: 5.370678424835205, step time: 18.59307289123535ms\r\n",,terminal_output +12549,13058130,"TERMINAL",0,0,"Step 65, loss: 5.177929401397705, step time: 18.58234405517578ms\r\n",,terminal_output +12550,13058222,"TERMINAL",0,0,"Step 66, loss: 5.352731704711914, step time: 18.822193145751953ms\r\n",,terminal_output +12551,13058275,"TERMINAL",0,0,"Step 67, loss: 5.222672462463379, step time: 18.53036880493164ms\r\n",,terminal_output +12552,13058381,"TERMINAL",0,0,"Step 68, loss: 5.316426753997803, step time: 18.40066909790039ms\r\nStep 69, loss: 5.209280490875244, step time: 18.85247230529785ms\r\n",,terminal_output +12553,13058474,"TERMINAL",0,0,"Step 70, loss: 5.1855292320251465, step time: 18.48912239074707ms\r\n",,terminal_output +12554,13058537,"TERMINAL",0,0,"Step 71, loss: 5.109462261199951, step time: 18.53013038635254ms\r\n",,terminal_output +12555,13058642,"TERMINAL",0,0,"Step 72, loss: 5.1947526931762695, step time: 18.76354217529297ms\r\n",,terminal_output +12556,13058695,"TERMINAL",0,0,"Step 73, loss: 5.126896858215332, step time: 18.447160720825195ms\r\nStep 74, loss: 5.097331523895264, step time: 18.3563232421875ms\r\n",,terminal_output +12557,13058788,"TERMINAL",0,0,"Step 75, loss: 5.227081298828125, step time: 18.912315368652344ms\r\n",,terminal_output +12558,13058896,"TERMINAL",0,0,"Step 76, loss: 5.092558860778809, step time: 18.354415893554688ms\r\nStep 77, loss: 5.141752243041992, step time: 18.294572830200195ms\r\n",,terminal_output +12559,13059017,"TERMINAL",0,0,"Step 78, loss: 5.0579514503479, step time: 18.520593643188477ms\r\nStep 79, loss: 4.95880126953125, step time: 18.6464786529541ms\r\n",,terminal_output +12560,13059082,"TERMINAL",0,0,"Step 80, loss: 5.087688446044922, step time: 18.998146057128906ms\r\n",,terminal_output +12561,13059146,"TERMINAL",0,0,"Step 81, loss: 5.013207912445068, step time: 18.831491470336914ms\r\n",,terminal_output +12562,13059208,"TERMINAL",0,0,"Step 82, loss: 5.013736724853516, step time: 18.369197845458984ms\r\n",,terminal_output +12563,13059272,"TERMINAL",0,0,"Step 83, loss: 4.941514492034912, step time: 21.76690101623535ms\r\n",,terminal_output +12564,13059339,"TERMINAL",0,0,"Step 84, loss: 4.936014175415039, step time: 18.69654655456543ms\r\n",,terminal_output +12565,13059453,"TERMINAL",0,0,"Step 85, loss: 4.886699676513672, step time: 31.733274459838867ms\r\n",,terminal_output +12566,13059506,"TERMINAL",0,0,"Step 86, loss: 5.250229835510254, step time: 26.709556579589844ms\r\n",,terminal_output +12567,13059559,"TERMINAL",0,0,"Step 87, loss: 4.991880416870117, step time: 19.252300262451172ms\r\n",,terminal_output +12568,13059671,"TERMINAL",0,0,"Step 88, loss: 4.877004623413086, step time: 18.24474334716797ms\r\nStep 89, loss: 4.860544204711914, step time: 18.410682678222656ms\r\n",,terminal_output +12569,13059734,"TERMINAL",0,0,"Step 90, loss: 4.821750640869141, step time: 18.770456314086914ms\r\n",,terminal_output +12570,13059797,"TERMINAL",0,0,"Step 91, loss: 4.774339199066162, step time: 18.661022186279297ms\r\n",,terminal_output +12571,13059893,"TERMINAL",0,0,"Step 92, loss: 4.870851039886475, step time: 18.233776092529297ms\r\n",,terminal_output +12572,13059946,"TERMINAL",0,0,"Step 93, loss: 5.084613800048828, step time: 18.718719482421875ms\r\n",,terminal_output +12573,13059999,"TERMINAL",0,0,"Step 94, loss: 4.775279998779297, step time: 18.726348876953125ms\r\n",,terminal_output 
+12574,13060094,"TERMINAL",0,0,"Step 95, loss: 4.990357398986816, step time: 18.30601692199707ms\r\n",,terminal_output +12575,13060156,"TERMINAL",0,0,"Step 96, loss: 4.728123664855957, step time: 18.648862838745117ms\r\n",,terminal_output +12576,13060220,"TERMINAL",0,0,"Step 97, loss: 4.763580799102783, step time: 18.42474937438965ms\r\n",,terminal_output +12577,13060284,"TERMINAL",0,0,"Step 98, loss: 4.657594203948975, step time: 18.277883529663086ms\r\n",,terminal_output +12578,13060346,"TERMINAL",0,0,"Step 99, loss: 4.781642913818359, step time: 18.61715316772461ms\r\n",,terminal_output +12579,13060461,"TERMINAL",0,0,"Step 100, loss: 4.687735557556152, step time: 18.25428009033203ms\r\nStep 101, loss: 4.696214199066162, step time: 18.25547218322754ms\r\n",,terminal_output +12580,13060565,"TERMINAL",0,0,"Step 102, loss: 4.641601085662842, step time: 19.521236419677734ms\r\nStep 103, loss: 4.866119384765625, step time: 18.345117568969727ms\r\n",,terminal_output +12581,13060660,"TERMINAL",0,0,"Step 104, loss: 4.689534664154053, step time: 18.147706985473633ms\r\n",,terminal_output +12582,13060715,"TERMINAL",0,0,"Step 105, loss: 4.77653694152832, step time: 18.659591674804688ms\r\n",,terminal_output +12583,13060821,"TERMINAL",0,0,"Step 106, loss: 4.611968994140625, step time: 18.253326416015625ms\r\nStep 107, loss: 4.803019046783447, step time: 18.25118064880371ms\r\n",,terminal_output +12584,13060883,"TERMINAL",0,0,"Step 108, loss: 4.870712757110596, step time: 18.49198341369629ms\r\n",,terminal_output +12585,13060949,"TERMINAL",0,0,"Step 109, loss: 4.773675918579102, step time: 20.476102828979492ms\r\n",,terminal_output +12586,13061013,"TERMINAL",0,0,"Step 110, loss: 4.655884265899658, step time: 19.501209259033203ms\r\n",,terminal_output +12587,13061108,"TERMINAL",0,0,"Step 111, loss: 4.745955944061279, step time: 20.001649856567383ms\r\n",,terminal_output +12588,13061213,"TERMINAL",0,0,"Step 112, loss: 4.579080581665039, step time: 21.106719970703125ms\r\nStep 113, loss: 4.6177849769592285, step time: 19.457101821899414ms\r\n",,terminal_output +12589,13061329,"TERMINAL",0,0,"Step 114, loss: 4.62890100479126, step time: 19.156455993652344ms\r\nStep 115, loss: 4.642035484313965, step time: 21.843910217285156ms\r\n",,terminal_output +12590,13061397,"TERMINAL",0,0,"Step 116, loss: 4.652178764343262, step time: 19.904136657714844ms\r\n",,terminal_output +12591,13061460,"TERMINAL",0,0,"Step 117, loss: 4.491632461547852, step time: 19.136428833007812ms\r\n",,terminal_output +12592,13061552,"TERMINAL",0,0,"Step 118, loss: 4.500707626342773, step time: 34.20090675354004ms\r\n",,terminal_output +12593,13061608,"TERMINAL",0,0,"Step 119, loss: 4.569210529327393, step time: 19.90532875061035ms\r\n",,terminal_output +12594,13061659,"TERMINAL",0,0,"Step 120, loss: 4.431127548217773, step time: 19.146203994750977ms\r\n",,terminal_output +12595,13061753,"TERMINAL",0,0,"Step 121, loss: 4.582759380340576, step time: 18.560409545898438ms\r\n",,terminal_output +12596,13061807,"TERMINAL",0,0,"Step 122, loss: 4.6465888023376465, step time: 18.694162368774414ms\r\n",,terminal_output +12597,13061875,"TERMINAL",0,0,"Step 123, loss: 4.4205451011657715, step time: 18.75162124633789ms\r\n",,terminal_output +12598,13061936,"TERMINAL",0,0,"Step 124, loss: 4.418066501617432, step time: 18.407106399536133ms\r\n",,terminal_output +12599,13062046,"TERMINAL",0,0,"Step 125, loss: 4.358034133911133, step time: 18.56374740600586ms\r\nStep 126, loss: 4.396193981170654, step time: 18.89634132385254ms\r\n",,terminal_output 
+12600,13062141,"TERMINAL",0,0,"Step 127, loss: 4.347992420196533, step time: 18.571853637695312ms\r\n",,terminal_output +12601,13062205,"TERMINAL",0,0,"Step 128, loss: 4.376864433288574, step time: 18.534421920776367ms\r\n",,terminal_output +12602,13062269,"TERMINAL",0,0,"Step 129, loss: 4.559895038604736, step time: 22.729873657226562ms\r\n",,terminal_output +12603,13062377,"TERMINAL",0,0,"Step 130, loss: 4.522045135498047, step time: 18.8448429107666ms\r\nStep 131, loss: 4.332486152648926, step time: 18.45407485961914ms\r\n",,terminal_output +12604,13062446,"TERMINAL",0,0,"Step 132, loss: 4.364961624145508, step time: 18.700599670410156ms\r\n",,terminal_output +12605,13062508,"TERMINAL",0,0,"Step 133, loss: 4.575298309326172, step time: 18.416643142700195ms\r\n",,terminal_output +12606,13062572,"TERMINAL",0,0,"Step 134, loss: 4.336301326751709, step time: 18.368005752563477ms\r\n",,terminal_output +12607,13062639,"TERMINAL",0,0,"Step 135, loss: 4.321098804473877, step time: 19.026517868041992ms\r\n",,terminal_output +12608,13062702,"TERMINAL",0,0,"Step 136, loss: 4.314168453216553, step time: 18.479347229003906ms\r\n",,terminal_output +12609,13062766,"TERMINAL",0,0,"Step 137, loss: 4.36724853515625, step time: 18.581867218017578ms\r\n",,terminal_output +12610,13062879,"TERMINAL",0,0,"Step 138, loss: 4.649411201477051, step time: 19.006013870239258ms\r\nStep 139, loss: 4.214529991149902, step time: 20.01357078552246ms\r\n",,terminal_output +12611,13062950,"TERMINAL",0,0,"Step 140, loss: 4.289625644683838, step time: 18.568754196166992ms\r\n",,terminal_output +12612,13063003,"TERMINAL",0,0,"Step 141, loss: 4.257591724395752, step time: 18.938541412353516ms\r\n",,terminal_output +12613,13063100,"TERMINAL",0,0,"Step 142, loss: 4.355730056762695, step time: 24.838685989379883ms\r\n",,terminal_output +12614,13063162,"TERMINAL",0,0,"Step 143, loss: 4.188891410827637, step time: 22.464275360107422ms\r\n",,terminal_output +12615,13063225,"TERMINAL",0,0,"Step 144, loss: 4.233375072479248, step time: 21.253347396850586ms\r\n",,terminal_output +12616,13063288,"TERMINAL",0,0,"Step 145, loss: 4.212933540344238, step time: 19.35553550720215ms\r\n",,terminal_output +12617,13063396,"TERMINAL",0,0,"Step 146, loss: 4.226122856140137, step time: 20.15376091003418ms\r\nStep 147, loss: 4.226682186126709, step time: 19.063949584960938ms\r\n",,terminal_output +12618,13063458,"TERMINAL",0,0,"Step 148, loss: 4.175089359283447, step time: 18.5239315032959ms\r\n",,terminal_output +12619,13063522,"TERMINAL",0,0,"Step 149, loss: 4.2500457763671875, step time: 19.22893524169922ms\r\n",,terminal_output +12620,13063649,"TERMINAL",0,0,"Step 150, loss: 4.140637397766113, step time: 19.164323806762695ms\r\nStep 151, loss: 4.094871997833252, step time: 18.55015754699707ms\r\n",,terminal_output +12621,13063776,"TERMINAL",0,0,"Step 152, loss: 4.109274864196777, step time: 18.358230590820312ms\r\nStep 153, loss: 4.230240821838379, step time: 18.706560134887695ms\r\n",,terminal_output +12622,13063902,"TERMINAL",0,0,"Step 154, loss: 4.129260063171387, step time: 19.139528274536133ms\r\nStep 155, loss: 4.263181686401367, step time: 18.567323684692383ms\r\n",,terminal_output +12623,13063969,"TERMINAL",0,0,"Step 156, loss: 4.126038551330566, step time: 18.779277801513672ms\r\n",,terminal_output +12624,13064070,"TERMINAL",0,0,"Step 157, loss: 4.232539653778076, step time: 18.491744995117188ms\r\n",,terminal_output +12625,13064133,"TERMINAL",0,0,"Step 158, loss: 4.15305757522583, step time: 18.428325653076172ms\r\n",,terminal_output 
+12626,13064198,"TERMINAL",0,0,"Step 159, loss: 4.216355323791504, step time: 18.659114837646484ms\r\n",,terminal_output +12627,13064250,"TERMINAL",0,0,"Step 160, loss: 4.067484378814697, step time: 18.39613914489746ms\r\n",,terminal_output +12628,13064359,"TERMINAL",0,0,"Step 161, loss: 4.28833532333374, step time: 18.31507682800293ms\r\nStep 162, loss: 4.049203872680664, step time: 18.74232292175293ms\r\n",,terminal_output +12629,13064419,"TERMINAL",0,0,"Step 163, loss: 4.106430530548096, step time: 18.404245376586914ms\r\n",,terminal_output +12630,13064482,"TERMINAL",0,0,"Step 164, loss: 4.0097336769104, step time: 18.241405487060547ms\r\n",,terminal_output +12631,13064551,"TERMINAL",0,0,"Step 165, loss: 4.5672736167907715, step time: 18.6765193939209ms\r\n",,terminal_output +12632,13064611,"TERMINAL",0,0,"Step 166, loss: 4.247875690460205, step time: 18.32437515258789ms\r\n",,terminal_output +12633,13064708,"TERMINAL",0,0,"Step 167, loss: 4.112375736236572, step time: 18.34702491760254ms\r\n",,terminal_output +12634,13064815,"TERMINAL",0,0,"Step 168, loss: 4.151537895202637, step time: 18.643617630004883ms\r\nStep 169, loss: 4.076761722564697, step time: 18.27716827392578ms\r\n",,terminal_output +12635,13064911,"TERMINAL",0,0,"Step 170, loss: 4.016961097717285, step time: 18.149852752685547ms\r\n",,terminal_output +12636,13064986,"TERMINAL",0,0,"Step 171, loss: 3.975597858428955, step time: 18.926620483398438ms\r\nStep 172, loss: 4.0074782371521, step time: 18.15938949584961ms\r\n",,terminal_output +12637,13065080,"TERMINAL",0,0,"Step 173, loss: 4.354491710662842, step time: 18.293380737304688ms\r\n",,terminal_output +12638,13065442,"TERMINAL",0,0,"Step 174, loss: 3.9077391624450684, step time: 350.70085525512695ms\r\n",,terminal_output +12639,13065495,"TERMINAL",0,0,"Step 175, loss: 4.020373821258545, step time: 32.73582458496094ms\r\n",,terminal_output +12640,13065560,"TERMINAL",0,0,"Step 176, loss: 3.9569814205169678, step time: 20.920991897583008ms\r\n",,terminal_output +12641,13065622,"TERMINAL",0,0,"Step 177, loss: 4.00203800201416, step time: 19.32692527770996ms\r\n",,terminal_output +12642,13065687,"TERMINAL",0,0,"Step 178, loss: 4.172319412231445, step time: 18.695831298828125ms\r\n",,terminal_output +12643,13065750,"TERMINAL",0,0,"Step 179, loss: 3.9680659770965576, step time: 20.149946212768555ms\r\n",,terminal_output +12644,13065847,"TERMINAL",0,0,"Step 180, loss: 3.8355979919433594, step time: 30.54523468017578ms\r\n",,terminal_output +12645,13065909,"TERMINAL",0,0,"Step 181, loss: 4.161109447479248, step time: 20.017385482788086ms\r\n",,terminal_output +12646,13066016,"TERMINAL",0,0,"Step 182, loss: 3.935492992401123, step time: 20.39051055908203ms\r\nStep 183, loss: 4.363037586212158, step time: 20.37215232849121ms\r\n",,terminal_output +12647,13066082,"TERMINAL",0,0,"Step 184, loss: 3.90312123298645, step time: 18.445730209350586ms\r\n",,terminal_output +12648,13066142,"TERMINAL",0,0,"Step 185, loss: 4.062814235687256, step time: 18.407821655273438ms\r\n",,terminal_output +12649,13066263,"TERMINAL",0,0,"Step 186, loss: 4.033594608306885, step time: 18.77570152282715ms\r\nStep 187, loss: 4.4140472412109375, step time: 18.47243309020996ms\r\n",,terminal_output +12650,13066326,"TERMINAL",0,0,"Step 188, loss: 3.9773778915405273, step time: 19.91748809814453ms\r\n",,terminal_output +12651,13066425,"TERMINAL",0,0,"Step 189, loss: 3.874654769897461, step time: 32.829999923706055ms\r\n",,terminal_output +12652,13066479,"TERMINAL",0,0,"Step 190, loss: 3.9931163787841797, step 
time: 37.51063346862793ms\r\n",,terminal_output +12653,13066576,"TERMINAL",0,0,"Step 191, loss: 3.943058729171753, step time: 36.51928901672363ms\r\n",,terminal_output +12654,13066628,"TERMINAL",0,0,"Step 192, loss: 3.9152073860168457, step time: 37.74309158325195ms\r\n",,terminal_output +12655,13066725,"TERMINAL",0,0,"Step 193, loss: 3.893993377685547, step time: 36.229848861694336ms\r\n",,terminal_output +12656,13066838,"TERMINAL",0,0,"Step 194, loss: 3.998897075653076, step time: 37.100791931152344ms\r\nStep 195, loss: 3.881891965866089, step time: 37.454843521118164ms\r\n",,terminal_output +12657,13066910,"TERMINAL",0,0,"Step 196, loss: 4.091691970825195, step time: 36.41700744628906ms\r\n",,terminal_output +12658,13066989,"TERMINAL",0,0,"Step 197, loss: 3.9132349491119385, step time: 40.56596755981445ms\r\n",,terminal_output +12659,13067054,"TERMINAL",0,0,"Step 198, loss: 3.8609254360198975, step time: 37.62960433959961ms\r\n",,terminal_output +12660,13067127,"TERMINAL",0,0,"Step 199, loss: 3.8706564903259277, step time: 34.295082092285156ms\r\n",,terminal_output +12661,13067199,"TERMINAL",0,0,"Step 200, loss: 4.1936163902282715, step time: 35.445451736450195ms\r\n",,terminal_output +12662,13067294,"TERMINAL",0,0,"Step 201, loss: 3.810234308242798, step time: 35.10117530822754ms\r\n",,terminal_output +12663,13067358,"TERMINAL",0,0,"Step 202, loss: 3.9192707538604736, step time: 36.373138427734375ms\r\n",,terminal_output +12664,13067421,"TERMINAL",0,0,"Step 203, loss: 3.873218059539795, step time: 35.2783203125ms\r\n",,terminal_output +12665,13067486,"TERMINAL",0,0,"Step 204, loss: 3.834641933441162, step time: 38.77067565917969ms\r\n",,terminal_output +12666,13067580,"TERMINAL",0,0,"Step 205, loss: 3.8293211460113525, step time: 34.57140922546387ms\r\n",,terminal_output +12667,13067633,"TERMINAL",0,0,"Step 206, loss: 3.881747007369995, step time: 35.42280197143555ms\r\n",,terminal_output +12668,13067726,"TERMINAL",0,0,"Step 207, loss: 3.822761297225952, step time: 36.41510009765625ms\r\n",,terminal_output +12669,13067841,"TERMINAL",0,0,"Step 208, loss: 3.9390931129455566, step time: 36.11874580383301ms\r\nStep 209, loss: 3.9336724281311035, step time: 34.32154655456543ms\r\n",,terminal_output +12670,13067984,"TERMINAL",0,0,"Step 210, loss: 3.971034526824951, step time: 36.11111640930176ms\r\nStep 211, loss: 4.157374858856201, step time: 34.462690353393555ms\r\n",,terminal_output +12671,13068093,"TERMINAL",0,0,"Step 212, loss: 3.796286106109619, step time: 36.27419471740723ms\r\n",,terminal_output +12672,13068159,"TERMINAL",0,0,"Step 213, loss: 3.763195276260376, step time: 39.58582878112793ms\r\n",,terminal_output +12673,13068222,"TERMINAL",0,0,"Step 214, loss: 3.8489930629730225, step time: 35.878658294677734ms\r\n",,terminal_output +12674,13068288,"TERMINAL",0,0,"Step 215, loss: 3.898550033569336, step time: 34.84392166137695ms\r\n",,terminal_output +12675,13068347,"TERMINAL",0,0,"Step 216, loss: 3.8087551593780518, step time: 36.25941276550293ms\r\n",,terminal_output +12676,13068457,"TERMINAL",0,0,"Step 217, loss: 3.841482639312744, step time: 35.61067581176758ms\r\n",,terminal_output +12677,13068511,"TERMINAL",0,0,"Step 218, loss: 3.7611608505249023, step time: 42.2670841217041ms\r\n",,terminal_output +12678,13068657,"TERMINAL",0,0,"Step 219, loss: 3.755693197250366, step time: 59.16547775268555ms\r\nStep 220, loss: 3.821178913116455, step time: 34.14583206176758ms\r\n",,terminal_output +12679,13068798,"TERMINAL",0,0,"Step 221, loss: 3.7941906452178955, step time: 
33.17379951477051ms\r\nStep 222, loss: 3.9112281799316406, step time: 35.1717472076416ms\r\n",,terminal_output +12680,13068907,"TERMINAL",0,0,"Step 223, loss: 3.6876964569091797, step time: 37.70160675048828ms\r\n",,terminal_output +12681,13069003,"TERMINAL",0,0,"Step 224, loss: 3.837676525115967, step time: 23.45561981201172ms\r\nStep 225, loss: 3.845881462097168, step time: 19.704818725585938ms\r\n",,terminal_output +12682,13069097,"TERMINAL",0,0,"Step 226, loss: 3.698542594909668, step time: 18.919706344604492ms\r\n",,terminal_output +12683,13069160,"TERMINAL",0,0,"Step 227, loss: 3.8143362998962402, step time: 18.81575584411621ms\r\n",,terminal_output +12684,13069223,"TERMINAL",0,0,"Step 228, loss: 3.672456979751587, step time: 20.761489868164062ms\r\n",,terminal_output +12685,13069276,"TERMINAL",0,0,"Step 229, loss: 3.718217134475708, step time: 19.023656845092773ms\r\n",,terminal_output +12686,13069389,"TERMINAL",0,0,"Step 230, loss: 3.790865421295166, step time: 18.94855499267578ms\r\nStep 231, loss: 4.001021862030029, step time: 19.222021102905273ms\r\n",,terminal_output +12687,13069484,"TERMINAL",0,0,"Step 232, loss: 3.722919464111328, step time: 19.053220748901367ms\r\n",,terminal_output +12688,13069537,"TERMINAL",0,0,"Step 233, loss: 3.7943880558013916, step time: 19.684314727783203ms\r\n",,terminal_output +12689,13069657,"TERMINAL",0,0,"Step 234, loss: 3.7134029865264893, step time: 19.972801208496094ms\r\nStep 235, loss: 3.6983165740966797, step time: 19.5159912109375ms\r\n",,terminal_output +12690,13069718,"TERMINAL",0,0,"Step 236, loss: 3.6939804553985596, step time: 20.357370376586914ms\r\n",,terminal_output +12691,13069779,"TERMINAL",0,0,"Step 237, loss: 3.9443554878234863, step time: 20.14756202697754ms\r\n",,terminal_output +12692,13069845,"TERMINAL",0,0,"Step 238, loss: 3.8124773502349854, step time: 21.89493179321289ms\r\n",,terminal_output +12693,13069975,"TERMINAL",0,0,"Step 239, loss: 3.6693243980407715, step time: 19.00172233581543ms\r\nStep 240, loss: 3.6840786933898926, step time: 19.508838653564453ms\r\n",,terminal_output +12694,13070041,"TERMINAL",0,0,"Step 241, loss: 3.665410041809082, step time: 18.71776580810547ms\r\n",,terminal_output +12695,13070104,"TERMINAL",0,0,"Step 242, loss: 3.7132813930511475, step time: 20.125627517700195ms\r\n",,terminal_output +12696,13070169,"TERMINAL",0,0,"Step 243, loss: 3.691347599029541, step time: 19.16790008544922ms\r\n",,terminal_output +12697,13070233,"TERMINAL",0,0,"Step 244, loss: 3.641010284423828, step time: 18.677234649658203ms\r\n",,terminal_output +12698,13070343,"TERMINAL",0,0,"Step 245, loss: 3.7039363384246826, step time: 30.605792999267578ms\r\n",,terminal_output +12699,13070398,"TERMINAL",0,0,"Step 246, loss: 3.8258798122406006, step time: 27.767419815063477ms\r\n",,terminal_output +12700,13070449,"TERMINAL",0,0,"Step 247, loss: 3.6457178592681885, step time: 19.32358741760254ms\r\n",,terminal_output +12701,13070544,"TERMINAL",0,0,"Step 248, loss: 3.734707832336426, step time: 18.491506576538086ms\r\n",,terminal_output +12702,13070596,"TERMINAL",0,0,"Step 249, loss: 3.6475653648376465, step time: 18.87655258178711ms\r\n",,terminal_output +12703,13070662,"TERMINAL",0,0,"Step 250, loss: 3.7350728511810303, step time: 18.719196319580078ms\r\n",,terminal_output +12704,13070769,"TERMINAL",0,0,"Step 251, loss: 3.7957072257995605, step time: 18.68271827697754ms\r\nStep 252, loss: 3.6427087783813477, step time: 18.879175186157227ms\r\n",,terminal_output +12705,13070885,"TERMINAL",0,0,"Step 253, loss: 
3.615842342376709, step time: 18.498659133911133ms\r\nStep 254, loss: 3.722642183303833, step time: 18.560171127319336ms\r\n",,terminal_output +12706,13070953,"TERMINAL",0,0,"Step 255, loss: 3.6800432205200195, step time: 18.992185592651367ms\r\n",,terminal_output +12707,13071075,"TERMINAL",0,0,"Step 256, loss: 3.768442153930664, step time: 18.488168716430664ms\r\nStep 257, loss: 3.7757856845855713, step time: 18.477916717529297ms\r\n",,terminal_output +12708,13071204,"TERMINAL",0,0,"Step 258, loss: 3.588463544845581, step time: 18.863201141357422ms\r\nStep 259, loss: 3.5297353267669678, step time: 18.39590072631836ms\r\n",,terminal_output +12709,13071269,"TERMINAL",0,0,"Step 260, loss: 3.630030632019043, step time: 19.515037536621094ms\r\n",,terminal_output +12710,13071379,"TERMINAL",0,0,"Step 261, loss: 3.9019484519958496, step time: 27.966022491455078ms\r\n",,terminal_output +12711,13071442,"TERMINAL",0,0,"Step 262, loss: 3.9888956546783447, step time: 21.800518035888672ms\r\n",,terminal_output +12712,13071505,"TERMINAL",0,0,"Step 263, loss: 3.6227381229400635, step time: 19.619464874267578ms\r\n",,terminal_output +12713,13071567,"TERMINAL",0,0,"Step 264, loss: 3.656881093978882, step time: 19.508838653564453ms\r\n",,terminal_output +12714,13071631,"TERMINAL",0,0,"Step 265, loss: 3.613293170928955, step time: 18.769502639770508ms\r\n",,terminal_output +12715,13071692,"TERMINAL",0,0,"Step 266, loss: 3.7286465167999268, step time: 18.77760887145996ms\r\n",,terminal_output +12716,13071758,"TERMINAL",0,0,"Step 267, loss: 3.9502103328704834, step time: 19.088029861450195ms\r\n",,terminal_output +12717,13071819,"TERMINAL",0,0,"Step 268, loss: 3.5806655883789062, step time: 18.987655639648438ms\r\n",,terminal_output +12718,13071882,"TERMINAL",0,0,"Step 269, loss: 3.5680999755859375, step time: 18.638134002685547ms\r\n",,terminal_output +12719,13071981,"TERMINAL",0,0,"Step 270, loss: 3.548867702484131, step time: 19.368886947631836ms\r\nStep 271, loss: 3.5758838653564453, step time: 18.60523223876953ms\r\n",,terminal_output +12720,13072081,"TERMINAL",0,0,"Step 272, loss: 3.6534013748168945, step time: 18.504619598388672ms\r\n",,terminal_output +12721,13072144,"TERMINAL",0,0,"Step 273, loss: 3.6011581420898438, step time: 20.461320877075195ms\r\n",,terminal_output +12722,13072208,"TERMINAL",0,0,"Step 274, loss: 3.5878636837005615, step time: 19.4399356842041ms\r\n",,terminal_output +12723,13072261,"TERMINAL",0,0,"Step 275, loss: 3.5689773559570312, step time: 18.90277862548828ms\r\n",,terminal_output +12724,13072367,"TERMINAL",0,0,"Step 276, loss: 3.792187213897705, step time: 19.36626434326172ms\r\nStep 277, loss: 3.859490156173706, step time: 18.67198944091797ms\r\n",,terminal_output +12725,13072464,"TERMINAL",0,0,"Step 278, loss: 3.5558807849884033, step time: 26.251792907714844ms\r\n",,terminal_output +12726,13072518,"TERMINAL",0,0,"Step 279, loss: 3.763871669769287, step time: 19.13285255432129ms\r\n",,terminal_output +12727,13072624,"TERMINAL",0,0,"Step 280, loss: 3.6789658069610596, step time: 18.59450340270996ms\r\nStep 281, loss: 3.568511724472046, step time: 18.6612606048584ms\r\n",,terminal_output +12728,13072720,"TERMINAL",0,0,"Step 282, loss: 3.7220780849456787, step time: 19.341707229614258ms\r\n",,terminal_output +12729,13072783,"TERMINAL",0,0,"Step 283, loss: 3.563838243484497, step time: 18.706083297729492ms\r\n",,terminal_output +12730,13072845,"TERMINAL",0,0,"Step 284, loss: 4.155140399932861, step time: 18.595218658447266ms\r\n",,terminal_output 
+12731,13072925,"TERMINAL",0,0,"Step 285, loss: 3.5620994567871094, step time: 19.147157669067383ms\r\n",,terminal_output +12732,13073016,"TERMINAL",0,0,"Step 286, loss: 3.6190521717071533, step time: 18.780231475830078ms\r\nStep 287, loss: 3.575111150741577, step time: 27.666330337524414ms\r\n",,terminal_output +12733,13073125,"TERMINAL",0,0,"Step 288, loss: 3.641087055206299, step time: 29.863595962524414ms\r\n",,terminal_output +12734,13073188,"TERMINAL",0,0,"Step 289, loss: 3.7198729515075684, step time: 24.13177490234375ms\r\n",,terminal_output +12735,13073251,"TERMINAL",0,0,"Step 290, loss: 3.516509532928467, step time: 21.839618682861328ms\r\n",,terminal_output +12736,13073313,"TERMINAL",0,0,"Step 291, loss: 3.521411895751953, step time: 20.038127899169922ms\r\n",,terminal_output +12737,13073375,"TERMINAL",0,0,"Step 292, loss: 3.6502978801727295, step time: 18.82457733154297ms\r\n",,terminal_output +12738,13073438,"TERMINAL",0,0,"Step 293, loss: 3.739314079284668, step time: 18.63574981689453ms\r\n",,terminal_output +12739,13073499,"TERMINAL",0,0,"Step 294, loss: 3.6360831260681152, step time: 18.789291381835938ms\r\n",,terminal_output +12740,13073562,"TERMINAL",0,0,"Step 295, loss: 3.5532994270324707, step time: 18.3255672454834ms\r\n",,terminal_output +12741,13073665,"TERMINAL",0,0,"Step 296, loss: 3.7776553630828857, step time: 17.7309513092041ms\r\nStep 297, loss: 3.527681589126587, step time: 18.693208694458008ms\r\n",,terminal_output +12742,13073763,"TERMINAL",0,0,"Step 298, loss: 3.5263304710388184, step time: 19.019365310668945ms\r\n",,terminal_output +12743,13073827,"TERMINAL",0,0,"Step 299, loss: 3.4855806827545166, step time: 17.78864860534668ms\r\n",,terminal_output +12744,13073892,"TERMINAL",0,0,"Step 300, loss: 3.5135021209716797, step time: 18.62025260925293ms\r\n",,terminal_output +12745,13073953,"TERMINAL",0,0,"Step 301, loss: 3.58273983001709, step time: 18.0208683013916ms\r\n",,terminal_output +12746,13074018,"TERMINAL",0,0,"Step 302, loss: 3.514829397201538, step time: 18.121957778930664ms\r\n",,terminal_output +12747,13074082,"TERMINAL",0,0,"Step 303, loss: 3.5749146938323975, step time: 18.55301856994629ms\r\n",,terminal_output +12748,13074147,"TERMINAL",0,0,"Step 304, loss: 3.5622994899749756, step time: 18.132925033569336ms\r\n",,terminal_output +12749,13074211,"TERMINAL",0,0,"Step 305, loss: 3.6781795024871826, step time: 18.045425415039062ms\r\n",,terminal_output +12750,13074544,"TERMINAL",0,0,"Step 306, loss: 3.4416067600250244, step time: 358.96921157836914ms\r\n",,terminal_output +12751,13074693,"TERMINAL",0,0,"Step 307, loss: 3.644469976425171, step time: 27.455806732177734ms\r\n",,terminal_output +12752,13074750,"TERMINAL",0,0,"Step 308, loss: 3.448575019836426, step time: 21.13795280456543ms\r\nStep 309, loss: 3.6145312786102295, step time: 19.33002471923828ms\r\n",,terminal_output +12753,13074868,"TERMINAL",0,0,"Step 310, loss: 3.3695640563964844, step time: 29.0224552154541ms\r\nStep 311, loss: 3.420666217803955, step time: 19.7296142578125ms\r\n",,terminal_output +12754,13074931,"TERMINAL",0,0,"Step 312, loss: 3.4535303115844727, step time: 18.307924270629883ms\r\n",,terminal_output +12755,13075039,"TERMINAL",0,0,"Step 313, loss: 3.671692132949829, step time: 27.558565139770508ms\r\n",,terminal_output +12756,13075116,"TERMINAL",0,0,"Step 314, loss: 3.8163936138153076, step time: 27.024269104003906ms\r\n",,terminal_output +12757,13075178,"TERMINAL",0,0,"Step 315, loss: 3.5983762741088867, step time: 28.907299041748047ms\r\n",,terminal_output 
+12758,13075240,"TERMINAL",0,0,"Step 316, loss: 3.55959415435791, step time: 19.606590270996094ms\r\n",,terminal_output +12759,13075302,"TERMINAL",0,0,"Step 317, loss: 3.485574245452881, step time: 18.123388290405273ms\r\n",,terminal_output +12760,13075364,"TERMINAL",0,0,"Step 318, loss: 3.6289682388305664, step time: 18.69344711303711ms\r\n",,terminal_output +12761,13075424,"TERMINAL",0,0,"Step 319, loss: 3.4916346073150635, step time: 18.242359161376953ms\r\n",,terminal_output +12762,13075485,"TERMINAL",0,0,"Step 320, loss: 3.860574245452881, step time: 17.8530216217041ms\r\n",,terminal_output +12763,13075544,"TERMINAL",0,0,"Step 321, loss: 3.552736282348633, step time: 18.77570152282715ms\r\n",,terminal_output +12764,13075604,"TERMINAL",0,0,"Step 322, loss: 3.442617893218994, step time: 19.1648006439209ms\r\n",,terminal_output +12765,13075663,"TERMINAL",0,0,"Step 323, loss: 3.409424066543579, step time: 18.2192325592041ms\r\n",,terminal_output +12766,13075725,"TERMINAL",0,0,"Step 324, loss: 3.4946482181549072, step time: 18.672704696655273ms\r\n",,terminal_output +12767,13075787,"TERMINAL",0,0,"Step 325, loss: 3.3997771739959717, step time: 18.10169219970703ms\r\n",,terminal_output +12768,13075840,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +12769,13076062,"TERMINAL",0,0,"Step 326, loss: 3.959672212600708, step time: 17.610549926757812ms\r\nStep 327, loss: 3.440978527069092, step time: 18.704652786254883ms\r\nStep 328, loss: 3.4329986572265625, step time: 18.20659637451172ms\r\nStep 329, loss: 3.596367835998535, step time: 18.009662628173828ms\r\n",,terminal_output +12770,13076115,"TERMINAL",0,0,"Step 330, loss: 3.7517292499542236, step time: 35.80474853515625ms\r\n",,terminal_output +12771,13076177,"TERMINAL",0,0,"Step 331, loss: 3.4352846145629883, step time: 26.856660842895508ms\r\n",,terminal_output +12772,13076229,"TERMINAL",0,0,"Step 332, loss: 3.406421661376953, step time: 18.895864486694336ms\r\n",,terminal_output +12773,13076339,"TERMINAL",0,0,"Step 333, loss: 3.4964683055877686, step time: 19.191741943359375ms\r\n",,terminal_output +12774,13076398,"TERMINAL",0,0,"Step 334, loss: 3.496710777282715, step time: 20.68305015563965ms\r\n",,terminal_output +12775,13076459,"TERMINAL",0,0,"Step 335, loss: 3.5374441146850586, step time: 28.177499771118164ms\r\n",,terminal_output +12776,13076522,"TERMINAL",0,0,"Step 336, loss: 3.353012800216675, step time: 20.586490631103516ms\r\n",,terminal_output +12777,13076586,"TERMINAL",0,0,"Step 337, loss: 3.679769515991211, step time: 20.513296127319336ms\r\n",,terminal_output +12778,13076647,"TERMINAL",0,0,"Step 338, loss: 3.372817277908325, step time: 20.809412002563477ms\r\n",,terminal_output +12779,13076708,"TERMINAL",0,0,"Step 339, loss: 3.4763669967651367, step time: 47.94621467590332ms\r\n",,terminal_output +12780,13076772,"TERMINAL",0,0,"Step 340, loss: 3.6588261127471924, step time: 30.307769775390625ms\r\n",,terminal_output +12781,13076882,"TERMINAL",0,0,"Step 341, loss: 3.4958114624023438, step time: 20.38431167602539ms\r\n",,terminal_output +12782,13076976,"TERMINAL",0,0,"Step 342, loss: 3.3557791709899902, step time: 19.429445266723633ms\r\nStep 343, loss: 3.484327793121338, step time: 21.063566207885742ms\r\n",,terminal_output +12783,13077031,"TERMINAL",0,0,"Step 344, loss: 3.4195213317871094, step time: 18.46480369567871ms\r\n",,terminal_output +12784,13077142,"TERMINAL",0,0,"Step 345, loss: 3.573828935623169, step time: 28.395414352416992ms\r\n",,terminal_output +12785,13077196,"TERMINAL",0,0,"Step 346, loss: 
3.7039332389831543, step time: 19.52648162841797ms\r\n",,terminal_output +12786,13077301,"TERMINAL",0,0,"Step 347, loss: 3.3608264923095703, step time: 19.32811737060547ms\r\nStep 348, loss: 3.541182041168213, step time: 20.513296127319336ms\r\n",,terminal_output +12787,13077399,"TERMINAL",0,0,"Step 349, loss: 3.483211040496826, step time: 17.853260040283203ms\r\n",,terminal_output +12788,13077461,"TERMINAL",0,0,"Step 350, loss: 3.4159646034240723, step time: 26.761293411254883ms\r\n",,terminal_output +12789,13077522,"TERMINAL",0,0,"Step 351, loss: 3.4238996505737305, step time: 28.285741806030273ms\r\n",,terminal_output +12790,13077584,"TERMINAL",0,0,"Step 352, loss: 3.4542574882507324, step time: 19.88816261291504ms\r\n",,terminal_output +12791,13077644,"TERMINAL",0,0,"Step 353, loss: 3.4627816677093506, step time: 19.869327545166016ms\r\n",,terminal_output +12792,13077706,"TERMINAL",0,0,"Step 354, loss: 3.3621857166290283, step time: 20.20263671875ms\r\n",,terminal_output +12793,13077816,"TERMINAL",0,0,"Step 355, loss: 3.691389322280884, step time: 18.890380859375ms\r\n",,terminal_output +12794,13077900,"TERMINAL",0,0,"Step 356, loss: 3.579784393310547, step time: 17.80533790588379ms\r\nStep 357, loss: 3.4179067611694336, step time: 35.83788871765137ms\r\n",,terminal_output +12795,13077959,"TERMINAL",0,0,"Step 358, loss: 3.5756514072418213, step time: 29.050588607788086ms\r\n",,terminal_output +12796,13078023,"TERMINAL",0,0,"Step 359, loss: 3.484461545944214, step time: 21.724224090576172ms\r\n",,terminal_output +12797,13078132,"TERMINAL",0,0,"Step 360, loss: 3.5668210983276367, step time: 20.730018615722656ms\r\n",,terminal_output +12798,13078184,"TERMINAL",0,0,"Step 361, loss: 3.4311931133270264, step time: 19.963502883911133ms\r\n",,terminal_output +12799,13078288,"TERMINAL",0,0,"Step 362, loss: 3.3777427673339844, step time: 19.64712142944336ms\r\nStep 363, loss: 3.367206335067749, step time: 20.23625373840332ms\r\n",,terminal_output +12800,13078382,"TERMINAL",0,0,"Step 364, loss: 3.3792529106140137, step time: 19.366741180419922ms\r\n",,terminal_output +12801,13078441,"TERMINAL",0,0,"Step 365, loss: 3.4136695861816406, step time: 19.808292388916016ms\r\n",,terminal_output +12802,13078547,"TERMINAL",0,0,"Step 366, loss: 3.464433193206787, step time: 23.97918701171875ms\r\nStep 367, loss: 3.4774107933044434, step time: 20.03622055053711ms\r\n",,terminal_output +12803,13078657,"TERMINAL",0,0,"Step 368, loss: 3.3498055934906006, step time: 19.53887939453125ms\r\n",,terminal_output +12804,13078668,"TERMINAL",0,0,"Step 369, loss: 3.3468573093414307, step time: 19.835472106933594ms\r\n",,terminal_output +12805,13078764,"TERMINAL",0,0,"Step 370, loss: 3.9060564041137695, step time: 18.811941146850586ms\r\n",,terminal_output +12806,13078830,"TERMINAL",0,0,"Step 371, loss: 3.357588529586792, step time: 19.207239151000977ms\r\n",,terminal_output +12807,13078936,"TERMINAL",0,0,"Step 372, loss: 3.5271999835968018, step time: 20.63918113708496ms\r\nStep 373, loss: 3.5824193954467773, step time: 18.98789405822754ms\r\n",,terminal_output +12808,13078998,"TERMINAL",0,0,"Step 374, loss: 3.7728030681610107, step time: 18.71776580810547ms\r\n",,terminal_output +12809,13079051,"TERMINAL",0,0,"Step 375, loss: 3.822685956954956, step time: 19.561052322387695ms\r\n",,terminal_output +12810,13079116,"TERMINAL",0,0,"Step 376, loss: 3.5983285903930664, step time: 19.189119338989258ms\r\n",,terminal_output +12811,13079178,"TERMINAL",0,0,"Step 377, loss: 3.4406397342681885, step time: 
19.364595413208008ms\r\n",,terminal_output +12812,13079242,"TERMINAL",0,0,"Step 378, loss: 3.4445619583129883, step time: 20.157814025878906ms\r\n",,terminal_output +12813,13079382,"TERMINAL",0,0,"Step 379, loss: 3.4500014781951904, step time: 27.184486389160156ms\r\nStep 380, loss: 3.40958309173584, step time: 20.158052444458008ms\r\n",,terminal_output +12814,13079442,"TERMINAL",0,0,"Step 381, loss: 3.627074718475342, step time: 20.226240158081055ms\r\n",,terminal_output +12815,13079534,"TERMINAL",0,0,"Step 382, loss: 3.378175735473633, step time: 19.092082977294922ms\r\n",,terminal_output +12816,13079586,"TERMINAL",0,0,"Step 383, loss: 3.6824469566345215, step time: 19.217491149902344ms\r\n",,terminal_output +12817,13079679,"TERMINAL",0,0,"Step 384, loss: 3.452260732650757, step time: 22.968292236328125ms\r\n",,terminal_output +12818,13079788,"TERMINAL",0,0,"Step 385, loss: 3.4435126781463623, step time: 47.7144718170166ms\r\nStep 386, loss: 3.4166572093963623, step time: 22.038698196411133ms\r\n",,terminal_output +12819,13079850,"TERMINAL",0,0,"Step 387, loss: 3.640009641647339, step time: 20.028352737426758ms\r\n",,terminal_output +12820,13079978,"TERMINAL",0,0,"Step 388, loss: 3.4011712074279785, step time: 21.26765251159668ms\r\nStep 389, loss: 3.5046896934509277, step time: 21.361351013183594ms\r\n",,terminal_output +12821,13080088,"TERMINAL",0,0,"Step 390, loss: 3.4974236488342285, step time: 29.345273971557617ms\r\n",,terminal_output +12822,13080149,"TERMINAL",0,0,"Step 391, loss: 3.3019869327545166, step time: 23.03791046142578ms\r\n",,terminal_output +12823,13080252,"TERMINAL",0,0,"Step 392, loss: 3.3455429077148438, step time: 18.84174346923828ms\r\nStep 393, loss: 3.3955535888671875, step time: 18.412351608276367ms\r\n",,terminal_output +12824,13080366,"TERMINAL",0,0,"Step 394, loss: 3.259045124053955, step time: 17.770767211914062ms\r\nStep 395, loss: 3.5734426975250244, step time: 17.77958869934082ms\r\n",,terminal_output +12825,13080427,"TERMINAL",0,0,"Step 396, loss: 3.344585657119751, step time: 18.117189407348633ms\r\n",,terminal_output +12826,13080492,"TERMINAL",0,0,"Step 397, loss: 3.2055861949920654, step time: 17.70615577697754ms\r\n",,terminal_output +12827,13080584,"TERMINAL",0,0,"Step 398, loss: 3.3102614879608154, step time: 18.43881607055664ms\r\n",,terminal_output +12828,13080636,"TERMINAL",0,0,"Step 399, loss: 3.304657459259033, step time: 18.538713455200195ms\r\n",,terminal_output +12829,13080731,"TERMINAL",0,0,"Step 400, loss: 3.308197259902954, step time: 17.914533615112305ms\r\n",,terminal_output +12830,13080812,"TERMINAL",0,0,"Step 401, loss: 3.296997547149658, step time: 18.185853958129883ms\r\nStep 402, loss: 3.351926326751709, step time: 18.041133880615234ms\r\n",,terminal_output +12831,13080874,"TERMINAL",0,0,"Step 403, loss: 3.4089438915252686, step time: 17.969608306884766ms\r\n",,terminal_output +12832,13080945,"TERMINAL",0,0,"Step 404, loss: 3.324066638946533, step time: 17.487287521362305ms\r\n",,terminal_output +12833,13080997,"TERMINAL",0,0,"Step 405, loss: 3.455606698989868, step time: 18.06807518005371ms\r\n",,terminal_output +12834,13081092,"TERMINAL",0,0,"Step 406, loss: 3.524430990219116, step time: 18.908977508544922ms\r\n",,terminal_output +12835,13081143,"TERMINAL",0,0,"Step 407, loss: 3.3180465698242188, step time: 18.004417419433594ms\r\n",,terminal_output +12836,13081241,"TERMINAL",0,0,"Step 408, loss: 3.30737566947937, step time: 18.03898811340332ms\r\n",,terminal_output +12837,13081292,"TERMINAL",0,0,"Step 409, loss: 
3.3669629096984863, step time: 17.624378204345703ms\r\n",,terminal_output +12838,13081355,"TERMINAL",0,0,"Step 410, loss: 3.2706711292266846, step time: 19.880056381225586ms\r\n",,terminal_output +12839,13081413,"TERMINAL",0,0,"Step 411, loss: 3.6697487831115723, step time: 19.021272659301758ms\r\n",,terminal_output +12840,13081505,"TERMINAL",0,0,"Step 412, loss: 3.3685147762298584, step time: 17.726421356201172ms\r\n",,terminal_output +12841,13081558,"TERMINAL",0,0,"Step 413, loss: 3.800107717514038, step time: 18.378496170043945ms\r\n",,terminal_output +12842,13081666,"TERMINAL",0,0,"Step 414, loss: 3.3240432739257812, step time: 18.82457733154297ms\r\nStep 415, loss: 3.288235664367676, step time: 18.086671829223633ms\r\n",,terminal_output +12843,13081725,"TERMINAL",0,0,"Step 416, loss: 3.3509504795074463, step time: 18.212556838989258ms\r\n",,terminal_output +12844,13081786,"TERMINAL",0,0,"Step 417, loss: 3.573028087615967, step time: 18.502235412597656ms\r\n",,terminal_output +12845,13081849,"TERMINAL",0,0,"Step 418, loss: 3.516404151916504, step time: 18.270254135131836ms\r\n",,terminal_output +12846,13081911,"TERMINAL",0,0,"Step 419, loss: 3.2908668518066406, step time: 17.80414581298828ms\r\n",,terminal_output +12847,13081973,"TERMINAL",0,0,"Step 420, loss: 3.319967746734619, step time: 18.470048904418945ms\r\n",,terminal_output +12848,13082099,"TERMINAL",0,0,"Step 421, loss: 3.405092716217041, step time: 17.90785789489746ms\r\nStep 422, loss: 3.43595027923584, step time: 18.451452255249023ms\r\n",,terminal_output +12849,13082161,"TERMINAL",0,0,"Step 423, loss: 3.519303321838379, step time: 18.32866668701172ms\r\n",,terminal_output +12850,13082533,"TERMINAL",0,0,"Step 424, loss: 3.4240407943725586, step time: 366.9581413269043ms\r\n",,terminal_output +12851,13082675,"TERMINAL",0,0,"Step 425, loss: 3.355448007583618, step time: 31.27908706665039ms\r\nStep 426, loss: 3.4990921020507812, step time: 20.919322967529297ms\r\n",,terminal_output +12852,13082738,"TERMINAL",0,0,"Step 427, loss: 3.3800771236419678, step time: 18.780946731567383ms\r\n",,terminal_output +12853,13082796,"TERMINAL",0,0,"Step 428, loss: 3.227243661880493, step time: 17.96889305114746ms\r\n",,terminal_output +12854,13082859,"TERMINAL",0,0,"Step 429, loss: 3.246396541595459, step time: 17.7614688873291ms\r\n",,terminal_output +12855,13082921,"TERMINAL",0,0,"Step 430, loss: 3.267817497253418, step time: 36.00049018859863ms\r\n",,terminal_output +12856,13082988,"TERMINAL",0,0,"Step 431, loss: 3.245276689529419, step time: 22.50838279724121ms\r\n",,terminal_output +12857,13083052,"TERMINAL",0,0,"Step 432, loss: 3.2034270763397217, step time: 22.994279861450195ms\r\n",,terminal_output +12858,13083159,"TERMINAL",0,0,"Step 433, loss: 3.322828531265259, step time: 23.525714874267578ms\r\n",,terminal_output +12859,13083220,"TERMINAL",0,0,"Step 434, loss: 3.202282667160034, step time: 22.135496139526367ms\r\n",,terminal_output +12860,13083284,"TERMINAL",0,0,"Step 435, loss: 3.292877197265625, step time: 19.1648006439209ms\r\n",,terminal_output +12861,13083348,"TERMINAL",0,0,"Step 436, loss: 3.479626417160034, step time: 18.518686294555664ms\r\n",,terminal_output +12862,13083406,"TERMINAL",0,0,"Step 437, loss: 3.261664390563965, step time: 17.7457332611084ms\r\n",,terminal_output +12863,13083464,"TERMINAL",0,0,"Step 438, loss: 3.160892963409424, step time: 17.9746150970459ms\r\n",,terminal_output +12864,13083527,"TERMINAL",0,0,"Step 439, loss: 3.2821502685546875, step time: 18.395662307739258ms\r\n",,terminal_output 
+12865,13083640,"TERMINAL",0,0,"Step 440, loss: 3.7937798500061035, step time: 19.19722557067871ms\r\nStep 441, loss: 3.1652770042419434, step time: 17.74001121520996ms\r\n",,terminal_output +12866,13083692,"TERMINAL",0,0,"Step 442, loss: 3.2917978763580322, step time: 17.9750919342041ms\r\n",,terminal_output +12867,13083762,"TERMINAL",0,0,"Step 443, loss: 3.6319124698638916, step time: 17.777204513549805ms\r\n",,terminal_output +12868,13083822,"TERMINAL",0,0,"Step 444, loss: 3.332979440689087, step time: 18.023252487182617ms\r\n",,terminal_output +12869,13083923,"TERMINAL",0,0,"Step 445, loss: 3.3514420986175537, step time: 17.89259910583496ms\r\n",,terminal_output +12870,13084025,"TERMINAL",0,0,"Step 446, loss: 3.271710157394409, step time: 17.51422882080078ms\r\nStep 447, loss: 3.1941287517547607, step time: 17.316341400146484ms\r\n",,terminal_output +12871,13084091,"TERMINAL",0,0,"Step 448, loss: 3.2418296337127686, step time: 17.793655395507812ms\r\n",,terminal_output +12872,13084148,"TERMINAL",0,0,"Step 449, loss: 3.2458908557891846, step time: 17.4868106842041ms\r\n",,terminal_output +12873,13084255,"TERMINAL",0,0,"Step 450, loss: 3.165888547897339, step time: 17.452478408813477ms\r\nStep 451, loss: 3.380819320678711, step time: 17.93694496154785ms\r\n",,terminal_output +12874,13084355,"TERMINAL",0,0,"Step 452, loss: 3.209061622619629, step time: 17.552614212036133ms\r\n",,terminal_output +12875,13084402,"TERMINAL",0,0,"Step 453, loss: 3.2656984329223633, step time: 17.531871795654297ms\r\n",,terminal_output +12876,13084499,"TERMINAL",0,0,"Step 454, loss: 3.3965156078338623, step time: 17.73357391357422ms\r\n",,terminal_output +12877,13084571,"TERMINAL",0,0,"Step 455, loss: 3.2539656162261963, step time: 17.543315887451172ms\r\nStep 456, loss: 3.343231201171875, step time: 17.43483543395996ms\r\n",,terminal_output +12878,13084635,"TERMINAL",0,0,"Step 457, loss: 3.417072296142578, step time: 17.68946647644043ms\r\n",,terminal_output +12879,13084697,"TERMINAL",0,0,"Step 458, loss: 3.284874677658081, step time: 17.466068267822266ms\r\n",,terminal_output +12880,13084760,"TERMINAL",0,0,"Step 459, loss: 3.168848752975464, step time: 17.421483993530273ms\r\n",,terminal_output +12881,13084824,"TERMINAL",0,0,"Step 460, loss: 3.1946752071380615, step time: 18.777847290039062ms\r\n",,terminal_output +12882,13084886,"TERMINAL",0,0,"Step 461, loss: 3.3196983337402344, step time: 17.922401428222656ms\r\n",,terminal_output +12883,13085006,"TERMINAL",0,0,"Step 462, loss: 3.206270933151245, step time: 17.46392250061035ms\r\n",,terminal_output +12884,13085079,"TERMINAL",0,0,"Step 463, loss: 3.2069671154022217, step time: 17.69232749938965ms\r\n",,terminal_output +12885,13085147,"TERMINAL",0,0,"Step 464, loss: 3.138157606124878, step time: 17.405986785888672ms\r\nStep 465, loss: 3.3358466625213623, step time: 17.549753189086914ms\r\n",,terminal_output +12886,13085241,"TERMINAL",0,0,"Step 466, loss: 3.1906890869140625, step time: 17.720699310302734ms\r\n",,terminal_output +12887,13085353,"TERMINAL",0,0,"Step 467, loss: 3.1265971660614014, step time: 17.46058464050293ms\r\nStep 468, loss: 3.265618324279785, step time: 17.551898956298828ms\r\n",,terminal_output +12888,13085458,"TERMINAL",0,0,"Step 469, loss: 3.2813189029693604, step time: 17.714500427246094ms\r\nStep 470, loss: 3.1749799251556396, step time: 17.493724822998047ms\r\n",,terminal_output +12889,13085559,"TERMINAL",0,0,"Step 471, loss: 3.1693408489227295, step time: 25.88176727294922ms\r\n",,terminal_output 
+12890,13085629,"TERMINAL",0,0,"Step 472, loss: 3.262582540512085, step time: 19.717693328857422ms\r\n",,terminal_output +12891,13085683,"TERMINAL",0,0,"Step 473, loss: 3.1506612300872803, step time: 18.108129501342773ms\r\n",,terminal_output +12892,13085749,"TERMINAL",0,0,"Step 474, loss: 3.1192338466644287, step time: 17.592906951904297ms\r\n",,terminal_output +12893,13085812,"TERMINAL",0,0,"Step 475, loss: 3.485172986984253, step time: 17.763853073120117ms\r\n",,terminal_output +12894,13085872,"TERMINAL",0,0,"Step 476, loss: 3.290539026260376, step time: 17.43626594543457ms\r\n",,terminal_output +12895,13085924,"TERMINAL",0,0,"Step 477, loss: 3.1957223415374756, step time: 17.688989639282227ms\r\n",,terminal_output +12896,13086026,"TERMINAL",0,0,"Step 478, loss: 3.3619771003723145, step time: 17.818689346313477ms\r\nStep 479, loss: 3.391371726989746, step time: 17.39501953125ms\r\n",,terminal_output +12897,13086119,"TERMINAL",0,0,"Step 480, loss: 3.2052273750305176, step time: 17.543315887451172ms\r\n",,terminal_output +12898,13086229,"TERMINAL",0,0,"Step 481, loss: 3.3865106105804443, step time: 17.93646812438965ms\r\nStep 482, loss: 3.312429666519165, step time: 17.63176918029785ms\r\n",,terminal_output +12899,13086299,"TERMINAL",0,0,"Step 483, loss: 3.190742254257202, step time: 17.670392990112305ms\r\n",,terminal_output +12900,13086360,"TERMINAL",0,0,"Step 484, loss: 3.242037534713745, step time: 20.09749412536621ms\r\n",,terminal_output +12901,13086427,"TERMINAL",0,0,"Step 485, loss: 3.3712220191955566, step time: 17.856597900390625ms\r\n",,terminal_output +12902,13086491,"TERMINAL",0,0,"Step 486, loss: 3.284458875656128, step time: 17.637252807617188ms\r\n",,terminal_output +12903,13086554,"TERMINAL",0,0,"Step 487, loss: 3.18546462059021, step time: 17.746448516845703ms\r\n",,terminal_output +12904,13086620,"TERMINAL",0,0,"Step 488, loss: 3.1027538776397705, step time: 17.46368408203125ms\r\n",,terminal_output +12905,13086690,"TERMINAL",0,0,"Step 489, loss: 3.2620654106140137, step time: 17.459869384765625ms\r\n",,terminal_output +12906,13086751,"TERMINAL",0,0,"Step 490, loss: 3.3873965740203857, step time: 17.760753631591797ms\r\n",,terminal_output +12907,13086820,"TERMINAL",0,0,"Step 491, loss: 3.244058132171631, step time: 17.431974411010742ms\r\n",,terminal_output +12908,13086882,"TERMINAL",0,0,"Step 492, loss: 3.293890953063965, step time: 18.745899200439453ms\r\n",,terminal_output +12909,13086959,"TERMINAL",0,0,"Step 493, loss: 3.050508975982666, step time: 17.986297607421875ms\r\n",,terminal_output +12910,13087012,"TERMINAL",0,0,"Step 494, loss: 3.182774543762207, step time: 17.65155792236328ms\r\n",,terminal_output +12911,13087079,"TERMINAL",0,0,"Step 495, loss: 3.1861746311187744, step time: 17.414093017578125ms\r\n",,terminal_output +12912,13087138,"TERMINAL",0,0,"Step 496, loss: 3.1282737255096436, step time: 17.61627197265625ms\r\n",,terminal_output +12913,13087201,"TERMINAL",0,0,"Step 497, loss: 3.260277032852173, step time: 17.52185821533203ms\r\n",,terminal_output +12914,13087266,"TERMINAL",0,0,"Step 498, loss: 3.050048589706421, step time: 17.389297485351562ms\r\n",,terminal_output +12915,13087329,"TERMINAL",0,0,"Step 499, loss: 3.1730380058288574, step time: 17.68040657043457ms\r\n",,terminal_output +12916,13091058,"TERMINAL",0,0,"Step 500, loss: 3.1426491737365723, step time: 38.35487365722656ms\r\n",,terminal_output +12917,13091165,"TERMINAL",0,0,"Step 501, loss: 3.1229662895202637, step time: 28.213024139404297ms\r\n",,terminal_output 
+12918,13091216,"TERMINAL",0,0,"Step 502, loss: 3.308950185775757, step time: 21.904706954956055ms\r\n",,terminal_output +12919,13091365,"TERMINAL",0,0,"Step 503, loss: 3.5268476009368896, step time: 19.20914649963379ms\r\nStep 504, loss: 3.1161627769470215, step time: 19.45352554321289ms\r\n",,terminal_output +12920,13091418,"TERMINAL",0,0,"Step 505, loss: 3.23127818107605, step time: 19.051551818847656ms\r\n",,terminal_output +12921,13091521,"TERMINAL",0,0,"Step 506, loss: 3.0240888595581055, step time: 17.624855041503906ms\r\nStep 507, loss: 3.232754707336426, step time: 17.50969886779785ms\r\n",,terminal_output +12922,13091617,"TERMINAL",0,0,"Step 508, loss: 3.2404372692108154, step time: 17.507553100585938ms\r\n",,terminal_output +12923,13091669,"TERMINAL",0,0,"Step 509, loss: 3.3763718605041504, step time: 17.4868106842041ms\r\n",,terminal_output +12924,13091786,"TERMINAL",0,0,"Step 510, loss: 3.207689046859741, step time: 17.161846160888672ms\r\nStep 511, loss: 3.1978399753570557, step time: 17.673492431640625ms\r\n",,terminal_output +12925,13091892,"TERMINAL",0,0,"Step 512, loss: 3.098280429840088, step time: 17.015457153320312ms\r\nStep 513, loss: 3.098233222961426, step time: 21.28458023071289ms\r\n",,terminal_output +12926,13092020,"TERMINAL",0,0,"Step 514, loss: 3.141817808151245, step time: 19.3479061126709ms\r\nStep 515, loss: 3.4811031818389893, step time: 18.797636032104492ms\r\n",,terminal_output +12927,13092085,"TERMINAL",0,0,"Step 516, loss: 3.0722239017486572, step time: 17.65751838684082ms\r\n",,terminal_output +12928,13092151,"TERMINAL",0,0,"Step 517, loss: 3.547832727432251, step time: 17.700672149658203ms\r\n",,terminal_output +12929,13092212,"TERMINAL",0,0,"Step 518, loss: 3.264246702194214, step time: 17.223119735717773ms\r\n",,terminal_output +12930,13092275,"TERMINAL",0,0,"Step 519, loss: 3.1368494033813477, step time: 17.106294631958008ms\r\n",,terminal_output +12931,13092342,"TERMINAL",0,0,"Step 520, loss: 3.236825704574585, step time: 17.421245574951172ms\r\n",,terminal_output +12932,13092402,"TERMINAL",0,0,"Step 521, loss: 3.1431171894073486, step time: 17.205476760864258ms\r\n",,terminal_output +12933,13092467,"TERMINAL",0,0,"Step 522, loss: 3.1081550121307373, step time: 17.20452308654785ms\r\n",,terminal_output +12934,13092532,"TERMINAL",0,0,"Step 523, loss: 3.1101233959198, step time: 17.18592643737793ms\r\n",,terminal_output +12935,13092597,"TERMINAL",0,0,"Step 524, loss: 3.190566062927246, step time: 17.117738723754883ms\r\n",,terminal_output +12936,13092658,"TERMINAL",0,0,"Step 525, loss: 3.378103017807007, step time: 17.299890518188477ms\r\n",,terminal_output +12937,13092719,"TERMINAL",0,0,"Step 526, loss: 3.3935272693634033, step time: 17.58098602294922ms\r\n",,terminal_output +12938,13092780,"TERMINAL",0,0,"Step 527, loss: 3.151963710784912, step time: 17.47441291809082ms\r\n",,terminal_output +12939,13092842,"TERMINAL",0,0,"Step 528, loss: 3.049553871154785, step time: 17.2879695892334ms\r\n",,terminal_output +12940,13092937,"TERMINAL",0,0,"Step 529, loss: 3.1355948448181152, step time: 17.8678035736084ms\r\n",,terminal_output +12941,13092991,"TERMINAL",0,0,"Step 530, loss: 3.177429676055908, step time: 20.735502243041992ms\r\n",,terminal_output +12942,13093098,"TERMINAL",0,0,"Step 531, loss: 3.0669848918914795, step time: 17.468690872192383ms\r\nStep 532, loss: 3.2273151874542236, step time: 17.544269561767578ms\r\n",,terminal_output +12943,13093158,"TERMINAL",0,0,"Step 533, loss: 3.1065268516540527, step time: 
17.412185668945312ms\r\n",,terminal_output +12944,13093252,"TERMINAL",0,0,"Step 534, loss: 2.995347499847412, step time: 17.166614532470703ms\r\n",,terminal_output +12945,13093306,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +12946,13093323,"TERMINAL",0,0,"Step 535, loss: 3.3329050540924072, step time: 17.345190048217773ms\r\n",,terminal_output +12947,13093411,"TERMINAL",0,0,"Step 536, loss: 3.1307930946350098, step time: 17.164945602416992ms\r\nStep 537, loss: 3.141733169555664, step time: 17.029762268066406ms\r\n",,terminal_output +12948,13093479,"TERMINAL",0,0,"Step 538, loss: 3.1254992485046387, step time: 18.379688262939453ms\r\n",,terminal_output +12949,13093540,"TERMINAL",0,0,"Step 539, loss: 3.1085197925567627, step time: 17.546892166137695ms\r\n",,terminal_output +12950,13093650,"TERMINAL",0,0,"Step 540, loss: 3.0745465755462646, step time: 17.260074615478516ms\r\n",,terminal_output +12951,13093658,"TERMINAL",0,0,"Step 541, loss: 3.155090570449829, step time: 17.30632781982422ms\r\n",,terminal_output +12952,13093759,"TERMINAL",0,0,"Step 542, loss: 3.078294515609741, step time: 17.12942123413086ms\r\n",,terminal_output +12953,13093819,"TERMINAL",0,0,"Step 543, loss: 3.033266544342041, step time: 17.078638076782227ms\r\n",,terminal_output +12954,13093881,"TERMINAL",0,0,"Step 544, loss: 3.1012537479400635, step time: 17.347097396850586ms\r\n",,terminal_output +12955,13093949,"TERMINAL",0,0,"Step 545, loss: 3.0816006660461426, step time: 17.103910446166992ms\r\n",,terminal_output +12956,13094001,"TERMINAL",0,0,"Step 546, loss: 3.0275750160217285, step time: 17.026662826538086ms\r\n",,terminal_output +12957,13094064,"TERMINAL",0,0,"Step 547, loss: 3.3888466358184814, step time: 17.45152473449707ms\r\n",,terminal_output +12958,13094123,"TERMINAL",0,0,"Step 548, loss: 3.089984178543091, step time: 17.112016677856445ms\r\n",,terminal_output +12959,13094185,"TERMINAL",0,0,"Step 549, loss: 3.222837209701538, step time: 17.17662811279297ms\r\n",,terminal_output +12960,13094298,"TERMINAL",0,0,"Step 550, loss: 3.3546907901763916, step time: 19.301891326904297ms\r\nStep 551, loss: 3.0613083839416504, step time: 17.20428466796875ms\r\n",,terminal_output +12961,13094367,"TERMINAL",0,0,"Step 552, loss: 3.172055244445801, step time: 17.076492309570312ms\r\n",,terminal_output +12962,13094429,"TERMINAL",0,0,"Step 553, loss: 3.0098536014556885, step time: 17.4410343170166ms\r\n",,terminal_output +12963,13094490,"TERMINAL",0,0,"Step 554, loss: 3.285386323928833, step time: 17.102479934692383ms\r\n",,terminal_output +12964,13094553,"TERMINAL",0,0,"Step 555, loss: 3.054184913635254, step time: 17.13252067565918ms\r\n",,terminal_output +12965,13094614,"TERMINAL",0,0,"Step 556, loss: 3.0467803478240967, step time: 17.319440841674805ms\r\n",,terminal_output +12966,13094677,"TERMINAL",0,0,"Step 557, loss: 3.0676023960113525, step time: 16.982078552246094ms\r\n",,terminal_output +12967,13094739,"TERMINAL",0,0,"Step 558, loss: 3.123826265335083, step time: 17.06695556640625ms\r\n",,terminal_output +12968,13094800,"TERMINAL",0,0,"Step 559, loss: 3.12650465965271, step time: 17.5321102142334ms\r\n",,terminal_output +12969,13094861,"TERMINAL",0,0,"Step 560, loss: 3.05751895904541, step time: 17.10367202758789ms\r\n",,terminal_output +12970,13094922,"TERMINAL",0,0,"Step 561, loss: 3.2181596755981445, step time: 17.163753509521484ms\r\n",,terminal_output +12971,13094984,"TERMINAL",0,0,"Step 562, loss: 3.040540933609009, step time: 17.377614974975586ms\r\n",,terminal_output 
+12972,13095043,"TERMINAL",0,0,"Step 563, loss: 3.1017563343048096, step time: 17.064332962036133ms\r\n",,terminal_output +12973,13095104,"TERMINAL",0,0,"Step 564, loss: 3.1235897541046143, step time: 16.987323760986328ms\r\n",,terminal_output +12974,13095212,"TERMINAL",0,0,"Step 565, loss: 3.077768087387085, step time: 17.38142967224121ms\r\n",,terminal_output +12975,13095266,"TERMINAL",0,0,"Step 566, loss: 3.011887550354004, step time: 17.02737808227539ms\r\n",,terminal_output +12976,13095371,"TERMINAL",0,0,"Step 567, loss: 3.0261499881744385, step time: 17.583847045898438ms\r\nStep 568, loss: 3.1250665187835693, step time: 17.324447631835938ms\r\n",,terminal_output +12977,13095431,"TERMINAL",0,0,"Step 569, loss: 3.0450034141540527, step time: 17.280101776123047ms\r\n",,terminal_output +12978,13095495,"TERMINAL",0,0,"Step 570, loss: 3.1407418251037598, step time: 16.96610450744629ms\r\n",,terminal_output +12979,13095590,"TERMINAL",0,0,"Step 571, loss: 3.086327075958252, step time: 17.555952072143555ms\r\n",,terminal_output +12980,13095641,"TERMINAL",0,0,"Step 572, loss: 3.0184898376464844, step time: 16.976594924926758ms\r\n",,terminal_output +12981,13095746,"TERMINAL",0,0,"Step 573, loss: 3.2299537658691406, step time: 17.106056213378906ms\r\nStep 574, loss: 3.031233072280884, step time: 17.255544662475586ms\r\n",,terminal_output +12982,13095807,"TERMINAL",0,0,"Step 575, loss: 2.9987010955810547, step time: 17.185211181640625ms\r\n",,terminal_output +12983,13095867,"TERMINAL",0,0,"Step 576, loss: 3.4141950607299805, step time: 17.11106300354004ms\r\n",,terminal_output +12984,13095929,"TERMINAL",0,0,"Step 577, loss: 3.035724401473999, step time: 17.61603355407715ms\r\n",,terminal_output +12985,13095992,"TERMINAL",0,0,"Step 578, loss: 3.2288241386413574, step time: 17.093420028686523ms\r\n",,terminal_output +12986,13096095,"TERMINAL",0,0,"Step 579, loss: 2.968163013458252, step time: 17.2426700592041ms\r\n",,terminal_output +12987,13096156,"TERMINAL",0,0,"Step 580, loss: 3.020369529724121, step time: 17.360448837280273ms\r\n",,terminal_output +12988,13096264,"TERMINAL",0,0,"Step 581, loss: 3.256178855895996, step time: 17.0438289642334ms\r\nStep 582, loss: 3.004216194152832, step time: 17.11726188659668ms\r\n",,terminal_output +12989,13096324,"TERMINAL",0,0,"Step 583, loss: 2.9823532104492188, step time: 17.731189727783203ms\r\n",,terminal_output +12990,13096384,"TERMINAL",0,0,"Step 584, loss: 2.9894139766693115, step time: 17.25292205810547ms\r\n",,terminal_output +12991,13096518,"TERMINAL",0,0,"Step 585, loss: 3.2476658821105957, step time: 19.234418869018555ms\r\n",,terminal_output +12992,13096570,"TERMINAL",0,0,"Step 586, loss: 3.021145820617676, step time: 17.39335060119629ms\r\nStep 587, loss: 2.8273191452026367, step time: 17.226219177246094ms\r\n",,terminal_output +12993,13096661,"TERMINAL",0,0,"Step 588, loss: 3.0441699028015137, step time: 17.055034637451172ms\r\n",,terminal_output +12994,13096712,"TERMINAL",0,0,"Step 589, loss: 3.1899712085723877, step time: 17.545700073242188ms\r\n",,terminal_output +12995,13096816,"TERMINAL",0,0,"Step 590, loss: 3.116027593612671, step time: 17.31586456298828ms\r\nStep 591, loss: 2.854785680770874, step time: 17.64512062072754ms\r\n",,terminal_output +12996,13096877,"TERMINAL",0,0,"Step 592, loss: 3.01686692237854, step time: 17.80104637145996ms\r\n",,terminal_output +12997,13096949,"TERMINAL",0,0,"Step 593, loss: 3.1416871547698975, step time: 17.520904541015625ms\r\n",,terminal_output +12998,13097002,"TERMINAL",0,0,"Step 594, loss: 
3.3190600872039795, step time: 17.155170440673828ms\r\n",,terminal_output +12999,13097094,"TERMINAL",0,0,"Step 595, loss: 3.101808547973633, step time: 17.452716827392578ms\r\n",,terminal_output +13000,13097146,"TERMINAL",0,0,"Step 596, loss: 3.1267881393432617, step time: 17.086029052734375ms\r\n",,terminal_output +13001,13097238,"TERMINAL",0,0,"Step 597, loss: 3.029484272003174, step time: 17.33851432800293ms\r\n",,terminal_output +13002,13097292,"TERMINAL",0,0,"Step 598, loss: 3.2091686725616455, step time: 17.487049102783203ms\r\n",,terminal_output +13003,13097397,"TERMINAL",0,0,"Step 599, loss: 3.233118772506714, step time: 17.361164093017578ms\r\nStep 600, loss: 2.951237201690674, step time: 17.1051025390625ms\r\n",,terminal_output +13004,13097508,"TERMINAL",0,0,"Step 601, loss: 3.0061891078948975, step time: 17.479419708251953ms\r\nStep 602, loss: 3.1074323654174805, step time: 18.112659454345703ms\r\n",,terminal_output +13005,13097601,"TERMINAL",0,0,"Step 603, loss: 3.2181601524353027, step time: 17.432212829589844ms\r\n",,terminal_output +13006,13097653,"TERMINAL",0,0,"Step 604, loss: 3.024550199508667, step time: 17.404794692993164ms\r\n",,terminal_output +13007,13097746,"TERMINAL",0,0,"Step 605, loss: 2.993032693862915, step time: 17.13109016418457ms\r\n",,terminal_output +13008,13097797,"TERMINAL",0,0,"Step 606, loss: 2.9662649631500244, step time: 17.034053802490234ms\r\n",,terminal_output +13009,13097849,"TERMINAL",0,0,"Step 607, loss: 3.1100049018859863, step time: 17.342329025268555ms\r\n",,terminal_output +13010,13097963,"TERMINAL",0,0,"Step 608, loss: 3.0876567363739014, step time: 17.07625389099121ms\r\nStep 609, loss: 3.2535126209259033, step time: 17.276525497436523ms\r\n",,terminal_output +13011,13098019,"TERMINAL",0,0,"Step 610, loss: 3.0411221981048584, step time: 17.43602752685547ms\r\n",,terminal_output +13012,13098085,"TERMINAL",0,0,"Step 611, loss: 3.035609245300293, step time: 17.252683639526367ms\r\n",,terminal_output +13013,13098160,"TERMINAL",0,0,"Step 612, loss: 2.9876034259796143, step time: 17.1353816986084ms\r\n",,terminal_output +13014,13098266,"TERMINAL",0,0,"Step 613, loss: 3.103290557861328, step time: 17.584562301635742ms\r\nStep 614, loss: 2.9788715839385986, step time: 17.076730728149414ms\r\n",,terminal_output +13015,13098371,"TERMINAL",0,0,"Step 615, loss: 3.2132279872894287, step time: 17.18878746032715ms\r\n",,terminal_output +13016,13098435,"TERMINAL",0,0,"Step 616, loss: 3.0183048248291016, step time: 19.017457962036133ms\r\n",,terminal_output +13017,13098497,"TERMINAL",0,0,"Step 617, loss: 3.01176118850708, step time: 17.217159271240234ms\r\n",,terminal_output +13018,13098558,"TERMINAL",0,0,"Step 618, loss: 3.001624584197998, step time: 17.401695251464844ms\r\n",,terminal_output +13019,13098651,"TERMINAL",0,0,"Step 619, loss: 2.922579765319824, step time: 17.30656623840332ms\r\nStep 620, loss: 3.033935070037842, step time: 16.983747482299805ms\r\n",,terminal_output +13020,13098751,"TERMINAL",0,0,"Step 621, loss: 3.039620876312256, step time: 17.102479934692383ms\r\n",,terminal_output +13021,13098861,"TERMINAL",0,0,"Step 622, loss: 3.0123414993286133, step time: 17.374038696289062ms\r\nStep 623, loss: 3.680480480194092, step time: 17.200231552124023ms\r\n",,terminal_output +13022,13098920,"TERMINAL",0,0,"Step 624, loss: 3.109011173248291, step time: 17.046451568603516ms\r\n",,terminal_output +13023,13099024,"TERMINAL",0,0,"Step 625, loss: 3.077183723449707, step time: 17.546415328979492ms\r\nStep 626, loss: 3.0140621662139893, step time: 
16.985654830932617ms\r\n",,terminal_output +13024,13099087,"TERMINAL",0,0,"Step 627, loss: 3.054466724395752, step time: 17.154693603515625ms\r\n",,terminal_output +13025,13099192,"TERMINAL",0,0,"Step 628, loss: 2.9033539295196533, step time: 17.269134521484375ms\r\n",,terminal_output +13026,13099242,"TERMINAL",0,0,"Step 629, loss: 3.189070463180542, step time: 17.100095748901367ms\r\n",,terminal_output +13027,13099346,"TERMINAL",0,0,"Step 630, loss: 2.953155755996704, step time: 17.054319381713867ms\r\nStep 631, loss: 3.0297343730926514, step time: 17.30632781982422ms\r\n",,terminal_output +13028,13099484,"TERMINAL",0,0,"Step 632, loss: 2.964137554168701, step time: 17.163991928100586ms\r\nStep 633, loss: 3.1559672355651855, step time: 20.459413528442383ms\r\n",,terminal_output +13029,13099535,"TERMINAL",0,0,"Step 634, loss: 3.0039594173431396, step time: 17.473220825195312ms\r\n",,terminal_output +13030,13099597,"TERMINAL",0,0,"Step 635, loss: 2.9032785892486572, step time: 17.15564727783203ms\r\n",,terminal_output +13031,13099719,"TERMINAL",0,0,"Step 636, loss: 2.9644775390625, step time: 17.071008682250977ms\r\nStep 637, loss: 2.894623279571533, step time: 17.389535903930664ms\r\n",,terminal_output +13032,13099783,"TERMINAL",0,0,"Step 638, loss: 3.061483383178711, step time: 17.038822174072266ms\r\n",,terminal_output +13033,13099876,"TERMINAL",0,0,"Step 639, loss: 2.8899450302124023, step time: 17.180204391479492ms\r\n",,terminal_output +13034,13099927,"TERMINAL",0,0,"Step 640, loss: 3.1218807697296143, step time: 17.44556427001953ms\r\n",,terminal_output +13035,13100033,"TERMINAL",0,0,"Step 641, loss: 3.055418014526367, step time: 17.200708389282227ms\r\nStep 642, loss: 3.105691909790039, step time: 17.154693603515625ms\r\n",,terminal_output +13036,13100131,"TERMINAL",0,0,"Step 643, loss: 2.8818862438201904, step time: 17.704248428344727ms\r\n",,terminal_output +13037,13100192,"TERMINAL",0,0,"Step 644, loss: 3.091142416000366, step time: 16.96038246154785ms\r\n",,terminal_output +13038,13100254,"TERMINAL",0,0,"Step 645, loss: 3.515016794204712, step time: 17.16446876525879ms\r\n",,terminal_output +13039,13100318,"TERMINAL",0,0,"Step 646, loss: 3.0846025943756104, step time: 17.339468002319336ms\r\n",,terminal_output +13040,13100384,"TERMINAL",0,0,"Step 647, loss: 3.25535249710083, step time: 17.291545867919922ms\r\n",,terminal_output +13041,13100435,"TERMINAL",0,0,"Step 648, loss: 2.9920101165771484, step time: 16.846418380737305ms\r\n",,terminal_output +13042,13100542,"TERMINAL",0,0,"Step 649, loss: 2.967695951461792, step time: 17.37070083618164ms\r\nStep 650, loss: 2.898470401763916, step time: 17.16899871826172ms\r\n",,terminal_output +13043,13100599,"TERMINAL",0,0,"Step 651, loss: 2.9544169902801514, step time: 16.9832706451416ms\r\n",,terminal_output +13044,13100691,"TERMINAL",0,0,"Step 652, loss: 2.9436166286468506, step time: 17.4100399017334ms\r\n",,terminal_output +13045,13100743,"TERMINAL",0,0,"Step 653, loss: 3.092282772064209, step time: 17.694473266601562ms\r\n",,terminal_output +13046,13100835,"TERMINAL",0,0,"Step 654, loss: 2.9853174686431885, step time: 16.988515853881836ms\r\n",,terminal_output +13047,13100886,"TERMINAL",0,0,"Step 655, loss: 2.8791513442993164, step time: 17.390012741088867ms\r\n",,terminal_output +13048,13100990,"TERMINAL",0,0,"Step 656, loss: 3.246440887451172, step time: 17.169713973999023ms\r\nStep 657, loss: 2.9818999767303467, step time: 17.371654510498047ms\r\n",,terminal_output +13049,13101051,"TERMINAL",0,0,"Step 658, loss: 
2.933448553085327, step time: 17.279863357543945ms\r\n",,terminal_output +13050,13101114,"TERMINAL",0,0,"Step 659, loss: 3.0029993057250977, step time: 17.14015007019043ms\r\n",,terminal_output +13051,13101175,"TERMINAL",0,0,"Step 660, loss: 3.0821897983551025, step time: 17.11583137512207ms\r\n",,terminal_output +13052,13101235,"TERMINAL",0,0,"Step 661, loss: 3.0003085136413574, step time: 17.305850982666016ms\r\n",,terminal_output +13053,13101328,"TERMINAL",0,0,"Step 662, loss: 3.011531114578247, step time: 21.79551124572754ms\r\n",,terminal_output +13054,13101380,"TERMINAL",0,0,"Step 663, loss: 2.91288161277771, step time: 20.745515823364258ms\r\n",,terminal_output +13055,13101524,"TERMINAL",0,0,"Step 664, loss: 2.952118396759033, step time: 18.77450942993164ms\r\nStep 665, loss: 2.9161367416381836, step time: 17.872095108032227ms\r\n",,terminal_output +13056,13101576,"TERMINAL",0,0,"Step 666, loss: 2.840698480606079, step time: 17.615795135498047ms\r\n",,terminal_output +13057,13101678,"TERMINAL",0,0,"Step 667, loss: 2.863809823989868, step time: 18.74089241027832ms\r\nStep 668, loss: 3.291548490524292, step time: 19.650936126708984ms\r\n",,terminal_output +13058,13101772,"TERMINAL",0,0,"Step 669, loss: 2.9734697341918945, step time: 22.45306968688965ms\r\n",,terminal_output +13059,13101823,"TERMINAL",0,0,"Step 670, loss: 3.3332715034484863, step time: 24.295330047607422ms\r\n",,terminal_output +13060,13101918,"TERMINAL",0,0,"Step 671, loss: 3.474235773086548, step time: 24.541616439819336ms\r\n",,terminal_output +13061,13101969,"TERMINAL",0,0,"Step 672, loss: 2.7398319244384766, step time: 24.254560470581055ms\r\n",,terminal_output +13062,13102020,"TERMINAL",0,0,"Step 673, loss: 3.208942413330078, step time: 24.694442749023438ms\r\n",,terminal_output +13063,13102114,"TERMINAL",0,0,"Step 674, loss: 2.942472457885742, step time: 25.251150131225586ms\r\n",,terminal_output +13064,13102219,"TERMINAL",0,0,"Step 675, loss: 3.13566255569458, step time: 24.930238723754883ms\r\nStep 676, loss: 3.251110076904297, step time: 25.22873878479004ms\r\n",,terminal_output +13065,13102330,"TERMINAL",0,0,"Step 677, loss: 2.8228392601013184, step time: 21.680593490600586ms\r\nStep 678, loss: 2.8780407905578613, step time: 18.914461135864258ms\r\n",,terminal_output +13066,13102687,"TERMINAL",0,0,"Step 679, loss: 3.0811030864715576, step time: 354.55822944641113ms\r\n",,terminal_output +13067,13102799,"TERMINAL",0,0,"Step 680, loss: 3.0968596935272217, step time: 27.57716178894043ms\r\n",,terminal_output +13068,13102849,"TERMINAL",0,0,"Step 681, loss: 3.0461385250091553, step time: 19.861221313476562ms\r\n",,terminal_output +13069,13102964,"TERMINAL",0,0,"Step 682, loss: 2.9355924129486084, step time: 18.57137680053711ms\r\nStep 683, loss: 2.7572591304779053, step time: 18.09978485107422ms\r\n",,terminal_output +13070,13103015,"TERMINAL",0,0,"Step 684, loss: 2.8674817085266113, step time: 17.619609832763672ms\r\n",,terminal_output +13071,13103109,"TERMINAL",0,0,"Step 685, loss: 3.0215306282043457, step time: 17.888545989990234ms\r\n",,terminal_output +13072,13103212,"TERMINAL",0,0,"Step 686, loss: 2.915949583053589, step time: 17.436504364013672ms\r\nStep 687, loss: 2.8305344581604004, step time: 17.7304744720459ms\r\n",,terminal_output +13073,13103306,"TERMINAL",0,0,"Step 688, loss: 2.8387088775634766, step time: 17.670631408691406ms\r\n",,terminal_output +13074,13103412,"TERMINAL",0,0,"Step 689, loss: 2.9018406867980957, step time: 17.263174057006836ms\r\nStep 690, loss: 2.877009868621826, step time: 
17.282485961914062ms\r\n",,terminal_output +13075,13103476,"TERMINAL",0,0,"Step 691, loss: 2.882403612136841, step time: 17.72618293762207ms\r\n",,terminal_output +13076,13103535,"TERMINAL",0,0,"Step 692, loss: 2.9178953170776367, step time: 17.342567443847656ms\r\n",,terminal_output +13077,13103606,"TERMINAL",0,0,"Step 693, loss: 3.0294415950775146, step time: 17.494678497314453ms\r\n",,terminal_output +13078,13103657,"TERMINAL",0,0,"Step 694, loss: 2.951517343521118, step time: 21.831512451171875ms\r\n",,terminal_output +13079,13103720,"TERMINAL",0,0,"Step 695, loss: 2.872103691101074, step time: 17.664670944213867ms\r\n",,terminal_output +13080,13103783,"TERMINAL",0,0,"Step 696, loss: 2.979658842086792, step time: 17.2879695892334ms\r\n",,terminal_output +13081,13103842,"TERMINAL",0,0,"Step 697, loss: 2.8974268436431885, step time: 17.80557632446289ms\r\n",,terminal_output +13082,13103906,"TERMINAL",0,0,"Step 698, loss: 2.8332159519195557, step time: 17.253637313842773ms\r\n",,terminal_output +13083,13104032,"TERMINAL",0,0,"Step 699, loss: 2.993293046951294, step time: 17.55070686340332ms\r\nStep 700, loss: 2.8602118492126465, step time: 17.494916915893555ms\r\n",,terminal_output +13084,13104096,"TERMINAL",0,0,"Step 701, loss: 3.0158464908599854, step time: 17.32325553894043ms\r\n",,terminal_output +13085,13104203,"TERMINAL",0,0,"Step 702, loss: 2.9894983768463135, step time: 17.218351364135742ms\r\n",,terminal_output +13086,13104252,"TERMINAL",0,0,"Step 703, loss: 2.936554193496704, step time: 17.659425735473633ms\r\n",,terminal_output +13087,13104356,"TERMINAL",0,0,"Step 704, loss: 2.7535929679870605, step time: 17.14015007019043ms\r\nStep 705, loss: 2.802450180053711, step time: 17.23480224609375ms\r\n",,terminal_output +13088,13104449,"TERMINAL",0,0,"Step 706, loss: 2.7878053188323975, step time: 17.52495765686035ms\r\n",,terminal_output +13089,13104500,"TERMINAL",0,0,"Step 707, loss: 2.950880289077759, step time: 17.621994018554688ms\r\n",,terminal_output +13090,13104605,"TERMINAL",0,0,"Step 708, loss: 2.9056456089019775, step time: 17.19379425048828ms\r\nStep 709, loss: 2.9220035076141357, step time: 17.671823501586914ms\r\n",,terminal_output +13091,13104699,"TERMINAL",0,0,"Step 710, loss: 2.861039400100708, step time: 17.29559898376465ms\r\n",,terminal_output +13092,13104752,"TERMINAL",0,0,"Step 711, loss: 2.8574717044830322, step time: 17.337560653686523ms\r\n",,terminal_output +13093,13104855,"TERMINAL",0,0,"Step 712, loss: 3.3251421451568604, step time: 17.421722412109375ms\r\nStep 713, loss: 2.8551218509674072, step time: 17.3795223236084ms\r\n",,terminal_output +13094,13104920,"TERMINAL",0,0,"Step 714, loss: 3.089505434036255, step time: 17.166614532470703ms\r\n",,terminal_output +13095,13104983,"TERMINAL",0,0,"Step 715, loss: 2.9612743854522705, step time: 17.40121841430664ms\r\n",,terminal_output +13096,13105085,"TERMINAL",0,0,"Step 716, loss: 2.8181004524230957, step time: 17.243623733520508ms\r\n",,terminal_output +13097,13105146,"TERMINAL",0,0,"Step 717, loss: 2.85516619682312, step time: 17.325401306152344ms\r\n",,terminal_output +13098,13105208,"TERMINAL",0,0,"Step 718, loss: 2.9304943084716797, step time: 17.461061477661133ms\r\n",,terminal_output +13099,13105275,"TERMINAL",0,0,"Step 719, loss: 2.9897513389587402, step time: 17.061948776245117ms\r\n",,terminal_output +13100,13105339,"TERMINAL",0,0,"Step 720, loss: 3.059072494506836, step time: 17.293214797973633ms\r\n",,terminal_output +13101,13105400,"TERMINAL",0,0,"Step 721, loss: 2.7759041786193848, step time: 
17.551660537719727ms\r\n",,terminal_output +13102,13105460,"TERMINAL",0,0,"Step 722, loss: 2.9677298069000244, step time: 17.150402069091797ms\r\n",,terminal_output +13103,13105521,"TERMINAL",0,0,"Step 723, loss: 2.8737316131591797, step time: 17.30799674987793ms\r\n",,terminal_output +13104,13105582,"TERMINAL",0,0,"Step 724, loss: 2.850545644760132, step time: 17.396211624145508ms\r\n",,terminal_output +13105,13105643,"TERMINAL",0,0,"Step 725, loss: 3.0858731269836426, step time: 17.18878746032715ms\r\n",,terminal_output +13106,13105704,"TERMINAL",0,0,"Step 726, loss: 3.0064492225646973, step time: 17.099380493164062ms\r\n",,terminal_output +13107,13105765,"TERMINAL",0,0,"Step 727, loss: 2.8981645107269287, step time: 17.706871032714844ms\r\n",,terminal_output +13108,13105873,"TERMINAL",0,0,"Step 728, loss: 2.853132724761963, step time: 17.745018005371094ms\r\nStep 729, loss: 2.8395490646362305, step time: 17.19379425048828ms\r\n",,terminal_output +13109,13106003,"TERMINAL",0,0,"Step 730, loss: 2.8695132732391357, step time: 17.528057098388672ms\r\nStep 731, loss: 2.8480770587921143, step time: 16.993999481201172ms\r\n",,terminal_output +13110,13106068,"TERMINAL",0,0,"Step 732, loss: 3.1144819259643555, step time: 17.118453979492188ms\r\n",,terminal_output +13111,13106134,"TERMINAL",0,0,"Step 733, loss: 2.9414682388305664, step time: 17.64202117919922ms\r\n",,terminal_output +13112,13106196,"TERMINAL",0,0,"Step 734, loss: 2.9748830795288086, step time: 17.013072967529297ms\r\n",,terminal_output +13113,13106254,"TERMINAL",0,0,"Step 735, loss: 2.8887779712677, step time: 16.999006271362305ms\r\n",,terminal_output +13114,13106315,"TERMINAL",0,0,"Step 736, loss: 2.775092601776123, step time: 17.487525939941406ms\r\n",,terminal_output +13115,13106379,"TERMINAL",0,0,"Step 737, loss: 2.889678478240967, step time: 18.451213836669922ms\r\n",,terminal_output +13116,13106447,"TERMINAL",0,0,"Step 738, loss: 2.9769091606140137, step time: 17.58575439453125ms\r\n",,terminal_output +13117,13106506,"TERMINAL",0,0,"Step 739, loss: 2.8317549228668213, step time: 17.62700080871582ms\r\n",,terminal_output +13118,13106571,"TERMINAL",0,0,"Step 740, loss: 2.815378427505493, step time: 17.178058624267578ms\r\n",,terminal_output +13119,13106633,"TERMINAL",0,0,"Step 741, loss: 2.8267221450805664, step time: 17.283916473388672ms\r\n",,terminal_output +13120,13106696,"TERMINAL",0,0,"Step 742, loss: 2.805265188217163, step time: 17.595529556274414ms\r\n",,terminal_output +13121,13106759,"TERMINAL",0,0,"Step 743, loss: 2.8452107906341553, step time: 17.375946044921875ms\r\n",,terminal_output +13122,13106836,"TERMINAL",0,0,"Step 744, loss: 2.823758602142334, step time: 17.170429229736328ms\r\n",,terminal_output +13123,13106890,"TERMINAL",0,0,"Step 745, loss: 2.8417043685913086, step time: 17.75670051574707ms\r\n",,terminal_output +13124,13106963,"TERMINAL",0,0,"Step 746, loss: 2.8237640857696533, step time: 17.131805419921875ms\r\n",,terminal_output +13125,13107012,"TERMINAL",0,0,"Step 747, loss: 3.0762932300567627, step time: 19.251585006713867ms\r\n",,terminal_output +13126,13107076,"TERMINAL",0,0,"Step 748, loss: 2.963782787322998, step time: 17.868518829345703ms\r\n",,terminal_output +13127,13107139,"TERMINAL",0,0,"Step 749, loss: 2.8341310024261475, step time: 16.994237899780273ms\r\n",,terminal_output +13128,13107242,"TERMINAL",0,0,"Step 750, loss: 2.8489632606506348, step time: 17.110109329223633ms\r\n",,terminal_output +13129,13107300,"TERMINAL",0,0,"Step 751, loss: 2.816943645477295, step time: 
17.47894287109375ms\r\n",,terminal_output +13130,13107361,"TERMINAL",0,0,"Step 752, loss: 2.8917055130004883, step time: 17.296791076660156ms\r\n",,terminal_output +13131,13107422,"TERMINAL",0,0,"Step 753, loss: 2.862109899520874, step time: 16.991376876831055ms\r\n",,terminal_output +13132,13107529,"TERMINAL",0,0,"Step 754, loss: 3.045844793319702, step time: 17.5931453704834ms\r\nStep 755, loss: 2.8018667697906494, step time: 17.325639724731445ms\r\n",,terminal_output +13133,13107589,"TERMINAL",0,0,"Step 756, loss: 2.7729930877685547, step time: 17.017841339111328ms\r\n",,terminal_output +13134,13107649,"TERMINAL",0,0,"Step 757, loss: 2.8504655361175537, step time: 17.518997192382812ms\r\n",,terminal_output +13135,13107712,"TERMINAL",0,0,"Step 758, loss: 2.7127296924591064, step time: 17.21501350402832ms\r\n",,terminal_output +13136,13107777,"TERMINAL",0,0,"Step 759, loss: 3.034632682800293, step time: 17.173290252685547ms\r\n",,terminal_output +13137,13107838,"TERMINAL",0,0,"Step 760, loss: 2.7748937606811523, step time: 17.673492431640625ms\r\n",,terminal_output +13138,13107900,"TERMINAL",0,0,"Step 761, loss: 2.77191424369812, step time: 17.320632934570312ms\r\n",,terminal_output +13139,13107965,"TERMINAL",0,0,"Step 762, loss: 2.6750166416168213, step time: 17.193317413330078ms\r\n",,terminal_output +13140,13108020,"TERMINAL",0,0,"Step 763, loss: 3.073103904724121, step time: 17.453908920288086ms\r\n",,terminal_output +13141,13108085,"TERMINAL",0,0,"Step 764, loss: 2.799596071243286, step time: 17.206192016601562ms\r\n",,terminal_output +13142,13108148,"TERMINAL",0,0,"Step 765, loss: 2.7909793853759766, step time: 17.203807830810547ms\r\n",,terminal_output +13143,13108211,"TERMINAL",0,0,"Step 766, loss: 2.811706304550171, step time: 20.35999298095703ms\r\n",,terminal_output +13144,13108279,"TERMINAL",0,0,"Step 767, loss: 3.030635118484497, step time: 17.05479621887207ms\r\n",,terminal_output +13145,13108344,"TERMINAL",0,0,"Step 768, loss: 2.7635605335235596, step time: 17.1506404876709ms\r\n",,terminal_output +13146,13108407,"TERMINAL",0,0,"Step 769, loss: 2.891143798828125, step time: 17.386198043823242ms\r\n",,terminal_output +13147,13108468,"TERMINAL",0,0,"Step 770, loss: 0.008930739015340805, step time: 17.20571517944336ms\r\n",,terminal_output +13148,13108529,"TERMINAL",0,0,"Step 771, loss: 2.6627657413482666, step time: 16.970157623291016ms\r\n",,terminal_output +13149,13108643,"TERMINAL",0,0,"Step 772, loss: 2.7813127040863037, step time: 17.335891723632812ms\r\n",,terminal_output +13150,13108650,"TERMINAL",0,0,"Step 773, loss: 2.770496129989624, step time: 17.037630081176758ms\r\n",,terminal_output +13151,13108751,"TERMINAL",0,0,"Step 774, loss: 2.9872360229492188, step time: 17.205476760864258ms\r\n",,terminal_output +13152,13108815,"TERMINAL",0,0,"Step 775, loss: 2.9571280479431152, step time: 17.243146896362305ms\r\n",,terminal_output +13153,13108877,"TERMINAL",0,0,"Step 776, loss: 2.8530893325805664, step time: 17.116785049438477ms\r\n",,terminal_output +13154,13108949,"TERMINAL",0,0,"Step 777, loss: 2.912386894226074, step time: 17.16327667236328ms\r\n",,terminal_output +13155,13109034,"TERMINAL",0,0,"Step 778, loss: 2.7885754108428955, step time: 17.618656158447266ms\r\nStep 779, loss: 2.9675395488739014, step time: 16.849756240844727ms\r\n",,terminal_output +13156,13109155,"TERMINAL",0,0,"Step 780, loss: 2.7357752323150635, step time: 17.23313331604004ms\r\nStep 781, loss: 2.689643621444702, step time: 17.43769645690918ms\r\n",,terminal_output 
+13157,13109250,"TERMINAL",0,0,"Step 782, loss: 2.7730712890625, step time: 17.130136489868164ms\r\n",,terminal_output +13158,13109303,"TERMINAL",0,0,"Step 783, loss: 2.979790449142456, step time: 16.94655418395996ms\r\n",,terminal_output +13159,13109411,"TERMINAL",0,0,"Step 784, loss: 2.660066604614258, step time: 17.277002334594727ms\r\nStep 785, loss: 2.8018078804016113, step time: 17.215490341186523ms\r\n",,terminal_output +13160,13109473,"TERMINAL",0,0,"Step 786, loss: 2.700263023376465, step time: 17.16017723083496ms\r\n",,terminal_output +13161,13109538,"TERMINAL",0,0,"Step 787, loss: 2.86362624168396, step time: 17.439603805541992ms\r\n",,terminal_output +13162,13109612,"TERMINAL",0,0,"Step 788, loss: 2.7211318016052246, step time: 17.21477508544922ms\r\n",,terminal_output +13163,13109664,"TERMINAL",0,0,"Step 789, loss: 3.0330593585968018, step time: 17.050981521606445ms\r\n",,terminal_output +13164,13109785,"TERMINAL",0,0,"Step 790, loss: 2.7094578742980957, step time: 17.3642635345459ms\r\nStep 791, loss: 2.7304770946502686, step time: 17.145872116088867ms\r\n",,terminal_output +13165,13109848,"TERMINAL",0,0,"Step 792, loss: 3.1515979766845703, step time: 17.11559295654297ms\r\n",,terminal_output +13166,13109911,"TERMINAL",0,0,"Step 793, loss: 2.703885793685913, step time: 17.36140251159668ms\r\n",,terminal_output +13167,13109975,"TERMINAL",0,0,"Step 794, loss: 2.7565083503723145, step time: 17.386198043823242ms\r\n",,terminal_output +13168,13110036,"TERMINAL",0,0,"Step 795, loss: 2.7610177993774414, step time: 17.19379425048828ms\r\n",,terminal_output +13169,13110099,"TERMINAL",0,0,"Step 796, loss: 2.6918394565582275, step time: 17.623424530029297ms\r\n",,terminal_output +13170,13110163,"TERMINAL",0,0,"Step 797, loss: 2.7349867820739746, step time: 17.115354537963867ms\r\n",,terminal_output +13171,13110228,"TERMINAL",0,0,"Step 798, loss: 2.702329158782959, step time: 17.244815826416016ms\r\n",,terminal_output +13172,13110288,"TERMINAL",0,0,"Step 799, loss: 2.847532272338867, step time: 17.287731170654297ms\r\n",,terminal_output +13173,13110351,"TERMINAL",0,0,"Step 800, loss: 3.0077788829803467, step time: 18.2192325592041ms\r\n",,terminal_output +13174,13110432,"TERMINAL",0,0,"Step 801, loss: 2.676750898361206, step time: 19.165754318237305ms\r\n",,terminal_output +13175,13110488,"TERMINAL",0,0,"Step 802, loss: 3.6719470024108887, step time: 19.512653350830078ms\r\n",,terminal_output +13176,13110548,"TERMINAL",0,0,"Step 803, loss: 2.692842483520508, step time: 17.749309539794922ms\r\n",,terminal_output +13177,13110612,"TERMINAL",0,0,"Step 804, loss: 2.8100106716156006, step time: 17.67706871032715ms\r\n",,terminal_output +13178,13110676,"TERMINAL",0,0,"Step 805, loss: 2.9760403633117676, step time: 17.84372329711914ms\r\n",,terminal_output +13179,13110740,"TERMINAL",0,0,"Step 806, loss: 3.094003677368164, step time: 17.531633377075195ms\r\n",,terminal_output +13180,13110800,"TERMINAL",0,0,"Step 807, loss: 2.664128303527832, step time: 17.774105072021484ms\r\n",,terminal_output +13181,13110864,"TERMINAL",0,0,"Step 808, loss: 2.8166887760162354, step time: 17.487287521362305ms\r\n",,terminal_output +13182,13110924,"TERMINAL",0,0,"Step 809, loss: 2.8223915100097656, step time: 17.35520362854004ms\r\n",,terminal_output +13183,13110990,"TERMINAL",0,0,"Step 810, loss: 2.808067798614502, step time: 17.130374908447266ms\r\n",,terminal_output +13184,13111046,"TERMINAL",0,0,"Step 811, loss: 2.815821647644043, step time: 17.5478458404541ms\r\n",,terminal_output 
+13185,13111178,"TERMINAL",0,0,"Step 812, loss: 2.7317609786987305, step time: 17.148733139038086ms\r\n",,terminal_output +13186,13111236,"TERMINAL",0,0,"Step 813, loss: 2.760584831237793, step time: 17.23790168762207ms\r\nStep 814, loss: 2.8784420490264893, step time: 17.44985580444336ms\r\n",,terminal_output +13187,13111330,"TERMINAL",0,0,"Step 815, loss: 2.8276073932647705, step time: 17.003297805786133ms\r\n",,terminal_output +13188,13111452,"TERMINAL",0,0,"Step 816, loss: 2.8756814002990723, step time: 17.43936538696289ms\r\n",,terminal_output +13189,13111520,"TERMINAL",0,0,"Step 817, loss: 2.694998025894165, step time: 17.676830291748047ms\r\nStep 818, loss: 2.746044874191284, step time: 17.063379287719727ms\r\n",,terminal_output +13190,13111624,"TERMINAL",0,0,"Step 819, loss: 2.9344441890716553, step time: 17.138004302978516ms\r\nStep 820, loss: 2.828800916671753, step time: 17.54450798034668ms\r\n",,terminal_output +13191,13111683,"TERMINAL",0,0,"Step 821, loss: 2.711716651916504, step time: 17.079830169677734ms\r\n",,terminal_output +13192,13111745,"TERMINAL",0,0,"Step 822, loss: 2.7407114505767822, step time: 17.06385612487793ms\r\n",,terminal_output +13193,13111838,"TERMINAL",0,0,"Step 823, loss: 2.726332187652588, step time: 17.747163772583008ms\r\n",,terminal_output +13194,13111889,"TERMINAL",0,0,"Step 824, loss: 2.762096881866455, step time: 16.992807388305664ms\r\n",,terminal_output +13195,13111993,"TERMINAL",0,0,"Step 825, loss: 2.6625077724456787, step time: 16.961097717285156ms\r\nStep 826, loss: 2.749866485595703, step time: 17.557859420776367ms\r\n",,terminal_output +13196,13112056,"TERMINAL",0,0,"Step 827, loss: 2.8069987297058105, step time: 17.001867294311523ms\r\n",,terminal_output +13197,13112154,"TERMINAL",0,0,"Step 828, loss: 2.7438080310821533, step time: 17.112255096435547ms\r\n",,terminal_output +13198,13112206,"TERMINAL",0,0,"Step 829, loss: 2.993252992630005, step time: 17.474651336669922ms\r\n",,terminal_output +13199,13112310,"TERMINAL",0,0,"Step 830, loss: 2.8398897647857666, step time: 17.330169677734375ms\r\nStep 831, loss: 2.7419636249542236, step time: 17.040014266967773ms\r\n",,terminal_output +13200,13112373,"TERMINAL",0,0,"Step 832, loss: 2.716221332550049, step time: 17.444849014282227ms\r\n",,terminal_output +13201,13112434,"TERMINAL",0,0,"Step 833, loss: 2.8172473907470703, step time: 17.14491844177246ms\r\n",,terminal_output +13202,13112497,"TERMINAL",0,0,"Step 834, loss: 2.7597997188568115, step time: 17.20738410949707ms\r\n",,terminal_output +13203,13112560,"TERMINAL",0,0,"Step 835, loss: 2.739572048187256, step time: 17.43912696838379ms\r\n",,terminal_output +13204,13112620,"TERMINAL",0,0,"Step 836, loss: 2.761704444885254, step time: 17.026662826538086ms\r\n",,terminal_output +13205,13112683,"TERMINAL",0,0,"Step 837, loss: 2.709848165512085, step time: 17.0440673828125ms\r\n",,terminal_output +13206,13113097,"TERMINAL",0,0,"Step 838, loss: 2.9476921558380127, step time: 345.59011459350586ms\r\nStep 839, loss: 2.873464584350586, step time: 24.649620056152344ms\r\n",,terminal_output +13207,13113195,"TERMINAL",0,0,"Step 840, loss: 2.66143798828125, step time: 19.97685432434082ms\r\n",,terminal_output +13208,13113248,"TERMINAL",0,0,"Step 841, loss: 2.7002816200256348, step time: 19.340038299560547ms\r\n",,terminal_output +13209,13113391,"TERMINAL",0,0,"Step 842, loss: 3.053093194961548, step time: 19.01531219482422ms\r\nStep 843, loss: 2.626612901687622, step time: 17.691612243652344ms\r\n",,terminal_output +13210,13113442,"TERMINAL",0,0,"Step 
844, loss: 2.837344169616699, step time: 17.830371856689453ms\r\n",,terminal_output +13211,13113545,"TERMINAL",0,0,"Step 845, loss: 2.9962403774261475, step time: 17.22884178161621ms\r\nStep 846, loss: 2.873544692993164, step time: 17.464160919189453ms\r\n",,terminal_output +13212,13113632,"TERMINAL",0,0,"Step 847, loss: 2.6242284774780273, step time: 17.64965057373047ms\r\n",,terminal_output +13213,13113685,"TERMINAL",0,0,"Step 848, loss: 2.788217544555664, step time: 18.30315589904785ms\r\n",,terminal_output +13214,13113747,"TERMINAL",0,0,"Step 849, loss: 2.8244168758392334, step time: 17.32659339904785ms\r\n",,terminal_output +13215,13113810,"TERMINAL",0,0,"Step 850, loss: 2.6459388732910156, step time: 17.677783966064453ms\r\n",,terminal_output +13216,13113872,"TERMINAL",0,0,"Step 851, loss: 2.8839211463928223, step time: 17.125606536865234ms\r\n",,terminal_output +13217,13113944,"TERMINAL",0,0,"Step 852, loss: 3.2167749404907227, step time: 17.693758010864258ms\r\n",,terminal_output +13218,13113999,"TERMINAL",0,0,"Step 853, loss: 2.673790216445923, step time: 17.424583435058594ms\r\n",,terminal_output +13219,13114061,"TERMINAL",0,0,"Step 854, loss: 2.726231098175049, step time: 17.24863052368164ms\r\n",,terminal_output +13220,13114161,"TERMINAL",0,0,"Step 855, loss: 3.0040814876556396, step time: 17.14944839477539ms\r\n",,terminal_output +13221,13114222,"TERMINAL",0,0,"Step 856, loss: 2.7069952487945557, step time: 17.666339874267578ms\r\n",,terminal_output +13222,13114296,"TERMINAL",0,0,"Step 857, loss: 2.8009915351867676, step time: 17.202377319335938ms\r\n",,terminal_output +13223,13114357,"TERMINAL",0,0,"Step 858, loss: 2.6951382160186768, step time: 17.15826988220215ms\r\n",,terminal_output +13224,13114415,"TERMINAL",0,0,"Step 859, loss: 2.631566286087036, step time: 17.586708068847656ms\r\n",,terminal_output +13225,13114480,"TERMINAL",0,0,"Step 860, loss: 2.6893346309661865, step time: 30.541181564331055ms\r\n",,terminal_output +13226,13114542,"TERMINAL",0,0,"Step 861, loss: 2.6386473178863525, step time: 19.41680908203125ms\r\n",,terminal_output +13227,13114647,"TERMINAL",0,0,"Step 862, loss: 2.6938271522521973, step time: 17.705917358398438ms\r\nStep 863, loss: 2.868391275405884, step time: 17.380714416503906ms\r\n",,terminal_output +13228,13114709,"TERMINAL",0,0,"Step 864, loss: 2.7059755325317383, step time: 17.210006713867188ms\r\n",,terminal_output +13229,13114771,"TERMINAL",0,0,"Step 865, loss: 2.7148289680480957, step time: 17.406702041625977ms\r\n",,terminal_output +13230,13114833,"TERMINAL",0,0,"Step 866, loss: 2.9038147926330566, step time: 17.322063446044922ms\r\n",,terminal_output +13231,13114895,"TERMINAL",0,0,"Step 867, loss: 2.6478664875030518, step time: 17.14634895324707ms\r\n",,terminal_output +13232,13114956,"TERMINAL",0,0,"Step 868, loss: 2.806588649749756, step time: 17.998218536376953ms\r\n",,terminal_output +13233,13115012,"TERMINAL",0,0,"Step 869, loss: 2.8323819637298584, step time: 17.259597778320312ms\r\n",,terminal_output +13234,13115114,"TERMINAL",0,0,"Step 870, loss: 2.741577625274658, step time: 17.254352569580078ms\r\n",,terminal_output +13235,13115177,"TERMINAL",0,0,"Step 871, loss: 2.824002981185913, step time: 17.27437973022461ms\r\n",,terminal_output +13236,13115239,"TERMINAL",0,0,"Step 872, loss: 2.694077730178833, step time: 17.087936401367188ms\r\n",,terminal_output +13237,13115298,"TERMINAL",0,0,"Step 873, loss: 2.660705804824829, step time: 17.236948013305664ms\r\n",,terminal_output +13238,13115360,"TERMINAL",0,0,"Step 874, loss: 
2.779025077819824, step time: 17.618894577026367ms\r\n",,terminal_output +13239,13115420,"TERMINAL",0,0,"Step 875, loss: 2.7555348873138428, step time: 17.21787452697754ms\r\n",,terminal_output +13240,13115485,"TERMINAL",0,0,"Step 876, loss: 2.8119254112243652, step time: 17.253398895263672ms\r\n",,terminal_output +13241,13115544,"TERMINAL",0,0,"Step 877, loss: 2.6426587104797363, step time: 17.65608787536621ms\r\n",,terminal_output +13242,13115605,"TERMINAL",0,0,"Step 878, loss: 2.706219434738159, step time: 17.045021057128906ms\r\n",,terminal_output +13243,13115666,"TERMINAL",0,0,"Step 879, loss: 2.7035725116729736, step time: 17.150163650512695ms\r\n",,terminal_output +13244,13115727,"TERMINAL",0,0,"Step 880, loss: 2.8486971855163574, step time: 18.090009689331055ms\r\n",,terminal_output +13245,13115787,"TERMINAL",0,0,"Step 881, loss: 2.95249605178833, step time: 18.043994903564453ms\r\n",,terminal_output +13246,13115849,"TERMINAL",0,0,"Step 882, loss: 2.4288692474365234, step time: 17.30799674987793ms\r\n",,terminal_output +13247,13115912,"TERMINAL",0,0,"Step 883, loss: 2.7105391025543213, step time: 17.6999568939209ms\r\n",,terminal_output +13248,13116021,"TERMINAL",0,0,"Step 884, loss: 2.694837808609009, step time: 17.35377311706543ms\r\nStep 885, loss: 2.6510305404663086, step time: 17.3184871673584ms\r\n",,terminal_output +13249,13116087,"TERMINAL",0,0,"Step 886, loss: 2.694075107574463, step time: 17.558574676513672ms\r\n",,terminal_output +13250,13116152,"TERMINAL",0,0,"Step 887, loss: 2.6487298011779785, step time: 16.894102096557617ms\r\n",,terminal_output +13251,13116215,"TERMINAL",0,0,"Step 888, loss: 2.6918673515319824, step time: 17.088651657104492ms\r\n",,terminal_output +13252,13116278,"TERMINAL",0,0,"Step 889, loss: 3.1506032943725586, step time: 17.447471618652344ms\r\n",,terminal_output +13253,13116339,"TERMINAL",0,0,"Step 890, loss: 2.783203125, step time: 17.078399658203125ms\r\n",,terminal_output +13254,13116399,"TERMINAL",0,0,"Step 891, loss: 3.5654821395874023, step time: 17.415523529052734ms\r\n",,terminal_output +13255,13116465,"TERMINAL",0,0,"Step 892, loss: 2.5740153789520264, step time: 17.63606071472168ms\r\n",,terminal_output +13256,13116528,"TERMINAL",0,0,"Step 893, loss: 2.535672903060913, step time: 17.124176025390625ms\r\n",,terminal_output +13257,13116592,"TERMINAL",0,0,"Step 894, loss: 2.7000620365142822, step time: 17.09461212158203ms\r\n",,terminal_output +13258,13116655,"TERMINAL",0,0,"Step 895, loss: 3.0084118843078613, step time: 17.62843132019043ms\r\n",,terminal_output +13259,13116752,"TERMINAL",0,0,"Step 896, loss: 2.642892360687256, step time: 17.188310623168945ms\r\n",,terminal_output +13260,13116812,"TERMINAL",0,0,"Step 897, loss: 2.9513189792633057, step time: 17.285585403442383ms\r\n",,terminal_output +13261,13116876,"TERMINAL",0,0,"Step 898, loss: 2.6077473163604736, step time: 18.283843994140625ms\r\n",,terminal_output +13262,13116946,"TERMINAL",0,0,"Step 899, loss: 2.691192626953125, step time: 17.259597778320312ms\r\n",,terminal_output +13263,13116998,"TERMINAL",0,0,"Step 900, loss: 2.611513376235962, step time: 17.139434814453125ms\r\n",,terminal_output +13264,13117065,"TERMINAL",0,0,"Step 901, loss: 2.6749346256256104, step time: 17.487049102783203ms\r\n",,terminal_output +13265,13117126,"TERMINAL",0,0,"Step 902, loss: 2.694244861602783, step time: 17.090559005737305ms\r\n",,terminal_output +13266,13117188,"TERMINAL",0,0,"Step 903, loss: 2.5515360832214355, step time: 17.205476760864258ms\r\n",,terminal_output 
+13267,13117253,"TERMINAL",0,0,"Step 904, loss: 2.877751588821411, step time: 17.702579498291016ms\r\n",,terminal_output +13268,13117316,"TERMINAL",0,0,"Step 905, loss: 2.5667636394500732, step time: 17.021656036376953ms\r\n",,terminal_output +13269,13117378,"TERMINAL",0,0,"Step 906, loss: 2.5971803665161133, step time: 17.35210418701172ms\r\n",,terminal_output +13270,13117441,"TERMINAL",0,0,"Step 907, loss: 2.630035161972046, step time: 17.264366149902344ms\r\n",,terminal_output +13271,13117506,"TERMINAL",0,0,"Step 908, loss: 2.8425748348236084, step time: 17.246723175048828ms\r\n",,terminal_output +13272,13117570,"TERMINAL",0,0,"Step 909, loss: 2.949862480163574, step time: 17.328739166259766ms\r\n",,terminal_output +13273,13117633,"TERMINAL",0,0,"Step 910, loss: 2.5474748611450195, step time: 17.491817474365234ms\r\n",,terminal_output +13274,13117740,"TERMINAL",0,0,"Step 911, loss: 2.557452440261841, step time: 17.122745513916016ms\r\nStep 912, loss: 2.5844995975494385, step time: 17.379045486450195ms\r\n",,terminal_output +13275,13117847,"TERMINAL",0,0,"Step 913, loss: 2.6509761810302734, step time: 17.452239990234375ms\r\nStep 914, loss: 2.70310378074646, step time: 17.045974731445312ms\r\n",,terminal_output +13276,13117944,"TERMINAL",0,0,"Step 915, loss: 2.60870361328125, step time: 17.048358917236328ms\r\n",,terminal_output +13277,13117997,"TERMINAL",0,0,"Step 916, loss: 2.7346231937408447, step time: 17.5473690032959ms\r\n",,terminal_output +13278,13118103,"TERMINAL",0,0,"Step 917, loss: 2.9464941024780273, step time: 16.94321632385254ms\r\nStep 918, loss: 2.7754666805267334, step time: 17.167329788208008ms\r\n",,terminal_output +13279,13118167,"TERMINAL",0,0,"Step 919, loss: 3.27065110206604, step time: 17.657756805419922ms\r\n",,terminal_output +13280,13118225,"TERMINAL",0,0,"Step 920, loss: 2.5878169536590576, step time: 17.14324951171875ms\r\n",,terminal_output +13281,13118286,"TERMINAL",0,0,"Step 921, loss: 2.865220546722412, step time: 16.995906829833984ms\r\n",,terminal_output +13282,13118348,"TERMINAL",0,0,"Step 922, loss: 2.7971091270446777, step time: 17.566442489624023ms\r\n",,terminal_output +13283,13118411,"TERMINAL",0,0,"Step 923, loss: 2.6993510723114014, step time: 17.373323440551758ms\r\n",,terminal_output +13284,13118470,"TERMINAL",0,0,"Step 924, loss: 2.6673741340637207, step time: 17.336368560791016ms\r\n",,terminal_output +13285,13118534,"TERMINAL",0,0,"Step 925, loss: 2.6360156536102295, step time: 17.803668975830078ms\r\n",,terminal_output +13286,13118653,"TERMINAL",0,0,"Step 926, loss: 2.721395254135132, step time: 17.176389694213867ms\r\n",,terminal_output +13287,13118661,"TERMINAL",0,0,"Step 927, loss: 2.726163625717163, step time: 17.107486724853516ms\r\n",,terminal_output +13288,13118761,"TERMINAL",0,0,"Step 928, loss: 2.60276198387146, step time: 17.449617385864258ms\r\n",,terminal_output +13289,13118821,"TERMINAL",0,0,"Step 929, loss: 2.4895732402801514, step time: 17.082691192626953ms\r\n",,terminal_output +13290,13118883,"TERMINAL",0,0,"Step 930, loss: 2.8136649131774902, step time: 17.214536666870117ms\r\n",,terminal_output +13291,13118987,"TERMINAL",0,0,"Step 931, loss: 2.9344663619995117, step time: 17.490625381469727ms\r\nStep 932, loss: 2.646549701690674, step time: 17.049789428710938ms\r\n",,terminal_output +13292,13119050,"TERMINAL",0,0,"Step 933, loss: 3.5439682006835938, step time: 16.885042190551758ms\r\n",,terminal_output +13293,13119109,"TERMINAL",0,0,"Step 934, loss: 2.617152690887451, step time: 
17.528295516967773ms\r\n",,terminal_output +13294,13119174,"TERMINAL",0,0,"Step 935, loss: 2.7071073055267334, step time: 17.1205997467041ms\r\n",,terminal_output +13295,13119246,"TERMINAL",0,0,"Step 936, loss: 2.790607213973999, step time: 17.162084579467773ms\r\n",,terminal_output +13296,13119306,"TERMINAL",0,0,"Step 937, loss: 2.648050546646118, step time: 17.346858978271484ms\r\n",,terminal_output +13297,13119371,"TERMINAL",0,0,"Step 938, loss: 2.6042535305023193, step time: 17.127275466918945ms\r\n",,terminal_output +13298,13119433,"TERMINAL",0,0,"Step 939, loss: 2.5882678031921387, step time: 17.06409454345703ms\r\n",,terminal_output +13299,13119505,"TERMINAL",0,0,"Step 940, loss: 2.572874069213867, step time: 17.575740814208984ms\r\n",,terminal_output +13300,13119569,"TERMINAL",0,0,"Step 941, loss: 3.070690870285034, step time: 17.008066177368164ms\r\n",,terminal_output +13301,13119675,"TERMINAL",0,0,"Step 942, loss: 2.676833391189575, step time: 17.113447189331055ms\r\nStep 943, loss: 2.608337640762329, step time: 17.690181732177734ms\r\n",,terminal_output +13302,13119768,"TERMINAL",0,0,"Step 944, loss: 2.670793294906616, step time: 17.38119125366211ms\r\n",,terminal_output +13303,13119872,"TERMINAL",0,0,"Step 945, loss: 2.926982879638672, step time: 17.046451568603516ms\r\nStep 946, loss: 2.881757974624634, step time: 17.408370971679688ms\r\n",,terminal_output +13304,13119977,"TERMINAL",0,0,"Step 947, loss: 2.7770142555236816, step time: 17.06242561340332ms\r\nStep 948, loss: 2.5944316387176514, step time: 17.146587371826172ms\r\n",,terminal_output +13305,13120102,"TERMINAL",0,0,"Step 949, loss: 2.6133720874786377, step time: 17.470121383666992ms\r\nStep 950, loss: 2.7279257774353027, step time: 17.12942123413086ms\r\n",,terminal_output +13306,13120169,"TERMINAL",0,0,"Step 951, loss: 3.0266025066375732, step time: 16.812562942504883ms\r\n",,terminal_output +13307,13120231,"TERMINAL",0,0,"Step 952, loss: 2.6721458435058594, step time: 17.487764358520508ms\r\n",,terminal_output +13308,13120300,"TERMINAL",0,0,"Step 953, loss: 2.867021322250366, step time: 16.892194747924805ms\r\n",,terminal_output +13309,13120416,"TERMINAL",0,0,"Step 954, loss: 2.691068172454834, step time: 17.11273193359375ms\r\nStep 955, loss: 2.629370927810669, step time: 17.246007919311523ms\r\n",,terminal_output +13310,13120481,"TERMINAL",0,0,"Step 956, loss: 2.954845905303955, step time: 17.158031463623047ms\r\n",,terminal_output +13311,13120581,"TERMINAL",0,0,"Step 957, loss: 2.694161891937256, step time: 17.03667640686035ms\r\n",,terminal_output +13312,13120632,"TERMINAL",0,0,"Step 958, loss: 2.5820209980010986, step time: 17.517566680908203ms\r\n",,terminal_output +13313,13120736,"TERMINAL",0,0,"Step 959, loss: 2.6178669929504395, step time: 16.939640045166016ms\r\nStep 960, loss: 2.7630341053009033, step time: 17.211198806762695ms\r\n",,terminal_output +13314,13120829,"TERMINAL",0,0,"Step 961, loss: 2.6178102493286133, step time: 17.29559898376465ms\r\n",,terminal_output +13315,13120879,"TERMINAL",0,0,"Step 962, loss: 2.657900333404541, step time: 17.110586166381836ms\r\n",,terminal_output +13316,13120931,"TERMINAL",0,0,"Step 963, loss: 2.668759822845459, step time: 18.09072494506836ms\r\n",,terminal_output +13317,13121008,"TERMINAL",0,0,"Step 964, loss: 2.6406028270721436, step time: 17.628192901611328ms\r\n",,terminal_output +13318,13121046,"TERMINAL",0,0,"Step 965, loss: 2.5722100734710693, step time: 17.03500747680664ms\r\n",,terminal_output +13319,13121137,"TERMINAL",0,0,"Step 966, loss: 
2.897935152053833, step time: 19.40131187438965ms\r\n",,terminal_output +13320,13121251,"TERMINAL",0,0,"Step 967, loss: 2.73122239112854, step time: 17.9750919342041ms\r\nStep 968, loss: 2.5698790550231934, step time: 17.299175262451172ms\r\n",,terminal_output +13321,13121308,"TERMINAL",0,0,"Step 969, loss: 2.8994760513305664, step time: 17.160654067993164ms\r\n",,terminal_output +13322,13121403,"TERMINAL",0,0,"Step 970, loss: 2.801668882369995, step time: 17.569780349731445ms\r\n",,terminal_output +13323,13121454,"TERMINAL",0,0,"Step 971, loss: 2.655064821243286, step time: 17.08531379699707ms\r\n",,terminal_output +13324,13121547,"TERMINAL",0,0,"Step 972, loss: 2.668138027191162, step time: 17.223834991455078ms\r\n",,terminal_output +13325,13121799,"TERMINAL",0,0,"Step 973, loss: 2.567497491836548, step time: 306.0271739959717ms\r\n",,terminal_output +13326,13121864,"TERMINAL",0,0,"Step 974, loss: 2.5210371017456055, step time: 24.869203567504883ms\r\n",,terminal_output +13327,13121994,"TERMINAL",0,0,"Step 975, loss: 2.608943223953247, step time: 19.53577995300293ms\r\nStep 976, loss: 2.837611198425293, step time: 18.584728240966797ms\r\n",,terminal_output +13328,13122060,"TERMINAL",0,0,"Step 977, loss: 2.6563096046447754, step time: 17.523527145385742ms\r\n",,terminal_output +13329,13122118,"TERMINAL",0,0,"Step 978, loss: 2.5617763996124268, step time: 17.30203628540039ms\r\n",,terminal_output +13330,13122212,"TERMINAL",0,0,"Step 979, loss: 2.8925888538360596, step time: 17.677783966064453ms\r\n",,terminal_output +13331,13122264,"TERMINAL",0,0,"Step 980, loss: 2.7380495071411133, step time: 17.29416847229004ms\r\n",,terminal_output +13332,13122368,"TERMINAL",0,0,"Step 981, loss: 2.6154918670654297, step time: 17.255783081054688ms\r\nStep 982, loss: 2.6423099040985107, step time: 17.7156925201416ms\r\n",,terminal_output +13333,13122462,"TERMINAL",0,0,"Step 983, loss: 2.62133526802063, step time: 17.164945602416992ms\r\n",,terminal_output +13334,13122513,"TERMINAL",0,0,"Step 984, loss: 2.7531135082244873, step time: 17.075777053833008ms\r\n",,terminal_output +13335,13122659,"TERMINAL",0,0,"Step 985, loss: 2.5285425186157227, step time: 17.711639404296875ms\r\nStep 986, loss: 2.6839520931243896, step time: 17.17543601989746ms\r\n",,terminal_output +13336,13122711,"TERMINAL",0,0,"Step 987, loss: 2.9455859661102295, step time: 17.07768440246582ms\r\n",,terminal_output +13337,13122817,"TERMINAL",0,0,"Step 988, loss: 2.836838722229004, step time: 17.668724060058594ms\r\nStep 989, loss: 2.5657098293304443, step time: 17.342090606689453ms\r\n",,terminal_output +13338,13122882,"TERMINAL",0,0,"Step 990, loss: 3.2822389602661133, step time: 17.233848571777344ms\r\n",,terminal_output +13339,13122951,"TERMINAL",0,0,"Step 991, loss: 2.536342144012451, step time: 18.06473731994629ms\r\n",,terminal_output +13340,13123004,"TERMINAL",0,0,"Step 992, loss: 2.7286837100982666, step time: 17.535924911499023ms\r\n",,terminal_output +13341,13123068,"TERMINAL",0,0,"Step 993, loss: 2.50891375541687, step time: 17.566680908203125ms\r\n",,terminal_output +13342,13123126,"TERMINAL",0,0,"Step 994, loss: 2.660032272338867, step time: 17.696142196655273ms\r\n",,terminal_output +13343,13123191,"TERMINAL",0,0,"Step 995, loss: 2.6476457118988037, step time: 17.104148864746094ms\r\n",,terminal_output +13344,13123250,"TERMINAL",0,0,"Step 996, loss: 2.537144184112549, step time: 17.14491844177246ms\r\n",,terminal_output +13345,13123375,"TERMINAL",0,0,"Step 997, loss: 2.5319478511810303, step time: 17.50493049621582ms\r\nStep 
998, loss: 2.628326177597046, step time: 17.110347747802734ms\r\n",,terminal_output +13346,13123437,"TERMINAL",0,0,"Step 999, loss: 2.609591484069824, step time: 16.891002655029297ms\r\n",,terminal_output +13347,13126157,"TERMINAL",0,0,"Step 1000, loss: 2.6564254760742188, step time: 26.317119598388672ms\r\n",,terminal_output +13348,13126267,"TERMINAL",0,0,"Step 1001, loss: 2.55694842338562, step time: 26.002883911132812ms\r\n",,terminal_output +13349,13126319,"TERMINAL",0,0,"Step 1002, loss: 2.509397029876709, step time: 20.558595657348633ms\r\n",,terminal_output +13350,13126414,"TERMINAL",0,0,"Step 1003, loss: 2.5199337005615234, step time: 19.610881805419922ms\r\n",,terminal_output +13351,13126464,"TERMINAL",0,0,"Step 1004, loss: 2.7468645572662354, step time: 19.542694091796875ms\r\n",,terminal_output +13352,13126516,"TERMINAL",0,0,"Step 1005, loss: 2.662914514541626, step time: 18.92256736755371ms\r\n",,terminal_output +13353,13126621,"TERMINAL",0,0,"Step 1006, loss: 2.612950086593628, step time: 18.48912239074707ms\r\nStep 1007, loss: 2.5819239616394043, step time: 18.721580505371094ms\r\n",,terminal_output +13354,13126679,"TERMINAL",0,0,"Step 1008, loss: 2.5503227710723877, step time: 18.220901489257812ms\r\n",,terminal_output +13355,13126784,"TERMINAL",0,0,"Step 1009, loss: 2.5332672595977783, step time: 18.188953399658203ms\r\n",,terminal_output +13356,13126837,"TERMINAL",0,0,"Step 1010, loss: 2.5410118103027344, step time: 19.964933395385742ms\r\n",,terminal_output +13357,13126947,"TERMINAL",0,0,"Step 1011, loss: 2.6628966331481934, step time: 19.624710083007812ms\r\nStep 1012, loss: 2.492847442626953, step time: 18.34893226623535ms\r\n",,terminal_output +13358,13126999,"TERMINAL",0,0,"Step 1013, loss: 2.4500796794891357, step time: 18.304109573364258ms\r\n",,terminal_output +13359,13127063,"TERMINAL",0,0,"Step 1014, loss: 2.5800464153289795, step time: 18.475055694580078ms\r\n",,terminal_output +13360,13127165,"TERMINAL",0,0,"Step 1015, loss: 2.559265613555908, step time: 17.8983211517334ms\r\n",,terminal_output +13361,13127227,"TERMINAL",0,0,"Step 1016, loss: 2.525838613510132, step time: 18.55921745300293ms\r\n",,terminal_output +13362,13127289,"TERMINAL",0,0,"Step 1017, loss: 2.574042797088623, step time: 17.985105514526367ms\r\n",,terminal_output +13363,13127354,"TERMINAL",0,0,"Step 1018, loss: 2.680952787399292, step time: 18.285751342773438ms\r\n",,terminal_output +13364,13127416,"TERMINAL",0,0,"Step 1019, loss: 2.7762880325317383, step time: 18.317699432373047ms\r\n",,terminal_output +13365,13127475,"TERMINAL",0,0,"Step 1020, loss: 3.0724401473999023, step time: 18.093347549438477ms\r\n",,terminal_output +13366,13127535,"TERMINAL",0,0,"Step 1021, loss: 2.494020938873291, step time: 18.340110778808594ms\r\n",,terminal_output +13367,13127596,"TERMINAL",0,0,"Step 1022, loss: 2.7605388164520264, step time: 18.538475036621094ms\r\n",,terminal_output +13368,13127656,"TERMINAL",0,0,"Step 1023, loss: 2.8975765705108643, step time: 18.349885940551758ms\r\n",,terminal_output +13369,13127718,"TERMINAL",0,0,"Step 1024, loss: 2.583467960357666, step time: 18.193721771240234ms\r\n",,terminal_output +13370,13127779,"TERMINAL",0,0,"Step 1025, loss: 2.526623010635376, step time: 18.651962280273438ms\r\n",,terminal_output +13371,13127872,"TERMINAL",0,0,"Step 1026, loss: 2.552732467651367, step time: 17.988204956054688ms\r\n",,terminal_output +13372,13127923,"TERMINAL",0,0,"Step 1027, loss: 2.5640032291412354, step time: 18.158674240112305ms\r\n",,terminal_output 
+13373,13128027,"TERMINAL",0,0,"Step 1028, loss: 2.8159258365631104, step time: 18.39756965637207ms\r\nStep 1029, loss: 2.5565686225891113, step time: 18.119096755981445ms\r\n",,terminal_output +13374,13128084,"TERMINAL",0,0,"Step 1030, loss: 2.8226046562194824, step time: 18.06926727294922ms\r\n",,terminal_output +13375,13128452,"TERMINAL",0,0,"Step 1031, loss: 2.42063307762146, step time: 362.80035972595215ms\r\n",,terminal_output +13376,13128558,"TERMINAL",0,0,"Step 1032, loss: 2.5080349445343018, step time: 29.572248458862305ms\r\n",,terminal_output +13377,13128651,"TERMINAL",0,0,"Step 1033, loss: 2.613830089569092, step time: 20.83897590637207ms\r\nStep 1034, loss: 2.542586088180542, step time: 19.655466079711914ms\r\n",,terminal_output +13378,13128751,"TERMINAL",0,0,"Step 1035, loss: 3.156116485595703, step time: 18.507003784179688ms\r\n",,terminal_output +13379,13128812,"TERMINAL",0,0,"Step 1036, loss: 2.5163509845733643, step time: 18.547773361206055ms\r\n",,terminal_output +13380,13128871,"TERMINAL",0,0,"Step 1037, loss: 2.8291447162628174, step time: 18.689870834350586ms\r\n",,terminal_output +13381,13128931,"TERMINAL",0,0,"Step 1038, loss: 2.418713092803955, step time: 18.500089645385742ms\r\n",,terminal_output +13382,13129039,"TERMINAL",0,0,"Step 1039, loss: 2.5146002769470215, step time: 17.968177795410156ms\r\nStep 1040, loss: 2.5054564476013184, step time: 24.859905242919922ms\r\n",,terminal_output +13383,13129138,"TERMINAL",0,0,"Step 1041, loss: 2.4762158393859863, step time: 18.533706665039062ms\r\n",,terminal_output +13384,13129207,"TERMINAL",0,0,"Step 1042, loss: 2.5498857498168945, step time: 18.38207244873047ms\r\n",,terminal_output +13385,13129269,"TERMINAL",0,0,"Step 1043, loss: 2.6128110885620117, step time: 18.627166748046875ms\r\n",,terminal_output +13386,13129373,"TERMINAL",0,0,"Step 1044, loss: 2.5772995948791504, step time: 18.162250518798828ms\r\nStep 1045, loss: 2.5358755588531494, step time: 18.30458641052246ms\r\n",,terminal_output +13387,13129437,"TERMINAL",0,0,"Step 1046, loss: 2.4949963092803955, step time: 18.614768981933594ms\r\n",,terminal_output +13388,13129495,"TERMINAL",0,0,"Step 1047, loss: 2.467144727706909, step time: 18.29051971435547ms\r\n",,terminal_output +13389,13129556,"TERMINAL",0,0,"Step 1048, loss: 2.4792983531951904, step time: 18.37944984436035ms\r\n",,terminal_output +13390,13129619,"TERMINAL",0,0,"Step 1049, loss: 2.8649022579193115, step time: 18.492937088012695ms\r\n",,terminal_output +13391,13129681,"TERMINAL",0,0,"Step 1050, loss: 2.4398419857025146, step time: 18.174171447753906ms\r\n",,terminal_output +13392,13129744,"TERMINAL",0,0,"Step 1051, loss: 2.6717090606689453, step time: 18.041133880615234ms\r\n",,terminal_output +13393,13129815,"TERMINAL",0,0,"Step 1052, loss: 2.5022380352020264, step time: 18.74399185180664ms\r\n",,terminal_output +13394,13129869,"TERMINAL",0,0,"Step 1053, loss: 2.4494986534118652, step time: 18.07260513305664ms\r\n",,terminal_output +13395,13129998,"TERMINAL",0,0,"Step 1054, loss: 2.937206983566284, step time: 18.301963806152344ms\r\nStep 1055, loss: 2.740325927734375, step time: 18.470287322998047ms\r\n",,terminal_output +13396,13130060,"TERMINAL",0,0,"Step 1056, loss: 2.7163844108581543, step time: 18.352985382080078ms\r\n",,terminal_output +13397,13130124,"TERMINAL",0,0,"Step 1057, loss: 2.5110325813293457, step time: 18.217086791992188ms\r\n",,terminal_output +13398,13130189,"TERMINAL",0,0,"Step 1058, loss: 2.541001081466675, step time: 18.36872100830078ms\r\n",,terminal_output 
+13399,13130282,"TERMINAL",0,0,"Step 1059, loss: 2.748553991317749, step time: 17.774343490600586ms\r\n",,terminal_output +13400,13130336,"TERMINAL",0,0,"Step 1060, loss: 2.9407529830932617, step time: 18.337726593017578ms\r\n",,terminal_output +13401,13130426,"TERMINAL",0,0,"Step 1061, loss: 2.491118907928467, step time: 18.04065704345703ms\r\n",,terminal_output +13402,13130477,"TERMINAL",0,0,"Step 1062, loss: 2.6878130435943604, step time: 18.396377563476562ms\r\n",,terminal_output +13403,13130580,"TERMINAL",0,0,"Step 1063, loss: 2.5072853565216064, step time: 18.021583557128906ms\r\nStep 1064, loss: 2.611621856689453, step time: 18.660306930541992ms\r\n",,terminal_output +13404,13130673,"TERMINAL",0,0,"Step 1065, loss: 2.558635950088501, step time: 17.911195755004883ms\r\n",,terminal_output +13405,13130726,"TERMINAL",0,0,"Step 1066, loss: 2.4199681282043457, step time: 20.78843116760254ms\r\n",,terminal_output +13406,13130832,"TERMINAL",0,0,"Step 1067, loss: 2.561048984527588, step time: 18.375396728515625ms\r\nStep 1068, loss: 2.4455018043518066, step time: 18.253803253173828ms\r\n",,terminal_output +13407,13130896,"TERMINAL",0,0,"Step 1069, loss: 2.4703240394592285, step time: 17.990827560424805ms\r\n",,terminal_output +13408,13131010,"TERMINAL",0,0,"Step 1070, loss: 2.994645118713379, step time: 18.63574981689453ms\r\n",,terminal_output +13409,13131021,"TERMINAL",0,0,"Step 1071, loss: 2.4867000579833984, step time: 18.09978485107422ms\r\n",,terminal_output +13410,13131120,"TERMINAL",0,0,"Step 1072, loss: 2.6279146671295166, step time: 18.040895462036133ms\r\n",,terminal_output +13411,13131182,"TERMINAL",0,0,"Step 1073, loss: 2.5383481979370117, step time: 18.405675888061523ms\r\n",,terminal_output +13412,13131246,"TERMINAL",0,0,"Step 1074, loss: 2.5639383792877197, step time: 18.27836036682129ms\r\n",,terminal_output +13413,13131313,"TERMINAL",0,0,"Step 1075, loss: 2.4283387660980225, step time: 19.245386123657227ms\r\n",,terminal_output +13414,13131376,"TERMINAL",0,0,"Step 1076, loss: 2.472081184387207, step time: 24.672746658325195ms\r\n",,terminal_output +13415,13131439,"TERMINAL",0,0,"Step 1077, loss: 2.400536298751831, step time: 20.4923152923584ms\r\n",,terminal_output +13416,13131502,"TERMINAL",0,0,"Step 1078, loss: 2.7122223377227783, step time: 19.46115493774414ms\r\n",,terminal_output +13417,13131561,"TERMINAL",0,0,"Step 1079, loss: 2.615061044692993, step time: 18.791675567626953ms\r\n",,terminal_output +13418,13131625,"TERMINAL",0,0,"Step 1080, loss: 2.460653781890869, step time: 18.540620803833008ms\r\n",,terminal_output +13419,13131687,"TERMINAL",0,0,"Step 1081, loss: 2.614305019378662, step time: 18.459081649780273ms\r\n",,terminal_output +13420,13131756,"TERMINAL",0,0,"Step 1082, loss: 2.466244697570801, step time: 18.945693969726562ms\r\n",,terminal_output +13421,13131863,"TERMINAL",0,0,"Step 1083, loss: 2.5305278301239014, step time: 18.034934997558594ms\r\nStep 1084, loss: 2.846092939376831, step time: 18.476009368896484ms\r\n",,terminal_output +13422,13131927,"TERMINAL",0,0,"Step 1085, loss: 2.4741365909576416, step time: 18.507957458496094ms\r\n",,terminal_output +13423,13131991,"TERMINAL",0,0,"Step 1086, loss: 2.4817354679107666, step time: 18.253803253173828ms\r\n",,terminal_output +13424,13132044,"TERMINAL",0,0,"Step 1087, loss: 3.0836493968963623, step time: 18.07093620300293ms\r\n",,terminal_output +13425,13132108,"TERMINAL",0,0,"Step 1088, loss: 2.5476415157318115, step time: 18.855571746826172ms\r\n",,terminal_output +13426,13132173,"TERMINAL",0,0,"Step 
1089, loss: 2.506239652633667, step time: 17.856359481811523ms\r\n",,terminal_output +13427,13132236,"TERMINAL",0,0,"Step 1090, loss: 2.5331027507781982, step time: 18.318891525268555ms\r\n",,terminal_output +13428,13132300,"TERMINAL",0,0,"Step 1091, loss: 2.3559482097625732, step time: 18.193483352661133ms\r\n",,terminal_output +13429,13132379,"TERMINAL",0,0,"Step 1092, loss: 2.41231369972229, step time: 18.408775329589844ms\r\n",,terminal_output +13430,13132434,"TERMINAL",0,0,"Step 1093, loss: 2.424858808517456, step time: 20.5996036529541ms\r\n",,terminal_output +13431,13132499,"TERMINAL",0,0,"Step 1094, loss: 3.2190568447113037, step time: 18.66912841796875ms\r\n",,terminal_output +13432,13132620,"TERMINAL",0,0,"Step 1095, loss: 2.409684658050537, step time: 18.221378326416016ms\r\nStep 1096, loss: 2.350508451461792, step time: 18.27549934387207ms\r\n",,terminal_output +13433,13132681,"TERMINAL",0,0,"Step 1097, loss: 2.442290782928467, step time: 18.831729888916016ms\r\n",,terminal_output +13434,13132747,"TERMINAL",0,0,"Step 1098, loss: 2.7958972454071045, step time: 18.227100372314453ms\r\n",,terminal_output +13435,13132809,"TERMINAL",0,0,"Step 1099, loss: 2.454036235809326, step time: 18.291234970092773ms\r\n",,terminal_output +13436,13132918,"TERMINAL",0,0,"Step 1100, loss: 2.3774056434631348, step time: 18.671751022338867ms\r\n",,terminal_output +13437,13132953,"TERMINAL",0,0,"Step 1101, loss: 2.6536734104156494, step time: 18.224716186523438ms\r\n",,terminal_output +13438,13133004,"TERMINAL",0,0,"Step 1102, loss: 2.5606486797332764, step time: 18.702983856201172ms\r\n",,terminal_output +13439,13133101,"TERMINAL",0,0,"Step 1103, loss: 2.4426465034484863, step time: 18.9054012298584ms\r\n",,terminal_output +13440,13133162,"TERMINAL",0,0,"Step 1104, loss: 2.3759586811065674, step time: 18.679141998291016ms\r\n",,terminal_output +13441,13133267,"TERMINAL",0,0,"Step 1105, loss: 2.5823726654052734, step time: 18.218040466308594ms\r\nStep 1106, loss: 2.433629274368286, step time: 18.693923950195312ms\r\n",,terminal_output +13442,13133327,"TERMINAL",0,0,"Step 1107, loss: 2.425884246826172, step time: 18.213510513305664ms\r\n",,terminal_output +13443,13133387,"TERMINAL",0,0,"Step 1108, loss: 2.4324240684509277, step time: 18.414735794067383ms\r\n",,terminal_output +13444,13133445,"TERMINAL",0,0,"Step 1109, loss: 2.3857479095458984, step time: 18.52107048034668ms\r\n",,terminal_output +13445,13133506,"TERMINAL",0,0,"Step 1110, loss: 2.5871546268463135, step time: 18.411636352539062ms\r\n",,terminal_output +13446,13133569,"TERMINAL",0,0,"Step 1111, loss: 2.392155170440674, step time: 18.414020538330078ms\r\n",,terminal_output +13447,13133643,"TERMINAL",0,0,"Step 1112, loss: 2.461189031600952, step time: 18.596887588500977ms\r\n",,terminal_output +13448,13133701,"TERMINAL",0,0,"Step 1113, loss: 2.4222590923309326, step time: 18.13530921936035ms\r\n",,terminal_output +13449,13133791,"TERMINAL",0,0,"Step 1114, loss: 2.794342517852783, step time: 18.0051326751709ms\r\n",,terminal_output +13450,13133844,"TERMINAL",0,0,"Step 1115, loss: 2.485506534576416, step time: 18.69344711303711ms\r\n",,terminal_output +13451,13133935,"TERMINAL",0,0,"Step 1116, loss: 2.3533742427825928, step time: 18.360376358032227ms\r\n",,terminal_output +13452,13133989,"TERMINAL",0,0,"Step 1117, loss: 2.5771210193634033, step time: 18.270254135131836ms\r\n",,terminal_output +13453,13134037,"TERMINAL",0,0,"Step 1118, loss: 2.5347540378570557, step time: 18.54681968688965ms\r\n",,terminal_output 
+13454,13134133,"TERMINAL",0,0,"Step 1119, loss: 2.487842082977295, step time: 18.0966854095459ms\r\n",,terminal_output +13455,13134185,"TERMINAL",0,0,"Step 1120, loss: 2.909764289855957, step time: 18.44954490661621ms\r\n",,terminal_output +13456,13134234,"TERMINAL",0,0,"Step 1121, loss: 2.449432611465454, step time: 21.201372146606445ms\r\n",,terminal_output +13457,13134350,"TERMINAL",0,0,"Step 1122, loss: 2.6920111179351807, step time: 18.56827735900879ms\r\nStep 1123, loss: 2.569627285003662, step time: 18.233299255371094ms\r\n",,terminal_output +13458,13134408,"TERMINAL",0,0,"Step 1124, loss: 2.5113685131073, step time: 18.59140396118164ms\r\n",,terminal_output +13459,13134474,"TERMINAL",0,0,"Step 1125, loss: 2.3586442470550537, step time: 18.13507080078125ms\r\n",,terminal_output +13460,13134534,"TERMINAL",0,0,"Step 1126, loss: 2.594515323638916, step time: 18.2192325592041ms\r\n",,terminal_output +13461,13134603,"TERMINAL",0,0,"Step 1127, loss: 2.5715391635894775, step time: 18.463850021362305ms\r\n",,terminal_output +13462,13134666,"TERMINAL",0,0,"Step 1128, loss: 2.4303789138793945, step time: 18.23258399963379ms\r\n",,terminal_output +13463,13134724,"TERMINAL",0,0,"Step 1129, loss: 2.6834123134613037, step time: 18.10312271118164ms\r\n",,terminal_output +13464,13134785,"TERMINAL",0,0,"Step 1130, loss: 2.440661907196045, step time: 18.651485443115234ms\r\n",,terminal_output +13465,13134852,"TERMINAL",0,0,"Step 1131, loss: 2.4159514904022217, step time: 17.653226852416992ms\r\n",,terminal_output +13466,13134911,"TERMINAL",0,0,"Step 1132, loss: 2.4541876316070557, step time: 18.312692642211914ms\r\n",,terminal_output +13467,13134979,"TERMINAL",0,0,"Step 1133, loss: 2.4973630905151367, step time: 18.130064010620117ms\r\n",,terminal_output +13468,13135043,"TERMINAL",0,0,"Step 1134, loss: 2.3859360218048096, step time: 18.500804901123047ms\r\n",,terminal_output +13469,13135103,"TERMINAL",0,0,"Step 1135, loss: 2.8553318977355957, step time: 18.173933029174805ms\r\n",,terminal_output +13470,13135166,"TERMINAL",0,0,"Step 1136, loss: 2.441032886505127, step time: 18.663406372070312ms\r\n",,terminal_output +13471,13135228,"TERMINAL",0,0,"Step 1137, loss: 2.4264373779296875, step time: 17.952919006347656ms\r\n",,terminal_output +13472,13135292,"TERMINAL",0,0,"Step 1138, loss: 2.4699182510375977, step time: 18.169641494750977ms\r\n",,terminal_output +13473,13135355,"TERMINAL",0,0,"Step 1139, loss: 2.4206480979919434, step time: 18.372535705566406ms\r\n",,terminal_output +13474,13135420,"TERMINAL",0,0,"Step 1140, loss: 2.473872423171997, step time: 18.432140350341797ms\r\n",,terminal_output +13475,13135484,"TERMINAL",0,0,"Step 1141, loss: 2.4876246452331543, step time: 18.04828643798828ms\r\n",,terminal_output +13476,13135588,"TERMINAL",0,0,"Step 1142, loss: 2.468386650085449, step time: 18.459796905517578ms\r\n",,terminal_output +13477,13135639,"TERMINAL",0,0,"Step 1143, loss: 2.489253044128418, step time: 18.095970153808594ms\r\n",,terminal_output +13478,13135749,"TERMINAL",0,0,"Step 1144, loss: 2.4541454315185547, step time: 17.998456954956055ms\r\nStep 1145, loss: 2.3871347904205322, step time: 18.712759017944336ms\r\n",,terminal_output +13479,13135810,"TERMINAL",0,0,"Step 1146, loss: 2.422236919403076, step time: 18.039703369140625ms\r\n",,terminal_output +13480,13135871,"TERMINAL",0,0,"Step 1147, loss: 2.4907078742980957, step time: 18.133878707885742ms\r\n",,terminal_output +13481,13135935,"TERMINAL",0,0,"Step 1148, loss: 2.379387855529785, step time: 
20.1871395111084ms\r\n",,terminal_output +13482,13136007,"TERMINAL",0,0,"Step 1149, loss: 2.7978408336639404, step time: 17.79770851135254ms\r\n",,terminal_output +13483,13136061,"TERMINAL",0,0,"Step 1150, loss: 2.536198854446411, step time: 18.221139907836914ms\r\n",,terminal_output +13484,13136122,"TERMINAL",0,0,"Step 1151, loss: 2.4264326095581055, step time: 18.483400344848633ms\r\n",,terminal_output +13485,13136242,"TERMINAL",0,0,"Step 1152, loss: 2.4245569705963135, step time: 18.36538314819336ms\r\n",,terminal_output +13486,13136328,"TERMINAL",0,0,"Step 1153, loss: 2.6293325424194336, step time: 17.833471298217773ms\r\nStep 1154, loss: 2.409252405166626, step time: 18.718957901000977ms\r\n",,terminal_output +13487,13136388,"TERMINAL",0,0,"Step 1155, loss: 2.436619758605957, step time: 18.192291259765625ms\r\n",,terminal_output +13488,13136450,"TERMINAL",0,0,"Step 1156, loss: 2.5037240982055664, step time: 18.395662307739258ms\r\n",,terminal_output +13489,13136509,"TERMINAL",0,0,"Step 1157, loss: 2.740966796875, step time: 18.663883209228516ms\r\n",,terminal_output +13490,13136571,"TERMINAL",0,0,"Step 1158, loss: 2.1393465995788574, step time: 18.430233001708984ms\r\n",,terminal_output +13491,13136647,"TERMINAL",0,0,"Step 1159, loss: 2.5147292613983154, step time: 18.258094787597656ms\r\n",,terminal_output +13492,13136699,"TERMINAL",0,0,"Step 1160, loss: 2.385946750640869, step time: 18.750667572021484ms\r\n",,terminal_output +13493,13136766,"TERMINAL",0,0,"Step 1161, loss: 2.454528570175171, step time: 17.940282821655273ms\r\n",,terminal_output +13494,13136828,"TERMINAL",0,0,"Step 1162, loss: 2.4256949424743652, step time: 18.44048500061035ms\r\n",,terminal_output +13495,13136891,"TERMINAL",0,0,"Step 1163, loss: 2.4268040657043457, step time: 18.646955490112305ms\r\n",,terminal_output +13496,13136959,"TERMINAL",0,0,"Step 1164, loss: 2.3449065685272217, step time: 18.445491790771484ms\r\n",,terminal_output +13497,13137013,"TERMINAL",0,0,"Step 1165, loss: 2.404942274093628, step time: 18.208742141723633ms\r\n",,terminal_output +13498,13137078,"TERMINAL",0,0,"Step 1166, loss: 2.463904619216919, step time: 18.31960678100586ms\r\n",,terminal_output +13499,13137140,"TERMINAL",0,0,"Step 1167, loss: 2.3477306365966797, step time: 18.240928649902344ms\r\n",,terminal_output +13500,13137229,"TERMINAL",0,0,"Step 1168, loss: 2.4362688064575195, step time: 18.369197845458984ms\r\n",,terminal_output +13501,13137280,"TERMINAL",0,0,"Step 1169, loss: 2.419912338256836, step time: 18.772363662719727ms\r\n",,terminal_output +13502,13137371,"TERMINAL",0,0,"Step 1170, loss: 2.417001247406006, step time: 18.060684204101562ms\r\n",,terminal_output +13503,13137424,"TERMINAL",0,0,"Step 1171, loss: 2.491873264312744, step time: 18.254995346069336ms\r\n",,terminal_output +13504,13137527,"TERMINAL",0,0,"Step 1172, loss: 2.3687782287597656, step time: 18.406152725219727ms\r\nStep 1173, loss: 2.4451847076416016, step time: 18.47386360168457ms\r\n",,terminal_output +13505,13137624,"TERMINAL",0,0,"Step 1174, loss: 2.3691658973693848, step time: 18.370389938354492ms\r\n",,terminal_output +13506,13137672,"TERMINAL",0,0,"Step 1175, loss: 3.388031482696533, step time: 18.841266632080078ms\r\n",,terminal_output +13507,13137764,"TERMINAL",0,0,"Step 1176, loss: 2.430271625518799, step time: 18.51201057434082ms\r\n",,terminal_output +13508,13137816,"TERMINAL",0,0,"Step 1177, loss: 2.433398962020874, step time: 18.148422241210938ms\r\n",,terminal_output +13509,13137867,"TERMINAL",0,0,"Step 1178, loss: 2.4966561794281006, 
step time: 18.62645149230957ms\r\n",,terminal_output +13510,13137973,"TERMINAL",0,0,"Step 1179, loss: 2.777134895324707, step time: 18.12124252319336ms\r\nStep 1180, loss: 2.316175937652588, step time: 18.22376251220703ms\r\n",,terminal_output +13511,13138041,"TERMINAL",0,0,"Step 1181, loss: 2.3655827045440674, step time: 18.349409103393555ms\r\n",,terminal_output +13512,13138102,"TERMINAL",0,0,"Step 1182, loss: 2.621142625808716, step time: 18.499135971069336ms\r\n",,terminal_output +13513,13138165,"TERMINAL",0,0,"Step 1183, loss: 2.4037344455718994, step time: 17.997026443481445ms\r\n",,terminal_output +13514,13138228,"TERMINAL",0,0,"Step 1184, loss: 2.404238224029541, step time: 18.49055290222168ms\r\n",,terminal_output +13515,13138290,"TERMINAL",0,0,"Step 1185, loss: 2.4428141117095947, step time: 17.912864685058594ms\r\n",,terminal_output +13516,13138415,"TERMINAL",0,0,"Step 1186, loss: 2.2086684703826904, step time: 17.98725128173828ms\r\nStep 1187, loss: 2.3173296451568604, step time: 18.584728240966797ms\r\n",,terminal_output +13517,13138480,"TERMINAL",0,0,"Step 1188, loss: 2.429673433303833, step time: 18.311500549316406ms\r\n",,terminal_output +13518,13138544,"TERMINAL",0,0,"Step 1189, loss: 2.354123830795288, step time: 18.33343505859375ms\r\n",,terminal_output +13519,13138862,"TERMINAL",0,0,"Step 1190, loss: 2.4105072021484375, step time: 302.0148277282715ms\r\n",,terminal_output +13520,13138928,"TERMINAL",0,0,"Step 1191, loss: 2.4489004611968994, step time: 26.239633560180664ms\r\n",,terminal_output +13521,13138988,"TERMINAL",0,0,"Step 1192, loss: 2.8107850551605225, step time: 21.021127700805664ms\r\n",,terminal_output +13522,13139051,"TERMINAL",0,0,"Step 1193, loss: 2.527174949645996, step time: 19.614219665527344ms\r\n",,terminal_output +13523,13139112,"TERMINAL",0,0,"Step 1194, loss: 2.3568148612976074, step time: 18.7990665435791ms\r\n",,terminal_output +13524,13139179,"TERMINAL",0,0,"Step 1195, loss: 2.3289949893951416, step time: 18.504619598388672ms\r\n",,terminal_output +13525,13139236,"TERMINAL",0,0,"Step 1196, loss: 3.269412040710449, step time: 18.826007843017578ms\r\n",,terminal_output +13526,13139309,"TERMINAL",0,0,"Step 1197, loss: 2.3530919551849365, step time: 18.446922302246094ms\r\n",,terminal_output +13527,13139427,"TERMINAL",0,0,"Step 1198, loss: 2.3463799953460693, step time: 18.377065658569336ms\r\nStep 1199, loss: 2.571545362472534, step time: 18.782854080200195ms\r\n",,terminal_output +13528,13139493,"TERMINAL",0,0,"Step 1200, loss: 2.4096946716308594, step time: 18.497943878173828ms\r\n",,terminal_output +13529,13139555,"TERMINAL",0,0,"Step 1201, loss: 2.3645832538604736, step time: 18.37301254272461ms\r\n",,terminal_output +13530,13139621,"TERMINAL",0,0,"Step 1202, loss: 2.301581621170044, step time: 19.084453582763672ms\r\n",,terminal_output +13531,13139682,"TERMINAL",0,0,"Step 1203, loss: 2.3509862422943115, step time: 18.310070037841797ms\r\n",,terminal_output +13532,13139747,"TERMINAL",0,0,"Step 1204, loss: 2.3632049560546875, step time: 18.400192260742188ms\r\n",,terminal_output +13533,13139810,"TERMINAL",0,0,"Step 1205, loss: 2.300339698791504, step time: 18.918752670288086ms\r\n",,terminal_output +13534,13139950,"TERMINAL",0,0,"Step 1206, loss: 2.4401357173919678, step time: 18.270492553710938ms\r\nStep 1207, loss: 2.366290807723999, step time: 18.296241760253906ms\r\n",,terminal_output +13535,13140012,"TERMINAL",0,0,"Step 1208, loss: 2.3434255123138428, step time: 18.642902374267578ms\r\n",,terminal_output +13536,13140070,"TERMINAL",0,0,"Step 
[≈560 repetitive `terminal_output` records condensed — training-log residue from the live run. Recoverable information: steps 1209–1853 of the training loop; loss drifts from ≈2.4 down to ≈1.9; step time is mostly 17–30 ms, with isolated ~300–360 ms spikes at steps 1325, 1446, 1517, 1638, and 1824, and a single transient "loss: nan" at step 1499 before training continues normally.]
17.26222038269043ms\r\n",,terminal_output +14097,13185287,"TERMINAL",0,0,"Step 1854, loss: 1.8669281005859375, step time: 17.751216888427734ms\r\n",,terminal_output +14098,13185349,"TERMINAL",0,0,"Step 1855, loss: 2.038703441619873, step time: 17.267942428588867ms\r\n",,terminal_output +14099,13185413,"TERMINAL",0,0,"Step 1856, loss: 1.879077434539795, step time: 17.322301864624023ms\r\n",,terminal_output +14100,13185477,"TERMINAL",0,0,"Step 1857, loss: 1.9310414791107178, step time: 17.543315887451172ms\r\n",,terminal_output +14101,13185540,"TERMINAL",0,0,"Step 1858, loss: 1.9304535388946533, step time: 17.380237579345703ms\r\n",,terminal_output +14102,13185620,"TERMINAL",0,0,"Step 1859, loss: 2.1940762996673584, step time: 17.145395278930664ms\r\n",,terminal_output +14103,13185685,"TERMINAL",0,0,"Step 1860, loss: 2.0657200813293457, step time: 17.752408981323242ms\r\n",,terminal_output +14104,13185749,"TERMINAL",0,0,"Step 1861, loss: 2.096299409866333, step time: 17.200231552124023ms\r\n",,terminal_output +14105,13185856,"TERMINAL",0,0,"Step 1862, loss: 1.8482486009597778, step time: 17.30656623840332ms\r\nStep 1863, loss: 1.8940608501434326, step time: 17.673969268798828ms\r\n",,terminal_output +14106,13185920,"TERMINAL",0,0,"Step 1864, loss: 2.3140668869018555, step time: 17.37833023071289ms\r\n",,terminal_output +14107,13186025,"TERMINAL",0,0,"Step 1865, loss: 1.9437766075134277, step time: 19.854307174682617ms\r\nStep 1866, loss: 1.8892996311187744, step time: 17.736196517944336ms\r\n",,terminal_output +14108,13186120,"TERMINAL",0,0,"Step 1867, loss: 1.8297659158706665, step time: 17.179012298583984ms\r\n",,terminal_output +14109,13186227,"TERMINAL",0,0,"Step 1868, loss: 1.841306447982788, step time: 17.389535903930664ms\r\nStep 1869, loss: 2.4379525184631348, step time: 17.46392250061035ms\r\n",,terminal_output +14110,13186290,"TERMINAL",0,0,"Step 1870, loss: 1.903056263923645, step time: 17.40407943725586ms\r\n",,terminal_output +14111,13186353,"TERMINAL",0,0,"Step 1871, loss: 1.8440852165222168, step time: 17.131328582763672ms\r\n",,terminal_output +14112,13186422,"TERMINAL",0,0,"Step 1872, loss: 1.8449928760528564, step time: 17.72904396057129ms\r\n",,terminal_output +14113,13186480,"TERMINAL",0,0,"Step 1873, loss: 1.8679598569869995, step time: 17.122507095336914ms\r\n",,terminal_output +14114,13186551,"TERMINAL",0,0,"Step 1874, loss: 1.9072558879852295, step time: 17.28987693786621ms\r\n",,terminal_output +14115,13186615,"TERMINAL",0,0,"Step 1875, loss: 1.881138801574707, step time: 17.518281936645508ms\r\n",,terminal_output +14116,13186677,"TERMINAL",0,0,"Step 1876, loss: 1.836431860923767, step time: 17.424345016479492ms\r\n",,terminal_output +14117,13186743,"TERMINAL",0,0,"Step 1877, loss: 2.1703970432281494, step time: 17.35830307006836ms\r\n",,terminal_output +14118,13186806,"TERMINAL",0,0,"Step 1878, loss: 2.079402446746826, step time: 17.72284507751465ms\r\n",,terminal_output +14119,13186869,"TERMINAL",0,0,"Step 1879, loss: 1.9219008684158325, step time: 17.394542694091797ms\r\n",,terminal_output +14120,13186930,"TERMINAL",0,0,"Step 1880, loss: 1.824465274810791, step time: 17.330408096313477ms\r\n",,terminal_output +14121,13187036,"TERMINAL",0,0,"Step 1881, loss: 1.8307827711105347, step time: 17.51875877380371ms\r\nStep 1882, loss: 1.9073759317398071, step time: 17.316102981567383ms\r\n",,terminal_output +14122,13187099,"TERMINAL",0,0,"Step 1883, loss: 1.8466699123382568, step time: 17.300844192504883ms\r\n",,terminal_output +14123,13187159,"TERMINAL",0,0,"Step 1884, 
loss: 1.807223916053772, step time: 17.619609832763672ms\r\n",,terminal_output +14124,13187219,"TERMINAL",0,0,"Step 1885, loss: 1.7836705446243286, step time: 17.35973358154297ms\r\n",,terminal_output +14125,13187286,"TERMINAL",0,0,"Step 1886, loss: 2.028444766998291, step time: 17.333984375ms\r\n",,terminal_output +14126,13187375,"TERMINAL",0,0,"Step 1887, loss: 1.8556382656097412, step time: 17.613649368286133ms\r\n",,terminal_output +14127,13187430,"TERMINAL",0,0,"Step 1888, loss: 1.933770775794983, step time: 17.36593246459961ms\r\n",,terminal_output +14128,13187537,"TERMINAL",0,0,"Step 1889, loss: 2.0121312141418457, step time: 17.27151870727539ms\r\nStep 1890, loss: 1.9125245809555054, step time: 17.838478088378906ms\r\n",,terminal_output +14129,13187601,"TERMINAL",0,0,"Step 1891, loss: 2.0028865337371826, step time: 17.358064651489258ms\r\n",,terminal_output +14130,13187669,"TERMINAL",0,0,"Step 1892, loss: 1.8545584678649902, step time: 17.35401153564453ms\r\n",,terminal_output +14131,13187736,"TERMINAL",0,0,"Step 1893, loss: 1.944990634918213, step time: 18.438100814819336ms\r\n",,terminal_output +14132,13187804,"TERMINAL",0,0,"Step 1894, loss: 1.8263036012649536, step time: 17.270326614379883ms\r\n",,terminal_output +14133,13187866,"TERMINAL",0,0,"Step 1895, loss: 2.0861241817474365, step time: 17.230510711669922ms\r\n",,terminal_output +14134,13187934,"TERMINAL",0,0,"Step 1896, loss: 1.7972532510757446, step time: 17.633438110351562ms\r\n",,terminal_output +14135,13187992,"TERMINAL",0,0,"Step 1897, loss: 1.781683087348938, step time: 17.295122146606445ms\r\n",,terminal_output +14136,13188063,"TERMINAL",0,0,"Step 1898, loss: 1.9417473077774048, step time: 17.303943634033203ms\r\n",,terminal_output +14137,13188127,"TERMINAL",0,0,"Step 1899, loss: 1.8355618715286255, step time: 17.67444610595703ms\r\n",,terminal_output +14138,13188247,"TERMINAL",0,0,"Step 1900, loss: 1.7502882480621338, step time: 17.200469970703125ms\r\nStep 1901, loss: 1.8995012044906616, step time: 17.25006103515625ms\r\n",,terminal_output +14139,13188330,"TERMINAL",0,0,"Step 1902, loss: 1.7589118480682373, step time: 17.667770385742188ms\r\n",,terminal_output +14140,13188438,"TERMINAL",0,0,"Step 1903, loss: 1.9820927381515503, step time: 17.128705978393555ms\r\nStep 1904, loss: 1.9468554258346558, step time: 17.24410057067871ms\r\n",,terminal_output +14141,13188501,"TERMINAL",0,0,"Step 1905, loss: 1.8224194049835205, step time: 19.61040496826172ms\r\n",,terminal_output +14142,13188566,"TERMINAL",0,0,"Step 1906, loss: 1.980417013168335, step time: 17.905473709106445ms\r\n",,terminal_output +14143,13188671,"TERMINAL",0,0,"Step 1907, loss: 1.8680129051208496, step time: 17.122268676757812ms\r\nStep 1908, loss: 1.8648371696472168, step time: 17.649173736572266ms\r\n",,terminal_output +14144,13188733,"TERMINAL",0,0,"Step 1909, loss: 1.8040512800216675, step time: 17.2579288482666ms\r\n",,terminal_output +14145,13188831,"TERMINAL",0,0,"Step 1910, loss: 1.8854742050170898, step time: 17.83156394958496ms\r\n",,terminal_output +14146,13188884,"TERMINAL",0,0,"Step 1911, loss: 2.3016645908355713, step time: 17.557382583618164ms\r\n",,terminal_output +14147,13188990,"TERMINAL",0,0,"Step 1912, loss: 1.952483892440796, step time: 17.35520362854004ms\r\nStep 1913, loss: 1.8258914947509766, step time: 17.24076271057129ms\r\n",,terminal_output +14148,13189055,"TERMINAL",0,0,"Step 1914, loss: 2.1092662811279297, step time: 17.6849365234375ms\r\n",,terminal_output +14149,13189120,"TERMINAL",0,0,"Step 1915, loss: 
2.5702059268951416, step time: 17.240524291992188ms\r\n",,terminal_output +14150,13189183,"TERMINAL",0,0,"Step 1916, loss: 1.936400294303894, step time: 17.35711097717285ms\r\n",,terminal_output +14151,13189247,"TERMINAL",0,0,"Step 1917, loss: 1.756020426750183, step time: 17.574310302734375ms\r\n",,terminal_output +14152,13189309,"TERMINAL",0,0,"Step 1918, loss: 1.686174750328064, step time: 17.279386520385742ms\r\n",,terminal_output +14153,13189374,"TERMINAL",0,0,"Step 1919, loss: 1.799153208732605, step time: 26.529550552368164ms\r\n",,terminal_output +14154,13189434,"TERMINAL",0,0,"Step 1920, loss: 1.7673944234848022, step time: 23.226261138916016ms\r\n",,terminal_output +14155,13189501,"TERMINAL",0,0,"Step 1921, loss: 1.798887848854065, step time: 17.399072647094727ms\r\n",,terminal_output +14156,13189567,"TERMINAL",0,0,"Step 1922, loss: 2.0778744220733643, step time: 17.3337459564209ms\r\n",,terminal_output +14157,13189628,"TERMINAL",0,0,"Step 1923, loss: 1.9854248762130737, step time: 17.734527587890625ms\r\n",,terminal_output +14158,13189723,"TERMINAL",0,0,"Step 1924, loss: 2.003272533416748, step time: 17.357349395751953ms\r\n",,terminal_output +14159,13189776,"TERMINAL",0,0,"Step 1925, loss: 1.8939032554626465, step time: 17.312049865722656ms\r\n",,terminal_output +14160,13189880,"TERMINAL",0,0,"Step 1926, loss: 1.8512989282608032, step time: 17.67134666442871ms\r\nStep 1927, loss: 1.9236714839935303, step time: 17.29726791381836ms\r\n",,terminal_output +14161,13189935,"TERMINAL",0,0,"Step 1928, loss: 2.0520830154418945, step time: 17.512083053588867ms\r\n",,terminal_output +14162,13190060,"TERMINAL",0,0,"Step 1929, loss: 1.8600459098815918, step time: 17.724990844726562ms\r\nStep 1930, loss: 1.790048599243164, step time: 17.367124557495117ms\r\n",,terminal_output +14163,13190125,"TERMINAL",0,0,"Step 1931, loss: 2.0519208908081055, step time: 17.352581024169922ms\r\n",,terminal_output +14164,13190225,"TERMINAL",0,0,"Step 1932, loss: 2.0995073318481445, step time: 17.85731315612793ms\r\n",,terminal_output +14165,13190277,"TERMINAL",0,0,"Step 1933, loss: 2.160626173019409, step time: 17.301082611083984ms\r\n",,terminal_output +14166,13190384,"TERMINAL",0,0,"Step 1934, loss: 1.8015060424804688, step time: 18.926143646240234ms\r\nStep 1935, loss: 1.8545726537704468, step time: 18.28312873840332ms\r\n",,terminal_output +14167,13190478,"TERMINAL",0,0,"Step 1936, loss: 1.8191906213760376, step time: 17.395734786987305ms\r\n",,terminal_output +14168,13190539,"TERMINAL",0,0,"Step 1937, loss: 1.7568676471710205, step time: 17.247915267944336ms\r\n",,terminal_output +14169,13190601,"TERMINAL",0,0,"Step 1938, loss: 1.7512682676315308, step time: 17.656564712524414ms\r\n",,terminal_output +14170,13190662,"TERMINAL",0,0,"Step 1939, loss: 1.7555605173110962, step time: 17.22860336303711ms\r\n",,terminal_output +14171,13190767,"TERMINAL",0,0,"Step 1940, loss: 1.9502220153808594, step time: 17.52448081970215ms\r\nStep 1941, loss: 1.8398895263671875, step time: 17.7152156829834ms\r\n",,terminal_output +14172,13190886,"TERMINAL",0,0,"Step 1942, loss: 1.8025630712509155, step time: 17.490863800048828ms\r\nStep 1943, loss: 1.9135948419570923, step time: 18.07236671447754ms\r\n",,terminal_output +14173,13190945,"TERMINAL",0,0,"Step 1944, loss: 1.779089331626892, step time: 18.03445816040039ms\r\n",,terminal_output +14174,13191069,"TERMINAL",0,0,"Step 1945, loss: 1.7792352437973022, step time: 17.18425750732422ms\r\nStep 1946, loss: 1.863294005393982, step time: 
17.58718490600586ms\r\n",,terminal_output +14175,13191133,"TERMINAL",0,0,"Step 1947, loss: 1.8371117115020752, step time: 17.63463020324707ms\r\n",,terminal_output +14176,13191262,"TERMINAL",0,0,"Step 1948, loss: 1.8581421375274658, step time: 17.3647403717041ms\r\nStep 1949, loss: 2.0575411319732666, step time: 17.267704010009766ms\r\n",,terminal_output +14177,13191355,"TERMINAL",0,0,"Step 1950, loss: 1.942523717880249, step time: 25.16031265258789ms\r\n",,terminal_output +14178,13191407,"TERMINAL",0,0,"Step 1951, loss: 1.7799426317214966, step time: 20.148754119873047ms\r\n",,terminal_output +14179,13191503,"TERMINAL",0,0,"Step 1952, loss: 1.776184320449829, step time: 18.39447021484375ms\r\n",,terminal_output +14180,13191556,"TERMINAL",0,0,"Step 1953, loss: 1.783806324005127, step time: 18.294811248779297ms\r\n",,terminal_output +14181,13191609,"TERMINAL",0,0,"Step 1954, loss: 1.8795384168624878, step time: 17.774581909179688ms\r\n",,terminal_output +14182,13191713,"TERMINAL",0,0,"Step 1955, loss: 1.7690702676773071, step time: 17.606496810913086ms\r\nStep 1956, loss: 2.2256674766540527, step time: 18.144845962524414ms\r\n",,terminal_output +14183,13191773,"TERMINAL",0,0,"Step 1957, loss: 1.975986361503601, step time: 17.512798309326172ms\r\n",,terminal_output +14184,13191833,"TERMINAL",0,0,"Step 1958, loss: 1.8610236644744873, step time: 17.769813537597656ms\r\n",,terminal_output +14185,13191896,"TERMINAL",0,0,"Step 1959, loss: 1.7801192998886108, step time: 19.232749938964844ms\r\n",,terminal_output +14186,13192025,"TERMINAL",0,0,"Step 1960, loss: 1.7958616018295288, step time: 17.53401756286621ms\r\nStep 1961, loss: 2.587003469467163, step time: 17.25602149963379ms\r\n",,terminal_output +14187,13192091,"TERMINAL",0,0,"Step 1962, loss: 2.021651029586792, step time: 17.765045166015625ms\r\n",,terminal_output +14188,13192192,"TERMINAL",0,0,"Step 1963, loss: 2.119065761566162, step time: 17.279863357543945ms\r\n",,terminal_output +14189,13192243,"TERMINAL",0,0,"Step 1964, loss: 1.8676902055740356, step time: 18.415451049804688ms\r\n",,terminal_output +14190,13192346,"TERMINAL",0,0,"Step 1965, loss: 1.7630188465118408, step time: 23.58222007751465ms\r\nStep 1966, loss: 1.7541999816894531, step time: 19.354581832885742ms\r\n",,terminal_output +14191,13192438,"TERMINAL",0,0,"Step 1967, loss: 1.7057757377624512, step time: 18.07713508605957ms\r\n",,terminal_output +14192,13192489,"TERMINAL",0,0,"Step 1968, loss: 1.8527898788452148, step time: 18.11814308166504ms\r\n",,terminal_output +14193,13192582,"TERMINAL",0,0,"Step 1969, loss: 1.8682533502578735, step time: 17.43769645690918ms\r\n",,terminal_output +14194,13192632,"TERMINAL",0,0,"Step 1970, loss: 1.7171409130096436, step time: 17.513275146484375ms\r\n",,terminal_output +14195,13192687,"TERMINAL",0,0,"Step 1971, loss: 1.748583197593689, step time: 17.650604248046875ms\r\n",,terminal_output +14196,13192792,"TERMINAL",0,0,"Step 1972, loss: 1.7560467720031738, step time: 17.5631046295166ms\r\nStep 1973, loss: 1.7571258544921875, step time: 17.41623878479004ms\r\n",,terminal_output +14197,13192852,"TERMINAL",0,0,"Step 1974, loss: 1.7535001039505005, step time: 17.888307571411133ms\r\n",,terminal_output +14198,13192914,"TERMINAL",0,0,"Step 1975, loss: 2.6014745235443115, step time: 17.57192611694336ms\r\n",,terminal_output +14199,13192974,"TERMINAL",0,0,"Step 1976, loss: 1.7697464227676392, step time: 17.70949363708496ms\r\n",,terminal_output +14200,13193035,"TERMINAL",0,0,"Step 1977, loss: 2.7943315505981445, step time: 
20.071029663085938ms\r\n",,terminal_output +14201,13193144,"TERMINAL",0,0,"Step 1978, loss: 2.0126752853393555, step time: 24.231672286987305ms\r\n",,terminal_output +14202,13193195,"TERMINAL",0,0,"Step 1979, loss: 1.7310289144515991, step time: 22.815227508544922ms\r\n",,terminal_output +14203,13193301,"TERMINAL",0,0,"Step 1980, loss: 2.0137240886688232, step time: 19.83046531677246ms\r\nStep 1981, loss: 2.093531370162964, step time: 18.18561553955078ms\r\n",,terminal_output +14204,13193394,"TERMINAL",0,0,"Step 1982, loss: 1.9736723899841309, step time: 17.731428146362305ms\r\n",,terminal_output +14205,13193714,"TERMINAL",0,0,"Step 1983, loss: 1.8222808837890625, step time: 352.02884674072266ms\r\n",,terminal_output +14206,13193823,"TERMINAL",0,0,"Step 1984, loss: 1.7828586101531982, step time: 24.7342586517334ms\r\n",,terminal_output +14207,13193873,"TERMINAL",0,0,"Step 1985, loss: 1.9643371105194092, step time: 19.752025604248047ms\r\n",,terminal_output +14208,13193982,"TERMINAL",0,0,"Step 1986, loss: 1.8448066711425781, step time: 18.588542938232422ms\r\nStep 1987, loss: 1.932600736618042, step time: 17.769813537597656ms\r\n",,terminal_output +14209,13194042,"TERMINAL",0,0,"Step 1988, loss: 2.028438091278076, step time: 17.716169357299805ms\r\n",,terminal_output +14210,13194109,"TERMINAL",0,0,"Step 1989, loss: 1.724957823753357, step time: 17.981767654418945ms\r\n",,terminal_output +14211,13194170,"TERMINAL",0,0,"Step 1990, loss: 1.7283198833465576, step time: 17.549991607666016ms\r\n",,terminal_output +14212,13194233,"TERMINAL",0,0,"Step 1991, loss: 1.7371141910552979, step time: 17.27747917175293ms\r\n",,terminal_output +14213,13194295,"TERMINAL",0,0,"Step 1992, loss: 1.814325213432312, step time: 17.879009246826172ms\r\n",,terminal_output +14214,13194357,"TERMINAL",0,0,"Step 1993, loss: 1.7121063470840454, step time: 17.473459243774414ms\r\n",,terminal_output +14215,13194418,"TERMINAL",0,0,"Step 1994, loss: 1.9324253797531128, step time: 18.704891204833984ms\r\n",,terminal_output +14216,13194488,"TERMINAL",0,0,"Step 1995, loss: 1.7994730472564697, step time: 18.254995346069336ms\r\n",,terminal_output +14217,13194544,"TERMINAL",0,0,"Step 1996, loss: 2.2549421787261963, step time: 17.580032348632812ms\r\n",,terminal_output +14218,13194607,"TERMINAL",0,0,"Step 1997, loss: 1.6939244270324707, step time: 17.470836639404297ms\r\n",,terminal_output +14219,13194668,"TERMINAL",0,0,"Step 1998, loss: 1.7100318670272827, step time: 19.073963165283203ms\r\n",,terminal_output +14220,13194735,"TERMINAL",0,0,"Step 1999, loss: 1.7073532342910767, step time: 17.66514778137207ms\r\n",,terminal_output +14221,13197432,"TERMINAL",0,0,"Step 2000, loss: 2.4522624015808105, step time: 41.63336753845215ms\r\nStep 2001, loss: 2.0318262577056885, step time: 27.16517448425293ms\r\n",,terminal_output +14222,13197483,"TERMINAL",0,0,"Step 2002, loss: 1.870410680770874, step time: 20.36738395690918ms\r\n",,terminal_output +14223,13197597,"TERMINAL",0,0,"Step 2003, loss: 2.1666760444641113, step time: 19.72341537475586ms\r\nStep 2004, loss: 1.852097511291504, step time: 18.96500587463379ms\r\n",,terminal_output +14224,13197662,"TERMINAL",0,0,"Step 2005, loss: 1.7930094003677368, step time: 18.934965133666992ms\r\n",,terminal_output +14225,13197728,"TERMINAL",0,0,"Step 2006, loss: 1.7457166910171509, step time: 18.425703048706055ms\r\n",,terminal_output +14226,13197819,"TERMINAL",0,0,"Step 2007, loss: 1.826462745666504, step time: 19.284963607788086ms\r\n",,terminal_output +14227,13197872,"TERMINAL",0,0,"Step 2008, 
loss: 2.0035057067871094, step time: 18.407106399536133ms\r\n",,terminal_output +14228,13197982,"TERMINAL",0,0,"Step 2009, loss: 1.750398874282837, step time: 18.193721771240234ms\r\nStep 2010, loss: 1.7163859605789185, step time: 20.439863204956055ms\r\n",,terminal_output +14229,13198049,"TERMINAL",0,0,"Step 2011, loss: 1.7430806159973145, step time: 18.616676330566406ms\r\n",,terminal_output +14230,13198109,"TERMINAL",0,0,"Step 2012, loss: 1.646648645401001, step time: 18.254518508911133ms\r\n",,terminal_output +14231,13198173,"TERMINAL",0,0,"Step 2013, loss: 1.9233403205871582, step time: 18.5697078704834ms\r\n",,terminal_output +14232,13198247,"TERMINAL",0,0,"Step 2014, loss: 1.935562252998352, step time: 18.200397491455078ms\r\n",,terminal_output +14233,13198304,"TERMINAL",0,0,"Step 2015, loss: 1.8405085802078247, step time: 18.250465393066406ms\r\n",,terminal_output +14234,13198394,"TERMINAL",0,0,"Step 2016, loss: 1.7533308267593384, step time: 18.382549285888672ms\r\n",,terminal_output +14235,13198448,"TERMINAL",0,0,"Step 2017, loss: 2.073935031890869, step time: 18.2187557220459ms\r\n",,terminal_output +14236,13198539,"TERMINAL",0,0,"Step 2018, loss: 1.9555479288101196, step time: 18.5396671295166ms\r\n",,terminal_output +14237,13198594,"TERMINAL",0,0,"Step 2019, loss: 1.7231409549713135, step time: 18.570899963378906ms\r\n",,terminal_output +14238,13198682,"TERMINAL",0,0,"Step 2020, loss: 1.8274612426757812, step time: 18.238306045532227ms\r\nStep 2021, loss: 1.8295748233795166, step time: 18.171072006225586ms\r\n",,terminal_output +14239,13198776,"TERMINAL",0,0,"Step 2022, loss: 1.9623048305511475, step time: 18.575668334960938ms\r\n",,terminal_output +14240,13198880,"TERMINAL",0,0,"Step 2023, loss: 1.9125702381134033, step time: 18.262624740600586ms\r\nStep 2024, loss: 2.080944299697876, step time: 18.29981803894043ms\r\n",,terminal_output +14241,13198940,"TERMINAL",0,0,"Step 2025, loss: 2.370969295501709, step time: 18.326997756958008ms\r\n",,terminal_output +14242,13199007,"TERMINAL",0,0,"Step 2026, loss: 1.7889281511306763, step time: 20.28036117553711ms\r\n",,terminal_output +14243,13199064,"TERMINAL",0,0,"Step 2027, loss: 2.196521759033203, step time: 18.096446990966797ms\r\n",,terminal_output +14244,13199170,"TERMINAL",0,0,"Step 2028, loss: 2.1160268783569336, step time: 18.46146583557129ms\r\n",,terminal_output +14245,13199231,"TERMINAL",0,0,"Step 2029, loss: 1.730539083480835, step time: 18.189430236816406ms\r\n",,terminal_output +14246,13199297,"TERMINAL",0,0,"Step 2030, loss: 1.7642468214035034, step time: 18.220186233520508ms\r\n",,terminal_output +14247,13199357,"TERMINAL",0,0,"Step 2031, loss: 2.0598037242889404, step time: 18.51058006286621ms\r\n",,terminal_output +14248,13199417,"TERMINAL",0,0,"Step 2032, loss: 1.8697396516799927, step time: 18.106460571289062ms\r\n",,terminal_output +14249,13199480,"TERMINAL",0,0,"Step 2033, loss: 1.687483549118042, step time: 18.00227165222168ms\r\n",,terminal_output +14250,13199544,"TERMINAL",0,0,"Step 2034, loss: 1.9582427740097046, step time: 18.33033561706543ms\r\n",,terminal_output +14251,13199608,"TERMINAL",0,0,"Step 2035, loss: 1.6253230571746826, step time: 17.977237701416016ms\r\n",,terminal_output +14252,13199672,"TERMINAL",0,0,"Step 2036, loss: 1.5767909288406372, step time: 18.123149871826172ms\r\n",,terminal_output +14253,13199733,"TERMINAL",0,0,"Step 2037, loss: 2.180745840072632, step time: 18.27096939086914ms\r\n",,terminal_output +14254,13199797,"TERMINAL",0,0,"Step 2038, loss: 1.8223146200180054, step time: 
18.033742904663086ms\r\n",,terminal_output +14255,13199860,"TERMINAL",0,0,"Step 2039, loss: 2.2130210399627686, step time: 17.95220375061035ms\r\n",,terminal_output +14256,13199924,"TERMINAL",0,0,"Step 2040, loss: 1.7266196012496948, step time: 18.5396671295166ms\r\n",,terminal_output +14257,13199987,"TERMINAL",0,0,"Step 2041, loss: 1.6949766874313354, step time: 18.005847930908203ms\r\n",,terminal_output +14258,13200048,"TERMINAL",0,0,"Step 2042, loss: 1.9378670454025269, step time: 19.50526237487793ms\r\n",,terminal_output +14259,13200148,"TERMINAL",0,0,"Step 2043, loss: 1.6472516059875488, step time: 18.120527267456055ms\r\nStep 2044, loss: 1.6522045135498047, step time: 18.12577247619629ms\r\n",,terminal_output +14260,13200211,"TERMINAL",0,0,"Step 2045, loss: 1.8576171398162842, step time: 17.757654190063477ms\r\n",,terminal_output +14261,13200312,"TERMINAL",0,0,"Step 2046, loss: 1.6744741201400757, step time: 18.370866775512695ms\r\n",,terminal_output +14262,13200375,"TERMINAL",0,0,"Step 2047, loss: 1.7727948427200317, step time: 17.725467681884766ms\r\n",,terminal_output +14263,13200435,"TERMINAL",0,0,"Step 2048, loss: 1.827378511428833, step time: 18.13054084777832ms\r\n",,terminal_output +14264,13200543,"TERMINAL",0,0,"Step 2049, loss: 1.7479678392410278, step time: 18.14413070678711ms\r\nStep 2050, loss: 1.7342828512191772, step time: 18.111228942871094ms\r\n",,terminal_output +14265,13200636,"TERMINAL",0,0,"Step 2051, loss: 1.6273889541625977, step time: 17.870426177978516ms\r\n",,terminal_output +14266,13200688,"TERMINAL",0,0,"Step 2052, loss: 1.7748523950576782, step time: 18.31841468811035ms\r\n",,terminal_output +14267,13200794,"TERMINAL",0,0,"Step 2053, loss: 2.106851100921631, step time: 17.7764892578125ms\r\nStep 2054, loss: 1.9473931789398193, step time: 18.053531646728516ms\r\n",,terminal_output +14268,13201103,"TERMINAL",0,0,"Step 2055, loss: 1.7006186246871948, step time: 314.24760818481445ms\r\n",,terminal_output +14269,13201240,"TERMINAL",0,0,"Step 2056, loss: 1.6693766117095947, step time: 25.786399841308594ms\r\nStep 2057, loss: 1.8488324880599976, step time: 20.521879196166992ms\r\n",,terminal_output +14270,13201304,"TERMINAL",0,0,"Step 2058, loss: 1.7577921152114868, step time: 19.38486099243164ms\r\n",,terminal_output +14271,13201388,"TERMINAL",0,0,"Step 2059, loss: 1.6823692321777344, step time: 18.671274185180664ms\r\n",,terminal_output +14272,13201439,"TERMINAL",0,0,"Step 2060, loss: 1.7008702754974365, step time: 18.550395965576172ms\r\n",,terminal_output +14273,13201552,"TERMINAL",0,0,"Step 2061, loss: 1.6329766511917114, step time: 18.266916275024414ms\r\nStep 2062, loss: 2.006359577178955, step time: 18.2802677154541ms\r\n",,terminal_output +14274,13201616,"TERMINAL",0,0,"Step 2063, loss: 1.6928064823150635, step time: 17.903804779052734ms\r\n",,terminal_output +14275,13201680,"TERMINAL",0,0,"Step 2064, loss: 1.7387446165084839, step time: 18.582582473754883ms\r\n",,terminal_output +14276,13201753,"TERMINAL",0,0,"Step 2065, loss: 1.991653323173523, step time: 19.25516128540039ms\r\n",,terminal_output +14277,13201806,"TERMINAL",0,0,"Step 2066, loss: 1.9684367179870605, step time: 18.50104331970215ms\r\n",,terminal_output +14278,13201871,"TERMINAL",0,0,"Step 2067, loss: 2.4129269123077393, step time: 18.3560848236084ms\r\n",,terminal_output +14279,13201939,"TERMINAL",0,0,"Step 2068, loss: 1.6866706609725952, step time: 18.436193466186523ms\r\n",,terminal_output +14280,13202000,"TERMINAL",0,0,"Step 2069, loss: 1.8029237985610962, step time: 
17.986536026000977ms\r\n",,terminal_output +14281,13202066,"TERMINAL",0,0,"Step 2070, loss: 1.736371636390686, step time: 18.546104431152344ms\r\n",,terminal_output +14282,13202132,"TERMINAL",0,0,"Step 2071, loss: 2.0115888118743896, step time: 18.027305603027344ms\r\n",,terminal_output +14283,13202196,"TERMINAL",0,0,"Step 2072, loss: 2.6392126083374023, step time: 19.856691360473633ms\r\n",,terminal_output +14284,13202259,"TERMINAL",0,0,"Step 2073, loss: 1.656765341758728, step time: 18.28908920288086ms\r\n",,terminal_output +14285,13202323,"TERMINAL",0,0,"Step 2074, loss: 1.7438544034957886, step time: 18.178939819335938ms\r\n",,terminal_output +14286,13202388,"TERMINAL",0,0,"Step 2075, loss: 1.5966366529464722, step time: 17.959117889404297ms\r\n",,terminal_output +14287,13202455,"TERMINAL",0,0,"Step 2076, loss: 1.7070753574371338, step time: 18.560171127319336ms\r\n",,terminal_output +14288,13202519,"TERMINAL",0,0,"Step 2077, loss: 1.7129571437835693, step time: 17.82846450805664ms\r\n",,terminal_output +14289,13202613,"TERMINAL",0,0,"Step 2078, loss: 1.9477102756500244, step time: 18.134593963623047ms\r\n",,terminal_output +14290,13202722,"TERMINAL",0,0,"Step 2079, loss: 1.8148163557052612, step time: 18.01133155822754ms\r\nStep 2080, loss: 2.8232429027557373, step time: 18.1734561920166ms\r\n",,terminal_output +14291,13202784,"TERMINAL",0,0,"Step 2081, loss: 1.6511986255645752, step time: 17.78244972229004ms\r\n",,terminal_output +14292,13202848,"TERMINAL",0,0,"Step 2082, loss: 1.7709530591964722, step time: 18.45836639404297ms\r\n",,terminal_output +14293,13202912,"TERMINAL",0,0,"Step 2083, loss: 1.9858847856521606, step time: 17.813920974731445ms\r\n",,terminal_output +14294,13202974,"TERMINAL",0,0,"Step 2084, loss: 1.8451119661331177, step time: 18.33939552307129ms\r\n",,terminal_output +14295,13203033,"TERMINAL",0,0,"Step 2085, loss: 1.6749436855316162, step time: 18.3866024017334ms\r\n",,terminal_output +14296,13203095,"TERMINAL",0,0,"Step 2086, loss: 1.6189213991165161, step time: 27.308940887451172ms\r\n",,terminal_output +14297,13203158,"TERMINAL",0,0,"Step 2087, loss: 1.7017375230789185, step time: 28.42545509338379ms\r\n",,terminal_output +14298,13203222,"TERMINAL",0,0,"Step 2088, loss: 1.6921285390853882, step time: 21.596431732177734ms\r\n",,terminal_output +14299,13203284,"TERMINAL",0,0,"Step 2089, loss: 1.9250067472457886, step time: 19.283771514892578ms\r\n",,terminal_output +14300,13203349,"TERMINAL",0,0,"Step 2090, loss: 1.6751513481140137, step time: 18.95284652709961ms\r\n",,terminal_output +14301,13203413,"TERMINAL",0,0,"Step 2091, loss: 2.327409029006958, step time: 18.770456314086914ms\r\n",,terminal_output +14302,13203480,"TERMINAL",0,0,"Step 2092, loss: 1.89174485206604, step time: 18.505334854125977ms\r\n",,terminal_output +14303,13203546,"TERMINAL",0,0,"Step 2093, loss: 1.8641403913497925, step time: 18.05710792541504ms\r\n",,terminal_output +14304,13203653,"TERMINAL",0,0,"Step 2094, loss: 1.7168240547180176, step time: 18.739700317382812ms\r\n",,terminal_output +14305,13203667,"TERMINAL",0,0,"Step 2095, loss: 1.6824369430541992, step time: 17.932891845703125ms\r\n",,terminal_output +14306,13203767,"TERMINAL",0,0,"Step 2096, loss: 1.9073009490966797, step time: 18.465757369995117ms\r\n",,terminal_output +14307,13203830,"TERMINAL",0,0,"Step 2097, loss: 1.6358678340911865, step time: 18.346071243286133ms\r\n",,terminal_output +14308,13203894,"TERMINAL",0,0,"Step 2098, loss: 1.6058048009872437, step time: 18.30291748046875ms\r\n",,terminal_output 
+14309,13203966,"TERMINAL",0,0,"Step 2099, loss: 1.8498533964157104, step time: 17.99178123474121ms\r\n",,terminal_output +14310,13204047,"TERMINAL",0,0,"Step 2100, loss: 1.744857907295227, step time: 18.517494201660156ms\r\nStep 2101, loss: 1.7336779832839966, step time: 18.1887149810791ms\r\n",,terminal_output +14311,13204145,"TERMINAL",0,0,"Step 2102, loss: 1.8847914934158325, step time: 18.326759338378906ms\r\n",,terminal_output +14312,13204196,"TERMINAL",0,0,"Step 2103, loss: 1.7595056295394897, step time: 18.1884765625ms\r\n",,terminal_output +14313,13204302,"TERMINAL",0,0,"Step 2104, loss: 1.6754660606384277, step time: 19.118309020996094ms\r\nStep 2105, loss: 1.7213226556777954, step time: 17.9290771484375ms\r\n",,terminal_output +14314,13204366,"TERMINAL",0,0,"Step 2106, loss: 1.7023588418960571, step time: 18.67842674255371ms\r\n",,terminal_output +14315,13204429,"TERMINAL",0,0,"Step 2107, loss: 1.6784489154815674, step time: 18.37468147277832ms\r\n",,terminal_output +14316,13204492,"TERMINAL",0,0,"Step 2108, loss: 1.8495391607284546, step time: 18.038511276245117ms\r\n",,terminal_output +14317,13204585,"TERMINAL",0,0,"Step 2109, loss: 2.1783194541931152, step time: 18.573284149169922ms\r\n",,terminal_output +14318,13204690,"TERMINAL",0,0,"Step 2110, loss: 1.6882431507110596, step time: 18.23139190673828ms\r\nStep 2111, loss: 1.6439604759216309, step time: 18.353700637817383ms\r\n",,terminal_output +14319,13204754,"TERMINAL",0,0,"Step 2112, loss: 1.904364824295044, step time: 18.44310760498047ms\r\n",,terminal_output +14320,13204813,"TERMINAL",0,0,"Step 2113, loss: 1.8961185216903687, step time: 18.017292022705078ms\r\n",,terminal_output +14321,13204876,"TERMINAL",0,0,"Step 2114, loss: 1.7430232763290405, step time: 18.196821212768555ms\r\n",,terminal_output +14322,13204940,"TERMINAL",0,0,"Step 2115, loss: 1.6486785411834717, step time: 18.439531326293945ms\r\n",,terminal_output +14323,13205002,"TERMINAL",0,0,"Step 2116, loss: 1.5999916791915894, step time: 18.13340187072754ms\r\n",,terminal_output +14324,13205104,"TERMINAL",0,0,"Step 2117, loss: 1.7551411390304565, step time: 18.07117462158203ms\r\n",,terminal_output +14325,13205169,"TERMINAL",0,0,"Step 2118, loss: 2.325657844543457, step time: 20.20573616027832ms\r\n",,terminal_output +14326,13205231,"TERMINAL",0,0,"Step 2119, loss: 1.6465786695480347, step time: 18.764257431030273ms\r\n",,terminal_output +14327,13205293,"TERMINAL",0,0,"Step 2120, loss: 1.6981970071792603, step time: 18.195629119873047ms\r\n",,terminal_output +14328,13205358,"TERMINAL",0,0,"Step 2121, loss: 2.4304020404815674, step time: 18.445730209350586ms\r\n",,terminal_output +14329,13205419,"TERMINAL",0,0,"Step 2122, loss: 1.6290355920791626, step time: 18.275022506713867ms\r\n",,terminal_output +14330,13205481,"TERMINAL",0,0,"Step 2123, loss: 1.615816354751587, step time: 18.116474151611328ms\r\n",,terminal_output +14331,13205543,"TERMINAL",0,0,"Step 2124, loss: 2.217719078063965, step time: 18.320322036743164ms\r\n",,terminal_output +14332,13205606,"TERMINAL",0,0,"Step 2125, loss: 1.5867550373077393, step time: 18.185138702392578ms\r\n",,terminal_output +14333,13205670,"TERMINAL",0,0,"Step 2126, loss: 2.005845785140991, step time: 18.17631721496582ms\r\n",,terminal_output +14334,13205775,"TERMINAL",0,0,"Step 2127, loss: 1.8142213821411133, step time: 18.424510955810547ms\r\nStep 2128, loss: 1.6265299320220947, step time: 18.100738525390625ms\r\n",,terminal_output +14335,13205869,"TERMINAL",0,0,"Step 2129, loss: 1.583372950553894, step time: 
18.059968948364258ms\r\n",,terminal_output +14336,13205922,"TERMINAL",0,0,"Step 2130, loss: 1.5875025987625122, step time: 18.520832061767578ms\r\n",,terminal_output +14337,13206029,"TERMINAL",0,0,"Step 2131, loss: 1.888100266456604, step time: 18.1884765625ms\r\nStep 2132, loss: 1.872552752494812, step time: 18.102169036865234ms\r\n",,terminal_output +14338,13206139,"TERMINAL",0,0,"Step 2133, loss: 1.6696538925170898, step time: 30.780315399169922ms\r\n",,terminal_output +14339,13206202,"TERMINAL",0,0,"Step 2134, loss: 1.7580528259277344, step time: 18.48435401916504ms\r\n",,terminal_output +14340,13206265,"TERMINAL",0,0,"Step 2135, loss: 1.7444089651107788, step time: 18.056154251098633ms\r\n",,terminal_output +14341,13206327,"TERMINAL",0,0,"Step 2136, loss: 1.731003999710083, step time: 18.613576889038086ms\r\n",,terminal_output +14342,13206396,"TERMINAL",0,0,"Step 2137, loss: 1.8279513120651245, step time: 18.090009689331055ms\r\n",,terminal_output +14343,13206451,"TERMINAL",0,0,"Step 2138, loss: 1.5732113122940063, step time: 18.273591995239258ms\r\n",,terminal_output +14344,13206516,"TERMINAL",0,0,"Step 2139, loss: 2.4958574771881104, step time: 18.398523330688477ms\r\n",,terminal_output +14345,13206575,"TERMINAL",0,0,"Step 2140, loss: 1.606212854385376, step time: 18.08619499206543ms\r\n",,terminal_output +14346,13206680,"TERMINAL",0,0,"Step 2141, loss: 1.7368252277374268, step time: 18.207550048828125ms\r\nStep 2142, loss: 1.9595407247543335, step time: 18.590211868286133ms\r\n",,terminal_output +14347,13206798,"TERMINAL",0,0,"Step 2143, loss: 2.086930990219116, step time: 18.28765869140625ms\r\nStep 2144, loss: 2.0992517471313477, step time: 18.250226974487305ms\r\n",,terminal_output +14348,13206896,"TERMINAL",0,0,"Step 2145, loss: 1.5762321949005127, step time: 18.524169921875ms\r\n",,terminal_output +14349,13206947,"TERMINAL",0,0,"Step 2146, loss: 1.6787680387496948, step time: 18.308401107788086ms\r\n",,terminal_output +14350,13207055,"TERMINAL",0,0,"Step 2147, loss: 2.1001930236816406, step time: 17.78411865234375ms\r\nStep 2148, loss: 1.8421401977539062, step time: 18.552303314208984ms\r\n",,terminal_output +14351,13207151,"TERMINAL",0,0,"Step 2149, loss: 1.82656729221344, step time: 17.856359481811523ms\r\n",,terminal_output +14352,13207256,"TERMINAL",0,0,"Step 2150, loss: 1.7333338260650635, step time: 25.10237693786621ms\r\nStep 2151, loss: 1.9554188251495361, step time: 18.265962600708008ms\r\n",,terminal_output +14353,13207351,"TERMINAL",0,0,"Step 2152, loss: 2.1816210746765137, step time: 18.27836036682129ms\r\n",,terminal_output +14354,13207404,"TERMINAL",0,0,"Step 2153, loss: 1.6624665260314941, step time: 17.779827117919922ms\r\n",,terminal_output +14355,13207511,"TERMINAL",0,0,"Step 2154, loss: 2.104044198989868, step time: 18.419265747070312ms\r\nStep 2155, loss: 1.5917630195617676, step time: 18.18990707397461ms\r\n",,terminal_output +14356,13207607,"TERMINAL",0,0,"Step 2156, loss: 2.1170146465301514, step time: 18.326282501220703ms\r\n",,terminal_output +14357,13207658,"TERMINAL",0,0,"Step 2157, loss: 1.5959912538528442, step time: 18.12601089477539ms\r\n",,terminal_output +14358,13207762,"TERMINAL",0,0,"Step 2158, loss: 1.8180383443832397, step time: 18.172502517700195ms\r\nStep 2159, loss: 1.7259032726287842, step time: 17.844676971435547ms\r\n",,terminal_output +14359,13207858,"TERMINAL",0,0,"Step 2160, loss: 1.7018303871154785, step time: 18.581628799438477ms\r\n",,terminal_output +14360,13207909,"TERMINAL",0,0,"Step 2161, loss: 1.6360160112380981, step time: 
19.361019134521484ms\r\n",,terminal_output +14361,13208036,"TERMINAL",0,0,"Step 2162, loss: 1.6907414197921753, step time: 18.617630004882812ms\r\nStep 2163, loss: 2.0676357746124268, step time: 18.249034881591797ms\r\n",,terminal_output +14362,13208101,"TERMINAL",0,0,"Step 2164, loss: 1.8649661540985107, step time: 18.68581771850586ms\r\n",,terminal_output +14363,13208204,"TERMINAL",0,0,"Step 2165, loss: 1.7411977052688599, step time: 17.95363426208496ms\r\n",,terminal_output +14364,13208255,"TERMINAL",0,0,"Step 2166, loss: 1.6189756393432617, step time: 18.54228973388672ms\r\n",,terminal_output +14365,13208363,"TERMINAL",0,0,"Step 2167, loss: 1.7780612707138062, step time: 17.956256866455078ms\r\nStep 2168, loss: 2.1008872985839844, step time: 18.381595611572266ms\r\n",,terminal_output +14366,13208425,"TERMINAL",0,0,"Step 2169, loss: 1.771884560585022, step time: 18.29838752746582ms\r\n",,terminal_output +14367,13208489,"TERMINAL",0,0,"Step 2170, loss: 1.806522011756897, step time: 18.24498176574707ms\r\n",,terminal_output +14368,13208552,"TERMINAL",0,0,"Step 2171, loss: 1.5902360677719116, step time: 17.93527603149414ms\r\n",,terminal_output +14369,13208663,"TERMINAL",0,0,"Step 2172, loss: 1.6563373804092407, step time: 18.548011779785156ms\r\n",,terminal_output +14370,13208677,"TERMINAL",0,0,"Step 2173, loss: 1.666458010673523, step time: 18.01896095275879ms\r\n",,terminal_output +14371,13208777,"TERMINAL",0,0,"Step 2174, loss: 1.712310552597046, step time: 18.163204193115234ms\r\n",,terminal_output +14372,13208842,"TERMINAL",0,0,"Step 2175, loss: 1.616446852684021, step time: 18.25571060180664ms\r\n",,terminal_output +14373,13208905,"TERMINAL",0,0,"Step 2176, loss: 1.6652414798736572, step time: 18.274545669555664ms\r\n",,terminal_output +14374,13209003,"TERMINAL",0,0,"Step 2177, loss: 1.6414166688919067, step time: 17.77172088623047ms\r\nStep 2178, loss: 1.9935253858566284, step time: 18.561363220214844ms\r\n",,terminal_output +14375,13209068,"TERMINAL",0,0,"Step 2179, loss: 1.8007123470306396, step time: 17.766475677490234ms\r\n",,terminal_output +14376,13209127,"TERMINAL",0,0,"Step 2180, loss: 2.0169925689697266, step time: 18.48745346069336ms\r\n",,terminal_output +14377,13209190,"TERMINAL",0,0,"Step 2181, loss: 1.6987370252609253, step time: 18.17178726196289ms\r\n",,terminal_output +14378,13209249,"TERMINAL",0,0,"Step 2182, loss: 1.622005820274353, step time: 18.082380294799805ms\r\n",,terminal_output +14379,13209341,"TERMINAL",0,0,"Step 2183, loss: 1.68606436252594, step time: 18.818140029907227ms\r\n",,terminal_output +14380,13209393,"TERMINAL",0,0,"Step 2184, loss: 1.5877339839935303, step time: 18.82195472717285ms\r\n",,terminal_output +14381,13209485,"TERMINAL",0,0,"Step 2185, loss: 1.73807692527771, step time: 17.99941062927246ms\r\n",,terminal_output +14382,13209537,"TERMINAL",0,0,"Step 2186, loss: 1.7204904556274414, step time: 18.2497501373291ms\r\n",,terminal_output +14383,13209588,"TERMINAL",0,0,"Step 2187, loss: 1.8863320350646973, step time: 18.274545669555664ms\r\n",,terminal_output +14384,13209692,"TERMINAL",0,0,"Step 2188, loss: 1.515380859375, step time: 18.157958984375ms\r\nStep 2189, loss: 1.8472208976745605, step time: 17.714500427246094ms\r\n",,terminal_output +14385,13209785,"TERMINAL",0,0,"Step 2190, loss: 1.6373679637908936, step time: 18.575191497802734ms\r\n",,terminal_output +14386,13209836,"TERMINAL",0,0,"Step 2191, loss: 1.7524508237838745, step time: 17.87710189819336ms\r\n",,terminal_output +14387,13209945,"TERMINAL",0,0,"Step 2192, loss: 
1.8641153573989868, step time: 18.016576766967773ms\r\nStep 2193, loss: 1.6159584522247314, step time: 18.25571060180664ms\r\n",,terminal_output +14388,13210073,"TERMINAL",0,0,"Step 2194, loss: 2.1164286136627197, step time: 18.163204193115234ms\r\nStep 2195, loss: 2.2055158615112305, step time: 17.83585548400879ms\r\n",,terminal_output +14389,13210135,"TERMINAL",0,0,"Step 2196, loss: 1.6351397037506104, step time: 19.791364669799805ms\r\n",,terminal_output +14390,13210198,"TERMINAL",0,0,"Step 2197, loss: 1.886430025100708, step time: 17.906904220581055ms\r\n",,terminal_output +14391,13210270,"TERMINAL",0,0,"Step 2198, loss: 1.6523821353912354, step time: 18.026113510131836ms\r\n",,terminal_output +14392,13210334,"TERMINAL",0,0,"Step 2199, loss: 1.6611902713775635, step time: 18.123388290405273ms\r\n",,terminal_output +14393,13210392,"TERMINAL",0,0,"Step 2200, loss: 1.5619750022888184, step time: 18.228530883789062ms\r\n",,terminal_output +14394,13210453,"TERMINAL",0,0,"Step 2201, loss: 1.5693538188934326, step time: 18.119096755981445ms\r\n",,terminal_output +14395,13210591,"TERMINAL",0,0,"Step 2202, loss: 2.0329484939575195, step time: 18.49055290222168ms\r\nStep 2203, loss: 1.7150834798812866, step time: 18.05734634399414ms\r\n",,terminal_output +14396,13210645,"TERMINAL",0,0,"Step 2204, loss: 1.7761991024017334, step time: 17.93193817138672ms\r\n",,terminal_output +14397,13210805,"scripts_horeka/train_dynamics.sh",1448,0,"",shellscript,selection_mouse +14398,13210806,"TERMINAL",0,0,"Step 2205, loss: 1.7183935642242432, step time: 18.52130889892578ms\r\n",,terminal_output +14399,13210854,"TERMINAL",0,0,"Step 2206, loss: 1.656522274017334, step time: 18.178462982177734ms\r\nStep 2207, loss: 1.7186542749404907, step time: 18.10431480407715ms\r\n",,terminal_output +14400,13210930,"TERMINAL",0,0,"Step 2208, loss: 1.719881296157837, step time: 18.3560848236084ms\r\n",,terminal_output +14401,13211027,"TERMINAL",0,0,"Step 2209, loss: 2.058093309402466, step time: 18.031597137451172ms\r\nStep 2210, loss: 1.558006763458252, step time: 18.01466941833496ms\r\n",,terminal_output +14402,13211122,"TERMINAL",0,0,"Step 2211, loss: 1.5653808116912842, step time: 21.25835418701172ms\r\n",,terminal_output +14403,13211228,"TERMINAL",0,0,"Step 2212, loss: 1.5879987478256226, step time: 22.379636764526367ms\r\nStep 2213, loss: 1.6064354181289673, step time: 19.709348678588867ms\r\n",,terminal_output +14404,13211576,"TERMINAL",0,0,"Step 2214, loss: 1.6440447568893433, step time: 352.9343605041504ms\r\n",,terminal_output +14405,13211682,"TERMINAL",0,0,"Step 2215, loss: 1.6121234893798828, step time: 25.870323181152344ms\r\n",,terminal_output +14406,13211733,"TERMINAL",0,0,"Step 2216, loss: 2.7958762645721436, step time: 21.045684814453125ms\r\n",,terminal_output +14407,13211895,"TERMINAL",0,0,"Step 2217, loss: 2.2613472938537598, step time: 19.775390625ms\r\nStep 2218, loss: 1.9694395065307617, step time: 18.76044273376465ms\r\n",,terminal_output +14408,13211977,"TERMINAL",0,0,"Step 2219, loss: 1.6456421613693237, step time: 18.530845642089844ms\r\nStep 2220, loss: 1.5113534927368164, step time: 19.725799560546875ms\r\n",,terminal_output +14409,13212031,"TERMINAL",0,0,"Step 2221, loss: 1.8882310390472412, step time: 20.767688751220703ms\r\n",,terminal_output +14410,13212098,"TERMINAL",0,0,"Step 2222, loss: 1.6340231895446777, step time: 19.097566604614258ms\r\n",,terminal_output +14411,13212159,"TERMINAL",0,0,"Step 2223, loss: 1.7501651048660278, step time: 19.199132919311523ms\r\n",,terminal_output 
+14412,13212253,"TERMINAL",0,0,"Step 2224, loss: 1.611076831817627, step time: 18.238306045532227ms\r\n",,terminal_output +14413,13212310,"TERMINAL",0,0,"Step 2225, loss: 1.5565989017486572, step time: 18.320083618164062ms\r\n",,terminal_output +14414,13212379,"TERMINAL",0,0,"Step 2226, loss: 1.70731782913208, step time: 18.808364868164062ms\r\n",,terminal_output +14415,13212421,"TERMINAL",0,0,"Step 2227, loss: 1.5721945762634277, step time: 18.29218864440918ms\r\n",,terminal_output +14416,13212539,"TERMINAL",0,0,"Step 2228, loss: 1.9550539255142212, step time: 18.37158203125ms\r\nStep 2229, loss: 1.66130530834198, step time: 18.621206283569336ms\r\n",,terminal_output +14417,13212647,"TERMINAL",0,0,"Step 2230, loss: 2.4329957962036133, step time: 18.367528915405273ms\r\n",,terminal_output +14418,13212711,"TERMINAL",0,0,"Step 2231, loss: 1.6557133197784424, step time: 18.22519302368164ms\r\n",,terminal_output +14419,13212764,"TERMINAL",0,0,"Step 2232, loss: 1.7133831977844238, step time: 18.541812896728516ms\r\n",,terminal_output +14420,13212870,"TERMINAL",0,0,"Step 2233, loss: 1.7545781135559082, step time: 18.284320831298828ms\r\nStep 2234, loss: 2.794851303100586, step time: 17.992734909057617ms\r\n",,terminal_output +14421,13212963,"TERMINAL",0,0,"Step 2235, loss: 1.9063092470169067, step time: 18.685102462768555ms\r\n",,terminal_output +14422,13213015,"TERMINAL",0,0,"Step 2236, loss: 1.9640376567840576, step time: 19.177913665771484ms\r\n",,terminal_output +14423,13213108,"TERMINAL",0,0,"Step 2237, loss: 1.6290624141693115, step time: 26.637792587280273ms\r\n",,terminal_output +14424,13213161,"TERMINAL",0,0,"Step 2238, loss: 1.6723096370697021, step time: 26.555538177490234ms\r\n",,terminal_output +14425,13213213,"TERMINAL",0,0,"Step 2239, loss: 1.6222947835922241, step time: 24.268388748168945ms\r\n",,terminal_output +14426,13213319,"TERMINAL",0,0,"Step 2240, loss: 1.5616482496261597, step time: 20.411252975463867ms\r\nStep 2241, loss: 1.472292423248291, step time: 21.22187614440918ms\r\n",,terminal_output +14427,13213414,"TERMINAL",0,0,"Step 2242, loss: 1.6761220693588257, step time: 18.867969512939453ms\r\n",,terminal_output +14428,13213467,"TERMINAL",0,0,"Step 2243, loss: 2.320547103881836, step time: 18.28145980834961ms\r\n",,terminal_output +14429,13213574,"TERMINAL",0,0,"Step 2244, loss: 1.855873465538025, step time: 18.589019775390625ms\r\nStep 2245, loss: 2.0011227130889893, step time: 18.161296844482422ms\r\n",,terminal_output +14430,13213637,"TERMINAL",0,0,"Step 2246, loss: 1.633111596107483, step time: 18.17917823791504ms\r\n",,terminal_output +14431,13213701,"TERMINAL",0,0,"Step 2247, loss: 1.5238066911697388, step time: 19.70386505126953ms\r\n",,terminal_output +14432,13213768,"TERMINAL",0,0,"Step 2248, loss: 1.6902244091033936, step time: 18.6922550201416ms\r\n",,terminal_output +14433,13213828,"TERMINAL",0,0,"Step 2249, loss: 1.5804015398025513, step time: 18.223047256469727ms\r\n",,terminal_output +14434,13213894,"TERMINAL",0,0,"Step 2250, loss: 1.6749660968780518, step time: 18.76354217529297ms\r\n",,terminal_output +14435,13213968,"TERMINAL",0,0,"Step 2251, loss: 1.5784552097320557, step time: 18.123149871826172ms\r\n",,terminal_output +14436,13214023,"TERMINAL",0,0,"Step 2252, loss: 1.8001978397369385, step time: 18.162250518798828ms\r\n",,terminal_output +14437,13214087,"TERMINAL",0,0,"Step 2253, loss: 1.7332674264907837, step time: 18.329143524169922ms\r\n",,terminal_output +14438,13214166,"TERMINAL",0,0,"Step 2254, loss: 1.7385494709014893, step time: 
18.199443817138672ms\r\n",,terminal_output +14439,13214230,"TERMINAL",0,0,"Step 2255, loss: 1.5822505950927734, step time: 17.905473709106445ms\r\n",,terminal_output +14440,13214294,"TERMINAL",0,0,"Step 2256, loss: 1.944952368736267, step time: 18.69511604309082ms\r\n",,terminal_output +14441,13214401,"TERMINAL",0,0,"Step 2257, loss: 2.2826080322265625, step time: 17.812490463256836ms\r\n",,terminal_output +14442,13214464,"TERMINAL",0,0,"Step 2258, loss: 1.694263219833374, step time: 20.791292190551758ms\r\nStep 2259, loss: 1.5932209491729736, step time: 18.259763717651367ms\r\n",,terminal_output +14443,13214569,"TERMINAL",0,0,"Step 2260, loss: 1.7188502550125122, step time: 18.207788467407227ms\r\n",,terminal_output +14444,13214635,"TERMINAL",0,0,"Step 2261, loss: 1.5124692916870117, step time: 18.100738525390625ms\r\n",,terminal_output +14445,13214697,"TERMINAL",0,0,"Step 2262, loss: 1.595391869544983, step time: 18.543243408203125ms\r\n",,terminal_output +14446,13214760,"TERMINAL",0,0,"Step 2263, loss: 1.6485543251037598, step time: 18.041133880615234ms\r\n",,terminal_output +14447,13214824,"TERMINAL",0,0,"Step 2264, loss: 1.755618691444397, step time: 18.18251609802246ms\r\n",,terminal_output +14448,13214891,"TERMINAL",0,0,"Step 2265, loss: 1.6574956178665161, step time: 18.682241439819336ms\r\n",,terminal_output +14449,13214971,"TERMINAL",0,0,"Step 2266, loss: 1.501171588897705, step time: 18.204927444458008ms\r\n",,terminal_output +14450,13215049,"TERMINAL",0,0,"Step 2267, loss: 1.9777255058288574, step time: 18.12911033630371ms\r\nStep 2268, loss: 1.5415366888046265, step time: 18.656015396118164ms\r\n",,terminal_output +14451,13215109,"TERMINAL",0,0,"Step 2269, loss: 1.555641770362854, step time: 17.83442497253418ms\r\n",,terminal_output +14452,13215174,"TERMINAL",0,0,"Step 2270, loss: 1.5321847200393677, step time: 18.265247344970703ms\r\n",,terminal_output +14453,13215270,"TERMINAL",0,0,"Step 2271, loss: 1.7846672534942627, step time: 18.21589469909668ms\r\n",,terminal_output +14454,13215324,"TERMINAL",0,0,"Step 2272, loss: 1.5722498893737793, step time: 17.91071891784668ms\r\n",,terminal_output +14455,13215437,"TERMINAL",0,0,"Step 2273, loss: 1.5805555582046509, step time: 17.826557159423828ms\r\nStep 2274, loss: 1.9723540544509888, step time: 18.510818481445312ms\r\n",,terminal_output +14456,13215498,"TERMINAL",0,0,"Step 2275, loss: 1.8132777214050293, step time: 18.11957359313965ms\r\n",,terminal_output +14457,13215556,"TERMINAL",0,0,"Step 2276, loss: 1.9646986722946167, step time: 18.00990104675293ms\r\n",,terminal_output +14458,13215619,"TERMINAL",0,0,"Step 2277, loss: 2.018483877182007, step time: 18.55945587158203ms\r\n",,terminal_output +14459,13215682,"TERMINAL",0,0,"Step 2278, loss: 1.547558307647705, step time: 18.061399459838867ms\r\n",,terminal_output +14460,13215742,"TERMINAL",0,0,"Step 2279, loss: 1.616882085800171, step time: 18.06497573852539ms\r\n",,terminal_output +14461,13215806,"TERMINAL",0,0,"Step 2280, loss: 1.5733755826950073, step time: 19.716501235961914ms\r\n",,terminal_output +14462,13215912,"TERMINAL",0,0,"Step 2281, loss: 1.7868220806121826, step time: 21.662473678588867ms\r\n",,terminal_output +14463,13215998,"TERMINAL",0,0,"Step 2282, loss: 2.058131694793701, step time: 19.229650497436523ms\r\nStep 2283, loss: 1.643549919128418, step time: 18.656015396118164ms\r\n",,terminal_output +14464,13216062,"TERMINAL",0,0,"Step 2284, loss: 1.807004690170288, step time: 18.494844436645508ms\r\n",,terminal_output +14465,13216162,"TERMINAL",0,0,"Step 2285, 
loss: 2.0130016803741455, step time: 17.892837524414062ms\r\n",,terminal_output +14466,13216223,"TERMINAL",0,0,"Step 2286, loss: 1.5027012825012207, step time: 18.665075302124023ms\r\n",,terminal_output +14467,13216288,"TERMINAL",0,0,"Step 2287, loss: 1.5398188829421997, step time: 17.840862274169922ms\r\n",,terminal_output +14468,13216348,"TERMINAL",0,0,"Step 2288, loss: 2.531337261199951, step time: 17.998456954956055ms\r\n",,terminal_output +14469,13216409,"TERMINAL",0,0,"Step 2289, loss: 1.9762256145477295, step time: 18.24951171875ms\r\n",,terminal_output +14470,13216470,"TERMINAL",0,0,"Step 2290, loss: 1.5854307413101196, step time: 18.36991310119629ms\r\n",,terminal_output +14471,13216542,"TERMINAL",0,0,"Step 2291, loss: 1.5330586433410645, step time: 17.970800399780273ms\r\n",,terminal_output +14472,13216597,"TERMINAL",0,0,"Step 2292, loss: 1.5518851280212402, step time: 18.54681968688965ms\r\n",,terminal_output +14473,13216659,"TERMINAL",0,0,"Step 2293, loss: 1.5993084907531738, step time: 18.21160316467285ms\r\n",,terminal_output +14474,13216770,"TERMINAL",0,0,"Step 2294, loss: 1.8999985456466675, step time: 18.214702606201172ms\r\nStep 2295, loss: 1.662448763847351, step time: 18.89657974243164ms\r\n",,terminal_output +14475,13216833,"TERMINAL",0,0,"Step 2296, loss: 1.4264901876449585, step time: 18.41259002685547ms\r\n",,terminal_output +14476,13216902,"TERMINAL",0,0,"Step 2297, loss: 1.7654284238815308, step time: 18.859386444091797ms\r\n",,terminal_output +14477,13216975,"TERMINAL",0,0,"Step 2298, loss: 1.7136578559875488, step time: 19.320249557495117ms\r\n",,terminal_output +14478,13217029,"TERMINAL",0,0,"Step 2299, loss: 1.5184597969055176, step time: 18.130779266357422ms\r\n",,terminal_output +14479,13217121,"TERMINAL",0,0,"Step 2300, loss: 2.000058650970459, step time: 18.030643463134766ms\r\n",,terminal_output +14480,13217172,"TERMINAL",0,0,"Step 2301, loss: 1.7369136810302734, step time: 18.96214485168457ms\r\n",,terminal_output +14481,13217265,"TERMINAL",0,0,"Step 2302, loss: 1.4927791357040405, step time: 18.034696578979492ms\r\n",,terminal_output +14482,13217316,"TERMINAL",0,0,"Step 2303, loss: 1.883691668510437, step time: 18.11075210571289ms\r\n",,terminal_output +14483,13217368,"TERMINAL",0,0,"Step 2304, loss: 2.3828060626983643, step time: 18.328189849853516ms\r\n",,terminal_output +14484,13217478,"TERMINAL",0,0,"Step 2305, loss: 1.6507326364517212, step time: 18.093585968017578ms\r\nStep 2306, loss: 1.698379635810852, step time: 17.81153678894043ms\r\n",,terminal_output +14485,13217567,"TERMINAL",0,0,"Step 2307, loss: 1.5987423658370972, step time: 18.588781356811523ms\r\n",,terminal_output +14486,13217626,"TERMINAL",0,0,"Step 2308, loss: 1.6157561540603638, step time: 20.285606384277344ms\r\n",,terminal_output +14487,13217687,"TERMINAL",0,0,"Step 2309, loss: 1.822896122932434, step time: 18.282413482666016ms\r\n",,terminal_output +14488,13217750,"TERMINAL",0,0,"Step 2310, loss: 1.6140539646148682, step time: 18.277645111083984ms\r\n",,terminal_output +14489,13217812,"TERMINAL",0,0,"Step 2311, loss: 1.5468559265136719, step time: 18.131732940673828ms\r\n",,terminal_output +14490,13217876,"TERMINAL",0,0,"Step 2312, loss: 1.6449873447418213, step time: 17.682313919067383ms\r\n",,terminal_output +14491,13217939,"TERMINAL",0,0,"Step 2313, loss: 1.6897916793823242, step time: 18.425703048706055ms\r\n",,terminal_output +14492,13218045,"TERMINAL",0,0,"Step 2314, loss: 1.843456745147705, step time: 17.672061920166016ms\r\nStep 2315, loss: 1.6389331817626953, step time: 
17.596721649169922ms\r\n",,terminal_output +14493,13218112,"TERMINAL",0,0,"Step 2316, loss: 1.5692436695098877, step time: 18.16082000732422ms\r\n",,terminal_output +14494,13218172,"TERMINAL",0,0,"Step 2317, loss: 1.6043282747268677, step time: 18.12124252319336ms\r\n",,terminal_output +14495,13218249,"TERMINAL",0,0,"Step 2318, loss: 1.5788346529006958, step time: 18.074750900268555ms\r\n",,terminal_output +14496,13218302,"TERMINAL",0,0,"Step 2319, loss: 1.569230079650879, step time: 18.459558486938477ms\r\n",,terminal_output +14497,13218364,"TERMINAL",0,0,"Step 2320, loss: 1.5422489643096924, step time: 17.944812774658203ms\r\n",,terminal_output +14498,13218428,"TERMINAL",0,0,"Step 2321, loss: 1.7057926654815674, step time: 17.615795135498047ms\r\n",,terminal_output +14499,13218492,"TERMINAL",0,0,"Step 2322, loss: 1.5185232162475586, step time: 18.177509307861328ms\r\n",,terminal_output +14500,13218558,"TERMINAL",0,0,"Step 2323, loss: 1.6991591453552246, step time: 18.010377883911133ms\r\n",,terminal_output +14501,13218632,"TERMINAL",0,0,"Step 2324, loss: 2.9635376930236816, step time: 17.945289611816406ms\r\n",,terminal_output +14502,13218684,"TERMINAL",0,0,"Step 2325, loss: 1.6698211431503296, step time: 18.460988998413086ms\r\n",,terminal_output +14503,13218792,"TERMINAL",0,0,"Step 2326, loss: 1.5768637657165527, step time: 30.886173248291016ms\r\n",,terminal_output +14504,13218855,"TERMINAL",0,0,"Step 2327, loss: 1.5266510248184204, step time: 18.401622772216797ms\r\n",,terminal_output +14505,13218919,"TERMINAL",0,0,"Step 2328, loss: 1.6181610822677612, step time: 18.208980560302734ms\r\n",,terminal_output +14506,13218980,"TERMINAL",0,0,"Step 2329, loss: 1.4543941020965576, step time: 18.178224563598633ms\r\n",,terminal_output +14507,13219044,"TERMINAL",0,0,"Step 2330, loss: 1.8188780546188354, step time: 18.036603927612305ms\r\n",,terminal_output +14508,13219105,"TERMINAL",0,0,"Step 2331, loss: 2.057976245880127, step time: 18.04804801940918ms\r\n",,terminal_output +14509,13219168,"TERMINAL",0,0,"Step 2332, loss: 1.6744537353515625, step time: 18.100500106811523ms\r\n",,terminal_output +14510,13219229,"TERMINAL",0,0,"Step 2333, loss: 2.1700613498687744, step time: 18.082857131958008ms\r\n",,terminal_output +14511,13219339,"TERMINAL",0,0,"Step 2334, loss: 2.0009238719940186, step time: 18.38397979736328ms\r\nStep 2335, loss: 1.6241888999938965, step time: 17.898082733154297ms\r\n",,terminal_output +14512,13219433,"TERMINAL",0,0,"Step 2336, loss: 1.5306347608566284, step time: 17.830610275268555ms\r\n",,terminal_output +14513,13219485,"TERMINAL",0,0,"Step 2337, loss: 1.5553532838821411, step time: 18.332958221435547ms\r\n",,terminal_output +14514,13219592,"TERMINAL",0,0,"Step 2338, loss: 1.7913209199905396, step time: 17.818450927734375ms\r\nStep 2339, loss: 1.7975986003875732, step time: 18.02229881286621ms\r\n",,terminal_output +14515,13219687,"TERMINAL",0,0,"Step 2340, loss: 1.5831972360610962, step time: 18.340110778808594ms\r\n",,terminal_output +14516,13219799,"TERMINAL",0,0,"Step 2341, loss: 1.7127861976623535, step time: 18.029451370239258ms\r\nStep 2342, loss: 2.4032399654388428, step time: 17.840862274169922ms\r\n",,terminal_output +14517,13219864,"TERMINAL",0,0,"Step 2343, loss: 1.5328890085220337, step time: 18.234968185424805ms\r\n",,terminal_output +14518,13219929,"TERMINAL",0,0,"Step 2344, loss: 1.6467036008834839, step time: 21.889925003051758ms\r\n",,terminal_output +14519,13220031,"TERMINAL",0,0,"Step 2345, loss: 1.5263618230819702, step time: 
19.714832305908203ms\r\nStep 2346, loss: 1.568703532218933, step time: 18.127918243408203ms\r\n",,terminal_output +14520,13220127,"TERMINAL",0,0,"Step 2347, loss: 1.6892423629760742, step time: 17.56143569946289ms\r\n",,terminal_output +14521,13220190,"TERMINAL",0,0,"Step 2348, loss: 1.6413841247558594, step time: 18.106937408447266ms\r\n",,terminal_output +14522,13220505,"TERMINAL",0,0,"Step 2349, loss: 1.6621086597442627, step time: 344.79331970214844ms\r\n",,terminal_output +14523,13220647,"TERMINAL",0,0,"Step 2350, loss: 1.5018311738967896, step time: 25.929689407348633ms\r\nStep 2351, loss: 2.641185998916626, step time: 20.244598388671875ms\r\n",,terminal_output +14524,13220704,"TERMINAL",0,0,"Step 2352, loss: 1.568153738975525, step time: 19.04129981994629ms\r\n",,terminal_output +14525,13220767,"TERMINAL",0,0,"Step 2353, loss: 1.4985836744308472, step time: 18.36228370666504ms\r\n",,terminal_output +14526,13220833,"TERMINAL",0,0,"Step 2354, loss: 1.5027579069137573, step time: 18.282175064086914ms\r\n",,terminal_output +14527,13220921,"TERMINAL",0,0,"Step 2355, loss: 1.6265710592269897, step time: 18.826007843017578ms\r\n",,terminal_output +14528,13221022,"TERMINAL",0,0,"Step 2356, loss: 2.138997793197632, step time: 18.203258514404297ms\r\nStep 2357, loss: 1.8608449697494507, step time: 17.841815948486328ms\r\n",,terminal_output +14529,13221115,"TERMINAL",0,0,"Step 2358, loss: 2.1754040718078613, step time: 18.334627151489258ms\r\n",,terminal_output +14530,13221223,"TERMINAL",0,0,"Step 2359, loss: 2.4063611030578613, step time: 18.074750900268555ms\r\nStep 2360, loss: 1.49574875831604, step time: 18.318653106689453ms\r\n",,terminal_output +14531,13221284,"TERMINAL",0,0,"Step 2361, loss: 1.4780629873275757, step time: 19.84715461730957ms\r\n",,terminal_output +14532,13221378,"TERMINAL",0,0,"Step 2362, loss: 1.510003924369812, step time: 23.90575408935547ms\r\n",,terminal_output +14533,13221486,"TERMINAL",0,0,"Step 2363, loss: 1.9125068187713623, step time: 19.90365982055664ms\r\nStep 2364, loss: 1.6172314882278442, step time: 19.065141677856445ms\r\n",,terminal_output +14534,13221600,"TERMINAL",0,0,"Step 2365, loss: 1.5409324169158936, step time: 18.25237274169922ms\r\nStep 2366, loss: 1.5228779315948486, step time: 19.01078224182129ms\r\n",,terminal_output +14535,13221664,"TERMINAL",0,0,"Step 2367, loss: 2.549546718597412, step time: 23.83255958557129ms\r\n",,terminal_output +14536,13221734,"TERMINAL",0,0,"Step 2368, loss: 1.6390591859817505, step time: 25.226354598999023ms\r\n",,terminal_output +14537,13221802,"TERMINAL",0,0,"Step 2369, loss: 1.4962809085845947, step time: 25.949954986572266ms\r\n",,terminal_output +14538,13221871,"TERMINAL",0,0,"Step 2370, loss: 1.549001932144165, step time: 26.077747344970703ms\r\n",,terminal_output +14539,13221935,"TERMINAL",0,0,"Step 2371, loss: 1.5715688467025757, step time: 25.94447135925293ms\r\n",,terminal_output +14540,13222000,"TERMINAL",0,0,"Step 2372, loss: 1.4762778282165527, step time: 25.948286056518555ms\r\n",,terminal_output +14541,13222111,"TERMINAL",0,0,"Step 2373, loss: 1.8578499555587769, step time: 26.35931968688965ms\r\n",,terminal_output +14542,13222173,"TERMINAL",0,0,"Step 2374, loss: 1.6145293712615967, step time: 25.646209716796875ms\r\n",,terminal_output +14543,13222234,"TERMINAL",0,0,"Step 2375, loss: 2.013335943222046, step time: 23.179292678833008ms\r\n",,terminal_output +14544,13222294,"TERMINAL",0,0,"Step 2376, loss: 1.631669521331787, step time: 21.594524383544922ms\r\n",,terminal_output 
+14545,13222364,"TERMINAL",0,0,"Step 2377, loss: 1.4933980703353882, step time: 18.84746551513672ms\r\n",,terminal_output +14546,13222420,"TERMINAL",0,0,"Step 2378, loss: 1.4489727020263672, step time: 18.623828887939453ms\r\n",,terminal_output +14547,13222481,"TERMINAL",0,0,"Step 2379, loss: 1.8632230758666992, step time: 18.528461456298828ms\r\n",,terminal_output +14548,13222540,"TERMINAL",0,0,"Step 2380, loss: 1.425231695175171, step time: 18.070459365844727ms\r\n",,terminal_output +14549,13222600,"TERMINAL",0,0,"Step 2381, loss: 1.5073331594467163, step time: 17.812490463256836ms\r\n",,terminal_output +14550,13222663,"TERMINAL",0,0,"Step 2382, loss: 1.7183159589767456, step time: 18.730878829956055ms\r\n",,terminal_output +14551,13222770,"TERMINAL",0,0,"Step 2383, loss: 1.6644935607910156, step time: 17.918109893798828ms\r\n",,terminal_output +14552,13222854,"TERMINAL",0,0,"Step 2384, loss: 1.8247536420822144, step time: 18.416166305541992ms\r\nStep 2385, loss: 2.3764867782592773, step time: 18.463850021362305ms\r\n",,terminal_output +14553,13222912,"TERMINAL",0,0,"Step 2386, loss: 1.6647124290466309, step time: 18.064022064208984ms\r\n",,terminal_output +14554,13223026,"TERMINAL",0,0,"Step 2387, loss: 1.6041955947875977, step time: 18.013954162597656ms\r\nStep 2388, loss: 1.9071485996246338, step time: 18.7835693359375ms\r\n",,terminal_output +14555,13223118,"TERMINAL",0,0,"Step 2389, loss: 1.6902556419372559, step time: 26.975154876708984ms\r\n",,terminal_output +14556,13223170,"TERMINAL",0,0,"Step 2390, loss: 1.3865424394607544, step time: 22.666454315185547ms\r\n",,terminal_output +14557,13223264,"TERMINAL",0,0,"Step 2391, loss: 2.047118902206421, step time: 20.630359649658203ms\r\n",,terminal_output +14558,13223382,"TERMINAL",0,0,"Step 2392, loss: 1.719921588897705, step time: 18.9974308013916ms\r\nStep 2393, loss: 1.5211001634597778, step time: 18.907785415649414ms\r\n",,terminal_output +14559,13223441,"TERMINAL",0,0,"Step 2394, loss: 1.8018406629562378, step time: 18.783092498779297ms\r\n",,terminal_output +14560,13223509,"TERMINAL",0,0,"Step 2395, loss: 1.4414231777191162, step time: 18.2797908782959ms\r\n",,terminal_output +14561,13223570,"TERMINAL",0,0,"Step 2396, loss: 1.5018999576568604, step time: 18.029212951660156ms\r\n",,terminal_output +14562,13223633,"TERMINAL",0,0,"Step 2397, loss: 1.477857232093811, step time: 18.337011337280273ms\r\n",,terminal_output +14563,13223683,"TERMINAL",0,0,"Step 2398, loss: 1.5756568908691406, step time: 18.0971622467041ms\r\n",,terminal_output +14564,13223778,"TERMINAL",0,0,"Step 2399, loss: 1.5346781015396118, step time: 17.983436584472656ms\r\n",,terminal_output +14565,13223839,"TERMINAL",0,0,"Step 2400, loss: 1.609419345855713, step time: 18.619060516357422ms\r\n",,terminal_output +14566,13223891,"TERMINAL",0,0,"Step 2401, loss: 1.571838140487671, step time: 18.201112747192383ms\r\n",,terminal_output +14567,13223997,"TERMINAL",0,0,"Step 2402, loss: 1.5510616302490234, step time: 18.132448196411133ms\r\nStep 2403, loss: 2.6586339473724365, step time: 18.679380416870117ms\r\n",,terminal_output +14568,13224059,"TERMINAL",0,0,"Step 2404, loss: 1.8072786331176758, step time: 18.1732177734375ms\r\n",,terminal_output +14569,13224161,"TERMINAL",0,0,"Step 2405, loss: 1.6313011646270752, step time: 18.021345138549805ms\r\n",,terminal_output +14570,13224213,"TERMINAL",0,0,"Step 2406, loss: 1.8781952857971191, step time: 18.598318099975586ms\r\n",,terminal_output +14571,13224319,"TERMINAL",0,0,"Step 2407, loss: 1.6268219947814941, step time: 
18.15199851989746ms\r\nStep 2408, loss: 1.6860005855560303, step time: 18.091917037963867ms\r\n",,terminal_output +14572,13224413,"TERMINAL",0,0,"Step 2409, loss: 1.5547682046890259, step time: 18.582582473754883ms\r\n",,terminal_output +14573,13224466,"TERMINAL",0,0,"Step 2410, loss: 1.5198228359222412, step time: 17.965078353881836ms\r\n",,terminal_output +14574,13224572,"TERMINAL",0,0,"Step 2411, loss: 1.4670932292938232, step time: 17.771482467651367ms\r\nStep 2412, loss: 1.5432417392730713, step time: 18.48578453063965ms\r\n",,terminal_output +14575,13224635,"TERMINAL",0,0,"Step 2413, loss: 1.5434019565582275, step time: 18.145322799682617ms\r\n",,terminal_output +14576,13224698,"TERMINAL",0,0,"Step 2414, loss: 1.4061230421066284, step time: 18.091440200805664ms\r\n",,terminal_output +14577,13224792,"TERMINAL",0,0,"Step 2415, loss: 1.4593250751495361, step time: 18.447160720825195ms\r\n",,terminal_output +14578,13224843,"TERMINAL",0,0,"Step 2416, loss: 1.4970060586929321, step time: 18.115520477294922ms\r\n",,terminal_output +14579,13224939,"TERMINAL",0,0,"Step 2417, loss: 1.4640251398086548, step time: 18.183231353759766ms\r\n",,terminal_output +14580,13225013,"TERMINAL",0,0,"Step 2418, loss: 1.4751193523406982, step time: 18.48769187927246ms\r\nStep 2419, loss: 1.7096763849258423, step time: 17.74430274963379ms\r\n",,terminal_output +14581,13225079,"TERMINAL",0,0,"Step 2420, loss: 1.6043763160705566, step time: 17.763376235961914ms\r\n",,terminal_output +14582,13225172,"TERMINAL",0,0,"Step 2421, loss: 2.1193253993988037, step time: 18.34583282470703ms\r\n",,terminal_output +14583,13225278,"TERMINAL",0,0,"Step 2422, loss: 1.5257271528244019, step time: 18.0509090423584ms\r\nStep 2423, loss: 2.0096750259399414, step time: 18.057584762573242ms\r\n",,terminal_output +14584,13225339,"TERMINAL",0,0,"Step 2424, loss: 1.4504462480545044, step time: 18.372058868408203ms\r\n",,terminal_output +14585,13225399,"TERMINAL",0,0,"Step 2425, loss: 1.669040560722351, step time: 18.25857162475586ms\r\n",,terminal_output +14586,13225491,"TERMINAL",0,0,"Step 2426, loss: 1.4618791341781616, step time: 17.96102523803711ms\r\n",,terminal_output +14587,13225613,"TERMINAL",0,0,"Step 2427, loss: 1.560042381286621, step time: 18.26190948486328ms\r\nStep 2428, loss: 1.6427102088928223, step time: 19.08421516418457ms\r\n",,terminal_output +14588,13225665,"TERMINAL",0,0,"Step 2429, loss: 1.5251669883728027, step time: 17.827987670898438ms\r\n",,terminal_output +14589,13225760,"TERMINAL",0,0,"Step 2430, loss: 1.7354755401611328, step time: 18.301963806152344ms\r\n",,terminal_output +14590,13225811,"TERMINAL",0,0,"Step 2431, loss: 1.4572309255599976, step time: 18.187522888183594ms\r\n",,terminal_output +14591,13225876,"TERMINAL",0,0,"Step 2432, loss: 1.7192637920379639, step time: 17.850160598754883ms\r\n",,terminal_output +14592,13225933,"TERMINAL",0,0,"Step 2433, loss: 1.5410408973693848, step time: 18.670082092285156ms\r\n",,terminal_output +14593,13225997,"TERMINAL",0,0,"Step 2434, loss: 1.6333526372909546, step time: 17.90165901184082ms\r\n",,terminal_output +14594,13226044,"TERMINAL",0,0,"Step 2435, loss: 2.326345920562744, step time: 17.728090286254883ms\r\n",,terminal_output +14595,13226107,"TERMINAL",0,0,"Step 2436, loss: 1.6181632280349731, step time: 18.28145980834961ms\r\n",,terminal_output +14596,13226171,"TERMINAL",0,0,"Step 2437, loss: 2.6131205558776855, step time: 18.022775650024414ms\r\n",,terminal_output +14597,13226266,"TERMINAL",0,0,"Step 2438, loss: 1.5246702432632446, step time: 
18.091201782226562ms\r\n",,terminal_output +14598,13226317,"TERMINAL",0,0,"Step 2439, loss: 2.0061194896698, step time: 18.40519905090332ms\r\n",,terminal_output +14599,13226423,"TERMINAL",0,0,"Step 2440, loss: 1.6448818445205688, step time: 18.0661678314209ms\r\nStep 2441, loss: 1.444480061531067, step time: 18.00704002380371ms\r\n",,terminal_output +14600,13226489,"TERMINAL",0,0,"Step 2442, loss: 1.809045433998108, step time: 18.401145935058594ms\r\n",,terminal_output +14601,13226549,"TERMINAL",0,0,"Step 2443, loss: 1.5150861740112305, step time: 17.89236068725586ms\r\n",,terminal_output +14602,13226611,"TERMINAL",0,0,"Step 2444, loss: 1.7642532587051392, step time: 18.242835998535156ms\r\n",,terminal_output +14603,13226676,"TERMINAL",0,0,"Step 2445, loss: 1.4501086473464966, step time: 21.11029624938965ms\r\n",,terminal_output +14604,13226738,"TERMINAL",0,0,"Step 2446, loss: 2.1880929470062256, step time: 18.211841583251953ms\r\n",,terminal_output +14605,13226800,"TERMINAL",0,0,"Step 2447, loss: 1.419335126876831, step time: 18.180131912231445ms\r\n",,terminal_output +14606,13226863,"TERMINAL",0,0,"Step 2448, loss: 2.0647904872894287, step time: 18.579959869384766ms\r\n",,terminal_output +14607,13226928,"TERMINAL",0,0,"Step 2449, loss: 1.6762830018997192, step time: 18.10622215270996ms\r\n",,terminal_output +14608,13226996,"TERMINAL",0,0,"Step 2450, loss: 1.389452338218689, step time: 18.137693405151367ms\r\n",,terminal_output +14609,13227119,"TERMINAL",0,0,"Step 2451, loss: 1.429916262626648, step time: 18.576622009277344ms\r\nStep 2452, loss: 2.1449596881866455, step time: 18.032312393188477ms\r\n",,terminal_output +14610,13227246,"TERMINAL",0,0,"Step 2453, loss: 1.9482700824737549, step time: 17.724275588989258ms\r\nStep 2454, loss: 1.3604100942611694, step time: 18.229246139526367ms\r\n",,terminal_output +14611,13227310,"TERMINAL",0,0,"Step 2455, loss: 1.6908150911331177, step time: 18.114805221557617ms\r\n",,terminal_output +14612,13227414,"TERMINAL",0,0,"Step 2456, loss: 1.5235055685043335, step time: 17.943143844604492ms\r\n",,terminal_output +14613,13227539,"TERMINAL",0,0,"Step 2457, loss: 1.8103104829788208, step time: 18.215656280517578ms\r\nStep 2458, loss: 1.1382665634155273, step time: 22.701740264892578ms\r\n^C",,terminal_output +14614,13227603,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 241, in <module>\r\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 341, in __format__\r\n return format(self._value[()], format_spec)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 641, in _value\r\n npy_value, did_copy = self._single_device_array_to_np_array_did_copy()\r\nKeyboardInterrupt\r\n",,terminal_output +14615,13227743,"TERMINAL",0,0,"^CException ignored in atexit callback: .teardown_atexit at 0x14fe40102680>\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 94, in teardown_atexit\r\n 
conn.teardown(hooks.exit_code)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 226, in teardown\r\n self._router.join()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/interface/router.py"", line 75, in join\r\n self._thread.join()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1096, in join\r\n self._wait_for_tstate_lock()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1116, in _wait_for_tstate_lock\r\n if lock.acquire(block, timeout):\r\nKeyboardInterrupt: \r\n",,terminal_output +14616,13227981,"TERMINAL",0,0,"^CException ignored in: <function WeakKeyDictionary.__init__.<locals>.remove at 0x14fe9de72710>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +14617,13228175,"TERMINAL",0,0,"^CException ignored in: <function WeakKeyDictionary.__init__.<locals>.remove at 0x14fe9de72710>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +14618,13228679,"TERMINAL",0,0,"^CException ignored in atexit callback: \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3169, in clean_up\r\n clear_caches()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3196, in clear_caches\r\n xc._xla.PjitFunctionCache.clear_all()\r\nKeyboardInterrupt: \r\n",,terminal_output +14619,13228873,"TERMINAL",0,0,"^C",,terminal_output +14620,13229110,"TERMINAL",0,0,"^C",,terminal_output +14621,13229233,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +14622,13229412,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +14623,13231155,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +14624,13232636,"scripts_horeka/train_dynamics.sh",870,0,"",shellscript,selection_mouse +14625,13233813,"scripts_horeka/train_dynamics.sh",869,1,"",shellscript,content +14626,13233908,"scripts_horeka/train_dynamics.sh",869,0,"3",shellscript,content +14627,13233909,"scripts_horeka/train_dynamics.sh",870,0,"",shellscript,selection_keyboard +14628,13234026,"scripts_horeka/train_dynamics.sh",890,0,"",shellscript,selection_command +14629,13234565,"scripts_horeka/train_dynamics.sh",889,1,"",shellscript,content +14630,13234874,"scripts_horeka/train_dynamics.sh",889,0,"3",shellscript,content +14631,13234875,"scripts_horeka/train_dynamics.sh",890,0,"",shellscript,selection_keyboard +14632,13236690,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +14633,13238208,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +14634,13238657,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+14635,13239128,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3458179\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0408\r\nSLURM_JOB_START_TIME=1751296933\r\nSLURM_STEP_NODELIST=hkn0408\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300533\r\nSLURM_PMI2_SRUN_PORT=38655\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306855\r\nSLURM_PTY_PORT=36241\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e12.hkn0408\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0408\r\nSLURM_SRUN_COMM_PORT=42547\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306855\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0408\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=42547\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0408\r\n",,terminal_output +14636,13245432,"TERMINAL",0,0,"2025-06-30 18:04:49.825951: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751299489.839696 3503931 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751299489.844242 3503931 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751299489.857210 3503931 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751299489.857230 3503931 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751299489.857232 3503931 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751299489.857234 3503931 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +14637,13254908,"TERMINAL",0,0,"W0000 00:00:1751299499.335687 3503931 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +14638,13255276,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +14639,13256184,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +14640,13256749,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_180500-6dr2ybgc\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-tiny-overfit-big-lr-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/6dr2ybgc\r\n",,terminal_output +14641,13258247,"TERMINAL",0,0,"2025-06-30 18:05:02.669058: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14642,13271416,"TERMINAL",0,0,"2025-06-30 18:05:15.837324: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14643,13287189,"TERMINAL",0,0,"2025-06-30 18:05:31.561422: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14644,13294467,"TERMINAL",0,0,"2025-06-30 18:05:38.881280: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14645,13314212,"TERMINAL",0,0,"batch shape: (1, 16, 90, 160, 3)\r\n",,terminal_output +14646,13325174,"TERMINAL",0,0,"2025-06-30 18:06:09.601975: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:06:09.602584: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:06:09.602699: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:06:09.603363: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:06:09.604451: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +14647,13372640,"TERMINAL",0,0,"Step 0, loss: 8.689549446105957, step time: 58286.63110733032ms\r\n",,terminal_output +14648,13372701,"TERMINAL",0,0,"Step 1, loss: 8.141585350036621, step time: 28.000354766845703ms\r\n",,terminal_output +14649,13372811,"TERMINAL",0,0,"Step 2, loss: nan, step time: 21.036624908447266ms\r\n",,terminal_output +14650,13372872,"TERMINAL",0,0,"Step 3, loss: 7.575166702270508, step time: 19.42300796508789ms\r\n",,terminal_output +14651,13372932,"TERMINAL",0,0,"Step 4, loss: 7.437936305999756, step time: 18.640518188476562ms\r\n",,terminal_output +14652,13372991,"TERMINAL",0,0,"Step 5, loss: 7.132447719573975, step time: 18.461942672729492ms\r\n",,terminal_output +14653,13373062,"TERMINAL",0,0,"Step 6, loss: 6.929551601409912, step time: 18.799304962158203ms\r\n",,terminal_output +14654,13373114,"TERMINAL",0,0,"Step 7, loss: 6.766776084899902, step time: 18.380165100097656ms\r\n",,terminal_output +14655,13373177,"TERMINAL",0,0,"Step 8, loss: 6.593125343322754, step time: 18.55945587158203ms\r\n",,terminal_output +14656,13373235,"TERMINAL",0,0,"Step 9, loss: 6.652856826782227, step time: 18.570423126220703ms\r\n",,terminal_output +14657,13373348,"TERMINAL",0,0,"Step 10, loss: 6.401626110076904, step time: 18.819808959960938ms\r\nStep 11, loss: 6.411824703216553, step time: 18.292665481567383ms\r\n",,terminal_output +14658,13373409,"TERMINAL",0,0,"Step 12, loss: 6.155965805053711, step time: 19.13762092590332ms\r\n",,terminal_output +14659,13373502,"TERMINAL",0,0,"Step 13, loss: 6.087963104248047, step time: 18.329143524169922ms\r\n",,terminal_output +14660,13373557,"TERMINAL",0,0,"Step 14, loss: 5.984762191772461, step time: 18.261432647705078ms\r\n",,terminal_output +14661,13373658,"TERMINAL",0,0,"Step 15, loss: 5.949819564819336, step time: 18.818378448486328ms\r\nStep 16, loss: 5.88980770111084, step time: 18.34392547607422ms\r\n",,terminal_output +14662,13373751,"TERMINAL",0,0,"Step 17, loss: 5.840494155883789, step time: 18.167734146118164ms\r\n",,terminal_output +14663,13373858,"TERMINAL",0,0,"Step 18, loss: 6.03936243057251, step time: 18.642187118530273ms\r\nStep 19, loss: 5.658323287963867, step time: 18.16701889038086ms\r\n",,terminal_output +14664,13373922,"TERMINAL",0,0,"Step 20, loss: 5.57962703704834, step time: 18.199443817138672ms\r\n",,terminal_output +14665,13374024,"TERMINAL",0,0,"Step 21, loss: 5.600968837738037, step time: 18.650531768798828ms\r\n",,terminal_output +14666,13374072,"TERMINAL",0,0,"Step 22, loss: 5.566801071166992, step time: 18.05591583251953ms\r\n",,terminal_output +14667,13374125,"TERMINAL",0,0,"Step 23, loss: 5.421731472015381, step time: 18.15009117126465ms\r\n",,terminal_output +14668,13374225,"TERMINAL",0,0,"Step 24, loss: 5.361855506896973, step 
time: 18.518924713134766ms\r\n",,terminal_output +14669,13374296,"TERMINAL",0,0,"Step 25, loss: 5.370822429656982, step time: 18.198728561401367ms\r\nStep 26, loss: 5.3421173095703125, step time: 18.22352409362793ms\r\n",,terminal_output +14670,13374707,"TERMINAL",0,0,"Step 27, loss: 5.286174297332764, step time: 339.2829895019531ms\r\nStep 28, loss: 5.165326118469238, step time: 26.45397186279297ms\r\n",,terminal_output +14671,13374809,"TERMINAL",0,0,"Step 29, loss: 5.111940860748291, step time: 20.698070526123047ms\r\n",,terminal_output +14672,13374857,"TERMINAL",0,0,"Step 30, loss: 5.064902305603027, step time: 19.6533203125ms\r\n",,terminal_output +14673,13374964,"TERMINAL",0,0,"Step 31, loss: 5.192670822143555, step time: 18.625497817993164ms\r\nStep 32, loss: 5.176879405975342, step time: 18.50414276123047ms\r\n",,terminal_output +14674,13375059,"TERMINAL",0,0,"Step 33, loss: 5.023235321044922, step time: 18.841981887817383ms\r\n",,terminal_output +14675,13375157,"TERMINAL",0,0,"Step 34, loss: 4.929561614990234, step time: 18.378496170043945ms\r\nStep 35, loss: 4.955502986907959, step time: 18.457412719726562ms\r\n",,terminal_output +14676,13375253,"TERMINAL",0,0,"Step 36, loss: 4.884085655212402, step time: 18.906593322753906ms\r\n",,terminal_output +14677,13375315,"TERMINAL",0,0,"Step 37, loss: 4.922764301300049, step time: 18.44000816345215ms\r\n",,terminal_output +14678,13375384,"TERMINAL",0,0,"Step 38, loss: 4.72947359085083, step time: 18.42808723449707ms\r\n",,terminal_output +14679,13375542,"TERMINAL",0,0,"Step 39, loss: 4.814236640930176, step time: 18.707275390625ms\r\nStep 40, loss: 4.714200019836426, step time: 18.53346824645996ms\r\n",,terminal_output +14680,13375620,"TERMINAL",0,0,"Step 41, loss: 4.763123989105225, step time: 18.374204635620117ms\r\nStep 42, loss: 4.662085056304932, step time: 19.13762092590332ms\r\n",,terminal_output +14681,13375686,"TERMINAL",0,0,"Step 43, loss: 4.773284435272217, step time: 18.33033561706543ms\r\n",,terminal_output +14682,13375748,"TERMINAL",0,0,"Step 44, loss: 4.548842430114746, step time: 18.311500549316406ms\r\n",,terminal_output +14683,13375817,"TERMINAL",0,0,"Step 45, loss: 4.621316432952881, step time: 19.05369758605957ms\r\n",,terminal_output +14684,13375876,"TERMINAL",0,0,"Step 46, loss: 4.5331807136535645, step time: 18.342971801757812ms\r\n",,terminal_output +14685,13375945,"TERMINAL",0,0,"Step 47, loss: 4.4568705558776855, step time: 18.221378326416016ms\r\n",,terminal_output +14686,13376009,"TERMINAL",0,0,"Step 48, loss: 4.505385875701904, step time: 18.86892318725586ms\r\n",,terminal_output +14687,13376116,"TERMINAL",0,0,"Step 49, loss: 4.382482051849365, step time: 18.240928649902344ms\r\nStep 50, loss: 4.245072364807129, step time: 18.273591995239258ms\r\n",,terminal_output +14688,13376214,"TERMINAL",0,0,"Step 51, loss: 4.340331077575684, step time: 18.860340118408203ms\r\n",,terminal_output +14689,13376268,"TERMINAL",0,0,"Step 52, loss: 4.607442378997803, step time: 18.5089111328125ms\r\n",,terminal_output +14690,13376376,"TERMINAL",0,0,"Step 53, loss: 4.538057327270508, step time: 18.265247344970703ms\r\nStep 54, loss: 4.338743686676025, step time: 18.773555755615234ms\r\n",,terminal_output +14691,13376471,"TERMINAL",0,0,"Step 55, loss: 4.56574010848999, step time: 18.283367156982422ms\r\n",,terminal_output +14692,13376526,"TERMINAL",0,0,"Step 56, loss: 4.22885274887085, step time: 18.381834030151367ms\r\n",,terminal_output +14693,13376632,"TERMINAL",0,0,"Step 57, loss: 4.443630695343018, step time: 
18.828868865966797ms\r\nStep 58, loss: 4.221024990081787, step time: 18.30887794494629ms\r\n",,terminal_output +14694,13376728,"TERMINAL",0,0,"Step 59, loss: 4.176178455352783, step time: 18.18251609802246ms\r\n",,terminal_output +14695,13376781,"TERMINAL",0,0,"Step 60, loss: 4.422875881195068, step time: 19.397497177124023ms\r\n",,terminal_output +14696,13376888,"TERMINAL",0,0,"Step 61, loss: 4.430339813232422, step time: 21.196603775024414ms\r\nStep 62, loss: 4.457839488983154, step time: 18.915653228759766ms\r\n",,terminal_output +14697,13376984,"TERMINAL",0,0,"Step 63, loss: 4.142489433288574, step time: 18.85986328125ms\r\n",,terminal_output +14698,13377041,"TERMINAL",0,0,"Step 64, loss: 4.222628593444824, step time: 18.539905548095703ms\r\n",,terminal_output +14699,13377147,"TERMINAL",0,0,"Step 65, loss: 4.004862308502197, step time: 18.347740173339844ms\r\nStep 66, loss: 4.144205093383789, step time: 18.836021423339844ms\r\n",,terminal_output +14700,13377214,"TERMINAL",0,0,"Step 67, loss: 3.9601545333862305, step time: 18.11528205871582ms\r\n",,terminal_output +14701,13377276,"TERMINAL",0,0,"Step 68, loss: 4.048355579376221, step time: 19.27947998046875ms\r\n",,terminal_output +14702,13377339,"TERMINAL",0,0,"Step 69, loss: 4.0442657470703125, step time: 18.918752670288086ms\r\n",,terminal_output +14703,13377402,"TERMINAL",0,0,"Step 70, loss: 4.032415866851807, step time: 18.406152725219727ms\r\n",,terminal_output +14704,13377472,"TERMINAL",0,0,"Step 71, loss: 3.8850531578063965, step time: 18.557310104370117ms\r\n",,terminal_output +14705,13377530,"TERMINAL",0,0,"Step 72, loss: 4.062247276306152, step time: 19.089460372924805ms\r\n",,terminal_output +14706,13377594,"TERMINAL",0,0,"Step 73, loss: 3.9714303016662598, step time: 18.44310760498047ms\r\n",,terminal_output +14707,13377660,"TERMINAL",0,0,"Step 74, loss: 3.9980924129486084, step time: 18.565893173217773ms\r\n",,terminal_output +14708,13377722,"TERMINAL",0,0,"Step 75, loss: 4.213746547698975, step time: 18.92375946044922ms\r\n",,terminal_output +14709,13377786,"TERMINAL",0,0,"Step 76, loss: 3.953911542892456, step time: 18.541574478149414ms\r\n",,terminal_output +14710,13377849,"TERMINAL",0,0,"Step 77, loss: 4.094261646270752, step time: 18.57900619506836ms\r\n",,terminal_output +14711,13377914,"TERMINAL",0,0,"Step 78, loss: 3.9316294193267822, step time: 18.84150505065918ms\r\n",,terminal_output +14712,13377979,"TERMINAL",0,0,"Step 79, loss: 3.839531660079956, step time: 18.427133560180664ms\r\n",,terminal_output +14713,13378043,"TERMINAL",0,0,"Step 80, loss: 4.026218891143799, step time: 18.29385757446289ms\r\n",,terminal_output +14714,13378108,"TERMINAL",0,0,"Step 81, loss: 3.945244550704956, step time: 18.71466636657715ms\r\n",,terminal_output +14715,13378165,"TERMINAL",0,0,"Step 82, loss: 3.9471042156219482, step time: 18.473148345947266ms\r\n",,terminal_output +14716,13378233,"TERMINAL",0,0,"Step 83, loss: 3.8663547039031982, step time: 18.27073097229004ms\r\n",,terminal_output +14717,13378299,"TERMINAL",0,0,"Step 84, loss: 3.847092390060425, step time: 18.961191177368164ms\r\n",,terminal_output +14718,13378359,"TERMINAL",0,0,"Step 85, loss: 3.7905218601226807, step time: 18.2650089263916ms\r\n",,terminal_output +14719,13378424,"TERMINAL",0,0,"Step 86, loss: 4.401544570922852, step time: 18.32890510559082ms\r\n",,terminal_output +14720,13378487,"TERMINAL",0,0,"Step 87, loss: 3.9787940979003906, step time: 18.680810928344727ms\r\n",,terminal_output +14721,13378551,"TERMINAL",0,0,"Step 88, loss: 3.8408706188201904, step 
time: 18.312454223632812ms\r\n",,terminal_output +14722,13378660,"TERMINAL",0,0,"Step 89, loss: 3.814897298812866, step time: 18.26953887939453ms\r\n",,terminal_output +14723,13378671,"TERMINAL",0,0,"Step 90, loss: 3.803877115249634, step time: 18.732786178588867ms\r\n",,terminal_output +14724,13378799,"TERMINAL",0,0,"Step 91, loss: 3.727346658706665, step time: 18.215179443359375ms\r\nStep 92, loss: 3.872795581817627, step time: 18.260478973388672ms\r\n",,terminal_output +14725,13378894,"TERMINAL",0,0,"Step 93, loss: 4.219383716583252, step time: 18.782854080200195ms\r\n",,terminal_output +14726,13378948,"TERMINAL",0,0,"Step 94, loss: 3.7291715145111084, step time: 18.208742141723633ms\r\n",,terminal_output +14727,13379044,"TERMINAL",0,0,"Step 95, loss: 3.887648820877075, step time: 18.251657485961914ms\r\n",,terminal_output +14728,13379120,"TERMINAL",0,0,"Step 96, loss: 3.7162063121795654, step time: 18.728256225585938ms\r\nStep 97, loss: 3.713698387145996, step time: 23.076534271240234ms\r\n",,terminal_output +14729,13379223,"TERMINAL",0,0,"Step 98, loss: 3.6644182205200195, step time: 20.260334014892578ms\r\n",,terminal_output +14730,13379286,"TERMINAL",0,0,"Step 99, loss: 3.803079843521118, step time: 19.124984741210938ms\r\n",,terminal_output +14731,13379351,"TERMINAL",0,0,"Step 100, loss: 3.636923313140869, step time: 18.372535705566406ms\r\n",,terminal_output +14732,13379413,"TERMINAL",0,0,"Step 101, loss: 3.709846258163452, step time: 18.456697463989258ms\r\n",,terminal_output +14733,13379476,"TERMINAL",0,0,"Step 102, loss: 3.6857197284698486, step time: 20.102739334106445ms\r\n",,terminal_output +14734,13379538,"TERMINAL",0,0,"Step 103, loss: 3.9463634490966797, step time: 18.369674682617188ms\r\n",,terminal_output +14735,13379600,"TERMINAL",0,0,"Step 104, loss: 3.690385341644287, step time: 18.483877182006836ms\r\n",,terminal_output +14736,13379661,"TERMINAL",0,0,"Step 105, loss: 3.844726324081421, step time: 18.923282623291016ms\r\n",,terminal_output +14737,13379724,"TERMINAL",0,0,"Step 106, loss: 3.6722121238708496, step time: 18.59116554260254ms\r\n",,terminal_output +14738,13379790,"TERMINAL",0,0,"Step 107, loss: 3.883789539337158, step time: 18.494844436645508ms\r\n",,terminal_output +14739,13379852,"TERMINAL",0,0,"Step 108, loss: 4.0153937339782715, step time: 18.859386444091797ms\r\n",,terminal_output +14740,13379916,"TERMINAL",0,0,"Step 109, loss: 3.8626949787139893, step time: 18.550634384155273ms\r\n",,terminal_output +14741,13379980,"TERMINAL",0,0,"Step 110, loss: 3.738445997238159, step time: 18.380403518676758ms\r\n",,terminal_output +14742,13380042,"TERMINAL",0,0,"Step 111, loss: 3.8446242809295654, step time: 19.033193588256836ms\r\n",,terminal_output +14743,13380140,"TERMINAL",0,0,"Step 112, loss: 3.6656317710876465, step time: 18.257856369018555ms\r\nStep 113, loss: 3.714176654815674, step time: 18.410921096801758ms\r\n",,terminal_output +14744,13380203,"TERMINAL",0,0,"Step 114, loss: 3.7243549823760986, step time: 18.80645751953125ms\r\n",,terminal_output +14745,13380302,"TERMINAL",0,0,"Step 115, loss: 3.754054069519043, step time: 18.71347427368164ms\r\n",,terminal_output +14746,13380364,"TERMINAL",0,0,"Step 116, loss: 3.7899250984191895, step time: 18.24498176574707ms\r\n",,terminal_output +14747,13380428,"TERMINAL",0,0,"Step 117, loss: 3.6017940044403076, step time: 19.0887451171875ms\r\n",,terminal_output +14748,13380488,"TERMINAL",0,0,"Step 118, loss: 3.599475860595703, step time: 18.210172653198242ms\r\n",,terminal_output 
+14749,13380548,"TERMINAL",0,0,"Step 119, loss: 3.6867940425872803, step time: 18.496274948120117ms\r\n",,terminal_output +14750,13380619,"TERMINAL",0,0,"Step 120, loss: 3.5096471309661865, step time: 18.82004737854004ms\r\n",,terminal_output +14751,13380672,"TERMINAL",0,0,"Step 121, loss: 3.6908161640167236, step time: 18.604040145874023ms\r\n",,terminal_output +14752,13380779,"TERMINAL",0,0,"Step 122, loss: 3.7813262939453125, step time: 18.343448638916016ms\r\nStep 123, loss: 3.5313544273376465, step time: 19.00339126586914ms\r\n",,terminal_output +14753,13380871,"TERMINAL",0,0,"Step 124, loss: 3.51796555519104, step time: 18.178224563598633ms\r\n",,terminal_output +14754,13380924,"TERMINAL",0,0,"Step 125, loss: 3.478478193283081, step time: 18.589019775390625ms\r\n",,terminal_output +14755,13381020,"TERMINAL",0,0,"Step 126, loss: 3.5144424438476562, step time: 18.83101463317871ms\r\n",,terminal_output +14756,13381073,"TERMINAL",0,0,"Step 127, loss: 3.4956071376800537, step time: 18.565654754638672ms\r\n",,terminal_output +14757,13381125,"TERMINAL",0,0,"Step 128, loss: 3.4462649822235107, step time: 29.274702072143555ms\r\n",,terminal_output +14758,13381224,"TERMINAL",0,0,"Step 129, loss: 3.6973211765289307, step time: 19.279956817626953ms\r\n",,terminal_output +14759,13381273,"TERMINAL",0,0,"Step 130, loss: 3.6640148162841797, step time: 18.712520599365234ms\r\n",,terminal_output +14760,13381381,"TERMINAL",0,0,"Step 131, loss: 3.501038074493408, step time: 18.718481063842773ms\r\nStep 132, loss: 3.516145706176758, step time: 19.13905143737793ms\r\n",,terminal_output +14761,13381443,"TERMINAL",0,0,"Step 133, loss: 3.7430739402770996, step time: 18.826961517333984ms\r\n",,terminal_output +14762,13381506,"TERMINAL",0,0,"Step 134, loss: 3.483546733856201, step time: 18.472671508789062ms\r\n",,terminal_output +14763,13381571,"TERMINAL",0,0,"Step 135, loss: 3.4698967933654785, step time: 19.17719841003418ms\r\n",,terminal_output +14764,13381633,"TERMINAL",0,0,"Step 136, loss: 3.4796478748321533, step time: 18.455028533935547ms\r\n",,terminal_output +14765,13381697,"TERMINAL",0,0,"Step 137, loss: 3.5361626148223877, step time: 18.537282943725586ms\r\n",,terminal_output +14766,13381760,"TERMINAL",0,0,"Step 138, loss: 3.8551197052001953, step time: 19.171953201293945ms\r\n",,terminal_output +14767,13381824,"TERMINAL",0,0,"Step 139, loss: 3.3624331951141357, step time: 18.55325698852539ms\r\n",,terminal_output +14768,13381886,"TERMINAL",0,0,"Step 140, loss: 3.4655673503875732, step time: 18.342018127441406ms\r\n",,terminal_output +14769,13381949,"TERMINAL",0,0,"Step 141, loss: 3.42878794670105, step time: 19.2720890045166ms\r\n",,terminal_output +14770,13382017,"TERMINAL",0,0,"Step 142, loss: 3.57708740234375, step time: 18.61572265625ms\r\n",,terminal_output +14771,13382131,"TERMINAL",0,0,"Step 143, loss: 3.382728099822998, step time: 18.513917922973633ms\r\nStep 144, loss: 3.4101500511169434, step time: 18.73040199279785ms\r\n",,terminal_output +14772,13382475,"TERMINAL",0,0,"Step 145, loss: 3.420574903488159, step time: 299.24869537353516ms\r\n",,terminal_output +14773,13382528,"TERMINAL",0,0,"Step 146, loss: 3.42496657371521, step time: 26.277542114257812ms\r\n",,terminal_output +14774,13382632,"TERMINAL",0,0,"Step 147, loss: 3.432577610015869, step time: 21.00062370300293ms\r\nStep 148, loss: 3.3707218170166016, step time: 19.571542739868164ms\r\n",,terminal_output +14775,13382731,"TERMINAL",0,0,"Step 149, loss: 3.462348699569702, step time: 18.838167190551758ms\r\n",,terminal_output 
+14776,13382779,"TERMINAL",0,0,"Step 150, loss: 3.353206157684326, step time: 18.62478256225586ms\r\n",,terminal_output +14777,13382888,"TERMINAL",0,0,"Step 151, loss: 3.2587175369262695, step time: 19.02008056640625ms\r\nStep 152, loss: 3.23954701423645, step time: 18.36705207824707ms\r\n",,terminal_output +14778,13382955,"TERMINAL",0,0,"Step 153, loss: 3.4568939208984375, step time: 20.509958267211914ms\r\n",,terminal_output +14779,13383076,"TERMINAL",0,0,"Step 154, loss: 3.323068141937256, step time: 18.95427703857422ms\r\nStep 155, loss: 3.4806103706359863, step time: 18.889904022216797ms\r\n",,terminal_output +14780,13383199,"TERMINAL",0,0,"Step 156, loss: 3.358218193054199, step time: 18.60499382019043ms\r\nStep 157, loss: 3.4688756465911865, step time: 19.005775451660156ms\r\n",,terminal_output +14781,13383307,"TERMINAL",0,0,"Step 158, loss: 3.3808557987213135, step time: 18.570899963378906ms\r\n",,terminal_output +14782,13383369,"TERMINAL",0,0,"Step 159, loss: 3.4469916820526123, step time: 18.49198341369629ms\r\n",,terminal_output +14783,13383436,"TERMINAL",0,0,"Step 160, loss: 3.3215222358703613, step time: 18.697261810302734ms\r\n",,terminal_output +14784,13383487,"TERMINAL",0,0,"Step 161, loss: 3.5418453216552734, step time: 18.571853637695312ms\r\n",,terminal_output +14785,13383649,"TERMINAL",0,0,"Step 162, loss: 3.282073497772217, step time: 18.388986587524414ms\r\nStep 163, loss: 3.3535850048065186, step time: 18.529653549194336ms\r\nStep 164, loss: 3.277435064315796, step time: 18.305540084838867ms\r\n",,terminal_output +14786,13383716,"TERMINAL",0,0,"Step 165, loss: 3.87895131111145, step time: 18.26643943786621ms\r\n",,terminal_output +14787,13383776,"TERMINAL",0,0,"Step 166, loss: 3.492748260498047, step time: 18.586397171020508ms\r\n",,terminal_output +14788,13383840,"TERMINAL",0,0,"Step 167, loss: 3.3742237091064453, step time: 18.438339233398438ms\r\n",,terminal_output +14789,13383907,"TERMINAL",0,0,"Step 168, loss: 3.385145664215088, step time: 18.354415893554688ms\r\n",,terminal_output +14790,13383971,"TERMINAL",0,0,"Step 169, loss: 3.324228525161743, step time: 18.938541412353516ms\r\n",,terminal_output +14791,13384094,"TERMINAL",0,0,"Step 170, loss: 3.285801649093628, step time: 18.40996742248535ms\r\nStep 171, loss: 3.2571053504943848, step time: 18.2342529296875ms\r\n",,terminal_output +14792,13384159,"TERMINAL",0,0,"Step 172, loss: 3.258303642272949, step time: 18.67389678955078ms\r\n",,terminal_output +14793,13384227,"TERMINAL",0,0,"Step 173, loss: 3.62978458404541, step time: 18.092632293701172ms\r\n",,terminal_output +14794,13384291,"TERMINAL",0,0,"Step 174, loss: 3.166855573654175, step time: 18.204927444458008ms\r\n",,terminal_output +14795,13384355,"TERMINAL",0,0,"Step 175, loss: 3.2830848693847656, step time: 18.474578857421875ms\r\n",,terminal_output +14796,13384422,"TERMINAL",0,0,"Step 176, loss: 3.1662709712982178, step time: 18.30458641052246ms\r\n",,terminal_output +14797,13384485,"TERMINAL",0,0,"Step 177, loss: 3.261627435684204, step time: 18.224716186523438ms\r\n",,terminal_output +14798,13384548,"TERMINAL",0,0,"Step 178, loss: 3.4414331912994385, step time: 18.504619598388672ms\r\n",,terminal_output +14799,13384612,"TERMINAL",0,0,"Step 179, loss: 3.2419564723968506, step time: 18.15485954284668ms\r\n",,terminal_output +14800,13384706,"TERMINAL",0,0,"Step 180, loss: 3.0829391479492188, step time: 18.231630325317383ms\r\n",,terminal_output +14801,13384819,"TERMINAL",0,0,"Step 181, loss: 3.4150538444519043, step time: 18.51344108581543ms\r\nStep 
182, loss: 3.2347216606140137, step time: 18.236875534057617ms\r\n",,terminal_output +14802,13384876,"TERMINAL",0,0,"Step 183, loss: 3.6240196228027344, step time: 18.1734561920166ms\r\n",,terminal_output +14803,13384938,"TERMINAL",0,0,"Step 184, loss: 3.199064254760742, step time: 18.647432327270508ms\r\n",,terminal_output +14804,13385046,"TERMINAL",0,0,"Step 185, loss: 3.329559803009033, step time: 18.267393112182617ms\r\nStep 186, loss: 3.3113224506378174, step time: 18.191099166870117ms\r\n",,terminal_output +14805,13385141,"TERMINAL",0,0,"Step 187, loss: 3.722374200820923, step time: 18.63265037536621ms\r\n",,terminal_output +14806,13385213,"TERMINAL",0,0,"Step 188, loss: 3.2516961097717285, step time: 18.316030502319336ms\r\n",,terminal_output +14807,13385273,"TERMINAL",0,0,"Step 189, loss: 3.1324944496154785, step time: 18.20230484008789ms\r\n",,terminal_output +14808,13385339,"TERMINAL",0,0,"Step 190, loss: 3.274319887161255, step time: 18.530607223510742ms\r\n",,terminal_output +14809,13385400,"TERMINAL",0,0,"Step 191, loss: 3.2310831546783447, step time: 20.818233489990234ms\r\n",,terminal_output +14810,13385508,"TERMINAL",0,0,"Step 192, loss: 3.1905744075775146, step time: 18.97263526916504ms\r\nStep 193, loss: 3.1715738773345947, step time: 18.424272537231445ms\r\n",,terminal_output +14811,13385623,"TERMINAL",0,0,"Step 194, loss: 3.269465208053589, step time: 18.05591583251953ms\r\nStep 195, loss: 3.159886121749878, step time: 17.319917678833008ms\r\n",,terminal_output +14812,13385686,"TERMINAL",0,0,"Step 196, loss: 3.367490768432617, step time: 18.217802047729492ms\r\n",,terminal_output +14813,13385779,"TERMINAL",0,0,"Step 197, loss: 3.2050209045410156, step time: 17.825603485107422ms\r\n",,terminal_output +14814,13385832,"TERMINAL",0,0,"Step 198, loss: 3.155040979385376, step time: 17.86184310913086ms\r\n",,terminal_output +14815,13385927,"TERMINAL",0,0,"Step 199, loss: 3.144803762435913, step time: 17.5783634185791ms\r\n",,terminal_output +14816,13385981,"TERMINAL",0,0,"Step 200, loss: 3.4585587978363037, step time: 17.336606979370117ms\r\n",,terminal_output +14817,13386034,"TERMINAL",0,0,"Step 201, loss: 3.092339038848877, step time: 17.72284507751465ms\r\n",,terminal_output +14818,13386140,"TERMINAL",0,0,"Step 202, loss: 3.2021329402923584, step time: 17.54021644592285ms\r\nStep 203, loss: 3.141288995742798, step time: 17.296314239501953ms\r\n",,terminal_output +14819,13386207,"TERMINAL",0,0,"Step 204, loss: 3.1534762382507324, step time: 17.946958541870117ms\r\n",,terminal_output +14820,13386266,"TERMINAL",0,0,"Step 205, loss: 3.10160231590271, step time: 18.00084114074707ms\r\n",,terminal_output +14821,13386336,"TERMINAL",0,0,"Step 206, loss: 3.1695973873138428, step time: 17.380952835083008ms\r\n",,terminal_output +14822,13386403,"TERMINAL",0,0,"Step 207, loss: 3.1043057441711426, step time: 17.81916618347168ms\r\n",,terminal_output +14823,13386466,"TERMINAL",0,0,"Step 208, loss: 3.2059662342071533, step time: 18.040180206298828ms\r\n",,terminal_output +14824,13386574,"TERMINAL",0,0,"Step 209, loss: 3.204486131668091, step time: 17.191410064697266ms\r\nStep 210, loss: 3.235976219177246, step time: 17.767906188964844ms\r\n",,terminal_output +14825,13386637,"TERMINAL",0,0,"Step 211, loss: 3.426595449447632, step time: 17.693281173706055ms\r\n",,terminal_output +14826,13386700,"TERMINAL",0,0,"Step 212, loss: 3.0842981338500977, step time: 17.72928237915039ms\r\n",,terminal_output +14827,13386764,"TERMINAL",0,0,"Step 213, loss: 3.0514650344848633, step time: 
17.360925674438477ms\r\n",,terminal_output +14828,13386829,"TERMINAL",0,0,"Step 214, loss: 3.1215298175811768, step time: 18.250226974487305ms\r\n",,terminal_output +14829,13386892,"TERMINAL",0,0,"Step 215, loss: 3.192780017852783, step time: 17.11583137512207ms\r\n",,terminal_output +14830,13386960,"TERMINAL",0,0,"Step 216, loss: 3.094558000564575, step time: 17.25149154663086ms\r\n",,terminal_output +14831,13387026,"TERMINAL",0,0,"Step 217, loss: 3.1178839206695557, step time: 17.282962799072266ms\r\n",,terminal_output +14832,13387138,"TERMINAL",0,0,"Step 218, loss: 3.0789473056793213, step time: 17.689228057861328ms\r\nStep 219, loss: 3.0687925815582275, step time: 17.458200454711914ms\r\n",,terminal_output +14833,13387202,"TERMINAL",0,0,"Step 220, loss: 3.107891082763672, step time: 17.57216453552246ms\r\n",,terminal_output +14834,13387263,"TERMINAL",0,0,"Step 221, loss: 3.0797598361968994, step time: 17.100095748901367ms\r\n",,terminal_output +14835,13387326,"TERMINAL",0,0,"Step 222, loss: 3.1931142807006836, step time: 17.575979232788086ms\r\n",,terminal_output +14836,13387391,"TERMINAL",0,0,"Step 223, loss: 2.9837536811828613, step time: 17.571449279785156ms\r\n",,terminal_output +14837,13387452,"TERMINAL",0,0,"Step 224, loss: 3.117361307144165, step time: 17.370939254760742ms\r\n",,terminal_output +14838,13387517,"TERMINAL",0,0,"Step 225, loss: 3.1119604110717773, step time: 17.412185668945312ms\r\n",,terminal_output +14839,13387621,"TERMINAL",0,0,"Step 226, loss: 2.986405611038208, step time: 18.121719360351562ms\r\n",,terminal_output +14840,13387673,"TERMINAL",0,0,"Step 227, loss: 3.088287115097046, step time: 17.810821533203125ms\r\n",,terminal_output +14841,13387779,"TERMINAL",0,0,"Step 228, loss: 2.944687843322754, step time: 17.289400100708008ms\r\nStep 229, loss: 3.016641139984131, step time: 17.99774169921875ms\r\n",,terminal_output +14842,13387874,"TERMINAL",0,0,"Step 230, loss: 3.0649561882019043, step time: 17.445802688598633ms\r\n",,terminal_output +14843,13387927,"TERMINAL",0,0,"Step 231, loss: 3.278744697570801, step time: 17.74907112121582ms\r\n",,terminal_output +14844,13388032,"TERMINAL",0,0,"Step 232, loss: 3.022156238555908, step time: 17.43602752685547ms\r\nStep 233, loss: 3.081570863723755, step time: 17.253398895263672ms\r\n",,terminal_output +14845,13388146,"TERMINAL",0,0,"Step 234, loss: 2.985562562942505, step time: 17.343997955322266ms\r\nStep 235, loss: 2.9895944595336914, step time: 17.92001724243164ms\r\n",,terminal_output +14846,13388212,"TERMINAL",0,0,"Step 236, loss: 2.9661664962768555, step time: 17.130374908447266ms\r\n",,terminal_output +14847,13388274,"TERMINAL",0,0,"Step 237, loss: 3.256110668182373, step time: 17.033815383911133ms\r\n",,terminal_output +14848,13388336,"TERMINAL",0,0,"Step 238, loss: 3.0881354808807373, step time: 17.310380935668945ms\r\n",,terminal_output +14849,13388399,"TERMINAL",0,0,"Step 239, loss: 2.9516701698303223, step time: 17.02713966369629ms\r\n",,terminal_output +14850,13388470,"TERMINAL",0,0,"Step 240, loss: 2.9688432216644287, step time: 17.109155654907227ms\r\n",,terminal_output +14851,13388543,"TERMINAL",0,0,"Step 241, loss: 2.9764397144317627, step time: 17.266511917114258ms\r\n",,terminal_output +14852,13388650,"TERMINAL",0,0,"Step 242, loss: 3.011570930480957, step time: 17.049789428710938ms\r\nStep 243, loss: 2.946286201477051, step time: 16.975879669189453ms\r\n",,terminal_output +14853,13388763,"TERMINAL",0,0,"Step 244, loss: 2.9339890480041504, step time: 17.256975173950195ms\r\n",,terminal_output 
+14854,13388857,"TERMINAL",0,0,"Step 245, loss: 2.9327495098114014, step time: 16.981124877929688ms\r\nStep 246, loss: 3.1270899772644043, step time: 17.261505126953125ms\r\n",,terminal_output +14855,13388911,"TERMINAL",0,0,"Step 247, loss: 2.9434282779693604, step time: 18.021106719970703ms\r\n",,terminal_output +14856,13389002,"TERMINAL",0,0,"Step 248, loss: 3.0236480236053467, step time: 17.4715518951416ms\r\n",,terminal_output +14857,13389054,"TERMINAL",0,0,"Step 249, loss: 2.942600727081299, step time: 16.94798469543457ms\r\n",,terminal_output +14858,13389145,"TERMINAL",0,0,"Step 250, loss: 3.0146706104278564, step time: 17.348527908325195ms\r\nStep 251, loss: 3.092883825302124, step time: 17.014265060424805ms\r\n",,terminal_output +14859,13389240,"TERMINAL",0,0,"Step 252, loss: 2.9449100494384766, step time: 17.16017723083496ms\r\n",,terminal_output +14860,13389348,"TERMINAL",0,0,"Step 253, loss: 2.887612819671631, step time: 17.25292205810547ms\r\nStep 254, loss: 3.183401107788086, step time: 17.125844955444336ms\r\n",,terminal_output +14861,13389460,"TERMINAL",0,0,"Step 255, loss: 2.970400094985962, step time: 16.98780059814453ms\r\nStep 256, loss: 3.048018455505371, step time: 17.566680908203125ms\r\n",,terminal_output +14862,13389524,"TERMINAL",0,0,"Step 257, loss: 3.0525684356689453, step time: 17.0900821685791ms\r\n",,terminal_output +14863,13389587,"TERMINAL",0,0,"Step 258, loss: 2.862213373184204, step time: 17.07911491394043ms\r\n",,terminal_output +14864,13389649,"TERMINAL",0,0,"Step 259, loss: 2.8527016639709473, step time: 17.267227172851562ms\r\n",,terminal_output +14865,13389712,"TERMINAL",0,0,"Step 260, loss: 2.9278616905212402, step time: 17.037630081176758ms\r\n",,terminal_output +14866,13389776,"TERMINAL",0,0,"Step 261, loss: 3.2014334201812744, step time: 17.04692840576172ms\r\n",,terminal_output +14867,13389835,"TERMINAL",0,0,"Step 262, loss: 3.3059635162353516, step time: 17.597436904907227ms\r\n",,terminal_output +14868,13389930,"TERMINAL",0,0,"Step 263, loss: 2.903062343597412, step time: 17.005443572998047ms\r\n",,terminal_output +14869,13389984,"TERMINAL",0,0,"Step 264, loss: 2.949629783630371, step time: 20.586490631103516ms\r\n",,terminal_output +14870,13390092,"TERMINAL",0,0,"Step 265, loss: 2.904022455215454, step time: 17.239093780517578ms\r\nStep 266, loss: 3.0144450664520264, step time: 17.179489135742188ms\r\n",,terminal_output +14871,13390159,"TERMINAL",0,0,"Step 267, loss: 3.2563178539276123, step time: 17.004966735839844ms\r\n",,terminal_output +14872,13390223,"TERMINAL",0,0,"Step 268, loss: 2.8812332153320312, step time: 17.421722412109375ms\r\n",,terminal_output +14873,13390286,"TERMINAL",0,0,"Step 269, loss: 2.8626692295074463, step time: 16.960620880126953ms\r\n",,terminal_output +14874,13390349,"TERMINAL",0,0,"Step 270, loss: 2.837293863296509, step time: 17.09723472595215ms\r\n",,terminal_output +14875,13390413,"TERMINAL",0,0,"Step 271, loss: 2.848111867904663, step time: 17.42386817932129ms\r\n",,terminal_output +14876,13390478,"TERMINAL",0,0,"Step 272, loss: 2.9361064434051514, step time: 17.32635498046875ms\r\n",,terminal_output +14877,13390542,"TERMINAL",0,0,"Step 273, loss: 2.858933925628662, step time: 17.011642456054688ms\r\n",,terminal_output +14878,13390604,"TERMINAL",0,0,"Step 274, loss: 2.848180055618286, step time: 17.321109771728516ms\r\n",,terminal_output +14879,13390668,"TERMINAL",0,0,"Step 275, loss: 2.8563947677612305, step time: 16.923904418945312ms\r\n",,terminal_output +14880,13390731,"TERMINAL",0,0,"Step 276, loss: 
3.1181588172912598, step time: 16.96324348449707ms\r\n",,terminal_output +14881,13390795,"TERMINAL",0,0,"Step 277, loss: 3.1649374961853027, step time: 17.243146896362305ms\r\n",,terminal_output +14882,13390856,"TERMINAL",0,0,"Step 278, loss: 2.855489730834961, step time: 17.247438430786133ms\r\n",,terminal_output +14883,13390922,"TERMINAL",0,0,"Step 279, loss: 3.0818469524383545, step time: 16.913652420043945ms\r\n",,terminal_output +14884,13390983,"TERMINAL",0,0,"Step 280, loss: 2.9798407554626465, step time: 17.308712005615234ms\r\n",,terminal_output +14885,13391047,"TERMINAL",0,0,"Step 281, loss: 2.848113775253296, step time: 17.00758934020996ms\r\n",,terminal_output +14886,13391152,"TERMINAL",0,0,"Step 282, loss: 3.004603147506714, step time: 19.053936004638672ms\r\nStep 283, loss: 2.846170663833618, step time: 18.258333206176758ms\r\n",,terminal_output +14887,13391250,"TERMINAL",0,0,"Step 284, loss: 3.515552520751953, step time: 17.412185668945312ms\r\n",,terminal_output +14888,13391315,"TERMINAL",0,0,"Step 285, loss: 2.849346399307251, step time: 17.069339752197266ms\r\n",,terminal_output +14889,13391382,"TERMINAL",0,0,"Step 286, loss: 2.944363594055176, step time: 17.51852035522461ms\r\n",,terminal_output +14890,13391434,"TERMINAL",0,0,"Step 287, loss: 2.8223981857299805, step time: 16.982316970825195ms\r\n",,terminal_output +14891,13391539,"TERMINAL",0,0,"Step 288, loss: 2.9338390827178955, step time: 17.021656036376953ms\r\nStep 289, loss: 3.0249388217926025, step time: 17.229557037353516ms\r\n",,terminal_output +14892,13391601,"TERMINAL",0,0,"Step 290, loss: 2.8222689628601074, step time: 17.121076583862305ms\r\n",,terminal_output +14893,13391663,"TERMINAL",0,0,"Step 291, loss: 2.828390598297119, step time: 16.999244689941406ms\r\n",,terminal_output +14894,13391726,"TERMINAL",0,0,"Step 292, loss: 2.955505132675171, step time: 17.33231544494629ms\r\n",,terminal_output +14895,13391789,"TERMINAL",0,0,"Step 293, loss: 3.034531593322754, step time: 16.9217586517334ms\r\n",,terminal_output +14896,13391850,"TERMINAL",0,0,"Step 294, loss: 2.913703680038452, step time: 17.08698272705078ms\r\n",,terminal_output +14897,13391911,"TERMINAL",0,0,"Step 295, loss: 2.822098970413208, step time: 17.232179641723633ms\r\n",,terminal_output +14898,13391972,"TERMINAL",0,0,"Step 296, loss: 3.0924482345581055, step time: 17.17352867126465ms\r\n",,terminal_output +14899,13392040,"TERMINAL",0,0,"Step 297, loss: 2.822359800338745, step time: 17.212629318237305ms\r\n",,terminal_output +14900,13392099,"TERMINAL",0,0,"Step 298, loss: 2.8040590286254883, step time: 17.58718490600586ms\r\n",,terminal_output +14901,13392164,"TERMINAL",0,0,"Step 299, loss: 2.7626736164093018, step time: 17.098188400268555ms\r\n",,terminal_output +14902,13392225,"TERMINAL",0,0,"Step 300, loss: 2.802218198776245, step time: 16.997337341308594ms\r\n",,terminal_output +14903,13392309,"TERMINAL",0,0,"Step 301, loss: 2.8541924953460693, step time: 17.225980758666992ms\r\n",,terminal_output +14904,13392409,"TERMINAL",0,0,"Step 302, loss: 2.806307315826416, step time: 17.26675033569336ms\r\nStep 303, loss: 2.845247983932495, step time: 17.1511173248291ms\r\n",,terminal_output +14905,13392462,"TERMINAL",0,0,"Step 304, loss: 2.8479363918304443, step time: 17.618656158447266ms\r\n",,terminal_output +14906,13392526,"TERMINAL",0,0,"Step 305, loss: 2.9501867294311523, step time: 17.08388328552246ms\r\n",,terminal_output +14907,13392649,"TERMINAL",0,0,"Step 306, loss: 2.72306752204895, step time: 17.681598663330078ms\r\nStep 307, loss: 
2.9250845909118652, step time: 17.16446876525879ms\r\n",,terminal_output +14908,13392714,"TERMINAL",0,0,"Step 308, loss: 2.7267441749572754, step time: 17.01974868774414ms\r\n",,terminal_output +14909,13392774,"TERMINAL",0,0,"Step 309, loss: 2.8974671363830566, step time: 17.240524291992188ms\r\n",,terminal_output +14910,13392839,"TERMINAL",0,0,"Step 310, loss: 2.6751132011413574, step time: 17.61174201965332ms\r\n",,terminal_output +14911,13392931,"TERMINAL",0,0,"Step 311, loss: 2.6126677989959717, step time: 17.024517059326172ms\r\n",,terminal_output +14912,13392982,"TERMINAL",0,0,"Step 312, loss: 2.67999005317688, step time: 17.196178436279297ms\r\n",,terminal_output +14913,13393089,"TERMINAL",0,0,"Step 313, loss: 2.9781224727630615, step time: 17.342567443847656ms\r\nStep 314, loss: 3.1536552906036377, step time: 17.29607582092285ms\r\n",,terminal_output +14914,13393154,"TERMINAL",0,0,"Step 315, loss: 2.8802850246429443, step time: 16.958236694335938ms\r\n",,terminal_output +14915,13393214,"TERMINAL",0,0,"Step 316, loss: 2.8315110206604004, step time: 17.241954803466797ms\r\n",,terminal_output +14916,13393278,"TERMINAL",0,0,"Step 317, loss: 2.7534446716308594, step time: 16.86859130859375ms\r\n",,terminal_output +14917,13393339,"TERMINAL",0,0,"Step 318, loss: 2.9055016040802, step time: 16.99209213256836ms\r\n",,terminal_output +14918,13393432,"TERMINAL",0,0,"Step 319, loss: 2.7587850093841553, step time: 17.236948013305664ms\r\n",,terminal_output +14919,13393484,"TERMINAL",0,0,"Step 320, loss: 3.19150972366333, step time: 18.027305603027344ms\r\n",,terminal_output +14920,13393656,"TERMINAL",0,0,"Step 321, loss: 2.824730157852173, step time: 17.24839210510254ms\r\nStep 322, loss: 2.666029453277588, step time: 17.313003540039062ms\r\nStep 323, loss: 2.674084424972534, step time: 16.915082931518555ms\r\n",,terminal_output +14921,13393723,"TERMINAL",0,0,"Step 324, loss: 2.7511179447174072, step time: 17.063140869140625ms\r\n",,terminal_output +14922,13394043,"TERMINAL",0,0,"Step 325, loss: 2.676931381225586, step time: 320.4541206359863ms\r\n",,terminal_output +14923,13394107,"TERMINAL",0,0,"Step 326, loss: 3.34348726272583, step time: 24.667739868164062ms\r\n",,terminal_output +14924,13394176,"TERMINAL",0,0,"Step 327, loss: 2.6811530590057373, step time: 19.520044326782227ms\r\n",,terminal_output +14925,13394237,"TERMINAL",0,0,"Step 328, loss: 2.65031361579895, step time: 18.100976943969727ms\r\n",,terminal_output +14926,13394304,"TERMINAL",0,0,"Step 329, loss: 2.8792455196380615, step time: 17.21358299255371ms\r\n",,terminal_output +14927,13394367,"TERMINAL",0,0,"Step 330, loss: 3.0553698539733887, step time: 17.330169677734375ms\r\n",,terminal_output +14928,13394430,"TERMINAL",0,0,"Step 331, loss: 2.6817924976348877, step time: 17.388105392456055ms\r\n",,terminal_output +14929,13394492,"TERMINAL",0,0,"Step 332, loss: 2.686277151107788, step time: 17.069578170776367ms\r\n",,terminal_output +14930,13394555,"TERMINAL",0,0,"Step 333, loss: 2.753190279006958, step time: 17.081260681152344ms\r\n",,terminal_output +14931,13394618,"TERMINAL",0,0,"Step 334, loss: 2.735034942626953, step time: 17.383813858032227ms\r\n",,terminal_output +14932,13394683,"TERMINAL",0,0,"Step 335, loss: 2.808338165283203, step time: 16.92962646484375ms\r\n",,terminal_output +14933,13394742,"TERMINAL",0,0,"Step 336, loss: 2.6485533714294434, step time: 17.098426818847656ms\r\n",,terminal_output +14934,13394810,"TERMINAL",0,0,"Step 337, loss: 2.9789459705352783, step time: 17.273426055908203ms\r\n",,terminal_output 
+14935,13394873,"TERMINAL",0,0,"Step 338, loss: 2.6259963512420654, step time: 17.091989517211914ms\r\n",,terminal_output +14936,13394937,"TERMINAL",0,0,"Step 339, loss: 2.7346596717834473, step time: 17.157793045043945ms\r\n",,terminal_output +14937,13395006,"TERMINAL",0,0,"Step 340, loss: 2.9333605766296387, step time: 17.46201515197754ms\r\n",,terminal_output +14938,13395069,"TERMINAL",0,0,"Step 341, loss: 2.721320629119873, step time: 16.98160171508789ms\r\n",,terminal_output +14939,13395122,"TERMINAL",0,0,"Step 342, loss: 2.6002190113067627, step time: 17.010927200317383ms\r\n",,terminal_output +14940,13395217,"TERMINAL",0,0,"Step 343, loss: 2.7423672676086426, step time: 17.307519912719727ms\r\n",,terminal_output +14941,13395269,"TERMINAL",0,0,"Step 344, loss: 2.6658685207366943, step time: 17.103910446166992ms\r\n",,terminal_output +14942,13395374,"TERMINAL",0,0,"Step 345, loss: 2.8353593349456787, step time: 17.084836959838867ms\r\nStep 346, loss: 2.9878604412078857, step time: 17.624855041503906ms\r\n",,terminal_output +14943,13395438,"TERMINAL",0,0,"Step 347, loss: 2.5840280055999756, step time: 16.95108413696289ms\r\n",,terminal_output +14944,13395499,"TERMINAL",0,0,"Step 348, loss: 2.792804002761841, step time: 17.05646514892578ms\r\n",,terminal_output +14945,13395562,"TERMINAL",0,0,"Step 349, loss: 2.7282114028930664, step time: 17.315149307250977ms\r\n",,terminal_output +14946,13395682,"TERMINAL",0,0,"Step 350, loss: 2.671783447265625, step time: 17.07291603088379ms\r\nStep 351, loss: 2.6423513889312744, step time: 16.997098922729492ms\r\n",,terminal_output +14947,13395754,"TERMINAL",0,0,"Step 352, loss: 2.6808829307556152, step time: 17.483234405517578ms\r\n",,terminal_output +14948,13395812,"TERMINAL",0,0,"Step 353, loss: 2.711239814758301, step time: 16.889333724975586ms\r\n",,terminal_output +14949,13395873,"TERMINAL",0,0,"Step 354, loss: 2.5904061794281006, step time: 17.052650451660156ms\r\n",,terminal_output +14950,13395969,"TERMINAL",0,0,"Step 355, loss: 2.9915826320648193, step time: 17.382383346557617ms\r\n",,terminal_output +14951,13396020,"TERMINAL",0,0,"Step 356, loss: 2.833090305328369, step time: 17.145872116088867ms\r\n",,terminal_output +14952,13396127,"TERMINAL",0,0,"Step 357, loss: 2.645092010498047, step time: 16.923904418945312ms\r\nStep 358, loss: 2.8143677711486816, step time: 17.58575439453125ms\r\n",,terminal_output +14953,13396197,"TERMINAL",0,0,"Step 359, loss: 2.7220232486724854, step time: 16.88671112060547ms\r\n",,terminal_output +14954,13396253,"TERMINAL",0,0,"Step 360, loss: 2.827486038208008, step time: 17.01831817626953ms\r\n",,terminal_output +14955,13396349,"TERMINAL",0,0,"Step 361, loss: 2.669715166091919, step time: 17.56739616394043ms\r\n",,terminal_output +14956,13396403,"TERMINAL",0,0,"Step 362, loss: 2.6230368614196777, step time: 17.09580421447754ms\r\n",,terminal_output +14957,13396506,"TERMINAL",0,0,"Step 363, loss: 2.722080945968628, step time: 18.682241439819336ms\r\nStep 364, loss: 2.5836548805236816, step time: 18.344879150390625ms\r\n",,terminal_output +14958,13396568,"TERMINAL",0,0,"Step 365, loss: 2.5220155715942383, step time: 17.04573631286621ms\r\n",,terminal_output +14959,13396629,"TERMINAL",0,0,"Step 366, loss: 2.6950623989105225, step time: 17.234325408935547ms\r\n",,terminal_output +14960,13396691,"TERMINAL",0,0,"Step 367, loss: 2.709268093109131, step time: 17.238855361938477ms\r\n",,terminal_output +14961,13396753,"TERMINAL",0,0,"Step 368, loss: 2.5749874114990234, step time: 
17.218589782714844ms\r\n",,terminal_output +14962,13396817,"TERMINAL",0,0,"Step 369, loss: 2.565392255783081, step time: 16.854047775268555ms\r\n",,terminal_output +14963,13396880,"TERMINAL",0,0,"Step 370, loss: 3.343287706375122, step time: 17.54450798034668ms\r\n",,terminal_output +14964,13396944,"TERMINAL",0,0,"Step 371, loss: 2.596346616744995, step time: 17.192363739013672ms\r\n",,terminal_output +14965,13397008,"TERMINAL",0,0,"Step 372, loss: 2.8144452571868896, step time: 17.107725143432617ms\r\n",,terminal_output +14966,13397071,"TERMINAL",0,0,"Step 373, loss: 2.908782958984375, step time: 17.151355743408203ms\r\n",,terminal_output +14967,13397125,"TERMINAL",0,0,"Step 374, loss: 3.088275909423828, step time: 17.11869239807129ms\r\n",,terminal_output +14968,13397219,"TERMINAL",0,0,"Step 375, loss: 3.2195966243743896, step time: 17.00448989868164ms\r\n",,terminal_output +14969,13397326,"TERMINAL",0,0,"Step 376, loss: 2.86336350440979, step time: 17.29869842529297ms\r\nStep 377, loss: 2.708648681640625, step time: 16.920804977416992ms\r\n",,terminal_output +14970,13397436,"TERMINAL",0,0,"Step 378, loss: 2.6682722568511963, step time: 17.211437225341797ms\r\nStep 379, loss: 2.67370343208313, step time: 17.301559448242188ms\r\n",,terminal_output +14971,13397529,"TERMINAL",0,0,"Step 380, loss: 2.6476950645446777, step time: 17.119169235229492ms\r\n",,terminal_output +14972,13397582,"TERMINAL",0,0,"Step 381, loss: 3.026494264602661, step time: 17.010927200317383ms\r\n",,terminal_output +14973,13397677,"TERMINAL",0,0,"Step 382, loss: 2.576516628265381, step time: 17.586469650268555ms\r\n",,terminal_output +14974,13397729,"TERMINAL",0,0,"Step 383, loss: 3.039518356323242, step time: 16.996383666992188ms\r\n",,terminal_output +14975,13397782,"TERMINAL",0,0,"Step 384, loss: 2.687319278717041, step time: 17.08078384399414ms\r\n",,terminal_output +14976,13397888,"TERMINAL",0,0,"Step 385, loss: 2.6989381313323975, step time: 17.223119735717773ms\r\nStep 386, loss: 2.671849489212036, step time: 17.071962356567383ms\r\n",,terminal_output +14977,13397949,"TERMINAL",0,0,"Step 387, loss: 2.9719762802124023, step time: 16.99233055114746ms\r\n",,terminal_output +14978,13398012,"TERMINAL",0,0,"Step 388, loss: 2.643913745880127, step time: 17.427682876586914ms\r\n",,terminal_output +14979,13398128,"TERMINAL",0,0,"Step 389, loss: 2.7584543228149414, step time: 16.95871353149414ms\r\nStep 390, loss: 2.7288079261779785, step time: 17.139673233032227ms\r\n",,terminal_output +14980,13398253,"TERMINAL",0,0,"Step 391, loss: 2.5349323749542236, step time: 17.264842987060547ms\r\nStep 392, loss: 2.5864200592041016, step time: 17.091751098632812ms\r\n",,terminal_output +14981,13398348,"TERMINAL",0,0,"Step 393, loss: 2.63137149810791, step time: 16.945600509643555ms\r\n",,terminal_output +14982,13398402,"TERMINAL",0,0,"Step 394, loss: 2.482508897781372, step time: 17.398357391357422ms\r\n",,terminal_output +14983,13398509,"TERMINAL",0,0,"Step 395, loss: 2.8190722465515137, step time: 16.997575759887695ms\r\nStep 396, loss: 2.5390923023223877, step time: 17.11273193359375ms\r\n",,terminal_output +14984,13398638,"TERMINAL",0,0,"Step 397, loss: 2.4070329666137695, step time: 17.301082611083984ms\r\nStep 398, loss: 2.499530553817749, step time: 17.04692840576172ms\r\n",,terminal_output +14985,13398701,"TERMINAL",0,0,"Step 399, loss: 2.528214454650879, step time: 16.900300979614258ms\r\n",,terminal_output +14986,13398765,"TERMINAL",0,0,"Step 400, loss: 2.51460862159729, step time: 
17.398357391357422ms\r\n",,terminal_output +14987,13398829,"TERMINAL",0,0,"Step 401, loss: 2.5057907104492188, step time: 16.92986488342285ms\r\n",,terminal_output +14988,13398893,"TERMINAL",0,0,"Step 402, loss: 2.557786226272583, step time: 27.78005599975586ms\r\n",,terminal_output +14989,13398958,"TERMINAL",0,0,"Step 403, loss: 2.6223442554473877, step time: 19.95682716369629ms\r\n",,terminal_output +14990,13399022,"TERMINAL",0,0,"Step 404, loss: 2.535396099090576, step time: 17.18616485595703ms\r\n",,terminal_output +14991,13399085,"TERMINAL",0,0,"Step 405, loss: 2.69203782081604, step time: 16.88241958618164ms\r\n",,terminal_output +14992,13399148,"TERMINAL",0,0,"Step 406, loss: 2.7881534099578857, step time: 17.46225357055664ms\r\n",,terminal_output +14993,13399213,"TERMINAL",0,0,"Step 407, loss: 2.524198055267334, step time: 16.83354377746582ms\r\n",,terminal_output +14994,13399274,"TERMINAL",0,0,"Step 408, loss: 2.492323160171509, step time: 17.09914207458496ms\r\n",,terminal_output +14995,13399338,"TERMINAL",0,0,"Step 409, loss: 2.5717971324920654, step time: 17.293214797973633ms\r\n",,terminal_output +14996,13399448,"TERMINAL",0,0,"Step 410, loss: 2.4726715087890625, step time: 19.468307495117188ms\r\n",,terminal_output +14997,13399501,"TERMINAL",0,0,"Step 411, loss: 3.034803867340088, step time: 17.422199249267578ms\r\n",,terminal_output +14998,13399609,"TERMINAL",0,0,"Step 412, loss: 2.5323119163513184, step time: 17.47441291809082ms\r\nStep 413, loss: 3.221040725708008, step time: 16.971588134765625ms\r\n",,terminal_output +14999,13399670,"TERMINAL",0,0,"Step 414, loss: 2.5479190349578857, step time: 17.142772674560547ms\r\n",,terminal_output +15000,13399733,"TERMINAL",0,0,"Step 415, loss: 2.496277093887329, step time: 17.32492446899414ms\r\n",,terminal_output +15001,13399796,"TERMINAL",0,0,"Step 416, loss: 2.5525131225585938, step time: 17.221927642822266ms\r\n",,terminal_output +15002,13399859,"TERMINAL",0,0,"Step 417, loss: 2.896656036376953, step time: 17.035722732543945ms\r\n",,terminal_output +15003,13399923,"TERMINAL",0,0,"Step 418, loss: 2.7767174243927, step time: 17.45772361755371ms\r\n",,terminal_output +15004,13399986,"TERMINAL",0,0,"Step 419, loss: 2.5043563842773438, step time: 16.978740692138672ms\r\n",,terminal_output +15005,13400050,"TERMINAL",0,0,"Step 420, loss: 2.5516443252563477, step time: 17.158985137939453ms\r\n",,terminal_output +15006,13400161,"TERMINAL",0,0,"Step 421, loss: 2.6424903869628906, step time: 17.34757423400879ms\r\nStep 422, loss: 2.6801562309265137, step time: 17.19808578491211ms\r\n",,terminal_output +15007,13400228,"TERMINAL",0,0,"Step 423, loss: 2.7682111263275146, step time: 17.236709594726562ms\r\n",,terminal_output +15008,13400290,"TERMINAL",0,0,"Step 424, loss: 2.6522738933563232, step time: 17.47608184814453ms\r\n",,terminal_output +15009,13400352,"TERMINAL",0,0,"Step 425, loss: 2.571159601211548, step time: 17.096996307373047ms\r\n",,terminal_output +15010,13400422,"TERMINAL",0,0,"Step 426, loss: 2.7599072456359863, step time: 17.233848571777344ms\r\n",,terminal_output +15011,13400516,"TERMINAL",0,0,"Step 427, loss: 2.5875706672668457, step time: 17.315149307250977ms\r\n",,terminal_output +15012,13400568,"TERMINAL",0,0,"Step 428, loss: 2.4307022094726562, step time: 17.154693603515625ms\r\n",,terminal_output +15013,13400621,"TERMINAL",0,0,"Step 429, loss: 2.4339756965637207, step time: 17.090797424316406ms\r\n",,terminal_output +15014,13400729,"TERMINAL",0,0,"Step 430, loss: 2.455442428588867, step time: 
17.534732818603516ms\r\nStep 431, loss: 2.4423623085021973, step time: 17.03023910522461ms\r\n",,terminal_output +15015,13400791,"TERMINAL",0,0,"Step 432, loss: 2.3700246810913086, step time: 17.040252685546875ms\r\n",,terminal_output +15016,13400856,"TERMINAL",0,0,"Step 433, loss: 2.5248143672943115, step time: 17.47441291809082ms\r\n",,terminal_output +15017,13400921,"TERMINAL",0,0,"Step 434, loss: 2.3824315071105957, step time: 17.11559295654297ms\r\n",,terminal_output +15018,13401061,"TERMINAL",0,0,"Step 435, loss: 2.4658384323120117, step time: 17.015695571899414ms\r\nStep 436, loss: 2.6969716548919678, step time: 17.678260803222656ms\r\n",,terminal_output +15019,13401114,"TERMINAL",0,0,"Step 437, loss: 2.4004201889038086, step time: 16.96324348449707ms\r\n",,terminal_output +15020,13401221,"TERMINAL",0,0,"Step 438, loss: 2.362661838531494, step time: 20.160436630249023ms\r\nStep 439, loss: 2.4565682411193848, step time: 17.418861389160156ms\r\n",,terminal_output +15021,13401353,"TERMINAL",0,0,"Step 440, loss: 3.291581153869629, step time: 20.665884017944336ms\r\nStep 441, loss: 2.3342113494873047, step time: 20.902633666992188ms\r\n",,terminal_output +15022,13401417,"TERMINAL",0,0,"Step 442, loss: 2.4827940464019775, step time: 18.916606903076172ms\r\n",,terminal_output +15023,13401481,"TERMINAL",0,0,"Step 443, loss: 3.0424346923828125, step time: 17.580747604370117ms\r\n",,terminal_output +15024,13401544,"TERMINAL",0,0,"Step 444, loss: 2.5492498874664307, step time: 17.55547523498535ms\r\n",,terminal_output +15025,13401611,"TERMINAL",0,0,"Step 445, loss: 2.39846134185791, step time: 29.665470123291016ms\r\n",,terminal_output +15026,13401678,"TERMINAL",0,0,"Step 446, loss: 2.4360272884368896, step time: 30.75432777404785ms\r\n",,terminal_output +15027,13401741,"TERMINAL",0,0,"Step 447, loss: 2.3535494804382324, step time: 19.093036651611328ms\r\n",,terminal_output +15028,13401806,"TERMINAL",0,0,"Step 448, loss: 2.4037227630615234, step time: 17.844200134277344ms\r\n",,terminal_output +15029,13401869,"TERMINAL",0,0,"Step 449, loss: 2.4292798042297363, step time: 17.233610153198242ms\r\n",,terminal_output +15030,13402001,"TERMINAL",0,0,"Step 450, loss: 2.313063383102417, step time: 28.110027313232422ms\r\nStep 451, loss: 2.5831005573272705, step time: 18.81098747253418ms\r\n",,terminal_output +15031,13402078,"TERMINAL",0,0,"Step 452, loss: 2.391248941421509, step time: 17.230510711669922ms\r\n",,terminal_output +15032,13402145,"TERMINAL",0,0,"Step 453, loss: 2.401221990585327, step time: 17.064571380615234ms\r\n",,terminal_output +15033,13402211,"TERMINAL",0,0,"Step 454, loss: 2.638640880584717, step time: 17.637014389038086ms\r\n",,terminal_output +15034,13402265,"TERMINAL",0,0,"Step 455, loss: 2.4219512939453125, step time: 28.957366943359375ms\r\n",,terminal_output +15035,13402334,"TERMINAL",0,0,"Step 456, loss: 2.5538430213928223, step time: 18.92709732055664ms\r\n",,terminal_output +15036,13402391,"TERMINAL",0,0,"Step 457, loss: 2.6400306224823, step time: 17.677783966064453ms\r\n",,terminal_output +15037,13402453,"TERMINAL",0,0,"Step 458, loss: 2.4675042629241943, step time: 17.376184463500977ms\r\n",,terminal_output +15038,13402517,"TERMINAL",0,0,"Step 459, loss: 2.303699016571045, step time: 17.132997512817383ms\r\n",,terminal_output +15039,13402581,"TERMINAL",0,0,"Step 460, loss: 2.347496747970581, step time: 17.589807510375977ms\r\n",,terminal_output +15040,13402648,"TERMINAL",0,0,"Step 461, loss: 2.5106053352355957, step time: 17.08388328552246ms\r\n",,terminal_output 
+15041,13402809,"TERMINAL",0,0,"Step 462, loss: 2.3857994079589844, step time: 17.177820205688477ms\r\n",,terminal_output +15042,13402844,"TERMINAL",0,0,"Step 463, loss: 2.3678219318389893, step time: 17.6699161529541ms\r\nStep 464, loss: 2.3540053367614746, step time: 17.252683639526367ms\r\n",,terminal_output +15043,13402898,"TERMINAL",0,0,"Step 465, loss: 2.561051845550537, step time: 17.285823822021484ms\r\n",,terminal_output +15044,13402994,"TERMINAL",0,0,"Step 466, loss: 2.342806816101074, step time: 17.843961715698242ms\r\n",,terminal_output +15045,13403137,"TERMINAL",0,0,"Step 467, loss: 2.3077621459960938, step time: 17.223358154296875ms\r\nStep 468, loss: 2.436553955078125, step time: 17.364978790283203ms\r\nStep 469, loss: 2.459991693496704, step time: 17.427444458007812ms\r\n",,terminal_output +15046,13403234,"TERMINAL",0,0,"Step 470, loss: 2.3409316539764404, step time: 17.171621322631836ms\r\n",,terminal_output +15047,13403287,"TERMINAL",0,0,"Step 471, loss: 2.265014886856079, step time: 16.965627670288086ms\r\n",,terminal_output +15048,13403394,"TERMINAL",0,0,"Step 472, loss: 2.439819574356079, step time: 17.673492431640625ms\r\nStep 473, loss: 2.2950963973999023, step time: 17.1053409576416ms\r\n",,terminal_output +15049,13403491,"TERMINAL",0,0,"Step 474, loss: 2.244920492172241, step time: 17.316102981567383ms\r\n",,terminal_output +15050,13403545,"TERMINAL",0,0,"Step 475, loss: 2.7778258323669434, step time: 17.49110221862793ms\r\n",,terminal_output +15051,13403654,"TERMINAL",0,0,"Step 476, loss: 2.4904229640960693, step time: 17.121553421020508ms\r\nStep 477, loss: 2.2144131660461426, step time: 17.22121238708496ms\r\n",,terminal_output +15052,13403978,"TERMINAL",0,0,"Step 478, loss: 2.5740060806274414, step time: 304.807186126709ms\r\n",,terminal_output +15053,13404032,"TERMINAL",0,0,"Step 479, loss: 2.6254050731658936, step time: 24.985551834106445ms\r\n",,terminal_output +15054,13404142,"TERMINAL",0,0,"Step 480, loss: 2.341294050216675, step time: 19.845008850097656ms\r\nStep 481, loss: 2.621349811553955, step time: 18.19610595703125ms\r\n",,terminal_output +15055,13404205,"TERMINAL",0,0,"Step 482, loss: 2.5635695457458496, step time: 17.492055892944336ms\r\n",,terminal_output +15056,13404301,"TERMINAL",0,0,"Step 483, loss: 2.318742036819458, step time: 17.10963249206543ms\r\n",,terminal_output +15057,13404354,"TERMINAL",0,0,"Step 484, loss: 2.3892455101013184, step time: 17.97795295715332ms\r\n",,terminal_output +15058,13404461,"TERMINAL",0,0,"Step 485, loss: 2.608309745788574, step time: 17.127275466918945ms\r\nStep 486, loss: 2.466430425643921, step time: 17.464637756347656ms\r\n",,terminal_output +15059,13404580,"TERMINAL",0,0,"Step 487, loss: 2.284583330154419, step time: 17.777204513549805ms\r\nStep 488, loss: 2.224761724472046, step time: 17.42696762084961ms\r\n",,terminal_output +15060,13404644,"TERMINAL",0,0,"Step 489, loss: 2.433544635772705, step time: 18.697738647460938ms\r\n",,terminal_output +15061,13404709,"TERMINAL",0,0,"Step 490, loss: 2.597912549972534, step time: 17.798662185668945ms\r\n",,terminal_output +15062,13404772,"TERMINAL",0,0,"Step 491, loss: 2.303631544113159, step time: 17.80104637145996ms\r\n",,terminal_output +15063,13404835,"TERMINAL",0,0,"Step 492, loss: 2.2765541076660156, step time: 17.4868106842041ms\r\n",,terminal_output +15064,13404897,"TERMINAL",0,0,"Step 493, loss: 2.1811554431915283, step time: 17.4868106842041ms\r\n",,terminal_output +15065,13404958,"TERMINAL",0,0,"Step 494, loss: 2.32300066947937, step time: 
17.423391342163086ms\r\n",,terminal_output +15066,13405020,"TERMINAL",0,0,"Step 495, loss: 2.341677665710449, step time: 17.084360122680664ms\r\n",,terminal_output +15067,13405083,"TERMINAL",0,0,"Step 496, loss: 2.266765832901001, step time: 17.72284507751465ms\r\n",,terminal_output +15068,13405140,"TERMINAL",0,0,"Step 497, loss: 2.4475388526916504, step time: 17.091751098632812ms\r\n",,terminal_output +15069,13405245,"TERMINAL",0,0,"Step 498, loss: 2.159973382949829, step time: 17.214298248291016ms\r\n",,terminal_output +15070,13405296,"TERMINAL",0,0,"Step 499, loss: 2.321068286895752, step time: 17.709016799926758ms\r\n",,terminal_output +15071,13408684,"TERMINAL",0,0,"Step 500, loss: 2.265986442565918, step time: 25.133609771728516ms\r\nStep 501, loss: 2.224968194961548, step time: 25.780439376831055ms\r\nStep 502, loss: 2.5151593685150146, step time: 20.310163497924805ms\r\nStep 503, loss: 2.9289283752441406, step time: 19.58489418029785ms\r\n",,terminal_output +15072,13408751,"TERMINAL",0,0,"Step 504, loss: 2.2663848400115967, step time: 18.765687942504883ms\r\n",,terminal_output +15073,13408847,"TERMINAL",0,0,"Step 505, loss: 2.4017510414123535, step time: 18.59450340270996ms\r\n",,terminal_output +15074,13408955,"TERMINAL",0,0,"Step 506, loss: 2.16784405708313, step time: 18.187999725341797ms\r\nStep 507, loss: 2.4220244884490967, step time: 19.660472869873047ms\r\n",,terminal_output +15075,13409021,"TERMINAL",0,0,"Step 508, loss: 2.408363103866577, step time: 18.184185028076172ms\r\n",,terminal_output +15076,13409135,"TERMINAL",0,0,"Step 509, loss: 2.620131254196167, step time: 18.793582916259766ms\r\nStep 510, loss: 2.377384662628174, step time: 18.25714111328125ms\r\n",,terminal_output +15077,13409200,"TERMINAL",0,0,"Step 511, loss: 2.3612711429595947, step time: 18.435955047607422ms\r\n",,terminal_output +15078,13409262,"TERMINAL",0,0,"Step 512, loss: 2.216810464859009, step time: 18.25881004333496ms\r\n",,terminal_output +15079,13409326,"TERMINAL",0,0,"Step 513, loss: 2.2056965827941895, step time: 17.804622650146484ms\r\n",,terminal_output +15080,13409388,"TERMINAL",0,0,"Step 514, loss: 2.226975202560425, step time: 18.08333396911621ms\r\n",,terminal_output +15081,13409459,"TERMINAL",0,0,"Step 515, loss: 2.8772969245910645, step time: 18.386125564575195ms\r\n",,terminal_output +15082,13409520,"TERMINAL",0,0,"Step 516, loss: 2.208069086074829, step time: 19.730567932128906ms\r\n",,terminal_output +15083,13409582,"TERMINAL",0,0,"Step 517, loss: 2.9549317359924316, step time: 17.95220375061035ms\r\n",,terminal_output +15084,13409647,"TERMINAL",0,0,"Step 518, loss: 2.431490898132324, step time: 18.39303970336914ms\r\n",,terminal_output +15085,13409711,"TERMINAL",0,0,"Step 519, loss: 2.28549861907959, step time: 18.24045181274414ms\r\n",,terminal_output +15086,13409775,"TERMINAL",0,0,"Step 520, loss: 2.3950612545013428, step time: 18.111228942871094ms\r\n",,terminal_output +15087,13409851,"TERMINAL",0,0,"Step 521, loss: 2.260559320449829, step time: 18.024921417236328ms\r\n",,terminal_output +15088,13409906,"TERMINAL",0,0,"Step 522, loss: 2.246382236480713, step time: 16.993045806884766ms\r\n",,terminal_output +15089,13410000,"TERMINAL",0,0,"Step 523, loss: 2.2393672466278076, step time: 18.221616744995117ms\r\n",,terminal_output +15090,13410068,"TERMINAL",0,0,"Step 524, loss: 2.345398187637329, step time: 18.340349197387695ms\r\n",,terminal_output +15091,13410119,"TERMINAL",0,0,"Step 525, loss: 2.622704267501831, step time: 18.097400665283203ms\r\n",,terminal_output 
+15092,13410182,"TERMINAL",0,0,"Step 526, loss: 2.680124282836914, step time: 23.616313934326172ms\r\n",,terminal_output +15093,13410240,"TERMINAL",0,0,"Step 527, loss: 2.301339864730835, step time: 24.703025817871094ms\r\n",,terminal_output +15094,13410300,"TERMINAL",0,0,"Step 528, loss: 2.2122387886047363, step time: 18.523454666137695ms\r\n",,terminal_output +15095,13410362,"TERMINAL",0,0,"Step 529, loss: 2.272254467010498, step time: 18.525362014770508ms\r\n",,terminal_output +15096,13410422,"TERMINAL",0,0,"Step 530, loss: 2.312775135040283, step time: 18.43881607055664ms\r\n",,terminal_output +15097,13410484,"TERMINAL",0,0,"Step 531, loss: 2.1618528366088867, step time: 18.574953079223633ms\r\n",,terminal_output +15098,13410589,"TERMINAL",0,0,"Step 532, loss: 2.416227102279663, step time: 18.227338790893555ms\r\n",,terminal_output +15099,13410939,"TERMINAL",0,0,"Step 533, loss: 2.252876043319702, step time: 312.17098236083984ms\r\nStep 534, loss: 2.1027605533599854, step time: 27.617692947387695ms\r\n",,terminal_output +15100,13411000,"TERMINAL",0,0,"Step 535, loss: 2.563058614730835, step time: 19.864797592163086ms\r\n",,terminal_output +15101,13411077,"TERMINAL",0,0,"Step 536, loss: 2.2687225341796875, step time: 18.316268920898438ms\r\n",,terminal_output +15102,13411128,"TERMINAL",0,0,"Step 537, loss: 2.267047643661499, step time: 17.487764358520508ms\r\n",,terminal_output +15103,13411192,"TERMINAL",0,0,"Step 538, loss: 2.2454466819763184, step time: 20.349979400634766ms\r\n",,terminal_output +15104,13411349,"TERMINAL",0,0,"Step 539, loss: 2.2325618267059326, step time: 18.832921981811523ms\r\nStep 540, loss: 2.1917736530303955, step time: 18.2650089263916ms\r\n",,terminal_output +15105,13411400,"TERMINAL",0,0,"Step 541, loss: 2.3018546104431152, step time: 18.71776580810547ms\r\n",,terminal_output +15106,13411545,"TERMINAL",0,0,"Step 542, loss: 2.181635856628418, step time: 18.253326416015625ms\r\nStep 543, loss: 2.145486354827881, step time: 17.3490047454834ms\r\n",,terminal_output +15107,13411595,"TERMINAL",0,0,"Step 544, loss: 2.190119981765747, step time: 16.86692237854004ms\r\n",,terminal_output +15108,13411692,"TERMINAL",0,0,"Step 545, loss: 2.195983648300171, step time: 18.414974212646484ms\r\n",,terminal_output +15109,13411743,"TERMINAL",0,0,"Step 546, loss: 2.1434197425842285, step time: 17.107725143432617ms\r\n",,terminal_output +15110,13411793,"TERMINAL",0,0,"Step 547, loss: 2.6774563789367676, step time: 18.23139190673828ms\r\n",,terminal_output +15111,13411898,"TERMINAL",0,0,"Step 548, loss: 2.1894657611846924, step time: 18.56374740600586ms\r\nStep 549, loss: 2.389995574951172, step time: 18.537521362304688ms\r\n",,terminal_output +15112,13411962,"TERMINAL",0,0,"Step 550, loss: 2.607349395751953, step time: 18.059968948364258ms\r\n",,terminal_output +15113,13412025,"TERMINAL",0,0,"Step 551, loss: 2.1472995281219482, step time: 18.309831619262695ms\r\n",,terminal_output +15114,13412087,"TERMINAL",0,0,"Step 552, loss: 2.3217666149139404, step time: 18.260478973388672ms\r\n",,terminal_output +15115,13412191,"TERMINAL",0,0,"Step 553, loss: 2.0926873683929443, step time: 22.00937271118164ms\r\n",,terminal_output +15116,13412254,"TERMINAL",0,0,"Step 554, loss: 2.476094961166382, step time: 18.607616424560547ms\r\n",,terminal_output +15117,13412313,"TERMINAL",0,0,"Step 555, loss: 2.0874130725860596, step time: 18.030405044555664ms\r\n",,terminal_output +15118,13412373,"TERMINAL",0,0,"Step 556, loss: 2.061767816543579, step time: 18.123626708984375ms\r\n",,terminal_output 
+15119,13412480,"TERMINAL",0,0,"Step 557, loss: 2.113706588745117, step time: 18.704891204833984ms\r\nStep 558, loss: 2.282102108001709, step time: 18.34273338317871ms\r\n",,terminal_output +15120,13412576,"TERMINAL",0,0,"Step 559, loss: 2.2730209827423096, step time: 18.337726593017578ms\r\n",,terminal_output +15121,13412629,"TERMINAL",0,0,"Step 560, loss: 2.125591278076172, step time: 18.62311363220215ms\r\n",,terminal_output +15122,13412734,"TERMINAL",0,0,"Step 561, loss: 2.384514331817627, step time: 18.32747459411621ms\r\nStep 562, loss: 2.1161997318267822, step time: 17.844438552856445ms\r\n",,terminal_output +15123,13412831,"TERMINAL",0,0,"Step 563, loss: 2.228926420211792, step time: 17.923355102539062ms\r\n",,terminal_output +15124,13412883,"TERMINAL",0,0,"Step 564, loss: 2.2593271732330322, step time: 17.83275604248047ms\r\n",,terminal_output +15125,13412991,"TERMINAL",0,0,"Step 565, loss: 2.201507568359375, step time: 17.76266098022461ms\r\nStep 566, loss: 2.087207317352295, step time: 17.759323120117188ms\r\n",,terminal_output +15126,13413054,"TERMINAL",0,0,"Step 567, loss: 2.1013309955596924, step time: 18.57280731201172ms\r\n",,terminal_output +15127,13413116,"TERMINAL",0,0,"Step 568, loss: 2.2404279708862305, step time: 18.571138381958008ms\r\n",,terminal_output +15128,13413176,"TERMINAL",0,0,"Step 569, loss: 2.135443925857544, step time: 18.250226974487305ms\r\n",,terminal_output +15129,13413311,"TERMINAL",0,0,"Step 570, loss: 2.2734837532043457, step time: 17.88043975830078ms\r\nStep 571, loss: 2.153172731399536, step time: 17.7156925201416ms\r\n",,terminal_output +15130,13413376,"TERMINAL",0,0,"Step 572, loss: 2.0843379497528076, step time: 17.475366592407227ms\r\n",,terminal_output +15131,13413438,"TERMINAL",0,0,"Step 573, loss: 2.4019038677215576, step time: 18.160104751586914ms\r\n",,terminal_output +15132,13413501,"TERMINAL",0,0,"Step 574, loss: 2.1210949420928955, step time: 18.088579177856445ms\r\n",,terminal_output +15133,13413564,"TERMINAL",0,0,"Step 575, loss: 2.0391685962677, step time: 18.047571182250977ms\r\n",,terminal_output +15134,13413736,"TERMINAL",0,0,"Step 576, loss: 2.735412120819092, step time: 17.691612243652344ms\r\n",,terminal_output +15135,13413737,"TERMINAL",0,0,"Step 577, loss: 2.076709747314453, step time: 17.919063568115234ms\r\n",,terminal_output +15136,13413851,"TERMINAL",0,0,"Step 578, loss: 2.365447998046875, step time: 17.40121841430664ms\r\nStep 579, loss: 2.036189556121826, step time: 19.981861114501953ms\r\n",,terminal_output +15137,13413902,"TERMINAL",0,0,"Step 580, loss: 2.056488037109375, step time: 18.26167106628418ms\r\n",,terminal_output +15138,13414006,"TERMINAL",0,0,"Step 581, loss: 2.428299903869629, step time: 18.118858337402344ms\r\nStep 582, loss: 2.0270118713378906, step time: 17.68016815185547ms\r\n",,terminal_output +15139,13414129,"TERMINAL",0,0,"Step 583, loss: 2.0227110385894775, step time: 17.945289611816406ms\r\nStep 584, loss: 2.038361072540283, step time: 17.44818687438965ms\r\n",,terminal_output +15140,13414193,"TERMINAL",0,0,"Step 585, loss: 2.4449102878570557, step time: 18.13650131225586ms\r\n",,terminal_output +15141,13414260,"TERMINAL",0,0,"Step 586, loss: 2.0881845951080322, step time: 18.04375648498535ms\r\n",,terminal_output +15142,13414321,"TERMINAL",0,0,"Step 587, loss: 1.861844539642334, step time: 18.02849769592285ms\r\n",,terminal_output +15143,13414385,"TERMINAL",0,0,"Step 588, loss: 2.1289775371551514, step time: 17.658233642578125ms\r\n",,terminal_output +15144,13414477,"TERMINAL",0,0,"Step 589, 
loss: 2.335169553756714, step time: 17.846107482910156ms\r\n",,terminal_output +15145,13414529,"TERMINAL",0,0,"Step 590, loss: 2.2446842193603516, step time: 17.52161979675293ms\r\n",,terminal_output +15146,13414621,"TERMINAL",0,0,"Step 591, loss: 1.8807215690612793, step time: 18.124103546142578ms\r\n",,terminal_output +15147,13414674,"TERMINAL",0,0,"Step 592, loss: 2.1197257041931152, step time: 21.062135696411133ms\r\n",,terminal_output +15148,13414777,"TERMINAL",0,0,"Step 593, loss: 2.2853844165802, step time: 18.238306045532227ms\r\nStep 594, loss: 2.525700092315674, step time: 17.58408546447754ms\r\n",,terminal_output +15149,13414875,"TERMINAL",0,0,"Step 595, loss: 2.213250160217285, step time: 17.809152603149414ms\r\n",,terminal_output +15150,13414926,"TERMINAL",0,0,"Step 596, loss: 2.2684385776519775, step time: 17.45462417602539ms\r\n",,terminal_output +15151,13415030,"TERMINAL",0,0,"Step 597, loss: 2.0915777683258057, step time: 18.375873565673828ms\r\nStep 598, loss: 2.4110426902770996, step time: 18.011808395385742ms\r\n",,terminal_output +15152,13415151,"TERMINAL",0,0,"Step 599, loss: 2.4153289794921875, step time: 18.159151077270508ms\r\nStep 600, loss: 1.9960435628890991, step time: 17.656326293945312ms\r\n",,terminal_output +15153,13415213,"TERMINAL",0,0,"Step 601, loss: 2.0543227195739746, step time: 18.06163787841797ms\r\n",,terminal_output +15154,13415314,"TERMINAL",0,0,"Step 602, loss: 2.2168779373168945, step time: 17.493247985839844ms\r\n",,terminal_output +15155,13415374,"TERMINAL",0,0,"Step 603, loss: 2.4027082920074463, step time: 18.155574798583984ms\r\n",,terminal_output +15156,13415435,"TERMINAL",0,0,"Step 604, loss: 2.10211181640625, step time: 17.94743537902832ms\r\n",,terminal_output +15157,13415486,"TERMINAL",0,0,"Step 605, loss: 2.0574333667755127, step time: 18.016576766967773ms\r\n",,terminal_output +15158,13415548,"TERMINAL",0,0,"Step 606, loss: 1.9874417781829834, step time: 17.797470092773438ms\r\n",,terminal_output +15159,13415609,"TERMINAL",0,0,"Step 607, loss: 2.225358247756958, step time: 18.00251007080078ms\r\n",,terminal_output +15160,13415726,"TERMINAL",0,0,"Step 608, loss: 2.1687397956848145, step time: 17.58098602294922ms\r\nStep 609, loss: 2.4443700313568115, step time: 18.16582679748535ms\r\n",,terminal_output +15161,13415821,"TERMINAL",0,0,"Step 610, loss: 2.127899169921875, step time: 17.9445743560791ms\r\n",,terminal_output +15162,13415872,"TERMINAL",0,0,"Step 611, loss: 2.043548345565796, step time: 18.00847053527832ms\r\n",,terminal_output +15163,13415976,"TERMINAL",0,0,"Step 612, loss: 2.043325424194336, step time: 17.939329147338867ms\r\nStep 613, loss: 2.2081356048583984, step time: 17.99178123474121ms\r\n",,terminal_output +15164,13416100,"TERMINAL",0,0,"Step 614, loss: 2.0309457778930664, step time: 17.467737197875977ms\r\nStep 615, loss: 2.4054183959960938, step time: 18.152713775634766ms\r\n",,terminal_output +15165,13416163,"TERMINAL",0,0,"Step 616, loss: 2.0543406009674072, step time: 18.596649169921875ms\r\n",,terminal_output +15166,13416226,"TERMINAL",0,0,"Step 617, loss: 2.043178081512451, step time: 18.294095993041992ms\r\n",,terminal_output +15167,13416290,"TERMINAL",0,0,"Step 618, loss: 2.0563111305236816, step time: 17.711639404296875ms\r\n",,terminal_output +15168,13416353,"TERMINAL",0,0,"Step 619, loss: 1.9896612167358398, step time: 17.84539222717285ms\r\n",,terminal_output +15169,13416417,"TERMINAL",0,0,"Step 620, loss: 2.1181411743164062, step time: 17.500638961791992ms\r\n",,terminal_output 
+15170,13416480,"TERMINAL",0,0,"Step 621, loss: 2.1197056770324707, step time: 18.295764923095703ms\r\n",,terminal_output +15171,13416544,"TERMINAL",0,0,"Step 622, loss: 1.912917971611023, step time: 17.87090301513672ms\r\n",,terminal_output +15172,13416607,"TERMINAL",0,0,"Step 623, loss: 3.2828357219696045, step time: 18.045902252197266ms\r\n",,terminal_output +15173,13416670,"TERMINAL",0,0,"Step 624, loss: 2.213414192199707, step time: 17.63319969177246ms\r\n",,terminal_output +15174,13416735,"TERMINAL",0,0,"Step 625, loss: 2.200937271118164, step time: 17.731189727783203ms\r\n",,terminal_output +15175,13416796,"TERMINAL",0,0,"Step 626, loss: 2.083055019378662, step time: 17.4715518951416ms\r\n",,terminal_output +15176,13416888,"TERMINAL",0,0,"Step 627, loss: 2.1390933990478516, step time: 19.788026809692383ms\r\n",,terminal_output +15177,13416939,"TERMINAL",0,0,"Step 628, loss: 1.8601056337356567, step time: 18.812894821166992ms\r\n",,terminal_output +15178,13417031,"TERMINAL",0,0,"Step 629, loss: 2.3861682415008545, step time: 18.54109764099121ms\r\n",,terminal_output +15179,13417082,"TERMINAL",0,0,"Step 630, loss: 1.995255708694458, step time: 17.72475242614746ms\r\n",,terminal_output +15180,13417134,"TERMINAL",0,0,"Step 631, loss: 2.1136128902435303, step time: 18.110275268554688ms\r\n",,terminal_output +15181,13417279,"TERMINAL",0,0,"Step 632, loss: 2.0174121856689453, step time: 17.635345458984375ms\r\nStep 633, loss: 2.326800584793091, step time: 18.168926239013672ms\r\n",,terminal_output +15182,13417330,"TERMINAL",0,0,"Step 634, loss: 1.9848387241363525, step time: 17.895221710205078ms\r\n",,terminal_output +15183,13417433,"TERMINAL",0,0,"Step 635, loss: 1.9674205780029297, step time: 18.024682998657227ms\r\nStep 636, loss: 2.0000877380371094, step time: 17.989397048950195ms\r\n",,terminal_output +15184,13417525,"TERMINAL",0,0,"Step 637, loss: 1.9239442348480225, step time: 18.10932159423828ms\r\n",,terminal_output +15185,13417576,"TERMINAL",0,0,"Step 638, loss: 2.1518845558166504, step time: 27.240991592407227ms\r\n",,terminal_output +15186,13417669,"TERMINAL",0,0,"Step 639, loss: 1.926207184791565, step time: 20.520448684692383ms\r\n",,terminal_output +15187,13417720,"TERMINAL",0,0,"Step 640, loss: 2.250783681869507, step time: 18.256187438964844ms\r\n",,terminal_output +15188,13417785,"TERMINAL",0,0,"Step 641, loss: 2.1670053005218506, step time: 18.588781356811523ms\r\n",,terminal_output +15189,13417849,"TERMINAL",0,0,"Step 642, loss: 2.240570068359375, step time: 17.9288387298584ms\r\n",,terminal_output +15190,13417909,"TERMINAL",0,0,"Step 643, loss: 1.8921499252319336, step time: 18.2647705078125ms\r\n",,terminal_output +15191,13417974,"TERMINAL",0,0,"Step 644, loss: 2.222476005554199, step time: 17.59624481201172ms\r\n",,terminal_output +15192,13418030,"TERMINAL",0,0,"Step 645, loss: 2.979797840118408, step time: 18.172740936279297ms\r\n",,terminal_output +15193,13418093,"TERMINAL",0,0,"Step 646, loss: 2.1880903244018555, step time: 17.983675003051758ms\r\n",,terminal_output +15194,13418153,"TERMINAL",0,0,"Step 647, loss: 2.5104992389678955, step time: 18.224716186523438ms\r\n",,terminal_output +15195,13418214,"TERMINAL",0,0,"Step 648, loss: 2.0539426803588867, step time: 17.777442932128906ms\r\n",,terminal_output +15196,13418273,"TERMINAL",0,0,"Step 649, loss: 2.031144142150879, step time: 18.015146255493164ms\r\n",,terminal_output +15197,13418332,"TERMINAL",0,0,"Step 650, loss: 1.8932195901870728, step time: 23.54574203491211ms\r\n",,terminal_output 
+15198,13418395,"TERMINAL",0,0,"Step 651, loss: 2.0286905765533447, step time: 19.194841384887695ms\r\n",,terminal_output +15199,13418457,"TERMINAL",0,0,"Step 652, loss: 2.0111992359161377, step time: 18.4173583984375ms\r\n",,terminal_output +15200,13418517,"TERMINAL",0,0,"Step 653, loss: 2.2121424674987793, step time: 18.61286163330078ms\r\n",,terminal_output +15201,13418590,"TERMINAL",0,0,"Step 654, loss: 2.0539095401763916, step time: 17.853975296020508ms\r\n",,terminal_output +15202,13418642,"TERMINAL",0,0,"Step 655, loss: 1.8932158946990967, step time: 17.861604690551758ms\r\n",,terminal_output +15203,13418706,"TERMINAL",0,0,"Step 656, loss: 2.4993338584899902, step time: 17.581939697265625ms\r\n",,terminal_output +15204,13418833,"TERMINAL",0,0,"Step 657, loss: 2.0536117553710938, step time: 18.4323787689209ms\r\nStep 658, loss: 1.9445878267288208, step time: 17.924070358276367ms\r\n",,terminal_output +15205,13418937,"TERMINAL",0,0,"Step 659, loss: 2.0569639205932617, step time: 18.254518508911133ms\r\n",,terminal_output +15206,13418989,"TERMINAL",0,0,"Step 660, loss: 2.2000677585601807, step time: 28.9154052734375ms\r\n",,terminal_output +15207,13419095,"TERMINAL",0,0,"Step 661, loss: 2.0190510749816895, step time: 19.610166549682617ms\r\nStep 662, loss: 1.9674382209777832, step time: 17.8377628326416ms\r\n",,terminal_output +15208,13419159,"TERMINAL",0,0,"Step 663, loss: 1.9021403789520264, step time: 18.01919937133789ms\r\n",,terminal_output +15209,13419254,"TERMINAL",0,0,"Step 664, loss: 2.017223834991455, step time: 18.27216148376465ms\r\n",,terminal_output +15210,13419361,"TERMINAL",0,0,"Step 665, loss: 1.9197534322738647, step time: 18.329620361328125ms\r\nStep 666, loss: 1.8156685829162598, step time: 17.69089698791504ms\r\n",,terminal_output +15211,13419421,"TERMINAL",0,0,"Step 667, loss: 1.8787659406661987, step time: 17.894983291625977ms\r\n",,terminal_output +15212,13419773,"TERMINAL",0,0,"Step 668, loss: 2.5785789489746094, step time: 357.36894607543945ms\r\n",,terminal_output +15213,13419839,"TERMINAL",0,0,"Step 669, loss: 2.04351806640625, step time: 25.43926239013672ms\r\n",,terminal_output +15214,13419946,"TERMINAL",0,0,"Step 670, loss: 2.674504041671753, step time: 20.812273025512695ms\r\n",,terminal_output +15215,13420013,"TERMINAL",0,0,"Step 671, loss: 2.965587854385376, step time: 18.970727920532227ms\r\n",,terminal_output +15216,13420119,"TERMINAL",0,0,"Step 672, loss: 1.8286573886871338, step time: 18.265724182128906ms\r\nStep 673, loss: 2.4450247287750244, step time: 18.029451370239258ms\r\n",,terminal_output +15217,13420185,"TERMINAL",0,0,"Step 674, loss: 1.9558099508285522, step time: 17.637968063354492ms\r\n",,terminal_output +15218,13420246,"TERMINAL",0,0,"Step 675, loss: 2.270803451538086, step time: 18.12124252319336ms\r\n",,terminal_output +15219,13420313,"TERMINAL",0,0,"Step 676, loss: 2.5863780975341797, step time: 18.230199813842773ms\r\n",,terminal_output +15220,13420376,"TERMINAL",0,0,"Step 677, loss: 1.846303105354309, step time: 19.84429359436035ms\r\n",,terminal_output +15221,13420439,"TERMINAL",0,0,"Step 678, loss: 1.9171779155731201, step time: 17.99178123474121ms\r\n",,terminal_output +15222,13420502,"TERMINAL",0,0,"Step 679, loss: 2.2312185764312744, step time: 17.952680587768555ms\r\n",,terminal_output +15223,13420565,"TERMINAL",0,0,"Step 680, loss: 2.2542855739593506, step time: 17.81463623046875ms\r\n",,terminal_output +15224,13420627,"TERMINAL",0,0,"Step 681, loss: 2.1776390075683594, step time: 18.253087997436523ms\r\n",,terminal_output 
+15225,13420688,"TERMINAL",0,0,"Step 682, loss: 1.9869128465652466, step time: 18.15938949584961ms\r\n",,terminal_output +15226,13420749,"TERMINAL",0,0,"Step 683, loss: 1.8330434560775757, step time: 18.329858779907227ms\r\n",,terminal_output +15227,13420812,"TERMINAL",0,0,"Step 684, loss: 1.8752481937408447, step time: 17.757654190063477ms\r\n",,terminal_output +15228,13420872,"TERMINAL",0,0,"Step 685, loss: 2.1384570598602295, step time: 18.026351928710938ms\r\n",,terminal_output +15229,13420993,"TERMINAL",0,0,"Step 686, loss: 1.940635323524475, step time: 20.563364028930664ms\r\nStep 687, loss: 1.8646999597549438, step time: 17.995119094848633ms\r\n",,terminal_output +15230,13421058,"TERMINAL",0,0,"Step 688, loss: 1.8531017303466797, step time: 17.912626266479492ms\r\n",,terminal_output +15231,13421121,"TERMINAL",0,0,"Step 689, loss: 1.9036328792572021, step time: 22.176265716552734ms\r\n",,terminal_output +15232,13421181,"TERMINAL",0,0,"Step 690, loss: 1.9150969982147217, step time: 19.376754760742188ms\r\n",,terminal_output +15233,13421244,"TERMINAL",0,0,"Step 691, loss: 1.8888839483261108, step time: 18.541574478149414ms\r\n",,terminal_output +15234,13421304,"TERMINAL",0,0,"Step 692, loss: 1.8524110317230225, step time: 19.28257942199707ms\r\n",,terminal_output +15235,13421369,"TERMINAL",0,0,"Step 693, loss: 2.123549699783325, step time: 19.82259750366211ms\r\n",,terminal_output +15236,13421462,"TERMINAL",0,0,"Step 694, loss: 2.013396978378296, step time: 18.674135208129883ms\r\n",,terminal_output +15237,13421513,"TERMINAL",0,0,"Step 695, loss: 1.8609421253204346, step time: 19.02937889099121ms\r\n",,terminal_output +15238,13421608,"TERMINAL",0,0,"Step 696, loss: 1.9273022413253784, step time: 19.54174041748047ms\r\n",,terminal_output +15239,13421688,"TERMINAL",0,0,"Step 697, loss: 1.8978995084762573, step time: 18.96047592163086ms\r\nStep 698, loss: 1.8521051406860352, step time: 18.0511474609375ms\r\n",,terminal_output +15240,13421790,"TERMINAL",0,0,"Step 699, loss: 2.059140682220459, step time: 18.428802490234375ms\r\n",,terminal_output +15241,13421842,"TERMINAL",0,0,"Step 700, loss: 1.84236478805542, step time: 18.114328384399414ms\r\n",,terminal_output +15242,13421947,"TERMINAL",0,0,"Step 701, loss: 2.1049001216888428, step time: 18.244504928588867ms\r\nStep 702, loss: 2.0868937969207764, step time: 17.876863479614258ms\r\n",,terminal_output +15243,13422040,"TERMINAL",0,0,"Step 703, loss: 1.982433795928955, step time: 18.043041229248047ms\r\n",,terminal_output +15244,13422132,"TERMINAL",0,0,"Step 704, loss: 1.7466275691986084, step time: 17.552614212036133ms\r\nStep 705, loss: 1.7741270065307617, step time: 18.268346786499023ms\r\n",,terminal_output +15245,13422228,"TERMINAL",0,0,"Step 706, loss: 1.7836813926696777, step time: 18.80955696105957ms\r\n",,terminal_output +15246,13422288,"TERMINAL",0,0,"Step 707, loss: 2.0241451263427734, step time: 18.69821548461914ms\r\n",,terminal_output +15247,13422350,"TERMINAL",0,0,"Step 708, loss: 1.9557350873947144, step time: 17.818450927734375ms\r\n",,terminal_output +15248,13422456,"TERMINAL",0,0,"Step 709, loss: 1.9373877048492432, step time: 17.927885055541992ms\r\nStep 710, loss: 1.8457754850387573, step time: 17.89069175720215ms\r\n",,terminal_output +15249,13422549,"TERMINAL",0,0,"Step 711, loss: 1.8708717823028564, step time: 18.29051971435547ms\r\n",,terminal_output +15250,13422604,"TERMINAL",0,0,"Step 712, loss: 2.640681505203247, step time: 18.213987350463867ms\r\n",,terminal_output +15251,13422711,"TERMINAL",0,0,"Step 713, loss: 
1.8495032787322998, step time: 18.233060836791992ms\r\nStep 714, loss: 2.2258622646331787, step time: 17.744779586791992ms\r\n",,terminal_output +15252,13422775,"TERMINAL",0,0,"Step 715, loss: 2.0098164081573486, step time: 19.14358139038086ms\r\n",,terminal_output +15253,13422844,"TERMINAL",0,0,"Step 716, loss: 1.849663257598877, step time: 18.43404769897461ms\r\n",,terminal_output +15254,13422903,"TERMINAL",0,0,"Step 717, loss: 1.8582276105880737, step time: 18.403291702270508ms\r\n",,terminal_output +15255,13423020,"TERMINAL",0,0,"Step 718, loss: 1.9927958250045776, step time: 18.29671859741211ms\r\nStep 719, loss: 2.0824453830718994, step time: 18.46003532409668ms\r\n",,terminal_output +15256,13423084,"TERMINAL",0,0,"Step 720, loss: 2.1954426765441895, step time: 18.1276798248291ms\r\n",,terminal_output +15257,13423207,"TERMINAL",0,0,"Step 721, loss: 1.8024818897247314, step time: 17.9898738861084ms\r\nStep 722, loss: 1.8498620986938477, step time: 17.51255989074707ms\r\n",,terminal_output +15258,13423276,"TERMINAL",0,0,"Step 723, loss: 1.8580881357192993, step time: 18.39756965637207ms\r\n",,terminal_output +15259,13423331,"TERMINAL",0,0,"Step 724, loss: 1.8405601978302002, step time: 27.466535568237305ms\r\n",,terminal_output +15260,13423409,"TERMINAL",0,0,"Step 725, loss: 2.276750326156616, step time: 22.27044105529785ms\r\n",,terminal_output +15261,13423477,"TERMINAL",0,0,"Step 726, loss: 2.0979483127593994, step time: 18.353939056396484ms\r\n",,terminal_output +15262,13423600,"TERMINAL",0,0,"Step 727, loss: 1.8070836067199707, step time: 18.02515983581543ms\r\nStep 728, loss: 1.773024082183838, step time: 18.45526695251465ms\r\n",,terminal_output +15263,13423653,"TERMINAL",0,0,"Step 729, loss: 1.859239935874939, step time: 18.50605010986328ms\r\n",,terminal_output +15264,13423748,"TERMINAL",0,0,"Step 730, loss: 1.8838143348693848, step time: 18.103837966918945ms\r\n",,terminal_output +15265,13423852,"TERMINAL",0,0,"Step 731, loss: 1.8584985733032227, step time: 18.491744995117188ms\r\nStep 732, loss: 2.335444211959839, step time: 17.90022850036621ms\r\n",,terminal_output +15266,13423947,"TERMINAL",0,0,"Step 733, loss: 2.0050344467163086, step time: 17.901182174682617ms\r\n",,terminal_output +15267,13424001,"TERMINAL",0,0,"Step 734, loss: 2.065429210662842, step time: 19.33431625366211ms\r\n",,terminal_output +15268,13424108,"TERMINAL",0,0,"Step 735, loss: 1.8927342891693115, step time: 20.233869552612305ms\r\nStep 736, loss: 1.711895227432251, step time: 18.364429473876953ms\r\n",,terminal_output +15269,13424173,"TERMINAL",0,0,"Step 737, loss: 1.9318441152572632, step time: 18.67389678955078ms\r\n",,terminal_output +15270,13424231,"TERMINAL",0,0,"Step 738, loss: 2.089529037475586, step time: 18.121957778930664ms\r\n",,terminal_output +15271,13424292,"TERMINAL",0,0,"Step 739, loss: 1.8341147899627686, step time: 18.17154884338379ms\r\n",,terminal_output +15272,13424355,"TERMINAL",0,0,"Step 740, loss: 1.8318970203399658, step time: 17.671585083007812ms\r\n",,terminal_output +15273,13424417,"TERMINAL",0,0,"Step 741, loss: 1.7852226495742798, step time: 18.209457397460938ms\r\n",,terminal_output +15274,13424481,"TERMINAL",0,0,"Step 742, loss: 1.7586753368377686, step time: 17.9598331451416ms\r\n",,terminal_output +15275,13424545,"TERMINAL",0,0,"Step 743, loss: 1.8576409816741943, step time: 18.677234649658203ms\r\n",,terminal_output +15276,13424607,"TERMINAL",0,0,"Step 744, loss: 1.7974803447723389, step time: 17.824649810791016ms\r\n",,terminal_output 
+15277,13424672,"TERMINAL",0,0,"Step 745, loss: 1.8509160280227661, step time: 17.710447311401367ms\r\n",,terminal_output +15278,13424731,"TERMINAL",0,0,"Step 746, loss: 1.8383100032806396, step time: 17.510652542114258ms\r\n",,terminal_output +15279,13424795,"TERMINAL",0,0,"Step 747, loss: 2.2385411262512207, step time: 18.34893226623535ms\r\n",,terminal_output +15280,13424892,"TERMINAL",0,0,"Step 748, loss: 2.055950403213501, step time: 17.99631118774414ms\r\n",,terminal_output +15281,13424952,"TERMINAL",0,0,"Step 749, loss: 1.7885725498199463, step time: 18.04327964782715ms\r\n",,terminal_output +15282,13425013,"TERMINAL",0,0,"Step 750, loss: 1.8672853708267212, step time: 17.972230911254883ms\r\n",,terminal_output +15283,13425074,"TERMINAL",0,0,"Step 751, loss: 1.7880280017852783, step time: 17.59052276611328ms\r\n",,terminal_output +15284,13425127,"TERMINAL",0,0,"Step 752, loss: 1.9283233880996704, step time: 17.61317253112793ms\r\n",,terminal_output +15285,13425224,"TERMINAL",0,0,"Step 753, loss: 1.7573844194412231, step time: 18.06187629699707ms\r\n",,terminal_output +15286,13425303,"TERMINAL",0,0,"Step 754, loss: 2.19704532623291, step time: 17.84658432006836ms\r\nStep 755, loss: 1.7681881189346313, step time: 18.319368362426758ms\r\n",,terminal_output +15287,13425366,"TERMINAL",0,0,"Step 756, loss: 1.6971651315689087, step time: 17.623186111450195ms\r\n",,terminal_output +15288,13425430,"TERMINAL",0,0,"Step 757, loss: 1.8466389179229736, step time: 17.79770851135254ms\r\n",,terminal_output +15289,13425493,"TERMINAL",0,0,"Step 758, loss: 1.667635440826416, step time: 17.48347282409668ms\r\n",,terminal_output +15290,13425587,"TERMINAL",0,0,"Step 759, loss: 2.1849122047424316, step time: 17.90452003479004ms\r\n",,terminal_output +15291,13425641,"TERMINAL",0,0,"Step 760, loss: 1.770949125289917, step time: 17.882823944091797ms\r\n",,terminal_output +15292,13425747,"TERMINAL",0,0,"Step 761, loss: 1.7238167524337769, step time: 18.03874969482422ms\r\nStep 762, loss: 1.6160229444503784, step time: 17.7004337310791ms\r\n",,terminal_output +15293,13425843,"TERMINAL",0,0,"Step 763, loss: 2.284759759902954, step time: 17.7462100982666ms\r\n",,terminal_output +15294,13425896,"TERMINAL",0,0,"Step 764, loss: 1.7485592365264893, step time: 17.478227615356445ms\r\n",,terminal_output +15295,13426006,"TERMINAL",0,0,"Step 765, loss: 1.784379005432129, step time: 17.834186553955078ms\r\nStep 766, loss: 1.7785087823867798, step time: 17.945289611816406ms\r\n",,terminal_output +15296,13426126,"TERMINAL",0,0,"Step 767, loss: 2.170199394226074, step time: 17.9135799407959ms\r\nStep 768, loss: 1.7367463111877441, step time: 17.651796340942383ms\r\n",,terminal_output +15297,13426190,"TERMINAL",0,0,"Step 769, loss: 1.921410322189331, step time: 17.682313919067383ms\r\n",,terminal_output +15298,13426294,"TERMINAL",0,0,"Step 770, loss: 0.0006417360855266452, step time: 17.467021942138672ms\r\n",,terminal_output +15299,13426347,"TERMINAL",0,0,"Step 771, loss: 1.585180640220642, step time: 18.049001693725586ms\r\n",,terminal_output +15300,13426457,"TERMINAL",0,0,"Step 772, loss: 1.683385968208313, step time: 17.804622650146484ms\r\nStep 773, loss: 1.738686442375183, step time: 17.937660217285156ms\r\n",,terminal_output +15301,13426605,"TERMINAL",0,0,"Step 774, loss: 2.0941991806030273, step time: 17.828941345214844ms\r\nStep 775, loss: 2.050546169281006, step time: 17.879962921142578ms\r\n",,terminal_output +15302,13426658,"TERMINAL",0,0,"Step 776, loss: 1.876831293106079, step time: 
18.63551139831543ms\r\n",,terminal_output +15303,13426763,"TERMINAL",0,0,"Step 777, loss: 1.9490036964416504, step time: 18.098831176757812ms\r\nStep 778, loss: 1.7706964015960693, step time: 17.56739616394043ms\r\n",,terminal_output +15304,13426855,"TERMINAL",0,0,"Step 779, loss: 2.0361225605010986, step time: 18.49532127380371ms\r\n",,terminal_output +15305,13426907,"TERMINAL",0,0,"Step 780, loss: 1.6745615005493164, step time: 17.825841903686523ms\r\n",,terminal_output +15306,13427004,"TERMINAL",0,0,"Step 781, loss: 1.615854263305664, step time: 17.194032669067383ms\r\n",,terminal_output +15307,13427076,"TERMINAL",0,0,"Step 782, loss: 1.6785483360290527, step time: 17.169952392578125ms\r\nStep 783, loss: 2.0658886432647705, step time: 16.972064971923828ms\r\n",,terminal_output +15308,13427142,"TERMINAL",0,0,"Step 784, loss: 1.5796865224838257, step time: 16.736268997192383ms\r\n",,terminal_output +15309,13427207,"TERMINAL",0,0,"Step 785, loss: 1.6900150775909424, step time: 17.57335662841797ms\r\n",,terminal_output +15310,13427270,"TERMINAL",0,0,"Step 786, loss: 1.671752691268921, step time: 17.853498458862305ms\r\n",,terminal_output +15311,13427333,"TERMINAL",0,0,"Step 787, loss: 1.8555678129196167, step time: 18.04947853088379ms\r\n",,terminal_output +15312,13427396,"TERMINAL",0,0,"Step 788, loss: 1.7066988945007324, step time: 18.519878387451172ms\r\n",,terminal_output +15313,13427747,"TERMINAL",0,0,"Step 789, loss: 2.1794421672821045, step time: 355.1476001739502ms\r\n",,terminal_output +15314,13427858,"TERMINAL",0,0,"Step 790, loss: 1.657688856124878, step time: 24.173974990844727ms\r\n",,terminal_output +15315,13427909,"TERMINAL",0,0,"Step 791, loss: 1.702824354171753, step time: 21.001815795898438ms\r\n",,terminal_output +15316,13428016,"TERMINAL",0,0,"Step 792, loss: 2.3557794094085693, step time: 19.257545471191406ms\r\nStep 793, loss: 1.6765742301940918, step time: 17.518043518066406ms\r\n",,terminal_output +15317,13428138,"TERMINAL",0,0,"Step 794, loss: 1.7111904621124268, step time: 18.053054809570312ms\r\nStep 795, loss: 1.7452750205993652, step time: 19.10400390625ms\r\n",,terminal_output +15318,13428200,"TERMINAL",0,0,"Step 796, loss: 1.5913126468658447, step time: 18.09859275817871ms\r\n",,terminal_output +15319,13428302,"TERMINAL",0,0,"Step 797, loss: 1.6710288524627686, step time: 17.667293548583984ms\r\n",,terminal_output +15320,13428395,"TERMINAL",0,0,"Step 798, loss: 1.682114839553833, step time: 17.851591110229492ms\r\nStep 799, loss: 1.8543891906738281, step time: 17.227888107299805ms\r\n",,terminal_output +15321,13428462,"TERMINAL",0,0,"Step 800, loss: 2.127826452255249, step time: 17.100095748901367ms\r\n",,terminal_output +15322,13428518,"TERMINAL",0,0,"Step 801, loss: 1.623083233833313, step time: 18.198728561401367ms\r\n",,terminal_output +15323,13428592,"TERMINAL",0,0,"Step 802, loss: 3.3078413009643555, step time: 17.54903793334961ms\r\n",,terminal_output +15324,13428644,"TERMINAL",0,0,"Step 803, loss: 1.608664631843567, step time: 17.278432846069336ms\r\n",,terminal_output +15325,13428711,"TERMINAL",0,0,"Step 804, loss: 1.8233047723770142, step time: 17.307758331298828ms\r\n",,terminal_output +15326,13428826,"TERMINAL",0,0,"Step 805, loss: 2.1199536323547363, step time: 17.03357696533203ms\r\nStep 806, loss: 2.3029918670654297, step time: 16.994714736938477ms\r\n",,terminal_output +15327,13428925,"TERMINAL",0,0,"Step 807, loss: 1.6137266159057617, step time: 18.21160316467285ms\r\n",,terminal_output +15328,13428977,"TERMINAL",0,0,"Step 808, loss: 
1.852910041809082, step time: 17.751216888427734ms\r\n",,terminal_output +15329,13429085,"TERMINAL",0,0,"Step 809, loss: 1.8164161443710327, step time: 17.03166961669922ms\r\nStep 810, loss: 1.7955347299575806, step time: 17.424345016479492ms\r\n",,terminal_output +15330,13429152,"TERMINAL",0,0,"Step 811, loss: 1.7854264974594116, step time: 17.02713966369629ms\r\n",,terminal_output +15331,13429214,"TERMINAL",0,0,"Step 812, loss: 1.6936602592468262, step time: 16.99542999267578ms\r\n",,terminal_output +15332,13429308,"TERMINAL",0,0,"Step 813, loss: 1.6474730968475342, step time: 18.157243728637695ms\r\n",,terminal_output +15333,13429361,"TERMINAL",0,0,"Step 814, loss: 1.9199413061141968, step time: 17.597675323486328ms\r\n",,terminal_output +15334,13429466,"TERMINAL",0,0,"Step 815, loss: 1.8297818899154663, step time: 17.09270477294922ms\r\nStep 816, loss: 1.8991256952285767, step time: 17.487525939941406ms\r\n",,terminal_output +15335,13429559,"TERMINAL",0,0,"Step 817, loss: 1.6027215719223022, step time: 17.15993881225586ms\r\n",,terminal_output +15336,13429613,"TERMINAL",0,0,"Step 818, loss: 1.6114639043807983, step time: 17.174720764160156ms\r\n",,terminal_output +15337,13429679,"TERMINAL",0,0,"Step 819, loss: 2.0116536617279053, step time: 18.181562423706055ms\r\n",,terminal_output +15338,13429741,"TERMINAL",0,0,"Step 820, loss: 1.8416374921798706, step time: 17.53401756286621ms\r\n",,terminal_output +15339,13429816,"TERMINAL",0,0,"Step 821, loss: 1.6370080709457397, step time: 17.241477966308594ms\r\n",,terminal_output +15340,13429896,"TERMINAL",0,0,"Step 822, loss: 1.6796166896820068, step time: 17.259597778320312ms\r\nStep 823, loss: 1.6746243238449097, step time: 16.966581344604492ms\r\n",,terminal_output +15341,13430023,"TERMINAL",0,0,"Step 824, loss: 1.6163471937179565, step time: 17.12965965270996ms\r\nStep 825, loss: 1.6054717302322388, step time: 18.25690269470215ms\r\n",,terminal_output +15342,13430090,"TERMINAL",0,0,"Step 826, loss: 1.72222900390625, step time: 17.69852638244629ms\r\n",,terminal_output +15343,13430190,"TERMINAL",0,0,"Step 827, loss: 1.806440830230713, step time: 17.14944839477539ms\r\n",,terminal_output +15344,13430252,"TERMINAL",0,0,"Step 828, loss: 1.668718695640564, step time: 18.161535263061523ms\r\n",,terminal_output +15345,13430314,"TERMINAL",0,0,"Step 829, loss: 2.0989887714385986, step time: 18.01586151123047ms\r\n",,terminal_output +15346,13430384,"TERMINAL",0,0,"Step 830, loss: 1.8492308855056763, step time: 17.902135848999023ms\r\n",,terminal_output +15347,13430436,"TERMINAL",0,0,"Step 831, loss: 1.7012349367141724, step time: 18.389225006103516ms\r\n",,terminal_output +15348,13430489,"TERMINAL",0,0,"Step 832, loss: 1.604257583618164, step time: 17.67563819885254ms\r\n",,terminal_output +15349,13430598,"TERMINAL",0,0,"Step 833, loss: 1.6284695863723755, step time: 17.834901809692383ms\r\nStep 834, loss: 1.7305279970169067, step time: 18.28789710998535ms\r\n",,terminal_output +15350,13430662,"TERMINAL",0,0,"Step 835, loss: 1.6995768547058105, step time: 18.076658248901367ms\r\n",,terminal_output +15351,13430726,"TERMINAL",0,0,"Step 836, loss: 1.7716712951660156, step time: 18.532276153564453ms\r\n",,terminal_output +15352,13430791,"TERMINAL",0,0,"Step 837, loss: 1.6392122507095337, step time: 18.226146697998047ms\r\n",,terminal_output +15353,13430856,"TERMINAL",0,0,"Step 838, loss: 2.0785694122314453, step time: 17.639636993408203ms\r\n",,terminal_output +15354,13430920,"TERMINAL",0,0,"Step 839, loss: 1.9264864921569824, step time: 
17.765283584594727ms\r\n",,terminal_output +15355,13430984,"TERMINAL",0,0,"Step 840, loss: 1.5352234840393066, step time: 17.437219619750977ms\r\n",,terminal_output +15356,13431046,"TERMINAL",0,0,"Step 841, loss: 1.6007068157196045, step time: 17.25602149963379ms\r\n",,terminal_output +15357,13431113,"TERMINAL",0,0,"Step 842, loss: 2.2249159812927246, step time: 17.012357711791992ms\r\n",,terminal_output +15358,13431179,"TERMINAL",0,0,"Step 843, loss: 1.523386001586914, step time: 17.159223556518555ms\r\n",,terminal_output +15359,13431244,"TERMINAL",0,0,"Step 844, loss: 1.8656609058380127, step time: 16.755342483520508ms\r\n",,terminal_output +15360,13431305,"TERMINAL",0,0,"Step 845, loss: 2.1320996284484863, step time: 22.84550666809082ms\r\n",,terminal_output +15361,13431370,"TERMINAL",0,0,"Step 846, loss: 1.9515016078948975, step time: 20.296335220336914ms\r\n",,terminal_output +15362,13431431,"TERMINAL",0,0,"Step 847, loss: 1.5671741962432861, step time: 18.87059211730957ms\r\n",,terminal_output +15363,13431540,"TERMINAL",0,0,"Step 848, loss: 1.7721651792526245, step time: 18.597841262817383ms\r\n",,terminal_output +15364,13431594,"TERMINAL",0,0,"Step 849, loss: 1.8373754024505615, step time: 17.869234085083008ms\r\n",,terminal_output +15365,13431701,"TERMINAL",0,0,"Step 850, loss: 1.557765007019043, step time: 17.148256301879883ms\r\nStep 851, loss: 1.936964511871338, step time: 17.327547073364258ms\r\n",,terminal_output +15366,13431766,"TERMINAL",0,0,"Step 852, loss: 2.5520401000976562, step time: 17.533540725708008ms\r\n",,terminal_output +15367,13431829,"TERMINAL",0,0,"Step 853, loss: 1.6030040979385376, step time: 17.2579288482666ms\r\n",,terminal_output +15368,13431915,"TERMINAL",0,0,"Step 854, loss: 1.638034701347351, step time: 18.17917823791504ms\r\n",,terminal_output +15369,13431976,"TERMINAL",0,0,"Step 855, loss: 2.0965585708618164, step time: 18.300533294677734ms\r\n",,terminal_output +15370,13432039,"TERMINAL",0,0,"Step 856, loss: 1.642792820930481, step time: 17.66371726989746ms\r\n",,terminal_output +15371,13432102,"TERMINAL",0,0,"Step 857, loss: 1.8181194067001343, step time: 21.01755142211914ms\r\n",,terminal_output +15372,13432164,"TERMINAL",0,0,"Step 858, loss: 1.6082409620285034, step time: 18.55158805847168ms\r\n",,terminal_output +15373,13432227,"TERMINAL",0,0,"Step 859, loss: 1.44694983959198, step time: 17.172813415527344ms\r\n",,terminal_output +15374,13432294,"TERMINAL",0,0,"Step 860, loss: 1.5773755311965942, step time: 18.208026885986328ms\r\n",,terminal_output +15375,13432356,"TERMINAL",0,0,"Step 861, loss: 1.563124656677246, step time: 18.45240592956543ms\r\n",,terminal_output +15376,13432419,"TERMINAL",0,0,"Step 862, loss: 1.601547122001648, step time: 17.6846981048584ms\r\n",,terminal_output +15377,13432486,"TERMINAL",0,0,"Step 863, loss: 1.9191200733184814, step time: 18.041610717773438ms\r\n",,terminal_output +15378,13432549,"TERMINAL",0,0,"Step 864, loss: 1.6097074747085571, step time: 18.502235412597656ms\r\n",,terminal_output +15379,13432613,"TERMINAL",0,0,"Step 865, loss: 1.6589465141296387, step time: 18.33510398864746ms\r\n",,terminal_output +15380,13432676,"TERMINAL",0,0,"Step 866, loss: 2.0087766647338867, step time: 18.10908317565918ms\r\n",,terminal_output +15381,13432740,"TERMINAL",0,0,"Step 867, loss: 1.488051414489746, step time: 18.760204315185547ms\r\n",,terminal_output +15382,13432804,"TERMINAL",0,0,"Step 868, loss: 1.837746500968933, step time: 17.734766006469727ms\r\n",,terminal_output +15383,13432870,"TERMINAL",0,0,"Step 869, loss: 
1.8712172508239746, step time: 29.940128326416016ms\r\n",,terminal_output +15384,13432929,"TERMINAL",0,0,"Step 870, loss: 1.6573524475097656, step time: 21.898984909057617ms\r\n",,terminal_output +15385,13432993,"TERMINAL",0,0,"Step 871, loss: 1.8690667152404785, step time: 17.54593849182129ms\r\n",,terminal_output +15386,13433057,"TERMINAL",0,0,"Step 872, loss: 1.572629451751709, step time: 17.502307891845703ms\r\n",,terminal_output +15387,13433159,"TERMINAL",0,0,"Step 873, loss: 1.5658115148544312, step time: 17.784833908081055ms\r\nStep 874, loss: 1.7691543102264404, step time: 17.05765724182129ms\r\n",,terminal_output +15388,13433224,"TERMINAL",0,0,"Step 875, loss: 1.7286242246627808, step time: 17.598628997802734ms\r\n",,terminal_output +15389,13433287,"TERMINAL",0,0,"Step 876, loss: 1.8243274688720703, step time: 17.264366149902344ms\r\n",,terminal_output +15390,13433352,"TERMINAL",0,0,"Step 877, loss: 1.537305474281311, step time: 17.661094665527344ms\r\n",,terminal_output +15391,13433416,"TERMINAL",0,0,"Step 878, loss: 1.5420280694961548, step time: 18.284320831298828ms\r\n",,terminal_output +15392,13433487,"TERMINAL",0,0,"Step 879, loss: 1.6698709726333618, step time: 18.8751220703125ms\r\n",,terminal_output +15393,13433552,"TERMINAL",0,0,"Step 880, loss: 1.8820053339004517, step time: 18.315792083740234ms\r\n",,terminal_output +15394,13433660,"TERMINAL",0,0,"Step 881, loss: 1.6280335187911987, step time: 18.346071243286133ms\r\n",,terminal_output +15395,13433673,"TERMINAL",0,0,"Step 882, loss: 1.416365385055542, step time: 18.704652786254883ms\r\n",,terminal_output +15396,13433768,"TERMINAL",0,0,"Step 883, loss: 1.6657413244247437, step time: 18.561124801635742ms\r\n",,terminal_output +15397,13433829,"TERMINAL",0,0,"Step 884, loss: 1.641423225402832, step time: 17.73214340209961ms\r\n",,terminal_output +15398,13433893,"TERMINAL",0,0,"Step 885, loss: 1.5400474071502686, step time: 17.380237579345703ms\r\n",,terminal_output +15399,13433954,"TERMINAL",0,0,"Step 886, loss: 1.6467829942703247, step time: 16.946792602539062ms\r\n",,terminal_output +15400,13434016,"TERMINAL",0,0,"Step 887, loss: 1.592185139656067, step time: 18.189430236816406ms\r\n",,terminal_output +15401,13434080,"TERMINAL",0,0,"Step 888, loss: 1.4882073402404785, step time: 17.91071891784668ms\r\n",,terminal_output +15402,13434148,"TERMINAL",0,0,"Step 889, loss: 2.4710543155670166, step time: 17.403125762939453ms\r\n",,terminal_output +15403,13434205,"TERMINAL",0,0,"Step 890, loss: 1.803852915763855, step time: 31.399965286254883ms\r\n",,terminal_output +15404,13434266,"TERMINAL",0,0,"Step 891, loss: 3.1716554164886475, step time: 20.188331604003906ms\r\n",,terminal_output +15405,13434383,"TERMINAL",0,0,"Step 892, loss: 1.4516494274139404, step time: 17.40264892578125ms\r\nStep 893, loss: 1.4280282258987427, step time: 18.440961837768555ms\r\n",,terminal_output +15406,13434445,"TERMINAL",0,0,"Step 894, loss: 1.5166640281677246, step time: 18.293380737304688ms\r\n",,terminal_output +15407,13434553,"TERMINAL",0,0,"Step 895, loss: 2.238647937774658, step time: 28.54609489440918ms\r\n",,terminal_output +15408,13434618,"TERMINAL",0,0,"Step 896, loss: 1.5659587383270264, step time: 19.932985305786133ms\r\n",,terminal_output +15409,13434680,"TERMINAL",0,0,"Step 897, loss: 2.0643248558044434, step time: 18.14413070678711ms\r\n",,terminal_output +15410,13434744,"TERMINAL",0,0,"Step 898, loss: 1.4781992435455322, step time: 17.791032791137695ms\r\n",,terminal_output +15411,13434806,"TERMINAL",0,0,"Step 899, loss: 
1.6618175506591797, step time: 17.197847366333008ms\r\n",,terminal_output +15412,13434869,"TERMINAL",0,0,"Step 900, loss: 1.4941513538360596, step time: 18.18227767944336ms\r\n",,terminal_output +15413,13434933,"TERMINAL",0,0,"Step 901, loss: 1.585773229598999, step time: 18.256425857543945ms\r\n",,terminal_output +15414,13434998,"TERMINAL",0,0,"Step 902, loss: 1.65141761302948, step time: 17.759084701538086ms\r\n",,terminal_output +15415,13435069,"TERMINAL",0,0,"Step 903, loss: 1.4374336004257202, step time: 17.42696762084961ms\r\n",,terminal_output +15416,13435157,"TERMINAL",0,0,"Step 904, loss: 1.9526921510696411, step time: 17.122983932495117ms\r\nStep 905, loss: 1.4393491744995117, step time: 18.253803253173828ms\r\n",,terminal_output +15417,13435217,"TERMINAL",0,0,"Step 906, loss: 1.4897915124893188, step time: 17.327070236206055ms\r\n",,terminal_output +15418,13435284,"TERMINAL",0,0,"Step 907, loss: 1.5625522136688232, step time: 18.1577205657959ms\r\n",,terminal_output +15419,13435346,"TERMINAL",0,0,"Step 908, loss: 1.9118237495422363, step time: 17.618656158447266ms\r\n",,terminal_output +15420,13435420,"TERMINAL",0,0,"Step 909, loss: 2.128458261489868, step time: 17.376184463500977ms\r\n",,terminal_output +15421,13435474,"TERMINAL",0,0,"Step 910, loss: 1.4480303525924683, step time: 17.061471939086914ms\r\n",,terminal_output +15422,13435540,"TERMINAL",0,0,"Step 911, loss: 1.4464927911758423, step time: 17.401695251464844ms\r\n",,terminal_output +15423,13435602,"TERMINAL",0,0,"Step 912, loss: 1.4535564184188843, step time: 17.26531982421875ms\r\n",,terminal_output +15424,13435671,"TERMINAL",0,0,"Step 913, loss: 1.5610296726226807, step time: 18.034934997558594ms\r\n",,terminal_output +15425,13435733,"TERMINAL",0,0,"Step 914, loss: 1.6380984783172607, step time: 17.60387420654297ms\r\n",,terminal_output +15426,13435804,"TERMINAL",0,0,"Step 915, loss: 1.5103490352630615, step time: 17.399311065673828ms\r\n",,terminal_output +15427,13435863,"TERMINAL",0,0,"Step 916, loss: 1.729714035987854, step time: 17.08245277404785ms\r\n",,terminal_output +15428,13435932,"TERMINAL",0,0,"Step 917, loss: 2.090130567550659, step time: 17.380237579345703ms\r\n",,terminal_output +15429,13435996,"TERMINAL",0,0,"Step 918, loss: 1.7905439138412476, step time: 17.459869384765625ms\r\n",,terminal_output +15430,13436059,"TERMINAL",0,0,"Step 919, loss: 2.717653512954712, step time: 17.058372497558594ms\r\n",,terminal_output +15431,13436127,"TERMINAL",0,0,"Step 920, loss: 1.4601490497589111, step time: 16.888856887817383ms\r\n",,terminal_output +15432,13436191,"TERMINAL",0,0,"Step 921, loss: 1.9478437900543213, step time: 17.56763458251953ms\r\n",,terminal_output +15433,13436252,"TERMINAL",0,0,"Step 922, loss: 1.8212852478027344, step time: 17.041683197021484ms\r\n",,terminal_output +15434,13436323,"TERMINAL",0,0,"Step 923, loss: 1.6248027086257935, step time: 17.03357696533203ms\r\n",,terminal_output +15435,13436380,"TERMINAL",0,0,"Step 924, loss: 1.6097582578659058, step time: 17.49110221862793ms\r\n",,terminal_output +15436,13436492,"TERMINAL",0,0,"Step 925, loss: 1.57805597782135, step time: 18.204450607299805ms\r\nStep 926, loss: 1.6799625158309937, step time: 17.599105834960938ms\r\n",,terminal_output +15437,13436558,"TERMINAL",0,0,"Step 927, loss: 1.6606159210205078, step time: 17.420053482055664ms\r\n",,terminal_output +15438,13436619,"TERMINAL",0,0,"Step 928, loss: 1.4722152948379517, step time: 17.029762268066406ms\r\n",,terminal_output +15439,13436682,"TERMINAL",0,0,"Step 929, loss: 
1.4410754442214966, step time: 17.2882080078125ms\r\n",,terminal_output +15440,13436747,"TERMINAL",0,0,"Step 930, loss: 1.8531793355941772, step time: 17.492294311523438ms\r\n",,terminal_output +15441,13436812,"TERMINAL",0,0,"Step 931, loss: 2.067030906677246, step time: 17.957210540771484ms\r\n",,terminal_output +15442,13436874,"TERMINAL",0,0,"Step 932, loss: 1.4945975542068481, step time: 17.803668975830078ms\r\n",,terminal_output +15443,13436940,"TERMINAL",0,0,"Step 933, loss: 3.121880292892456, step time: 17.625093460083008ms\r\n",,terminal_output +15444,13437004,"TERMINAL",0,0,"Step 934, loss: 1.571622610092163, step time: 17.198562622070312ms\r\n",,terminal_output +15445,13437079,"TERMINAL",0,0,"Step 935, loss: 1.7191190719604492, step time: 17.151594161987305ms\r\n",,terminal_output +15446,13437132,"TERMINAL",0,0,"Step 936, loss: 1.8423439264297485, step time: 17.33255386352539ms\r\n",,terminal_output +15447,13437199,"TERMINAL",0,0,"Step 937, loss: 1.5190705060958862, step time: 18.15485954284668ms\r\n",,terminal_output +15448,13437261,"TERMINAL",0,0,"Step 938, loss: 1.4802831411361694, step time: 17.75813102722168ms\r\n",,terminal_output +15449,13437325,"TERMINAL",0,0,"Step 939, loss: 1.4933196306228638, step time: 17.413616180419922ms\r\n",,terminal_output +15450,13437389,"TERMINAL",0,0,"Step 940, loss: 1.4656317234039307, step time: 17.107725143432617ms\r\n",,terminal_output +15451,13437452,"TERMINAL",0,0,"Step 941, loss: 2.3389089107513428, step time: 17.351865768432617ms\r\n",,terminal_output +15452,13437519,"TERMINAL",0,0,"Step 942, loss: 1.6184866428375244, step time: 17.589330673217773ms\r\n",,terminal_output +15453,13437588,"TERMINAL",0,0,"Step 943, loss: 1.5381755828857422, step time: 18.201828002929688ms\r\n",,terminal_output +15454,13437651,"TERMINAL",0,0,"Step 944, loss: 1.6514697074890137, step time: 17.854690551757812ms\r\n",,terminal_output +15455,13437714,"TERMINAL",0,0,"Step 945, loss: 2.0780270099639893, step time: 17.68660545349121ms\r\n",,terminal_output +15456,13437777,"TERMINAL",0,0,"Step 946, loss: 1.9788278341293335, step time: 17.108917236328125ms\r\n",,terminal_output +15457,13437841,"TERMINAL",0,0,"Step 947, loss: 1.80068838596344, step time: 17.461776733398438ms\r\n",,terminal_output +15458,13437907,"TERMINAL",0,0,"Step 948, loss: 1.4770601987838745, step time: 17.505645751953125ms\r\n",,terminal_output +15459,13437970,"TERMINAL",0,0,"Step 949, loss: 1.4740952253341675, step time: 18.19300651550293ms\r\n",,terminal_output +15460,13438036,"TERMINAL",0,0,"Step 950, loss: 1.7063233852386475, step time: 17.800092697143555ms\r\n",,terminal_output +15461,13438127,"TERMINAL",0,0,"Step 951, loss: 2.1678338050842285, step time: 17.53973960876465ms\r\nStep 952, loss: 1.6071600914001465, step time: 17.105817794799805ms\r\n",,terminal_output +15462,13438263,"TERMINAL",0,0,"Step 953, loss: 1.9709193706512451, step time: 17.287254333496094ms\r\nStep 954, loss: 1.626023769378662, step time: 17.4710750579834ms\r\n",,terminal_output +15463,13438380,"TERMINAL",0,0,"Step 955, loss: 1.5107756853103638, step time: 18.058300018310547ms\r\nStep 956, loss: 2.1076323986053467, step time: 17.605304718017578ms\r\n",,terminal_output +15464,13438448,"TERMINAL",0,0,"Step 957, loss: 1.6734273433685303, step time: 17.580032348632812ms\r\n",,terminal_output +15465,13438509,"TERMINAL",0,0,"Step 958, loss: 1.4319276809692383, step time: 17.101764678955078ms\r\n",,terminal_output +15466,13438574,"TERMINAL",0,0,"Step 959, loss: 1.4791892766952515, step time: 
17.25625991821289ms\r\n",,terminal_output +15467,13438637,"TERMINAL",0,0,"Step 960, loss: 1.788475751876831, step time: 17.487525939941406ms\r\n",,terminal_output +15468,13438698,"TERMINAL",0,0,"Step 961, loss: 1.509108066558838, step time: 18.187999725341797ms\r\n",,terminal_output +15469,13438821,"TERMINAL",0,0,"Step 962, loss: 1.5800517797470093, step time: 17.809391021728516ms\r\nStep 963, loss: 1.5974957942962646, step time: 17.544031143188477ms\r\n",,terminal_output +15470,13438882,"TERMINAL",0,0,"Step 964, loss: 1.5753108263015747, step time: 17.11297035217285ms\r\n",,terminal_output +15471,13438947,"TERMINAL",0,0,"Step 965, loss: 1.4216381311416626, step time: 17.31085777282715ms\r\n",,terminal_output +15472,13439009,"TERMINAL",0,0,"Step 966, loss: 2.0463547706604004, step time: 18.981456756591797ms\r\n",,terminal_output +15473,13439079,"TERMINAL",0,0,"Step 967, loss: 1.711569905281067, step time: 18.268108367919922ms\r\n",,terminal_output +15474,13439140,"TERMINAL",0,0,"Step 968, loss: 1.4243600368499756, step time: 17.64369010925293ms\r\n",,terminal_output +15475,13439199,"TERMINAL",0,0,"Step 969, loss: 2.044799327850342, step time: 17.396926879882812ms\r\n",,terminal_output +15476,13439261,"TERMINAL",0,0,"Step 970, loss: 1.8474873304367065, step time: 16.954898834228516ms\r\n",,terminal_output +15477,13439332,"TERMINAL",0,0,"Step 971, loss: 1.5919983386993408, step time: 17.060518264770508ms\r\n",,terminal_output +15478,13439389,"TERMINAL",0,0,"Step 972, loss: 1.6142005920410156, step time: 17.40717887878418ms\r\n",,terminal_output +15479,13439483,"TERMINAL",0,0,"Step 973, loss: 1.3752071857452393, step time: 18.189191818237305ms\r\n",,terminal_output +15480,13439535,"TERMINAL",0,0,"Step 974, loss: 1.3987133502960205, step time: 17.7915096282959ms\r\n",,terminal_output +15481,13439919,"TERMINAL",0,0,"Step 975, loss: 1.5008280277252197, step time: 363.62409591674805ms\r\n",,terminal_output +15482,13439980,"TERMINAL",0,0,"Step 976, loss: 1.9243954420089722, step time: 24.41692352294922ms\r\n",,terminal_output +15483,13440087,"TERMINAL",0,0,"Step 977, loss: 1.5744497776031494, step time: 19.634246826171875ms\r\nStep 978, loss: 1.396993637084961, step time: 18.896102905273438ms\r\n",,terminal_output +15484,13440152,"TERMINAL",0,0,"Step 979, loss: 2.0019898414611816, step time: 19.593477249145508ms\r\n",,terminal_output +15485,13440209,"TERMINAL",0,0,"Step 980, loss: 1.734453558921814, step time: 18.134355545043945ms\r\n",,terminal_output +15486,13440273,"TERMINAL",0,0,"Step 981, loss: 1.5304512977600098, step time: 17.699241638183594ms\r\n",,terminal_output +15487,13440340,"TERMINAL",0,0,"Step 982, loss: 1.5529392957687378, step time: 17.19069480895996ms\r\n",,terminal_output +15488,13440401,"TERMINAL",0,0,"Step 983, loss: 1.5072520971298218, step time: 17.38882064819336ms\r\n",,terminal_output +15489,13440468,"TERMINAL",0,0,"Step 984, loss: 1.729079008102417, step time: 17.48490333557129ms\r\n",,terminal_output +15490,13440530,"TERMINAL",0,0,"Step 985, loss: 1.3727633953094482, step time: 18.259048461914062ms\r\n",,terminal_output +15491,13440592,"TERMINAL",0,0,"Step 986, loss: 1.6156210899353027, step time: 18.006563186645508ms\r\n",,terminal_output +15492,13440655,"TERMINAL",0,0,"Step 987, loss: 2.03769850730896, step time: 18.861055374145508ms\r\n",,terminal_output +15493,13440719,"TERMINAL",0,0,"Step 988, loss: 1.8644015789031982, step time: 17.855167388916016ms\r\n",,terminal_output +15494,13440782,"TERMINAL",0,0,"Step 989, loss: 1.3829957246780396, step time: 
17.160892486572266ms\r\n",,terminal_output +15495,13440846,"TERMINAL",0,0,"Step 990, loss: 2.599475622177124, step time: 17.490863800048828ms\r\n",,terminal_output +15496,13440916,"TERMINAL",0,0,"Step 991, loss: 1.409733772277832, step time: 18.37158203125ms\r\n",,terminal_output +15497,13440975,"TERMINAL",0,0,"Step 992, loss: 1.7203009128570557, step time: 17.727375030517578ms\r\n",,terminal_output +15498,13441082,"TERMINAL",0,0,"Step 993, loss: 1.37556791305542, step time: 17.66514778137207ms\r\nStep 994, loss: 1.6134015321731567, step time: 17.173051834106445ms\r\n",,terminal_output +15499,13441149,"TERMINAL",0,0,"Step 995, loss: 1.54501211643219, step time: 17.282962799072266ms\r\n",,terminal_output +15500,13441250,"TERMINAL",0,0,"Step 996, loss: 1.38052237033844, step time: 29.645919799804688ms\r\n",,terminal_output +15501,13441304,"TERMINAL",0,0,"Step 997, loss: 1.3132556676864624, step time: 19.379615783691406ms\r\n",,terminal_output +15502,13441411,"TERMINAL",0,0,"Step 998, loss: 1.5664671659469604, step time: 17.616987228393555ms\r\nStep 999, loss: 1.4942905902862549, step time: 17.978906631469727ms\r\n",,terminal_output +15503,13444101,"TERMINAL",0,0,"Step 1000, loss: 1.587169885635376, step time: 28.313159942626953ms\r\n",,terminal_output +15504,13444210,"TERMINAL",0,0,"Step 1001, loss: 1.4035351276397705, step time: 25.931358337402344ms\r\n",,terminal_output +15505,13444263,"TERMINAL",0,0,"Step 1002, loss: 1.3454772233963013, step time: 21.41714096069336ms\r\n",,terminal_output +15506,13444357,"TERMINAL",0,0,"Step 1003, loss: 1.2890063524246216, step time: 19.59538459777832ms\r\n",,terminal_output +15507,13444439,"TERMINAL",0,0,"Step 1004, loss: 1.7561651468276978, step time: 18.589496612548828ms\r\nStep 1005, loss: 1.621397614479065, step time: 18.843412399291992ms\r\n",,terminal_output +15508,13444540,"TERMINAL",0,0,"Step 1006, loss: 1.5144659280776978, step time: 18.049001693725586ms\r\n",,terminal_output +15509,13444594,"TERMINAL",0,0,"Step 1007, loss: 1.4649877548217773, step time: 18.08452606201172ms\r\n",,terminal_output +15510,13444716,"TERMINAL",0,0,"Step 1008, loss: 1.4000179767608643, step time: 18.497467041015625ms\r\nStep 1009, loss: 1.3659495115280151, step time: 18.27526092529297ms\r\n",,terminal_output +15511,13444779,"TERMINAL",0,0,"Step 1010, loss: 1.3278734683990479, step time: 18.44191551208496ms\r\n",,terminal_output +15512,13444844,"TERMINAL",0,0,"Step 1011, loss: 1.6088998317718506, step time: 31.0821533203125ms\r\n",,terminal_output +15513,13444908,"TERMINAL",0,0,"Step 1012, loss: 1.3282171487808228, step time: 18.868446350097656ms\r\n",,terminal_output +15514,13444972,"TERMINAL",0,0,"Step 1013, loss: 1.2340106964111328, step time: 18.308639526367188ms\r\n",,terminal_output +15515,13445036,"TERMINAL",0,0,"Step 1014, loss: 1.4591917991638184, step time: 18.885374069213867ms\r\n",,terminal_output +15516,13445154,"TERMINAL",0,0,"Step 1015, loss: 1.4384303092956543, step time: 18.047809600830078ms\r\nStep 1016, loss: 1.3756085634231567, step time: 18.245697021484375ms\r\n",,terminal_output +15517,13445244,"TERMINAL",0,0,"Step 1017, loss: 1.4543436765670776, step time: 18.291234970092773ms\r\n",,terminal_output +15518,13445304,"TERMINAL",0,0,"Step 1018, loss: 1.6406474113464355, step time: 18.195390701293945ms\r\n",,terminal_output +15519,13445409,"TERMINAL",0,0,"Step 1019, loss: 1.7974653244018555, step time: 18.132686614990234ms\r\nStep 1020, loss: 2.3739030361175537, step time: 18.618106842041016ms\r\n",,terminal_output +15520,13445471,"TERMINAL",0,0,"Step 
1021, loss: 1.2837022542953491, step time: 17.83442497253418ms\r\n",,terminal_output +15521,13445534,"TERMINAL",0,0,"Step 1022, loss: 1.792750597000122, step time: 18.12148094177246ms\r\n",,terminal_output +15522,13445599,"TERMINAL",0,0,"Step 1023, loss: 2.09423565864563, step time: 25.21371841430664ms\r\n",,terminal_output +15523,13445664,"TERMINAL",0,0,"Step 1024, loss: 1.4854772090911865, step time: 20.213603973388672ms\r\n",,terminal_output +15524,13445809,"TERMINAL",0,0,"Step 1025, loss: 1.371551513671875, step time: 29.251813888549805ms\r\nStep 1026, loss: 1.425672173500061, step time: 20.45297622680664ms\r\n",,terminal_output +15525,13445864,"TERMINAL",0,0,"Step 1027, loss: 1.3299455642700195, step time: 17.627477645874023ms\r\n",,terminal_output +15526,13445929,"TERMINAL",0,0,"Step 1028, loss: 1.9061154127120972, step time: 17.97962188720703ms\r\n",,terminal_output +15527,13445991,"TERMINAL",0,0,"Step 1029, loss: 1.453637719154358, step time: 18.25428009033203ms\r\n",,terminal_output +15528,13446056,"TERMINAL",0,0,"Step 1030, loss: 1.9359689950942993, step time: 17.972707748413086ms\r\n",,terminal_output +15529,13446121,"TERMINAL",0,0,"Step 1031, loss: 1.274125576019287, step time: 18.015384674072266ms\r\n",,terminal_output +15530,13446245,"TERMINAL",0,0,"Step 1032, loss: 1.3148361444473267, step time: 18.12148094177246ms\r\nStep 1033, loss: 1.5357639789581299, step time: 17.909765243530273ms\r\n",,terminal_output +15531,13446340,"TERMINAL",0,0,"Step 1034, loss: 1.4347208738327026, step time: 23.417234420776367ms\r\n",,terminal_output +15532,13446678,"TERMINAL",0,0,"Step 1035, loss: 2.426224946975708, step time: 340.6376838684082ms\r\n",,terminal_output +15533,13446733,"TERMINAL",0,0,"Step 1036, loss: 1.3326232433319092, step time: 25.329113006591797ms\r\n",,terminal_output +15534,13446851,"TERMINAL",0,0,"Step 1037, loss: 1.9243413209915161, step time: 20.812511444091797ms\r\nStep 1038, loss: 1.2476739883422852, step time: 19.25349235534668ms\r\n",,terminal_output +15535,13446911,"TERMINAL",0,0,"Step 1039, loss: 1.33793044090271, step time: 18.500804901123047ms\r\n",,terminal_output +15536,13446978,"TERMINAL",0,0,"Step 1040, loss: 1.37893545627594, step time: 18.51654052734375ms\r\n",,terminal_output +15537,13447041,"TERMINAL",0,0,"Step 1041, loss: 1.3297795057296753, step time: 18.753528594970703ms\r\n",,terminal_output +15538,13447106,"TERMINAL",0,0,"Step 1042, loss: 1.4400051832199097, step time: 17.83466339111328ms\r\n",,terminal_output +15539,13447207,"TERMINAL",0,0,"Step 1043, loss: 1.5418908596038818, step time: 18.217086791992188ms\r\n",,terminal_output +15540,13447320,"TERMINAL",0,0,"Step 1044, loss: 1.484481692314148, step time: 18.599748611450195ms\r\nStep 1045, loss: 1.4215458631515503, step time: 17.83156394958496ms\r\n",,terminal_output +15541,13447428,"TERMINAL",0,0,"Step 1046, loss: 1.2615551948547363, step time: 18.069028854370117ms\r\nStep 1047, loss: 1.287211537361145, step time: 18.229961395263672ms\r\n",,terminal_output +15542,13447493,"TERMINAL",0,0,"Step 1048, loss: 1.320102334022522, step time: 18.218994140625ms\r\n",,terminal_output +15543,13447558,"TERMINAL",0,0,"Step 1049, loss: 1.627254843711853, step time: 18.264055252075195ms\r\n",,terminal_output +15544,13447623,"TERMINAL",0,0,"Step 1050, loss: 1.2545218467712402, step time: 18.81265640258789ms\r\n",,terminal_output +15545,13447738,"TERMINAL",0,0,"Step 1051, loss: 1.6455447673797607, step time: 18.338680267333984ms\r\nStep 1052, loss: 1.3371050357818604, step time: 
18.12577247619629ms\r\n",,terminal_output +15546,13447804,"TERMINAL",0,0,"Step 1053, loss: 1.2760088443756104, step time: 18.235206604003906ms\r\n",,terminal_output +15547,13447867,"TERMINAL",0,0,"Step 1054, loss: 2.214562177658081, step time: 18.57280731201172ms\r\n",,terminal_output +15548,13447935,"TERMINAL",0,0,"Step 1055, loss: 1.7836318016052246, step time: 17.940521240234375ms\r\n",,terminal_output +15549,13447995,"TERMINAL",0,0,"Step 1056, loss: 1.7471835613250732, step time: 18.539905548095703ms\r\n",,terminal_output +15550,13448059,"TERMINAL",0,0,"Step 1057, loss: 1.4122055768966675, step time: 17.75956153869629ms\r\n",,terminal_output +15551,13448124,"TERMINAL",0,0,"Step 1058, loss: 1.4774386882781982, step time: 18.344640731811523ms\r\n",,terminal_output +15552,13448189,"TERMINAL",0,0,"Step 1059, loss: 1.8345997333526611, step time: 18.397092819213867ms\r\n",,terminal_output +15553,13448254,"TERMINAL",0,0,"Step 1060, loss: 2.18605637550354, step time: 18.056154251098633ms\r\n",,terminal_output +15554,13448316,"TERMINAL",0,0,"Step 1061, loss: 1.3151932954788208, step time: 17.75956153869629ms\r\n",,terminal_output +15555,13448380,"TERMINAL",0,0,"Step 1062, loss: 1.6755634546279907, step time: 18.596410751342773ms\r\n",,terminal_output +15556,13448444,"TERMINAL",0,0,"Step 1063, loss: 1.3408021926879883, step time: 18.059492111206055ms\r\n",,terminal_output +15557,13448509,"TERMINAL",0,0,"Step 1064, loss: 1.5801278352737427, step time: 18.238544464111328ms\r\n",,terminal_output +15558,13448574,"TERMINAL",0,0,"Step 1065, loss: 1.4910374879837036, step time: 20.08986473083496ms\r\n",,terminal_output +15559,13448639,"TERMINAL",0,0,"Step 1066, loss: 1.2608062028884888, step time: 18.020153045654297ms\r\n",,terminal_output +15560,13448702,"TERMINAL",0,0,"Step 1067, loss: 1.4571272134780884, step time: 17.791271209716797ms\r\n",,terminal_output +15561,13448766,"TERMINAL",0,0,"Step 1068, loss: 1.2908222675323486, step time: 18.63408088684082ms\r\n",,terminal_output +15562,13448837,"TERMINAL",0,0,"Step 1069, loss: 1.3214869499206543, step time: 17.83585548400879ms\r\n",,terminal_output +15563,13448892,"TERMINAL",0,0,"Step 1070, loss: 2.220303535461426, step time: 18.100976943969727ms\r\n",,terminal_output +15564,13448987,"TERMINAL",0,0,"Step 1071, loss: 1.2187765836715698, step time: 18.395185470581055ms\r\n",,terminal_output +15565,13449040,"TERMINAL",0,0,"Step 1072, loss: 1.590039610862732, step time: 18.000125885009766ms\r\n",,terminal_output +15566,13449145,"TERMINAL",0,0,"Step 1073, loss: 1.336045265197754, step time: 17.837047576904297ms\r\nStep 1074, loss: 1.4535921812057495, step time: 18.199443817138672ms\r\n",,terminal_output +15567,13449212,"TERMINAL",0,0,"Step 1075, loss: 1.2250890731811523, step time: 18.01896095275879ms\r\n",,terminal_output +15568,13449274,"TERMINAL",0,0,"Step 1076, loss: 1.3244744539260864, step time: 18.085956573486328ms\r\n",,terminal_output +15569,13449341,"TERMINAL",0,0,"Step 1077, loss: 1.2274322509765625, step time: 18.227815628051758ms\r\n",,terminal_output +15570,13449404,"TERMINAL",0,0,"Step 1078, loss: 1.7115764617919922, step time: 18.045425415039062ms\r\n",,terminal_output +15571,13449467,"TERMINAL",0,0,"Step 1079, loss: 1.5698081254959106, step time: 17.625808715820312ms\r\n",,terminal_output +15572,13449537,"TERMINAL",0,0,"Step 1080, loss: 1.2576099634170532, step time: 18.433570861816406ms\r\n",,terminal_output +15573,13449642,"TERMINAL",0,0,"Step 1081, loss: 1.5586262941360474, step time: 17.871618270874023ms\r\nStep 1082, loss: 
1.21219003200531, step time: 18.11957359313965ms\r\n",,terminal_output +15574,13449711,"TERMINAL",0,0,"Step 1083, loss: 1.370497465133667, step time: 18.28312873840332ms\r\n",,terminal_output +15575,13449773,"TERMINAL",0,0,"Step 1084, loss: 2.008293628692627, step time: 18.006086349487305ms\r\n",,terminal_output +15576,13449838,"TERMINAL",0,0,"Step 1085, loss: 1.2910345792770386, step time: 19.49596405029297ms\r\n",,terminal_output +15577,13449901,"TERMINAL",0,0,"Step 1086, loss: 1.3198553323745728, step time: 18.321752548217773ms\r\n",,terminal_output +15578,13449965,"TERMINAL",0,0,"Step 1087, loss: 2.430588483810425, step time: 17.539024353027344ms\r\n",,terminal_output +15579,13450029,"TERMINAL",0,0,"Step 1088, loss: 1.4510186910629272, step time: 18.044233322143555ms\r\n",,terminal_output +15580,13450093,"TERMINAL",0,0,"Step 1089, loss: 1.3812271356582642, step time: 18.160104751586914ms\r\n",,terminal_output +15581,13450157,"TERMINAL",0,0,"Step 1090, loss: 1.4196932315826416, step time: 17.953872680664062ms\r\n",,terminal_output +15582,13450216,"TERMINAL",0,0,"Step 1091, loss: 1.043676733970642, step time: 17.942190170288086ms\r\n",,terminal_output +15583,13450280,"TERMINAL",0,0,"Step 1092, loss: 1.185298204421997, step time: 18.231868743896484ms\r\n",,terminal_output +15584,13450341,"TERMINAL",0,0,"Step 1093, loss: 1.1774452924728394, step time: 17.734050750732422ms\r\n",,terminal_output +15585,13450404,"TERMINAL",0,0,"Step 1094, loss: 2.6215298175811768, step time: 17.839908599853516ms\r\n",,terminal_output +15586,13450501,"TERMINAL",0,0,"Step 1095, loss: 1.1629616022109985, step time: 21.88706398010254ms\r\n",,terminal_output +15587,13450609,"TERMINAL",0,0,"Step 1096, loss: 1.1815229654312134, step time: 17.752647399902344ms\r\nStep 1097, loss: 1.2302058935165405, step time: 18.040895462036133ms\r\n",,terminal_output +15588,13450721,"TERMINAL",0,0,"Step 1098, loss: 1.9168201684951782, step time: 18.110036849975586ms\r\nStep 1099, loss: 1.2065234184265137, step time: 17.858266830444336ms\r\n",,terminal_output +15589,13450789,"TERMINAL",0,0,"Step 1100, loss: 1.1999626159667969, step time: 17.51542091369629ms\r\n",,terminal_output +15590,13450852,"TERMINAL",0,0,"Step 1101, loss: 1.622688889503479, step time: 18.291711807250977ms\r\n",,terminal_output +15591,13450913,"TERMINAL",0,0,"Step 1102, loss: 1.4730998277664185, step time: 17.707347869873047ms\r\n",,terminal_output +15592,13450982,"TERMINAL",0,0,"Step 1103, loss: 1.2690266370773315, step time: 17.751455307006836ms\r\n",,terminal_output +15593,13451068,"TERMINAL",0,0,"Step 1104, loss: 1.1727609634399414, step time: 18.30291748046875ms\r\n",,terminal_output +15594,13451116,"TERMINAL",0,0,"Step 1105, loss: 1.5214707851409912, step time: 21.321773529052734ms\r\n",,terminal_output +15595,13451182,"TERMINAL",0,0,"Step 1106, loss: 1.2433449029922485, step time: 21.425247192382812ms\r\n",,terminal_output +15596,13451248,"TERMINAL",0,0,"Step 1107, loss: 1.180524230003357, step time: 19.753694534301758ms\r\n",,terminal_output +15597,13451309,"TERMINAL",0,0,"Step 1108, loss: 1.1800295114517212, step time: 18.66936683654785ms\r\n",,terminal_output +15598,13451370,"TERMINAL",0,0,"Step 1109, loss: 1.1563633680343628, step time: 18.787622451782227ms\r\n",,terminal_output +15599,13451436,"TERMINAL",0,0,"Step 1110, loss: 1.5319234132766724, step time: 18.795251846313477ms\r\n",,terminal_output +15600,13451501,"TERMINAL",0,0,"Step 1111, loss: 1.1871130466461182, step time: 17.78388023376465ms\r\n",,terminal_output 
+15601,13451560,"TERMINAL",0,0,"Step 1112, loss: 1.279797077178955, step time: 17.99607276916504ms\r\n",,terminal_output +15602,13451629,"TERMINAL",0,0,"Step 1113, loss: 1.1836460828781128, step time: 18.204450607299805ms\r\n",,terminal_output +15603,13451692,"TERMINAL",0,0,"Step 1114, loss: 1.9096264839172363, step time: 18.207788467407227ms\r\n",,terminal_output +15604,13451755,"TERMINAL",0,0,"Step 1115, loss: 1.3319602012634277, step time: 17.93694496154785ms\r\n",,terminal_output +15605,13451868,"TERMINAL",0,0,"Step 1116, loss: 1.0982009172439575, step time: 18.641233444213867ms\r\nStep 1117, loss: 1.5263701677322388, step time: 17.61031150817871ms\r\n",,terminal_output +15606,13451939,"TERMINAL",0,0,"Step 1118, loss: 1.4447828531265259, step time: 18.856048583984375ms\r\n",,terminal_output +15607,13451997,"TERMINAL",0,0,"Step 1119, loss: 1.3589024543762207, step time: 18.512964248657227ms\r\n",,terminal_output +15608,13452062,"TERMINAL",0,0,"Step 1120, loss: 2.190533399581909, step time: 19.417285919189453ms\r\n",,terminal_output +15609,13452126,"TERMINAL",0,0,"Step 1121, loss: 1.2887698411941528, step time: 19.30689811706543ms\r\n",,terminal_output +15610,13452190,"TERMINAL",0,0,"Step 1122, loss: 1.7150442600250244, step time: 19.208669662475586ms\r\n",,terminal_output +15611,13452284,"TERMINAL",0,0,"Step 1123, loss: 1.4920917749404907, step time: 17.742395401000977ms\r\n",,terminal_output +15612,13452337,"TERMINAL",0,0,"Step 1124, loss: 1.3906904458999634, step time: 18.1119441986084ms\r\n",,terminal_output +15613,13452432,"TERMINAL",0,0,"Step 1125, loss: 1.178344488143921, step time: 18.28479766845703ms\r\n",,terminal_output +15614,13452484,"TERMINAL",0,0,"Step 1126, loss: 1.5330734252929688, step time: 18.002986907958984ms\r\n",,terminal_output +15615,13452536,"TERMINAL",0,0,"Step 1127, loss: 1.467076301574707, step time: 18.82314682006836ms\r\n",,terminal_output +15616,13452643,"TERMINAL",0,0,"Step 1128, loss: 1.2263336181640625, step time: 18.968582153320312ms\r\nStep 1129, loss: 1.6645888090133667, step time: 18.263578414916992ms\r\n",,terminal_output +15617,13452706,"TERMINAL",0,0,"Step 1130, loss: 1.2849361896514893, step time: 18.151521682739258ms\r\n",,terminal_output +15618,13452770,"TERMINAL",0,0,"Step 1131, loss: 1.2101558446884155, step time: 18.620729446411133ms\r\n",,terminal_output +15619,13452833,"TERMINAL",0,0,"Step 1132, loss: 1.2582343816757202, step time: 18.06044578552246ms\r\n",,terminal_output +15620,13452899,"TERMINAL",0,0,"Step 1133, loss: 1.355921745300293, step time: 18.230915069580078ms\r\n",,terminal_output +15621,13452961,"TERMINAL",0,0,"Step 1134, loss: 1.1237632036209106, step time: 18.549442291259766ms\r\n",,terminal_output +15622,13453026,"TERMINAL",0,0,"Step 1135, loss: 2.0414679050445557, step time: 18.079519271850586ms\r\n",,terminal_output +15623,13453080,"TERMINAL",0,0,"Step 1136, loss: 1.2806110382080078, step time: 18.270015716552734ms\r\n",,terminal_output +15624,13453303,"TERMINAL",0,0,"Step 1137, loss: 1.2799800634384155, step time: 32.13810920715332ms\r\n",,terminal_output +15625,13453387,"TERMINAL",0,0,"Step 1138, loss: 1.3554965257644653, step time: 19.05035972595215ms\r\nStep 1139, loss: 1.2549359798431396, step time: 17.904043197631836ms\r\nStep 1140, loss: 1.328658938407898, step time: 18.38517189025879ms\r\n",,terminal_output +15626,13453445,"TERMINAL",0,0,"Step 1141, loss: 1.3442744016647339, step time: 18.030643463134766ms\r\n",,terminal_output +15627,13453567,"TERMINAL",0,0,"Step 1142, loss: 1.2970530986785889, step time: 
18.416404724121094ms\r\nStep 1143, loss: 1.3700708150863647, step time: 18.982887268066406ms\r\n",,terminal_output +15628,13453666,"TERMINAL",0,0,"Step 1144, loss: 1.1601039171218872, step time: 18.08643341064453ms\r\nStep 1145, loss: 1.1516025066375732, step time: 17.93527603149414ms\r\n",,terminal_output +15629,13453721,"TERMINAL",0,0,"Step 1146, loss: 1.2642308473587036, step time: 18.438339233398438ms\r\n",,terminal_output +15630,13453816,"TERMINAL",0,0,"Step 1147, loss: 1.3551819324493408, step time: 17.83609390258789ms\r\n",,terminal_output +15631,13453868,"TERMINAL",0,0,"Step 1148, loss: 1.1400394439697266, step time: 20.30324935913086ms\r\n",,terminal_output +15632,13453964,"TERMINAL",0,0,"Step 1149, loss: 1.9611501693725586, step time: 19.606351852416992ms\r\n",,terminal_output +15633,13454043,"TERMINAL",0,0,"Step 1150, loss: 1.4466097354888916, step time: 17.963171005249023ms\r\nStep 1151, loss: 1.228400707244873, step time: 17.983675003051758ms\r\n",,terminal_output +15634,13454108,"TERMINAL",0,0,"Step 1152, loss: 1.2583106756210327, step time: 18.438100814819336ms\r\n",,terminal_output +15635,13454212,"TERMINAL",0,0,"Step 1153, loss: 1.6108571290969849, step time: 17.96245574951172ms\r\n",,terminal_output +15636,13454265,"TERMINAL",0,0,"Step 1154, loss: 1.1798696517944336, step time: 18.13673973083496ms\r\n",,terminal_output +15637,13454361,"TERMINAL",0,0,"Step 1155, loss: 1.2521088123321533, step time: 18.099546432495117ms\r\n",,terminal_output +15638,13454669,"TERMINAL",0,0,"Step 1156, loss: 1.3771175146102905, step time: 355.6549549102783ms\r\n",,terminal_output +15639,13454728,"TERMINAL",0,0,"Step 1157, loss: 1.8250941038131714, step time: 25.721073150634766ms\r\n",,terminal_output +15640,13454822,"TERMINAL",0,0,"Step 1158, loss: 0.9135463237762451, step time: 20.322084426879883ms\r\n",,terminal_output +15641,13454929,"TERMINAL",0,0,"Step 1159, loss: 1.4269261360168457, step time: 18.758773803710938ms\r\nStep 1160, loss: 1.1452959775924683, step time: 18.379926681518555ms\r\n",,terminal_output +15642,13454994,"TERMINAL",0,0,"Step 1161, loss: 1.3082900047302246, step time: 17.769813537597656ms\r\n",,terminal_output +15643,13455057,"TERMINAL",0,0,"Step 1162, loss: 1.25233793258667, step time: 18.117427825927734ms\r\n",,terminal_output +15644,13455170,"TERMINAL",0,0,"Step 1163, loss: 1.2427313327789307, step time: 18.149375915527344ms\r\nStep 1164, loss: 1.0826181173324585, step time: 17.737627029418945ms\r\n",,terminal_output +15645,13455264,"TERMINAL",0,0,"Step 1165, loss: 1.2303645610809326, step time: 18.186092376708984ms\r\n",,terminal_output +15646,13455327,"TERMINAL",0,0,"Step 1166, loss: 1.3319647312164307, step time: 17.558574676513672ms\r\n",,terminal_output +15647,13455385,"TERMINAL",0,0,"Step 1167, loss: 1.0895124673843384, step time: 17.65751838684082ms\r\n",,terminal_output +15648,13455445,"TERMINAL",0,0,"Step 1168, loss: 1.270050287246704, step time: 18.129348754882812ms\r\n",,terminal_output +15649,13455509,"TERMINAL",0,0,"Step 1169, loss: 1.2249505519866943, step time: 17.795801162719727ms\r\n",,terminal_output +15650,13455569,"TERMINAL",0,0,"Step 1170, loss: 1.266775369644165, step time: 17.84682273864746ms\r\n",,terminal_output +15651,13455635,"TERMINAL",0,0,"Step 1171, loss: 1.376876950263977, step time: 17.975807189941406ms\r\n",,terminal_output +15652,13455699,"TERMINAL",0,0,"Step 1172, loss: 1.1335369348526, step time: 17.74740219116211ms\r\n",,terminal_output +15653,13455762,"TERMINAL",0,0,"Step 1173, loss: 1.2726027965545654, step time: 
17.415523529052734ms\r\n",,terminal_output +15654,13455832,"TERMINAL",0,0,"Step 1174, loss: 1.1931222677230835, step time: 18.0208683013916ms\r\n",,terminal_output +15655,13455894,"TERMINAL",0,0,"Step 1175, loss: 2.79319167137146, step time: 17.660140991210938ms\r\n",,terminal_output +15656,13455957,"TERMINAL",0,0,"Step 1176, loss: 1.2538970708847046, step time: 17.791748046875ms\r\n",,terminal_output +15657,13456020,"TERMINAL",0,0,"Step 1177, loss: 1.2823734283447266, step time: 18.053054809570312ms\r\n",,terminal_output +15658,13456118,"TERMINAL",0,0,"Step 1178, loss: 1.3826583623886108, step time: 17.494916915893555ms\r\nStep 1179, loss: 1.9271856546401978, step time: 17.824411392211914ms\r\n",,terminal_output +15659,13456214,"TERMINAL",0,0,"Step 1180, loss: 1.1079479455947876, step time: 17.95029640197754ms\r\n",,terminal_output +15660,13456325,"TERMINAL",0,0,"Step 1181, loss: 1.1484438180923462, step time: 17.507553100585938ms\r\nStep 1182, loss: 1.5923008918762207, step time: 17.70305633544922ms\r\n",,terminal_output +15661,13456434,"TERMINAL",0,0,"Step 1183, loss: 1.2248684167861938, step time: 18.065690994262695ms\r\nStep 1184, loss: 1.2092015743255615, step time: 17.612934112548828ms\r\n",,terminal_output +15662,13456497,"TERMINAL",0,0,"Step 1185, loss: 1.2830744981765747, step time: 17.982959747314453ms\r\n",,terminal_output +15663,13456562,"TERMINAL",0,0,"Step 1186, loss: 1.1327825784683228, step time: 17.89569854736328ms\r\n",,terminal_output +15664,13456627,"TERMINAL",0,0,"Step 1187, loss: 1.059801697731018, step time: 17.75050163269043ms\r\n",,terminal_output +15665,13456688,"TERMINAL",0,0,"Step 1188, loss: 1.2656443119049072, step time: 17.877817153930664ms\r\n",,terminal_output +15666,13456756,"TERMINAL",0,0,"Step 1189, loss: 1.1201823949813843, step time: 17.600059509277344ms\r\n",,terminal_output +15667,13456814,"TERMINAL",0,0,"Step 1190, loss: 1.2277430295944214, step time: 17.930030822753906ms\r\n",,terminal_output +15668,13456884,"TERMINAL",0,0,"Step 1191, loss: 1.3089599609375, step time: 17.795801162719727ms\r\n",,terminal_output +15669,13456945,"TERMINAL",0,0,"Step 1192, loss: 2.0458598136901855, step time: 18.042564392089844ms\r\n",,terminal_output +15670,13457008,"TERMINAL",0,0,"Step 1193, loss: 1.450824499130249, step time: 17.92597770690918ms\r\n",,terminal_output +15671,13457080,"TERMINAL",0,0,"Step 1194, loss: 1.1096017360687256, step time: 17.789840698242188ms\r\n",,terminal_output +15672,13457136,"TERMINAL",0,0,"Step 1195, loss: 1.045210838317871, step time: 19.5770263671875ms\r\n",,terminal_output +15673,13457230,"TERMINAL",0,0,"Step 1196, loss: 2.6925153732299805, step time: 17.751693725585938ms\r\n",,terminal_output +15674,13457336,"TERMINAL",0,0,"Step 1197, loss: 1.1180689334869385, step time: 17.656803131103516ms\r\nStep 1198, loss: 1.0518211126327515, step time: 17.951011657714844ms\r\n",,terminal_output +15675,13457458,"TERMINAL",0,0,"Step 1199, loss: 1.5490361452102661, step time: 17.807960510253906ms\r\nStep 1200, loss: 1.240433692932129, step time: 17.915964126586914ms\r\n",,terminal_output +15676,13457519,"TERMINAL",0,0,"Step 1201, loss: 1.180422306060791, step time: 17.71068572998047ms\r\n",,terminal_output +15677,13457580,"TERMINAL",0,0,"Step 1202, loss: 1.0274896621704102, step time: 18.26024055480957ms\r\n",,terminal_output +15678,13457675,"TERMINAL",0,0,"Step 1203, loss: 1.135499119758606, step time: 17.905473709106445ms\r\n",,terminal_output +15679,13457727,"TERMINAL",0,0,"Step 1204, loss: 1.1634560823440552, step time: 
18.053054809570312ms\r\n",,terminal_output +15680,13457824,"TERMINAL",0,0,"Step 1205, loss: 1.0569078922271729, step time: 17.64678955078125ms\r\n",,terminal_output +15681,13457876,"TERMINAL",0,0,"Step 1206, loss: 1.3362473249435425, step time: 21.414995193481445ms\r\n",,terminal_output +15682,13457930,"TERMINAL",0,0,"Step 1207, loss: 1.156895399093628, step time: 17.969608306884766ms\r\n",,terminal_output +15683,13458036,"TERMINAL",0,0,"Step 1208, loss: 1.1660927534103394, step time: 17.91548728942871ms\r\nStep 1209, loss: 1.2728277444839478, step time: 17.99607276916504ms\r\n",,terminal_output +15684,13458156,"TERMINAL",0,0,"Step 1210, loss: 1.2105437517166138, step time: 17.75336265563965ms\r\nStep 1211, loss: 1.2021340131759644, step time: 17.99178123474121ms\r\n",,terminal_output +15685,13458222,"TERMINAL",0,0,"Step 1212, loss: 1.0732372999191284, step time: 17.48490333557129ms\r\n",,terminal_output +15686,13458286,"TERMINAL",0,0,"Step 1213, loss: 1.369961142539978, step time: 17.666339874267578ms\r\n",,terminal_output +15687,13458352,"TERMINAL",0,0,"Step 1214, loss: 0.9575775861740112, step time: 17.439603805541992ms\r\n",,terminal_output +15688,13458413,"TERMINAL",0,0,"Step 1215, loss: 1.3105425834655762, step time: 17.844200134277344ms\r\n",,terminal_output +15689,13458477,"TERMINAL",0,0,"Step 1216, loss: 0.9483713507652283, step time: 17.89999008178711ms\r\n",,terminal_output +15690,13458541,"TERMINAL",0,0,"Step 1217, loss: 1.1344232559204102, step time: 17.841577529907227ms\r\n",,terminal_output +15691,13458635,"TERMINAL",0,0,"Step 1218, loss: 1.3472899198532104, step time: 18.26786994934082ms\r\n",,terminal_output +15692,13458688,"TERMINAL",0,0,"Step 1219, loss: 1.1387943029403687, step time: 18.085479736328125ms\r\n",,terminal_output +15693,13458795,"TERMINAL",0,0,"Step 1220, loss: 1.0742182731628418, step time: 17.93527603149414ms\r\nStep 1221, loss: 1.164669394493103, step time: 18.25690269470215ms\r\n",,terminal_output +15694,13458917,"TERMINAL",0,0,"Step 1222, loss: 1.6290720701217651, step time: 18.357515335083008ms\r\nStep 1223, loss: 1.0017693042755127, step time: 17.47751235961914ms\r\n",,terminal_output +15695,13458975,"TERMINAL",0,0,"Step 1224, loss: 1.1989728212356567, step time: 17.791271209716797ms\r\n",,terminal_output +15696,13459045,"TERMINAL",0,0,"Step 1225, loss: 1.1327695846557617, step time: 18.13364028930664ms\r\n",,terminal_output +15697,13459110,"TERMINAL",0,0,"Step 1226, loss: 1.1100314855575562, step time: 17.409086227416992ms\r\n",,terminal_output +15698,13459162,"TERMINAL",0,0,"Step 1227, loss: 1.1056342124938965, step time: 17.777681350708008ms\r\n",,terminal_output +15699,13459277,"TERMINAL",0,0,"Step 1228, loss: 1.2977832555770874, step time: 30.556917190551758ms\r\n",,terminal_output +15700,13459336,"TERMINAL",0,0,"Step 1229, loss: 1.233262062072754, step time: 22.88079261779785ms\r\n",,terminal_output +15701,13459404,"TERMINAL",0,0,"Step 1230, loss: 1.0527642965316772, step time: 20.128965377807617ms\r\n",,terminal_output +15702,13459469,"TERMINAL",0,0,"Step 1231, loss: 2.0637333393096924, step time: 19.84095573425293ms\r\n",,terminal_output +15703,13459532,"TERMINAL",0,0,"Step 1232, loss: 1.0738842487335205, step time: 19.210100173950195ms\r\n",,terminal_output +15704,13459597,"TERMINAL",0,0,"Step 1233, loss: 1.640146255493164, step time: 19.035816192626953ms\r\n",,terminal_output +15705,13459657,"TERMINAL",0,0,"Step 1234, loss: 1.0915478467941284, step time: 18.99409294128418ms\r\n",,terminal_output +15706,13459723,"TERMINAL",0,0,"Step 1235, 
loss: 1.1570442914962769, step time: 21.775007247924805ms\r\n",,terminal_output +15707,13459828,"TERMINAL",0,0,"Step 1236, loss: 1.382845163345337, step time: 18.997669219970703ms\r\nStep 1237, loss: 1.2414188385009766, step time: 18.758296966552734ms\r\n",,terminal_output +15708,13459890,"TERMINAL",0,0,"Step 1238, loss: 1.1782196760177612, step time: 18.94068717956543ms\r\n",,terminal_output +15709,13459953,"TERMINAL",0,0,"Step 1239, loss: 1.3462557792663574, step time: 18.767595291137695ms\r\n",,terminal_output +15710,13460017,"TERMINAL",0,0,"Step 1240, loss: 1.6280694007873535, step time: 18.837928771972656ms\r\n",,terminal_output +15711,13460133,"TERMINAL",0,0,"Step 1241, loss: 1.364837884902954, step time: 19.011497497558594ms\r\n",,terminal_output +15712,13460144,"TERMINAL",0,0,"Step 1242, loss: 0.9846665263175964, step time: 18.706560134887695ms\r\n",,terminal_output +15713,13460284,"TERMINAL",0,0,"Step 1243, loss: 1.1715984344482422, step time: 34.50155258178711ms\r\nStep 1244, loss: 2.5931334495544434, step time: 21.704673767089844ms\r\n",,terminal_output +15714,13460414,"TERMINAL",0,0,"Step 1245, loss: 1.3326752185821533, step time: 18.934249877929688ms\r\nStep 1246, loss: 1.1791049242019653, step time: 19.436359405517578ms\r\n",,terminal_output +15715,13460473,"TERMINAL",0,0,"Step 1247, loss: 1.8173779249191284, step time: 18.602371215820312ms\r\n",,terminal_output +15716,13460572,"TERMINAL",0,0,"Step 1248, loss: 1.0208426713943481, step time: 18.73922348022461ms\r\n",,terminal_output +15717,13460620,"TERMINAL",0,0,"Step 1249, loss: 1.5447391271591187, step time: 19.23227310180664ms\r\n",,terminal_output +15718,13460734,"TERMINAL",0,0,"Step 1250, loss: 1.7336382865905762, step time: 18.73493194580078ms\r\nStep 1251, loss: 0.9992225766181946, step time: 18.903017044067383ms\r\n",,terminal_output +15719,13460802,"TERMINAL",0,0,"Step 1252, loss: 1.4186031818389893, step time: 21.774768829345703ms\r\n",,terminal_output +15720,13460872,"TERMINAL",0,0,"Step 1253, loss: 1.2589757442474365, step time: 19.53434944152832ms\r\n",,terminal_output +15721,13460938,"TERMINAL",0,0,"Step 1254, loss: 1.101224660873413, step time: 19.049644470214844ms\r\n",,terminal_output +15722,13461005,"TERMINAL",0,0,"Step 1255, loss: 1.2231413125991821, step time: 19.1957950592041ms\r\n",,terminal_output +15723,13461080,"TERMINAL",0,0,"Step 1256, loss: 1.3388075828552246, step time: 19.062519073486328ms\r\n",,terminal_output +15724,13461133,"TERMINAL",0,0,"Step 1257, loss: 1.4284121990203857, step time: 18.87679100036621ms\r\n",,terminal_output +15725,13461205,"TERMINAL",0,0,"Step 1258, loss: 1.0097638368606567, step time: 19.089937210083008ms\r\n",,terminal_output +15726,13461265,"TERMINAL",0,0,"Step 1259, loss: 1.059259295463562, step time: 27.449369430541992ms\r\n",,terminal_output +15727,13461365,"TERMINAL",0,0,"Step 1260, loss: 2.048037052154541, step time: 24.99866485595703ms\r\n",,terminal_output +15728,13461410,"TERMINAL",0,0,"Step 1261, loss: 1.0799458026885986, step time: 21.25239372253418ms\r\n",,terminal_output +15729,13461513,"TERMINAL",0,0,"Step 1262, loss: 1.5627497434616089, step time: 19.069671630859375ms\r\nStep 1263, loss: 1.267378807067871, step time: 18.220186233520508ms\r\n",,terminal_output +15730,13461612,"TERMINAL",0,0,"Step 1264, loss: 1.0369905233383179, step time: 18.651962280273438ms\r\n",,terminal_output +15731,13461664,"TERMINAL",0,0,"Step 1265, loss: 1.3441340923309326, step time: 17.937898635864258ms\r\n",,terminal_output +15732,13461779,"TERMINAL",0,0,"Step 1266, loss: 
1.5386407375335693, step time: 18.326282501220703ms\r\nStep 1267, loss: 1.408644676208496, step time: 18.282175064086914ms\r\n",,terminal_output +15733,13461889,"TERMINAL",0,0,"Step 1268, loss: 0.8721926808357239, step time: 18.07403564453125ms\r\nStep 1269, loss: 0.9841749668121338, step time: 17.76266098022461ms\r\n",,terminal_output +15734,13461983,"TERMINAL",0,0,"Step 1270, loss: 1.1642558574676514, step time: 18.14723014831543ms\r\n",,terminal_output +15735,13462032,"TERMINAL",0,0,"Step 1271, loss: 0.9853724837303162, step time: 17.5778865814209ms\r\n",,terminal_output +15736,13462144,"TERMINAL",0,0,"Step 1272, loss: 1.4143630266189575, step time: 18.254518508911133ms\r\nStep 1273, loss: 0.9515409469604492, step time: 17.93503761291504ms\r\n",,terminal_output +15737,13462241,"TERMINAL",0,0,"Step 1274, loss: 0.9142830967903137, step time: 18.01276206970215ms\r\n",,terminal_output +15738,13462292,"TERMINAL",0,0,"Step 1275, loss: 1.0331343412399292, step time: 17.93360710144043ms\r\n",,terminal_output +15739,13462385,"TERMINAL",0,0,"Step 1276, loss: 1.4214770793914795, step time: 18.125057220458984ms\r\n",,terminal_output +15740,13462437,"TERMINAL",0,0,"Step 1277, loss: 1.2767155170440674, step time: 17.716169357299805ms\r\n",,terminal_output +15741,13462491,"TERMINAL",0,0,"Step 1278, loss: 0.9858666658401489, step time: 17.930030822753906ms\r\n",,terminal_output +15742,13462598,"TERMINAL",0,0,"Step 1279, loss: 1.185200810432434, step time: 18.23282241821289ms\r\nStep 1280, loss: 1.3792228698730469, step time: 18.078088760375977ms\r\n",,terminal_output +15743,13462693,"TERMINAL",0,0,"Step 1281, loss: 1.2473779916763306, step time: 18.123626708984375ms\r\n",,terminal_output +15744,13462747,"TERMINAL",0,0,"Step 1282, loss: 1.2717437744140625, step time: 18.172740936279297ms\r\n",,terminal_output +15745,13462860,"TERMINAL",0,0,"Step 1283, loss: 1.1629469394683838, step time: 18.079280853271484ms\r\nStep 1284, loss: 1.2188419103622437, step time: 17.840862274169922ms\r\n",,terminal_output +15746,13462933,"TERMINAL",0,0,"Step 1285, loss: 1.0522801876068115, step time: 18.169403076171875ms\r\n",,terminal_output +15747,13462996,"TERMINAL",0,0,"Step 1286, loss: 1.3107969760894775, step time: 19.72794532775879ms\r\n",,terminal_output +15748,13463066,"TERMINAL",0,0,"Step 1287, loss: 1.355686902999878, step time: 18.598556518554688ms\r\n",,terminal_output +15749,13463127,"TERMINAL",0,0,"Step 1288, loss: 1.229042649269104, step time: 18.288135528564453ms\r\n",,terminal_output +15750,13463189,"TERMINAL",0,0,"Step 1289, loss: 1.0810967683792114, step time: 18.26190948486328ms\r\n",,terminal_output +15751,13463257,"TERMINAL",0,0,"Step 1290, loss: 1.215030312538147, step time: 17.688274383544922ms\r\n",,terminal_output +15752,13463352,"TERMINAL",0,0,"Step 1291, loss: 1.1260370016098022, step time: 18.34273338317871ms\r\n",,terminal_output +15753,13463407,"TERMINAL",0,0,"Step 1292, loss: 0.9938496351242065, step time: 17.895936965942383ms\r\n",,terminal_output +15754,13463511,"TERMINAL",0,0,"Step 1293, loss: 1.4359779357910156, step time: 17.86494255065918ms\r\nStep 1294, loss: 1.086093783378601, step time: 18.076419830322266ms\r\n",,terminal_output +15755,13463635,"TERMINAL",0,0,"Step 1295, loss: 1.2529964447021484, step time: 17.76742935180664ms\r\nStep 1296, loss: 1.4459723234176636, step time: 18.551349639892578ms\r\n",,terminal_output +15756,13463741,"TERMINAL",0,0,"Step 1297, loss: 1.03577721118927, step time: 18.91946792602539ms\r\n",,terminal_output +15757,13463855,"TERMINAL",0,0,"Step 1298, 
loss: 2.154233694076538, step time: 18.1882381439209ms\r\nStep 1299, loss: 1.5702983140945435, step time: 18.451452255249023ms\r\n",,terminal_output +15758,13463961,"TERMINAL",0,0,"Step 1300, loss: 1.0017447471618652, step time: 18.479347229003906ms\r\nStep 1301, loss: 0.995663583278656, step time: 17.615079879760742ms\r\n",,terminal_output +15759,13464077,"TERMINAL",0,0,"Step 1302, loss: 1.0879417657852173, step time: 17.87710189819336ms\r\nStep 1303, loss: 1.0517452955245972, step time: 18.19443702697754ms\r\n",,terminal_output +15760,13464138,"TERMINAL",0,0,"Step 1304, loss: 0.9863085150718689, step time: 18.10479164123535ms\r\n",,terminal_output +15761,13464268,"TERMINAL",0,0,"Step 1305, loss: 1.1602787971496582, step time: 17.822265625ms\r\nStep 1306, loss: 0.9197725653648376, step time: 18.146038055419922ms\r\n",,terminal_output +15762,13464331,"TERMINAL",0,0,"Step 1307, loss: 1.0329327583312988, step time: 17.769813537597656ms\r\n",,terminal_output +15763,13464399,"TERMINAL",0,0,"Step 1308, loss: 1.1939185857772827, step time: 17.82679557800293ms\r\n",,terminal_output +15764,13464467,"TERMINAL",0,0,"Step 1309, loss: 0.846961259841919, step time: 18.193483352661133ms\r\n",,terminal_output +15765,13464533,"TERMINAL",0,0,"Step 1310, loss: 1.1879960298538208, step time: 17.46392250061035ms\r\n",,terminal_output +15766,13464591,"TERMINAL",0,0,"Step 1311, loss: 1.1206090450286865, step time: 17.890453338623047ms\r\n",,terminal_output +15767,13464686,"TERMINAL",0,0,"Step 1312, loss: 1.1880097389221191, step time: 18.184661865234375ms\r\n",,terminal_output +15768,13464750,"TERMINAL",0,0,"Step 1313, loss: 1.371395230293274, step time: 17.943382263183594ms\r\n",,terminal_output +15769,13464875,"TERMINAL",0,0,"Step 1314, loss: 1.2391752004623413, step time: 17.5473690032959ms\r\nStep 1315, loss: 1.167906641960144, step time: 18.229246139526367ms\r\n",,terminal_output +15770,13464939,"TERMINAL",0,0,"Step 1316, loss: 1.0686190128326416, step time: 17.64225959777832ms\r\n",,terminal_output +15771,13465003,"TERMINAL",0,0,"Step 1317, loss: 1.1012579202651978, step time: 17.8372859954834ms\r\n",,terminal_output +15772,13465078,"TERMINAL",0,0,"Step 1318, loss: 0.9252830147743225, step time: 17.80867576599121ms\r\n",,terminal_output +15773,13465131,"TERMINAL",0,0,"Step 1319, loss: 0.9888854622840881, step time: 17.849445343017578ms\r\n",,terminal_output +15774,13465198,"TERMINAL",0,0,"Step 1320, loss: 1.200825572013855, step time: 17.727136611938477ms\r\n",,terminal_output +15775,13465262,"TERMINAL",0,0,"Step 1321, loss: 1.1433409452438354, step time: 18.141746520996094ms\r\n",,terminal_output +15776,13465329,"TERMINAL",0,0,"Step 1322, loss: 1.1967638731002808, step time: 17.544269561767578ms\r\n",,terminal_output +15777,13465397,"TERMINAL",0,0,"Step 1323, loss: 0.9090213179588318, step time: 17.91095733642578ms\r\n",,terminal_output +15778,13465461,"TERMINAL",0,0,"Step 1324, loss: 0.9808241724967957, step time: 17.73977279663086ms\r\n",,terminal_output +15779,13465541,"TERMINAL",0,0,"Step 1325, loss: 0.9127727150917053, step time: 17.712116241455078ms\r\n",,terminal_output +15780,13465594,"TERMINAL",0,0,"Step 1326, loss: 0.9004561901092529, step time: 17.81487464904785ms\r\n",,terminal_output +15781,13465646,"TERMINAL",0,0,"Step 1327, loss: 1.904542326927185, step time: 18.199920654296875ms\r\n",,terminal_output +15782,13465757,"TERMINAL",0,0,"Step 1328, loss: 0.9096892476081848, step time: 17.935514450073242ms\r\nStep 1329, loss: 0.9384757280349731, step time: 
17.51875877380371ms\r\n",,terminal_output +15783,13465819,"TERMINAL",0,0,"Step 1330, loss: 0.9167397022247314, step time: 18.200159072875977ms\r\n",,terminal_output +15784,13465891,"TERMINAL",0,0,"Step 1331, loss: 2.6068811416625977, step time: 17.933368682861328ms\r\n",,terminal_output +15785,13465955,"TERMINAL",0,0,"Step 1332, loss: 1.1161785125732422, step time: 17.44842529296875ms\r\n",,terminal_output +15786,13466036,"TERMINAL",0,0,"Step 1333, loss: 1.0326050519943237, step time: 18.133163452148438ms\r\n",,terminal_output +15787,13466130,"TERMINAL",0,0,"Step 1334, loss: 0.9776953458786011, step time: 17.829179763793945ms\r\nStep 1335, loss: 0.9773023724555969, step time: 17.925024032592773ms\r\n",,terminal_output +15788,13466194,"TERMINAL",0,0,"Step 1336, loss: 1.3325754404067993, step time: 19.158601760864258ms\r\n",,terminal_output +15789,13466319,"TERMINAL",0,0,"Step 1337, loss: 1.1319756507873535, step time: 18.106460571289062ms\r\nStep 1338, loss: 0.8845635056495667, step time: 17.850160598754883ms\r\n",,terminal_output +15790,13466384,"TERMINAL",0,0,"Step 1339, loss: 0.9367558360099792, step time: 17.99941062927246ms\r\n",,terminal_output +15791,13466477,"TERMINAL",0,0,"Step 1340, loss: 1.5438575744628906, step time: 17.530441284179688ms\r\n",,terminal_output +15792,13466529,"TERMINAL",0,0,"Step 1341, loss: 1.1778790950775146, step time: 17.80390739440918ms\r\n",,terminal_output +15793,13466959,"TERMINAL",0,0,"Step 1342, loss: 1.3540325164794922, step time: 359.8346710205078ms\r\nStep 1343, loss: 1.2682994604110718, step time: 26.19791030883789ms\r\n",,terminal_output +15794,13467025,"TERMINAL",0,0,"Step 1344, loss: 1.5110429525375366, step time: 20.67112922668457ms\r\n",,terminal_output +15795,13467089,"TERMINAL",0,0,"Step 1345, loss: 1.0670596361160278, step time: 18.6922550201416ms\r\n",,terminal_output +15796,13467156,"TERMINAL",0,0,"Step 1346, loss: 0.8627001643180847, step time: 21.786212921142578ms\r\n",,terminal_output +15797,13467217,"TERMINAL",0,0,"Step 1347, loss: 0.9277145862579346, step time: 18.228769302368164ms\r\n",,terminal_output +15798,13467279,"TERMINAL",0,0,"Step 1348, loss: 1.1658257246017456, step time: 18.15938949584961ms\r\n",,terminal_output +15799,13467342,"TERMINAL",0,0,"Step 1349, loss: 1.4305461645126343, step time: 17.853736877441406ms\r\n",,terminal_output +15800,13467413,"TERMINAL",0,0,"Step 1350, loss: 1.5227288007736206, step time: 18.224000930786133ms\r\n",,terminal_output +15801,13467472,"TERMINAL",0,0,"Step 1351, loss: 1.7328190803527832, step time: 17.983198165893555ms\r\n",,terminal_output +15802,13467541,"TERMINAL",0,0,"Step 1352, loss: 1.1814419031143188, step time: 18.190383911132812ms\r\n",,terminal_output +15803,13467606,"TERMINAL",0,0,"Step 1353, loss: 1.5301371812820435, step time: 17.623424530029297ms\r\n",,terminal_output +15804,13467713,"TERMINAL",0,0,"Step 1354, loss: 0.8635929822921753, step time: 19.042253494262695ms\r\nStep 1355, loss: 0.9746565222740173, step time: 17.728567123413086ms\r\n",,terminal_output +15805,13467777,"TERMINAL",0,0,"Step 1356, loss: 0.9996647834777832, step time: 17.863988876342773ms\r\n",,terminal_output +15806,13467845,"TERMINAL",0,0,"Step 1357, loss: 1.19768226146698, step time: 17.989635467529297ms\r\n",,terminal_output +15807,13467906,"TERMINAL",0,0,"Step 1358, loss: 1.130288004875183, step time: 17.895221710205078ms\r\n",,terminal_output +15808,13467974,"TERMINAL",0,0,"Step 1359, loss: 0.9061819314956665, step time: 17.938613891601562ms\r\n",,terminal_output +15809,13468036,"TERMINAL",0,0,"Step 
1360, loss: 0.8820932507514954, step time: 18.280982971191406ms\r\n",,terminal_output +15810,13468123,"TERMINAL",0,0,"Step 1361, loss: 1.2104061841964722, step time: 18.64790916442871ms\r\n",,terminal_output +15811,13468176,"TERMINAL",0,0,"Step 1362, loss: 0.9654989838600159, step time: 18.083810806274414ms\r\n",,terminal_output +15812,13468285,"TERMINAL",0,0,"Step 1363, loss: 0.909111499786377, step time: 18.330812454223633ms\r\nStep 1364, loss: 0.8570519089698792, step time: 18.04828643798828ms\r\n",,terminal_output +15813,13468351,"TERMINAL",0,0,"Step 1365, loss: 1.0410796403884888, step time: 18.186092376708984ms\r\n",,terminal_output +15814,13468411,"TERMINAL",0,0,"Step 1366, loss: 0.8797945380210876, step time: 18.062114715576172ms\r\n",,terminal_output +15815,13468476,"TERMINAL",0,0,"Step 1367, loss: 1.3761911392211914, step time: 17.96412467956543ms\r\n",,terminal_output +15816,13468536,"TERMINAL",0,0,"Step 1368, loss: 0.8678590059280396, step time: 19.21868324279785ms\r\n",,terminal_output +15817,13468614,"TERMINAL",0,0,"Step 1369, loss: 1.3215758800506592, step time: 18.050670623779297ms\r\n",,terminal_output +15818,13468666,"TERMINAL",0,0,"Step 1370, loss: 0.9135570526123047, step time: 18.048524856567383ms\r\n",,terminal_output +15819,13468729,"TERMINAL",0,0,"Step 1371, loss: 1.0187400579452515, step time: 17.517566680908203ms\r\n",,terminal_output +15820,13468792,"TERMINAL",0,0,"Step 1372, loss: 2.6461973190307617, step time: 18.065214157104492ms\r\n",,terminal_output +15821,13468855,"TERMINAL",0,0,"Step 1373, loss: 1.0578793287277222, step time: 17.925739288330078ms\r\n",,terminal_output +15822,13468917,"TERMINAL",0,0,"Step 1374, loss: 0.9716113805770874, step time: 17.407655715942383ms\r\n",,terminal_output +15823,13468984,"TERMINAL",0,0,"Step 1375, loss: 0.9280281066894531, step time: 33.7677001953125ms\r\n",,terminal_output +15824,13469119,"TERMINAL",0,0,"Step 1376, loss: 0.7902562618255615, step time: 24.834156036376953ms\r\nStep 1377, loss: 0.889866828918457, step time: 18.83983612060547ms\r\n",,terminal_output +15825,13469188,"TERMINAL",0,0,"Step 1378, loss: 1.2201205492019653, step time: 18.59450340270996ms\r\n",,terminal_output +15826,13469256,"TERMINAL",0,0,"Step 1379, loss: 1.032928705215454, step time: 17.857789993286133ms\r\n",,terminal_output +15827,13469315,"TERMINAL",0,0,"Step 1380, loss: 2.1961112022399902, step time: 18.164873123168945ms\r\n",,terminal_output +15828,13469379,"TERMINAL",0,0,"Step 1381, loss: 2.4956486225128174, step time: 18.16844940185547ms\r\n",,terminal_output +15829,13469441,"TERMINAL",0,0,"Step 1382, loss: 1.032610297203064, step time: 17.968177795410156ms\r\n",,terminal_output +15830,13469508,"TERMINAL",0,0,"Step 1383, loss: 1.6444435119628906, step time: 18.281936645507812ms\r\n",,terminal_output +15831,13469575,"TERMINAL",0,0,"Step 1384, loss: 1.0206317901611328, step time: 28.29265594482422ms\r\n",,terminal_output +15832,13469664,"TERMINAL",0,0,"Step 1385, loss: 1.5473428964614868, step time: 23.099899291992188ms\r\n",,terminal_output +15833,13469719,"TERMINAL",0,0,"Step 1386, loss: 1.0395567417144775, step time: 18.340349197387695ms\r\n",,terminal_output +15834,13469826,"TERMINAL",0,0,"Step 1387, loss: 0.9789926409721375, step time: 18.55778694152832ms\r\nStep 1388, loss: 1.037350058555603, step time: 17.84205436706543ms\r\n",,terminal_output +15835,13469920,"TERMINAL",0,0,"Step 1389, loss: 1.0908726453781128, step time: 17.857789993286133ms\r\n",,terminal_output +15836,13470037,"TERMINAL",0,0,"Step 1390, loss: 1.221312165260315, 
step time: 18.285751342773438ms\r\nStep 1391, loss: 1.2514166831970215, step time: 17.94743537902832ms\r\n",,terminal_output +15837,13470102,"TERMINAL",0,0,"Step 1392, loss: 0.8857017755508423, step time: 17.902374267578125ms\r\n",,terminal_output +15838,13470165,"TERMINAL",0,0,"Step 1393, loss: 1.2676819562911987, step time: 17.93837547302246ms\r\n",,terminal_output +15839,13470256,"TERMINAL",0,0,"Step 1394, loss: 0.9431571364402771, step time: 18.055438995361328ms\r\n",,terminal_output +15840,13470307,"TERMINAL",0,0,"Step 1395, loss: 1.483913779258728, step time: 21.100282669067383ms\r\n",,terminal_output +15841,13470358,"TERMINAL",0,0,"Step 1396, loss: 1.3051042556762695, step time: 18.37635040283203ms\r\n",,terminal_output +15842,13470465,"TERMINAL",0,0,"Step 1397, loss: 0.8030412793159485, step time: 17.902851104736328ms\r\nStep 1398, loss: 1.3385465145111084, step time: 18.0666446685791ms\r\n",,terminal_output +15843,13470586,"TERMINAL",0,0,"Step 1399, loss: 0.914773166179657, step time: 18.387794494628906ms\r\nStep 1400, loss: 0.8334091901779175, step time: 17.86184310913086ms\r\n",,terminal_output +15844,13470655,"TERMINAL",0,0,"Step 1401, loss: 0.8199423551559448, step time: 17.790794372558594ms\r\n",,terminal_output +15845,13470718,"TERMINAL",0,0,"Step 1402, loss: 0.8992899060249329, step time: 18.10598373413086ms\r\n",,terminal_output +15846,13470786,"TERMINAL",0,0,"Step 1403, loss: 1.0636783838272095, step time: 17.903566360473633ms\r\n",,terminal_output +15847,13470859,"TERMINAL",0,0,"Step 1404, loss: 1.5351195335388184, step time: 18.166065216064453ms\r\n",,terminal_output +15848,13470924,"TERMINAL",0,0,"Step 1405, loss: 0.851844072341919, step time: 18.58830451965332ms\r\n",,terminal_output +15849,13470985,"TERMINAL",0,0,"Step 1406, loss: 0.8280873894691467, step time: 17.900705337524414ms\r\n",,terminal_output +15850,13471056,"TERMINAL",0,0,"Step 1407, loss: 0.9537350535392761, step time: 17.846107482910156ms\r\n",,terminal_output +15851,13471120,"TERMINAL",0,0,"Step 1408, loss: 0.7989912033081055, step time: 18.244028091430664ms\r\n",,terminal_output +15852,13471184,"TERMINAL",0,0,"Step 1409, loss: 1.1796756982803345, step time: 17.915725708007812ms\r\n",,terminal_output +15853,13471249,"TERMINAL",0,0,"Step 1410, loss: 0.7425267100334167, step time: 17.538070678710938ms\r\n",,terminal_output +15854,13471313,"TERMINAL",0,0,"Step 1411, loss: 1.2835397720336914, step time: 18.10741424560547ms\r\n",,terminal_output +15855,13471434,"TERMINAL",0,0,"Step 1412, loss: 1.1882623434066772, step time: 17.591238021850586ms\r\nStep 1413, loss: 0.8971027135848999, step time: 17.731904983520508ms\r\n",,terminal_output +15856,13471498,"TERMINAL",0,0,"Step 1414, loss: 1.426677942276001, step time: 18.085002899169922ms\r\n",,terminal_output +15857,13471567,"TERMINAL",0,0,"Step 1415, loss: 0.7899190187454224, step time: 17.600536346435547ms\r\n",,terminal_output +15858,13471631,"TERMINAL",0,0,"Step 1416, loss: 0.944018542766571, step time: 17.901897430419922ms\r\n",,terminal_output +15859,13471697,"TERMINAL",0,0,"Step 1417, loss: 0.8314478397369385, step time: 18.377065658569336ms\r\n",,terminal_output +15860,13471761,"TERMINAL",0,0,"Step 1418, loss: 1.0302404165267944, step time: 17.519235610961914ms\r\n",,terminal_output +15861,13471827,"TERMINAL",0,0,"Step 1419, loss: 1.970213532447815, step time: 17.901897430419922ms\r\n",,terminal_output +15862,13471892,"TERMINAL",0,0,"Step 1420, loss: 0.9790772795677185, step time: 18.219709396362305ms\r\n",,terminal_output 
+15863,13471944,"TERMINAL",0,0,"Step 1421, loss: 2.166992664337158, step time: 17.551660537719727ms\r\n",,terminal_output +15864,13472058,"TERMINAL",0,0,"Step 1422, loss: 1.1376588344573975, step time: 17.705440521240234ms\r\nStep 1423, loss: 0.8292070031166077, step time: 17.881155014038086ms\r\n",,terminal_output +15865,13472170,"TERMINAL",0,0,"Step 1424, loss: 1.3625224828720093, step time: 17.553091049194336ms\r\nStep 1425, loss: 0.9196574687957764, step time: 17.88187026977539ms\r\n",,terminal_output +15866,13472239,"TERMINAL",0,0,"Step 1426, loss: 0.8144299387931824, step time: 17.961502075195312ms\r\n",,terminal_output +15867,13472298,"TERMINAL",0,0,"Step 1427, loss: 0.6970473527908325, step time: 17.830848693847656ms\r\n",,terminal_output +15868,13472383,"TERMINAL",0,0,"Step 1428, loss: 1.3361904621124268, step time: 17.454147338867188ms\r\n",,terminal_output +15869,13472443,"TERMINAL",0,0,"Step 1429, loss: 2.1066858768463135, step time: 18.20063591003418ms\r\n",,terminal_output +15870,13472501,"TERMINAL",0,0,"Step 1430, loss: 0.9639675617218018, step time: 17.68016815185547ms\r\n",,terminal_output +15871,13472648,"TERMINAL",0,0,"Step 1431, loss: 1.1678900718688965, step time: 17.879247665405273ms\r\nStep 1432, loss: 1.015129804611206, step time: 18.07713508605957ms\r\n",,terminal_output +15872,13472704,"TERMINAL",0,0,"Step 1433, loss: 0.8447422981262207, step time: 17.90452003479004ms\r\n",,terminal_output +15873,13472815,"TERMINAL",0,0,"Step 1434, loss: 0.9126633405685425, step time: 17.67730712890625ms\r\nStep 1435, loss: 1.117699384689331, step time: 18.06473731994629ms\r\n",,terminal_output +15874,13472872,"TERMINAL",0,0,"Step 1436, loss: 0.836051881313324, step time: 17.91548728942871ms\r\n",,terminal_output +15875,13472936,"TERMINAL",0,0,"Step 1437, loss: 1.1595618724822998, step time: 17.89116859436035ms\r\n",,terminal_output +15876,13473067,"TERMINAL",0,0,"Step 1438, loss: 0.9815731048583984, step time: 18.332958221435547ms\r\nStep 1439, loss: 0.9364102482795715, step time: 18.25857162475586ms\r\n",,terminal_output +15877,13473118,"TERMINAL",0,0,"Step 1440, loss: 0.8113387227058411, step time: 17.868757247924805ms\r\n",,terminal_output +15878,13473185,"TERMINAL",0,0,"Step 1441, loss: 0.9256907105445862, step time: 18.08905601501465ms\r\n",,terminal_output +15879,13473326,"TERMINAL",0,0,"Step 1442, loss: 0.9953712224960327, step time: 17.85755157470703ms\r\nStep 1443, loss: 0.907853364944458, step time: 17.838478088378906ms\r\n",,terminal_output +15880,13473411,"TERMINAL",0,0,"Step 1444, loss: 1.3422491550445557, step time: 17.986297607421875ms\r\n",,terminal_output +15881,13473464,"TERMINAL",0,0,"Step 1445, loss: 1.3296911716461182, step time: 17.422914505004883ms\r\n",,terminal_output +15882,13473572,"TERMINAL",0,0,"Step 1446, loss: 1.001581072807312, step time: 17.952919006347656ms\r\nStep 1447, loss: 1.1440315246582031, step time: 18.024682998657227ms\r\n",,terminal_output +15883,13473638,"TERMINAL",0,0,"Step 1448, loss: 1.053952693939209, step time: 17.956256866455078ms\r\n",,terminal_output +15884,13473691,"TERMINAL",0,0,"Step 1449, loss: 1.0093353986740112, step time: 17.553329467773438ms\r\n",,terminal_output +15885,13473759,"TERMINAL",0,0,"Step 1450, loss: 0.8482520580291748, step time: 18.146753311157227ms\r\n",,terminal_output +15886,13473882,"TERMINAL",0,0,"Step 1451, loss: 0.7953035831451416, step time: 17.53520965576172ms\r\nStep 1452, loss: 1.2284724712371826, step time: 18.0051326751709ms\r\n",,terminal_output +15887,13473945,"TERMINAL",0,0,"Step 1453, loss: 
1.0223883390426636, step time: 17.937183380126953ms\r\n",,terminal_output +15888,13474009,"TERMINAL",0,0,"Step 1454, loss: 1.0317597389221191, step time: 17.46964454650879ms\r\n",,terminal_output +15889,13474148,"TERMINAL",0,0,"Step 1455, loss: 1.199292778968811, step time: 29.848575592041016ms\r\nStep 1456, loss: 0.8061184287071228, step time: 27.536392211914062ms\r\n",,terminal_output +15890,13474213,"TERMINAL",0,0,"Step 1457, loss: 0.8427061438560486, step time: 18.387556076049805ms\r\n",,terminal_output +15891,13474338,"TERMINAL",0,0,"Step 1458, loss: 1.0545077323913574, step time: 18.676042556762695ms\r\nStep 1459, loss: 1.0171654224395752, step time: 18.303871154785156ms\r\n",,terminal_output +15892,13474402,"TERMINAL",0,0,"Step 1460, loss: 0.8006699085235596, step time: 17.815828323364258ms\r\n",,terminal_output +15893,13474465,"TERMINAL",0,0,"Step 1461, loss: 1.1273183822631836, step time: 17.597436904907227ms\r\n",,terminal_output +15894,13474530,"TERMINAL",0,0,"Step 1462, loss: 0.9786027073860168, step time: 18.358945846557617ms\r\n",,terminal_output +15895,13474593,"TERMINAL",0,0,"Step 1463, loss: 0.8179502487182617, step time: 17.782211303710938ms\r\n",,terminal_output +15896,13474658,"TERMINAL",0,0,"Step 1464, loss: 1.2180440425872803, step time: 18.01276206970215ms\r\n",,terminal_output +15897,13474723,"TERMINAL",0,0,"Step 1465, loss: 0.8048025965690613, step time: 18.340587615966797ms\r\n",,terminal_output +15898,13474787,"TERMINAL",0,0,"Step 1466, loss: 0.7629062533378601, step time: 18.114089965820312ms\r\n",,terminal_output +15899,13474851,"TERMINAL",0,0,"Step 1467, loss: 1.0937994718551636, step time: 18.112897872924805ms\r\n",,terminal_output +15900,13474917,"TERMINAL",0,0,"Step 1468, loss: 1.3370219469070435, step time: 18.240690231323242ms\r\n",,terminal_output +15901,13475014,"TERMINAL",0,0,"Step 1469, loss: 0.8881111145019531, step time: 17.882108688354492ms\r\n",,terminal_output +15902,13475067,"TERMINAL",0,0,"Step 1470, loss: 0.8131898641586304, step time: 17.55237579345703ms\r\n",,terminal_output +15903,13475179,"TERMINAL",0,0,"Step 1471, loss: 0.7223706841468811, step time: 18.511295318603516ms\r\nStep 1472, loss: 1.2229546308517456, step time: 18.068790435791016ms\r\n",,terminal_output +15904,13475327,"TERMINAL",0,0,"Step 1473, loss: 1.4920551776885986, step time: 17.947673797607422ms\r\nStep 1474, loss: 0.9023169875144958, step time: 18.259286880493164ms\r\n",,terminal_output +15905,13475435,"TERMINAL",0,0,"Step 1475, loss: 1.630976915359497, step time: 17.989397048950195ms\r\nStep 1476, loss: 0.798345148563385, step time: 17.89402961730957ms\r\n",,terminal_output +15906,13475497,"TERMINAL",0,0,"Step 1477, loss: 1.193069338798523, step time: 18.254995346069336ms\r\n",,terminal_output +15907,13475600,"TERMINAL",0,0,"Step 1478, loss: 0.7540730237960815, step time: 17.42267608642578ms\r\n",,terminal_output +15908,13475685,"TERMINAL",0,0,"Step 1479, loss: 0.7622054815292358, step time: 17.798423767089844ms\r\nStep 1480, loss: 1.1663265228271484, step time: 18.12744140625ms\r\n",,terminal_output +15909,13475780,"TERMINAL",0,0,"Step 1481, loss: 1.8617075681686401, step time: 17.79961585998535ms\r\n",,terminal_output +15910,13475888,"TERMINAL",0,0,"Step 1482, loss: 1.0771249532699585, step time: 19.132614135742188ms\r\nStep 1483, loss: 1.0357352495193481, step time: 18.344640731811523ms\r\n",,terminal_output +15911,13476006,"TERMINAL",0,0,"Step 1484, loss: 0.7719859480857849, step time: 17.917156219482422ms\r\nStep 1485, loss: 0.8647317886352539, step time: 
18.141508102416992ms\r\n",,terminal_output +15912,13476131,"TERMINAL",0,0,"Step 1486, loss: 1.0168497562408447, step time: 18.02682876586914ms\r\nStep 1487, loss: 1.371023416519165, step time: 17.55499839782715ms\r\n",,terminal_output +15913,13476194,"TERMINAL",0,0,"Step 1488, loss: 0.8261809349060059, step time: 17.808198928833008ms\r\n",,terminal_output +15914,13476259,"TERMINAL",0,0,"Step 1489, loss: 0.8390469551086426, step time: 18.36562156677246ms\r\n",,terminal_output +15915,13476326,"TERMINAL",0,0,"Step 1490, loss: 0.8236764073371887, step time: 18.01586151123047ms\r\n",,terminal_output +15916,13476388,"TERMINAL",0,0,"Step 1491, loss: 1.3049044609069824, step time: 18.013715744018555ms\r\n",,terminal_output +15917,13476452,"TERMINAL",0,0,"Step 1492, loss: 0.8521315455436707, step time: 17.798185348510742ms\r\n",,terminal_output +15918,13476518,"TERMINAL",0,0,"Step 1493, loss: 0.8239741921424866, step time: 17.976045608520508ms\r\n",,terminal_output +15919,13476583,"TERMINAL",0,0,"Step 1494, loss: 0.8109914660453796, step time: 17.70806312561035ms\r\n",,terminal_output +15920,13476648,"TERMINAL",0,0,"Step 1495, loss: 0.7781511545181274, step time: 18.207073211669922ms\r\n",,terminal_output +15921,13476713,"TERMINAL",0,0,"Step 1496, loss: 0.8280020356178284, step time: 17.949819564819336ms\r\n",,terminal_output +15922,13476778,"TERMINAL",0,0,"Step 1497, loss: 1.0371065139770508, step time: 17.995357513427734ms\r\n",,terminal_output +15923,13476844,"TERMINAL",0,0,"Step 1498, loss: 1.855574607849121, step time: 19.888877868652344ms\r\n",,terminal_output +15924,13476907,"TERMINAL",0,0,"Step 1499, loss: nan, step time: 18.172502517700195ms\r\n",,terminal_output +15925,13479863,"TERMINAL",0,0,"Step 1500, loss: 1.4529558420181274, step time: 37.346601486206055ms\r\n",,terminal_output +15926,13479914,"TERMINAL",0,0,"Step 1501, loss: 2.0108625888824463, step time: 26.706695556640625ms\r\n",,terminal_output +15927,13480010,"TERMINAL",0,0,"Step 1502, loss: 0.8891615271568298, step time: 20.19214630126953ms\r\n",,terminal_output +15928,13480062,"TERMINAL",0,0,"Step 1503, loss: 0.8466394543647766, step time: 19.76466178894043ms\r\n",,terminal_output +15929,13480161,"TERMINAL",0,0,"Step 1504, loss: 0.7657440900802612, step time: 19.176959991455078ms\r\nStep 1505, loss: 0.8191248178482056, step time: 18.897294998168945ms\r\n",,terminal_output +15930,13480255,"TERMINAL",0,0,"Step 1506, loss: 0.8542219996452332, step time: 18.338441848754883ms\r\n",,terminal_output +15931,13480362,"TERMINAL",0,0,"Step 1507, loss: 0.7870840430259705, step time: 19.29020881652832ms\r\nStep 1508, loss: 1.6974672079086304, step time: 18.566131591796875ms\r\n",,terminal_output +15932,13480426,"TERMINAL",0,0,"Step 1509, loss: 0.7244929671287537, step time: 18.88442039489746ms\r\n",,terminal_output +15933,13480489,"TERMINAL",0,0,"Step 1510, loss: 0.9135421514511108, step time: 18.358469009399414ms\r\n",,terminal_output +15934,13480561,"TERMINAL",0,0,"Step 1511, loss: 0.8336856365203857, step time: 18.503904342651367ms\r\n",,terminal_output +15935,13480621,"TERMINAL",0,0,"Step 1512, loss: 0.8712185621261597, step time: 17.798662185668945ms\r\n",,terminal_output +15936,13480687,"TERMINAL",0,0,"Step 1513, loss: 1.016741394996643, step time: 19.27924156188965ms\r\n",,terminal_output +15937,13480752,"TERMINAL",0,0,"Step 1514, loss: 0.7493240833282471, step time: 17.76909828186035ms\r\n",,terminal_output +15938,13480823,"TERMINAL",0,0,"Step 1515, loss: 0.9758703708648682, step time: 17.5325870513916ms\r\n",,terminal_output 
+15939,13480902,"TERMINAL",0,0,"Step 1516, loss: 0.9194523096084595, step time: 17.405033111572266ms\r\n",,terminal_output +15940,13480956,"TERMINAL",0,0,"Step 1517, loss: 0.6729057431221008, step time: 22.796630859375ms\r\n",,terminal_output +15941,13481011,"TERMINAL",0,0,"Step 1518, loss: 1.1747474670410156, step time: 18.176794052124023ms\r\n",,terminal_output +15942,13481119,"TERMINAL",0,0,"Step 1519, loss: 1.3773874044418335, step time: 18.064498901367188ms\r\nStep 1520, loss: 0.8402460813522339, step time: 20.267248153686523ms\r\n",,terminal_output +15943,13481192,"TERMINAL",0,0,"Step 1521, loss: 0.8388347625732422, step time: 19.936323165893555ms\r\n",,terminal_output +15944,13481253,"TERMINAL",0,0,"Step 1522, loss: 0.6987299919128418, step time: 18.929719924926758ms\r\n",,terminal_output +15945,13481316,"TERMINAL",0,0,"Step 1523, loss: 0.6435758471488953, step time: 18.921375274658203ms\r\n",,terminal_output +15946,13481383,"TERMINAL",0,0,"Step 1524, loss: 0.8201982378959656, step time: 17.809391021728516ms\r\n",,terminal_output +15947,13481449,"TERMINAL",0,0,"Step 1525, loss: 1.295487880706787, step time: 30.41243553161621ms\r\n",,terminal_output +15948,13481514,"TERMINAL",0,0,"Step 1526, loss: 1.0059784650802612, step time: 19.335269927978516ms\r\n",,terminal_output +15949,13481576,"TERMINAL",0,0,"Step 1527, loss: 0.6903620958328247, step time: 18.402814865112305ms\r\n",,terminal_output +15950,13481639,"TERMINAL",0,0,"Step 1528, loss: 0.7587352395057678, step time: 17.98701286315918ms\r\n",,terminal_output +15951,13481703,"TERMINAL",0,0,"Step 1529, loss: 0.7418766021728516, step time: 19.147634506225586ms\r\n",,terminal_output +15952,13481767,"TERMINAL",0,0,"Step 1530, loss: 1.1260035037994385, step time: 18.49055290222168ms\r\n",,terminal_output +15953,13481830,"TERMINAL",0,0,"Step 1531, loss: 1.2743966579437256, step time: 18.827199935913086ms\r\n",,terminal_output +15954,13481898,"TERMINAL",0,0,"Step 1532, loss: 1.1668003797531128, step time: 18.34726333618164ms\r\n",,terminal_output +15955,13481968,"TERMINAL",0,0,"Step 1533, loss: 0.7947860956192017, step time: 18.724918365478516ms\r\n",,terminal_output +15956,13482035,"TERMINAL",0,0,"Step 1534, loss: 0.7253834009170532, step time: 18.8295841217041ms\r\n",,terminal_output +15957,13482092,"TERMINAL",0,0,"Step 1535, loss: 1.022500991821289, step time: 18.675804138183594ms\r\n",,terminal_output +15958,13482157,"TERMINAL",0,0,"Step 1536, loss: 0.8821784853935242, step time: 23.482084274291992ms\r\n",,terminal_output +15959,13482219,"TERMINAL",0,0,"Step 1537, loss: 1.0980336666107178, step time: 22.01366424560547ms\r\n",,terminal_output +15960,13482285,"TERMINAL",0,0,"Step 1538, loss: 1.0978610515594482, step time: 17.348051071166992ms\r\n",,terminal_output +15961,13482351,"TERMINAL",0,0,"Step 1539, loss: 0.88130784034729, step time: 17.490386962890625ms\r\n",,terminal_output +15962,13482416,"TERMINAL",0,0,"Step 1540, loss: 0.7113392353057861, step time: 18.329381942749023ms\r\n",,terminal_output +15963,13482481,"TERMINAL",0,0,"Step 1541, loss: 0.8731212615966797, step time: 18.68605613708496ms\r\n",,terminal_output +15964,13482546,"TERMINAL",0,0,"Step 1542, loss: 1.2966474294662476, step time: 18.271207809448242ms\r\n",,terminal_output +15965,13482617,"TERMINAL",0,0,"Step 1543, loss: 0.7201831340789795, step time: 18.717288970947266ms\r\n",,terminal_output +15966,13482680,"TERMINAL",0,0,"Step 1544, loss: 1.9992055892944336, step time: 18.108367919921875ms\r\n",,terminal_output +15967,13482789,"TERMINAL",0,0,"Step 1545, loss: 
1.1753038167953491, step time: 18.095970153808594ms\r\nStep 1546, loss: 0.9081177711486816, step time: 18.02682876586914ms\r\n",,terminal_output +15968,13483139,"TERMINAL",0,0,"Step 1547, loss: 0.8435634970664978, step time: 351.54056549072266ms\r\n",,terminal_output +15969,13483193,"TERMINAL",0,0,"Step 1548, loss: 0.8977615833282471, step time: 24.886608123779297ms\r\n",,terminal_output +15970,13483255,"TERMINAL",0,0,"Step 1549, loss: 0.617733359336853, step time: 19.974470138549805ms\r\n",,terminal_output +15971,13483388,"TERMINAL",0,0,"Step 1550, loss: 1.1328885555267334, step time: 18.90397071838379ms\r\nStep 1551, loss: 0.6241264939308167, step time: 17.70162582397461ms\r\n",,terminal_output +15972,13483449,"TERMINAL",0,0,"Step 1552, loss: 0.7494999170303345, step time: 17.454862594604492ms\r\n",,terminal_output +15973,13483513,"TERMINAL",0,0,"Step 1553, loss: 0.9485476016998291, step time: 17.84968376159668ms\r\n",,terminal_output +15974,13483577,"TERMINAL",0,0,"Step 1554, loss: 0.8441201448440552, step time: 17.103195190429688ms\r\n",,terminal_output +15975,13483642,"TERMINAL",0,0,"Step 1555, loss: 0.7754071354866028, step time: 18.187522888183594ms\r\n",,terminal_output +15976,13483708,"TERMINAL",0,0,"Step 1556, loss: 1.9044228792190552, step time: 17.853736877441406ms\r\n",,terminal_output +15977,13483780,"TERMINAL",0,0,"Step 1557, loss: 1.119070053100586, step time: 17.49873161315918ms\r\n",,terminal_output +15978,13483868,"TERMINAL",0,0,"Step 1558, loss: 0.8255000114440918, step time: 17.24100112915039ms\r\n",,terminal_output +15979,13483917,"TERMINAL",0,0,"Step 1559, loss: 1.004231572151184, step time: 17.73858070373535ms\r\n",,terminal_output +15980,13483980,"TERMINAL",0,0,"Step 1560, loss: 0.7504690289497375, step time: 17.19522476196289ms\r\n",,terminal_output +15981,13484043,"TERMINAL",0,0,"Step 1561, loss: 0.6346629858016968, step time: 17.561912536621094ms\r\n",,terminal_output +15982,13484106,"TERMINAL",0,0,"Step 1562, loss: 1.9712926149368286, step time: 19.239425659179688ms\r\n",,terminal_output +15983,13484169,"TERMINAL",0,0,"Step 1563, loss: 0.629068911075592, step time: 17.260074615478516ms\r\n",,terminal_output +15984,13484233,"TERMINAL",0,0,"Step 1564, loss: 1.894705891609192, step time: 17.379283905029297ms\r\n",,terminal_output +15985,13484298,"TERMINAL",0,0,"Step 1565, loss: 0.8273833990097046, step time: 17.705917358398438ms\r\n",,terminal_output +15986,13484364,"TERMINAL",0,0,"Step 1566, loss: 0.6998957395553589, step time: 17.188310623168945ms\r\n",,terminal_output +15987,13484435,"TERMINAL",0,0,"Step 1567, loss: 1.1576002836227417, step time: 17.37046241760254ms\r\n",,terminal_output +15988,13484496,"TERMINAL",0,0,"Step 1568, loss: 1.2435810565948486, step time: 17.441987991333008ms\r\n",,terminal_output +15989,13484604,"TERMINAL",0,0,"Step 1569, loss: 1.1282768249511719, step time: 16.9980525970459ms\r\nStep 1570, loss: 0.9255958199501038, step time: 17.5168514251709ms\r\n",,terminal_output +15990,13484720,"TERMINAL",0,0,"Step 1571, loss: 0.6894004344940186, step time: 17.44985580444336ms\r\nStep 1572, loss: 1.655692458152771, step time: 17.368316650390625ms\r\n",,terminal_output +15991,13484785,"TERMINAL",0,0,"Step 1573, loss: 0.9286782741546631, step time: 17.18592643737793ms\r\n",,terminal_output +15992,13484846,"TERMINAL",0,0,"Step 1574, loss: 0.7228723168373108, step time: 20.095348358154297ms\r\n",,terminal_output +15993,13484911,"TERMINAL",0,0,"Step 1575, loss: 0.7024320363998413, step time: 17.38739013671875ms\r\n",,terminal_output 
+15994,13484974,"TERMINAL",0,0,"Step 1576, loss: 0.9626418352127075, step time: 17.628192901611328ms\r\n",,terminal_output +15995,13485038,"TERMINAL",0,0,"Step 1577, loss: 1.1137465238571167, step time: 17.580032348632812ms\r\n",,terminal_output +15996,13485098,"TERMINAL",0,0,"Step 1578, loss: 0.9177200794219971, step time: 16.95871353149414ms\r\n",,terminal_output +15997,13485167,"TERMINAL",0,0,"Step 1579, loss: 1.011858344078064, step time: 17.328977584838867ms\r\n",,terminal_output +15998,13485274,"TERMINAL",0,0,"Step 1580, loss: 1.2287107706069946, step time: 17.598390579223633ms\r\nStep 1581, loss: 0.8168590068817139, step time: 17.130613327026367ms\r\n",,terminal_output +15999,13485339,"TERMINAL",0,0,"Step 1582, loss: 0.7695603966712952, step time: 17.53711700439453ms\r\n",,terminal_output +16000,13485402,"TERMINAL",0,0,"Step 1583, loss: 1.0023016929626465, step time: 17.759084701538086ms\r\n",,terminal_output +16001,13485465,"TERMINAL",0,0,"Step 1584, loss: 0.8787809014320374, step time: 17.362594604492188ms\r\n",,terminal_output +16002,13485528,"TERMINAL",0,0,"Step 1585, loss: 1.2775943279266357, step time: 17.39215850830078ms\r\n",,terminal_output +16003,13485594,"TERMINAL",0,0,"Step 1586, loss: 0.8825451731681824, step time: 17.409563064575195ms\r\n",,terminal_output +16004,13485657,"TERMINAL",0,0,"Step 1587, loss: 1.0294265747070312, step time: 16.97516441345215ms\r\n",,terminal_output +16005,13485724,"TERMINAL",0,0,"Step 1588, loss: 0.6155805587768555, step time: 17.170190811157227ms\r\n",,terminal_output +16006,13485789,"TERMINAL",0,0,"Step 1589, loss: 0.8265912532806396, step time: 17.583608627319336ms\r\n",,terminal_output +16007,13485903,"TERMINAL",0,0,"Step 1590, loss: 1.0985435247421265, step time: 17.241716384887695ms\r\nStep 1591, loss: 0.9417194724082947, step time: 17.304658889770508ms\r\n",,terminal_output +16008,13485966,"TERMINAL",0,0,"Step 1592, loss: 0.6327294707298279, step time: 17.63153076171875ms\r\n",,terminal_output +16009,13486029,"TERMINAL",0,0,"Step 1593, loss: 0.8934794068336487, step time: 17.186403274536133ms\r\n",,terminal_output +16010,13486093,"TERMINAL",0,0,"Step 1594, loss: 1.2586129903793335, step time: 17.310142517089844ms\r\n",,terminal_output +16011,13486153,"TERMINAL",0,0,"Step 1595, loss: 0.7612659335136414, step time: 17.413854598999023ms\r\n",,terminal_output +16012,13486219,"TERMINAL",0,0,"Step 1596, loss: 0.6860966086387634, step time: 17.296314239501953ms\r\n",,terminal_output +16013,13486281,"TERMINAL",0,0,"Step 1597, loss: 0.7321569323539734, step time: 17.2884464263916ms\r\n",,terminal_output +16014,13486345,"TERMINAL",0,0,"Step 1598, loss: 0.6976541876792908, step time: 17.403364181518555ms\r\n",,terminal_output +16015,13486409,"TERMINAL",0,0,"Step 1599, loss: 1.2474632263183594, step time: 17.182111740112305ms\r\n",,terminal_output +16016,13486470,"TERMINAL",0,0,"Step 1600, loss: 0.7704007029533386, step time: 17.21334457397461ms\r\n",,terminal_output +16017,13486566,"TERMINAL",0,0,"Step 1601, loss: 1.0379765033721924, step time: 17.754077911376953ms\r\n",,terminal_output +16018,13486617,"TERMINAL",0,0,"Step 1602, loss: 0.6191462278366089, step time: 17.069578170776367ms\r\n",,terminal_output +16019,13486727,"TERMINAL",0,0,"Step 1603, loss: 0.6711424589157104, step time: 17.225980758666992ms\r\nStep 1604, loss: 0.8199820518493652, step time: 17.58551597595215ms\r\n",,terminal_output +16020,13486789,"TERMINAL",0,0,"Step 1605, loss: 0.7371923923492432, step time: 17.275333404541016ms\r\n",,terminal_output 
+16021,13486853,"TERMINAL",0,0,"Step 1606, loss: 1.2570416927337646, step time: 17.332077026367188ms\r\n",,terminal_output +16022,13486922,"TERMINAL",0,0,"Step 1607, loss: 0.6875560283660889, step time: 17.564058303833008ms\r\n",,terminal_output +16023,13486984,"TERMINAL",0,0,"Step 1608, loss: 1.206570029258728, step time: 17.148733139038086ms\r\n",,terminal_output +16024,13487050,"TERMINAL",0,0,"Step 1609, loss: 0.6398335099220276, step time: 17.140865325927734ms\r\n",,terminal_output +16025,13487158,"TERMINAL",0,0,"Step 1610, loss: 1.0050143003463745, step time: 17.58742332458496ms\r\nStep 1611, loss: 1.0256019830703735, step time: 17.125368118286133ms\r\n",,terminal_output +16026,13487224,"TERMINAL",0,0,"Step 1612, loss: 0.7869951725006104, step time: 18.745899200439453ms\r\n",,terminal_output +16027,13487286,"TERMINAL",0,0,"Step 1613, loss: 0.591250479221344, step time: 17.870426177978516ms\r\n",,terminal_output +16028,13487390,"TERMINAL",0,0,"Step 1614, loss: 0.7215008735656738, step time: 17.0285701751709ms\r\n",,terminal_output +16029,13487432,"TERMINAL",0,0,"Step 1615, loss: 0.7079718112945557, step time: 17.249345779418945ms\r\n",,terminal_output +16030,13487540,"TERMINAL",0,0,"Step 1616, loss: 1.1814515590667725, step time: 17.557382583618164ms\r\nStep 1617, loss: 1.3472199440002441, step time: 17.050981521606445ms\r\n",,terminal_output +16031,13487614,"TERMINAL",0,0,"Step 1618, loss: 1.4841207265853882, step time: 17.37689971923828ms\r\n",,terminal_output +16032,13487671,"TERMINAL",0,0,"Step 1619, loss: 1.4261733293533325, step time: 17.740249633789062ms\r\n",,terminal_output +16033,13487797,"TERMINAL",0,0,"Step 1620, loss: 1.1350243091583252, step time: 17.40407943725586ms\r\nStep 1621, loss: 0.9161179661750793, step time: 17.596721649169922ms\r\n",,terminal_output +16034,13487859,"TERMINAL",0,0,"Step 1622, loss: 1.2052278518676758, step time: 17.63749122619629ms\r\n",,terminal_output +16035,13487923,"TERMINAL",0,0,"Step 1623, loss: 0.6000490784645081, step time: 17.23480224609375ms\r\n",,terminal_output +16036,13487987,"TERMINAL",0,0,"Step 1624, loss: 0.7377719283103943, step time: 17.239809036254883ms\r\n",,terminal_output +16037,13488051,"TERMINAL",0,0,"Step 1625, loss: 0.5138790011405945, step time: 21.126270294189453ms\r\n",,terminal_output +16038,13488115,"TERMINAL",0,0,"Step 1626, loss: 1.0234969854354858, step time: 17.104148864746094ms\r\n",,terminal_output +16039,13488174,"TERMINAL",0,0,"Step 1627, loss: 0.7231090664863586, step time: 17.444849014282227ms\r\n",,terminal_output +16040,13488253,"TERMINAL",0,0,"Step 1628, loss: 0.7571076154708862, step time: 17.364978790283203ms\r\n",,terminal_output +16041,13488306,"TERMINAL",0,0,"Step 1629, loss: 1.5050647258758545, step time: 17.182588577270508ms\r\n",,terminal_output +16042,13488373,"TERMINAL",0,0,"Step 1630, loss: 0.6459687352180481, step time: 17.347097396850586ms\r\n",,terminal_output +16043,13488435,"TERMINAL",0,0,"Step 1631, loss: 1.2903434038162231, step time: 17.658710479736328ms\r\n",,terminal_output +16044,13488498,"TERMINAL",0,0,"Step 1632, loss: 0.6246881484985352, step time: 17.039775848388672ms\r\n",,terminal_output +16045,13488563,"TERMINAL",0,0,"Step 1633, loss: 0.9084457755088806, step time: 17.353296279907227ms\r\n",,terminal_output +16046,13488631,"TERMINAL",0,0,"Step 1634, loss: 0.6515172719955444, step time: 17.637014389038086ms\r\n",,terminal_output +16047,13488698,"TERMINAL",0,0,"Step 1635, loss: 0.639416515827179, step time: 16.934633255004883ms\r\n",,terminal_output 
+16048,13488766,"TERMINAL",0,0,"Step 1636, loss: 0.7600330710411072, step time: 17.29893684387207ms\r\n",,terminal_output +16049,13488831,"TERMINAL",0,0,"Step 1637, loss: 0.8612014651298523, step time: 17.45748519897461ms\r\n",,terminal_output +16050,13488898,"TERMINAL",0,0,"Step 1638, loss: 0.9524387121200562, step time: 16.981840133666992ms\r\n",,terminal_output +16051,13488962,"TERMINAL",0,0,"Step 1639, loss: 0.7980165481567383, step time: 17.334461212158203ms\r\n",,terminal_output +16052,13489031,"TERMINAL",0,0,"Step 1640, loss: 0.6637346744537354, step time: 18.059253692626953ms\r\n",,terminal_output +16053,13489095,"TERMINAL",0,0,"Step 1641, loss: 1.5097140073776245, step time: 17.03500747680664ms\r\n",,terminal_output +16054,13489160,"TERMINAL",0,0,"Step 1642, loss: 0.7937094569206238, step time: 17.281770706176758ms\r\n",,terminal_output +16055,13489208,"TERMINAL",0,0,"Step 1643, loss: 0.7046133279800415, step time: 17.573833465576172ms\r\n",,terminal_output +16056,13489278,"TERMINAL",0,0,"Step 1644, loss: 0.6741011738777161, step time: 17.117738723754883ms\r\n",,terminal_output +16057,13489420,"TERMINAL",0,0,"Step 1645, loss: 1.001129150390625, step time: 17.288923263549805ms\r\nStep 1646, loss: 0.6462862491607666, step time: 17.3797607421875ms\r\n",,terminal_output +16058,13489506,"TERMINAL",0,0,"Step 1647, loss: 0.6475381255149841, step time: 17.099380493164062ms\r\nStep 1648, loss: 0.7155217528343201, step time: 17.443180084228516ms\r\n",,terminal_output +16059,13489564,"TERMINAL",0,0,"Step 1649, loss: 1.0327256917953491, step time: 17.364501953125ms\r\n",,terminal_output +16060,13489628,"TERMINAL",0,0,"Step 1650, loss: 0.7618099451065063, step time: 17.366886138916016ms\r\n",,terminal_output +16061,13489692,"TERMINAL",0,0,"Step 1651, loss: 0.9597911834716797, step time: 17.9903507232666ms\r\n",,terminal_output +16062,13489755,"TERMINAL",0,0,"Step 1652, loss: 0.5711528062820435, step time: 17.42076873779297ms\r\n",,terminal_output +16063,13489817,"TERMINAL",0,0,"Step 1653, loss: 1.1012192964553833, step time: 17.3337459564209ms\r\n",,terminal_output +16064,13489881,"TERMINAL",0,0,"Step 1654, loss: 0.7719597220420837, step time: 17.144203186035156ms\r\n",,terminal_output +16065,13489946,"TERMINAL",0,0,"Step 1655, loss: 0.6170490980148315, step time: 17.64202117919922ms\r\n",,terminal_output +16066,13490007,"TERMINAL",0,0,"Step 1656, loss: 0.5631877183914185, step time: 17.21811294555664ms\r\n",,terminal_output +16067,13490080,"TERMINAL",0,0,"Step 1657, loss: 0.6665654182434082, step time: 17.29726791381836ms\r\n",,terminal_output +16068,13490135,"TERMINAL",0,0,"Step 1658, loss: 0.7977774143218994, step time: 17.308950424194336ms\r\n",,terminal_output +16069,13490202,"TERMINAL",0,0,"Step 1659, loss: 0.713200032711029, step time: 17.181873321533203ms\r\n",,terminal_output +16070,13490263,"TERMINAL",0,0,"Step 1660, loss: 1.6325666904449463, step time: 17.325639724731445ms\r\n",,terminal_output +16071,13490328,"TERMINAL",0,0,"Step 1661, loss: 1.0228440761566162, step time: 17.419099807739258ms\r\n",,terminal_output +16072,13490395,"TERMINAL",0,0,"Step 1662, loss: 0.5875503420829773, step time: 18.694400787353516ms\r\n",,terminal_output +16073,13490442,"TERMINAL",0,0,"Step 1663, loss: 1.0317984819412231, step time: 18.113374710083008ms\r\n",,terminal_output +16074,13490538,"TERMINAL",0,0,"Step 1664, loss: 0.6190814971923828, step time: 17.53067970275879ms\r\n",,terminal_output +16075,13490591,"TERMINAL",0,0,"Step 1665, loss: 1.0945616960525513, step time: 
17.025232315063477ms\r\n",,terminal_output +16076,13490696,"TERMINAL",0,0,"Step 1666, loss: 0.6523257493972778, step time: 17.43602752685547ms\r\nStep 1667, loss: 0.5620846152305603, step time: 17.533540725708008ms\r\n",,terminal_output +16077,13490813,"TERMINAL",0,0,"Step 1668, loss: 0.7151626348495483, step time: 17.136096954345703ms\r\nStep 1669, loss: 0.6259867548942566, step time: 17.193078994750977ms\r\n",,terminal_output +16078,13490877,"TERMINAL",0,0,"Step 1670, loss: 0.6387184858322144, step time: 17.46678352355957ms\r\n",,terminal_output +16079,13490945,"TERMINAL",0,0,"Step 1671, loss: 0.6102519035339355, step time: 16.955137252807617ms\r\n",,terminal_output +16080,13491011,"TERMINAL",0,0,"Step 1672, loss: 0.6025556921958923, step time: 17.41504669189453ms\r\n",,terminal_output +16081,13491086,"TERMINAL",0,0,"Step 1673, loss: 0.594477653503418, step time: 17.642974853515625ms\r\n",,terminal_output +16082,13491140,"TERMINAL",0,0,"Step 1674, loss: 0.8564729690551758, step time: 17.020702362060547ms\r\n",,terminal_output +16083,13491208,"TERMINAL",0,0,"Step 1675, loss: 0.9546167254447937, step time: 30.714750289916992ms\r\n",,terminal_output +16084,13491267,"TERMINAL",0,0,"Step 1676, loss: 0.6542630791664124, step time: 20.956754684448242ms\r\n",,terminal_output +16085,13491331,"TERMINAL",0,0,"Step 1677, loss: 0.9875368475914001, step time: 25.345802307128906ms\r\n",,terminal_output +16086,13491391,"TERMINAL",0,0,"Step 1678, loss: 0.6861509084701538, step time: 20.618438720703125ms\r\n",,terminal_output +16087,13491453,"TERMINAL",0,0,"Step 1679, loss: 0.8557046055793762, step time: 19.34194564819336ms\r\n",,terminal_output +16088,13491516,"TERMINAL",0,0,"Step 1680, loss: 0.5982068777084351, step time: 18.141746520996094ms\r\n",,terminal_output +16089,13491581,"TERMINAL",0,0,"Step 1681, loss: 1.061423659324646, step time: 17.892122268676758ms\r\n",,terminal_output +16090,13491678,"TERMINAL",0,0,"Step 1682, loss: 0.4920685887336731, step time: 17.883777618408203ms\r\n",,terminal_output +16091,13491739,"TERMINAL",0,0,"Step 1683, loss: 0.6280431747436523, step time: 17.503976821899414ms\r\n",,terminal_output +16092,13491801,"TERMINAL",0,0,"Step 1684, loss: 0.8268080353736877, step time: 17.424583435058594ms\r\n",,terminal_output +16093,13491863,"TERMINAL",0,0,"Step 1685, loss: 0.5881810188293457, step time: 17.788410186767578ms\r\n",,terminal_output +16094,13491926,"TERMINAL",0,0,"Step 1686, loss: 0.8866826295852661, step time: 17.251253128051758ms\r\n",,terminal_output +16095,13491989,"TERMINAL",0,0,"Step 1687, loss: 0.6112675666809082, step time: 17.546415328979492ms\r\n",,terminal_output +16096,13492053,"TERMINAL",0,0,"Step 1688, loss: 0.8149237036705017, step time: 20.937204360961914ms\r\n",,terminal_output +16097,13492145,"TERMINAL",0,0,"Step 1689, loss: 0.6043192744255066, step time: 17.338991165161133ms\r\nStep 1690, loss: 1.0201399326324463, step time: 17.60244369506836ms\r\n",,terminal_output +16098,13492211,"TERMINAL",0,0,"Step 1691, loss: 0.6619279980659485, step time: 17.824172973632812ms\r\n",,terminal_output +16099,13492271,"TERMINAL",0,0,"Step 1692, loss: 0.7733119130134583, step time: 17.256736755371094ms\r\n",,terminal_output +16100,13492335,"TERMINAL",0,0,"Step 1693, loss: 0.5436730980873108, step time: 17.630338668823242ms\r\n",,terminal_output +16101,13492398,"TERMINAL",0,0,"Step 1694, loss: 0.5937609076499939, step time: 17.760276794433594ms\r\n",,terminal_output +16102,13492461,"TERMINAL",0,0,"Step 1695, loss: 0.8068756461143494, step time: 
17.186880111694336ms\r\n",,terminal_output +16103,13492527,"TERMINAL",0,0,"Step 1696, loss: 0.5533164739608765, step time: 17.304182052612305ms\r\n",,terminal_output +16104,13492588,"TERMINAL",0,0,"Step 1697, loss: 0.8640456795692444, step time: 18.728256225585938ms\r\n",,terminal_output +16105,13492651,"TERMINAL",0,0,"Step 1698, loss: 0.5611826181411743, step time: 17.554044723510742ms\r\n",,terminal_output +16106,13492714,"TERMINAL",0,0,"Step 1699, loss: 1.0273284912109375, step time: 18.053531646728516ms\r\n",,terminal_output +16107,13492834,"TERMINAL",0,0,"Step 1700, loss: 0.7794484496116638, step time: 18.018484115600586ms\r\nStep 1701, loss: 0.6629684567451477, step time: 17.406702041625977ms\r\n",,terminal_output +16108,13492899,"TERMINAL",0,0,"Step 1702, loss: 0.5176922678947449, step time: 17.306804656982422ms\r\n",,terminal_output +16109,13492964,"TERMINAL",0,0,"Step 1703, loss: 0.5903071165084839, step time: 18.027782440185547ms\r\n",,terminal_output +16110,13493028,"TERMINAL",0,0,"Step 1704, loss: 0.5194821953773499, step time: 17.34757423400879ms\r\n",,terminal_output +16111,13493090,"TERMINAL",0,0,"Step 1705, loss: 1.0551925897598267, step time: 17.640113830566406ms\r\n",,terminal_output +16112,13493150,"TERMINAL",0,0,"Step 1706, loss: 0.5499284863471985, step time: 17.867326736450195ms\r\n",,terminal_output +16113,13493275,"TERMINAL",0,0,"Step 1707, loss: 0.6121250987052917, step time: 17.30942726135254ms\r\nStep 1708, loss: 0.54842209815979, step time: 17.402172088623047ms\r\n",,terminal_output +16114,13493338,"TERMINAL",0,0,"Step 1709, loss: 0.6162003874778748, step time: 17.618179321289062ms\r\n",,terminal_output +16115,13493403,"TERMINAL",0,0,"Step 1710, loss: 0.5745964050292969, step time: 17.19045639038086ms\r\n",,terminal_output +16116,13493466,"TERMINAL",0,0,"Step 1711, loss: 1.5358421802520752, step time: 17.39358901977539ms\r\n",,terminal_output +16117,13493535,"TERMINAL",0,0,"Step 1712, loss: 0.7130601406097412, step time: 17.81773567199707ms\r\n",,terminal_output +16118,13493640,"TERMINAL",0,0,"Step 1713, loss: 0.6285957098007202, step time: 17.282724380493164ms\r\n",,terminal_output +16119,13493652,"TERMINAL",0,0,"Step 1714, loss: 0.7631869912147522, step time: 17.368555068969727ms\r\n",,terminal_output +16120,13493746,"TERMINAL",0,0,"Step 1715, loss: 0.43935272097587585, step time: 17.56143569946289ms\r\n",,terminal_output +16121,13493853,"TERMINAL",0,0,"Step 1716, loss: 0.6725702881813049, step time: 17.056941986083984ms\r\nStep 1717, loss: 0.5074970722198486, step time: 17.379045486450195ms\r\n",,terminal_output +16122,13493916,"TERMINAL",0,0,"Step 1718, loss: 0.9999223351478577, step time: 17.62676239013672ms\r\n",,terminal_output +16123,13493981,"TERMINAL",0,0,"Step 1719, loss: 1.1174999475479126, step time: 17.138242721557617ms\r\n",,terminal_output +16124,13494045,"TERMINAL",0,0,"Step 1720, loss: 1.040444254875183, step time: 17.409086227416992ms\r\n",,terminal_output +16125,13494155,"TERMINAL",0,0,"Step 1721, loss: 0.7371370792388916, step time: 17.644166946411133ms\r\nStep 1722, loss: 0.7478628158569336, step time: 17.110347747802734ms\r\n",,terminal_output +16126,13494250,"TERMINAL",0,0,"Step 1723, loss: 0.792447566986084, step time: 17.41957664489746ms\r\n",,terminal_output +16127,13494320,"TERMINAL",0,0,"Step 1724, loss: 0.6896923780441284, step time: 19.494295120239258ms\r\n",,terminal_output +16128,13494383,"TERMINAL",0,0,"Step 1725, loss: 0.531403124332428, step time: 17.50946044921875ms\r\n",,terminal_output +16129,13494447,"TERMINAL",0,0,"Step 
1726, loss: 0.5844637751579285, step time: 17.444610595703125ms\r\n",,terminal_output +16130,13494508,"TERMINAL",0,0,"Step 1727, loss: 0.78983473777771, step time: 17.684459686279297ms\r\n",,terminal_output +16131,13494574,"TERMINAL",0,0,"Step 1728, loss: 0.9362884163856506, step time: 17.121553421020508ms\r\n",,terminal_output +16132,13494634,"TERMINAL",0,0,"Step 1729, loss: 0.5970510244369507, step time: 17.5473690032959ms\r\n",,terminal_output +16133,13494700,"TERMINAL",0,0,"Step 1730, loss: 0.5606725215911865, step time: 17.615318298339844ms\r\n",,terminal_output +16134,13494761,"TERMINAL",0,0,"Step 1731, loss: 0.9067614078521729, step time: 17.363548278808594ms\r\n",,terminal_output +16135,13494825,"TERMINAL",0,0,"Step 1732, loss: 0.5992577075958252, step time: 17.403125762939453ms\r\n",,terminal_output +16136,13495164,"TERMINAL",0,0,"Step 1733, loss: 1.4460911750793457, step time: 355.7872772216797ms\r\n",,terminal_output +16137,13495276,"TERMINAL",0,0,"Step 1734, loss: 0.9961471557617188, step time: 24.681568145751953ms\r\n",,terminal_output +16138,13495323,"TERMINAL",0,0,"Step 1735, loss: 0.5754312872886658, step time: 19.884347915649414ms\r\n",,terminal_output +16139,13495431,"TERMINAL",0,0,"Step 1736, loss: 0.582155168056488, step time: 18.699169158935547ms\r\nStep 1737, loss: 1.0577336549758911, step time: 17.685413360595703ms\r\n",,terminal_output +16140,13495525,"TERMINAL",0,0,"Step 1738, loss: 0.5323503613471985, step time: 17.557382583618164ms\r\n",,terminal_output +16141,13495638,"TERMINAL",0,0,"Step 1739, loss: 0.8020661473274231, step time: 18.022775650024414ms\r\nStep 1740, loss: 0.686117947101593, step time: 17.337560653686523ms\r\n",,terminal_output +16142,13495700,"TERMINAL",0,0,"Step 1741, loss: 0.5766202807426453, step time: 17.653226852416992ms\r\n",,terminal_output +16143,13495764,"TERMINAL",0,0,"Step 1742, loss: 1.153761625289917, step time: 17.779827117919922ms\r\n",,terminal_output +16144,13495829,"TERMINAL",0,0,"Step 1743, loss: 0.8391830921173096, step time: 17.169713973999023ms\r\n",,terminal_output +16145,13495893,"TERMINAL",0,0,"Step 1744, loss: 1.3045262098312378, step time: 17.322540283203125ms\r\n",,terminal_output +16146,13495958,"TERMINAL",0,0,"Step 1745, loss: 0.7516317963600159, step time: 17.838478088378906ms\r\n",,terminal_output +16147,13496019,"TERMINAL",0,0,"Step 1746, loss: 0.8732985258102417, step time: 17.232656478881836ms\r\n",,terminal_output +16148,13496090,"TERMINAL",0,0,"Step 1747, loss: 0.6571415066719055, step time: 17.359256744384766ms\r\n",,terminal_output +16149,13496143,"TERMINAL",0,0,"Step 1748, loss: 0.7648025155067444, step time: 17.720699310302734ms\r\n",,terminal_output +16150,13496214,"TERMINAL",0,0,"Step 1749, loss: 0.542279839515686, step time: 17.2574520111084ms\r\n",,terminal_output +16151,13496278,"TERMINAL",0,0,"Step 1750, loss: 0.8479506969451904, step time: 17.366647720336914ms\r\n",,terminal_output +16152,13496393,"TERMINAL",0,0,"Step 1751, loss: 0.90125972032547, step time: 17.661333084106445ms\r\nStep 1752, loss: 0.49488839507102966, step time: 17.253637313842773ms\r\n",,terminal_output +16153,13496502,"TERMINAL",0,0,"Step 1753, loss: 1.1175563335418701, step time: 17.292499542236328ms\r\nStep 1754, loss: 0.5231014490127563, step time: 17.58432388305664ms\r\n",,terminal_output +16154,13496619,"TERMINAL",0,0,"Step 1755, loss: 0.7215627431869507, step time: 17.177343368530273ms\r\nStep 1756, loss: 0.6035615801811218, step time: 17.347097396850586ms\r\n",,terminal_output +16155,13496681,"TERMINAL",0,0,"Step 1757, 
loss: 0.8633816838264465, step time: 17.85111427307129ms\r\n",,terminal_output +16156,13496745,"TERMINAL",0,0,"Step 1758, loss: 0.7589741945266724, step time: 17.11893081665039ms\r\n",,terminal_output +16157,13496810,"TERMINAL",0,0,"Step 1759, loss: 0.8023495078086853, step time: 19.6688175201416ms\r\n",,terminal_output +16158,13496872,"TERMINAL",0,0,"Step 1760, loss: 0.9455207586288452, step time: 17.86947250366211ms\r\n",,terminal_output +16159,13496935,"TERMINAL",0,0,"Step 1761, loss: 0.5353946685791016, step time: 17.169713973999023ms\r\n",,terminal_output +16160,13497001,"TERMINAL",0,0,"Step 1762, loss: 0.8398222923278809, step time: 17.5473690032959ms\r\n",,terminal_output +16161,13497060,"TERMINAL",0,0,"Step 1763, loss: 0.7638548016548157, step time: 17.90785789489746ms\r\n",,terminal_output +16162,13497123,"TERMINAL",0,0,"Step 1764, loss: 1.3119763135910034, step time: 17.24863052368164ms\r\n",,terminal_output +16163,13497250,"TERMINAL",0,0,"Step 1765, loss: 0.5581149458885193, step time: 17.25459098815918ms\r\nStep 1766, loss: 1.3474969863891602, step time: 17.782211303710938ms\r\n",,terminal_output +16164,13497311,"TERMINAL",0,0,"Step 1767, loss: 1.0587409734725952, step time: 17.471790313720703ms\r\n",,terminal_output +16165,13497374,"TERMINAL",0,0,"Step 1768, loss: 0.922202467918396, step time: 17.23456382751465ms\r\n",,terminal_output +16166,13497440,"TERMINAL",0,0,"Step 1769, loss: 0.6580217480659485, step time: 17.6239013671875ms\r\n",,terminal_output +16167,13497534,"TERMINAL",0,0,"Step 1770, loss: 0.6069571375846863, step time: 17.191648483276367ms\r\n",,terminal_output +16168,13497592,"TERMINAL",0,0,"Step 1771, loss: 0.8953723311424255, step time: 17.356395721435547ms\r\n",,terminal_output +16169,13497645,"TERMINAL",0,0,"Step 1772, loss: 0.9068785905838013, step time: 17.561674118041992ms\r\n",,terminal_output +16170,13497755,"TERMINAL",0,0,"Step 1773, loss: 1.0608468055725098, step time: 20.25127410888672ms\r\nStep 1774, loss: 0.7338653206825256, step time: 17.754554748535156ms\r\n",,terminal_output +16171,13497814,"TERMINAL",0,0,"Step 1775, loss: 0.9231095910072327, step time: 17.859220504760742ms\r\n",,terminal_output +16172,13497876,"TERMINAL",0,0,"Step 1776, loss: 1.5848398208618164, step time: 17.157793045043945ms\r\n",,terminal_output +16173,13497943,"TERMINAL",0,0,"Step 1777, loss: 0.9142629504203796, step time: 17.515897750854492ms\r\n",,terminal_output +16174,13498005,"TERMINAL",0,0,"Step 1778, loss: 0.6172780990600586, step time: 17.618179321289062ms\r\n",,terminal_output +16175,13498066,"TERMINAL",0,0,"Step 1779, loss: 0.6643739342689514, step time: 17.163515090942383ms\r\n",,terminal_output +16176,13498192,"TERMINAL",0,0,"Step 1780, loss: 1.0922327041625977, step time: 17.44556427001953ms\r\nStep 1781, loss: 0.8595288395881653, step time: 17.75956153869629ms\r\n",,terminal_output +16177,13498256,"TERMINAL",0,0,"Step 1782, loss: 0.5846227407455444, step time: 17.047882080078125ms\r\n",,terminal_output +16178,13498355,"TERMINAL",0,0,"Step 1783, loss: 0.5677077174186707, step time: 17.61174201965332ms\r\n",,terminal_output +16179,13498410,"TERMINAL",0,0,"Step 1784, loss: 0.601246178150177, step time: 17.690181732177734ms\r\n",,terminal_output +16180,13498465,"TERMINAL",0,0,"Step 1785, loss: 0.5930267572402954, step time: 17.16446876525879ms\r\n",,terminal_output +16181,13498571,"TERMINAL",0,0,"Step 1786, loss: 1.0478122234344482, step time: 17.531871795654297ms\r\nStep 1787, loss: 1.2343709468841553, step time: 17.722368240356445ms\r\n",,terminal_output 
+16182,13498645,"TERMINAL",0,0,"Step 1788, loss: 0.4977528154850006, step time: 17.122268676757812ms\r\n",,terminal_output +16183,13498696,"TERMINAL",0,0,"Step 1789, loss: 0.43816590309143066, step time: 17.358779907226562ms\r\n",,terminal_output +16184,13498788,"TERMINAL",0,0,"Step 1790, loss: 0.5957951545715332, step time: 18.323183059692383ms\r\n",,terminal_output +16185,13498839,"TERMINAL",0,0,"Step 1791, loss: 0.7186657190322876, step time: 17.225980758666992ms\r\n",,terminal_output +16186,13498931,"TERMINAL",0,0,"Step 1792, loss: 0.534619927406311, step time: 17.29559898376465ms\r\n",,terminal_output +16187,13498983,"TERMINAL",0,0,"Step 1793, loss: 0.6576482653617859, step time: 17.711162567138672ms\r\n",,terminal_output +16188,13499034,"TERMINAL",0,0,"Step 1794, loss: 0.5812981724739075, step time: 17.142057418823242ms\r\n",,terminal_output +16189,13499141,"TERMINAL",0,0,"Step 1795, loss: 0.6196104288101196, step time: 17.30203628540039ms\r\nStep 1796, loss: 0.6766104102134705, step time: 17.728090286254883ms\r\n",,terminal_output +16190,13499208,"TERMINAL",0,0,"Step 1797, loss: 0.6317276358604431, step time: 17.14348793029785ms\r\n",,terminal_output +16191,13499268,"TERMINAL",0,0,"Step 1798, loss: 0.745221734046936, step time: 17.292261123657227ms\r\n",,terminal_output +16192,13499331,"TERMINAL",0,0,"Step 1799, loss: 0.623579740524292, step time: 17.66657829284668ms\r\n",,terminal_output +16193,13499392,"TERMINAL",0,0,"Step 1800, loss: 0.8137320280075073, step time: 17.078399658203125ms\r\n",,terminal_output +16194,13499459,"TERMINAL",0,0,"Step 1801, loss: 0.9268092513084412, step time: 18.045902252197266ms\r\n",,terminal_output +16195,13499520,"TERMINAL",0,0,"Step 1802, loss: 0.4817691147327423, step time: 18.092870712280273ms\r\n",,terminal_output +16196,13499582,"TERMINAL",0,0,"Step 1803, loss: 1.2315565347671509, step time: 18.517255783081055ms\r\n",,terminal_output +16197,13499645,"TERMINAL",0,0,"Step 1804, loss: 0.45166367292404175, step time: 17.50493049621582ms\r\n",,terminal_output +16198,13499707,"TERMINAL",0,0,"Step 1805, loss: 0.7134393453598022, step time: 17.6236629486084ms\r\n",,terminal_output +16199,13499769,"TERMINAL",0,0,"Step 1806, loss: 0.5772085785865784, step time: 17.186641693115234ms\r\n",,terminal_output +16200,13499829,"TERMINAL",0,0,"Step 1807, loss: 0.6613762378692627, step time: 17.268896102905273ms\r\n",,terminal_output +16201,13499890,"TERMINAL",0,0,"Step 1808, loss: 0.46968719363212585, step time: 17.611980438232422ms\r\n",,terminal_output +16202,13499953,"TERMINAL",0,0,"Step 1809, loss: 0.7332410216331482, step time: 17.26818084716797ms\r\n",,terminal_output +16203,13500018,"TERMINAL",0,0,"Step 1810, loss: 0.6058740019798279, step time: 17.437458038330078ms\r\n",,terminal_output +16204,13500139,"TERMINAL",0,0,"Step 1811, loss: 0.4478296637535095, step time: 17.812252044677734ms\r\n",,terminal_output +16205,13500144,"TERMINAL",0,0,"Step 1812, loss: 0.6096716523170471, step time: 17.141103744506836ms\r\n",,terminal_output +16206,13500206,"TERMINAL",0,0,"Step 1813, loss: 0.403492271900177, step time: 17.208099365234375ms\r\n",,terminal_output +16207,13500353,"TERMINAL",0,0,"Step 1814, loss: 0.6404091119766235, step time: 17.698049545288086ms\r\nStep 1815, loss: 0.46752050518989563, step time: 17.08674430847168ms\r\n",,terminal_output +16208,13500400,"TERMINAL",0,0,"Step 1816, loss: 0.9188406467437744, step time: 19.713401794433594ms\r\n",,terminal_output +16209,13500458,"TERMINAL",0,0,"Step 1817, loss: 0.8705322742462158, step time: 
18.66602897644043ms\r\n",,terminal_output +16210,13500562,"TERMINAL",0,0,"Step 1818, loss: 0.769799530506134, step time: 17.151355743408203ms\r\n",,terminal_output +16211,13500613,"TERMINAL",0,0,"Step 1819, loss: 0.5874884724617004, step time: 17.264127731323242ms\r\n",,terminal_output +16212,13500720,"TERMINAL",0,0,"Step 1820, loss: 0.5699717998504639, step time: 17.695903778076172ms\r\nStep 1821, loss: 0.7116115689277649, step time: 17.20452308654785ms\r\n",,terminal_output +16213,13500786,"TERMINAL",0,0,"Step 1822, loss: 0.6287060976028442, step time: 17.270565032958984ms\r\n",,terminal_output +16214,13500848,"TERMINAL",0,0,"Step 1823, loss: 0.8588234186172485, step time: 17.877578735351562ms\r\n",,terminal_output +16215,13500915,"TERMINAL",0,0,"Step 1824, loss: 0.9302721619606018, step time: 17.307758331298828ms\r\n",,terminal_output +16216,13500979,"TERMINAL",0,0,"Step 1825, loss: 1.0094314813613892, step time: 17.516136169433594ms\r\n",,terminal_output +16217,13501043,"TERMINAL",0,0,"Step 1826, loss: 0.793140709400177, step time: 17.729997634887695ms\r\n",,terminal_output +16218,13501107,"TERMINAL",0,0,"Step 1827, loss: 0.5831853151321411, step time: 17.4560546875ms\r\n",,terminal_output +16219,13501169,"TERMINAL",0,0,"Step 1828, loss: 0.40465888381004333, step time: 17.581939697265625ms\r\n",,terminal_output +16220,13501233,"TERMINAL",0,0,"Step 1829, loss: 0.7219067811965942, step time: 17.839908599853516ms\r\n",,terminal_output +16221,13501295,"TERMINAL",0,0,"Step 1830, loss: 0.9274840950965881, step time: 17.21501350402832ms\r\n",,terminal_output +16222,13501359,"TERMINAL",0,0,"Step 1831, loss: 0.5878835320472717, step time: 31.032323837280273ms\r\n",,terminal_output +16223,13501423,"TERMINAL",0,0,"Step 1832, loss: 0.4885282516479492, step time: 20.445823669433594ms\r\n",,terminal_output +16224,13501486,"TERMINAL",0,0,"Step 1833, loss: 0.5241203904151917, step time: 17.386674880981445ms\r\n",,terminal_output +16225,13501551,"TERMINAL",0,0,"Step 1834, loss: 0.986041247844696, step time: 17.639636993408203ms\r\n",,terminal_output +16226,13501614,"TERMINAL",0,0,"Step 1835, loss: 0.6978299617767334, step time: 17.79484748840332ms\r\n",,terminal_output +16227,13501687,"TERMINAL",0,0,"Step 1836, loss: 0.7203549146652222, step time: 17.224550247192383ms\r\n",,terminal_output +16228,13501727,"TERMINAL",0,0,"Step 1837, loss: 1.1439169645309448, step time: 17.50349998474121ms\r\n",,terminal_output +16229,13501790,"TERMINAL",0,0,"Step 1838, loss: 0.7308967709541321, step time: 17.565011978149414ms\r\n",,terminal_output +16230,13501854,"TERMINAL",0,0,"Step 1839, loss: 0.7288815975189209, step time: 17.409086227416992ms\r\n",,terminal_output +16231,13501918,"TERMINAL",0,0,"Step 1840, loss: 0.49539902806282043, step time: 17.407894134521484ms\r\n",,terminal_output +16232,13501982,"TERMINAL",0,0,"Step 1841, loss: 1.6689211130142212, step time: 17.852306365966797ms\r\n",,terminal_output +16233,13502045,"TERMINAL",0,0,"Step 1842, loss: 0.5810508131980896, step time: 17.284631729125977ms\r\n",,terminal_output +16234,13502112,"TERMINAL",0,0,"Step 1843, loss: 0.4904039204120636, step time: 17.60125160217285ms\r\n",,terminal_output +16235,13502185,"TERMINAL",0,0,"Step 1844, loss: 0.9218800663948059, step time: 17.888307571411133ms\r\n",,terminal_output +16236,13502240,"TERMINAL",0,0,"Step 1845, loss: 0.5499873757362366, step time: 17.138242721557617ms\r\n",,terminal_output +16237,13502353,"TERMINAL",0,0,"Step 1846, loss: 0.4996955096721649, step time: 17.330169677734375ms\r\nStep 1847, loss: 
0.43589887022972107, step time: 19.20604705810547ms\r\n",,terminal_output +16238,13502422,"TERMINAL",0,0,"Step 1848, loss: 0.6006234288215637, step time: 17.342567443847656ms\r\n",,terminal_output +16239,13502490,"TERMINAL",0,0,"Step 1849, loss: 0.8842418193817139, step time: 17.67277717590332ms\r\n",,terminal_output +16240,13502558,"TERMINAL",0,0,"Step 1850, loss: 0.7605680227279663, step time: 17.924785614013672ms\r\n",,terminal_output +16241,13502648,"TERMINAL",0,0,"Step 1851, loss: 0.5234324336051941, step time: 17.3184871673584ms\r\n",,terminal_output +16242,13502699,"TERMINAL",0,0,"Step 1852, loss: 0.5227451324462891, step time: 17.23647117614746ms\r\n",,terminal_output +16243,13502805,"TERMINAL",0,0,"Step 1853, loss: 0.6494211554527283, step time: 17.855167388916016ms\r\nStep 1854, loss: 0.4797573685646057, step time: 17.369747161865234ms\r\n",,terminal_output +16244,13502899,"TERMINAL",0,0,"Step 1855, loss: 0.7728223204612732, step time: 17.644405364990234ms\r\n",,terminal_output +16245,13502949,"TERMINAL",0,0,"Step 1856, loss: 0.5696260929107666, step time: 17.78244972229004ms\r\n",,terminal_output +16246,13503055,"TERMINAL",0,0,"Step 1857, loss: 0.654244065284729, step time: 17.34137535095215ms\r\nStep 1858, loss: 0.4675481915473938, step time: 17.72761344909668ms\r\n",,terminal_output +16247,13503171,"TERMINAL",0,0,"Step 1859, loss: 1.078255295753479, step time: 17.7762508392334ms\r\nStep 1860, loss: 0.8333162069320679, step time: 17.55356788635254ms\r\n",,terminal_output +16248,13503235,"TERMINAL",0,0,"Step 1861, loss: 0.8799046874046326, step time: 17.54307746887207ms\r\n",,terminal_output +16249,13503338,"TERMINAL",0,0,"Step 1862, loss: 0.551651656627655, step time: 17.646312713623047ms\r\n",,terminal_output +16250,13503390,"TERMINAL",0,0,"Step 1863, loss: 0.6131778955459595, step time: 17.247676849365234ms\r\n",,terminal_output +16251,13503494,"TERMINAL",0,0,"Step 1864, loss: 1.2393702268600464, step time: 17.50636100769043ms\r\nStep 1865, loss: 0.6661881804466248, step time: 17.773151397705078ms\r\n",,terminal_output +16252,13503557,"TERMINAL",0,0,"Step 1866, loss: 0.6102824211120605, step time: 17.532825469970703ms\r\n",,terminal_output +16253,13503622,"TERMINAL",0,0,"Step 1867, loss: 0.5117310881614685, step time: 17.404556274414062ms\r\n",,terminal_output +16254,13503674,"TERMINAL",0,0,"Step 1868, loss: 0.53471440076828, step time: 17.845869064331055ms\r\n",,terminal_output +16255,13503737,"TERMINAL",0,0,"Step 1869, loss: 1.5638870000839233, step time: 17.255544662475586ms\r\n",,terminal_output +16256,13503838,"TERMINAL",0,0,"Step 1870, loss: 0.6437984704971313, step time: 17.350435256958008ms\r\n",,terminal_output +16257,13503898,"TERMINAL",0,0,"Step 1871, loss: 0.469163179397583, step time: 17.804622650146484ms\r\n",,terminal_output +16258,13503959,"TERMINAL",0,0,"Step 1872, loss: 0.5533799529075623, step time: 17.294645309448242ms\r\n",,terminal_output +16259,13504020,"TERMINAL",0,0,"Step 1873, loss: 0.5127090215682983, step time: 17.641544342041016ms\r\n",,terminal_output +16260,13504088,"TERMINAL",0,0,"Step 1874, loss: 0.6295731663703918, step time: 17.473697662353516ms\r\n",,terminal_output +16261,13504138,"TERMINAL",0,0,"Step 1875, loss: 0.5469538569450378, step time: 17.322540283203125ms\r\n",,terminal_output +16262,13504201,"TERMINAL",0,0,"Step 1876, loss: 0.43029502034187317, step time: 17.247676849365234ms\r\n",,terminal_output +16263,13504265,"TERMINAL",0,0,"Step 1877, loss: 1.0517009496688843, step time: 17.963409423828125ms\r\n",,terminal_output 
+16264,13504373,"TERMINAL",0,0,"Step 1878, loss: 0.8942021131515503, step time: 17.412185668945312ms\r\nStep 1879, loss: 0.6104525327682495, step time: 17.42386817932129ms\r\n",,terminal_output +16265,13504431,"TERMINAL",0,0,"Step 1880, loss: 0.5070714354515076, step time: 17.902612686157227ms\r\n",,terminal_output +16266,13504524,"TERMINAL",0,0,"Step 1881, loss: 0.48713213205337524, step time: 17.388105392456055ms\r\n",,terminal_output +16267,13504575,"TERMINAL",0,0,"Step 1882, loss: 0.6455796360969543, step time: 17.294645309448242ms\r\n",,terminal_output +16268,13504669,"TERMINAL",0,0,"Step 1883, loss: 0.49160248041152954, step time: 17.84682273864746ms\r\n",,terminal_output +16269,13504719,"TERMINAL",0,0,"Step 1884, loss: 0.45843759179115295, step time: 16.98923110961914ms\r\n",,terminal_output +16270,13504771,"TERMINAL",0,0,"Step 1885, loss: 0.4218675196170807, step time: 17.239093780517578ms\r\n",,terminal_output +16271,13504876,"TERMINAL",0,0,"Step 1886, loss: 0.778265655040741, step time: 17.65584945678711ms\r\nStep 1887, loss: 0.4738801121711731, step time: 17.046689987182617ms\r\n",,terminal_output +16272,13504971,"TERMINAL",0,0,"Step 1888, loss: 0.6523844599723816, step time: 17.325639724731445ms\r\n",,terminal_output +16273,13505024,"TERMINAL",0,0,"Step 1889, loss: 0.7949831485748291, step time: 17.96698570251465ms\r\n",,terminal_output +16274,13505130,"TERMINAL",0,0,"Step 1890, loss: 0.6185640096664429, step time: 17.159461975097656ms\r\nStep 1891, loss: 0.7675100564956665, step time: 17.4100399017334ms\r\n",,terminal_output +16275,13505470,"TERMINAL",0,0,"Step 1892, loss: 0.5774645805358887, step time: 344.5143699645996ms\r\n",,terminal_output +16276,13505576,"TERMINAL",0,0,"Step 1893, loss: 0.6742174625396729, step time: 25.000333786010742ms\r\n",,terminal_output +16277,13505637,"TERMINAL",0,0,"Step 1894, loss: 0.5120471119880676, step time: 20.06816864013672ms\r\n",,terminal_output +16278,13505698,"TERMINAL",0,0,"Step 1895, loss: 0.8890476226806641, step time: 18.857717514038086ms\r\n",,terminal_output +16279,13505761,"TERMINAL",0,0,"Step 1896, loss: 0.3964972198009491, step time: 17.74001121520996ms\r\n",,terminal_output +16280,13505824,"TERMINAL",0,0,"Step 1897, loss: 0.4290817379951477, step time: 17.661333084106445ms\r\n",,terminal_output +16281,13505887,"TERMINAL",0,0,"Step 1898, loss: 0.6709446310997009, step time: 17.812013626098633ms\r\n",,terminal_output +16282,13505951,"TERMINAL",0,0,"Step 1899, loss: 0.48494863510131836, step time: 17.467737197875977ms\r\n",,terminal_output +16283,13506014,"TERMINAL",0,0,"Step 1900, loss: 0.3907568156719208, step time: 17.536640167236328ms\r\n",,terminal_output +16284,13506085,"TERMINAL",0,0,"Step 1901, loss: 0.5763824582099915, step time: 17.917156219482422ms\r\n",,terminal_output +16285,13506138,"TERMINAL",0,0,"Step 1902, loss: 0.42007261514663696, step time: 17.200231552124023ms\r\n",,terminal_output +16286,13506204,"TERMINAL",0,0,"Step 1903, loss: 0.7167828679084778, step time: 17.653226852416992ms\r\n",,terminal_output +16287,13506264,"TERMINAL",0,0,"Step 1904, loss: 0.668205738067627, step time: 18.022775650024414ms\r\n",,terminal_output +16288,13506327,"TERMINAL",0,0,"Step 1905, loss: 0.4738105535507202, step time: 17.33112335205078ms\r\n",,terminal_output +16289,13506389,"TERMINAL",0,0,"Step 1906, loss: 0.7323951721191406, step time: 17.586469650268555ms\r\n",,terminal_output +16290,13506513,"TERMINAL",0,0,"Step 1907, loss: 0.5454718470573425, step time: 17.939090728759766ms\r\n",,terminal_output 
+16291,13506573,"TERMINAL",0,0,"Step 1908, loss: 0.5546355843544006, step time: 17.195701599121094ms\r\nStep 1909, loss: 0.4400474429130554, step time: 17.425537109375ms\r\n",,terminal_output +16292,13506637,"TERMINAL",0,0,"Step 1910, loss: 0.5656319260597229, step time: 17.90785789489746ms\r\n",,terminal_output +16293,13506697,"TERMINAL",0,0,"Step 1911, loss: 1.334846019744873, step time: 17.267465591430664ms\r\n",,terminal_output +16294,13506758,"TERMINAL",0,0,"Step 1912, loss: 0.6816387176513672, step time: 17.4105167388916ms\r\n",,terminal_output +16295,13506864,"TERMINAL",0,0,"Step 1913, loss: 0.5272793769836426, step time: 17.93694496154785ms\r\nStep 1914, loss: 0.9379263520240784, step time: 17.19069480895996ms\r\n",,terminal_output +16296,13506964,"TERMINAL",0,0,"Step 1915, loss: 1.7370504140853882, step time: 17.319917678833008ms\r\n",,terminal_output +16297,13507017,"TERMINAL",0,0,"Step 1916, loss: 0.6556528806686401, step time: 17.808914184570312ms\r\n",,terminal_output +16298,13507124,"TERMINAL",0,0,"Step 1917, loss: 0.3619794547557831, step time: 17.136096954345703ms\r\nStep 1918, loss: 0.38304251432418823, step time: 17.344951629638672ms\r\n",,terminal_output +16299,13507190,"TERMINAL",0,0,"Step 1919, loss: 0.43646880984306335, step time: 17.773866653442383ms\r\n",,terminal_output +16300,13507252,"TERMINAL",0,0,"Step 1920, loss: 0.41214513778686523, step time: 17.095327377319336ms\r\n",,terminal_output +16301,13507315,"TERMINAL",0,0,"Step 1921, loss: 0.4772522747516632, step time: 18.178224563598633ms\r\n",,terminal_output +16302,13507377,"TERMINAL",0,0,"Step 1922, loss: 0.8454111814498901, step time: 17.70758628845215ms\r\n",,terminal_output +16303,13507438,"TERMINAL",0,0,"Step 1923, loss: 0.7339538931846619, step time: 17.424583435058594ms\r\n",,terminal_output +16304,13507500,"TERMINAL",0,0,"Step 1924, loss: 0.7735539674758911, step time: 17.42076873779297ms\r\n",,terminal_output +16305,13507562,"TERMINAL",0,0,"Step 1925, loss: 0.5668778419494629, step time: 17.900943756103516ms\r\n",,terminal_output +16306,13507628,"TERMINAL",0,0,"Step 1926, loss: 0.512348473072052, step time: 17.278194427490234ms\r\n",,terminal_output +16307,13507689,"TERMINAL",0,0,"Step 1927, loss: 0.6432813405990601, step time: 17.551898956298828ms\r\n",,terminal_output +16308,13507750,"TERMINAL",0,0,"Step 1928, loss: 0.8272056579589844, step time: 17.726898193359375ms\r\n",,terminal_output +16309,13507811,"TERMINAL",0,0,"Step 1929, loss: 0.5333925485610962, step time: 17.25482940673828ms\r\n",,terminal_output +16310,13507881,"TERMINAL",0,0,"Step 1930, loss: 0.46133187413215637, step time: 17.542123794555664ms\r\n",,terminal_output +16311,13507942,"TERMINAL",0,0,"Step 1931, loss: 0.7990086674690247, step time: 18.143177032470703ms\r\n",,terminal_output +16312,13508036,"TERMINAL",0,0,"Step 1932, loss: 0.8927668333053589, step time: 17.242431640625ms\r\n",,terminal_output +16313,13508089,"TERMINAL",0,0,"Step 1933, loss: 0.9883179068565369, step time: 17.377138137817383ms\r\n",,terminal_output +16314,13508142,"TERMINAL",0,0,"Step 1934, loss: 0.44205835461616516, step time: 17.871618270874023ms\r\n",,terminal_output +16315,13508236,"TERMINAL",0,0,"Step 1935, loss: 0.3906691372394562, step time: 17.02570915222168ms\r\n",,terminal_output +16316,13508289,"TERMINAL",0,0,"Step 1936, loss: 0.49043694138526917, step time: 18.354415893554688ms\r\n",,terminal_output +16317,13508396,"TERMINAL",0,0,"Step 1937, loss: 0.39458292722702026, step time: 18.917322158813477ms\r\nStep 1938, loss: 0.4153454899787903, step 
time: 17.598867416381836ms\r\n",,terminal_output +16318,13508459,"TERMINAL",0,0,"Step 1939, loss: 0.39435452222824097, step time: 17.532825469970703ms\r\n",,terminal_output +16319,13508521,"TERMINAL",0,0,"Step 1940, loss: 0.678325891494751, step time: 17.87734031677246ms\r\n",,terminal_output +16320,13508595,"TERMINAL",0,0,"Step 1941, loss: 0.4808194935321808, step time: 17.181873321533203ms\r\n",,terminal_output +16321,13508665,"TERMINAL",0,0,"Step 1942, loss: 0.44771379232406616, step time: 17.454862594604492ms\r\n",,terminal_output +16322,13508704,"TERMINAL",0,0,"Step 1943, loss: 0.6247037649154663, step time: 18.118858337402344ms\r\n",,terminal_output +16323,13508835,"TERMINAL",0,0,"Step 1944, loss: 0.45663705468177795, step time: 17.024517059326172ms\r\nStep 1945, loss: 0.4209759831428528, step time: 17.477989196777344ms\r\n",,terminal_output +16324,13508960,"TERMINAL",0,0,"Step 1946, loss: 0.5442571043968201, step time: 17.65584945678711ms\r\nStep 1947, loss: 0.535047709941864, step time: 17.32158660888672ms\r\n",,terminal_output +16325,13509024,"TERMINAL",0,0,"Step 1948, loss: 0.5497475862503052, step time: 17.431020736694336ms\r\n",,terminal_output +16326,13509098,"TERMINAL",0,0,"Step 1949, loss: 0.8432881832122803, step time: 17.871379852294922ms\r\n",,terminal_output +16327,13509152,"TERMINAL",0,0,"Step 1950, loss: 0.6950938105583191, step time: 17.23456382751465ms\r\n",,terminal_output +16328,13509248,"TERMINAL",0,0,"Step 1951, loss: 0.3889223635196686, step time: 17.225027084350586ms\r\n",,terminal_output +16329,13509316,"TERMINAL",0,0,"Step 1952, loss: 0.44714072346687317, step time: 17.719030380249023ms\r\n",,terminal_output +16330,13509374,"TERMINAL",0,0,"Step 1953, loss: 0.440634548664093, step time: 17.760753631591797ms\r\n",,terminal_output +16331,13509482,"TERMINAL",0,0,"Step 1954, loss: 0.5654312372207642, step time: 17.29440689086914ms\r\nStep 1955, loss: 0.37799620628356934, step time: 17.61770248413086ms\r\n",,terminal_output +16332,13509546,"TERMINAL",0,0,"Step 1956, loss: 1.1046594381332397, step time: 17.045021057128906ms\r\n",,terminal_output +16333,13509612,"TERMINAL",0,0,"Step 1957, loss: 0.7046963572502136, step time: 17.485380172729492ms\r\n",,terminal_output +16334,13509677,"TERMINAL",0,0,"Step 1958, loss: 0.5393818020820618, step time: 17.63129234313965ms\r\n",,terminal_output +16335,13509735,"TERMINAL",0,0,"Step 1959, loss: 0.44753995537757874, step time: 17.058134078979492ms\r\n",,terminal_output +16336,13509799,"TERMINAL",0,0,"Step 1960, loss: 0.4767264127731323, step time: 17.508268356323242ms\r\n",,terminal_output +16337,13509863,"TERMINAL",0,0,"Step 1961, loss: 1.739213228225708, step time: 17.82846450805664ms\r\n",,terminal_output +16338,13509927,"TERMINAL",0,0,"Step 1962, loss: 0.8265509009361267, step time: 17.167091369628906ms\r\n",,terminal_output +16339,13509993,"TERMINAL",0,0,"Step 1963, loss: 0.926501452922821, step time: 17.4257755279541ms\r\n",,terminal_output +16340,13510056,"TERMINAL",0,0,"Step 1964, loss: 0.5794516205787659, step time: 17.621517181396484ms\r\n",,terminal_output +16341,13510167,"TERMINAL",0,0,"Step 1965, loss: 0.424121230840683, step time: 17.11297035217285ms\r\nStep 1966, loss: 0.41260606050491333, step time: 17.156600952148438ms\r\n",,terminal_output +16342,13510264,"TERMINAL",0,0,"Step 1967, loss: 0.35325053334236145, step time: 17.86041259765625ms\r\n",,terminal_output +16343,13510318,"TERMINAL",0,0,"Step 1968, loss: 0.5799536108970642, step time: 17.050981521606445ms\r\n",,terminal_output 
+16344,13510425,"TERMINAL",0,0,"Step 1969, loss: 0.5708839893341064, step time: 17.98725128173828ms\r\nStep 1970, loss: 0.3613331913948059, step time: 17.849445343017578ms\r\n",,terminal_output +16345,13510522,"TERMINAL",0,0,"Step 1971, loss: 0.41843608021736145, step time: 17.08364486694336ms\r\n",,terminal_output +16346,13510585,"TERMINAL",0,0,"Step 1972, loss: 0.4066825211048126, step time: 17.269134521484375ms\r\n",,terminal_output +16347,13510699,"TERMINAL",0,0,"Step 1973, loss: 0.43100646138191223, step time: 18.04327964782715ms\r\nStep 1974, loss: 0.36638784408569336, step time: 17.20285415649414ms\r\n",,terminal_output +16348,13510809,"TERMINAL",0,0,"Step 1975, loss: 1.9212313890457153, step time: 17.250776290893555ms\r\nStep 1976, loss: 0.39265722036361694, step time: 17.638206481933594ms\r\n",,terminal_output +16349,13510876,"TERMINAL",0,0,"Step 1977, loss: 2.0529282093048096, step time: 17.06671714782715ms\r\n",,terminal_output +16350,13510940,"TERMINAL",0,0,"Step 1978, loss: 0.8510561585426331, step time: 17.28057861328125ms\r\n",,terminal_output +16351,13511005,"TERMINAL",0,0,"Step 1979, loss: 0.4453509747982025, step time: 17.731904983520508ms\r\n",,terminal_output +16352,13511069,"TERMINAL",0,0,"Step 1980, loss: 0.8569568991661072, step time: 17.068862915039062ms\r\n",,terminal_output +16353,13511135,"TERMINAL",0,0,"Step 1981, loss: 0.9537602663040161, step time: 22.345781326293945ms\r\n",,terminal_output +16354,13511186,"TERMINAL",0,0,"Step 1982, loss: 0.7147477865219116, step time: 19.59848403930664ms\r\n",,terminal_output +16355,13511281,"TERMINAL",0,0,"Step 1983, loss: 0.5280134677886963, step time: 17.88783073425293ms\r\n",,terminal_output +16356,13511333,"TERMINAL",0,0,"Step 1984, loss: 0.4826514720916748, step time: 17.68970489501953ms\r\n",,terminal_output +16357,13511440,"TERMINAL",0,0,"Step 1985, loss: 0.7422817945480347, step time: 18.140316009521484ms\r\nStep 1986, loss: 0.5797393321990967, step time: 17.364501953125ms\r\n",,terminal_output +16358,13511535,"TERMINAL",0,0,"Step 1987, loss: 0.6813188791275024, step time: 19.754886627197266ms\r\n",,terminal_output +16359,13511643,"TERMINAL",0,0,"Step 1988, loss: 0.780269980430603, step time: 17.9293155670166ms\r\nStep 1989, loss: 0.39508453011512756, step time: 17.22431182861328ms\r\n",,terminal_output +16360,13511707,"TERMINAL",0,0,"Step 1990, loss: 0.38736802339553833, step time: 17.62080192565918ms\r\n",,terminal_output +16361,13511770,"TERMINAL",0,0,"Step 1991, loss: 0.3814454674720764, step time: 17.786741256713867ms\r\n",,terminal_output +16362,13511835,"TERMINAL",0,0,"Step 1992, loss: 0.4956248700618744, step time: 17.298460006713867ms\r\n",,terminal_output +16363,13511901,"TERMINAL",0,0,"Step 1993, loss: 0.364808589220047, step time: 17.30632781982422ms\r\n",,terminal_output +16364,13511965,"TERMINAL",0,0,"Step 1994, loss: 0.6926953792572021, step time: 17.859458923339844ms\r\n",,terminal_output +16365,13512029,"TERMINAL",0,0,"Step 1995, loss: 0.481611967086792, step time: 18.04971694946289ms\r\n",,terminal_output +16366,13512132,"TERMINAL",0,0,"Step 1996, loss: 1.1966665983200073, step time: 21.120309829711914ms\r\nStep 1997, loss: 0.3637525141239166, step time: 19.253969192504883ms\r\n",,terminal_output +16367,13512230,"TERMINAL",0,0,"Step 1998, loss: 0.4071165919303894, step time: 17.74311065673828ms\r\n",,terminal_output +16368,13512294,"TERMINAL",0,0,"Step 1999, loss: 0.391543984413147, step time: 17.663955688476562ms\r\n",,terminal_output +16369,13515066,"TERMINAL",0,0,"Step 2000, loss: 
1.6074433326721191, step time: 35.27235984802246ms\r\n",,terminal_output +16370,13515143,"TERMINAL",0,0,"Step 2001, loss: 0.8111899495124817, step time: 26.086091995239258ms\r\n",,terminal_output +16371,13515206,"TERMINAL",0,0,"Step 2002, loss: 0.5891009569168091, step time: 20.419597625732422ms\r\n",,terminal_output +16372,13515272,"TERMINAL",0,0,"Step 2003, loss: 1.0628997087478638, step time: 19.077301025390625ms\r\n",,terminal_output +16373,13515335,"TERMINAL",0,0,"Step 2004, loss: 0.5677580833435059, step time: 18.32270622253418ms\r\n",,terminal_output +16374,13515401,"TERMINAL",0,0,"Step 2005, loss: 0.4380817711353302, step time: 18.76068115234375ms\r\n",,terminal_output +16375,13515531,"TERMINAL",0,0,"Step 2006, loss: 0.3594041168689728, step time: 18.06497573852539ms\r\nStep 2007, loss: 0.5519790053367615, step time: 18.182992935180664ms\r\n",,terminal_output +16376,13515630,"TERMINAL",0,0,"Step 2008, loss: 0.8316953182220459, step time: 18.294811248779297ms\r\n",,terminal_output +16377,13515682,"TERMINAL",0,0,"Step 2009, loss: 0.4146089255809784, step time: 18.009185791015625ms\r\n",,terminal_output +16378,13515792,"TERMINAL",0,0,"Step 2010, loss: 0.4102868139743805, step time: 17.843246459960938ms\r\nStep 2011, loss: 0.43389254808425903, step time: 18.337249755859375ms\r\n",,terminal_output +16379,13515854,"TERMINAL",0,0,"Step 2012, loss: 0.34896668791770935, step time: 17.90165901184082ms\r\n",,terminal_output +16380,13515917,"TERMINAL",0,0,"Step 2013, loss: 0.6614764928817749, step time: 19.532203674316406ms\r\n",,terminal_output +16381,13515980,"TERMINAL",0,0,"Step 2014, loss: 0.6913071870803833, step time: 18.449068069458008ms\r\n",,terminal_output +16382,13516044,"TERMINAL",0,0,"Step 2015, loss: 0.5744280815124512, step time: 17.989635467529297ms\r\n",,terminal_output +16383,13516112,"TERMINAL",0,0,"Step 2016, loss: 0.4652135968208313, step time: 17.77505874633789ms\r\n",,terminal_output +16384,13516164,"TERMINAL",0,0,"Step 2017, loss: 0.8844683766365051, step time: 18.434762954711914ms\r\n",,terminal_output +16385,13516235,"TERMINAL",0,0,"Step 2018, loss: 0.687136173248291, step time: 18.005847930908203ms\r\n",,terminal_output +16386,13516299,"TERMINAL",0,0,"Step 2019, loss: 0.39415445923805237, step time: 17.92597770690918ms\r\n",,terminal_output +16387,13516362,"TERMINAL",0,0,"Step 2020, loss: 0.5438243746757507, step time: 18.053054809570312ms\r\n",,terminal_output +16388,13516427,"TERMINAL",0,0,"Step 2021, loss: 0.5317552089691162, step time: 17.957687377929688ms\r\n",,terminal_output +16389,13516487,"TERMINAL",0,0,"Step 2022, loss: 0.7076389789581299, step time: 17.574787139892578ms\r\n",,terminal_output +16390,13516547,"TERMINAL",0,0,"Step 2023, loss: 0.6346760392189026, step time: 18.383026123046875ms\r\n",,terminal_output +16391,13516613,"TERMINAL",0,0,"Step 2024, loss: 0.9137428998947144, step time: 17.680883407592773ms\r\n",,terminal_output +16392,13516682,"TERMINAL",0,0,"Step 2025, loss: 1.3970452547073364, step time: 17.72141456604004ms\r\n",,terminal_output +16393,13516740,"TERMINAL",0,0,"Step 2026, loss: 0.4774758219718933, step time: 18.164873123168945ms\r\n",,terminal_output +16394,13516808,"TERMINAL",0,0,"Step 2027, loss: 1.0918766260147095, step time: 17.787933349609375ms\r\n",,terminal_output +16395,13516869,"TERMINAL",0,0,"Step 2028, loss: 0.9947903156280518, step time: 17.76599884033203ms\r\n",,terminal_output +16396,13516961,"TERMINAL",0,0,"Step 2029, loss: 0.4192631244659424, step time: 18.387556076049805ms\r\n",,terminal_output 
+16397,13517012,"TERMINAL",0,0,"Step 2030, loss: 0.44983720779418945, step time: 17.762422561645508ms\r\n",,terminal_output +16398,13517128,"TERMINAL",0,0,"Step 2031, loss: 0.8872948884963989, step time: 17.761707305908203ms\r\nStep 2032, loss: 0.5861594676971436, step time: 18.157958984375ms\r\n",,terminal_output +16399,13517190,"TERMINAL",0,0,"Step 2033, loss: 0.34489691257476807, step time: 17.85874366760254ms\r\n",,terminal_output +16400,13517286,"TERMINAL",0,0,"Step 2034, loss: 0.700538694858551, step time: 17.68016815185547ms\r\n",,terminal_output +16401,13517338,"TERMINAL",0,0,"Step 2035, loss: 0.33879321813583374, step time: 18.015623092651367ms\r\n",,terminal_output +16402,13517447,"TERMINAL",0,0,"Step 2036, loss: 0.3041006624698639, step time: 17.795085906982422ms\r\nStep 2037, loss: 1.0880756378173828, step time: 17.77029037475586ms\r\n",,terminal_output +16403,13517582,"TERMINAL",0,0,"Step 2038, loss: 0.5646641850471497, step time: 17.890214920043945ms\r\nStep 2039, loss: 1.148948073387146, step time: 17.8830623626709ms\r\n",,terminal_output +16404,13517641,"TERMINAL",0,0,"Step 2040, loss: 0.4253295958042145, step time: 17.618417739868164ms\r\n",,terminal_output +16405,13517735,"TERMINAL",0,0,"Step 2041, loss: 0.3772103488445282, step time: 18.23139190673828ms\r\n",,terminal_output +16406,13517791,"TERMINAL",0,0,"Step 2042, loss: 0.72015780210495, step time: 17.71855354309082ms\r\n",,terminal_output +16407,13517896,"TERMINAL",0,0,"Step 2043, loss: 0.35264554619789124, step time: 18.041610717773438ms\r\nStep 2044, loss: 0.3078494668006897, step time: 18.372297286987305ms\r\n",,terminal_output +16408,13517969,"TERMINAL",0,0,"Step 2045, loss: 0.5618690848350525, step time: 17.773151397705078ms\r\n",,terminal_output +16409,13518021,"TERMINAL",0,0,"Step 2046, loss: 0.3058488667011261, step time: 18.59307289123535ms\r\n",,terminal_output +16410,13518115,"TERMINAL",0,0,"Step 2047, loss: 0.45888644456863403, step time: 18.49198341369629ms\r\n",,terminal_output +16411,13518166,"TERMINAL",0,0,"Step 2048, loss: 0.531501293182373, step time: 17.75383949279785ms\r\n",,terminal_output +16412,13518259,"TERMINAL",0,0,"Step 2049, loss: 0.43989577889442444, step time: 17.620563507080078ms\r\n",,terminal_output +16413,13518418,"TERMINAL",0,0,"Step 2050, loss: 0.3721370995044708, step time: 18.13030242919922ms\r\n",,terminal_output +16414,13518467,"TERMINAL",0,0,"Step 2051, loss: 0.3411455452442169, step time: 18.2039737701416ms\r\nStep 2052, loss: 0.5160686373710632, step time: 17.69566535949707ms\r\nStep 2053, loss: 0.981572151184082, step time: 17.898082733154297ms\r\n",,terminal_output +16415,13518567,"TERMINAL",0,0,"Step 2054, loss: 0.7030844688415527, step time: 17.711162567138672ms\r\n",,terminal_output +16416,13518661,"TERMINAL",0,0,"Step 2055, loss: 0.3830394744873047, step time: 17.724037170410156ms\r\nStep 2056, loss: 0.3721580505371094, step time: 17.977237701416016ms\r\n",,terminal_output +16417,13518715,"TERMINAL",0,0,"Step 2057, loss: 0.5770866870880127, step time: 17.72165298461914ms\r\n",,terminal_output +16418,13518781,"TERMINAL",0,0,"Step 2058, loss: 0.47231218218803406, step time: 17.717838287353516ms\r\n",,terminal_output +16419,13518877,"TERMINAL",0,0,"Step 2059, loss: 0.3984323740005493, step time: 18.141984939575195ms\r\n",,terminal_output +16420,13518940,"TERMINAL",0,0,"Step 2060, loss: 0.3575756847858429, step time: 17.730712890625ms\r\n",,terminal_output +16421,13519002,"TERMINAL",0,0,"Step 2061, loss: 0.32951653003692627, step time: 
17.899036407470703ms\r\n",,terminal_output +16422,13519106,"TERMINAL",0,0,"Step 2062, loss: 0.7854127883911133, step time: 17.972707748413086ms\r\nStep 2063, loss: 0.33679062128067017, step time: 17.383337020874023ms\r\n",,terminal_output +16423,13519186,"TERMINAL",0,0,"Step 2064, loss: 0.3928094804286957, step time: 17.60125160217285ms\r\n",,terminal_output +16424,13519240,"TERMINAL",0,0,"Step 2065, loss: 0.7974432706832886, step time: 18.259048461914062ms\r\n",,terminal_output +16425,13519333,"TERMINAL",0,0,"Step 2066, loss: 0.7303389310836792, step time: 17.40407943725586ms\r\n",,terminal_output +16426,13519390,"TERMINAL",0,0,"Step 2067, loss: 1.4987393617630005, step time: 17.8682804107666ms\r\n",,terminal_output +16427,13519449,"TERMINAL",0,0,"Step 2068, loss: 0.297181099653244, step time: 18.114089965820312ms\r\n",,terminal_output +16428,13519512,"TERMINAL",0,0,"Step 2069, loss: 0.5286634564399719, step time: 17.719507217407227ms\r\n",,terminal_output +16429,13519574,"TERMINAL",0,0,"Step 2070, loss: 0.4263046979904175, step time: 18.149852752685547ms\r\n",,terminal_output +16430,13519636,"TERMINAL",0,0,"Step 2071, loss: 0.7854613661766052, step time: 18.049240112304688ms\r\n",,terminal_output +16431,13519699,"TERMINAL",0,0,"Step 2072, loss: 1.7549880743026733, step time: 17.712116241455078ms\r\n",,terminal_output +16432,13519806,"TERMINAL",0,0,"Step 2073, loss: 0.3235563337802887, step time: 17.826318740844727ms\r\nStep 2074, loss: 0.4760124087333679, step time: 18.01776885986328ms\r\n",,terminal_output +16433,13519900,"TERMINAL",0,0,"Step 2075, loss: 0.31368494033813477, step time: 17.545223236083984ms\r\n",,terminal_output +16434,13520008,"TERMINAL",0,0,"Step 2076, loss: 0.42551085352897644, step time: 17.625808715820312ms\r\nStep 2077, loss: 0.30324745178222656, step time: 18.219470977783203ms\r\n",,terminal_output +16435,13520081,"TERMINAL",0,0,"Step 2078, loss: 0.717103123664856, step time: 17.64082908630371ms\r\n",,terminal_output +16436,13520138,"TERMINAL",0,0,"Step 2079, loss: 0.5292949676513672, step time: 17.319917678833008ms\r\n",,terminal_output +16437,13520204,"TERMINAL",0,0,"Step 2080, loss: 2.014173746109009, step time: 18.01896095275879ms\r\n",,terminal_output +16438,13520265,"TERMINAL",0,0,"Step 2081, loss: 0.3462884724140167, step time: 17.807960510253906ms\r\n",,terminal_output +16439,13520329,"TERMINAL",0,0,"Step 2082, loss: 0.5184590816497803, step time: 17.621517181396484ms\r\n",,terminal_output +16440,13520395,"TERMINAL",0,0,"Step 2083, loss: 0.7807287573814392, step time: 17.810821533203125ms\r\n",,terminal_output +16441,13520448,"TERMINAL",0,0,"Step 2084, loss: 0.5764801502227783, step time: 17.66657829284668ms\r\n",,terminal_output +16442,13520544,"TERMINAL",0,0,"Step 2085, loss: 0.39074432849884033, step time: 17.705678939819336ms\r\n",,terminal_output +16443,13520621,"TERMINAL",0,0,"Step 2086, loss: 0.27875038981437683, step time: 17.8678035736084ms\r\nStep 2087, loss: 0.417107492685318, step time: 18.946409225463867ms\r\n",,terminal_output +16444,13520683,"TERMINAL",0,0,"Step 2088, loss: 0.3846781551837921, step time: 17.830371856689453ms\r\n",,terminal_output +16445,13520777,"TERMINAL",0,0,"Step 2089, loss: 0.7174662351608276, step time: 18.162250518798828ms\r\n",,terminal_output +16446,13520829,"TERMINAL",0,0,"Step 2090, loss: 0.3030830919742584, step time: 17.70496368408203ms\r\n",,terminal_output +16447,13520923,"TERMINAL",0,0,"Step 2091, loss: 1.3365930318832397, step time: 17.65918731689453ms\r\n",,terminal_output 
+16448,13520976,"TERMINAL",0,0,"Step 2092, loss: 0.6344414949417114, step time: 17.984867095947266ms\r\n",,terminal_output +16449,13521029,"TERMINAL",0,0,"Step 2093, loss: 0.6116564273834229, step time: 17.676591873168945ms\r\n",,terminal_output +16450,13521145,"TERMINAL",0,0,"Step 2094, loss: 0.39344438910484314, step time: 17.56453514099121ms\r\nStep 2095, loss: 0.37378057837486267, step time: 18.161535263061523ms\r\n",,terminal_output +16451,13521210,"TERMINAL",0,0,"Step 2096, loss: 0.6466460824012756, step time: 17.70639419555664ms\r\n",,terminal_output +16452,13521272,"TERMINAL",0,0,"Step 2097, loss: 0.28243958950042725, step time: 17.84515380859375ms\r\n",,terminal_output +16453,13521634,"TERMINAL",0,0,"Step 2098, loss: 0.2992682456970215, step time: 365.0956153869629ms\r\n",,terminal_output +16454,13521698,"TERMINAL",0,0,"Step 2099, loss: 0.5778929591178894, step time: 26.041746139526367ms\r\n",,terminal_output +16455,13521765,"TERMINAL",0,0,"Step 2100, loss: 0.43871191143989563, step time: 33.76603126525879ms\r\n",,terminal_output +16456,13521874,"TERMINAL",0,0,"Step 2101, loss: 0.43858602643013, step time: 33.960580825805664ms\r\n",,terminal_output +16457,13521926,"TERMINAL",0,0,"Step 2102, loss: 0.6118518114089966, step time: 23.11992645263672ms\r\n",,terminal_output +16458,13522020,"TERMINAL",0,0,"Step 2103, loss: 0.4380248188972473, step time: 46.698570251464844ms\r\n",,terminal_output +16459,13522127,"TERMINAL",0,0,"Step 2104, loss: 0.3637695610523224, step time: 25.438308715820312ms\r\nStep 2105, loss: 0.2852281332015991, step time: 25.426387786865234ms\r\n",,terminal_output +16460,13522189,"TERMINAL",0,0,"Step 2106, loss: 0.31650274991989136, step time: 34.51037406921387ms\r\n",,terminal_output +16461,13522254,"TERMINAL",0,0,"Step 2107, loss: 0.3832203447818756, step time: 21.29983901977539ms\r\n",,terminal_output +16462,13522319,"TERMINAL",0,0,"Step 2108, loss: 0.6003914475440979, step time: 19.338607788085938ms\r\n",,terminal_output +16463,13522382,"TERMINAL",0,0,"Step 2109, loss: 1.117270827293396, step time: 18.824100494384766ms\r\n",,terminal_output +16464,13522447,"TERMINAL",0,0,"Step 2110, loss: 0.33110830187797546, step time: 18.805742263793945ms\r\n",,terminal_output +16465,13522510,"TERMINAL",0,0,"Step 2111, loss: 0.34483617544174194, step time: 18.08762550354004ms\r\n",,terminal_output +16466,13522576,"TERMINAL",0,0,"Step 2112, loss: 0.6713663935661316, step time: 18.038272857666016ms\r\n",,terminal_output +16467,13522643,"TERMINAL",0,0,"Step 2113, loss: 0.6687753796577454, step time: 18.627166748046875ms\r\n",,terminal_output +16468,13522710,"TERMINAL",0,0,"Step 2114, loss: 0.4713885188102722, step time: 17.632246017456055ms\r\n",,terminal_output +16469,13522771,"TERMINAL",0,0,"Step 2115, loss: 0.3342791795730591, step time: 17.983436584472656ms\r\n",,terminal_output +16470,13522837,"TERMINAL",0,0,"Step 2116, loss: 0.3262382745742798, step time: 18.491268157958984ms\r\n",,terminal_output +16471,13522898,"TERMINAL",0,0,"Step 2117, loss: 0.47952938079833984, step time: 18.346548080444336ms\r\n",,terminal_output +16472,13522961,"TERMINAL",0,0,"Step 2118, loss: 1.3308244943618774, step time: 18.215656280517578ms\r\n",,terminal_output +16473,13523025,"TERMINAL",0,0,"Step 2119, loss: 0.33699095249176025, step time: 18.69654655456543ms\r\n",,terminal_output +16474,13523092,"TERMINAL",0,0,"Step 2120, loss: 0.40632495284080505, step time: 18.10741424560547ms\r\n",,terminal_output +16475,13523152,"TERMINAL",0,0,"Step 2121, loss: 1.5019265413284302, step time: 
17.929792404174805ms\r\n",,terminal_output +16476,13523219,"TERMINAL",0,0,"Step 2122, loss: 0.33295121788978577, step time: 18.40043067932129ms\r\n",,terminal_output +16477,13523314,"TERMINAL",0,0,"Step 2123, loss: 0.3214326798915863, step time: 25.356054306030273ms\r\n",,terminal_output +16478,13523420,"TERMINAL",0,0,"Step 2124, loss: 1.1505569219589233, step time: 20.364046096801758ms\r\nStep 2125, loss: 0.2678167521953583, step time: 18.611669540405273ms\r\n",,terminal_output +16479,13523561,"TERMINAL",0,0,"Step 2126, loss: 0.8012334704399109, step time: 18.250703811645508ms\r\nStep 2127, loss: 0.549795925617218, step time: 18.32270622253418ms\r\n",,terminal_output +16480,13523617,"TERMINAL",0,0,"Step 2128, loss: 0.3278600871562958, step time: 18.202781677246094ms\r\n",,terminal_output +16481,13523670,"TERMINAL",0,0,"Step 2129, loss: 0.29688629508018494, step time: 17.93956756591797ms\r\n",,terminal_output +16482,13523784,"TERMINAL",0,0,"Step 2130, loss: 0.27624747157096863, step time: 17.83466339111328ms\r\nStep 2131, loss: 0.6794952154159546, step time: 18.37444305419922ms\r\n",,terminal_output +16483,13523847,"TERMINAL",0,0,"Step 2132, loss: 0.6442793607711792, step time: 17.795562744140625ms\r\n",,terminal_output +16484,13523909,"TERMINAL",0,0,"Step 2133, loss: 0.31492531299591064, step time: 17.8835391998291ms\r\n",,terminal_output +16485,13523973,"TERMINAL",0,0,"Step 2134, loss: 0.4720836877822876, step time: 18.259763717651367ms\r\n",,terminal_output +16486,13524078,"TERMINAL",0,0,"Step 2135, loss: 0.43955615162849426, step time: 17.3494815826416ms\r\n",,terminal_output +16487,13524129,"TERMINAL",0,0,"Step 2136, loss: 0.44825854897499084, step time: 17.728805541992188ms\r\n",,terminal_output +16488,13524233,"TERMINAL",0,0,"Step 2137, loss: 0.5700712203979492, step time: 18.34893226623535ms\r\nStep 2138, loss: 0.3011395335197449, step time: 17.832279205322266ms\r\n",,terminal_output +16489,13524328,"TERMINAL",0,0,"Step 2139, loss: 1.585947036743164, step time: 17.588376998901367ms\r\n",,terminal_output +16490,13524379,"TERMINAL",0,0,"Step 2140, loss: 0.299631267786026, step time: 18.40662956237793ms\r\n",,terminal_output +16491,13524472,"TERMINAL",0,0,"Step 2141, loss: 0.4725056290626526, step time: 18.007755279541016ms\r\n",,terminal_output +16492,13524530,"TERMINAL",0,0,"Step 2142, loss: 0.7772221565246582, step time: 17.712116241455078ms\r\n",,terminal_output +16493,13524611,"TERMINAL",0,0,"Step 2143, loss: 0.9511222839355469, step time: 18.557310104370117ms\r\nStep 2144, loss: 0.9589352607727051, step time: 23.24080467224121ms\r\n",,terminal_output +16494,13524708,"TERMINAL",0,0,"Step 2145, loss: 0.28311172127723694, step time: 17.606496810913086ms\r\n",,terminal_output +16495,13524759,"TERMINAL",0,0,"Step 2146, loss: 0.4133186638355255, step time: 18.223285675048828ms\r\n",,terminal_output +16496,13524851,"TERMINAL",0,0,"Step 2147, loss: 0.9219383001327515, step time: 18.14413070678711ms\r\n",,terminal_output +16497,13524905,"TERMINAL",0,0,"Step 2148, loss: 0.5743530988693237, step time: 17.916202545166016ms\r\n",,terminal_output +16498,13525009,"TERMINAL",0,0,"Step 2149, loss: 0.5376202464103699, step time: 18.539905548095703ms\r\nStep 2150, loss: 0.42976412177085876, step time: 17.676591873168945ms\r\n",,terminal_output +16499,13525120,"TERMINAL",0,0,"Step 2151, loss: 0.7504144310951233, step time: 17.702341079711914ms\r\nStep 2152, loss: 1.0882586240768433, step time: 18.016576766967773ms\r\n",,terminal_output +16500,13525185,"TERMINAL",0,0,"Step 2153, loss: 
0.3876115381717682, step time: 17.983675003051758ms\r\n",,terminal_output +16501,13525283,"TERMINAL",0,0,"Step 2154, loss: 0.9988353848457336, step time: 17.959117889404297ms\r\n",,terminal_output +16502,13525337,"TERMINAL",0,0,"Step 2155, loss: 0.30880632996559143, step time: 18.03135871887207ms\r\n",,terminal_output +16503,13525403,"TERMINAL",0,0,"Step 2156, loss: 0.9728145599365234, step time: 17.87853240966797ms\r\n",,terminal_output +16504,13525464,"TERMINAL",0,0,"Step 2157, loss: 0.31129151582717896, step time: 17.896652221679688ms\r\n",,terminal_output +16505,13525525,"TERMINAL",0,0,"Step 2158, loss: 0.5521438717842102, step time: 17.979860305786133ms\r\n",,terminal_output +16506,13525586,"TERMINAL",0,0,"Step 2159, loss: 0.4295809864997864, step time: 18.001079559326172ms\r\n",,terminal_output +16507,13525647,"TERMINAL",0,0,"Step 2160, loss: 0.4192390441894531, step time: 17.751693725585938ms\r\n",,terminal_output +16508,13525801,"train_lam.py",0,0,"",python,tab +16509,13525819,"TERMINAL",0,0,"Step 2161, loss: 0.33793744444847107, step time: 20.63274383544922ms\r\n",,terminal_output +16510,13525913,"TERMINAL",0,0,"Step 2162, loss: 0.4088253080844879, step time: 17.918109893798828ms\r\nStep 2163, loss: 0.8849466443061829, step time: 18.002748489379883ms\r\nStep 2164, loss: 0.5811828970909119, step time: 18.329381942749023ms\r\n",,terminal_output +16511,13526067,"TERMINAL",0,0,"Step 2165, loss: 0.4782315194606781, step time: 17.958641052246094ms\r\nStep 2166, loss: 0.28567415475845337, step time: 17.820358276367188ms\r\n",,terminal_output +16512,13526118,"TERMINAL",0,0,"Step 2167, loss: 0.5108449459075928, step time: 18.44334602355957ms\r\n",,terminal_output +16513,13526166,"TERMINAL",0,0,"Step 2168, loss: 0.9411285519599915, step time: 18.011093139648438ms\r\n",,terminal_output +16514,13526230,"TERMINAL",0,0,"Step 2169, loss: 0.4895876944065094, step time: 18.866539001464844ms\r\n",,terminal_output +16515,13526335,"TERMINAL",0,0,"Step 2170, loss: 0.5207639336585999, step time: 18.471479415893555ms\r\n",,terminal_output +16516,13526424,"TERMINAL",0,0,"Step 2171, loss: 0.2850491404533386, step time: 17.760276794433594ms\r\nStep 2172, loss: 0.3908407986164093, step time: 17.76909828186035ms\r\n",,terminal_output +16517,13526523,"TERMINAL",0,0,"Step 2173, loss: 0.29813912510871887, step time: 17.805099487304688ms\r\n",,terminal_output +16518,13526585,"TERMINAL",0,0,"Step 2174, loss: 0.4084281027317047, step time: 17.760038375854492ms\r\n",,terminal_output +16519,13526652,"TERMINAL",0,0,"Step 2175, loss: 0.3244043290615082, step time: 17.40741729736328ms\r\n",,terminal_output +16520,13526714,"TERMINAL",0,0,"Step 2176, loss: 0.39247164130210876, step time: 18.44024658203125ms\r\n",,terminal_output +16521,13526775,"TERMINAL",0,0,"Step 2177, loss: 0.3375771641731262, step time: 18.124103546142578ms\r\n",,terminal_output +16522,13526886,"TERMINAL",0,0,"Step 2178, loss: 0.7773751616477966, step time: 17.990589141845703ms\r\nStep 2179, loss: 0.5230486989021301, step time: 18.577098846435547ms\r\n",,terminal_output +16523,13526959,"TERMINAL",0,0,"Step 2180, loss: 0.7957146167755127, step time: 17.771005630493164ms\r\n",,terminal_output +16524,13527013,"TERMINAL",0,0,"Step 2181, loss: 0.3956294655799866, step time: 18.225431442260742ms\r\n",,terminal_output +16525,13527092,"TERMINAL",0,0,"Step 2182, loss: 0.25518104434013367, step time: 18.22352409362793ms\r\n",,terminal_output +16526,13527139,"TERMINAL",0,0,"Step 2183, loss: 0.3968817889690399, step time: 
17.67754554748535ms\r\n",,terminal_output +16527,13527206,"TERMINAL",0,0,"Step 2184, loss: 0.30393123626708984, step time: 17.634868621826172ms\r\n",,terminal_output +16528,13527265,"TERMINAL",0,0,"Step 2185, loss: 0.43447446823120117, step time: 18.355846405029297ms\r\n",,terminal_output +16529,13527316,"TERMINAL",0,0,"Step 2186, loss: 0.429694265127182, step time: 17.69089698791504ms\r\n",,terminal_output +16530,13527408,"TERMINAL",0,0,"Step 2187, loss: 0.6712568998336792, step time: 17.683029174804688ms\r\n",,terminal_output +16531,13527514,"TERMINAL",0,0,"Step 2188, loss: 0.23627297580242157, step time: 19.844770431518555ms\r\nStep 2189, loss: 0.5988524556159973, step time: 18.047332763671875ms\r\n",,terminal_output +16532,13527578,"TERMINAL",0,0,"Step 2190, loss: 0.33612531423568726, step time: 17.97318458557129ms\r\n",,terminal_output +16533,13527641,"TERMINAL",0,0,"Step 2191, loss: 0.4746376872062683, step time: 18.58067512512207ms\r\n",,terminal_output +16534,13527703,"TERMINAL",0,0,"Step 2192, loss: 0.608634889125824, step time: 18.016576766967773ms\r\n",,terminal_output +16535,13527795,"TERMINAL",0,0,"Step 2193, loss: 0.32750773429870605, step time: 17.935991287231445ms\r\n",,terminal_output +16536,13527849,"TERMINAL",0,0,"Step 2194, loss: 1.0142537355422974, step time: 17.81320571899414ms\r\n",,terminal_output +16537,13527944,"TERMINAL",0,0,"Step 2195, loss: 1.112493872642517, step time: 17.7309513092041ms\r\n",,terminal_output +16538,13528018,"TERMINAL",0,0,"Step 2196, loss: 0.33959710597991943, step time: 17.937183380126953ms\r\nStep 2197, loss: 0.6494626998901367, step time: 18.311500549316406ms\r\n",,terminal_output +16539,13528127,"TERMINAL",0,0,"Step 2198, loss: 0.3680502474308014, step time: 18.007278442382812ms\r\n",,terminal_output +16540,13528148,"TERMINAL",0,0,"Step 2199, loss: 0.3610173463821411, step time: 18.040895462036133ms\r\n",,terminal_output +16541,13528233,"TERMINAL",0,0,"Step 2200, loss: 0.2650594115257263, step time: 30.406951904296875ms\r\n",,terminal_output +16542,13528303,"TERMINAL",0,0,"Step 2201, loss: 0.2977732717990875, step time: 27.406692504882812ms\r\n",,terminal_output +16543,13528354,"TERMINAL",0,0,"Step 2202, loss: 0.8863615989685059, step time: 18.864870071411133ms\r\n",,terminal_output +16544,13528449,"TERMINAL",0,0,"Step 2203, loss: 0.44386085867881775, step time: 18.735647201538086ms\r\n",,terminal_output +16545,13528519,"TERMINAL",0,0,"Step 2204, loss: 0.4918750524520874, step time: 18.26310157775879ms\r\n",,terminal_output +16546,13528571,"TERMINAL",0,0,"Step 2205, loss: 0.4464147686958313, step time: 18.024444580078125ms\r\n",,terminal_output +16547,13528636,"TERMINAL",0,0,"Step 2206, loss: 0.3319155275821686, step time: 18.056869506835938ms\r\n",,terminal_output +16548,13528688,"TERMINAL",0,0,"Step 2207, loss: 0.24484016001224518, step time: 18.00227165222168ms\r\n",,terminal_output +16549,13528784,"TERMINAL",0,0,"Step 2208, loss: 0.439060240983963, step time: 17.75956153869629ms\r\n",,terminal_output +16550,13528863,"TERMINAL",0,0,"Step 2209, loss: 0.9372261166572571, step time: 18.691301345825195ms\r\nStep 2210, loss: 0.26407676935195923, step time: 17.984628677368164ms\r\n",,terminal_output +16551,13528926,"TERMINAL",0,0,"Step 2211, loss: 0.3093978762626648, step time: 18.131494522094727ms\r\n",,terminal_output +16552,13528989,"TERMINAL",0,0,"Step 2212, loss: 0.2629855275154114, step time: 18.25237274169922ms\r\n",,terminal_output +16553,13529055,"TERMINAL",0,0,"Step 2213, loss: 0.2702237069606781, step time: 
17.48943328857422ms\r\n",,terminal_output +16554,13529118,"TERMINAL",0,0,"Step 2214, loss: 0.3733116686344147, step time: 17.623424530029297ms\r\n",,terminal_output +16555,13529221,"TERMINAL",0,0,"Step 2215, loss: 0.2424977421760559, step time: 18.443584442138672ms\r\n",,terminal_output +16556,13529281,"TERMINAL",0,0,"Step 2216, loss: 2.018151044845581, step time: 17.9445743560791ms\r\n",,terminal_output +16557,13529343,"TERMINAL",0,0,"Step 2217, loss: 1.2030068635940552, step time: 18.10288429260254ms\r\n",,terminal_output +16558,13529402,"TERMINAL",0,0,"Step 2218, loss: 0.7638980746269226, step time: 18.832683563232422ms\r\n",,terminal_output +16559,13529465,"TERMINAL",0,0,"Step 2219, loss: 0.3861181437969208, step time: 17.99917221069336ms\r\n",,terminal_output +16560,13529524,"TERMINAL",0,0,"Step 2220, loss: 0.26369085907936096, step time: 17.870426177978516ms\r\n",,terminal_output +16561,13529586,"TERMINAL",0,0,"Step 2221, loss: 0.7492930889129639, step time: 17.89546012878418ms\r\n",,terminal_output +16562,13529649,"TERMINAL",0,0,"Step 2222, loss: 0.3720057010650635, step time: 17.886877059936523ms\r\n",,terminal_output +16563,13529723,"train_lam.py",1719,0,"",python,selection_mouse +16564,13529735,"TERMINAL",0,0,"Step 2223, loss: 0.5045683979988098, step time: 17.653465270996094ms\r\n",,terminal_output +16565,13529774,"TERMINAL",0,0,"Step 2224, loss: 0.3342932462692261, step time: 18.073320388793945ms\r\n",,terminal_output +16566,13529827,"TERMINAL",0,0,"Step 2225, loss: 0.3021675944328308, step time: 17.81296730041504ms\r\n",,terminal_output +16567,13529863,"train_lam.py",1716,16,"gt_future_frames",python,selection_mouse +16568,13529953,"TERMINAL",0,0,"Step 2226, loss: 0.46546462178230286, step time: 17.490625381469727ms\r\nStep 2227, loss: 0.28260180354118347, step time: 17.86184310913086ms\r\n",,terminal_output +16569,13530020,"TERMINAL",0,0,"Step 2228, loss: 0.7236341834068298, step time: 17.765045166015625ms\r\n",,terminal_output +16570,13530082,"TERMINAL",0,0,"Step 2229, loss: 0.3838784396648407, step time: 17.509937286376953ms\r\n",,terminal_output +16571,13530146,"TERMINAL",0,0,"Step 2230, loss: 1.4428057670593262, step time: 18.17011833190918ms\r\n",,terminal_output +16572,13530205,"TERMINAL",0,0,"Step 2231, loss: 0.3815544545650482, step time: 17.554044723510742ms\r\n",,terminal_output +16573,13530272,"TERMINAL",0,0,"Step 2232, loss: 0.47102656960487366, step time: 18.012046813964844ms\r\n",,terminal_output +16574,13530587,"TERMINAL",0,0,"Step 2233, loss: 0.5288072228431702, step time: 309.57722663879395ms\r\n",,terminal_output +16575,13530687,"train_lam.py",1736,0,"",python,selection_mouse +16576,13530688,"TERMINAL",0,0,"Step 2234, loss: 1.8755276203155518, step time: 25.16484260559082ms\r\n",,terminal_output +16577,13530779,"TERMINAL",0,0,"Step 2235, loss: 0.671887218952179, step time: 24.013996124267578ms\r\nStep 2236, loss: 0.7266162633895874, step time: 19.166231155395508ms\r\n",,terminal_output +16578,13530876,"TERMINAL",0,0,"Step 2237, loss: 0.3374066650867462, step time: 18.593311309814453ms\r\n",,terminal_output +16579,13530931,"TERMINAL",0,0,"Step 2238, loss: 0.38336238265037537, step time: 17.960309982299805ms\r\n",,terminal_output +16580,13531036,"TERMINAL",0,0,"Step 2239, loss: 0.3442050516605377, step time: 18.759727478027344ms\r\nStep 2240, loss: 0.2768022119998932, step time: 17.833948135375977ms\r\n",,terminal_output +16581,13531160,"TERMINAL",0,0,"Step 2241, loss: 0.2509387135505676, step time: 17.943143844604492ms\r\nStep 2242, loss: 0.40978848934173584, 
step time: 18.387794494628906ms\r\n",,terminal_output +16582,13531226,"TERMINAL",0,0,"Step 2243, loss: 1.1962804794311523, step time: 17.941951751708984ms\r\n",,terminal_output +16583,13531290,"train_lam.py",1726,0,"",python,selection_mouse +16584,13531301,"TERMINAL",0,0,"Step 2244, loss: 0.5944609045982361, step time: 17.937421798706055ms\r\n",,terminal_output +16585,13531353,"TERMINAL",0,0,"Step 2245, loss: 0.8314822316169739, step time: 18.374919891357422ms\r\n",,terminal_output +16586,13531447,"train_lam.py",1716,16,"gt_future_frames",python,selection_mouse +16587,13531474,"TERMINAL",0,0,"Step 2246, loss: 0.3759438991546631, step time: 17.948627471923828ms\r\n",,terminal_output +16588,13531541,"TERMINAL",0,0,"Step 2247, loss: 0.25348663330078125, step time: 17.637252807617188ms\r\nStep 2248, loss: 0.3987288773059845, step time: 18.608808517456055ms\r\n",,terminal_output +16589,13531606,"TERMINAL",0,0,"Step 2249, loss: 0.30124205350875854, step time: 17.630577087402344ms\r\n",,terminal_output +16590,13531672,"TERMINAL",0,0,"Step 2250, loss: 0.3937009871006012, step time: 17.95053482055664ms\r\n",,terminal_output +16591,13531748,"TERMINAL",0,0,"Step 2251, loss: 0.2993415594100952, step time: 17.903804779052734ms\r\n",,terminal_output +16592,13531788,"TERMINAL",0,0,"Step 2252, loss: 0.554463267326355, step time: 17.901897430419922ms\r\n",,terminal_output +16593,13531859,"TERMINAL",0,0,"Step 2253, loss: 0.460584431886673, step time: 18.035888671875ms\r\n",,terminal_output +16594,13531937,"TERMINAL",0,0,"Step 2254, loss: 0.4603962302207947, step time: 18.453121185302734ms\r\n",,terminal_output +16595,13531989,"TERMINAL",0,0,"Step 2255, loss: 0.2936877906322479, step time: 18.249034881591797ms\r\n",,terminal_output +16596,13532052,"TERMINAL",0,0,"Step 2256, loss: 0.7462236285209656, step time: 17.879724502563477ms\r\n",,terminal_output +16597,13532114,"TERMINAL",0,0,"Step 2257, loss: 1.2301061153411865, step time: 18.50724220275879ms\r\n",,terminal_output +16598,13532175,"TERMINAL",0,0,"Step 2258, loss: 0.44720369577407837, step time: 17.497539520263672ms\r\n",,terminal_output +16599,13532237,"TERMINAL",0,0,"Step 2259, loss: 0.24197685718536377, step time: 18.010854721069336ms\r\n",,terminal_output +16600,13532298,"TERMINAL",0,0,"Step 2260, loss: 0.48219844698905945, step time: 17.9750919342041ms\r\n",,terminal_output +16601,13532359,"TERMINAL",0,0,"Step 2261, loss: 0.2697449326515198, step time: 18.03874969482422ms\r\n",,terminal_output +16602,13532420,"TERMINAL",0,0,"Step 2262, loss: 0.3338528871536255, step time: 17.766952514648438ms\r\n",,terminal_output +16603,13532485,"TERMINAL",0,0,"Step 2263, loss: 0.3859674334526062, step time: 18.284320831298828ms\r\n",,terminal_output +16604,13532613,"TERMINAL",0,0,"Step 2264, loss: 0.4964815676212311, step time: 17.991304397583008ms\r\nStep 2265, loss: 0.36417829990386963, step time: 17.64702796936035ms\r\n",,terminal_output +16605,13532708,"TERMINAL",0,0,"Step 2266, loss: 0.2631588876247406, step time: 18.248558044433594ms\r\n",,terminal_output +16606,13532813,"TERMINAL",0,0,"Step 2267, loss: 0.8386749625205994, step time: 17.75360107421875ms\r\nStep 2268, loss: 0.28638797998428345, step time: 17.640352249145508ms\r\n",,terminal_output +16607,13532929,"TERMINAL",0,0,"Step 2269, loss: 0.2734149396419525, step time: 18.673419952392578ms\r\nStep 2270, loss: 0.26418331265449524, step time: 18.657922744750977ms\r\n",,terminal_output +16608,13532991,"TERMINAL",0,0,"Step 2271, loss: 0.5224041938781738, step time: 
18.038272857666016ms\r\n",,terminal_output +16609,13533053,"TERMINAL",0,0,"Step 2272, loss: 0.27642378211021423, step time: 18.6004638671875ms\r\n",,terminal_output +16610,13533116,"TERMINAL",0,0,"Step 2273, loss: 0.3038704991340637, step time: 18.172502517700195ms\r\n",,terminal_output +16611,13533227,"TERMINAL",0,0,"Step 2274, loss: 0.8047860860824585, step time: 17.923593521118164ms\r\n",,terminal_output +16612,13533287,"TERMINAL",0,0,"Step 2275, loss: 0.5676214098930359, step time: 18.402099609375ms\r\n",,terminal_output +16613,13533338,"TERMINAL",0,0,"Step 2276, loss: 0.7430126667022705, step time: 17.90618896484375ms\r\n",,terminal_output +16614,13533440,"TERMINAL",0,0,"Step 2277, loss: 0.8410358428955078, step time: 17.4710750579834ms\r\nStep 2278, loss: 0.2674497663974762, step time: 18.410682678222656ms\r\n",,terminal_output +16615,13533535,"TERMINAL",0,0,"Step 2279, loss: 0.3359852731227875, step time: 17.827987670898438ms\r\n",,terminal_output +16616,13533592,"TERMINAL",0,0,"Step 2280, loss: 0.3105624318122864, step time: 17.7614688873291ms\r\n",,terminal_output +16617,13533692,"TERMINAL",0,0,"Step 2281, loss: 0.5356578826904297, step time: 17.928600311279297ms\r\nStep 2282, loss: 0.8620448112487793, step time: 17.841577529907227ms\r\n",,terminal_output +16618,13533828,"TERMINAL",0,0,"Step 2283, loss: 0.36307376623153687, step time: 17.462491989135742ms\r\n",,terminal_output +16619,13533892,"TERMINAL",0,0,"Step 2284, loss: 0.5607874989509583, step time: 18.3103084564209ms\r\nStep 2285, loss: 0.8512306809425354, step time: 20.06983757019043ms\r\n",,terminal_output +16620,13533957,"TERMINAL",0,0,"Step 2286, loss: 0.24641576409339905, step time: 18.10622215270996ms\r\n",,terminal_output +16621,13534020,"TERMINAL",0,0,"Step 2287, loss: 0.2387147843837738, step time: 18.474578857421875ms\r\n",,terminal_output +16622,13534082,"TERMINAL",0,0,"Step 2288, loss: 1.5939898490905762, step time: 17.888784408569336ms\r\n",,terminal_output +16623,13534205,"TERMINAL",0,0,"Step 2289, loss: 0.7807190418243408, step time: 17.949342727661133ms\r\nStep 2290, loss: 0.32713282108306885, step time: 18.229961395263672ms\r\n",,terminal_output +16624,13534268,"TERMINAL",0,0,"Step 2291, loss: 0.2652679681777954, step time: 17.76266098022461ms\r\n",,terminal_output +16625,13534353,"TERMINAL",0,0,"Step 2292, loss: 0.2175189107656479, step time: 17.642498016357422ms\r\n",,terminal_output +16626,13534498,"TERMINAL",0,0,"Step 2293, loss: 0.3205443322658539, step time: 18.42021942138672ms\r\nStep 2294, loss: 0.7186260223388672, step time: 17.782211303710938ms\r\n",,terminal_output +16627,13534549,"TERMINAL",0,0,"Step 2295, loss: 0.40763404965400696, step time: 17.452478408813477ms\r\n",,terminal_output +16628,13534656,"TERMINAL",0,0,"Step 2296, loss: 0.2359783947467804, step time: 18.3260440826416ms\r\nStep 2297, loss: 0.49620118737220764, step time: 17.623186111450195ms\r\n",,terminal_output +16629,13534721,"TERMINAL",0,0,"Step 2298, loss: 0.4568437933921814, step time: 17.75646209716797ms\r\n",,terminal_output +16630,13534778,"TERMINAL",0,0,"Step 2299, loss: 0.245560884475708, step time: 18.190383911132812ms\r\n",,terminal_output +16631,13534877,"TERMINAL",0,0,"Step 2300, loss: 0.8537680506706238, step time: 17.70639419555664ms\r\n",,terminal_output +16632,13534948,"TERMINAL",0,0,"Step 2301, loss: 0.4781368672847748, step time: 17.805814743041992ms\r\n",,terminal_output +16633,13534999,"TERMINAL",0,0,"Step 2302, loss: 0.22786414623260498, step time: 31.963348388671875ms\r\n",,terminal_output 
+16634,13535119,"TERMINAL",0,0,"Step 2303, loss: 0.6341933012008667, step time: 18.987655639648438ms\r\nStep 2304, loss: 1.4235107898712158, step time: 17.8985595703125ms\r\n",,terminal_output +16635,13535172,"TERMINAL",0,0,"Step 2305, loss: 0.3914104700088501, step time: 18.544912338256836ms\r\n",,terminal_output +16636,13535268,"TERMINAL",0,0,"Step 2306, loss: 0.45998796820640564, step time: 17.948150634765625ms\r\n",,terminal_output +16637,13535321,"TERMINAL",0,0,"Step 2307, loss: 0.36191341280937195, step time: 18.159151077270508ms\r\n",,terminal_output +16638,13535425,"TERMINAL",0,0,"Step 2308, loss: 0.367517352104187, step time: 18.460512161254883ms\r\nStep 2309, loss: 0.5765528082847595, step time: 17.884016036987305ms\r\n",,terminal_output +16639,13535489,"TERMINAL",0,0,"Step 2310, loss: 0.34881043434143066, step time: 17.93837547302246ms\r\n",,terminal_output +16640,13535551,"TERMINAL",0,0,"Step 2311, loss: 0.2588205635547638, step time: 18.156766891479492ms\r\n",,terminal_output +16641,13535642,"TERMINAL",0,0,"Step 2312, loss: 0.3693830966949463, step time: 17.467021942138672ms\r\n",,terminal_output +16642,13535695,"TERMINAL",0,0,"Step 2313, loss: 0.43080002069473267, step time: 17.86327362060547ms\r\n",,terminal_output +16643,13535758,"TERMINAL",0,0,"Step 2314, loss: 0.6263172626495361, step time: 18.337011337280273ms\r\n",,terminal_output +16644,13535820,"TERMINAL",0,0,"Step 2315, loss: 0.37743163108825684, step time: 18.442630767822266ms\r\n",,terminal_output +16645,13535880,"TERMINAL",0,0,"Step 2316, loss: 0.27459388971328735, step time: 17.89569854736328ms\r\n",,terminal_output +16646,13535943,"TERMINAL",0,0,"Step 2317, loss: 0.3414015471935272, step time: 18.098831176757812ms\r\n",,terminal_output +16647,13536002,"TERMINAL",0,0,"Step 2318, loss: 0.25848639011383057, step time: 18.088102340698242ms\r\n",,terminal_output +16648,13536066,"TERMINAL",0,0,"Step 2319, loss: 0.29830777645111084, step time: 18.59593391418457ms\r\n",,terminal_output +16649,13536127,"TERMINAL",0,0,"Step 2320, loss: 0.24325986206531525, step time: 20.215272903442383ms\r\n",,terminal_output +16650,13536193,"TERMINAL",0,0,"Step 2321, loss: 0.44486361742019653, step time: 17.652034759521484ms\r\n",,terminal_output +16651,13536252,"TERMINAL",0,0,"Step 2322, loss: 0.24531233310699463, step time: 17.84205436706543ms\r\n",,terminal_output +16652,13536345,"TERMINAL",0,0,"Step 2323, loss: 0.4348905086517334, step time: 18.335580825805664ms\r\n",,terminal_output +16653,13536454,"TERMINAL",0,0,"Step 2324, loss: 2.123737335205078, step time: 17.567873001098633ms\r\nStep 2325, loss: 0.40719160437583923, step time: 17.856359481811523ms\r\n",,terminal_output +16654,13536516,"TERMINAL",0,0,"Step 2326, loss: 0.3028669059276581, step time: 18.532752990722656ms\r\n",,terminal_output +16655,13536577,"TERMINAL",0,0,"Step 2327, loss: 0.2841465473175049, step time: 17.75336265563965ms\r\n",,terminal_output +16656,13536640,"TERMINAL",0,0,"Step 2328, loss: 0.3782965838909149, step time: 17.69733428955078ms\r\n",,terminal_output +16657,13536700,"TERMINAL",0,0,"Step 2329, loss: 0.22973819077014923, step time: 18.41259002685547ms\r\n",,terminal_output +16658,13536763,"TERMINAL",0,0,"Step 2330, loss: 0.5989118814468384, step time: 17.8070068359375ms\r\n",,terminal_output +16659,13536823,"TERMINAL",0,0,"Step 2331, loss: 0.9060900807380676, step time: 17.89712905883789ms\r\n",,terminal_output +16660,13536947,"train_dynamics.py",0,0,"",python,tab +16661,13536966,"TERMINAL",0,0,"Step 2332, loss: 0.4276825785636902, step time: 
18.240690231323242ms\r\n",,terminal_output +16662,13537024,"TERMINAL",0,0,"Step 2333, loss: 1.108391523361206, step time: 17.945051193237305ms\r\nStep 2334, loss: 0.8136082887649536, step time: 17.9445743560791ms\r\n",,terminal_output +16663,13537166,"TERMINAL",0,0,"Step 2335, loss: 0.38442879915237427, step time: 18.816709518432617ms\r\nStep 2336, loss: 0.293387770652771, step time: 17.786026000976562ms\r\n",,terminal_output +16664,13537283,"TERMINAL",0,0,"Step 2337, loss: 0.3030564785003662, step time: 17.993927001953125ms\r\nStep 2338, loss: 0.5625065565109253, step time: 18.268346786499023ms\r\n",,terminal_output +16665,13537334,"TERMINAL",0,0,"Step 2339, loss: 0.5462490320205688, step time: 17.742156982421875ms\r\n",,terminal_output +16666,13537442,"TERMINAL",0,0,"Step 2340, loss: 0.3224828541278839, step time: 17.717599868774414ms\r\n",,terminal_output +16667,13537506,"TERMINAL",0,0,"Step 2341, loss: 0.46780720353126526, step time: 17.887592315673828ms\r\n",,terminal_output +16668,13537571,"TERMINAL",0,0,"Step 2342, loss: 1.4552624225616455, step time: 17.499208450317383ms\r\n",,terminal_output +16669,13537623,"TERMINAL",0,0,"Step 2343, loss: 0.2455267310142517, step time: 17.87400245666504ms\r\n",,terminal_output +16670,13537687,"TERMINAL",0,0,"Step 2344, loss: 0.4366265833377838, step time: 17.91214942932129ms\r\n",,terminal_output +16671,13537750,"TERMINAL",0,0,"Step 2345, loss: 0.25169122219085693, step time: 17.884016036987305ms\r\n",,terminal_output +16672,13537810,"TERMINAL",0,0,"Step 2346, loss: 0.3655984699726105, step time: 17.426490783691406ms\r\n",,terminal_output +16673,13537871,"TERMINAL",0,0,"Step 2347, loss: 0.4634867310523987, step time: 17.757892608642578ms\r\n",,terminal_output +16674,13537937,"TERMINAL",0,0,"Step 2348, loss: 0.37916916608810425, step time: 17.550230026245117ms\r\n",,terminal_output +16675,13537998,"TERMINAL",0,0,"Step 2349, loss: 0.4235343635082245, step time: 17.826557159423828ms\r\n",,terminal_output +16676,13538008,"train_dynamics.py",2849,0,"",python,selection_mouse +16677,13538012,"train_dynamics.py",2848,0,"",python,selection_command +16678,13538102,"TERMINAL",0,0,"Step 2350, loss: 0.23904432356357574, step time: 18.364429473876953ms\r\nStep 2351, loss: 1.8046518564224243, step time: 17.717838287353516ms\r\n",,terminal_output +16679,13538165,"TERMINAL",0,0,"Step 2352, loss: 0.34767282009124756, step time: 17.80223846435547ms\r\n",,terminal_output +16680,13538230,"TERMINAL",0,0,"Step 2353, loss: 0.24187520146369934, step time: 18.146038055419922ms\r\n",,terminal_output +16681,13538625,"train_dynamics.py",2849,0,"",python,selection_mouse +16682,13538639,"train_dynamics.py",2848,0,"",python,selection_command +16683,13538667,"TERMINAL",0,0,"Step 2354, loss: 0.24555911123752594, step time: 341.6445255279541ms\r\nStep 2355, loss: 0.38003644347190857, step time: 25.300979614257812ms\r\n",,terminal_output +16684,13538734,"TERMINAL",0,0,"Step 2356, loss: 1.183040976524353, step time: 20.00713348388672ms\r\n",,terminal_output +16685,13538774,"TERMINAL",0,0,"Step 2357, loss: 0.686955988407135, step time: 18.839359283447266ms\r\n",,terminal_output +16686,13538872,"TERMINAL",0,0,"Step 2358, loss: 1.155915379524231, step time: 18.002748489379883ms\r\n",,terminal_output +16687,13538934,"TERMINAL",0,0,"Step 2359, loss: 1.349729061126709, step time: 18.257856369018555ms\r\n",,terminal_output +16688,13539004,"TERMINAL",0,0,"Step 2360, loss: 0.2648065388202667, step time: 18.23902130126953ms\r\n",,terminal_output +16689,13539061,"TERMINAL",0,0,"Step 2361, 
loss: 0.3126969635486603, step time: 17.740964889526367ms\r\n",,terminal_output +16690,13539156,"TERMINAL",0,0,"Step 2362, loss: 0.3486918807029724, step time: 18.104076385498047ms\r\nStep 2363, loss: 0.8047502636909485, step time: 18.30458641052246ms\r\n",,terminal_output +16691,13539248,"TERMINAL",0,0,"Step 2364, loss: 0.389827698469162, step time: 18.118619918823242ms\r\n",,terminal_output +16692,13539358,"TERMINAL",0,0,"Step 2365, loss: 0.2976421117782593, step time: 17.885684967041016ms\r\nStep 2366, loss: 0.2932267189025879, step time: 18.26024055480957ms\r\n",,terminal_output +16693,13539476,"TERMINAL",0,0,"Step 2367, loss: 1.5528080463409424, step time: 17.796993255615234ms\r\nStep 2368, loss: 0.42459678649902344, step time: 17.92001724243164ms\r\n",,terminal_output +16694,13539592,"TERMINAL",0,0,"Step 2369, loss: 0.28082260489463806, step time: 18.272876739501953ms\r\n",,terminal_output +16695,13539669,"TERMINAL",0,0,"Step 2370, loss: 0.33380112051963806, step time: 18.010854721069336ms\r\nStep 2371, loss: 0.36847686767578125, step time: 17.45295524597168ms\r\n",,terminal_output +16696,13539769,"TERMINAL",0,0,"Step 2372, loss: 0.2465931624174118, step time: 18.385887145996094ms\r\n",,terminal_output +16697,13539830,"TERMINAL",0,0,"Step 2373, loss: 0.6440330147743225, step time: 18.02802085876465ms\r\n",,terminal_output +16698,13539893,"TERMINAL",0,0,"Step 2374, loss: 0.3697233200073242, step time: 18.070459365844727ms\r\n",,terminal_output +16699,13539954,"train_dynamics.py",1964,0,"",python,selection_mouse +16700,13539979,"TERMINAL",0,0,"Step 2375, loss: 0.8153151869773865, step time: 17.866849899291992ms\r\n",,terminal_output +16701,13540060,"TERMINAL",0,0,"Step 2376, loss: 0.3944758474826813, step time: 18.04971694946289ms\r\nStep 2377, loss: 0.2566487491130829, step time: 17.832040786743164ms\r\n",,terminal_output +16702,13540131,"train_dynamics.py",1956,16,"dynamics_loss_fn",python,selection_mouse +16703,13540179,"TERMINAL",0,0,"Step 2378, loss: 0.21372318267822266, step time: 17.700672149658203ms\r\nStep 2379, loss: 0.6328036785125732, step time: 17.38762855529785ms\r\n",,terminal_output +16704,13540246,"TERMINAL",0,0,"Step 2380, loss: 0.21161912381649017, step time: 18.047809600830078ms\r\n",,terminal_output +16705,13540374,"TERMINAL",0,0,"Step 2381, loss: 0.26677200198173523, step time: 17.925500869750977ms\r\nStep 2382, loss: 0.4867081940174103, step time: 18.034934997558594ms\r\n",,terminal_output +16706,13540457,"TERMINAL",0,0,"Step 2383, loss: 0.42319250106811523, step time: 17.5168514251709ms\r\n",,terminal_output +16707,13540505,"TERMINAL",0,0,"Step 2384, loss: 0.6027562618255615, step time: 18.106698989868164ms\r\n",,terminal_output +16708,13540605,"TERMINAL",0,0,"Step 2385, loss: 1.3808910846710205, step time: 17.822265625ms\r\n",,terminal_output +16709,13540709,"TERMINAL",0,0,"Step 2386, loss: 0.4401237368583679, step time: 17.375469207763672ms\r\nStep 2387, loss: 0.39422735571861267, step time: 18.22376251220703ms\r\n",,terminal_output +16710,13540771,"TERMINAL",0,0,"Step 2388, loss: 0.7316480875015259, step time: 17.803430557250977ms\r\n",,terminal_output +16711,13540835,"TERMINAL",0,0,"Step 2389, loss: 0.4507906138896942, step time: 17.37499237060547ms\r\n",,terminal_output +16712,13540898,"TERMINAL",0,0,"Step 2390, loss: 0.19451063871383667, step time: 18.987655639648438ms\r\n",,terminal_output +16713,13540962,"TERMINAL",0,0,"Step 2391, loss: 0.9409677386283875, step time: 17.835617065429688ms\r\n",,terminal_output +16714,13541022,"TERMINAL",0,0,"Step 2392, 
loss: 0.48649951815605164, step time: 17.585039138793945ms\r\n",,terminal_output +16715,13541082,"TERMINAL",0,0,"Step 2393, loss: 0.2917943000793457, step time: 18.599271774291992ms\r\n",,terminal_output +16716,13541144,"TERMINAL",0,0,"Step 2394, loss: 0.5398277640342712, step time: 19.342899322509766ms\r\n",,terminal_output +16717,13541245,"TERMINAL",0,0,"Step 2395, loss: 0.2177080661058426, step time: 17.88043975830078ms\r\n",,terminal_output +16718,13541304,"TERMINAL",0,0,"Step 2396, loss: 0.27626344561576843, step time: 18.50128173828125ms\r\n",,terminal_output +16719,13541368,"TERMINAL",0,0,"Step 2397, loss: 0.22638072073459625, step time: 17.839431762695312ms\r\n",,terminal_output +16720,13541436,"TERMINAL",0,0,"Step 2398, loss: 0.3524790108203888, step time: 17.661571502685547ms\r\n",,terminal_output +16721,13541494,"TERMINAL",0,0,"Step 2399, loss: 0.2807544469833374, step time: 18.01276206970215ms\r\n",,terminal_output +16722,13541601,"TERMINAL",0,0,"Step 2400, loss: 0.3673376142978668, step time: 18.0356502532959ms\r\nStep 2401, loss: 0.33399730920791626, step time: 17.71259307861328ms\r\n",,terminal_output +16723,13541664,"TERMINAL",0,0,"Step 2402, loss: 0.3066083788871765, step time: 18.27216148376465ms\r\n",,terminal_output +16724,13541727,"TERMINAL",0,0,"Step 2403, loss: 1.7228580713272095, step time: 17.932415008544922ms\r\n",,terminal_output +16725,13541786,"TERMINAL",0,0,"Step 2404, loss: 0.5552637577056885, step time: 18.08619499206543ms\r\n",,terminal_output +16726,13541851,"TERMINAL",0,0,"Step 2405, loss: 0.3928828835487366, step time: 17.650604248046875ms\r\n",,terminal_output +16727,13541906,"TERMINAL",0,0,"Step 2406, loss: 0.711822509765625, step time: 17.487525939941406ms\r\n",,terminal_output +16728,13541995,"TERMINAL",0,0,"Step 2407, loss: 0.397396057844162, step time: 17.518281936645508ms\r\n",,terminal_output +16729,13542105,"TERMINAL",0,0,"Step 2408, loss: 0.44076046347618103, step time: 18.39470863342285ms\r\nStep 2409, loss: 0.21209178864955902, step time: 17.837047576904297ms\r\n",,terminal_output +16730,13542158,"TERMINAL",0,0,"Step 2410, loss: 0.27392178773880005, step time: 17.686128616333008ms\r\n",,terminal_output +16731,13542254,"TERMINAL",0,0,"Step 2411, loss: 0.21876853704452515, step time: 17.85898208618164ms\r\n",,terminal_output +16732,13542360,"TERMINAL",0,0,"Step 2412, loss: 0.32240650057792664, step time: 17.5626277923584ms\r\nStep 2413, loss: 0.31001681089401245, step time: 18.40829849243164ms\r\n",,terminal_output +16733,13542501,"TERMINAL",0,0,"Step 2414, loss: 0.22420020401477814, step time: 18.26786994934082ms\r\nStep 2415, loss: 0.21339085698127747, step time: 17.643451690673828ms\r\n",,terminal_output +16734,13542609,"TERMINAL",0,0,"Step 2416, loss: 0.24338051676750183, step time: 18.010854721069336ms\r\nStep 2417, loss: 0.18562470376491547, step time: 18.058061599731445ms\r\n",,terminal_output +16735,13542703,"TERMINAL",0,0,"Step 2418, loss: 0.2173062413930893, step time: 17.802000045776367ms\r\n",,terminal_output +16736,13542752,"TERMINAL",0,0,"Step 2419, loss: 0.4787009358406067, step time: 17.61770248413086ms\r\n",,terminal_output +16737,13542842,"TERMINAL",0,0,"Step 2420, loss: 0.37045052647590637, step time: 18.154382705688477ms\r\n",,terminal_output +16738,13542896,"TERMINAL",0,0,"Step 2421, loss: 1.0784261226654053, step time: 17.333984375ms\r\n",,terminal_output +16739,13542970,"TERMINAL",0,0,"Step 2422, loss: 0.2720912992954254, step time: 18.276453018188477ms\r\n",,terminal_output +16740,13543022,"TERMINAL",0,0,"Step 2423, 
loss: 0.8318920731544495, step time: 18.268108367919922ms\r\n",,terminal_output +16741,13543120,"TERMINAL",0,0,"Step 2424, loss: 0.232558473944664, step time: 18.30267906188965ms\r\nStep 2425, loss: 0.4434366822242737, step time: 18.09978485107422ms\r\n",,terminal_output +16742,13543182,"TERMINAL",0,0,"Step 2426, loss: 0.25383731722831726, step time: 18.285036087036133ms\r\n",,terminal_output +16743,13543253,"TERMINAL",0,0,"Step 2427, loss: 0.31001853942871094, step time: 18.031835556030273ms\r\n",,terminal_output +16744,13543341,"TERMINAL",0,0,"Step 2428, loss: 0.4083701968193054, step time: 17.872095108032227ms\r\n",,terminal_output +16745,13543450,"TERMINAL",0,0,"Step 2429, loss: 0.28426387906074524, step time: 17.64082908630371ms\r\nStep 2430, loss: 0.49733424186706543, step time: 17.890453338623047ms\r\n",,terminal_output +16746,13543517,"TERMINAL",0,0,"Step 2431, loss: 0.22403228282928467, step time: 17.568349838256836ms\r\n",,terminal_output +16747,13543578,"TERMINAL",0,0,"Step 2432, loss: 0.4761861264705658, step time: 18.105030059814453ms\r\n",,terminal_output +16748,13543637,"TERMINAL",0,0,"Step 2433, loss: 0.3028140366077423, step time: 17.78721809387207ms\r\n",,terminal_output +16749,13543702,"TERMINAL",0,0,"Step 2434, loss: 0.37177547812461853, step time: 17.824411392211914ms\r\n",,terminal_output +16750,13543761,"TERMINAL",0,0,"Step 2435, loss: 1.2201248407363892, step time: 30.276060104370117ms\r\n",,terminal_output +16751,13543824,"TERMINAL",0,0,"Step 2436, loss: 0.354716420173645, step time: 27.53472328186035ms\r\n",,terminal_output +16752,13543884,"TERMINAL",0,0,"Step 2437, loss: 1.4993314743041992, step time: 18.424034118652344ms\r\n",,terminal_output +16753,13543947,"TERMINAL",0,0,"Step 2438, loss: 0.2921009361743927, step time: 18.367290496826172ms\r\n",,terminal_output +16754,13544042,"TERMINAL",0,0,"Step 2439, loss: 0.8718039989471436, step time: 17.617225646972656ms\r\n",,terminal_output +16755,13544140,"TERMINAL",0,0,"Step 2440, loss: 0.4002149701118469, step time: 17.37689971923828ms\r\nStep 2441, loss: 0.20711135864257812, step time: 17.705440521240234ms\r\n",,terminal_output +16756,13544203,"TERMINAL",0,0,"Step 2442, loss: 0.555779218673706, step time: 17.82965660095215ms\r\n",,terminal_output +16757,13544270,"TERMINAL",0,0,"Step 2443, loss: 0.27329930663108826, step time: 17.811059951782227ms\r\n",,terminal_output +16758,13544335,"TERMINAL",0,0,"Step 2444, loss: 0.521682858467102, step time: 18.245458602905273ms\r\n",,terminal_output +16759,13544396,"TERMINAL",0,0,"Step 2445, loss: 0.21590836346149445, step time: 19.51456069946289ms\r\n",,terminal_output +16760,13544455,"TERMINAL",0,0,"Step 2446, loss: 1.0845725536346436, step time: 17.89069175720215ms\r\n",,terminal_output +16761,13544519,"TERMINAL",0,0,"Step 2447, loss: 0.18894755840301514, step time: 17.7004337310791ms\r\n",,terminal_output +16762,13544584,"TERMINAL",0,0,"Step 2448, loss: 0.8939473032951355, step time: 17.686843872070312ms\r\n",,terminal_output +16763,13544679,"TERMINAL",0,0,"Step 2449, loss: 0.4398882985115051, step time: 17.540693283081055ms\r\n",,terminal_output +16764,13544742,"TERMINAL",0,0,"Step 2450, loss: 0.19487307965755463, step time: 18.07546615600586ms\r\n",,terminal_output +16765,13544803,"TERMINAL",0,0,"Step 2451, loss: 0.2166358083486557, step time: 17.626523971557617ms\r\n",,terminal_output +16766,13544867,"TERMINAL",0,0,"Step 2452, loss: 0.995707631111145, step time: 17.688274383544922ms\r\n",,terminal_output +16767,13544943,"TERMINAL",0,0,"Step 2453, loss: 
0.7255429029464722, step time: 17.594099044799805ms\r\n",,terminal_output +16768,13545041,"TERMINAL",0,0,"Step 2454, loss: 0.19169138371944427, step time: 17.76909828186035ms\r\nStep 2455, loss: 0.4434201717376709, step time: 17.740964889526367ms\r\n",,terminal_output +16769,13545100,"TERMINAL",0,0,"Step 2456, loss: 0.2988072335720062, step time: 18.07689666748047ms\r\n",,terminal_output +16770,13545150,"TERMINAL",0,0,"Step 2457, loss: 0.5819079875946045, step time: 17.819643020629883ms\r\n",,terminal_output +16771,13545246,"TERMINAL",0,0,"Step 2458, loss: 0.15396134555339813, step time: 17.796993255615234ms\r\n",,terminal_output +16772,13545350,"TERMINAL",0,0,"Step 2459, loss: 0.3626112937927246, step time: 17.60578155517578ms\r\nStep 2460, loss: 0.2674678862094879, step time: 17.775535583496094ms\r\n",,terminal_output +16773,13545413,"TERMINAL",0,0,"Step 2461, loss: 0.9481694102287292, step time: 17.742395401000977ms\r\n",,terminal_output +16774,13545477,"TERMINAL",0,0,"Step 2462, loss: 0.6834018230438232, step time: 18.028736114501953ms\r\n",,terminal_output +16775,13545542,"TERMINAL",0,0,"Step 2463, loss: 1.087138295173645, step time: 17.47584342956543ms\r\n",,terminal_output +16776,13545607,"TERMINAL",0,0,"Step 2464, loss: 0.3441504240036011, step time: 17.658472061157227ms\r\n",,terminal_output +16777,13545628,"train_dynamics.py",2329,0,"",python,selection_mouse +16778,13545683,"TERMINAL",0,0,"Step 2465, loss: 0.33660897612571716, step time: 20.840167999267578ms\r\n",,terminal_output +16779,13545766,"train_dynamics.py",2324,7,"outputs",python,selection_mouse +16780,13545811,"TERMINAL",0,0,"Step 2466, loss: 0.24370013177394867, step time: 17.768144607543945ms\r\nStep 2467, loss: 0.20914191007614136, step time: 17.84062385559082ms\r\n",,terminal_output +16781,13545850,"TERMINAL",0,0,"Step 2468, loss: 0.26828712224960327, step time: 17.79341697692871ms\r\n",,terminal_output +16782,13545944,"TERMINAL",0,0,"Step 2469, loss: 0.31118905544281006, step time: 17.830610275268555ms\r\n",,terminal_output +16783,13546007,"TERMINAL",0,0,"Step 2470, loss: 0.2251889407634735, step time: 17.673254013061523ms\r\n",,terminal_output +16784,13546026,"train_dynamics.py",2324,8,"outputs[",python,selection_mouse +16785,13546070,"train_dynamics.py",2324,9,"outputs[""",python,selection_mouse +16786,13546089,"train_dynamics.py",2324,21,"outputs[""video_tokens",python,selection_mouse +16787,13546127,"TERMINAL",0,0,"Step 2471, loss: 0.3894631862640381, step time: 17.92168617248535ms\r\nStep 2472, loss: 0.281046986579895, step time: 17.232894897460938ms\r\n",,terminal_output +16788,13546190,"TERMINAL",0,0,"Step 2473, loss: 0.246990367770195, step time: 17.713308334350586ms\r\n",,terminal_output +16789,13546228,"TERMINAL",0,0,"Step 2474, loss: 0.5086336731910706, step time: 17.912864685058594ms\r\n",,terminal_output +16790,13546263,"train_dynamics.py",2324,29,"outputs[""video_tokens""]\n )",python,selection_mouse +16791,13546316,"TERMINAL",0,0,"Step 2475, loss: 1.0877817869186401, step time: 17.566204071044922ms\r\n",,terminal_output +16792,13546378,"TERMINAL",0,0,"Step 2476, loss: 0.2343180924654007, step time: 17.728567123413086ms\r\n",,terminal_output +16793,13546440,"TERMINAL",0,0,"Step 2477, loss: 0.27752208709716797, step time: 17.822265625ms\r\n",,terminal_output +16794,13546504,"TERMINAL",0,0,"Step 2478, loss: 0.3385981023311615, step time: 17.71259307861328ms\r\n",,terminal_output +16795,13546569,"TERMINAL",0,0,"Step 2479, loss: 0.767912745475769, step time: 17.531633377075195ms\r\n",,terminal_output 
+16796,13546604,"train_dynamics.py",2353,0,"",python,selection_mouse +16797,13546617,"train_dynamics.py",2352,0,"",python,selection_command +16798,13546683,"TERMINAL",0,0,"Step 2480, loss: 0.40063241124153137, step time: 18.053531646728516ms\r\nStep 2481, loss: 0.19871315360069275, step time: 17.452001571655273ms\r\n",,terminal_output +16799,13546736,"TERMINAL",0,0,"Step 2482, loss: 0.28724995255470276, step time: 17.767667770385742ms\r\n",,terminal_output +16800,13546831,"TERMINAL",0,0,"Step 2483, loss: 0.6769108176231384, step time: 17.64702796936035ms\r\n",,terminal_output +16801,13546942,"TERMINAL",0,0,"Step 2484, loss: 0.3976140022277832, step time: 17.687559127807617ms\r\nStep 2485, loss: 0.33677080273628235, step time: 17.648935317993164ms\r\n",,terminal_output +16802,13547022,"TERMINAL",0,0,"Step 2486, loss: 0.18667630851268768, step time: 17.90452003479004ms\r\n",,terminal_output +16803,13547081,"TERMINAL",0,0,"Step 2487, loss: 0.24173013865947723, step time: 18.08643341064453ms\r\n",,terminal_output +16804,13547145,"TERMINAL",0,0,"Step 2488, loss: 0.36965394020080566, step time: 17.95172691345215ms\r\n",,terminal_output +16805,13547211,"TERMINAL",0,0,"Step 2489, loss: 0.32635432481765747, step time: 17.816781997680664ms\r\n",,terminal_output +16806,13547275,"TERMINAL",0,0,"Step 2490, loss: 0.3713848292827606, step time: 17.70639419555664ms\r\n",,terminal_output +16807,13547308,"train_dynamics.py",2353,0,"",python,selection_mouse +16808,13547320,"train_dynamics.py",2352,0,"",python,selection_command +16809,13547381,"TERMINAL",0,0,"Step 2491, loss: 0.20094868540763855, step time: 17.61484146118164ms\r\nStep 2492, loss: 0.2681460976600647, step time: 17.943859100341797ms\r\n",,terminal_output +16810,13547435,"TERMINAL",0,0,"Step 2493, loss: 0.8257467746734619, step time: 17.68183708190918ms\r\n",,terminal_output +16811,13547531,"TERMINAL",0,0,"Step 2494, loss: 0.2813432812690735, step time: 17.576217651367188ms\r\n",,terminal_output +16812,13547627,"TERMINAL",0,0,"Step 2495, loss: 0.3093223571777344, step time: 17.735719680786133ms\r\nStep 2496, loss: 0.16126947104930878, step time: 17.83919334411621ms\r\n",,terminal_output +16813,13547687,"TERMINAL",0,0,"Step 2497, loss: 0.21697089076042175, step time: 17.586469650268555ms\r\n",,terminal_output +16814,13547754,"TERMINAL",0,0,"Step 2498, loss: 0.8674336671829224, step time: 18.108129501342773ms\r\n",,terminal_output +16815,13547814,"TERMINAL",0,0,"Step 2499, loss: 0.5939723253250122, step time: 17.5931453704834ms\r\n",,terminal_output +16816,13548230,"train_dynamics.py",2242,0,"",python,selection_mouse +16817,13548873,"train_dynamics.py",2353,0,"",python,selection_mouse +16818,13548882,"train_dynamics.py",2352,0,"",python,selection_command +16819,13550493,"TERMINAL",0,0,"Step 2500, loss: 0.23694057762622833, step time: 35.80069541931152ms\r\n",,terminal_output +16820,13550599,"TERMINAL",0,0,"Step 2501, loss: 0.40301084518432617, step time: 25.50506591796875ms\r\n",,terminal_output +16821,13550662,"TERMINAL",0,0,"Step 2502, loss: 0.19108597934246063, step time: 20.244598388671875ms\r\n",,terminal_output +16822,13550769,"TERMINAL",0,0,"Step 2503, loss: 0.4383196532726288, step time: 19.32072639465332ms\r\nStep 2504, loss: 0.2770060896873474, step time: 18.471240997314453ms\r\n",,terminal_output +16823,13550833,"TERMINAL",0,0,"Step 2505, loss: 0.23104780912399292, step time: 18.880605697631836ms\r\n",,terminal_output +16824,13550950,"TERMINAL",0,0,"Step 2506, loss: 0.3772428631782532, step time: 20.842313766479492ms\r\nStep 2507, loss: 
0.3113582730293274, step time: 18.485307693481445ms\r\n",,terminal_output +16825,13551012,"TERMINAL",0,0,"Step 2508, loss: 0.23650884628295898, step time: 18.269777297973633ms\r\n",,terminal_output +16826,13551140,"TERMINAL",0,0,"Step 2509, loss: 0.39187657833099365, step time: 18.21303367614746ms\r\nStep 2510, loss: 0.17541532218456268, step time: 17.79460906982422ms\r\n",,terminal_output +16827,13551240,"TERMINAL",0,0,"Step 2511, loss: 0.4957045018672943, step time: 18.670320510864258ms\r\n",,terminal_output +16828,13551302,"TERMINAL",0,0,"Step 2512, loss: 0.18166494369506836, step time: 17.94266700744629ms\r\n",,terminal_output +16829,13551320,"train_dynamics.py",2364,0,"",python,selection_mouse +16830,13551399,"TERMINAL",0,0,"Step 2513, loss: 1.1988310813903809, step time: 25.472164154052734ms\r\nStep 2514, loss: 0.2915380299091339, step time: 20.77460289001465ms\r\n",,terminal_output +16831,13551495,"TERMINAL",0,0,"Step 2515, loss: 0.9143955707550049, step time: 19.298315048217773ms\r\n",,terminal_output +16832,13551557,"TERMINAL",0,0,"Step 2516, loss: 0.555522084236145, step time: 18.373966217041016ms\r\n",,terminal_output +16833,13551630,"TERMINAL",0,0,"Step 2517, loss: 0.47548454999923706, step time: 18.653154373168945ms\r\n",,terminal_output +16834,13551689,"TERMINAL",0,0,"Step 2518, loss: 1.1115498542785645, step time: 17.986536026000977ms\r\n",,terminal_output +16835,13551749,"TERMINAL",0,0,"Step 2519, loss: 0.23110060393810272, step time: 18.18060874938965ms\r\n",,terminal_output +16836,13551818,"TERMINAL",0,0,"Step 2520, loss: 0.36200177669525146, step time: 18.21613311767578ms\r\n",,terminal_output +16837,13551875,"TERMINAL",0,0,"Step 2521, loss: 0.2321314662694931, step time: 18.17011833190918ms\r\n",,terminal_output +16838,13551915,"train_dynamics.py",2353,0,"",python,selection_mouse +16839,13551926,"train_dynamics.py",2352,0,"",python,selection_command +16840,13551986,"TERMINAL",0,0,"Step 2522, loss: 0.2209942787885666, step time: 17.856359481811523ms\r\nStep 2523, loss: 0.6311133503913879, step time: 18.594980239868164ms\r\n",,terminal_output +16841,13552039,"TERMINAL",0,0,"Step 2524, loss: 0.2538274824619293, step time: 17.746686935424805ms\r\n",,terminal_output +16842,13552135,"TERMINAL",0,0,"Step 2525, loss: 0.3357591927051544, step time: 18.103361129760742ms\r\n",,terminal_output +16843,13552188,"TERMINAL",0,0,"Step 2526, loss: 0.9961915016174316, step time: 18.295764923095703ms\r\n",,terminal_output +16844,13552295,"TERMINAL",0,0,"Step 2527, loss: 0.5713459253311157, step time: 18.0513858795166ms\r\nStep 2528, loss: 0.22438445687294006, step time: 17.728805541992188ms\r\n",,terminal_output +16845,13552357,"TERMINAL",0,0,"Step 2529, loss: 0.35434192419052124, step time: 18.34869384765625ms\r\n",,terminal_output +16846,13552418,"TERMINAL",0,0,"Step 2530, loss: 0.2888212203979492, step time: 18.40519905090332ms\r\n",,terminal_output +16847,13552482,"TERMINAL",0,0,"Step 2531, loss: 0.6511371731758118, step time: 17.601966857910156ms\r\n",,terminal_output +16848,13552542,"TERMINAL",0,0,"Step 2532, loss: 0.6327691078186035, step time: 18.136262893676758ms\r\n",,terminal_output +16849,13552603,"TERMINAL",0,0,"Step 2533, loss: 0.19887857139110565, step time: 17.57025718688965ms\r\n",,terminal_output +16850,13552664,"TERMINAL",0,0,"Step 2534, loss: 0.2430071085691452, step time: 17.94266700744629ms\r\n",,terminal_output +16851,13552727,"TERMINAL",0,0,"Step 2535, loss: 0.2695155441761017, step time: 18.18537712097168ms\r\n",,terminal_output +16852,13552789,"TERMINAL",0,0,"Step 
2536, loss: 0.8017718195915222, step time: 17.788171768188477ms\r\n",,terminal_output +16853,13552853,"TERMINAL",0,0,"Step 2537, loss: 0.23384274542331696, step time: 17.513275146484375ms\r\n",,terminal_output +16854,13552924,"TERMINAL",0,0,"Step 2538, loss: 0.4006144404411316, step time: 18.22495460510254ms\r\n",,terminal_output +16855,13552973,"train_dynamics.py",2331,0,"",python,selection_mouse +16856,13552996,"TERMINAL",0,0,"Step 2539, loss: 0.2407323718070984, step time: 18.534421920776367ms\r\n",,terminal_output +16857,13553050,"TERMINAL",0,0,"Step 2540, loss: 0.2945456802845001, step time: 18.587827682495117ms\r\n",,terminal_output +16858,13553147,"train_dynamics.py",2324,7,"outputs",python,selection_mouse +16859,13553160,"TERMINAL",0,0,"Step 2541, loss: 0.15294049680233002, step time: 18.079519271850586ms\r\n",,terminal_output +16860,13553253,"TERMINAL",0,0,"Step 2542, loss: 0.21158786118030548, step time: 18.076419830322266ms\r\nStep 2543, loss: 0.2533012330532074, step time: 17.739057540893555ms\r\n",,terminal_output +16861,13553302,"TERMINAL",0,0,"Step 2544, loss: 0.19464199244976044, step time: 18.202543258666992ms\r\n",,terminal_output +16862,13553380,"TERMINAL",0,0,"Step 2545, loss: 0.28119248151779175, step time: 17.602205276489258ms\r\n",,terminal_output +16863,13553442,"TERMINAL",0,0,"Step 2546, loss: 0.10428482294082642, step time: 17.784595489501953ms\r\n",,terminal_output +16864,13553505,"TERMINAL",0,0,"Step 2547, loss: 0.44335928559303284, step time: 18.131494522094727ms\r\n",,terminal_output +16865,13553571,"TERMINAL",0,0,"Step 2548, loss: 0.2694486677646637, step time: 17.811059951782227ms\r\n",,terminal_output +16866,13553679,"TERMINAL",0,0,"Step 2549, loss: 0.34113115072250366, step time: 17.372608184814453ms\r\nStep 2550, loss: 0.16714102029800415, step time: 18.31221580505371ms\r\n",,terminal_output +16867,13553740,"TERMINAL",0,0,"Step 2551, loss: 0.18422532081604004, step time: 18.768787384033203ms\r\n",,terminal_output +16868,13553802,"TERMINAL",0,0,"Step 2552, loss: 0.33946487307548523, step time: 18.047809600830078ms\r\n",,terminal_output +16869,13553864,"TERMINAL",0,0,"Step 2553, loss: 0.2752811908721924, step time: 18.042564392089844ms\r\n",,terminal_output +16870,13553929,"TERMINAL",0,0,"Step 2554, loss: 0.48559248447418213, step time: 17.749786376953125ms\r\n",,terminal_output +16871,13554070,"TERMINAL",0,0,"Step 2555, loss: 0.2568955719470978, step time: 17.51708984375ms\r\nStep 2556, loss: 1.4839645624160767, step time: 18.23735237121582ms\r\n",,terminal_output +16872,13554179,"TERMINAL",0,0,"Step 2557, loss: 0.40146204829216003, step time: 17.441272735595703ms\r\nStep 2558, loss: 0.32579305768013, step time: 17.91524887084961ms\r\n",,terminal_output +16873,13554271,"TERMINAL",0,0,"Step 2559, loss: 0.36907958984375, step time: 17.94719696044922ms\r\n",,terminal_output +16874,13554333,"TERMINAL",0,0,"Step 2560, loss: 0.7128666043281555, step time: 17.699241638183594ms\r\n",,terminal_output +16875,13554393,"TERMINAL",0,0,"Step 2561, loss: 0.18764039874076843, step time: 17.57025718688965ms\r\n",,terminal_output +16876,13554458,"TERMINAL",0,0,"Step 2562, loss: 0.2519644796848297, step time: 18.110990524291992ms\r\n",,terminal_output +16877,13554517,"TERMINAL",0,0,"Step 2563, loss: 0.3545781970024109, step time: 17.69113540649414ms\r\n",,terminal_output +16878,13554569,"TERMINAL",0,0,"Step 2564, loss: 0.1849375218153, step time: 17.902135848999023ms\r\n",,terminal_output +16879,13554663,"TERMINAL",0,0,"Step 2565, loss: 0.2811315059661865, step time: 
18.177270889282227ms\r\n",,terminal_output +16880,13554748,"TERMINAL",0,0,"Step 2566, loss: 0.21877340972423553, step time: 17.660140991210938ms\r\nStep 2567, loss: 0.4062005579471588, step time: 17.573833465576172ms\r\n",,terminal_output +16881,13554847,"TERMINAL",0,0,"Step 2568, loss: 0.2032184898853302, step time: 18.35489273071289ms\r\n",,terminal_output +16882,13554904,"TERMINAL",0,0,"Step 2569, loss: 1.4814032316207886, step time: 17.54140853881836ms\r\n",,terminal_output +16883,13554963,"TERMINAL",0,0,"Step 2570, loss: 0.7747229933738708, step time: 17.756223678588867ms\r\n",,terminal_output +16884,13555025,"TERMINAL",0,0,"Step 2571, loss: 0.5724214911460876, step time: 18.31960678100586ms\r\n",,terminal_output +16885,13555083,"TERMINAL",0,0,"Step 2572, loss: 0.5582448244094849, step time: 17.72308349609375ms\r\n",,terminal_output +16886,13555192,"TERMINAL",0,0,"Step 2573, loss: 0.343054860830307, step time: 19.625186920166016ms\r\nStep 2574, loss: 0.3042224645614624, step time: 18.52250099182129ms\r\n",,terminal_output +16887,13555255,"TERMINAL",0,0,"Step 2575, loss: 0.1825735718011856, step time: 17.646312713623047ms\r\n",,terminal_output +16888,13555381,"TERMINAL",0,0,"Step 2576, loss: 0.28612983226776123, step time: 17.664194107055664ms\r\nStep 2577, loss: 0.30394119024276733, step time: 18.06330680847168ms\r\n",,terminal_output +16889,13555443,"TERMINAL",0,0,"Step 2578, loss: 0.24177882075309753, step time: 18.15629005432129ms\r\n",,terminal_output +16890,13555507,"TERMINAL",0,0,"Step 2579, loss: 0.1848173439502716, step time: 17.339229583740234ms\r\n",,terminal_output +16891,13555605,"TERMINAL",0,0,"Step 2580, loss: 0.23285725712776184, step time: 18.329858779907227ms\r\n",,terminal_output +16892,13555656,"TERMINAL",0,0,"Step 2581, loss: 0.20600569248199463, step time: 17.55523681640625ms\r\n",,terminal_output +16893,13555751,"TERMINAL",0,0,"Step 2582, loss: 0.18133485317230225, step time: 17.951250076293945ms\r\n",,terminal_output +16894,13556053,"TERMINAL",0,0,"Step 2583, loss: 0.674052894115448, step time: 352.0512580871582ms\r\n",,terminal_output +16895,13556128,"TERMINAL",0,0,"Step 2584, loss: 0.18752315640449524, step time: 28.48339080810547ms\r\n",,terminal_output +16896,13556228,"TERMINAL",0,0,"Step 2585, loss: 0.2525848150253296, step time: 20.823240280151367ms\r\n",,terminal_output +16897,13556288,"TERMINAL",0,0,"Step 2586, loss: 0.26777440309524536, step time: 19.35434341430664ms\r\n",,terminal_output +16898,13556394,"TERMINAL",0,0,"Step 2587, loss: 0.20550543069839478, step time: 18.40996742248535ms\r\nStep 2588, loss: 0.6074584722518921, step time: 18.39733123779297ms\r\n",,terminal_output +16899,13556455,"TERMINAL",0,0,"Step 2589, loss: 0.1695084422826767, step time: 18.942832946777344ms\r\n",,terminal_output +16900,13556568,"TERMINAL",0,0,"Step 2590, loss: 0.15662984549999237, step time: 18.01156997680664ms\r\nStep 2591, loss: 0.23719733953475952, step time: 18.05734634399414ms\r\n",,terminal_output +16901,13556661,"TERMINAL",0,0,"Step 2592, loss: 0.29575589299201965, step time: 18.552780151367188ms\r\n",,terminal_output +16902,13556713,"TERMINAL",0,0,"Step 2593, loss: 0.44760605692863464, step time: 18.445253372192383ms\r\n",,terminal_output +16903,13556805,"TERMINAL",0,0,"Step 2594, loss: 0.17686951160430908, step time: 18.063068389892578ms\r\n",,terminal_output +16904,13556857,"TERMINAL",0,0,"Step 2595, loss: 0.17139698565006256, step time: 18.62311363220215ms\r\n",,terminal_output +16905,13556908,"TERMINAL",0,0,"Step 2596, loss: 0.21437455713748932, step 
time: 18.08333396911621ms\r\n",,terminal_output +16906,13557014,"TERMINAL",0,0,"Step 2597, loss: 0.29278114438056946, step time: 18.268823623657227ms\r\nStep 2598, loss: 0.2721705138683319, step time: 18.498897552490234ms\r\n",,terminal_output +16907,13557137,"TERMINAL",0,0,"Step 2599, loss: 0.8974171876907349, step time: 44.338226318359375ms\r\nStep 2600, loss: 0.23054291307926178, step time: 19.32668685913086ms\r\n",,terminal_output +16908,13557203,"TERMINAL",0,0,"Step 2601, loss: 0.3059785068035126, step time: 26.286840438842773ms\r\n",,terminal_output +16909,13557271,"TERMINAL",0,0,"Step 2602, loss: 0.16802261769771576, step time: 20.28179168701172ms\r\n",,terminal_output +16910,13557333,"TERMINAL",0,0,"Step 2603, loss: 0.6454927325248718, step time: 17.966508865356445ms\r\n",,terminal_output +16911,13557349,"train_dynamics.py",2353,0,"",python,selection_mouse +16912,13557349,"train_dynamics.py",2352,0,"",python,selection_command +16913,13557406,"TERMINAL",0,0,"Step 2604, loss: 0.26318973302841187, step time: 18.532991409301758ms\r\n",,terminal_output +16914,13557472,"TERMINAL",0,0,"Step 2605, loss: 0.2577773630619049, step time: 18.232107162475586ms\r\n",,terminal_output +16915,13557532,"TERMINAL",0,0,"Step 2606, loss: 0.3453039526939392, step time: 18.097639083862305ms\r\n",,terminal_output +16916,13557607,"TERMINAL",0,0,"Step 2607, loss: 0.4826253652572632, step time: 18.476486206054688ms\r\n",,terminal_output +16917,13557659,"TERMINAL",0,0,"Step 2608, loss: 0.33398497104644775, step time: 17.854928970336914ms\r\n",,terminal_output +16918,13557725,"TERMINAL",0,0,"Step 2609, loss: 0.15593813359737396, step time: 18.441438674926758ms\r\n",,terminal_output +16919,13557786,"TERMINAL",0,0,"Step 2610, loss: 0.5252047777175903, step time: 18.471717834472656ms\r\n",,terminal_output +16920,13557846,"TERMINAL",0,0,"Step 2611, loss: 0.582653284072876, step time: 17.833232879638672ms\r\n",,terminal_output +16921,13557909,"TERMINAL",0,0,"Step 2612, loss: 0.34134626388549805, step time: 17.89402961730957ms\r\n",,terminal_output +16922,13557971,"TERMINAL",0,0,"Step 2613, loss: 0.16476893424987793, step time: 18.987655639648438ms\r\n",,terminal_output +16923,13558040,"train_dynamics.py",2327,0,"",python,selection_mouse +16924,13558051,"TERMINAL",0,0,"Step 2614, loss: 0.2939769923686981, step time: 17.92430877685547ms\r\n",,terminal_output +16925,13558104,"TERMINAL",0,0,"Step 2615, loss: 0.23480260372161865, step time: 17.844438552856445ms\r\n",,terminal_output +16926,13558157,"TERMINAL",0,0,"Step 2616, loss: 0.155042365193367, step time: 18.424034118652344ms\r\n",,terminal_output +16927,13558221,"train_dynamics.py",2324,7,"outputs",python,selection_mouse +16928,13558296,"TERMINAL",0,0,"Step 2617, loss: 0.17882020771503448, step time: 18.345117568969727ms\r\nStep 2618, loss: 1.2384693622589111, step time: 17.957210540771484ms\r\n",,terminal_output +16929,13558444,"TERMINAL",0,0,"Step 2619, loss: 0.5005735158920288, step time: 18.296241760253906ms\r\nStep 2620, loss: 0.23195335268974304, step time: 18.071651458740234ms\r\n",,terminal_output +16930,13558547,"TERMINAL",0,0,"Step 2621, loss: 0.3308248817920685, step time: 18.373489379882812ms\r\nStep 2622, loss: 0.4282743036746979, step time: 18.387556076049805ms\r\n",,terminal_output +16931,13558653,"TERMINAL",0,0,"Step 2623, loss: 0.22139941155910492, step time: 17.93813705444336ms\r\n",,terminal_output +16932,13558667,"TERMINAL",0,0,"Step 2624, loss: 0.17555977404117584, step time: 17.772197723388672ms\r\n",,terminal_output 
+16933,13558766,"TERMINAL",0,0,"Step 2625, loss: 0.37548720836639404, step time: 18.465042114257812ms\r\n",,terminal_output +16934,13558831,"TERMINAL",0,0,"Step 2626, loss: 0.4392528831958771, step time: 17.939329147338867ms\r\n",,terminal_output +16935,13558892,"TERMINAL",0,0,"Step 2627, loss: 0.16786400973796844, step time: 23.285627365112305ms\r\n",,terminal_output +16936,13558957,"TERMINAL",0,0,"Step 2628, loss: 0.15586282312870026, step time: 18.225908279418945ms\r\n",,terminal_output +16937,13559064,"TERMINAL",0,0,"Step 2629, loss: 0.2535324990749359, step time: 18.1577205657959ms\r\nStep 2630, loss: 0.20156195759773254, step time: 17.718791961669922ms\r\n",,terminal_output +16938,13559126,"TERMINAL",0,0,"Step 2631, loss: 0.39983901381492615, step time: 18.40949058532715ms\r\n",,terminal_output +16939,13559192,"TERMINAL",0,0,"Step 2632, loss: 0.20725876092910767, step time: 17.818212509155273ms\r\n",,terminal_output +16940,13559256,"TERMINAL",0,0,"Step 2633, loss: 0.14757901430130005, step time: 18.137693405151367ms\r\n",,terminal_output +16941,13559318,"TERMINAL",0,0,"Step 2634, loss: 0.22646737098693848, step time: 18.455982208251953ms\r\n",,terminal_output +16942,13559380,"TERMINAL",0,0,"Step 2635, loss: 0.3126131296157837, step time: 17.81916618347168ms\r\n",,terminal_output +16943,13559431,"TERMINAL",0,0,"Step 2636, loss: 0.34508100152015686, step time: 17.882108688354492ms\r\n",,terminal_output +16944,13559524,"TERMINAL",0,0,"Step 2637, loss: 0.6682013273239136, step time: 18.42784881591797ms\r\n",,terminal_output +16945,13559622,"TERMINAL",0,0,"Step 2638, loss: 0.5020005106925964, step time: 17.91858673095703ms\r\nStep 2639, loss: 0.17577679455280304, step time: 17.990827560424805ms\r\n",,terminal_output +16946,13559683,"TERMINAL",0,0,"Step 2640, loss: 0.15713410079479218, step time: 18.247365951538086ms\r\n",,terminal_output +16947,13559743,"TERMINAL",0,0,"Step 2641, loss: 1.1651676893234253, step time: 17.994165420532227ms\r\n",,terminal_output +16948,13559871,"TERMINAL",0,0,"Step 2642, loss: 0.29048722982406616, step time: 18.245935440063477ms\r\nStep 2643, loss: 0.12499335408210754, step time: 18.331527709960938ms\r\n",,terminal_output +16949,13559968,"TERMINAL",0,0,"Step 2644, loss: 0.28260526061058044, step time: 17.71402359008789ms\r\n",,terminal_output +16950,13560035,"TERMINAL",0,0,"Step 2645, loss: 0.2166566550731659, step time: 18.049955368041992ms\r\n",,terminal_output +16951,13560105,"TERMINAL",0,0,"Step 2646, loss: 0.20939753949642181, step time: 18.22662353515625ms\r\n",,terminal_output +16952,13560158,"TERMINAL",0,0,"Step 2647, loss: 0.9039793014526367, step time: 17.460346221923828ms\r\n",,terminal_output +16953,13560221,"TERMINAL",0,0,"Step 2648, loss: 0.16000531613826752, step time: 17.859697341918945ms\r\n",,terminal_output +16954,13560279,"TERMINAL",0,0,"Step 2649, loss: 0.20723453164100647, step time: 18.42808723449707ms\r\n",,terminal_output +16955,13560343,"TERMINAL",0,0,"Step 2650, loss: 1.1812547445297241, step time: 18.24021339416504ms\r\n",,terminal_output +16956,13560404,"TERMINAL",0,0,"Step 2651, loss: 0.18036164343357086, step time: 18.407344818115234ms\r\n",,terminal_output +16957,13560465,"TERMINAL",0,0,"Step 2652, loss: 0.16894541680812836, step time: 18.635988235473633ms\r\n",,terminal_output +16958,13560526,"TERMINAL",0,0,"Step 2653, loss: 0.17502716183662415, step time: 18.00704002380371ms\r\n",,terminal_output +16959,13560589,"TERMINAL",0,0,"Step 2654, loss: 0.3884369730949402, step time: 17.775774002075195ms\r\n",,terminal_output 
+16960,13560649,"TERMINAL",0,0,"Step 2655, loss: 0.4772644639015198, step time: 17.978906631469727ms\r\n",,terminal_output +16961,13560763,"TERMINAL",0,0,"Step 2656, loss: 0.5941383242607117, step time: 17.791271209716797ms\r\nStep 2657, loss: 0.18465761840343475, step time: 17.966270446777344ms\r\n",,terminal_output +16962,13560830,"TERMINAL",0,0,"Step 2658, loss: 0.7992832660675049, step time: 18.9821720123291ms\r\n",,terminal_output +16963,13560890,"TERMINAL",0,0,"Step 2659, loss: 0.16853882372379303, step time: 17.55690574645996ms\r\n",,terminal_output +16964,13560995,"TERMINAL",0,0,"Step 2660, loss: 0.16238969564437866, step time: 17.969608306884766ms\r\n",,terminal_output +16965,13561056,"TERMINAL",0,0,"Step 2661, loss: 0.4903051555156708, step time: 18.349170684814453ms\r\n",,terminal_output +16966,13561116,"TERMINAL",0,0,"Step 2662, loss: 0.2257869988679886, step time: 17.766237258911133ms\r\n",,terminal_output +16967,13561176,"TERMINAL",0,0,"Step 2663, loss: 0.40308141708374023, step time: 17.659664154052734ms\r\n",,terminal_output +16968,13561284,"TERMINAL",0,0,"Step 2664, loss: 0.14523695409297943, step time: 18.177270889282227ms\r\nStep 2665, loss: 0.26238155364990234, step time: 18.091440200805664ms\r\n",,terminal_output +16969,13561347,"TERMINAL",0,0,"Step 2666, loss: 0.20048002898693085, step time: 17.750978469848633ms\r\n",,terminal_output +16970,13561410,"TERMINAL",0,0,"Step 2667, loss: 0.2921615242958069, step time: 17.963647842407227ms\r\n",,terminal_output +16971,13561528,"TERMINAL",0,0,"Step 2668, loss: 0.33567824959754944, step time: 17.708301544189453ms\r\nStep 2669, loss: 0.25125133991241455, step time: 18.0819034576416ms\r\n",,terminal_output +16972,13561632,"TERMINAL",0,0,"Step 2670, loss: 0.468095988035202, step time: 18.364667892456055ms\r\n",,terminal_output +16973,13561693,"TERMINAL",0,0,"Step 2671, loss: 0.25800615549087524, step time: 17.655611038208008ms\r\n",,terminal_output +16974,13561753,"TERMINAL",0,0,"Step 2672, loss: 0.1621190309524536, step time: 17.696380615234375ms\r\n",,terminal_output +16975,13561824,"TERMINAL",0,0,"Step 2673, loss: 1.6828902959823608, step time: 18.46003532409668ms\r\n",,terminal_output +16976,13561909,"TERMINAL",0,0,"Step 2674, loss: 0.24716641008853912, step time: 17.78554916381836ms\r\n",,terminal_output +16977,13561931,"TERMINAL",0,0,"Step 2675, loss: 0.16784298419952393, step time: 17.652273178100586ms\r\n",,terminal_output +16978,13561994,"TERMINAL",0,0,"Step 2676, loss: 0.7379910945892334, step time: 18.47386360168457ms\r\n",,terminal_output +16979,13562053,"TERMINAL",0,0,"Step 2677, loss: 0.22492900490760803, step time: 17.896413803100586ms\r\n",,terminal_output +16980,13562115,"TERMINAL",0,0,"Step 2678, loss: 0.287945955991745, step time: 17.885684967041016ms\r\n",,terminal_output +16981,13562175,"TERMINAL",0,0,"Step 2679, loss: 0.13375221192836761, step time: 18.260955810546875ms\r\n",,terminal_output +16982,13562247,"TERMINAL",0,0,"Step 2680, loss: 0.27841508388519287, step time: 17.97628402709961ms\r\n",,terminal_output +16983,13562316,"TERMINAL",0,0,"Step 2681, loss: 0.14295557141304016, step time: 18.713712692260742ms\r\n",,terminal_output +16984,13562368,"TERMINAL",0,0,"Step 2682, loss: 0.2634340524673462, step time: 18.416404724121094ms\r\n",,terminal_output +16985,13562461,"TERMINAL",0,0,"Step 2683, loss: 0.20214100182056427, step time: 17.416715621948242ms\r\n",,terminal_output +16986,13562522,"TERMINAL",0,0,"Step 2684, loss: 0.18273299932479858, step time: 17.74764060974121ms\r\n",,terminal_output 
+16987,13562583,"TERMINAL",0,0,"Step 2685, loss: 0.892434298992157, step time: 18.07570457458496ms\r\n",,terminal_output +16988,13562638,"train_dynamics.py",2083,0,"",python,selection_mouse +16989,13562648,"TERMINAL",0,0,"Step 2686, loss: 0.15378102660179138, step time: 17.691612243652344ms\r\n",,terminal_output +16990,13562699,"TERMINAL",0,0,"Step 2687, loss: 0.15151512622833252, step time: 17.39192008972168ms\r\n",,terminal_output +16991,13562762,"TERMINAL",0,0,"Step 2688, loss: 0.14238972961902618, step time: 18.100976943969727ms\r\n",,terminal_output +16992,13562819,"TERMINAL",0,0,"Step 2689, loss: 0.14696411788463593, step time: 17.574310302734375ms\r\n",,terminal_output +16993,13562879,"TERMINAL",0,0,"Step 2690, loss: 0.32095977663993835, step time: 17.754316329956055ms\r\n",,terminal_output +16994,13562939,"TERMINAL",0,0,"Step 2691, loss: 0.24188393354415894, step time: 18.100738525390625ms\r\n",,terminal_output +16995,13563000,"TERMINAL",0,0,"Step 2692, loss: 0.31341004371643066, step time: 18.18060874938965ms\r\n",,terminal_output +16996,13563041,"train_dynamics.py",2092,0,"",python,selection_mouse +16997,13563083,"TERMINAL",0,0,"Step 2693, loss: 0.409140020608902, step time: 18.10479164123535ms\r\n",,terminal_output +16998,13563149,"TERMINAL",0,0,"Step 2694, loss: 0.6174647808074951, step time: 18.403053283691406ms\r\n",,terminal_output +16999,13563188,"TERMINAL",0,0,"Step 2695, loss: 0.21544089913368225, step time: 17.655134201049805ms\r\n",,terminal_output +17000,13563281,"TERMINAL",0,0,"Step 2696, loss: 0.2638944983482361, step time: 17.961740493774414ms\r\n",,terminal_output +17001,13563386,"TERMINAL",0,0,"Step 2697, loss: 0.14433066546916962, step time: 18.199682235717773ms\r\nStep 2698, loss: 0.2628761827945709, step time: 17.9593563079834ms\r\n",,terminal_output +17002,13563449,"TERMINAL",0,0,"Step 2699, loss: 0.9488570690155029, step time: 17.669200897216797ms\r\n",,terminal_output +17003,13563510,"TERMINAL",0,0,"Step 2700, loss: 0.17012514173984528, step time: 18.40686798095703ms\r\n",,terminal_output +17004,13563571,"TERMINAL",0,0,"Step 2701, loss: 0.35085830092430115, step time: 17.653942108154297ms\r\n",,terminal_output +17005,13563634,"TERMINAL",0,0,"Step 2702, loss: 0.21179476380348206, step time: 17.701387405395508ms\r\n",,terminal_output +17006,13563701,"TERMINAL",0,0,"Step 2703, loss: 0.12332545220851898, step time: 18.08333396911621ms\r\n",,terminal_output +17007,13563831,"train_dynamics.py",2043,0,"",python,selection_mouse +17008,13564003,"TERMINAL",0,0,"Step 2704, loss: 0.13199853897094727, step time: 302.140474319458ms\r\n",,terminal_output +17009,13564020,"train_dynamics.py",2040,7,"outputs",python,selection_mouse +17010,13564073,"TERMINAL",0,0,"Step 2705, loss: 0.4148947596549988, step time: 25.16341209411621ms\r\n",,terminal_output +17011,13564176,"TERMINAL",0,0,"Step 2706, loss: 0.31369420886039734, step time: 20.04408836364746ms\r\n",,terminal_output +17012,13564330,"TERMINAL",0,0,"Step 2707, loss: 0.2745029628276825, step time: 18.705368041992188ms\r\nStep 2708, loss: 0.5578873157501221, step time: 18.343210220336914ms\r\nStep 2709, loss: 0.14510567486286163, step time: 17.81630516052246ms\r\n",,terminal_output +17013,13564427,"TERMINAL",0,0,"Step 2710, loss: 0.14659716188907623, step time: 18.302440643310547ms\r\n",,terminal_output +17014,13564534,"TERMINAL",0,0,"Step 2711, loss: 0.19192622601985931, step time: 17.57216453552246ms\r\nStep 2712, loss: 0.21472394466400146, step time: 17.850399017333984ms\r\n",,terminal_output 
+17015,13564597,"TERMINAL",0,0,"Step 2713, loss: 0.1738164722919464, step time: 18.093109130859375ms\r\n",,terminal_output +17016,13564661,"TERMINAL",0,0,"Step 2714, loss: 0.15187755227088928, step time: 18.57781410217285ms\r\n",,terminal_output +17017,13564724,"TERMINAL",0,0,"Step 2715, loss: 0.12462299317121506, step time: 17.5325870513916ms\r\n",,terminal_output +17018,13564811,"TERMINAL",0,0,"Step 2716, loss: 0.20750918984413147, step time: 18.119096755981445ms\r\n",,terminal_output +17019,13564916,"TERMINAL",0,0,"Step 2717, loss: 0.13276733458042145, step time: 17.74883270263672ms\r\nStep 2718, loss: 0.20480526983737946, step time: 17.76742935180664ms\r\n",,terminal_output +17020,13565035,"TERMINAL",0,0,"Step 2719, loss: 1.3045417070388794, step time: 18.105745315551758ms\r\nStep 2720, loss: 0.41128188371658325, step time: 17.779111862182617ms\r\n",,terminal_output +17021,13565161,"TERMINAL",0,0,"Step 2721, loss: 0.18111132085323334, step time: 17.882347106933594ms\r\nStep 2722, loss: 0.24079272150993347, step time: 18.318891525268555ms\r\n",,terminal_output +17022,13565224,"TERMINAL",0,0,"Step 2723, loss: 0.4342818558216095, step time: 17.772436141967773ms\r\n",,terminal_output +17023,13565354,"TERMINAL",0,0,"Step 2724, loss: 0.21307845413684845, step time: 18.062829971313477ms\r\nStep 2725, loss: 0.3033878207206726, step time: 17.83275604248047ms\r\n",,terminal_output +17024,13565459,"TERMINAL",0,0,"Step 2726, loss: 0.15065665543079376, step time: 17.745494842529297ms\r\n",,terminal_output +17025,13565520,"TERMINAL",0,0,"Step 2727, loss: 0.6188046932220459, step time: 17.79460906982422ms\r\n",,terminal_output +17026,13565580,"TERMINAL",0,0,"Step 2728, loss: 0.15362948179244995, step time: 17.964839935302734ms\r\n",,terminal_output +17027,13565631,"TERMINAL",0,0,"Step 2729, loss: 0.2779793441295624, step time: 17.42410659790039ms\r\n",,terminal_output +17028,13565725,"TERMINAL",0,0,"Step 2730, loss: 0.3098108470439911, step time: 17.807960510253906ms\r\n",,terminal_output +17029,13565776,"TERMINAL",0,0,"Step 2731, loss: 0.1130596324801445, step time: 17.69399642944336ms\r\n",,terminal_output +17030,13565828,"TERMINAL",0,0,"Step 2732, loss: 0.26953455805778503, step time: 17.59624481201172ms\r\n",,terminal_output +17031,13565933,"TERMINAL",0,0,"Step 2733, loss: 0.5235747694969177, step time: 17.395734786987305ms\r\nStep 2734, loss: 0.14278095960617065, step time: 18.08333396911621ms\r\n",,terminal_output +17032,13566025,"TERMINAL",0,0,"Step 2735, loss: 0.7346162796020508, step time: 17.623186111450195ms\r\n",,terminal_output +17033,13566076,"TERMINAL",0,0,"Step 2736, loss: 0.20954230427742004, step time: 17.796993255615234ms\r\n",,terminal_output +17034,13566183,"TERMINAL",0,0,"Step 2737, loss: 0.12524132430553436, step time: 18.07689666748047ms\r\nStep 2738, loss: 0.3654278814792633, step time: 17.815828323364258ms\r\n",,terminal_output +17035,13566281,"TERMINAL",0,0,"Step 2739, loss: 0.22142331302165985, step time: 17.565250396728516ms\r\n",,terminal_output +17036,13566332,"TERMINAL",0,0,"Step 2740, loss: 0.17293913662433624, step time: 17.989635467529297ms\r\n",,terminal_output +17037,13566358,"train_dynamics.py",2043,0,"",python,selection_mouse +17038,13566445,"TERMINAL",0,0,"Step 2741, loss: 0.15587474405765533, step time: 17.57669448852539ms\r\nStep 2742, loss: 0.33090075850486755, step time: 17.772436141967773ms\r\n",,terminal_output +17039,13566507,"TERMINAL",0,0,"Step 2743, loss: 0.10985183715820312, step time: 17.77791976928711ms\r\n",,terminal_output 
+17040,13566586,"TERMINAL",0,0,"Step 2744, loss: 0.8400098085403442, step time: 17.911911010742188ms\r\n",,terminal_output +17041,13566652,"TERMINAL",0,0,"Step 2745, loss: 0.3977057933807373, step time: 17.346858978271484ms\r\n",,terminal_output +17042,13566758,"TERMINAL",0,0,"Step 2746, loss: 0.20423327386379242, step time: 17.993688583374023ms\r\nStep 2747, loss: 0.1929541379213333, step time: 17.46225357055664ms\r\n",,terminal_output +17043,13566851,"TERMINAL",0,0,"Step 2748, loss: 1.3262875080108643, step time: 17.75383949279785ms\r\n",,terminal_output +17044,13566902,"TERMINAL",0,0,"Step 2749, loss: 0.12728512287139893, step time: 17.911672592163086ms\r\n",,terminal_output +17045,13566954,"TERMINAL",0,0,"Step 2750, loss: 0.24809812009334564, step time: 17.8530216217041ms\r\n",,terminal_output +17046,13567048,"TERMINAL",0,0,"Step 2751, loss: 0.2512338161468506, step time: 17.5778865814209ms\r\n",,terminal_output +17047,13567099,"TERMINAL",0,0,"Step 2752, loss: 0.18500776588916779, step time: 18.15032958984375ms\r\n",,terminal_output +17048,13567150,"TERMINAL",0,0,"Step 2753, loss: 0.11232779175043106, step time: 17.580747604370117ms\r\n",,terminal_output +17049,13567296,"TERMINAL",0,0,"Step 2754, loss: 0.16095034778118134, step time: 17.711162567138672ms\r\nStep 2755, loss: 0.1477474719285965, step time: 17.829418182373047ms\r\n",,terminal_output +17050,13567374,"TERMINAL",0,0,"Step 2756, loss: 0.26282817125320435, step time: 17.821073532104492ms\r\n",,terminal_output +17051,13567400,"TERMINAL",0,0,"Step 2757, loss: 0.1578829139471054, step time: 17.61913299560547ms\r\n",,terminal_output +17052,13567549,"TERMINAL",0,0,"Step 2758, loss: 0.2875335216522217, step time: 18.14746856689453ms\r\nStep 2759, loss: 0.11275217682123184, step time: 17.533302307128906ms\r\n",,terminal_output +17053,13567600,"TERMINAL",0,0,"Step 2760, loss: 0.22723707556724548, step time: 17.65131950378418ms\r\n",,terminal_output +17054,13567744,"TERMINAL",0,0,"Step 2761, loss: 0.32074037194252014, step time: 20.15376091003418ms\r\nStep 2762, loss: 0.18398621678352356, step time: 18.016576766967773ms\r\n",,terminal_output +17055,13567795,"TERMINAL",0,0,"Step 2763, loss: 0.1802438348531723, step time: 17.61651039123535ms\r\n",,terminal_output +17056,13567899,"TERMINAL",0,0,"Step 2764, loss: 0.2759605050086975, step time: 18.049240112304688ms\r\nStep 2765, loss: 0.15300820767879486, step time: 17.824172973632812ms\r\n",,terminal_output +17057,13567961,"TERMINAL",0,0,"Step 2766, loss: 0.4913426637649536, step time: 17.149686813354492ms\r\n",,terminal_output +17058,13568023,"TERMINAL",0,0,"Step 2767, loss: 0.17223425209522247, step time: 18.127918243408203ms\r\n",,terminal_output +17059,13568101,"TERMINAL",0,0,"Step 2768, loss: 0.22993507981300354, step time: 17.052650451660156ms\r\n",,terminal_output +17060,13568154,"TERMINAL",0,0,"Step 2769, loss: 0.6932970881462097, step time: 17.19522476196289ms\r\n",,terminal_output +17061,13568252,"TERMINAL",0,0,"Step 2770, loss: 0.2711198925971985, step time: 17.219066619873047ms\r\n",,terminal_output +17062,13568352,"TERMINAL",0,0,"Step 2771, loss: 0.2991481423377991, step time: 17.293930053710938ms\r\nStep 2772, loss: 0.10220512747764587, step time: 16.933441162109375ms\r\n",,terminal_output +17063,13568463,"TERMINAL",0,0,"Step 2773, loss: 0.41333481669425964, step time: 17.328739166259766ms\r\nStep 2774, loss: 0.1167769804596901, step time: 16.954421997070312ms\r\n",,terminal_output +17064,13568559,"TERMINAL",0,0,"Step 2775, loss: 0.21569004654884338, step time: 
17.06218719482422ms\r\n",,terminal_output +17065,13568653,"TERMINAL",0,0,"Step 2776, loss: 0.11668552458286285, step time: 17.23337173461914ms\r\n",,terminal_output +17066,13568661,"TERMINAL",0,0,"Step 2777, loss: 0.1405770629644394, step time: 17.194271087646484ms\r\n",,terminal_output +17067,13568761,"TERMINAL",0,0,"Step 2778, loss: 0.3722473978996277, step time: 16.94631576538086ms\r\n",,terminal_output +17068,13568821,"TERMINAL",0,0,"Step 2779, loss: 0.4560248851776123, step time: 17.354726791381836ms\r\n",,terminal_output +17069,13568881,"TERMINAL",0,0,"Step 2780, loss: 0.14466910064220428, step time: 16.861438751220703ms\r\n",,terminal_output +17070,13568945,"TERMINAL",0,0,"Step 2781, loss: 0.2325666844844818, step time: 17.17209815979004ms\r\n",,terminal_output +17071,13569004,"TERMINAL",0,0,"Step 2782, loss: 0.39239710569381714, step time: 17.238378524780273ms\r\n",,terminal_output +17072,13569064,"TERMINAL",0,0,"Step 2783, loss: 0.11490754783153534, step time: 17.01807975769043ms\r\n",,terminal_output +17073,13569172,"TERMINAL",0,0,"Step 2784, loss: 0.2678728699684143, step time: 16.91150665283203ms\r\nStep 2785, loss: 0.14692802727222443, step time: 17.412900924682617ms\r\n",,terminal_output +17074,13569265,"TERMINAL",0,0,"Step 2786, loss: 0.21497921645641327, step time: 16.92986488342285ms\r\n",,terminal_output +17075,13569371,"TERMINAL",0,0,"Step 2787, loss: 0.1442042738199234, step time: 20.660877227783203ms\r\nStep 2788, loss: 0.23779429495334625, step time: 17.147302627563477ms\r\n",,terminal_output +17076,13569431,"TERMINAL",0,0,"Step 2789, loss: 0.1251075565814972, step time: 17.02570915222168ms\r\n",,terminal_output +17077,13569493,"TERMINAL",0,0,"Step 2790, loss: 0.1686444878578186, step time: 16.961336135864258ms\r\n",,terminal_output +17078,13569555,"TERMINAL",0,0,"Step 2791, loss: 0.1844804435968399, step time: 17.50349998474121ms\r\n",,terminal_output +17079,13569618,"TERMINAL",0,0,"Step 2792, loss: 0.07046036422252655, step time: 16.868114471435547ms\r\n",,terminal_output +17080,13569681,"TERMINAL",0,0,"Step 2793, loss: 0.1341717690229416, step time: 17.415523529052734ms\r\n",,terminal_output +17081,13569761,"TERMINAL",0,0,"Step 2794, loss: 0.15191520750522614, step time: 17.169952392578125ms\r\n",,terminal_output +17082,13569864,"TERMINAL",0,0,"Step 2795, loss: 0.1800323724746704, step time: 17.05789566040039ms\r\nStep 2796, loss: 0.1776786893606186, step time: 16.94774627685547ms\r\n",,terminal_output +17083,13570008,"TERMINAL",0,0,"Step 2797, loss: 0.3334167003631592, step time: 17.548084259033203ms\r\nStep 2798, loss: 0.287069171667099, step time: 16.912221908569336ms\r\n",,terminal_output +17084,13570123,"train_dynamics.py",2056,0,"",python,selection_mouse +17085,13570125,"TERMINAL",0,0,"Step 2799, loss: 0.09871064871549606, step time: 17.1353816986084ms\r\nStep 2800, loss: 0.8565123677253723, step time: 17.27437973022461ms\r\n",,terminal_output +17086,13570177,"TERMINAL",0,0,"Step 2801, loss: 0.396762877702713, step time: 17.119884490966797ms\r\n",,terminal_output +17087,13570241,"TERMINAL",0,0,"Step 2802, loss: 0.45346853137016296, step time: 16.925573348999023ms\r\n",,terminal_output +17088,13570384,"TERMINAL",0,0,"Step 2803, loss: 0.2077862173318863, step time: 17.33875274658203ms\r\nStep 2804, loss: 0.19497475028038025, step time: 16.86859130859375ms\r\n",,terminal_output +17089,13570448,"TERMINAL",0,0,"Step 2805, loss: 0.35918861627578735, step time: 17.13848114013672ms\r\n",,terminal_output +17090,13570622,"TERMINAL",0,0,"Step 2806, loss: 
0.3573306202888489, step time: 17.20714569091797ms\r\nStep 2807, loss: 0.20086002349853516, step time: 17.255067825317383ms\r\nStep 2808, loss: 0.12291000038385391, step time: 17.127275466918945ms\r\n",,terminal_output +17091,13570688,"TERMINAL",0,0,"Step 2809, loss: 0.20156919956207275, step time: 17.296314239501953ms\r\n",,terminal_output +17092,13570790,"TERMINAL",0,0,"Step 2810, loss: 0.14710192382335663, step time: 16.95084571838379ms\r\n",,terminal_output +17093,13570895,"TERMINAL",0,0,"Step 2811, loss: 0.13652360439300537, step time: 16.94798469543457ms\r\nStep 2812, loss: 0.49037307500839233, step time: 17.230987548828125ms\r\n",,terminal_output +17094,13570952,"TERMINAL",0,0,"Step 2813, loss: 0.1043618842959404, step time: 17.449378967285156ms\r\n",,terminal_output +17095,13571003,"TERMINAL",0,0,"Step 2814, loss: 0.11983977258205414, step time: 16.86263084411621ms\r\n",,terminal_output +17096,13571069,"TERMINAL",0,0,"Step 2815, loss: 0.9244123697280884, step time: 17.406225204467773ms\r\n",,terminal_output +17097,13571187,"TERMINAL",0,0,"Step 2816, loss: 0.13371902704238892, step time: 20.59459686279297ms\r\nStep 2817, loss: 0.11377803236246109, step time: 18.363237380981445ms\r\n",,terminal_output +17098,13571251,"TERMINAL",0,0,"Step 2818, loss: 0.1574816107749939, step time: 17.632484436035156ms\r\n",,terminal_output +17099,13571311,"TERMINAL",0,0,"Step 2819, loss: 0.1997031271457672, step time: 17.2731876373291ms\r\n",,terminal_output +17100,13571372,"TERMINAL",0,0,"Step 2820, loss: 0.23370462656021118, step time: 17.111539840698242ms\r\n",,terminal_output +17101,13571437,"TERMINAL",0,0,"Step 2821, loss: 0.5510194301605225, step time: 17.3799991607666ms\r\n",,terminal_output +17102,13571498,"TERMINAL",0,0,"Step 2822, loss: 0.10245275497436523, step time: 17.248868942260742ms\r\n",,terminal_output +17103,13571560,"TERMINAL",0,0,"Step 2823, loss: 0.19145645201206207, step time: 17.313480377197266ms\r\n",,terminal_output +17104,13571712,"genie.py",0,0,"",python,tab +17105,13571732,"TERMINAL",0,0,"Step 2824, loss: 0.16527633368968964, step time: 17.272472381591797ms\r\n",,terminal_output +17106,13571820,"TERMINAL",0,0,"Step 2825, loss: 0.4459780156612396, step time: 17.102479934692383ms\r\nStep 2826, loss: 0.14727549254894257, step time: 17.128467559814453ms\r\nStep 2827, loss: 0.36317184567451477, step time: 17.375946044921875ms\r\n",,terminal_output +17107,13571892,"TERMINAL",0,0,"Step 2828, loss: 0.40389785170555115, step time: 16.959667205810547ms\r\n",,terminal_output +17108,13571957,"TERMINAL",0,0,"Step 2829, loss: 0.12103009968996048, step time: 17.20571517944336ms\r\n",,terminal_output +17109,13572012,"TERMINAL",0,0,"Step 2830, loss: 0.17645417153835297, step time: 17.320871353149414ms\r\n",,terminal_output +17110,13572065,"TERMINAL",0,0,"Step 2831, loss: 0.12577293813228607, step time: 17.73977279663086ms\r\n",,terminal_output +17111,13572132,"TERMINAL",0,0,"Step 2832, loss: 0.1782994121313095, step time: 17.168760299682617ms\r\n",,terminal_output +17112,13572199,"TERMINAL",0,0,"Step 2833, loss: 0.969539999961853, step time: 17.569541931152344ms\r\n",,terminal_output +17113,13572269,"TERMINAL",0,0,"Step 2834, loss: 0.15331871807575226, step time: 17.212867736816406ms\r\n",,terminal_output +17114,13572336,"TERMINAL",0,0,"Step 2835, loss: 0.11732107400894165, step time: 17.20595359802246ms\r\n",,terminal_output +17115,13572402,"TERMINAL",0,0,"Step 2836, loss: 0.1599642038345337, step time: 17.17209815979004ms\r\n",,terminal_output +17116,13572467,"TERMINAL",0,0,"Step 2837, 
loss: 1.2584149837493896, step time: 17.162084579467773ms\r\n",,terminal_output +17117,13572529,"TERMINAL",0,0,"Step 2838, loss: 0.1316445916891098, step time: 16.92676544189453ms\r\n",,terminal_output +17118,13572589,"TERMINAL",0,0,"Step 2839, loss: 0.26269036531448364, step time: 18.046855926513672ms\r\n",,terminal_output +17119,13572694,"TERMINAL",0,0,"Step 2840, loss: 0.2501056492328644, step time: 17.246246337890625ms\r\nStep 2841, loss: 0.454336553812027, step time: 18.102407455444336ms\r\n",,terminal_output +17120,13572756,"TERMINAL",0,0,"Step 2842, loss: 0.276639848947525, step time: 17.23480224609375ms\r\n",,terminal_output +17121,13572819,"TERMINAL",0,0,"Step 2843, loss: 0.6332246661186218, step time: 17.408370971679688ms\r\n",,terminal_output +17122,13572880,"TERMINAL",0,0,"Step 2844, loss: 0.12041790038347244, step time: 17.40288734436035ms\r\n",,terminal_output +17123,13572951,"genie.py",2366,0,"",python,selection_mouse +17124,13573009,"TERMINAL",0,0,"Step 2845, loss: 0.7217036485671997, step time: 17.555952072143555ms\r\nStep 2846, loss: 0.21160215139389038, step time: 17.147064208984375ms\r\n",,terminal_output +17125,13573074,"TERMINAL",0,0,"Step 2847, loss: 0.1734887659549713, step time: 17.586708068847656ms\r\n",,terminal_output +17126,13573136,"TERMINAL",0,0,"Step 2848, loss: 0.24277690052986145, step time: 17.313718795776367ms\r\n",,terminal_output +17127,13573192,"TERMINAL",0,0,"Step 2849, loss: 0.11212800443172455, step time: 17.331838607788086ms\r\n",,terminal_output +17128,13573257,"TERMINAL",0,0,"Step 2850, loss: 0.37900736927986145, step time: 16.981840133666992ms\r\n",,terminal_output +17129,13573318,"TERMINAL",0,0,"Step 2851, loss: 0.21538354456424713, step time: 17.440080642700195ms\r\n",,terminal_output +17130,13573385,"TERMINAL",0,0,"Step 2852, loss: 0.30542513728141785, step time: 16.875267028808594ms\r\n",,terminal_output +17131,13573482,"TERMINAL",0,0,"Step 2853, loss: 0.24536365270614624, step time: 17.11249351501465ms\r\n",,terminal_output +17132,13573535,"TERMINAL",0,0,"Step 2854, loss: 0.14919252693653107, step time: 17.395496368408203ms\r\n",,terminal_output +17133,13573641,"TERMINAL",0,0,"Step 2855, loss: 0.203634113073349, step time: 17.09890365600586ms\r\nStep 2856, loss: 0.16914954781532288, step time: 16.916751861572266ms\r\n",,terminal_output +17134,13573704,"TERMINAL",0,0,"Step 2857, loss: 0.43502917885780334, step time: 17.57979393005371ms\r\n",,terminal_output +17135,13573765,"TERMINAL",0,0,"Step 2858, loss: 0.11821475625038147, step time: 16.939640045166016ms\r\n",,terminal_output +17136,13573826,"TERMINAL",0,0,"Step 2859, loss: 0.1655665934085846, step time: 17.03500747680664ms\r\n",,terminal_output +17137,13573882,"genie.py",3075,0,"",python,selection_mouse +17138,13573907,"TERMINAL",0,0,"Step 2860, loss: 0.15809324383735657, step time: 17.374753952026367ms\r\n",,terminal_output +17139,13573953,"TERMINAL",0,0,"Step 2861, loss: 0.6391705274581909, step time: 17.241239547729492ms\r\n",,terminal_output +17140,13574043,"genie.py",3069,7,"outputs",python,selection_mouse +17141,13574065,"TERMINAL",0,0,"Step 2862, loss: 0.15020737051963806, step time: 16.92938804626465ms\r\n",,terminal_output +17142,13574141,"TERMINAL",0,0,"Step 2863, loss: 0.5189803838729858, step time: 17.34328269958496ms\r\nStep 2864, loss: 0.3442348539829254, step time: 16.928672790527344ms\r\n",,terminal_output +17143,13574249,"TERMINAL",0,0,"Step 2865, loss: 0.24576464295387268, step time: 29.04486656188965ms\r\n",,terminal_output +17144,13574312,"TERMINAL",0,0,"Step 2866, 
loss: 0.32635632157325745, step time: 29.361724853515625ms\r\n",,terminal_output +17145,13574365,"TERMINAL",0,0,"Step 2867, loss: 0.32331711053848267, step time: 18.35346221923828ms\r\n",,terminal_output +17146,13574482,"TERMINAL",0,0,"Step 2868, loss: 0.08564018458127975, step time: 17.09127426147461ms\r\nStep 2869, loss: 0.11294595897197723, step time: 17.37499237060547ms\r\n",,terminal_output +17147,13574543,"TERMINAL",0,0,"Step 2870, loss: 0.24789385497570038, step time: 17.01211929321289ms\r\n",,terminal_output +17148,13574605,"TERMINAL",0,0,"Step 2871, loss: 0.09786305576562881, step time: 17.000436782836914ms\r\n",,terminal_output +17149,13574667,"TERMINAL",0,0,"Step 2872, loss: 0.09485229104757309, step time: 17.176389694213867ms\r\n",,terminal_output +17150,13574730,"TERMINAL",0,0,"Step 2873, loss: 0.2163049429655075, step time: 17.09890365600586ms\r\n",,terminal_output +17151,13574843,"TERMINAL",0,0,"Step 2874, loss: 0.10346174985170364, step time: 16.94655418395996ms\r\nStep 2875, loss: 0.17104125022888184, step time: 17.351150512695312ms\r\n",,terminal_output +17152,13574906,"TERMINAL",0,0,"Step 2876, loss: 0.25957971811294556, step time: 16.936302185058594ms\r\n",,terminal_output +17153,13574978,"TERMINAL",0,0,"Step 2877, loss: 1.1170936822891235, step time: 17.203330993652344ms\r\n",,terminal_output +17154,13575003,"genie.py",2977,0,"",python,selection_mouse +17155,13575059,"TERMINAL",0,0,"Step 2878, loss: 0.08206159621477127, step time: 17.266511917114258ms\r\n",,terminal_output +17156,13575112,"TERMINAL",0,0,"Step 2879, loss: 0.3245094120502472, step time: 17.338991165161133ms\r\n",,terminal_output +17157,13575156,"genie.py",2974,9,"tokenizer",python,selection_mouse +17158,13575222,"TERMINAL",0,0,"Step 2880, loss: 0.8683496117591858, step time: 16.888856887817383ms\r\nStep 2881, loss: 0.16286467015743256, step time: 17.508983612060547ms\r\n",,terminal_output +17159,13575318,"TERMINAL",0,0,"Step 2882, loss: 0.09984078258275986, step time: 16.93868637084961ms\r\n",,terminal_output +17160,13575370,"TERMINAL",0,0,"Step 2883, loss: 0.18585409224033356, step time: 17.101764678955078ms\r\n",,terminal_output +17161,13575496,"TERMINAL",0,0,"Step 2884, loss: 0.22459453344345093, step time: 17.187118530273438ms\r\nStep 2885, loss: 0.12141600251197815, step time: 17.06218719482422ms\r\n",,terminal_output +17162,13575557,"TERMINAL",0,0,"Step 2886, loss: 0.20674414932727814, step time: 16.933202743530273ms\r\n",,terminal_output +17163,13575619,"TERMINAL",0,0,"Step 2887, loss: 0.1397571563720703, step time: 17.286300659179688ms\r\n",,terminal_output +17164,13575688,"TERMINAL",0,0,"Step 2888, loss: 0.2928454279899597, step time: 19.304752349853516ms\r\n",,terminal_output +17165,13575746,"genie.py",2987,0,"",python,selection_mouse +17166,13575774,"TERMINAL",0,0,"Step 2889, loss: 0.10994356125593185, step time: 18.01776885986328ms\r\n",,terminal_output +17167,13575864,"genie.py",2984,6,"decode",python,selection_mouse +17168,13576091,"TERMINAL",0,0,"Step 2890, loss: 0.11770528554916382, step time: 357.46002197265625ms\r\n",,terminal_output +17169,13576189,"TERMINAL",0,0,"Step 2891, loss: 0.3551819920539856, step time: 27.144432067871094ms\r\n",,terminal_output +17170,13576240,"TERMINAL",0,0,"Step 2892, loss: 0.25949811935424805, step time: 19.30093765258789ms\r\n",,terminal_output +17171,13576345,"TERMINAL",0,0,"Step 2893, loss: 0.21528813242912292, step time: 18.129825592041016ms\r\nStep 2894, loss: 0.13504041731357574, step time: 17.16923713684082ms\r\n",,terminal_output 
+17172,13576410,"TERMINAL",0,0,"Step 2895, loss: 0.0872017964720726, step time: 17.482757568359375ms\r\n",,terminal_output +17173,13576477,"TERMINAL",0,0,"Step 2896, loss: 0.1761000156402588, step time: 17.40121841430664ms\r\n",,terminal_output +17174,13576536,"TERMINAL",0,0,"Step 2897, loss: 0.14894819259643555, step time: 17.033815383911133ms\r\n",,terminal_output +17175,13576550,"genie.py",3021,0,"",python,selection_mouse +17176,13576608,"TERMINAL",0,0,"Step 2898, loss: 0.13363288342952728, step time: 17.168521881103516ms\r\n",,terminal_output +17177,13576667,"TERMINAL",0,0,"Step 2899, loss: 0.15114857256412506, step time: 17.541885375976562ms\r\n",,terminal_output +17178,13576701,"genie.py",3017,5,"batch",python,selection_mouse +17179,13576731,"TERMINAL",0,0,"Step 2900, loss: 0.0942852571606636, step time: 16.994237899780273ms\r\n",,terminal_output +17180,13576798,"TERMINAL",0,0,"Step 2901, loss: 0.7917698621749878, step time: 17.06552505493164ms\r\n",,terminal_output +17181,13576856,"TERMINAL",0,0,"Step 2902, loss: 0.2102607935667038, step time: 17.473936080932617ms\r\n",,terminal_output +17182,13576917,"TERMINAL",0,0,"Step 2903, loss: 0.30830588936805725, step time: 20.3244686126709ms\r\n",,terminal_output +17183,13576929,"genie.py",3017,6,"batch[",python,selection_mouse +17184,13576957,"genie.py",3017,7,"batch[""",python,selection_mouse +17185,13576993,"genie.py",3017,13,"batch[""videos",python,selection_mouse +17186,13577006,"TERMINAL",0,0,"Step 2904, loss: 0.16264130175113678, step time: 19.56629753112793ms\r\n",,terminal_output +17187,13577037,"genie.py",3017,15,"batch[""videos""]",python,selection_mouse +17188,13577051,"genie.py",3017,21,"batch[""videos""].shape",python,selection_mouse +17189,13577062,"TERMINAL",0,0,"Step 2905, loss: 0.20805609226226807, step time: 17.474889755249023ms\r\n",,terminal_output +17190,13577121,"genie.py",3017,22,"batch[""videos""].shape[",python,selection_mouse +17191,13577135,"TERMINAL",0,0,"Step 2906, loss: 0.10640303045511246, step time: 17.44818687438965ms\r\n",,terminal_output +17192,13577148,"genie.py",3017,23,"batch[""videos""].shape[2",python,selection_mouse +17193,13577184,"genie.py",3017,24,"batch[""videos""].shape[2:",python,selection_mouse +17194,13577196,"TERMINAL",0,0,"Step 2907, loss: 0.8811199069023132, step time: 17.101287841796875ms\r\n",,terminal_output +17195,13577224,"genie.py",3017,25,"batch[""videos""].shape[2:4",python,selection_mouse +17196,13577237,"TERMINAL",0,0,"Step 2908, loss: 0.4494628608226776, step time: 17.34161376953125ms\r\n",,terminal_output +17197,13577290,"genie.py",3017,26,"batch[""videos""].shape[2:4]",python,selection_mouse +17198,13577304,"TERMINAL",0,0,"Step 2909, loss: 0.13121454417705536, step time: 16.981124877929688ms\r\n",,terminal_output +17199,13577369,"TERMINAL",0,0,"Step 2910, loss: 0.15574884414672852, step time: 16.849756240844727ms\r\n",,terminal_output +17200,13577464,"TERMINAL",0,0,"Step 2911, loss: 0.2711693346500397, step time: 17.58742332458496ms\r\n",,terminal_output +17201,13577562,"genie.py",3043,0,"",python,selection_mouse +17202,13577575,"TERMINAL",0,0,"Step 2912, loss: 0.2347400188446045, step time: 17.127513885498047ms\r\nStep 2913, loss: 0.7133570909500122, step time: 17.17996597290039ms\r\n",,terminal_output +17203,13577627,"TERMINAL",0,0,"Step 2914, loss: 0.1423291563987732, step time: 17.241239547729492ms\r\n",,terminal_output +17204,13577692,"TERMINAL",0,0,"Step 2915, loss: 0.18048113584518433, step time: 17.23480224609375ms\r\n",,terminal_output 
+17205,13577753,"TERMINAL",0,0,"Step 2916, loss: 0.7185021638870239, step time: 16.927003860473633ms\r\n",,terminal_output +17206,13577910,"TERMINAL",0,0,"Step 2917, loss: 0.2875681221485138, step time: 17.598628997802734ms\r\n",,terminal_output +17207,13577936,"genie.py",3041,2,"4]",python,selection_mouse +17208,13577953,"genie.py",3039,4,"2:4]",python,selection_mouse +17209,13577971,"genie.py",3033,10,"shape[2:4]",python,selection_mouse +17210,13578005,"TERMINAL",0,0,"Step 2918, loss: 0.21820753812789917, step time: 16.933679580688477ms\r\nStep 2919, loss: 0.24390602111816406, step time: 16.97826385498047ms\r\nStep 2920, loss: 0.17665760219097137, step time: 17.507553100585938ms\r\n",,terminal_output +17211,13578021,"genie.py",3031,12,"].shape[2:4]",python,selection_mouse +17212,13578034,"genie.py",3024,19,"videos""].shape[2:4]",python,selection_mouse +17213,13578066,"TERMINAL",0,0,"Step 2921, loss: 0.33992117643356323, step time: 17.55237579345703ms\r\n",,terminal_output +17214,13578118,"TERMINAL",0,0,"Step 2922, loss: 0.1663876324892044, step time: 17.088651657104492ms\r\n",,terminal_output +17215,13578142,"genie.py",3023,20,"""videos""].shape[2:4]",python,selection_mouse +17216,13578182,"TERMINAL",0,0,"Step 2923, loss: 0.08994697779417038, step time: 17.43936538696289ms\r\n",,terminal_output +17217,13578211,"genie.py",3022,21,"[""videos""].shape[2:4]",python,selection_mouse +17218,13578250,"TERMINAL",0,0,"Step 2924, loss: 0.15387514233589172, step time: 17.05765724182129ms\r\n",,terminal_output +17219,13578257,"genie.py",3017,26,"batch[""videos""].shape[2:4]",python,selection_mouse +17220,13578310,"TERMINAL",0,0,"Step 2925, loss: 0.1725449115037918, step time: 17.11106300354004ms\r\n",,terminal_output +17221,13578375,"TERMINAL",0,0,"Step 2926, loss: 0.14577391743659973, step time: 17.379283905029297ms\r\n",,terminal_output +17222,13578435,"TERMINAL",0,0,"Step 2927, loss: 0.2250380963087082, step time: 17.201900482177734ms\r\n",,terminal_output +17223,13578499,"TERMINAL",0,0,"Step 2928, loss: 0.120512954890728, step time: 16.913890838623047ms\r\n",,terminal_output +17224,13578632,"TERMINAL",0,0,"Step 2929, loss: 0.31078240275382996, step time: 17.69232749938965ms\r\nStep 2930, loss: 0.1912180632352829, step time: 17.597436904907227ms\r\n",,terminal_output +17225,13578684,"TERMINAL",0,0,"Step 2931, loss: 0.14251455664634705, step time: 19.59824562072754ms\r\n",,terminal_output +17226,13578750,"TERMINAL",0,0,"Step 2932, loss: 0.09914499521255493, step time: 17.90475845336914ms\r\n",,terminal_output +17227,13578809,"TERMINAL",0,0,"Step 2933, loss: 0.16795869171619415, step time: 17.274856567382812ms\r\n",,terminal_output +17228,13578889,"genie.py",3017,0,"",python,selection_mouse +17229,13578889,"genie.py",3017,5,"batch",python,selection_mouse +17230,13578908,"TERMINAL",0,0,"Step 2934, loss: 0.2833564579486847, step time: 17.081499099731445ms\r\n",,terminal_output +17231,13578946,"TERMINAL",0,0,"Step 2935, loss: 0.13808339834213257, step time: 17.574310302734375ms\r\n",,terminal_output +17232,13579009,"TERMINAL",0,0,"Step 2936, loss: 0.25068244338035583, step time: 17.049312591552734ms\r\n",,terminal_output +17233,13579068,"TERMINAL",0,0,"Step 2937, loss: 0.09284340590238571, step time: 17.086505889892578ms\r\n",,terminal_output +17234,13579132,"TERMINAL",0,0,"Step 2938, loss: 0.28953397274017334, step time: 17.271041870117188ms\r\n",,terminal_output +17235,13579144,"genie.py",3017,7,"batch[""",python,selection_mouse 
+17236,13579163,"genie.py",3017,13,"batch[""videos",python,selection_mouse +17237,13579195,"genie.py",3017,14,"batch[""videos""",python,selection_mouse +17238,13579196,"TERMINAL",0,0,"Step 2939, loss: 0.11759785562753677, step time: 16.98470115661621ms\r\n",,terminal_output +17239,13579209,"genie.py",3017,16,"batch[""videos""].",python,selection_mouse +17240,13579235,"genie.py",3017,21,"batch[""videos""].shape",python,selection_mouse +17241,13579249,"TERMINAL",0,0,"Step 2940, loss: 0.29053857922554016, step time: 16.949176788330078ms\r\n",,terminal_output +17242,13579294,"genie.py",3017,22,"batch[""videos""].shape[",python,selection_mouse +17243,13579310,"genie.py",3017,23,"batch[""videos""].shape[2",python,selection_mouse +17244,13579321,"TERMINAL",0,0,"Step 2941, loss: 0.18054106831550598, step time: 17.302989959716797ms\r\n",,terminal_output +17245,13579341,"genie.py",3017,24,"batch[""videos""].shape[2:",python,selection_mouse +17246,13579352,"genie.py",3017,25,"batch[""videos""].shape[2:4",python,selection_mouse +17247,13579364,"genie.py",3017,26,"batch[""videos""].shape[2:4]",python,selection_mouse +17248,13579386,"TERMINAL",0,0,"Step 2942, loss: 0.16096235811710358, step time: 16.931772232055664ms\r\n",,terminal_output +17249,13579439,"TERMINAL",0,0,"Step 2943, loss: 0.1916251927614212, step time: 16.959667205810547ms\r\n",,terminal_output +17250,13579506,"TERMINAL",0,0,"Step 2944, loss: 0.18236474692821503, step time: 17.25029945373535ms\r\n",,terminal_output +17251,13579566,"TERMINAL",0,0,"Step 2945, loss: 0.27433672547340393, step time: 17.112016677856445ms\r\n",,terminal_output +17252,13579633,"genie.py",3043,0,"",python,selection_mouse +17253,13579646,"TERMINAL",0,0,"Step 2946, loss: 0.08925879746675491, step time: 16.9064998626709ms\r\n",,terminal_output +17254,13579696,"TERMINAL",0,0,"Step 2947, loss: 0.4529922604560852, step time: 17.220735549926758ms\r\n",,terminal_output +17255,13579749,"TERMINAL",0,0,"Step 2948, loss: 0.10428926348686218, step time: 16.921520233154297ms\r\n",,terminal_output +17256,13579843,"TERMINAL",0,0,"Step 2949, loss: 0.4286902844905853, step time: 17.211437225341797ms\r\n",,terminal_output +17257,13579950,"TERMINAL",0,0,"Step 2950, loss: 0.35785970091819763, step time: 17.277240753173828ms\r\nStep 2951, loss: 0.1930863857269287, step time: 17.113447189331055ms\r\n",,terminal_output +17258,13580013,"TERMINAL",0,0,"Step 2952, loss: 0.3218582570552826, step time: 16.935110092163086ms\r\n",,terminal_output +17259,13580077,"TERMINAL",0,0,"Step 2953, loss: 0.14278574287891388, step time: 17.316102981567383ms\r\n",,terminal_output +17260,13580191,"TERMINAL",0,0,"Step 2954, loss: 0.1340804398059845, step time: 16.936779022216797ms\r\nStep 2955, loss: 0.2639833390712738, step time: 17.189741134643555ms\r\n",,terminal_output +17261,13580257,"TERMINAL",0,0,"Step 2956, loss: 0.5479362607002258, step time: 17.50969886779785ms\r\n",,terminal_output +17262,13580318,"TERMINAL",0,0,"Step 2957, loss: 0.20251338183879852, step time: 17.036914825439453ms\r\n",,terminal_output +17263,13580388,"TERMINAL",0,0,"Step 2958, loss: 0.19613420963287354, step time: 16.915082931518555ms\r\n",,terminal_output +17264,13580443,"TERMINAL",0,0,"Step 2959, loss: 0.2480696737766266, step time: 17.429351806640625ms\r\n",,terminal_output +17265,13580537,"TERMINAL",0,0,"Step 2960, loss: 0.12830761075019836, step time: 17.42720603942871ms\r\n",,terminal_output +17266,13580645,"TERMINAL",0,0,"Step 2961, loss: 0.10490076243877411, step time: 17.046689987182617ms\r\nStep 2962, loss: 
0.11717149615287781, step time: 18.500328063964844ms\r\n",,terminal_output +17267,13580709,"TERMINAL",0,0,"Step 2963, loss: 0.6925582885742188, step time: 19.164085388183594ms\r\n",,terminal_output +17268,13580773,"TERMINAL",0,0,"Step 2964, loss: 0.19973209500312805, step time: 18.372774124145508ms\r\n",,terminal_output +17269,13580835,"TERMINAL",0,0,"Step 2965, loss: 0.23932939767837524, step time: 18.852710723876953ms\r\n",,terminal_output +17270,13580930,"TERMINAL",0,0,"Step 2966, loss: 0.11004667729139328, step time: 17.783641815185547ms\r\n",,terminal_output +17271,13581007,"TERMINAL",0,0,"Step 2967, loss: 0.4380974769592285, step time: 17.646074295043945ms\r\nStep 2968, loss: 0.12385065853595734, step time: 19.364595413208008ms\r\n",,terminal_output +17272,13581070,"TERMINAL",0,0,"Step 2969, loss: 0.17719653248786926, step time: 20.73359489440918ms\r\n",,terminal_output +17273,13581149,"TERMINAL",0,0,"Step 2970, loss: 0.2778306305408478, step time: 22.171497344970703ms\r\n",,terminal_output +17274,13581209,"TERMINAL",0,0,"Step 2971, loss: 0.5456030964851379, step time: 27.110815048217773ms\r\n",,terminal_output +17275,13581273,"TERMINAL",0,0,"Step 2972, loss: 0.10640010982751846, step time: 20.968198776245117ms\r\n",,terminal_output +17276,13581336,"TERMINAL",0,0,"Step 2973, loss: 0.15689557790756226, step time: 24.04928207397461ms\r\n",,terminal_output +17277,13581401,"TERMINAL",0,0,"Step 2974, loss: 0.11280462145805359, step time: 22.215604782104492ms\r\n",,terminal_output +17278,13581468,"TERMINAL",0,0,"Step 2975, loss: 0.08087892830371857, step time: 22.203445434570312ms\r\n",,terminal_output +17279,13581530,"TERMINAL",0,0,"Step 2976, loss: 0.0806313082575798, step time: 19.09780502319336ms\r\n",,terminal_output +17280,13581592,"TERMINAL",0,0,"Step 2977, loss: 1.3973463773727417, step time: 18.30291748046875ms\r\n",,terminal_output +17281,13581662,"TERMINAL",0,0,"Step 2978, loss: 0.1990860551595688, step time: 17.34447479248047ms\r\n",,terminal_output +17282,13581725,"TERMINAL",0,0,"Step 2979, loss: 0.2589623034000397, step time: 17.161846160888672ms\r\n",,terminal_output +17283,13581787,"TERMINAL",0,0,"Step 2980, loss: 0.19455331563949585, step time: 17.467737197875977ms\r\n",,terminal_output +17284,13581850,"TERMINAL",0,0,"Step 2981, loss: 0.12779588997364044, step time: 17.283916473388672ms\r\n",,terminal_output +17285,13581913,"TERMINAL",0,0,"Step 2982, loss: 0.12513208389282227, step time: 17.41504669189453ms\r\n",,terminal_output +17286,13581976,"TERMINAL",0,0,"Step 2983, loss: 0.11266571283340454, step time: 17.54593849182129ms\r\n",,terminal_output +17287,13582039,"TERMINAL",0,0,"Step 2984, loss: 0.44857871532440186, step time: 17.093658447265625ms\r\n",,terminal_output +17288,13582113,"TERMINAL",0,0,"Step 2985, loss: 0.08862161636352539, step time: 17.08817481994629ms\r\n",,terminal_output +17289,13582166,"TERMINAL",0,0,"Step 2986, loss: 0.09506256133317947, step time: 17.59171485900879ms\r\n",,terminal_output +17290,13582232,"TERMINAL",0,0,"Step 2987, loss: 0.11303332448005676, step time: 23.638248443603516ms\r\n",,terminal_output +17291,13582298,"TERMINAL",0,0,"Step 2988, loss: 0.24100561439990997, step time: 26.644229888916016ms\r\n",,terminal_output +17292,13582359,"TERMINAL",0,0,"Step 2989, loss: 0.26618918776512146, step time: 22.756576538085938ms\r\n",,terminal_output +17293,13582423,"TERMINAL",0,0,"Step 2990, loss: 0.5135601162910461, step time: 21.03567123413086ms\r\n",,terminal_output +17294,13582487,"TERMINAL",0,0,"Step 2991, loss: 0.7005478143692017, step 
time: 19.681215286254883ms\r\n",,terminal_output +17295,13582549,"TERMINAL",0,0,"Step 2992, loss: 0.1692461371421814, step time: 19.446134567260742ms\r\n",,terminal_output +17296,13582611,"TERMINAL",0,0,"Step 2993, loss: 0.15681004524230957, step time: 18.86153221130371ms\r\n",,terminal_output +17297,13582705,"TERMINAL",0,0,"Step 2994, loss: 0.2645583152770996, step time: 18.065690994262695ms\r\n",,terminal_output +17298,13582758,"TERMINAL",0,0,"Step 2995, loss: 0.24237532913684845, step time: 18.471717834472656ms\r\n",,terminal_output +17299,13582865,"TERMINAL",0,0,"Step 2996, loss: 0.1483294814825058, step time: 17.902851104736328ms\r\nStep 2997, loss: 0.19167600572109222, step time: 18.392086029052734ms\r\n",,terminal_output +17300,13582946,"TERMINAL",0,0,"Step 2998, loss: 0.6423157453536987, step time: 18.229246139526367ms\r\n",,terminal_output +17301,13583003,"TERMINAL",0,0,"Step 2999, loss: 0.10753235965967178, step time: 22.990942001342773ms\r\n",,terminal_output +17302,13586149,"TERMINAL",0,0,"Step 3000, loss: 0.0885780081152916, step time: 36.44919395446777ms\r\n",,terminal_output +17303,13586215,"TERMINAL",0,0,"Step 3001, loss: 0.29157745838165283, step time: 26.00884437561035ms\r\n",,terminal_output +17304,13586279,"TERMINAL",0,0,"Step 3002, loss: 0.14160127937793732, step time: 19.779443740844727ms\r\n",,terminal_output +17305,13586346,"TERMINAL",0,0,"Step 3003, loss: 0.09288305789232254, step time: 19.617319107055664ms\r\n",,terminal_output +17306,13586440,"TERMINAL",0,0,"Step 3004, loss: 1.2463101148605347, step time: 18.281221389770508ms\r\n",,terminal_output +17307,13586494,"TERMINAL",0,0,"Step 3005, loss: 0.2930556535720825, step time: 18.48006248474121ms\r\n",,terminal_output +17308,13586600,"TERMINAL",0,0,"Step 3006, loss: 0.2334708273410797, step time: 18.31197738647461ms\r\nStep 3007, loss: 0.20458470284938812, step time: 18.567323684692383ms\r\n",,terminal_output +17309,13586723,"TERMINAL",0,0,"Step 3008, loss: 0.09906583279371262, step time: 17.947673797607422ms\r\nStep 3009, loss: 0.08233209699392319, step time: 18.754005432128906ms\r\n",,terminal_output +17310,13586788,"TERMINAL",0,0,"Step 3010, loss: 0.11211400479078293, step time: 17.780065536499023ms\r\n",,terminal_output +17311,13586852,"TERMINAL",0,0,"Step 3011, loss: 0.1955854296684265, step time: 18.20683479309082ms\r\n",,terminal_output +17312,13586916,"TERMINAL",0,0,"Step 3012, loss: 0.21349118649959564, step time: 18.312692642211914ms\r\n",,terminal_output +17313,13586982,"TERMINAL",0,0,"Step 3013, loss: 0.2458428293466568, step time: 18.45717430114746ms\r\n",,terminal_output +17314,13587046,"TERMINAL",0,0,"Step 3014, loss: 0.3822793662548065, step time: 17.794370651245117ms\r\n",,terminal_output +17315,13587112,"TERMINAL",0,0,"Step 3015, loss: 0.1768486499786377, step time: 22.329092025756836ms\r\n",,terminal_output +17316,13587174,"TERMINAL",0,0,"Step 3016, loss: 0.7075090408325195, step time: 19.442319869995117ms\r\n",,terminal_output +17317,13587237,"TERMINAL",0,0,"Step 3017, loss: 0.3934643268585205, step time: 18.791913986206055ms\r\n",,terminal_output +17318,13587332,"TERMINAL",0,0,"Step 3018, loss: 0.2577436566352844, step time: 20.202159881591797ms\r\n",,terminal_output +17319,13587395,"TERMINAL",0,0,"Step 3019, loss: 0.11950468271970749, step time: 18.58830451965332ms\r\n",,terminal_output +17320,13587457,"TERMINAL",0,0,"Step 3020, loss: 0.15586794912815094, step time: 18.428325653076172ms\r\n",,terminal_output +17321,13587519,"TERMINAL",0,0,"Step 3021, loss: 0.11080291867256165, step time: 
18.47553253173828ms\r\n",,terminal_output +17322,13587581,"TERMINAL",0,0,"Step 3022, loss: 0.14327605068683624, step time: 18.15485954284668ms\r\n",,terminal_output +17323,13587642,"TERMINAL",0,0,"Step 3023, loss: 0.1366039365530014, step time: 19.836902618408203ms\r\n",,terminal_output +17324,13587705,"TERMINAL",0,0,"Step 3024, loss: 0.5175530314445496, step time: 19.30975914001465ms\r\n",,terminal_output +17325,13587767,"TERMINAL",0,0,"Step 3025, loss: 1.0710190534591675, step time: 18.510818481445312ms\r\n",,terminal_output +17326,13587833,"TERMINAL",0,0,"Step 3026, loss: 0.11874300241470337, step time: 18.263578414916992ms\r\n",,terminal_output +17327,13587894,"TERMINAL",0,0,"Step 3027, loss: 0.39086806774139404, step time: 18.081188201904297ms\r\n",,terminal_output +17328,13587956,"TERMINAL",0,0,"Step 3028, loss: 0.32784798741340637, step time: 17.22097396850586ms\r\n",,terminal_output +17329,13588019,"TERMINAL",0,0,"Step 3029, loss: 0.2725343406200409, step time: 17.67730712890625ms\r\n",,terminal_output +17330,13588082,"TERMINAL",0,0,"Step 3030, loss: 0.5727409720420837, step time: 18.376827239990234ms\r\n",,terminal_output +17331,13588183,"TERMINAL",0,0,"Step 3031, loss: 0.09601617604494095, step time: 17.626285552978516ms\r\nStep 3032, loss: 0.11819915473461151, step time: 17.310380935668945ms\r\n",,terminal_output +17332,13588280,"TERMINAL",0,0,"Step 3033, loss: 0.3211604356765747, step time: 18.038272857666016ms\r\n",,terminal_output +17333,13588343,"TERMINAL",0,0,"Step 3034, loss: 0.3430149555206299, step time: 17.004728317260742ms\r\n",,terminal_output +17334,13588402,"TERMINAL",0,0,"Step 3035, loss: 0.1410049945116043, step time: 18.832683563232422ms\r\n",,terminal_output +17335,13588463,"TERMINAL",0,0,"Step 3036, loss: 0.12232796847820282, step time: 22.554397583007812ms\r\n",,terminal_output +17336,13588524,"TERMINAL",0,0,"Step 3037, loss: 0.08557207137346268, step time: 18.626689910888672ms\r\n",,terminal_output +17337,13588598,"TERMINAL",0,0,"Step 3038, loss: 0.5551460385322571, step time: 17.670869827270508ms\r\n",,terminal_output +17338,13588654,"TERMINAL",0,0,"Step 3039, loss: 0.18782953917980194, step time: 18.08309555053711ms\r\n",,terminal_output +17339,13588708,"TERMINAL",0,0,"Step 3040, loss: 0.22555135190486908, step time: 17.119169235229492ms\r\n",,terminal_output +17340,13588804,"TERMINAL",0,0,"Step 3041, loss: 0.2885376214981079, step time: 17.524003982543945ms\r\n",,terminal_output +17341,13588857,"TERMINAL",0,0,"Step 3042, loss: 0.13206452131271362, step time: 17.76885986328125ms\r\n",,terminal_output +17342,13588963,"TERMINAL",0,0,"Step 3043, loss: 0.14308467507362366, step time: 17.316818237304688ms\r\nStep 3044, loss: 0.23280112445354462, step time: 17.263412475585938ms\r\n",,terminal_output +17343,13589062,"TERMINAL",0,0,"Step 3045, loss: 0.14649254083633423, step time: 18.031597137451172ms\r\n",,terminal_output +17344,13589114,"TERMINAL",0,0,"Step 3046, loss: 0.09767463803291321, step time: 17.003536224365234ms\r\n",,terminal_output +17345,13589167,"TERMINAL",0,0,"Step 3047, loss: 0.35801154375076294, step time: 17.49253273010254ms\r\n",,terminal_output +17346,13589338,"TERMINAL",0,0,"Step 3048, loss: 0.1026964783668518, step time: 17.569303512573242ms\r\nStep 3049, loss: 1.4254159927368164, step time: 18.781423568725586ms\r\nStep 3050, loss: 0.19381055235862732, step time: 17.14634895324707ms\r\n",,terminal_output +17347,13589400,"TERMINAL",0,0,"Step 3051, loss: 0.1289556622505188, step time: 17.870426177978516ms\r\n",,terminal_output 
+17348,13589464,"TERMINAL",0,0,"Step 3052, loss: 0.09992121905088425, step time: 16.9832706451416ms\r\n",,terminal_output +17349,13589528,"TERMINAL",0,0,"Step 3053, loss: 0.979738712310791, step time: 17.40741729736328ms\r\n",,terminal_output +17350,13589590,"TERMINAL",0,0,"Step 3054, loss: 0.10052872449159622, step time: 17.520666122436523ms\r\n",,terminal_output +17351,13589654,"TERMINAL",0,0,"Step 3055, loss: 0.2079887092113495, step time: 17.199039459228516ms\r\n",,terminal_output +17352,13589716,"TERMINAL",0,0,"Step 3056, loss: 0.09700949490070343, step time: 17.09890365600586ms\r\n",,terminal_output +17353,13589779,"TERMINAL",0,0,"Step 3057, loss: 0.5870861411094666, step time: 17.783403396606445ms\r\n",,terminal_output +17354,13589926,"TERMINAL",0,0,"Step 3058, loss: 1.024985432624817, step time: 16.85357093811035ms\r\nStep 3059, loss: 0.06224917992949486, step time: 17.415523529052734ms\r\n",,terminal_output +17355,13590032,"TERMINAL",0,0,"Step 3060, loss: 0.15723152458667755, step time: 17.63629913330078ms\r\nStep 3061, loss: 0.10704600811004639, step time: 17.347097396850586ms\r\n",,terminal_output +17356,13590107,"TERMINAL",0,0,"Step 3062, loss: 0.10754970461130142, step time: 17.070770263671875ms\r\n",,terminal_output +17357,13590162,"TERMINAL",0,0,"Step 3063, loss: 0.2550256848335266, step time: 17.75383949279785ms\r\n",,terminal_output +17358,13590228,"TERMINAL",0,0,"Step 3064, loss: 0.11894550174474716, step time: 16.88981056213379ms\r\n",,terminal_output +17359,13590290,"TERMINAL",0,0,"Step 3065, loss: 0.09678251296281815, step time: 17.38595962524414ms\r\n",,terminal_output +17360,13590352,"TERMINAL",0,0,"Step 3066, loss: 0.16000892221927643, step time: 17.545700073242188ms\r\n",,terminal_output +17361,13590460,"TERMINAL",0,0,"Step 3067, loss: 0.20855620503425598, step time: 17.283201217651367ms\r\nStep 3068, loss: 0.4333001971244812, step time: 17.052888870239258ms\r\n",,terminal_output +17362,13590525,"TERMINAL",0,0,"Step 3069, loss: 0.11767808347940445, step time: 17.791032791137695ms\r\n",,terminal_output +17363,13590588,"TERMINAL",0,0,"Step 3070, loss: 0.175692617893219, step time: 16.920089721679688ms\r\n",,terminal_output +17364,13590651,"TERMINAL",0,0,"Step 3071, loss: 0.11532949656248093, step time: 17.40860939025879ms\r\n",,terminal_output +17365,13590716,"TERMINAL",0,0,"Step 3072, loss: 0.08381029963493347, step time: 17.53520965576172ms\r\n",,terminal_output +17366,13590780,"TERMINAL",0,0,"Step 3073, loss: 0.11777138710021973, step time: 17.229557037353516ms\r\n",,terminal_output +17367,13590843,"TERMINAL",0,0,"Step 3074, loss: 0.14603912830352783, step time: 17.250776290893555ms\r\n",,terminal_output +17368,13590907,"TERMINAL",0,0,"Step 3075, loss: 0.18525256216526031, step time: 18.01156997680664ms\r\n",,terminal_output +17369,13591212,"TERMINAL",0,0,"Step 3076, loss: 0.2116224318742752, step time: 309.2827796936035ms\r\n",,terminal_output +17370,13591276,"TERMINAL",0,0,"Step 3077, loss: 0.264817476272583, step time: 25.47430992126465ms\r\n",,terminal_output +17371,13591341,"TERMINAL",0,0,"Step 3078, loss: 0.0927891880273819, step time: 19.702911376953125ms\r\n",,terminal_output +17372,13591418,"TERMINAL",0,0,"Step 3079, loss: 0.15352080762386322, step time: 18.40519905090332ms\r\n",,terminal_output +17373,13591468,"TERMINAL",0,0,"Step 3080, loss: 0.13666605949401855, step time: 17.640113830566406ms\r\n",,terminal_output +17374,13591564,"TERMINAL",0,0,"Step 3081, loss: 0.5355048775672913, step time: 17.740249633789062ms\r\n",,terminal_output 
+17375,13591629,"TERMINAL",0,0,"Step 3082, loss: 0.12064313888549805, step time: 17.333984375ms\r\n",,terminal_output +17376,13591692,"TERMINAL",0,0,"Step 3083, loss: 0.2037515789270401, step time: 17.57025718688965ms\r\n",,terminal_output +17377,13591744,"TERMINAL",0,0,"Step 3084, loss: 0.15672464668750763, step time: 17.39358901977539ms\r\n",,terminal_output +17378,13591851,"TERMINAL",0,0,"Step 3085, loss: 0.5334080457687378, step time: 17.55976676940918ms\r\nStep 3086, loss: 0.14751435816287994, step time: 17.16160774230957ms\r\n",,terminal_output +17379,13591969,"TERMINAL",0,0,"Step 3087, loss: 0.7461285591125488, step time: 17.594337463378906ms\r\nStep 3088, loss: 0.24490568041801453, step time: 17.173290252685547ms\r\n",,terminal_output +17380,13592032,"TERMINAL",0,0,"Step 3089, loss: 0.152579665184021, step time: 17.54617691040039ms\r\n",,terminal_output +17381,13592097,"TERMINAL",0,0,"Step 3090, loss: 0.48465684056282043, step time: 17.27604866027832ms\r\n",,terminal_output +17382,13592167,"TERMINAL",0,0,"Step 3091, loss: 0.13654911518096924, step time: 17.740488052368164ms\r\n",,terminal_output +17383,13592221,"TERMINAL",0,0,"Step 3092, loss: 0.12730827927589417, step time: 17.206668853759766ms\r\n",,terminal_output +17384,13592314,"TERMINAL",0,0,"Step 3093, loss: 0.2938902676105499, step time: 17.52471923828125ms\r\n",,terminal_output +17385,13592366,"TERMINAL",0,0,"Step 3094, loss: 0.08350370824337006, step time: 17.17686653137207ms\r\n",,terminal_output +17386,13592473,"TERMINAL",0,0,"Step 3095, loss: 0.17048892378807068, step time: 17.522811889648438ms\r\nStep 3096, loss: 0.34588491916656494, step time: 17.12346076965332ms\r\n",,terminal_output +17387,13592567,"TERMINAL",0,0,"Step 3097, loss: 0.20937135815620422, step time: 17.653703689575195ms\r\n",,terminal_output +17388,13592621,"TERMINAL",0,0,"Step 3098, loss: 0.1667383909225464, step time: 18.15485954284668ms\r\n",,terminal_output +17389,13592726,"TERMINAL",0,0,"Step 3099, loss: 0.22971540689468384, step time: 17.468929290771484ms\r\nStep 3100, loss: 0.10378870368003845, step time: 17.131328582763672ms\r\n",,terminal_output +17390,13592791,"TERMINAL",0,0,"Step 3101, loss: 0.3221632242202759, step time: 17.417430877685547ms\r\n",,terminal_output +17391,13592853,"TERMINAL",0,0,"Step 3102, loss: 0.08370693027973175, step time: 17.246723175048828ms\r\n",,terminal_output +17392,13592916,"TERMINAL",0,0,"Step 3103, loss: 0.685007631778717, step time: 17.60697364807129ms\r\n",,terminal_output +17393,13592980,"TERMINAL",0,0,"Step 3104, loss: 0.08927281200885773, step time: 17.42243766784668ms\r\n",,terminal_output +17394,13593043,"TERMINAL",0,0,"Step 3105, loss: 0.12387962639331818, step time: 17.914772033691406ms\r\n",,terminal_output +17395,13593110,"TERMINAL",0,0,"Step 3106, loss: 0.09088484942913055, step time: 24.905681610107422ms\r\n",,terminal_output +17396,13593175,"TERMINAL",0,0,"Step 3107, loss: 0.25158125162124634, step time: 24.624347686767578ms\r\n",,terminal_output +17397,13593237,"TERMINAL",0,0,"Step 3108, loss: 0.1100207194685936, step time: 20.462751388549805ms\r\n",,terminal_output +17398,13593301,"TERMINAL",0,0,"Step 3109, loss: 0.2897469103336334, step time: 18.744707107543945ms\r\n",,terminal_output +17399,13593418,"TERMINAL",0,0,"Step 3110, loss: 0.20813894271850586, step time: 17.861127853393555ms\r\nStep 3111, loss: 0.11306965351104736, step time: 17.925262451171875ms\r\n",,terminal_output +17400,13593480,"TERMINAL",0,0,"Step 3112, loss: 0.15872368216514587, step time: 
17.639875411987305ms\r\n",,terminal_output +17401,13593543,"TERMINAL",0,0,"Step 3113, loss: 0.13420474529266357, step time: 17.526626586914062ms\r\n",,terminal_output +17402,13593656,"TERMINAL",0,0,"Step 3114, loss: 0.06963833421468735, step time: 17.545461654663086ms\r\n",,terminal_output +17403,13593670,"TERMINAL",0,0,"Step 3115, loss: 0.1027170792222023, step time: 17.534494400024414ms\r\n",,terminal_output +17404,13593768,"TERMINAL",0,0,"Step 3116, loss: 1.1552929878234863, step time: 17.32945442199707ms\r\n",,terminal_output +17405,13593831,"TERMINAL",0,0,"Step 3117, loss: 0.4899154603481293, step time: 17.43793487548828ms\r\n",,terminal_output +17406,13593893,"TERMINAL",0,0,"Step 3118, loss: 0.295366495847702, step time: 17.37189292907715ms\r\n",,terminal_output +17407,13593956,"TERMINAL",0,0,"Step 3119, loss: 1.1208312511444092, step time: 17.484426498413086ms\r\n",,terminal_output +17408,13594022,"TERMINAL",0,0,"Step 3120, loss: 0.1712566465139389, step time: 17.425537109375ms\r\n",,terminal_output +17409,13594074,"TERMINAL",0,0,"Step 3121, loss: 0.17278969287872314, step time: 17.600059509277344ms\r\n",,terminal_output +17410,13594182,"TERMINAL",0,0,"Step 3122, loss: 0.4777095913887024, step time: 17.328739166259766ms\r\nStep 3123, loss: 0.14674656093120575, step time: 17.615795135498047ms\r\n",,terminal_output +17411,13594249,"TERMINAL",0,0,"Step 3124, loss: 0.07103053480386734, step time: 17.35830307006836ms\r\n",,terminal_output +17412,13594310,"TERMINAL",0,0,"Step 3125, loss: 0.37968382239341736, step time: 18.414974212646484ms\r\n",,terminal_output +17413,13594373,"TERMINAL",0,0,"Step 3126, loss: 0.1555660367012024, step time: 17.602205276489258ms\r\n",,terminal_output +17414,13594440,"TERMINAL",0,0,"Step 3127, loss: 0.14858165383338928, step time: 17.595291137695312ms\r\n",,terminal_output +17415,13594503,"TERMINAL",0,0,"Step 3128, loss: 0.10897926241159439, step time: 17.543554306030273ms\r\n",,terminal_output +17416,13594566,"TERMINAL",0,0,"Step 3129, loss: 0.34799709916114807, step time: 17.44246482849121ms\r\n",,terminal_output +17417,13594629,"TERMINAL",0,0,"Step 3130, loss: 0.09372709691524506, step time: 17.383098602294922ms\r\n",,terminal_output +17418,13594738,"TERMINAL",0,0,"Step 3131, loss: 0.14890696108341217, step time: 17.345905303955078ms\r\nStep 3132, loss: 0.6310451626777649, step time: 17.353057861328125ms\r\n",,terminal_output +17419,13594803,"TERMINAL",0,0,"Step 3133, loss: 0.12342946231365204, step time: 17.568349838256836ms\r\n",,terminal_output +17420,13594866,"TERMINAL",0,0,"Step 3134, loss: 0.08801788836717606, step time: 17.220735549926758ms\r\n",,terminal_output +17421,13594929,"TERMINAL",0,0,"Step 3135, loss: 0.12010503560304642, step time: 17.368793487548828ms\r\n",,terminal_output +17422,13594991,"TERMINAL",0,0,"Step 3136, loss: 0.08007929474115372, step time: 17.36617088317871ms\r\n",,terminal_output +17423,13595056,"TERMINAL",0,0,"Step 3137, loss: 0.09943939745426178, step time: 17.296552658081055ms\r\n",,terminal_output +17424,13595119,"TERMINAL",0,0,"Step 3138, loss: 0.21154117584228516, step time: 17.526626586914062ms\r\n",,terminal_output +17425,13595171,"TERMINAL",0,0,"Step 3139, loss: 0.2840358316898346, step time: 17.50946044921875ms\r\n",,terminal_output +17426,13595267,"TERMINAL",0,0,"Step 3140, loss: 0.10073148459196091, step time: 17.25935935974121ms\r\n",,terminal_output +17427,13595320,"TERMINAL",0,0,"Step 3141, loss: 0.18036867678165436, step time: 17.568111419677734ms\r\n",,terminal_output +17428,13595427,"TERMINAL",0,0,"Step 
3142, loss: 0.08792446553707123, step time: 17.38572120666504ms\r\nStep 3143, loss: 0.12470930069684982, step time: 17.536640167236328ms\r\n",,terminal_output +17429,13595520,"TERMINAL",0,0,"Step 3144, loss: 0.3515014946460724, step time: 17.30823516845703ms\r\n",,terminal_output +17430,13595629,"TERMINAL",0,0,"Step 3145, loss: 0.06928745657205582, step time: 17.412900924682617ms\r\nStep 3146, loss: 0.1292540282011032, step time: 17.20595359802246ms\r\n",,terminal_output +17431,13595751,"TERMINAL",0,0,"Step 3147, loss: 0.10783196240663528, step time: 17.32349395751953ms\r\nStep 3148, loss: 0.18903565406799316, step time: 17.32778549194336ms\r\n",,terminal_output +17432,13595807,"TERMINAL",0,0,"Step 3149, loss: 0.10267305374145508, step time: 17.60077476501465ms\r\n",,terminal_output +17433,13595868,"TERMINAL",0,0,"Step 3150, loss: 0.2148938626050949, step time: 17.271757125854492ms\r\n",,terminal_output +17434,13595932,"TERMINAL",0,0,"Step 3151, loss: 0.3805862069129944, step time: 17.700672149658203ms\r\n",,terminal_output +17435,13595996,"TERMINAL",0,0,"Step 3152, loss: 0.216514453291893, step time: 17.33708381652832ms\r\n",,terminal_output +17436,13596059,"TERMINAL",0,0,"Step 3153, loss: 0.18003733456134796, step time: 17.504215240478516ms\r\n",,terminal_output +17437,13596174,"TERMINAL",0,0,"Step 3154, loss: 0.37126949429512024, step time: 17.31085777282715ms\r\nStep 3155, loss: 0.17787441611289978, step time: 17.376184463500977ms\r\n",,terminal_output +17438,13596270,"TERMINAL",0,0,"Step 3156, loss: 0.11342272907495499, step time: 17.18735694885254ms\r\n",,terminal_output +17439,13596380,"TERMINAL",0,0,"Step 3157, loss: 0.08928865939378738, step time: 17.618894577026367ms\r\nStep 3158, loss: 0.23398400843143463, step time: 17.163753509521484ms\r\n",,terminal_output +17440,13596445,"TERMINAL",0,0,"Step 3159, loss: 0.06569927930831909, step time: 17.535924911499023ms\r\n",,terminal_output +17441,13596508,"TERMINAL",0,0,"Step 3160, loss: 0.09427116066217422, step time: 19.080638885498047ms\r\n",,terminal_output +17442,13596572,"TERMINAL",0,0,"Step 3161, loss: 0.08058363199234009, step time: 17.612457275390625ms\r\n",,terminal_output +17443,13596636,"TERMINAL",0,0,"Step 3162, loss: 0.20323902368545532, step time: 17.175674438476562ms\r\n",,terminal_output +17444,13596700,"TERMINAL",0,0,"Step 3163, loss: 0.6496005654335022, step time: 17.593860626220703ms\r\n",,terminal_output +17445,13596763,"TERMINAL",0,0,"Step 3164, loss: 0.06639686226844788, step time: 17.202377319335938ms\r\n",,terminal_output +17446,13596826,"TERMINAL",0,0,"Step 3165, loss: 0.11251327395439148, step time: 17.291784286499023ms\r\n",,terminal_output +17447,13596890,"TERMINAL",0,0,"Step 3166, loss: 0.16015766561031342, step time: 17.391443252563477ms\r\n",,terminal_output +17448,13596954,"TERMINAL",0,0,"Step 3167, loss: 0.5651372671127319, step time: 17.49897003173828ms\r\n",,terminal_output +17449,13597017,"TERMINAL",0,0,"Step 3168, loss: 0.22988030314445496, step time: 17.345428466796875ms\r\n",,terminal_output +17450,13597081,"TERMINAL",0,0,"Step 3169, loss: 0.08242733776569366, step time: 17.592191696166992ms\r\n",,terminal_output +17451,13597179,"TERMINAL",0,0,"Step 3170, loss: 0.421946257352829, step time: 17.31133460998535ms\r\nStep 3171, loss: 0.16480709612369537, step time: 17.717599868774414ms\r\n",,terminal_output +17452,13597245,"TERMINAL",0,0,"Step 3172, loss: 0.21830326318740845, step time: 17.43292808532715ms\r\n",,terminal_output +17453,13597307,"TERMINAL",0,0,"Step 3173, loss: 0.09969297051429749, step 
time: 17.522573471069336ms\r\n",,terminal_output +17454,13597370,"TERMINAL",0,0,"Step 3174, loss: 0.16152822971343994, step time: 17.209768295288086ms\r\n",,terminal_output +17455,13597434,"TERMINAL",0,0,"Step 3175, loss: 0.2521897554397583, step time: 17.46845245361328ms\r\n",,terminal_output +17456,13597498,"TERMINAL",0,0,"Step 3176, loss: 0.1276674121618271, step time: 17.303943634033203ms\r\n",,terminal_output +17457,13597561,"TERMINAL",0,0,"Step 3177, loss: 0.17787890136241913, step time: 17.433881759643555ms\r\n",,terminal_output +17458,13597623,"TERMINAL",0,0,"Step 3178, loss: 0.13950899243354797, step time: 17.233610153198242ms\r\n",,terminal_output +17459,13597687,"TERMINAL",0,0,"Step 3179, loss: 0.150351420044899, step time: 17.57502555847168ms\r\n",,terminal_output +17460,13597749,"TERMINAL",0,0,"Step 3180, loss: 0.08900032192468643, step time: 17.356157302856445ms\r\n",,terminal_output +17461,13597812,"TERMINAL",0,0,"Step 3181, loss: 0.0890858843922615, step time: 17.648696899414062ms\r\n",,terminal_output +17462,13597875,"TERMINAL",0,0,"Step 3182, loss: 0.23028285801410675, step time: 17.232894897460938ms\r\n",,terminal_output +17463,13597939,"TERMINAL",0,0,"Step 3183, loss: 0.1350541114807129, step time: 17.638444900512695ms\r\n",,terminal_output +17464,13598003,"TERMINAL",0,0,"Step 3184, loss: 0.24311889708042145, step time: 17.41647720336914ms\r\n",,terminal_output +17465,13598066,"TERMINAL",0,0,"Step 3185, loss: 0.42729222774505615, step time: 17.357349395751953ms\r\n",,terminal_output +17466,13598131,"TERMINAL",0,0,"Step 3186, loss: 0.08594167232513428, step time: 17.55213737487793ms\r\n",,terminal_output +17467,13598184,"TERMINAL",0,0,"Step 3187, loss: 0.08347458392381668, step time: 18.9816951751709ms\r\n",,terminal_output +17468,13598251,"TERMINAL",0,0,"Step 3188, loss: 0.2257862240076065, step time: 17.92597770690918ms\r\n",,terminal_output +17469,13598311,"TERMINAL",0,0,"Step 3189, loss: 0.254683256149292, step time: 17.78101921081543ms\r\n",,terminal_output +17470,13598376,"TERMINAL",0,0,"Step 3190, loss: 0.10704756528139114, step time: 17.457008361816406ms\r\n",,terminal_output +17471,13598439,"TERMINAL",0,0,"Step 3191, loss: 0.12981687486171722, step time: 17.586946487426758ms\r\n",,terminal_output +17472,13598503,"TERMINAL",0,0,"Step 3192, loss: 0.06632167100906372, step time: 17.205238342285156ms\r\n",,terminal_output +17473,13598566,"TERMINAL",0,0,"Step 3193, loss: 0.1082848459482193, step time: 21.26455307006836ms\r\n",,terminal_output +17474,13598630,"TERMINAL",0,0,"Step 3194, loss: 0.12295506149530411, step time: 17.261266708374023ms\r\n",,terminal_output +17475,13598688,"TERMINAL",0,0,"Step 3195, loss: 0.27582669258117676, step time: 17.38286018371582ms\r\n",,terminal_output +17476,13598812,"TERMINAL",0,0,"Step 3196, loss: 0.15423522889614105, step time: 17.342567443847656ms\r\nStep 3197, loss: 0.12260755151510239, step time: 17.5628662109375ms\r\n",,terminal_output +17477,13598878,"TERMINAL",0,0,"Step 3198, loss: 0.10343699157238007, step time: 17.265796661376953ms\r\n",,terminal_output +17478,13598942,"TERMINAL",0,0,"Step 3199, loss: 0.07089337706565857, step time: 17.61317253112793ms\r\n",,terminal_output +17479,13599006,"TERMINAL",0,0,"Step 3200, loss: 0.6238647699356079, step time: 17.32492446899414ms\r\n",,terminal_output +17480,13599070,"TERMINAL",0,0,"Step 3201, loss: 0.0926005095243454, step time: 20.102977752685547ms\r\n",,terminal_output +17481,13599200,"TERMINAL",0,0,"Step 3202, loss: 0.11284878104925156, step time: 30.298471450805664ms\r\nStep 
3203, loss: 0.12854640185832977, step time: 21.951675415039062ms\r\n",,terminal_output +17482,13599265,"TERMINAL",0,0,"Step 3204, loss: 0.10245734453201294, step time: 18.51344108581543ms\r\n",,terminal_output +17483,13599327,"TERMINAL",0,0,"Step 3205, loss: 0.055908869951963425, step time: 18.064498901367188ms\r\n",,terminal_output +17484,13599389,"TERMINAL",0,0,"Step 3206, loss: 0.17085112631320953, step time: 17.61460304260254ms\r\n",,terminal_output +17485,13599453,"TERMINAL",0,0,"Step 3207, loss: 0.08038704097270966, step time: 17.880916595458984ms\r\n",,terminal_output +17486,13599516,"TERMINAL",0,0,"Step 3208, loss: 0.1315177083015442, step time: 17.64225959777832ms\r\n",,terminal_output +17487,13599579,"TERMINAL",0,0,"Step 3209, loss: 0.06987665593624115, step time: 17.94576644897461ms\r\n",,terminal_output +17488,13599641,"TERMINAL",0,0,"Step 3210, loss: 0.18193651735782623, step time: 17.446517944335938ms\r\n",,terminal_output +17489,13599704,"TERMINAL",0,0,"Step 3211, loss: 0.3233909606933594, step time: 18.83411407470703ms\r\n",,terminal_output +17490,13599770,"TERMINAL",0,0,"Step 3212, loss: 0.13810062408447266, step time: 19.12379264831543ms\r\n",,terminal_output +17491,13599831,"TERMINAL",0,0,"Step 3213, loss: 0.09990405291318893, step time: 19.4094181060791ms\r\n",,terminal_output +17492,13599894,"TERMINAL",0,0,"Step 3214, loss: 0.11718630790710449, step time: 18.59879493713379ms\r\n",,terminal_output +17493,13599959,"TERMINAL",0,0,"Step 3215, loss: 0.8537768125534058, step time: 18.320798873901367ms\r\n",,terminal_output +17494,13600039,"TERMINAL",0,0,"Step 3216, loss: 0.12663552165031433, step time: 17.98725128173828ms\r\n",,terminal_output +17495,13600103,"TERMINAL",0,0,"Step 3217, loss: 0.1882605105638504, step time: 19.893884658813477ms\r\n",,terminal_output +17496,13600164,"TERMINAL",0,0,"Step 3218, loss: 0.31736722588539124, step time: 20.723819732666016ms\r\n",,terminal_output +17497,13600230,"TERMINAL",0,0,"Step 3219, loss: 0.08175747096538544, step time: 23.00548553466797ms\r\n",,terminal_output +17498,13600292,"TERMINAL",0,0,"Step 3220, loss: 0.0815688744187355, step time: 26.99112892150879ms\r\n",,terminal_output +17499,13600355,"TERMINAL",0,0,"Step 3221, loss: 0.3602351248264313, step time: 21.402359008789062ms\r\n",,terminal_output +17500,13600419,"TERMINAL",0,0,"Step 3222, loss: 0.0925762802362442, step time: 19.87743377685547ms\r\n",,terminal_output +17501,13600484,"TERMINAL",0,0,"Step 3223, loss: 0.06915783137083054, step time: 20.981311798095703ms\r\n",,terminal_output +17502,13600547,"TERMINAL",0,0,"Step 3224, loss: 0.08901125192642212, step time: 21.63529396057129ms\r\n",,terminal_output +17503,13600612,"TERMINAL",0,0,"Step 3225, loss: 0.32418644428253174, step time: 18.975496292114258ms\r\n",,terminal_output +17504,13600672,"TERMINAL",0,0,"Step 3226, loss: 0.1101192906498909, step time: 18.553495407104492ms\r\n",,terminal_output +17505,13600737,"TERMINAL",0,0,"Step 3227, loss: 0.08315719664096832, step time: 17.92001724243164ms\r\n",,terminal_output +17506,13600799,"TERMINAL",0,0,"Step 3228, loss: 0.07741407305002213, step time: 17.613649368286133ms\r\n",,terminal_output +17507,13600862,"TERMINAL",0,0,"Step 3229, loss: 0.08142401278018951, step time: 17.910242080688477ms\r\n",,terminal_output +17508,13600925,"TERMINAL",0,0,"Step 3230, loss: 0.08403488248586655, step time: 17.47894287109375ms\r\n",,terminal_output +17509,13600990,"TERMINAL",0,0,"Step 3231, loss: 0.08721224963665009, step time: 17.57955551147461ms\r\n",,terminal_output 
+17510,13601054,"TERMINAL",0,0,"Step 3232, loss: 0.09164345264434814, step time: 17.541170120239258ms\r\n",,terminal_output +17511,13601117,"TERMINAL",0,0,"Step 3233, loss: 0.39622169733047485, step time: 19.818544387817383ms\r\n",,terminal_output +17512,13601171,"TERMINAL",0,0,"Step 3234, loss: 0.1341542750597, step time: 19.16027069091797ms\r\n",,terminal_output +17513,13601267,"TERMINAL",0,0,"Step 3235, loss: 0.2112063318490982, step time: 18.06473731994629ms\r\n",,terminal_output +17514,13601320,"TERMINAL",0,0,"Step 3236, loss: 0.06981360167264938, step time: 17.81749725341797ms\r\n",,terminal_output +17515,13601428,"TERMINAL",0,0,"Step 3237, loss: 0.06825435161590576, step time: 21.09384536743164ms\r\nStep 3238, loss: 0.13505232334136963, step time: 25.198698043823242ms\r\n",,terminal_output +17516,13601491,"TERMINAL",0,0,"Step 3239, loss: 0.06696652621030807, step time: 19.849777221679688ms\r\n",,terminal_output +17517,13601553,"TERMINAL",0,0,"Step 3240, loss: 0.2523665726184845, step time: 19.81806755065918ms\r\n",,terminal_output +17518,13601616,"TERMINAL",0,0,"Step 3241, loss: 0.4284205138683319, step time: 18.970489501953125ms\r\n",,terminal_output +17519,13601710,"TERMINAL",0,0,"Step 3242, loss: 0.2502392530441284, step time: 17.81463623046875ms\r\n",,terminal_output +17520,13601764,"TERMINAL",0,0,"Step 3243, loss: 0.10722129791975021, step time: 22.653818130493164ms\r\n",,terminal_output +17521,13601869,"TERMINAL",0,0,"Step 3244, loss: 0.07062561064958572, step time: 18.64314079284668ms\r\nStep 3245, loss: 0.21694526076316833, step time: 18.082141876220703ms\r\n",,terminal_output +17522,13601968,"TERMINAL",0,0,"Step 3246, loss: 0.09698802977800369, step time: 17.579317092895508ms\r\n",,terminal_output +17523,13602018,"TERMINAL",0,0,"Step 3247, loss: 0.49904850125312805, step time: 17.85898208618164ms\r\n",,terminal_output +17524,13602128,"TERMINAL",0,0,"Step 3248, loss: 0.1013537123799324, step time: 17.508983612060547ms\r\nStep 3249, loss: 0.22297024726867676, step time: 17.61603355407715ms\r\n",,terminal_output +17525,13602184,"TERMINAL",0,0,"Step 3250, loss: 0.1464923769235611, step time: 17.534494400024414ms\r\n",,terminal_output +17526,13602277,"TERMINAL",0,0,"Step 3251, loss: 0.08823296427726746, step time: 17.676353454589844ms\r\n",,terminal_output +17527,13602331,"TERMINAL",0,0,"Step 3252, loss: 0.2880866229534149, step time: 17.31395721435547ms\r\n",,terminal_output +17528,13602438,"TERMINAL",0,0,"Step 3253, loss: 0.0671086460351944, step time: 17.825841903686523ms\r\nStep 3254, loss: 0.16135787963867188, step time: 17.404556274414062ms\r\n",,terminal_output +17529,13602534,"TERMINAL",0,0,"Step 3255, loss: 0.16707144677639008, step time: 17.8983211517334ms\r\n",,terminal_output +17530,13602587,"TERMINAL",0,0,"Step 3256, loss: 0.18065659701824188, step time: 17.4252986907959ms\r\n",,terminal_output +17531,13602694,"TERMINAL",0,0,"Step 3257, loss: 0.15660929679870605, step time: 17.53091812133789ms\r\nStep 3258, loss: 0.09884638339281082, step time: 17.361164093017578ms\r\n",,terminal_output +17532,13602880,"TERMINAL",0,0,"Step 3259, loss: 0.05234042927622795, step time: 17.703771591186523ms\r\nStep 3260, loss: 0.08333569020032883, step time: 17.28677749633789ms\r\n",,terminal_output +17533,13602922,"TERMINAL",0,0,"Step 3261, loss: 0.4250009059906006, step time: 17.511844635009766ms\r\n",,terminal_output +17534,13603216,"TERMINAL",0,0,"Step 3262, loss: 0.1739276498556137, step time: 337.95928955078125ms\r\n",,terminal_output +17535,13603279,"TERMINAL",0,0,"Step 3263, 
loss: 0.10710122436285019, step time: 25.061368942260742ms\r\n",,terminal_output +17536,13603409,"TERMINAL",0,0,"Step 3264, loss: 0.25536373257637024, step time: 19.677162170410156ms\r\nStep 3265, loss: 0.17779821157455444, step time: 18.316984176635742ms\r\n",,terminal_output +17537,13603473,"TERMINAL",0,0,"Step 3266, loss: 0.07146193087100983, step time: 17.802953720092773ms\r\n",,terminal_output +17538,13603542,"TERMINAL",0,0,"Step 3267, loss: 1.092780590057373, step time: 17.77505874633789ms\r\n",,terminal_output +17539,13603647,"TERMINAL",0,0,"Step 3268, loss: 0.3256489932537079, step time: 17.65131950378418ms\r\n",,terminal_output +17540,13603662,"TERMINAL",0,0,"Step 3269, loss: 0.14722441136837006, step time: 17.513751983642578ms\r\n",,terminal_output +17541,13603757,"TERMINAL",0,0,"Step 3270, loss: 0.052662692964076996, step time: 20.61629295349121ms\r\n",,terminal_output +17542,13603821,"TERMINAL",0,0,"Step 3271, loss: 0.10505037009716034, step time: 18.260478973388672ms\r\n",,terminal_output +17543,13603887,"TERMINAL",0,0,"Step 3272, loss: 0.1260652244091034, step time: 17.380475997924805ms\r\n",,terminal_output +17544,13603939,"TERMINAL",0,0,"Step 3273, loss: 0.6456269025802612, step time: 17.5628662109375ms\r\n",,terminal_output +17545,13604051,"TERMINAL",0,0,"Step 3274, loss: 1.2252713441848755, step time: 17.50922203063965ms\r\nStep 3275, loss: 0.23414409160614014, step time: 17.40860939025879ms\r\n",,terminal_output +17546,13604116,"TERMINAL",0,0,"Step 3276, loss: 0.12821942567825317, step time: 17.299413681030273ms\r\n",,terminal_output +17547,13604173,"TERMINAL",0,0,"Step 3277, loss: 0.13116662204265594, step time: 17.637252807617188ms\r\n",,terminal_output +17548,13604306,"TERMINAL",0,0,"Step 3278, loss: 0.09669505059719086, step time: 17.195701599121094ms\r\nStep 3279, loss: 0.22203963994979858, step time: 17.520427703857422ms\r\n",,terminal_output +17549,13604359,"TERMINAL",0,0,"Step 3280, loss: 0.09263643622398376, step time: 17.401933670043945ms\r\n",,terminal_output +17550,13604458,"TERMINAL",0,0,"Step 3281, loss: 0.6118023991584778, step time: 17.24529266357422ms\r\n",,terminal_output +17551,13604539,"TERMINAL",0,0,"Step 3282, loss: 0.4632544219493866, step time: 17.32468605041504ms\r\nStep 3283, loss: 1.2285720109939575, step time: 17.626523971557617ms\r\n",,terminal_output +17552,13604602,"TERMINAL",0,0,"Step 3284, loss: 0.3621324598789215, step time: 17.28224754333496ms\r\n",,terminal_output +17553,13604666,"TERMINAL",0,0,"Step 3285, loss: 0.21646638214588165, step time: 17.390727996826172ms\r\n",,terminal_output +17554,13604731,"TERMINAL",0,0,"Step 3286, loss: 0.26231393218040466, step time: 17.28987693786621ms\r\n",,terminal_output +17555,13604799,"TERMINAL",0,0,"Step 3287, loss: 0.1850007027387619, step time: 17.5168514251709ms\r\n",,terminal_output +17556,13604866,"TERMINAL",0,0,"Step 3288, loss: 0.670924961566925, step time: 17.323970794677734ms\r\n",,terminal_output +17557,13604931,"TERMINAL",0,0,"Step 3289, loss: 0.53240966796875, step time: 17.65584945678711ms\r\n",,terminal_output +17558,13604995,"TERMINAL",0,0,"Step 3290, loss: 0.14611904323101044, step time: 17.25029945373535ms\r\n",,terminal_output +17559,13605055,"TERMINAL",0,0,"Step 3291, loss: 0.22806860506534576, step time: 17.566442489624023ms\r\n",,terminal_output +17560,13605118,"TERMINAL",0,0,"Step 3292, loss: 0.08266887813806534, step time: 17.43912696838379ms\r\n",,terminal_output +17561,13605171,"TERMINAL",0,0,"Step 3293, loss: 0.09028725326061249, step time: 
17.490386962890625ms\r\n",,terminal_output +17562,13605268,"TERMINAL",0,0,"Step 3294, loss: 0.10033639520406723, step time: 17.16899871826172ms\r\n",,terminal_output +17563,13605334,"TERMINAL",0,0,"Step 3295, loss: 0.10358130186796188, step time: 17.434120178222656ms\r\n",,terminal_output +17564,13605387,"TERMINAL",0,0,"Step 3296, loss: 1.578910231590271, step time: 17.22240447998047ms\r\n",,terminal_output +17565,13605495,"TERMINAL",0,0,"Step 3297, loss: 0.3165096640586853, step time: 17.271995544433594ms\r\nStep 3298, loss: 0.42601415514945984, step time: 17.370939254760742ms\r\n",,terminal_output +17566,13605560,"TERMINAL",0,0,"Step 3299, loss: 0.12594063580036163, step time: 17.441272735595703ms\r\n",,terminal_output +17567,13605625,"TERMINAL",0,0,"Step 3300, loss: 0.2623870074748993, step time: 17.21048355102539ms\r\n",,terminal_output +17568,13605691,"TERMINAL",0,0,"Step 3301, loss: 0.11421151459217072, step time: 17.538785934448242ms\r\n",,terminal_output +17569,13605755,"TERMINAL",0,0,"Step 3302, loss: 0.16842254996299744, step time: 17.19975471496582ms\r\n",,terminal_output +17570,13605822,"TERMINAL",0,0,"Step 3303, loss: 0.14297296106815338, step time: 30.948400497436523ms\r\n",,terminal_output +17571,13605882,"TERMINAL",0,0,"Step 3304, loss: 0.25207096338272095, step time: 19.766807556152344ms\r\n",,terminal_output +17572,13605946,"TERMINAL",0,0,"Step 3305, loss: 0.16884712874889374, step time: 17.762184143066406ms\r\n",,terminal_output +17573,13606011,"TERMINAL",0,0,"Step 3306, loss: 1.0072205066680908, step time: 17.365217208862305ms\r\n",,terminal_output +17574,13606076,"TERMINAL",0,0,"Step 3307, loss: 0.1337486058473587, step time: 17.805099487304688ms\r\n",,terminal_output +17575,13606139,"TERMINAL",0,0,"Step 3308, loss: 0.08313478529453278, step time: 17.35973358154297ms\r\n",,terminal_output +17576,13606208,"TERMINAL",0,0,"Step 3309, loss: 0.11580606549978256, step time: 17.493724822998047ms\r\n",,terminal_output +17577,13606275,"TERMINAL",0,0,"Step 3310, loss: 0.4522537291049957, step time: 17.56453514099121ms\r\n",,terminal_output +17578,13606337,"TERMINAL",0,0,"Step 3311, loss: 0.17887932062149048, step time: 17.61484146118164ms\r\n",,terminal_output +17579,13606400,"TERMINAL",0,0,"Step 3312, loss: 0.24467891454696655, step time: 17.243146896362305ms\r\n",,terminal_output +17580,13606461,"TERMINAL",0,0,"Step 3313, loss: 0.09768570959568024, step time: 17.780303955078125ms\r\n",,terminal_output +17581,13606523,"TERMINAL",0,0,"Step 3314, loss: 0.10007822513580322, step time: 17.2576904296875ms\r\n",,terminal_output +17582,13606592,"TERMINAL",0,0,"Step 3315, loss: 0.27250105142593384, step time: 17.446517944335938ms\r\n",,terminal_output +17583,13606655,"TERMINAL",0,0,"Step 3316, loss: 0.16412928700447083, step time: 17.444849014282227ms\r\n",,terminal_output +17584,13606707,"TERMINAL",0,0,"Step 3317, loss: 0.09652473032474518, step time: 20.293712615966797ms\r\n",,terminal_output +17585,13606771,"TERMINAL",0,0,"Step 3318, loss: 0.10307203233242035, step time: 22.789716720581055ms\r\n",,terminal_output +17586,13606835,"TERMINAL",0,0,"Step 3319, loss: 0.13116374611854553, step time: 28.905391693115234ms\r\n",,terminal_output +17587,13606898,"TERMINAL",0,0,"Step 3320, loss: 0.06259234994649887, step time: 21.610736846923828ms\r\n",,terminal_output +17588,13606964,"TERMINAL",0,0,"Step 3321, loss: 0.24481216073036194, step time: 20.282506942749023ms\r\n",,terminal_output +17589,13607040,"TERMINAL",0,0,"Step 3322, loss: 0.06283967196941376, step time: 
20.357608795166016ms\r\n",,terminal_output +17590,13607105,"TERMINAL",0,0,"Step 3323, loss: 0.34335872530937195, step time: 23.245811462402344ms\r\n",,terminal_output +17591,13607160,"TERMINAL",0,0,"Step 3324, loss: 0.17616648972034454, step time: 20.070791244506836ms\r\n",,terminal_output +17592,13607227,"TERMINAL",0,0,"Step 3325, loss: 0.12691964209079742, step time: 18.53322982788086ms\r\n",,terminal_output +17593,13607287,"TERMINAL",0,0,"Step 3326, loss: 0.10816974937915802, step time: 17.781734466552734ms\r\n",,terminal_output +17594,13607348,"TERMINAL",0,0,"Step 3327, loss: 0.09538692235946655, step time: 17.8220272064209ms\r\n",,terminal_output +17595,13607411,"TERMINAL",0,0,"Step 3328, loss: 0.477796345949173, step time: 17.648696899414062ms\r\n",,terminal_output +17596,13607475,"TERMINAL",0,0,"Step 3329, loss: 0.06766120344400406, step time: 17.400503158569336ms\r\n",,terminal_output +17597,13607523,"TERMINAL",0,0,"Step 3330, loss: 0.22035075724124908, step time: 17.396211624145508ms\r\n",,terminal_output +17598,13607617,"TERMINAL",0,0,"Step 3331, loss: 0.06572579592466354, step time: 17.64845848083496ms\r\n",,terminal_output +17599,13607669,"TERMINAL",0,0,"Step 3332, loss: 0.13246099650859833, step time: 17.26388931274414ms\r\n",,terminal_output +17600,13607720,"TERMINAL",0,0,"Step 3333, loss: 0.18939341604709625, step time: 17.67730712890625ms\r\n",,terminal_output +17601,13607815,"TERMINAL",0,0,"Step 3334, loss: 0.3214133679866791, step time: 17.52781867980957ms\r\n",,terminal_output +17602,13607866,"TERMINAL",0,0,"Step 3335, loss: 0.13943901658058167, step time: 23.260116577148438ms\r\n",,terminal_output +17603,13607917,"TERMINAL",0,0,"Step 3336, loss: 0.6135159730911255, step time: 17.536401748657227ms\r\n",,terminal_output +17604,13608021,"TERMINAL",0,0,"Step 3337, loss: 0.1292760670185089, step time: 17.422914505004883ms\r\nStep 3338, loss: 0.11874215304851532, step time: 17.15826988220215ms\r\n",,terminal_output +17605,13608086,"TERMINAL",0,0,"Step 3339, loss: 0.17292435467243195, step time: 17.275094985961914ms\r\n",,terminal_output +17606,13608146,"TERMINAL",0,0,"Step 3340, loss: 0.12327456474304199, step time: 17.61627197265625ms\r\n",,terminal_output +17607,13608208,"TERMINAL",0,0,"Step 3341, loss: 0.0771043598651886, step time: 17.571210861206055ms\r\n",,terminal_output +17608,13608278,"TERMINAL",0,0,"Step 3342, loss: 0.06703530251979828, step time: 17.234325408935547ms\r\n",,terminal_output +17609,13608328,"genie.py",2429,0,"",python,selection_mouse +17610,13608350,"TERMINAL",0,0,"Step 3343, loss: 0.6742765307426453, step time: 17.609357833862305ms\r\n",,terminal_output +17611,13608403,"TERMINAL",0,0,"Step 3344, loss: 0.13444817066192627, step time: 17.267704010009766ms\r\n",,terminal_output +17612,13608463,"genie.py",2424,6,"videos",python,selection_mouse +17613,13608526,"TERMINAL",0,0,"Step 3345, loss: 0.26068347692489624, step time: 17.424583435058594ms\r\nStep 3346, loss: 0.09075639396905899, step time: 17.33112335205078ms\r\n",,terminal_output +17614,13608654,"TERMINAL",0,0,"Step 3347, loss: 0.0777292475104332, step time: 17.579078674316406ms\r\nStep 3348, loss: 1.0367789268493652, step time: 17.14920997619629ms\r\n",,terminal_output +17615,13608709,"TERMINAL",0,0,"Step 3349, loss: 0.4211771786212921, step time: 17.66037940979004ms\r\n",,terminal_output +17616,13608775,"TERMINAL",0,0,"Step 3350, loss: 0.41355758905410767, step time: 17.17090606689453ms\r\n",,terminal_output +17617,13608836,"TERMINAL",0,0,"Step 3351, loss: 0.09461047500371933, step time: 
17.67277717590332ms\r\n",,terminal_output +17618,13608899,"TERMINAL",0,0,"Step 3352, loss: 0.07282578200101852, step time: 17.377614974975586ms\r\n",,terminal_output +17619,13608961,"TERMINAL",0,0,"Step 3353, loss: 0.06205200031399727, step time: 17.500638961791992ms\r\n",,terminal_output +17620,13609054,"TERMINAL",0,0,"Step 3354, loss: 0.24349932372570038, step time: 17.284393310546875ms\r\n",,terminal_output +17621,13609159,"TERMINAL",0,0,"Step 3355, loss: 0.22287628054618835, step time: 17.693519592285156ms\r\nStep 3356, loss: 0.323066383600235, step time: 17.313241958618164ms\r\n",,terminal_output +17622,13609222,"TERMINAL",0,0,"Step 3357, loss: 0.12648123502731323, step time: 17.58289337158203ms\r\n",,terminal_output +17623,13609282,"TERMINAL",0,0,"Step 3358, loss: 0.20605050027370453, step time: 17.316102981567383ms\r\n",,terminal_output +17624,13609344,"TERMINAL",0,0,"Step 3359, loss: 0.0866069495677948, step time: 17.383575439453125ms\r\n",,terminal_output +17625,13609404,"TERMINAL",0,0,"Step 3360, loss: 0.1915995478630066, step time: 17.320871353149414ms\r\n",,terminal_output +17626,13609466,"TERMINAL",0,0,"Step 3361, loss: 0.09873499721288681, step time: 18.472909927368164ms\r\n",,terminal_output +17627,13609530,"TERMINAL",0,0,"Step 3362, loss: 0.12188823521137238, step time: 17.40288734436035ms\r\n",,terminal_output +17628,13609595,"TERMINAL",0,0,"Step 3363, loss: 0.3119605481624603, step time: 17.44532585144043ms\r\n",,terminal_output +17629,13609618,"genie.py",2386,0,"",python,selection_mouse +17630,13609648,"TERMINAL",0,0,"Step 3364, loss: 0.10000665485858917, step time: 17.34757423400879ms\r\n",,terminal_output +17631,13609749,"TERMINAL",0,0,"Step 3365, loss: 0.06672189384698868, step time: 19.306182861328125ms\r\n",,terminal_output +17632,13609767,"genie.py",2381,6,"videos",python,selection_mouse +17633,13609845,"TERMINAL",0,0,"Step 3366, loss: 0.1227022185921669, step time: 18.69344711303711ms\r\nStep 3367, loss: 0.2086683213710785, step time: 18.126487731933594ms\r\n",,terminal_output +17634,13609940,"TERMINAL",0,0,"Step 3368, loss: 0.13076646625995636, step time: 17.3947811126709ms\r\n",,terminal_output +17635,13609994,"TERMINAL",0,0,"Step 3369, loss: 0.32741937041282654, step time: 17.592668533325195ms\r\n",,terminal_output +17636,13610057,"TERMINAL",0,0,"Step 3370, loss: 0.10540448874235153, step time: 17.54140853881836ms\r\n",,terminal_output +17637,13610152,"TERMINAL",0,0,"Step 3371, loss: 0.18545041978359222, step time: 20.685434341430664ms\r\nStep 3372, loss: 0.3131660521030426, step time: 17.238855361938477ms\r\n",,terminal_output +17638,13610254,"TERMINAL",0,0,"Step 3373, loss: 0.2099725604057312, step time: 17.6851749420166ms\r\n",,terminal_output +17639,13610316,"TERMINAL",0,0,"Step 3374, loss: 0.06659257411956787, step time: 17.17686653137207ms\r\n",,terminal_output +17640,13610382,"TERMINAL",0,0,"Step 3375, loss: 0.07155613601207733, step time: 17.409801483154297ms\r\n",,terminal_output +17641,13610436,"TERMINAL",0,0,"Step 3376, loss: 0.274522989988327, step time: 17.338991165161133ms\r\n",,terminal_output +17642,13610499,"TERMINAL",0,0,"Step 3377, loss: 0.1114390641450882, step time: 17.353057861328125ms\r\n",,terminal_output +17643,13610562,"TERMINAL",0,0,"Step 3378, loss: 0.25075483322143555, step time: 17.14801788330078ms\r\n",,terminal_output +17644,13610626,"TERMINAL",0,0,"Step 3379, loss: 0.07027757167816162, step time: 17.669200897216797ms\r\n",,terminal_output +17645,13610725,"TERMINAL",0,0,"Step 3380, loss: 0.0676468089222908, step time: 
17.17519760131836ms\r\nStep 3381, loss: 0.09506569057703018, step time: 17.385005950927734ms\r\n",,terminal_output +17646,13610786,"TERMINAL",0,0,"Step 3382, loss: 0.21767546236515045, step time: 17.37809181213379ms\r\n",,terminal_output +17647,13610849,"TERMINAL",0,0,"Step 3383, loss: 0.18393133580684662, step time: 17.390012741088867ms\r\n",,terminal_output +17648,13610913,"TERMINAL",0,0,"Step 3384, loss: 0.13064582645893097, step time: 17.168521881103516ms\r\n",,terminal_output +17649,13610976,"TERMINAL",0,0,"Step 3385, loss: 0.07020436972379684, step time: 17.544269561767578ms\r\n",,terminal_output +17650,13611039,"TERMINAL",0,0,"Step 3386, loss: 0.09694182872772217, step time: 17.268657684326172ms\r\n",,terminal_output +17651,13611103,"TERMINAL",0,0,"Step 3387, loss: 0.0663222074508667, step time: 17.338037490844727ms\r\n",,terminal_output +17652,13611164,"TERMINAL",0,0,"Step 3388, loss: 0.17681261897087097, step time: 17.281770706176758ms\r\n",,terminal_output +17653,13611225,"TERMINAL",0,0,"Step 3389, loss: 0.10244457423686981, step time: 17.36307144165039ms\r\n",,terminal_output +17654,13611288,"TERMINAL",0,0,"Step 3390, loss: 0.09945786744356155, step time: 20.107507705688477ms\r\n",,terminal_output +17655,13611349,"TERMINAL",0,0,"Step 3391, loss: 0.19876538217067719, step time: 22.184371948242188ms\r\n",,terminal_output +17656,13611430,"TERMINAL",0,0,"Step 3392, loss: 0.10515087842941284, step time: 18.71323585510254ms\r\n",,terminal_output +17657,13611482,"TERMINAL",0,0,"Step 3393, loss: 0.2972913980484009, step time: 18.148422241210938ms\r\n",,terminal_output +17658,13611546,"TERMINAL",0,0,"Step 3394, loss: 0.11138254404067993, step time: 17.809391021728516ms\r\n",,terminal_output +17659,13611607,"TERMINAL",0,0,"Step 3395, loss: 0.12167888134717941, step time: 17.737627029418945ms\r\n",,terminal_output +17660,13611759,"TERMINAL",0,0,"Step 3396, loss: 0.2410019040107727, step time: 17.466306686401367ms\r\nStep 3397, loss: 0.05511289834976196, step time: 17.720460891723633ms\r\n",,terminal_output +17661,13611812,"TERMINAL",0,0,"Step 3398, loss: 0.12874151766300201, step time: 17.230749130249023ms\r\n",,terminal_output +17662,13611859,"genie.py",2430,0,"",python,selection_mouse +17663,13611917,"TERMINAL",0,0,"Step 3399, loss: 0.1439979374408722, step time: 17.4407958984375ms\r\nStep 3400, loss: 0.059102240949869156, step time: 17.428159713745117ms\r\n",,terminal_output +17664,13611971,"TERMINAL",0,0,"Step 3401, loss: 0.1672317385673523, step time: 17.48943328857422ms\r\n",,terminal_output +17665,13612002,"genie.py",2424,6,"videos",python,selection_mouse +17666,13612054,"TERMINAL",0,0,"Step 3402, loss: 0.09043605625629425, step time: 17.19832420349121ms\r\n",,terminal_output +17667,13612107,"TERMINAL",0,0,"Step 3403, loss: 0.05679565295577049, step time: 17.54450798034668ms\r\n",,terminal_output +17668,13612175,"TERMINAL",0,0,"Step 3404, loss: 0.053214315325021744, step time: 17.299652099609375ms\r\n",,terminal_output +17669,13612235,"TERMINAL",0,0,"Step 3405, loss: 0.10140431672334671, step time: 17.49587059020996ms\r\n",,terminal_output +17670,13612299,"TERMINAL",0,0,"Step 3406, loss: 0.18480154871940613, step time: 17.37213134765625ms\r\n",,terminal_output +17671,13612362,"TERMINAL",0,0,"Step 3407, loss: 0.10752306878566742, step time: 17.468690872192383ms\r\n",,terminal_output +17672,13612425,"TERMINAL",0,0,"Step 3408, loss: 0.04553989693522453, step time: 17.221927642822266ms\r\n",,terminal_output +17673,13612485,"TERMINAL",0,0,"Step 3409, loss: 0.07011586427688599, step time: 
17.574310302734375ms\r\n",,terminal_output +17674,13612544,"TERMINAL",0,0,"Step 3410, loss: 0.055868156254291534, step time: 17.321109771728516ms\r\n",,terminal_output +17675,13612605,"TERMINAL",0,0,"Step 3411, loss: 0.07217677682638168, step time: 17.43292808532715ms\r\n",,terminal_output +17676,13612663,"TERMINAL",0,0,"Step 3412, loss: 0.042895518243312836, step time: 17.412662506103516ms\r\n",,terminal_output +17677,13612725,"TERMINAL",0,0,"Step 3413, loss: 0.09877589344978333, step time: 17.779827117919922ms\r\n",,terminal_output +17678,13612784,"TERMINAL",0,0,"Step 3414, loss: 0.12762193381786346, step time: 17.537593841552734ms\r\n",,terminal_output +17679,13612878,"TERMINAL",0,0,"Step 3415, loss: 0.11802063882350922, step time: 17.67873764038086ms\r\n",,terminal_output +17680,13612929,"TERMINAL",0,0,"Step 3416, loss: 0.06813482940196991, step time: 17.2269344329834ms\r\n",,terminal_output +17681,13613022,"TERMINAL",0,0,"Step 3417, loss: 0.08499997109174728, step time: 17.64965057373047ms\r\n",,terminal_output +17682,13613074,"TERMINAL",0,0,"Step 3418, loss: 0.10405539721250534, step time: 18.023014068603516ms\r\n",,terminal_output +17683,13613134,"genie.py",2658,0,"",python,selection_mouse +17684,13613173,"TERMINAL",0,0,"Step 3419, loss: 0.10281914472579956, step time: 21.132707595825195ms\r\nStep 3420, loss: 0.06325463205575943, step time: 20.309925079345703ms\r\n",,terminal_output +17685,13613295,"genie.py",2651,14,"latent_actions",python,selection_mouse +17686,13613474,"TERMINAL",0,0,"Step 3421, loss: 0.040876809507608414, step time: 302.30164527893066ms\r\n",,terminal_output +17687,13613539,"TERMINAL",0,0,"Step 3422, loss: 0.4282323718070984, step time: 25.1007080078125ms\r\n",,terminal_output +17688,13613663,"TERMINAL",0,0,"Step 3423, loss: 0.04057393595576286, step time: 20.0045108795166ms\r\n",,terminal_output +17689,13613669,"TERMINAL",0,0,"Step 3424, loss: 0.2697887420654297, step time: 18.34273338317871ms\r\n",,terminal_output +17690,13613727,"TERMINAL",0,0,"Step 3425, loss: 0.0683782547712326, step time: 18.0661678314209ms\r\n",,terminal_output +17691,13613826,"TERMINAL",0,0,"Step 3426, loss: 0.08905091881752014, step time: 17.621278762817383ms\r\n",,terminal_output +17692,13613871,"genie.py",2581,0,"",python,selection_mouse +17693,13613925,"TERMINAL",0,0,"Step 3427, loss: 0.12736941874027252, step time: 17.885208129882812ms\r\nStep 3428, loss: 0.07803475111722946, step time: 17.394542694091797ms\r\n",,terminal_output +17694,13614028,"genie.py",2573,12,"video_tokens",python,selection_mouse +17695,13614029,"TERMINAL",0,0,"Step 3429, loss: 0.15836773812770844, step time: 17.586469650268555ms\r\n",,terminal_output +17696,13614107,"TERMINAL",0,0,"Step 3430, loss: 0.07580415904521942, step time: 17.643213272094727ms\r\nStep 3431, loss: 0.05297442153096199, step time: 17.50469207763672ms\r\n",,terminal_output +17697,13614172,"TERMINAL",0,0,"Step 3432, loss: 0.1225447952747345, step time: 17.458677291870117ms\r\n",,terminal_output +17698,13614232,"TERMINAL",0,0,"Step 3433, loss: 0.04727773368358612, step time: 17.78411865234375ms\r\n",,terminal_output +17699,13614294,"TERMINAL",0,0,"Step 3434, loss: 0.18369297683238983, step time: 17.38739013671875ms\r\n",,terminal_output +17700,13614356,"TERMINAL",0,0,"Step 3435, loss: 0.16267609596252441, step time: 17.521142959594727ms\r\n",,terminal_output +17701,13614416,"TERMINAL",0,0,"Step 3436, loss: 0.04896881431341171, step time: 17.496347427368164ms\r\n",,terminal_output +17702,13614477,"TERMINAL",0,0,"Step 3437, loss: 
0.07550359517335892, step time: 17.408132553100586ms\r\n",,terminal_output +17703,13614540,"TERMINAL",0,0,"Step 3438, loss: 0.060347579419612885, step time: 17.218589782714844ms\r\n",,terminal_output +17704,13614603,"TERMINAL",0,0,"Step 3439, loss: 0.19163797795772552, step time: 17.835140228271484ms\r\n",,terminal_output +17705,13614667,"TERMINAL",0,0,"Step 3440, loss: 0.28065818548202515, step time: 17.322540283203125ms\r\n",,terminal_output +17706,13614730,"TERMINAL",0,0,"Step 3441, loss: 0.09451032429933548, step time: 17.401933670043945ms\r\n",,terminal_output +17707,13614792,"TERMINAL",0,0,"Step 3442, loss: 0.11098161339759827, step time: 17.571210861206055ms\r\n",,terminal_output +17708,13614856,"TERMINAL",0,0,"Step 3443, loss: 0.14761953055858612, step time: 17.644405364990234ms\r\n",,terminal_output +17709,13614921,"TERMINAL",0,0,"Step 3444, loss: 0.05269185081124306, step time: 17.33088493347168ms\r\n",,terminal_output +17710,13614967,"genie.py",2620,0,"",python,selection_mouse +17711,13614989,"TERMINAL",0,0,"Step 3445, loss: 0.27526816725730896, step time: 17.67253875732422ms\r\n",,terminal_output +17712,13615055,"TERMINAL",0,0,"Step 3446, loss: 0.07140276581048965, step time: 17.24839210510254ms\r\n",,terminal_output +17713,13615114,"genie.py",2608,17,"tokenizer_outputs",python,selection_mouse +17714,13615171,"TERMINAL",0,0,"Step 3447, loss: 0.05802386254072189, step time: 17.423152923583984ms\r\nStep 3448, loss: 0.08498916029930115, step time: 17.492055892944336ms\r\n",,terminal_output +17715,13615266,"TERMINAL",0,0,"Step 3449, loss: 0.06533670425415039, step time: 17.43173599243164ms\r\n",,terminal_output +17716,13615319,"TERMINAL",0,0,"Step 3450, loss: 0.05058617889881134, step time: 17.490386962890625ms\r\n",,terminal_output +17717,13615423,"TERMINAL",0,0,"Step 3451, loss: 0.04919327422976494, step time: 17.69542694091797ms\r\nStep 3452, loss: 0.13581545650959015, step time: 17.315149307250977ms\r\n",,terminal_output +17718,13615490,"TERMINAL",0,0,"Step 3453, loss: 0.05445489287376404, step time: 17.595529556274414ms\r\n",,terminal_output +17719,13615548,"TERMINAL",0,0,"Step 3454, loss: 0.09137549996376038, step time: 17.469167709350586ms\r\n",,terminal_output +17720,13615621,"TERMINAL",0,0,"Step 3455, loss: 0.048696938902139664, step time: 17.407655715942383ms\r\n",,terminal_output +17721,13615646,"genie.py",2635,0,"",python,selection_mouse +17722,13615680,"TERMINAL",0,0,"Step 3456, loss: 0.04877118766307831, step time: 17.268657684326172ms\r\n",,terminal_output +17723,13615748,"TERMINAL",0,0,"Step 3457, loss: 0.2116924524307251, step time: 17.57049560546875ms\r\n",,terminal_output +17724,13615781,"genie.py",2634,4,"""]),",python,selection_mouse +17725,13615837,"TERMINAL",0,0,"Step 3458, loss: 0.24677036702632904, step time: 17.203569412231445ms\r\n",,terminal_output +17726,13615889,"TERMINAL",0,0,"Step 3459, loss: 0.09218882769346237, step time: 17.425537109375ms\r\n",,terminal_output +17727,13615942,"TERMINAL",0,0,"Step 3460, loss: 0.3122272789478302, step time: 17.538785934448242ms\r\n",,terminal_output +17728,13616067,"TERMINAL",0,0,"Step 3461, loss: 0.04633995145559311, step time: 17.47870445251465ms\r\nStep 3462, loss: 0.48346513509750366, step time: 17.239093780517578ms\r\n",,terminal_output +17729,13616200,"TERMINAL",0,0,"Step 3463, loss: 0.04466058313846588, step time: 17.635345458984375ms\r\n",,terminal_output +17730,13616253,"TERMINAL",0,0,"Step 3464, loss: 0.5184741020202637, step time: 17.281770706176758ms\r\nStep 3465, loss: 0.05563613772392273, step time: 
17.369747161865234ms\r\n",,terminal_output +17731,13616291,"genie.py",2630,0,"",python,selection_mouse +17732,13616336,"TERMINAL",0,0,"Step 3466, loss: 0.0500553622841835, step time: 17.731428146362305ms\r\n",,terminal_output +17733,13616496,"genie.py",2627,7,"indices",python,selection_mouse +17734,13616497,"TERMINAL",0,0,"Step 3467, loss: 0.12077855318784714, step time: 17.575740814208984ms\r\nStep 3468, loss: 0.12854544818401337, step time: 17.467975616455078ms\r\n",,terminal_output +17735,13616557,"TERMINAL",0,0,"Step 3469, loss: 0.29672378301620483, step time: 17.64225959777832ms\r\nStep 3470, loss: 0.10543324798345566, step time: 17.234325408935547ms\r\n",,terminal_output +17736,13616701,"TERMINAL",0,0,"Step 3471, loss: 0.046313852071762085, step time: 17.551898956298828ms\r\n",,terminal_output +17737,13616845,"TERMINAL",0,0,"Step 3472, loss: 0.08888714760541916, step time: 17.61913299560547ms\r\nStep 3473, loss: 0.07807202637195587, step time: 18.53466033935547ms\r\n",,terminal_output +17738,13616908,"TERMINAL",0,0,"Step 3474, loss: 0.05895419418811798, step time: 17.366409301757812ms\r\nStep 3475, loss: 0.08593084663152695, step time: 17.730712890625ms\r\n",,terminal_output +17739,13617029,"genie.py",2617,0,"",python,selection_mouse +17740,13617030,"TERMINAL",0,0,"Step 3476, loss: 0.060523826628923416, step time: 17.299890518188477ms\r\n",,terminal_output +17741,13617062,"genie.py",2608,17,"tokenizer_outputs",python,selection_mouse +17742,13617234,"TERMINAL",0,0,"Step 3477, loss: 0.053724609315395355, step time: 18.2952880859375ms\r\nStep 3478, loss: 0.10039108991622925, step time: 17.546415328979492ms\r\nStep 3479, loss: 0.053630124777555466, step time: 17.52614974975586ms\r\n",,terminal_output +17743,13617292,"TERMINAL",0,0,"Step 3480, loss: 0.02538667805492878, step time: 17.389535903930664ms\r\nStep 3481, loss: 0.07930725067853928, step time: 18.0814266204834ms\r\n",,terminal_output +17744,13617420,"TERMINAL",0,0,"Step 3482, loss: 0.819751501083374, step time: 17.216205596923828ms\r\n",,terminal_output +17745,13617421,"TERMINAL",0,0,"Step 3483, loss: 0.33165547251701355, step time: 17.627716064453125ms\r\n",,terminal_output +17746,13617474,"TERMINAL",0,0,"Step 3484, loss: 0.04994545504450798, step time: 17.530441284179688ms\r\n",,terminal_output +17747,13617527,"TERMINAL",0,0,"Step 3485, loss: 0.12013246864080429, step time: 17.498016357421875ms\r\n",,terminal_output +17748,13617579,"TERMINAL",0,0,"Step 3486, loss: 0.05580003559589386, step time: 17.1964168548584ms\r\n",,terminal_output +17749,13617628,"genie.py",2633,0,"",python,selection_mouse +17750,13617698,"TERMINAL",0,0,"Step 3487, loss: 0.13183830678462982, step time: 17.6846981048584ms\r\n",,terminal_output +17751,13617848,"genie.py",2627,7,"indices",python,selection_mouse +17752,13617849,"TERMINAL",0,0,"Step 3488, loss: 0.06429482251405716, step time: 17.18735694885254ms\r\nStep 3489, loss: 0.9458694458007812, step time: 17.52614974975586ms\r\n",,terminal_output +17753,13617903,"TERMINAL",0,0,"Step 3490, loss: 0.21339303255081177, step time: 17.498493194580078ms\r\nStep 3491, loss: 0.21678222715854645, step time: 17.345905303955078ms\r\n",,terminal_output +17754,13618104,"TERMINAL",0,0,"Step 3492, loss: 0.5568541884422302, step time: 17.23480224609375ms\r\nStep 3493, loss: 0.15613791346549988, step time: 17.58551597595215ms\r\n",,terminal_output +17755,13618257,"TERMINAL",0,0,"Step 3494, loss: 0.19633042812347412, step time: 17.21978187561035ms\r\nStep 3495, loss: 0.11511830985546112, step time: 
17.441749572753906ms\r\n",,terminal_output +17756,13618279,"genie.py",2620,0,"",python,selection_mouse +17757,13618322,"TERMINAL",0,0,"Step 3496, loss: 0.45753979682922363, step time: 17.89093017578125ms\r\nStep 3497, loss: 1.0607818365097046, step time: 17.618894577026367ms\r\nStep 3498, loss: 0.05575904622673988, step time: 17.48347282409668ms\r\n",,terminal_output +17758,13618463,"genie.py",2608,17,"tokenizer_outputs",python,selection_mouse +17759,13618465,"TERMINAL",0,0,"Step 3499, loss: 0.043905552476644516, step time: 17.91095733642578ms\r\n",,terminal_output +17760,13618946,"genie.py",2634,0,"",python,selection_mouse +17761,13619076,"genie.py",2627,7,"indices",python,selection_mouse +17762,13619699,"genie.py",2629,0,"",python,selection_mouse +17763,13619699,"genie.py",2627,7,"indices",python,selection_mouse +17764,13620213,"genie.py",2622,0,"",python,selection_mouse +17765,13620368,"genie.py",2608,17,"tokenizer_outputs",python,selection_mouse +17766,13620865,"genie.py",2631,0,"",python,selection_mouse +17767,13621000,"genie.py",2627,7,"indices",python,selection_mouse +17768,13621073,"TERMINAL",0,0,"Step 3500, loss: 0.13494788110256195, step time: 26.318788528442383ms\r\n",,terminal_output +17769,13621139,"TERMINAL",0,0,"Step 3501, loss: 0.06662134826183319, step time: 25.293827056884766ms\r\n",,terminal_output +17770,13621208,"TERMINAL",0,0,"Step 3502, loss: 0.25739526748657227, step time: 20.132064819335938ms\r\n",,terminal_output +17771,13621270,"TERMINAL",0,0,"Step 3503, loss: 0.12720611691474915, step time: 20.519018173217773ms\r\n",,terminal_output +17772,13621334,"TERMINAL",0,0,"Step 3504, loss: 0.048059090971946716, step time: 18.924951553344727ms\r\n",,terminal_output +17773,13621398,"TERMINAL",0,0,"Step 3505, loss: 0.14793497323989868, step time: 19.025087356567383ms\r\n",,terminal_output +17774,13621461,"TERMINAL",0,0,"Step 3506, loss: 0.05893438309431076, step time: 18.845796585083008ms\r\n",,terminal_output +17775,13621501,"genie.py",2621,0,"",python,selection_mouse +17776,13621548,"TERMINAL",0,0,"Step 3507, loss: 0.15460734069347382, step time: 18.672704696655273ms\r\n",,terminal_output +17777,13621616,"TERMINAL",0,0,"Step 3508, loss: 0.14078642427921295, step time: 18.431901931762695ms\r\n",,terminal_output +17778,13621651,"genie.py",2608,17,"tokenizer_outputs",python,selection_mouse +17779,13621726,"TERMINAL",0,0,"Step 3509, loss: 0.12463772296905518, step time: 19.293785095214844ms\r\nStep 3510, loss: 0.4281243085861206, step time: 18.297672271728516ms\r\n",,terminal_output +17780,13621820,"TERMINAL",0,0,"Step 3511, loss: 0.15567797422409058, step time: 18.42021942138672ms\r\n",,terminal_output +17781,13621911,"genie.py",2608,18,"tokenizer_outputs[",python,selection_mouse +17782,13621930,"genie.py",2608,26,"tokenizer_outputs[""indices",python,selection_mouse +17783,13621931,"TERMINAL",0,0,"Step 3512, loss: 0.0788796991109848, step time: 18.67389678955078ms\r\nStep 3513, loss: 0.1733950525522232, step time: 18.043994903564453ms\r\n",,terminal_output +17784,13622032,"TERMINAL",0,0,"Step 3514, loss: 0.11842253059148788, step time: 17.42076873779297ms\r\nStep 3515, loss: 0.0693470686674118, step time: 18.691301345825195ms\r\n",,terminal_output +17785,13622098,"TERMINAL",0,0,"Step 3516, loss: 0.16702209413051605, step time: 18.158435821533203ms\r\n",,terminal_output +17786,13622163,"TERMINAL",0,0,"Step 3517, loss: 0.10528648644685745, step time: 18.145084381103516ms\r\n",,terminal_output +17787,13622226,"TERMINAL",0,0,"Step 3518, loss: 0.11719679087400436, step time: 
17.6544189453125ms\r\n",,terminal_output +17788,13622290,"TERMINAL",0,0,"Step 3519, loss: 0.3828565180301666, step time: 17.16756820678711ms\r\n",,terminal_output +17789,13622305,"genie.py",2633,0,"",python,selection_mouse +17790,13622306,"genie.py",2627,7,"indices",python,selection_mouse +17791,13622403,"TERMINAL",0,0,"Step 3520, loss: 0.0913020446896553, step time: 17.221689224243164ms\r\n",,terminal_output +17792,13622415,"TERMINAL",0,0,"Step 3521, loss: 0.10088204592466354, step time: 17.91214942932129ms\r\n",,terminal_output +17793,13622514,"TERMINAL",0,0,"Step 3522, loss: 0.08237361162900925, step time: 17.182350158691406ms\r\n",,terminal_output +17794,13622576,"TERMINAL",0,0,"Step 3523, loss: 0.5141103267669678, step time: 17.954111099243164ms\r\n",,terminal_output +17795,13622638,"TERMINAL",0,0,"Step 3524, loss: 0.09850461781024933, step time: 17.458677291870117ms\r\n",,terminal_output +17796,13622698,"TERMINAL",0,0,"Step 3525, loss: 0.12847691774368286, step time: 17.18425750732422ms\r\n",,terminal_output +17797,13622762,"TERMINAL",0,0,"Step 3526, loss: 0.16751621663570404, step time: 17.27461814880371ms\r\n",,terminal_output +17798,13622823,"TERMINAL",0,0,"Step 3527, loss: 0.11640352010726929, step time: 17.67706871032715ms\r\n",,terminal_output +17799,13622885,"TERMINAL",0,0,"Step 3528, loss: 0.05212179571390152, step time: 17.4100399017334ms\r\n",,terminal_output +17800,13622921,"genie.py",2620,0,"",python,selection_mouse +17801,13622989,"TERMINAL",0,0,"Step 3529, loss: 0.17264604568481445, step time: 17.27128028869629ms\r\nStep 3530, loss: 0.04919895529747009, step time: 17.77505874633789ms\r\n",,terminal_output +17802,13623048,"genie.py",2608,17,"tokenizer_outputs",python,selection_mouse +17803,13623103,"TERMINAL",0,0,"Step 3531, loss: 0.057836029678583145, step time: 18.378257751464844ms\r\nStep 3532, loss: 0.12225714325904846, step time: 20.50042152404785ms\r\n",,terminal_output +17804,13623175,"TERMINAL",0,0,"Step 3533, loss: 0.12304091453552246, step time: 20.186901092529297ms\r\n",,terminal_output +17805,13623238,"TERMINAL",0,0,"Step 3534, loss: 0.1377551406621933, step time: 18.800020217895508ms\r\n",,terminal_output +17806,13623308,"TERMINAL",0,0,"Step 3535, loss: 0.09172436594963074, step time: 18.146514892578125ms\r\n",,terminal_output +17807,13623361,"TERMINAL",0,0,"Step 3536, loss: 0.059750430285930634, step time: 18.100976943969727ms\r\n",,terminal_output +17808,13623483,"TERMINAL",0,0,"Step 3537, loss: 0.20476645231246948, step time: 17.694950103759766ms\r\nStep 3538, loss: 0.031206704676151276, step time: 17.463207244873047ms\r\n",,terminal_output +17809,13623576,"TERMINAL",0,0,"Step 3539, loss: 0.12928617000579834, step time: 17.908573150634766ms\r\n",,terminal_output +17810,13623676,"TERMINAL",0,0,"Step 3540, loss: 0.044757138937711716, step time: 17.574071884155273ms\r\nStep 3541, loss: 0.07085419446229935, step time: 17.437458038330078ms\r\n",,terminal_output +17811,13623696,"genie.py",2630,0,"",python,selection_mouse +17812,13623736,"TERMINAL",0,0,"Step 3542, loss: 0.11227310448884964, step time: 17.65275001525879ms\r\n",,terminal_output +17813,13623804,"TERMINAL",0,0,"Step 3543, loss: 0.05102323740720749, step time: 17.136573791503906ms\r\n",,terminal_output +17814,13623846,"genie.py",2627,7,"indices",python,selection_mouse +17815,13623924,"TERMINAL",0,0,"Step 3544, loss: 0.04901835694909096, step time: 17.488956451416016ms\r\nStep 3545, loss: 0.1555357575416565, step time: 17.62247085571289ms\r\n",,terminal_output +17816,13624021,"TERMINAL",0,0,"Step 
3546, loss: 0.039230942726135254, step time: 17.26841926574707ms\r\n",,terminal_output +17817,13624084,"TERMINAL",0,0,"Step 3547, loss: 0.05485823377966881, step time: 17.18902587890625ms\r\n",,terminal_output +17818,13624144,"TERMINAL",0,0,"Step 3548, loss: 0.11319120973348618, step time: 17.600297927856445ms\r\n",,terminal_output +17819,13624195,"TERMINAL",0,0,"Step 3549, loss: 0.049700528383255005, step time: 17.179489135742188ms\r\n",,terminal_output +17820,13624289,"TERMINAL",0,0,"Step 3550, loss: 0.06235366314649582, step time: 17.357826232910156ms\r\n",,terminal_output +17821,13624375,"TERMINAL",0,0,"Step 3551, loss: 0.11149109899997711, step time: 17.711639404296875ms\r\n",,terminal_output +17822,13624430,"TERMINAL",0,0,"Step 3552, loss: 0.16027581691741943, step time: 17.44222640991211ms\r\nStep 3553, loss: 0.0717523992061615, step time: 17.245769500732422ms\r\n",,terminal_output +17823,13624455,"genie.py",2621,0,"",python,selection_mouse +17824,13624495,"TERMINAL",0,0,"Step 3554, loss: 0.05223160609602928, step time: 17.729759216308594ms\r\n",,terminal_output +17825,13624559,"TERMINAL",0,0,"Step 3555, loss: 0.23870661854743958, step time: 17.309188842773438ms\r\n",,terminal_output +17826,13624584,"genie.py",2608,17,"tokenizer_outputs",python,selection_mouse +17827,13624624,"TERMINAL",0,0,"Step 3556, loss: 0.045948326587677, step time: 17.621994018554688ms\r\n",,terminal_output +17828,13624691,"TERMINAL",0,0,"Step 3557, loss: 0.09230595082044601, step time: 17.6088809967041ms\r\n",,terminal_output +17829,13624759,"TERMINAL",0,0,"Step 3558, loss: 0.1696528196334839, step time: 17.26222038269043ms\r\n",,terminal_output +17830,13624819,"TERMINAL",0,0,"Step 3559, loss: 0.31696346402168274, step time: 17.05455780029297ms\r\n",,terminal_output +17831,13624881,"TERMINAL",0,0,"Step 3560, loss: 0.040709275752305984, step time: 18.566608428955078ms\r\n",,terminal_output +17832,13624943,"TERMINAL",0,0,"Step 3561, loss: 0.04668324813246727, step time: 17.14038848876953ms\r\n",,terminal_output +17833,13625006,"TERMINAL",0,0,"Step 3562, loss: 0.06195283308625221, step time: 17.35687255859375ms\r\n",,terminal_output +17834,13625069,"TERMINAL",0,0,"Step 3563, loss: 0.09906554967164993, step time: 17.5473690032959ms\r\n",,terminal_output +17835,13625154,"genie.py",2629,0,"",python,selection_mouse +17836,13625182,"TERMINAL",0,0,"Step 3564, loss: 0.053655955940485, step time: 17.40097999572754ms\r\nStep 3565, loss: 0.034589383751153946, step time: 17.15993881225586ms\r\n",,terminal_output +17837,13625252,"TERMINAL",0,0,"Step 3566, loss: 0.07773071527481079, step time: 18.916606903076172ms\r\n",,terminal_output +17838,13625302,"genie.py",2627,7,"indices",python,selection_mouse +17839,13625393,"TERMINAL",0,0,"Step 3567, loss: 0.6718458533287048, step time: 17.427682876586914ms\r\nStep 3568, loss: 0.0719611644744873, step time: 17.64082908630371ms\r\n",,terminal_output +17840,13625513,"TERMINAL",0,0,"Step 3569, loss: 0.08636244386434555, step time: 17.69399642944336ms\r\nStep 3570, loss: 0.07506956905126572, step time: 17.637968063354492ms\r\n",,terminal_output +17841,13625624,"TERMINAL",0,0,"Step 3571, loss: 0.5812150239944458, step time: 17.205476760864258ms\r\nStep 3572, loss: 0.06926298886537552, step time: 17.688989639282227ms\r\n",,terminal_output +17842,13625722,"TERMINAL",0,0,"Step 3573, loss: 0.5451415777206421, step time: 17.360687255859375ms\r\n",,terminal_output +17843,13625774,"TERMINAL",0,0,"Step 3574, loss: 0.22274330258369446, step time: 17.405033111572266ms\r\n",,terminal_output 
+17844,13625879,"TERMINAL",0,0,"Step 3575, loss: 0.14409422874450684, step time: 17.639636993408203ms\r\nStep 3576, loss: 0.6726343035697937, step time: 17.614126205444336ms\r\n",,terminal_output +17845,13625971,"TERMINAL",0,0,"Step 3577, loss: 0.1931881457567215, step time: 17.1811580657959ms\r\n",,terminal_output +17846,13626023,"TERMINAL",0,0,"Step 3578, loss: 0.11623430252075195, step time: 17.759323120117188ms\r\n",,terminal_output +17847,13626199,"TERMINAL",0,0,"Step 3579, loss: 0.043855223804712296, step time: 17.03166961669922ms\r\nStep 3580, loss: 0.09819520264863968, step time: 17.337322235107422ms\r\n",,terminal_output +17848,13626259,"TERMINAL",0,0,"Step 3581, loss: 0.059032149612903595, step time: 17.637252807617188ms\r\nStep 3582, loss: 0.04884573817253113, step time: 17.401933670043945ms\r\n",,terminal_output +17849,13626351,"TERMINAL",0,0,"Step 3583, loss: 0.050627123564481735, step time: 17.435550689697266ms\r\n",,terminal_output +17850,13626403,"TERMINAL",0,0,"Step 3584, loss: 0.04799493029713631, step time: 17.6544189453125ms\r\n",,terminal_output +17851,13626508,"TERMINAL",0,0,"Step 3585, loss: 0.040718112140893936, step time: 17.22097396850586ms\r\nStep 3586, loss: 0.08924107253551483, step time: 17.449378967285156ms\r\n",,terminal_output +17852,13626571,"TERMINAL",0,0,"Step 3587, loss: 0.03317401558160782, step time: 17.536163330078125ms\r\n",,terminal_output +17853,13626634,"TERMINAL",0,0,"Step 3588, loss: 0.05714767053723335, step time: 17.498254776000977ms\r\n",,terminal_output +17854,13626698,"TERMINAL",0,0,"Step 3589, loss: 0.05492322891950607, step time: 17.199039459228516ms\r\n",,terminal_output +17855,13626760,"TERMINAL",0,0,"Step 3590, loss: 0.1005137488245964, step time: 17.659902572631836ms\r\n",,terminal_output +17856,13626824,"TERMINAL",0,0,"Step 3591, loss: 0.14046108722686768, step time: 17.041921615600586ms\r\n",,terminal_output +17857,13626890,"TERMINAL",0,0,"Step 3592, loss: 0.1495077759027481, step time: 17.353057861328125ms\r\n",,terminal_output +17858,13626952,"TERMINAL",0,0,"Step 3593, loss: 0.056829970329999924, step time: 17.668485641479492ms\r\n",,terminal_output +17859,13627300,"TERMINAL",0,0,"Step 3594, loss: 0.05637076869606972, step time: 309.16500091552734ms\r\n",,terminal_output +17860,13627349,"TERMINAL",0,0,"Step 3595, loss: 0.11829005926847458, step time: 25.127649307250977ms\r\n",,terminal_output +17861,13627454,"TERMINAL",0,0,"Step 3596, loss: 0.2545446753501892, step time: 19.42300796508789ms\r\nStep 3597, loss: 0.04746675118803978, step time: 18.828868865966797ms\r\n",,terminal_output +17862,13627518,"TERMINAL",0,0,"Step 3598, loss: 0.07250452041625977, step time: 18.239736557006836ms\r\n",,terminal_output +17863,13627615,"TERMINAL",0,0,"Step 3599, loss: 0.05851905420422554, step time: 17.828702926635742ms\r\n",,terminal_output +17864,13627677,"TERMINAL",0,0,"Step 3600, loss: 0.7297011613845825, step time: 17.969846725463867ms\r\n",,terminal_output +17865,13627738,"TERMINAL",0,0,"Step 3601, loss: 0.09118116647005081, step time: 17.53830909729004ms\r\n",,terminal_output +17866,13627803,"TERMINAL",0,0,"Step 3602, loss: 0.08223311603069305, step time: 17.360210418701172ms\r\n",,terminal_output +17867,13627859,"TERMINAL",0,0,"Step 3603, loss: 0.07498189061880112, step time: 17.693519592285156ms\r\n",,terminal_output +17868,13627921,"TERMINAL",0,0,"Step 3604, loss: 0.34464094042778015, step time: 17.747163772583008ms\r\n",,terminal_output +17869,13627980,"TERMINAL",0,0,"Step 3605, loss: 0.06173497810959816, step time: 
17.314672470092773ms\r\n",,terminal_output +17870,13628041,"TERMINAL",0,0,"Step 3606, loss: 0.3493490219116211, step time: 17.674684524536133ms\r\n",,terminal_output +17871,13628113,"TERMINAL",0,0,"Step 3607, loss: 0.03492200747132301, step time: 17.114877700805664ms\r\n",,terminal_output +17872,13628201,"TERMINAL",0,0,"Step 3608, loss: 0.06547997146844864, step time: 17.156362533569336ms\r\nStep 3609, loss: 0.13555572926998138, step time: 17.70639419555664ms\r\n",,terminal_output +17873,13628267,"TERMINAL",0,0,"Step 3610, loss: 0.07983314245939255, step time: 17.577171325683594ms\r\n",,terminal_output +17874,13628392,"TERMINAL",0,0,"Step 3611, loss: 0.1024814248085022, step time: 17.20595359802246ms\r\nStep 3612, loss: 0.11252953857183456, step time: 17.838001251220703ms\r\n",,terminal_output +17875,13628454,"TERMINAL",0,0,"Step 3613, loss: 0.19542203843593597, step time: 17.406702041625977ms\r\n",,terminal_output +17876,13628521,"TERMINAL",0,0,"Step 3614, loss: 0.17789937555789948, step time: 17.40431785583496ms\r\n",,terminal_output +17877,13628599,"TERMINAL",0,0,"Step 3615, loss: 0.0682479664683342, step time: 17.406225204467773ms\r\n",,terminal_output +17878,13628648,"TERMINAL",0,0,"Step 3616, loss: 0.047743674367666245, step time: 17.615318298339844ms\r\n",,terminal_output +17879,13628712,"TERMINAL",0,0,"Step 3617, loss: 0.23129524290561676, step time: 32.0887565612793ms\r\n",,terminal_output +17880,13628821,"TERMINAL",0,0,"Step 3618, loss: 0.04917058348655701, step time: 27.07982063293457ms\r\n",,terminal_output +17881,13628884,"TERMINAL",0,0,"Step 3619, loss: 0.09353335201740265, step time: 19.374608993530273ms\r\n",,terminal_output +17882,13629007,"TERMINAL",0,0,"Step 3620, loss: 0.042232368141412735, step time: 34.80410575866699ms\r\n",,terminal_output +17883,13629048,"TERMINAL",0,0,"Step 3621, loss: 0.040689483284950256, step time: 21.220922470092773ms\r\nStep 3622, loss: 0.05730060115456581, step time: 20.661354064941406ms\r\n",,terminal_output +17884,13629111,"TERMINAL",0,0,"Step 3623, loss: 0.1339542120695114, step time: 19.855022430419922ms\r\n",,terminal_output +17885,13629175,"TERMINAL",0,0,"Step 3624, loss: 0.0606498047709465, step time: 19.21868324279785ms\r\n",,terminal_output +17886,13629279,"TERMINAL",0,0,"Step 3625, loss: 0.05114758387207985, step time: 17.855405807495117ms\r\n",,terminal_output +17887,13629332,"TERMINAL",0,0,"Step 3626, loss: 0.13117247819900513, step time: 17.671823501586914ms\r\n",,terminal_output +17888,13629439,"TERMINAL",0,0,"Step 3627, loss: 0.03805861249566078, step time: 17.67110824584961ms\r\nStep 3628, loss: 0.07946863770484924, step time: 17.650127410888672ms\r\n",,terminal_output +17889,13629555,"TERMINAL",0,0,"Step 3629, loss: 0.04202812537550926, step time: 17.31705665588379ms\r\nStep 3630, loss: 0.07112972438335419, step time: 17.885446548461914ms\r\n",,terminal_output +17890,13629622,"TERMINAL",0,0,"Step 3631, loss: 0.10500787198543549, step time: 17.528533935546875ms\r\n",,terminal_output +17891,13629681,"TERMINAL",0,0,"Step 3632, loss: 0.13310551643371582, step time: 17.553329467773438ms\r\n",,terminal_output +17892,13629745,"TERMINAL",0,0,"Step 3633, loss: 0.04024756699800491, step time: 18.106698989868164ms\r\n",,terminal_output +17893,13629825,"TERMINAL",0,0,"Step 3634, loss: 0.1098773181438446, step time: 17.82679557800293ms\r\n",,terminal_output +17894,13629886,"TERMINAL",0,0,"Step 3635, loss: 0.06221956014633179, step time: 17.37189292907715ms\r\n",,terminal_output +17895,13629950,"TERMINAL",0,0,"Step 3636, loss: 
0.03972109034657478, step time: 18.019914627075195ms\r\n",,terminal_output +17896,13630013,"TERMINAL",0,0,"Step 3637, loss: 0.08300292491912842, step time: 17.261028289794922ms\r\n",,terminal_output +17897,13630074,"TERMINAL",0,0,"Step 3638, loss: 0.12728211283683777, step time: 17.472267150878906ms\r\n",,terminal_output +17898,13630138,"TERMINAL",0,0,"Step 3639, loss: 0.2978692948818207, step time: 17.600536346435547ms\r\n",,terminal_output +17899,13630199,"TERMINAL",0,0,"Step 3640, loss: 0.05133834108710289, step time: 30.27653694152832ms\r\n",,terminal_output +17900,13630262,"TERMINAL",0,0,"Step 3641, loss: 0.4961668848991394, step time: 27.50229835510254ms\r\n",,terminal_output +17901,13630329,"TERMINAL",0,0,"Step 3642, loss: 0.04231693595647812, step time: 19.3941593170166ms\r\n",,terminal_output +17902,13630391,"TERMINAL",0,0,"Step 3643, loss: 0.07860824465751648, step time: 17.786502838134766ms\r\n",,terminal_output +17903,13630455,"TERMINAL",0,0,"Step 3644, loss: 0.10683774203062057, step time: 17.43292808532715ms\r\n",,terminal_output +17904,13630519,"TERMINAL",0,0,"Step 3645, loss: 0.06054333969950676, step time: 17.48204231262207ms\r\n",,terminal_output +17905,13630619,"TERMINAL",0,0,"Step 3646, loss: 0.0682557076215744, step time: 17.38595962524414ms\r\n",,terminal_output +17906,13630673,"TERMINAL",0,0,"Step 3647, loss: 0.04567126929759979, step time: 17.245769500732422ms\r\n",,terminal_output +17907,13630779,"TERMINAL",0,0,"Step 3648, loss: 0.06740818172693253, step time: 17.74287223815918ms\r\nStep 3649, loss: 0.035385869443416595, step time: 17.159700393676758ms\r\n",,terminal_output +17908,13630875,"TERMINAL",0,0,"Step 3650, loss: 0.05073706433176994, step time: 17.169713973999023ms\r\n",,terminal_output +17909,13630927,"TERMINAL",0,0,"Step 3651, loss: 0.07947582751512527, step time: 17.401456832885742ms\r\n",,terminal_output +17910,13631034,"TERMINAL",0,0,"Step 3652, loss: 0.13027244806289673, step time: 17.236948013305664ms\r\nStep 3653, loss: 0.07637286931276321, step time: 17.1511173248291ms\r\n",,terminal_output +17911,13631099,"TERMINAL",0,0,"Step 3654, loss: 0.17461737990379333, step time: 18.566131591796875ms\r\n",,terminal_output +17912,13631160,"TERMINAL",0,0,"Step 3655, loss: 0.03893847391009331, step time: 20.5686092376709ms\r\n",,terminal_output +17913,13631219,"TERMINAL",0,0,"Step 3656, loss: 0.21580849587917328, step time: 18.253087997436523ms\r\n",,terminal_output +17914,13631307,"TERMINAL",0,0,"Step 3657, loss: 0.03783132880926132, step time: 17.879486083984375ms\r\n",,terminal_output +17915,13631363,"TERMINAL",0,0,"Step 3658, loss: 0.2020265907049179, step time: 17.627477645874023ms\r\n",,terminal_output +17916,13631470,"TERMINAL",0,0,"Step 3659, loss: 0.39330819249153137, step time: 18.18060874938965ms\r\nStep 3660, loss: 0.05790166184306145, step time: 18.109560012817383ms\r\n",,terminal_output +17917,13631536,"TERMINAL",0,0,"Step 3661, loss: 0.7248721122741699, step time: 17.316102981567383ms\r\n",,terminal_output +17918,13631603,"TERMINAL",0,0,"Step 3662, loss: 0.49250373244285583, step time: 17.25482940673828ms\r\n",,terminal_output +17919,13631668,"TERMINAL",0,0,"Step 3663, loss: 0.07063573598861694, step time: 17.659664154052734ms\r\n",,terminal_output +17920,13631730,"TERMINAL",0,0,"Step 3664, loss: 0.296269029378891, step time: 24.297475814819336ms\r\n",,terminal_output +17921,13631794,"TERMINAL",0,0,"Step 3665, loss: 0.059292271733284, step time: 17.360925674438477ms\r\n",,terminal_output +17922,13631858,"TERMINAL",0,0,"Step 3666, loss: 
0.04397013783454895, step time: 17.649173736572266ms\r\n",,terminal_output +17923,13631921,"TERMINAL",0,0,"Step 3667, loss: 0.051961351186037064, step time: 17.125844955444336ms\r\n",,terminal_output +17924,13631986,"TERMINAL",0,0,"Step 3668, loss: 0.1607588678598404, step time: 18.011808395385742ms\r\n",,terminal_output +17925,13632050,"TERMINAL",0,0,"Step 3669, loss: 0.10125934332609177, step time: 18.01443099975586ms\r\n",,terminal_output +17926,13632124,"TERMINAL",0,0,"Step 3670, loss: 0.037771075963974, step time: 17.607688903808594ms\r\n",,terminal_output +17927,13632177,"TERMINAL",0,0,"Step 3671, loss: 0.04239509254693985, step time: 17.43173599243164ms\r\n",,terminal_output +17928,13632242,"TERMINAL",0,0,"Step 3672, loss: 0.05151873454451561, step time: 17.906665802001953ms\r\n",,terminal_output +17929,13632303,"TERMINAL",0,0,"Step 3673, loss: 0.12074366211891174, step time: 17.137527465820312ms\r\n",,terminal_output +17930,13632366,"TERMINAL",0,0,"Step 3674, loss: 0.04510174319148064, step time: 17.133235931396484ms\r\n",,terminal_output +17931,13632429,"TERMINAL",0,0,"Step 3675, loss: 0.1303495317697525, step time: 17.395973205566406ms\r\n",,terminal_output +17932,13632493,"TERMINAL",0,0,"Step 3676, loss: 0.07142212241888046, step time: 17.356395721435547ms\r\n",,terminal_output +17933,13632556,"TERMINAL",0,0,"Step 3677, loss: 0.050956327468156815, step time: 17.060041427612305ms\r\n",,terminal_output +17934,13632619,"TERMINAL",0,0,"Step 3678, loss: 0.04325643181800842, step time: 17.943859100341797ms\r\n",,terminal_output +17935,13632683,"TERMINAL",0,0,"Step 3679, loss: 0.17558884620666504, step time: 17.066240310668945ms\r\n",,terminal_output +17936,13632746,"TERMINAL",0,0,"Step 3680, loss: 0.1103796511888504, step time: 17.20905303955078ms\r\n",,terminal_output +17937,13632810,"TERMINAL",0,0,"Step 3681, loss: 0.06296499818563461, step time: 17.238855361938477ms\r\n",,terminal_output +17938,13632874,"TERMINAL",0,0,"Step 3682, loss: 0.06382119655609131, step time: 17.61007308959961ms\r\n",,terminal_output +17939,13632940,"TERMINAL",0,0,"Step 3683, loss: 0.056797225028276443, step time: 17.287015914916992ms\r\n",,terminal_output +17940,13633003,"TERMINAL",0,0,"Step 3684, loss: 0.07021103799343109, step time: 17.67277717590332ms\r\n",,terminal_output +17941,13633067,"TERMINAL",0,0,"Step 3685, loss: 0.039453621953725815, step time: 17.05312728881836ms\r\n",,terminal_output +17942,13633175,"TERMINAL",0,0,"Step 3686, loss: 0.16803006827831268, step time: 17.081499099731445ms\r\nStep 3687, loss: 0.04104249179363251, step time: 17.283201217651367ms\r\n",,terminal_output +17943,13633270,"TERMINAL",0,0,"Step 3688, loss: 0.04528610035777092, step time: 17.72332191467285ms\r\n",,terminal_output +17944,13633378,"TERMINAL",0,0,"Step 3689, loss: 0.04398451745510101, step time: 17.26388931274414ms\r\nStep 3690, loss: 0.11784522980451584, step time: 17.700910568237305ms\r\n",,terminal_output +17945,13633491,"TERMINAL",0,0,"Step 3691, loss: 0.12454160302877426, step time: 17.003774642944336ms\r\nStep 3692, loss: 0.050869543105363846, step time: 17.271041870117188ms\r\n",,terminal_output +17946,13633556,"TERMINAL",0,0,"Step 3693, loss: 0.11538983136415482, step time: 18.24188232421875ms\r\n",,terminal_output +17947,13633666,"TERMINAL",0,0,"Step 3694, loss: 0.03826415538787842, step time: 17.662525177001953ms\r\n",,terminal_output +17948,13633681,"TERMINAL",0,0,"Step 3695, loss: 0.17425239086151123, step time: 17.21787452697754ms\r\n",,terminal_output +17949,13633785,"TERMINAL",0,0,"Step 3696, 
loss: 0.04832637682557106, step time: 17.560958862304688ms\r\n",,terminal_output +17950,13633842,"TERMINAL",0,0,"Step 3697, loss: 0.07300341874361038, step time: 17.008543014526367ms\r\n",,terminal_output +17951,13633906,"TERMINAL",0,0,"Step 3698, loss: 0.07926168292760849, step time: 17.161846160888672ms\r\n",,terminal_output +17952,13633959,"TERMINAL",0,0,"Step 3699, loss: 0.03248462453484535, step time: 17.313718795776367ms\r\n",,terminal_output +17953,13634067,"TERMINAL",0,0,"Step 3700, loss: 0.038328930735588074, step time: 17.420291900634766ms\r\nStep 3701, loss: 0.435608446598053, step time: 17.145872116088867ms\r\n",,terminal_output +17954,13634185,"TERMINAL",0,0,"Step 3702, loss: 0.12426287680864334, step time: 17.771005630493164ms\r\nStep 3703, loss: 0.07544735819101334, step time: 17.04096794128418ms\r\n",,terminal_output +17955,13634248,"TERMINAL",0,0,"Step 3704, loss: 0.13900445401668549, step time: 17.267704010009766ms\r\n",,terminal_output +17956,13634309,"TERMINAL",0,0,"Step 3705, loss: 0.04198437184095383, step time: 17.33994483947754ms\r\n",,terminal_output +17957,13634437,"TERMINAL",0,0,"Step 3706, loss: 0.03482886776328087, step time: 17.397403717041016ms\r\nStep 3707, loss: 0.06493639200925827, step time: 17.011642456054688ms\r\n",,terminal_output +17958,13634502,"TERMINAL",0,0,"Step 3708, loss: 0.04598657786846161, step time: 17.689228057861328ms\r\n",,terminal_output +17959,13634565,"TERMINAL",0,0,"Step 3709, loss: 0.23483966290950775, step time: 17.041921615600586ms\r\n",,terminal_output +17960,13634631,"TERMINAL",0,0,"Step 3710, loss: 0.10045021772384644, step time: 17.08245277404785ms\r\n",,terminal_output +17961,13634692,"TERMINAL",0,0,"Step 3711, loss: 0.37530872225761414, step time: 17.235517501831055ms\r\n",,terminal_output +17962,13634755,"TERMINAL",0,0,"Step 3712, loss: 0.06498131155967712, step time: 19.681692123413086ms\r\n",,terminal_output +17963,13634818,"TERMINAL",0,0,"Step 3713, loss: 0.03940413147211075, step time: 17.13085174560547ms\r\n",,terminal_output +17964,13634882,"TERMINAL",0,0,"Step 3714, loss: 0.04019596427679062, step time: 17.69709587097168ms\r\n",,terminal_output +17965,13634946,"TERMINAL",0,0,"Step 3715, loss: 0.136892169713974, step time: 17.19975471496582ms\r\n",,terminal_output +17966,13635009,"TERMINAL",0,0,"Step 3716, loss: 0.48400697112083435, step time: 16.9980525970459ms\r\n",,terminal_output +17967,13635074,"TERMINAL",0,0,"Step 3717, loss: 0.08485392481088638, step time: 17.292499542236328ms\r\n",,terminal_output +17968,13635140,"TERMINAL",0,0,"Step 3718, loss: 0.07114633917808533, step time: 17.531871795654297ms\r\n",,terminal_output +17969,13635195,"TERMINAL",0,0,"Step 3719, loss: 0.1925332248210907, step time: 17.235517501831055ms\r\n",,terminal_output +17970,13635294,"TERMINAL",0,0,"Step 3720, loss: 0.32764941453933716, step time: 17.74120330810547ms\r\n",,terminal_output +17971,13635344,"TERMINAL",0,0,"Step 3721, loss: 0.15427877008914948, step time: 16.96181297302246ms\r\n",,terminal_output +17972,13635451,"TERMINAL",0,0,"Step 3722, loss: 0.37015339732170105, step time: 17.186403274536133ms\r\nStep 3723, loss: 0.0563526414334774, step time: 17.26531982421875ms\r\n",,terminal_output +17973,13635548,"TERMINAL",0,0,"Step 3724, loss: 0.04605625197291374, step time: 17.228126525878906ms\r\n",,terminal_output +17974,13635609,"TERMINAL",0,0,"Step 3725, loss: 0.044510748237371445, step time: 17.030715942382812ms\r\n",,terminal_output +17975,13635679,"TERMINAL",0,0,"Step 3726, loss: 0.053111400455236435, step time: 
17.79460906982422ms\r\n",,terminal_output +17976,13635735,"TERMINAL",0,0,"Step 3727, loss: 0.04794805869460106, step time: 17.000675201416016ms\r\n",,terminal_output +17977,13635800,"TERMINAL",0,0,"Step 3728, loss: 0.11247575283050537, step time: 17.22884178161621ms\r\n",,terminal_output +17978,13635863,"TERMINAL",0,0,"Step 3729, loss: 0.06254995614290237, step time: 17.242431640625ms\r\n",,terminal_output +17979,13635926,"TERMINAL",0,0,"Step 3730, loss: 0.05103950574994087, step time: 17.46201515197754ms\r\n",,terminal_output +17980,13635988,"TERMINAL",0,0,"Step 3731, loss: 0.10752005875110626, step time: 17.210960388183594ms\r\n",,terminal_output +17981,13636050,"TERMINAL",0,0,"Step 3732, loss: 0.09874025732278824, step time: 17.827510833740234ms\r\n",,terminal_output +17982,13636126,"TERMINAL",0,0,"Step 3733, loss: 0.13138987123966217, step time: 17.206430435180664ms\r\n",,terminal_output +17983,13636178,"TERMINAL",0,0,"Step 3734, loss: 0.05341397970914841, step time: 17.19188690185547ms\r\n",,terminal_output +17984,13636245,"TERMINAL",0,0,"Step 3735, loss: 0.0676141008734703, step time: 17.48037338256836ms\r\n",,terminal_output +17985,13636306,"TERMINAL",0,0,"Step 3736, loss: 0.404004842042923, step time: 17.342090606689453ms\r\n",,terminal_output +17986,13636367,"TERMINAL",0,0,"Step 3737, loss: 0.1254197359085083, step time: 17.224788665771484ms\r\n",,terminal_output +17987,13636430,"TERMINAL",0,0,"Step 3738, loss: 0.06704273819923401, step time: 17.687082290649414ms\r\n",,terminal_output +17988,13636494,"TERMINAL",0,0,"Step 3739, loss: 0.04714834690093994, step time: 17.230987548828125ms\r\n",,terminal_output +17989,13636558,"TERMINAL",0,0,"Step 3740, loss: 0.07143030315637589, step time: 17.10367202758789ms\r\n",,terminal_output +17990,13636622,"TERMINAL",0,0,"Step 3741, loss: 0.05648721009492874, step time: 17.23313331604004ms\r\n",,terminal_output +17991,13636684,"TERMINAL",0,0,"Step 3742, loss: 0.0646209642291069, step time: 17.56739616394043ms\r\n",,terminal_output +17992,13636747,"TERMINAL",0,0,"Step 3743, loss: 0.031771134585142136, step time: 17.253398895263672ms\r\n",,terminal_output +17993,13636810,"TERMINAL",0,0,"Step 3744, loss: 0.04347117617726326, step time: 17.83442497253418ms\r\n",,terminal_output +17994,13636872,"TERMINAL",0,0,"Step 3745, loss: 0.06661182641983032, step time: 17.268896102905273ms\r\n",,terminal_output +17995,13636935,"TERMINAL",0,0,"Step 3746, loss: 0.06951973587274551, step time: 17.226696014404297ms\r\n",,terminal_output +17996,13637000,"TERMINAL",0,0,"Step 3747, loss: 0.04486537724733353, step time: 17.491579055786133ms\r\n",,terminal_output +17997,13637061,"TERMINAL",0,0,"Step 3748, loss: 0.04667793586850166, step time: 17.483234405517578ms\r\n",,terminal_output +17998,13637122,"TERMINAL",0,0,"Step 3749, loss: 0.04034317657351494, step time: 17.225027084350586ms\r\n",,terminal_output +17999,13637174,"TERMINAL",0,0,"Step 3750, loss: 0.041491519659757614, step time: 17.5933837890625ms\r\n",,terminal_output +18000,13637268,"TERMINAL",0,0,"Step 3751, loss: 0.26163506507873535, step time: 17.226696014404297ms\r\n",,terminal_output +18001,13637320,"TERMINAL",0,0,"Step 3752, loss: 0.050792157649993896, step time: 17.25292205810547ms\r\n",,terminal_output +18002,13637373,"TERMINAL",0,0,"Step 3753, loss: 0.10492931306362152, step time: 17.413616180419922ms\r\n",,terminal_output +18003,13637482,"TERMINAL",0,0,"Step 3754, loss: 0.15041695535182953, step time: 17.244815826416016ms\r\nStep 3755, loss: 0.08903703838586807, step time: 
17.046451568603516ms\r\n",,terminal_output +18004,13637542,"TERMINAL",0,0,"Step 3756, loss: 0.07584920525550842, step time: 17.74740219116211ms\r\n",,terminal_output +18005,13637634,"TERMINAL",0,0,"Step 3757, loss: 0.08584180474281311, step time: 17.254352569580078ms\r\n",,terminal_output +18006,13637685,"TERMINAL",0,0,"Step 3758, loss: 0.0423109196126461, step time: 28.355121612548828ms\r\n",,terminal_output +18007,13637789,"TERMINAL",0,0,"Step 3759, loss: 0.031941093504428864, step time: 17.3947811126709ms\r\nStep 3760, loss: 0.0398179329931736, step time: 17.44556427001953ms\r\n",,terminal_output +18008,13637881,"TERMINAL",0,0,"Step 3761, loss: 0.0969250276684761, step time: 17.174959182739258ms\r\n",,terminal_output +18009,13637932,"TERMINAL",0,0,"Step 3762, loss: 0.07905063033103943, step time: 17.6389217376709ms\r\n",,terminal_output +18010,13638036,"TERMINAL",0,0,"Step 3763, loss: 0.11854002624750137, step time: 17.16327667236328ms\r\nStep 3764, loss: 0.1055029109120369, step time: 17.1048641204834ms\r\n",,terminal_output +18011,13638098,"TERMINAL",0,0,"Step 3765, loss: 0.03899041563272476, step time: 18.399953842163086ms\r\n",,terminal_output +18012,13638159,"TERMINAL",0,0,"Step 3766, loss: 0.33953019976615906, step time: 17.386198043823242ms\r\n",,terminal_output +18013,13638222,"TERMINAL",0,0,"Step 3767, loss: 0.23106880486011505, step time: 17.256498336791992ms\r\n",,terminal_output +18014,13638317,"TERMINAL",0,0,"Step 3768, loss: 0.039397019892930984, step time: 18.172740936279297ms\r\n",,terminal_output +18015,13638367,"TERMINAL",0,0,"Step 3769, loss: 0.052423011511564255, step time: 17.4560546875ms\r\n",,terminal_output +18016,13638419,"TERMINAL",0,0,"Step 3770, loss: 0.07212673872709274, step time: 17.115116119384766ms\r\n",,terminal_output +18017,13638511,"TERMINAL",0,0,"Step 3771, loss: 1.1715295314788818, step time: 17.34471321105957ms\r\n",,terminal_output +18018,13638659,"TERMINAL",0,0,"Step 3772, loss: 0.07149491459131241, step time: 17.46368408203125ms\r\nStep 3773, loss: 0.10586026310920715, step time: 17.000913619995117ms\r\nStep 3774, loss: 0.1263386756181717, step time: 17.55356788635254ms\r\n",,terminal_output +18019,13638723,"TERMINAL",0,0,"Step 3775, loss: 0.25309279561042786, step time: 17.066001892089844ms\r\n",,terminal_output +18020,13638786,"TERMINAL",0,0,"Step 3776, loss: 0.38686421513557434, step time: 16.99090003967285ms\r\n",,terminal_output +18021,13638849,"TERMINAL",0,0,"Step 3777, loss: 0.0872439295053482, step time: 17.46535301208496ms\r\n",,terminal_output +18022,13638914,"TERMINAL",0,0,"Step 3778, loss: 0.0331120602786541, step time: 17.525434494018555ms\r\n",,terminal_output +18023,13638974,"TERMINAL",0,0,"Step 3779, loss: 0.0786120817065239, step time: 16.993045806884766ms\r\n",,terminal_output +18024,13639293,"TERMINAL",0,0,"Step 3780, loss: 0.0862547755241394, step time: 304.7952651977539ms\r\n",,terminal_output +18025,13639353,"TERMINAL",0,0,"Step 3781, loss: 0.05659710243344307, step time: 24.93429183959961ms\r\n",,terminal_output +18026,13639418,"TERMINAL",0,0,"Step 3782, loss: 0.1125747337937355, step time: 20.292043685913086ms\r\n",,terminal_output +18027,13639480,"TERMINAL",0,0,"Step 3783, loss: 0.09763316065073013, step time: 18.839597702026367ms\r\n",,terminal_output +18028,13639543,"TERMINAL",0,0,"Step 3784, loss: 0.044441282749176025, step time: 17.896175384521484ms\r\n",,terminal_output +18029,13639641,"TERMINAL",0,0,"Step 3785, loss: 0.052281755954027176, step time: 17.596960067749023ms\r\n",,terminal_output 
+18030,13639688,"TERMINAL",0,0,"Step 3786, loss: 0.6443966031074524, step time: 18.07236671447754ms\r\n",,terminal_output +18031,13639791,"TERMINAL",0,0,"Step 3787, loss: 0.09934979677200317, step time: 17.36736297607422ms\r\nStep 3788, loss: 0.03772083297371864, step time: 17.4252986907959ms\r\n",,terminal_output +18032,13639884,"TERMINAL",0,0,"Step 3789, loss: 0.12887254357337952, step time: 17.473697662353516ms\r\n",,terminal_output +18033,13639937,"TERMINAL",0,0,"Step 3790, loss: 0.09881990402936935, step time: 17.573118209838867ms\r\n",,terminal_output +18034,13640034,"TERMINAL",0,0,"Step 3791, loss: 0.21218951046466827, step time: 17.24386215209961ms\r\n",,terminal_output +18035,13640116,"TERMINAL",0,0,"Step 3792, loss: 0.5204137563705444, step time: 17.65298843383789ms\r\nStep 3793, loss: 0.2588045597076416, step time: 17.17829704284668ms\r\n",,terminal_output +18036,13640167,"TERMINAL",0,0,"Step 3794, loss: 0.10069712996482849, step time: 17.470121383666992ms\r\n",,terminal_output +18037,13640231,"TERMINAL",0,0,"Step 3795, loss: 0.1093311458826065, step time: 17.391681671142578ms\r\n",,terminal_output +18038,13640331,"TERMINAL",0,0,"Step 3796, loss: 0.308767706155777, step time: 17.23194122314453ms\r\n",,terminal_output +18039,13640383,"TERMINAL",0,0,"Step 3797, loss: 0.15529654920101166, step time: 16.971588134765625ms\r\n",,terminal_output +18040,13640435,"TERMINAL",0,0,"Step 3798, loss: 0.1506708860397339, step time: 18.183469772338867ms\r\n",,terminal_output +18041,13640537,"TERMINAL",0,0,"Step 3799, loss: 0.07157915830612183, step time: 17.765283584594727ms\r\n",,terminal_output +18042,13640610,"TERMINAL",0,0,"Step 3800, loss: 0.07527269423007965, step time: 17.53520965576172ms\r\nStep 3801, loss: 0.08204744011163712, step time: 19.460678100585938ms\r\n",,terminal_output +18043,13640676,"TERMINAL",0,0,"Step 3802, loss: 0.139421284198761, step time: 17.252445220947266ms\r\n",,terminal_output +18044,13640735,"TERMINAL",0,0,"Step 3803, loss: 0.2254847139120102, step time: 17.005205154418945ms\r\n",,terminal_output +18045,13640802,"TERMINAL",0,0,"Step 3804, loss: 0.07566124200820923, step time: 17.832517623901367ms\r\n",,terminal_output +18046,13640892,"TERMINAL",0,0,"Step 3805, loss: 0.10488103330135345, step time: 17.2882080078125ms\r\n",,terminal_output +18047,13640943,"TERMINAL",0,0,"Step 3806, loss: 0.2988656163215637, step time: 17.443180084228516ms\r\n",,terminal_output +18048,13641040,"TERMINAL",0,0,"Step 3807, loss: 0.16105973720550537, step time: 17.51708984375ms\r\n",,terminal_output +18049,13641091,"TERMINAL",0,0,"Step 3808, loss: 0.04483658820390701, step time: 17.581462860107422ms\r\n",,terminal_output +18050,13641177,"TERMINAL",0,0,"Step 3809, loss: 0.07787314802408218, step time: 17.41766929626465ms\r\nStep 3810, loss: 0.08039698749780655, step time: 17.72022247314453ms\r\n",,terminal_output +18051,13641243,"TERMINAL",0,0,"Step 3811, loss: 0.17274829745292664, step time: 17.09604263305664ms\r\n",,terminal_output +18052,13641369,"TERMINAL",0,0,"Step 3812, loss: 0.03423730656504631, step time: 22.658348083496094ms\r\nStep 3813, loss: 0.03862718492746353, step time: 20.443439483642578ms\r\n",,terminal_output +18053,13641433,"TERMINAL",0,0,"Step 3814, loss: 0.050350867211818695, step time: 18.347978591918945ms\r\n",,terminal_output +18054,13641529,"TERMINAL",0,0,"Step 3815, loss: 0.05037478730082512, step time: 17.634153366088867ms\r\n",,terminal_output +18055,13641578,"TERMINAL",0,0,"Step 3816, loss: 0.03651324659585953, step time: 
18.22686195373535ms\r\n",,terminal_output +18056,13641684,"TERMINAL",0,0,"Step 3817, loss: 0.05837368592619896, step time: 17.71092414855957ms\r\nStep 3818, loss: 0.06092773377895355, step time: 18.389463424682617ms\r\n",,terminal_output +18057,13641749,"TERMINAL",0,0,"Step 3819, loss: 0.17723868787288666, step time: 18.22662353515625ms\r\n",,terminal_output +18058,13641813,"TERMINAL",0,0,"Step 3820, loss: 0.10494570434093475, step time: 17.68636703491211ms\r\n",,terminal_output +18059,13641873,"TERMINAL",0,0,"Step 3821, loss: 0.47689056396484375, step time: 17.31586456298828ms\r\n",,terminal_output +18060,13641934,"TERMINAL",0,0,"Step 3822, loss: 0.054434165358543396, step time: 17.773866653442383ms\r\n",,terminal_output +18061,13641998,"TERMINAL",0,0,"Step 3823, loss: 0.07938035577535629, step time: 17.112255096435547ms\r\n",,terminal_output +18062,13642059,"TERMINAL",0,0,"Step 3824, loss: 0.04488813132047653, step time: 17.304182052612305ms\r\n",,terminal_output +18063,13642137,"TERMINAL",0,0,"Step 3825, loss: 0.32929104566574097, step time: 17.613649368286133ms\r\n",,terminal_output +18064,13642197,"TERMINAL",0,0,"Step 3826, loss: 0.053001768887043, step time: 17.660856246948242ms\r\n",,terminal_output +18065,13642246,"TERMINAL",0,0,"Step 3827, loss: 0.1146152913570404, step time: 17.212390899658203ms\r\n",,terminal_output +18066,13642311,"TERMINAL",0,0,"Step 3828, loss: 0.0915818139910698, step time: 17.767667770385742ms\r\n",,terminal_output +18067,13642413,"TERMINAL",0,0,"Step 3829, loss: 0.04450996592640877, step time: 17.21644401550293ms\r\n",,terminal_output +18068,13642464,"TERMINAL",0,0,"Step 3830, loss: 0.05159911513328552, step time: 17.15826988220215ms\r\n",,terminal_output +18069,13642571,"TERMINAL",0,0,"Step 3831, loss: 0.04328271374106407, step time: 17.436504364013672ms\r\nStep 3832, loss: 0.03763808682560921, step time: 17.31133460998535ms\r\n",,terminal_output +18070,13642638,"TERMINAL",0,0,"Step 3833, loss: 0.07814466953277588, step time: 18.04184913635254ms\r\n",,terminal_output +18071,13642699,"TERMINAL",0,0,"Step 3834, loss: 0.04617120325565338, step time: 18.231630325317383ms\r\n",,terminal_output +18072,13642762,"TERMINAL",0,0,"Step 3835, loss: 0.1580149382352829, step time: 17.349958419799805ms\r\n",,terminal_output +18073,13642825,"TERMINAL",0,0,"Step 3836, loss: 0.048344891518354416, step time: 17.279386520385742ms\r\n",,terminal_output +18074,13642889,"TERMINAL",0,0,"Step 3837, loss: 0.08474275469779968, step time: 17.46535301208496ms\r\n",,terminal_output +18075,13642954,"TERMINAL",0,0,"Step 3838, loss: 0.07955236732959747, step time: 17.4102783203125ms\r\n",,terminal_output +18076,13643012,"TERMINAL",0,0,"Step 3839, loss: 0.044212743639945984, step time: 17.241954803466797ms\r\n",,terminal_output +18077,13643081,"TERMINAL",0,0,"Step 3840, loss: 0.03163188323378563, step time: 17.71712303161621ms\r\n",,terminal_output +18078,13643197,"TERMINAL",0,0,"Step 3841, loss: 0.07263463735580444, step time: 17.040014266967773ms\r\nStep 3842, loss: 0.17511530220508575, step time: 17.227888107299805ms\r\n",,terminal_output +18079,13643261,"TERMINAL",0,0,"Step 3843, loss: 0.1963316947221756, step time: 17.77362823486328ms\r\n",,terminal_output +18080,13643322,"TERMINAL",0,0,"Step 3844, loss: 0.10553507506847382, step time: 17.47751235961914ms\r\n",,terminal_output +18081,13643415,"TERMINAL",0,0,"Step 3845, loss: 0.0511016808450222, step time: 17.415523529052734ms\r\n",,terminal_output +18082,13643457,"TERMINAL",0,0,"Step 3846, loss: 0.030732184648513794, step time: 
17.970800399780273ms\r\n",,terminal_output +18083,13643529,"TERMINAL",0,0,"Step 3847, loss: 0.12131286412477493, step time: 17.301082611083984ms\r\n",,terminal_output +18084,13643594,"TERMINAL",0,0,"Step 3848, loss: 0.10742805898189545, step time: 17.215490341186523ms\r\n",,terminal_output +18085,13643659,"TERMINAL",0,0,"Step 3849, loss: 0.06235579028725624, step time: 17.468929290771484ms\r\n",,terminal_output +18086,13643700,"TERMINAL",0,0,"Step 3850, loss: 0.10041462630033493, step time: 17.355918884277344ms\r\n",,terminal_output +18087,13643824,"TERMINAL",0,0,"Step 3851, loss: 0.03273403272032738, step time: 17.166614532470703ms\r\nStep 3852, loss: 0.14460165798664093, step time: 17.74907112121582ms\r\n",,terminal_output +18088,13643889,"TERMINAL",0,0,"Step 3853, loss: 0.03404791280627251, step time: 18.96357536315918ms\r\n",,terminal_output +18089,13643952,"TERMINAL",0,0,"Step 3854, loss: 0.042874108999967575, step time: 17.210006713867188ms\r\n",,terminal_output +18090,13644017,"TERMINAL",0,0,"Step 3855, loss: 0.1074981689453125, step time: 17.615079879760742ms\r\n",,terminal_output +18091,13644081,"TERMINAL",0,0,"Step 3856, loss: 0.10817854851484299, step time: 17.38715171813965ms\r\n",,terminal_output +18092,13644144,"TERMINAL",0,0,"Step 3857, loss: 0.14434602856636047, step time: 17.28534698486328ms\r\n",,terminal_output +18093,13644269,"TERMINAL",0,0,"Step 3858, loss: 0.03374025225639343, step time: 18.00394058227539ms\r\nStep 3859, loss: 0.055033713579177856, step time: 17.295360565185547ms\r\n",,terminal_output +18094,13644396,"TERMINAL",0,0,"Step 3860, loss: 0.034171730279922485, step time: 17.229318618774414ms\r\nStep 3861, loss: 0.050441715866327286, step time: 17.48347282409668ms\r\n",,terminal_output +18095,13644465,"TERMINAL",0,0,"Step 3862, loss: 0.03838445246219635, step time: 17.434358596801758ms\r\n",,terminal_output +18096,13644523,"TERMINAL",0,0,"Step 3863, loss: 0.04365791752934456, step time: 17.210960388183594ms\r\n",,terminal_output +18097,13644615,"TERMINAL",0,0,"Step 3864, loss: 0.06810161471366882, step time: 17.72904396057129ms\r\n",,terminal_output +18098,13644666,"TERMINAL",0,0,"Step 3865, loss: 0.03252026438713074, step time: 18.074750900268555ms\r\n",,terminal_output +18099,13644759,"TERMINAL",0,0,"Step 3866, loss: 0.03812286630272865, step time: 17.32921600341797ms\r\n",,terminal_output +18100,13644812,"TERMINAL",0,0,"Step 3867, loss: 0.056640591472387314, step time: 17.277240753173828ms\r\n",,terminal_output +18101,13644917,"TERMINAL",0,0,"Step 3868, loss: 0.03992899879813194, step time: 17.328977584838867ms\r\nStep 3869, loss: 0.15753372013568878, step time: 17.109394073486328ms\r\n",,terminal_output +18102,13644979,"TERMINAL",0,0,"Step 3870, loss: 0.09031428396701813, step time: 17.915725708007812ms\r\n",,terminal_output +18103,13645043,"TERMINAL",0,0,"Step 3871, loss: 0.3371596336364746, step time: 17.044544219970703ms\r\n",,terminal_output +18104,13645104,"TERMINAL",0,0,"Step 3872, loss: 0.08962684124708176, step time: 17.423629760742188ms\r\n",,terminal_output +18105,13645167,"TERMINAL",0,0,"Step 3873, loss: 0.0992654412984848, step time: 17.415523529052734ms\r\n",,terminal_output +18106,13645229,"TERMINAL",0,0,"Step 3874, loss: 0.03386497125029564, step time: 17.475366592407227ms\r\n",,terminal_output +18107,13645291,"TERMINAL",0,0,"Step 3875, loss: 0.07388857752084732, step time: 17.182111740112305ms\r\n",,terminal_output +18108,13645355,"TERMINAL",0,0,"Step 3876, loss: 0.03473784402012825, step time: 17.83013343811035ms\r\n",,terminal_output 
+18109,13645419,"TERMINAL",0,0,"Step 3877, loss: 0.824462354183197, step time: 17.11249351501465ms\r\n",,terminal_output +18110,13645482,"TERMINAL",0,0,"Step 3878, loss: 0.03250666335225105, step time: 17.174720764160156ms\r\n",,terminal_output +18111,13645545,"TERMINAL",0,0,"Step 3879, loss: 0.13195696473121643, step time: 17.52781867980957ms\r\n",,terminal_output +18112,13645608,"TERMINAL",0,0,"Step 3880, loss: 0.17215457558631897, step time: 17.349958419799805ms\r\n",,terminal_output +18113,13645675,"TERMINAL",0,0,"Step 3881, loss: 0.031113620847463608, step time: 17.101287841796875ms\r\n",,terminal_output +18114,13645731,"TERMINAL",0,0,"Step 3882, loss: 0.05635225027799606, step time: 17.550945281982422ms\r\n",,terminal_output +18115,13645828,"TERMINAL",0,0,"Step 3883, loss: 0.18313413858413696, step time: 17.04239845275879ms\r\n",,terminal_output +18116,13645881,"TERMINAL",0,0,"Step 3884, loss: 0.0746540054678917, step time: 16.9832706451416ms\r\n",,terminal_output +18117,13645988,"TERMINAL",0,0,"Step 3885, loss: 0.03396589308977127, step time: 17.461538314819336ms\r\nStep 3886, loss: 0.06899793446063995, step time: 17.341136932373047ms\r\n",,terminal_output +18118,13646052,"TERMINAL",0,0,"Step 3887, loss: 0.07523393630981445, step time: 17.088651657104492ms\r\n",,terminal_output +18119,13646125,"TERMINAL",0,0,"Step 3888, loss: 0.8854204416275024, step time: 17.734766006469727ms\r\n",,terminal_output +18120,13646177,"TERMINAL",0,0,"Step 3889, loss: 0.17381839454174042, step time: 17.07768440246582ms\r\n",,terminal_output +18121,13646242,"TERMINAL",0,0,"Step 3890, loss: 0.11167680472135544, step time: 17.110109329223633ms\r\n",,terminal_output +18122,13646313,"TERMINAL",0,0,"Step 3891, loss: 0.05409790575504303, step time: 17.401933670043945ms\r\n",,terminal_output +18123,13646367,"TERMINAL",0,0,"Step 3892, loss: 0.061256181448698044, step time: 17.549991607666016ms\r\n",,terminal_output +18124,13646483,"TERMINAL",0,0,"Step 3893, loss: 0.051941048353910446, step time: 17.11416244506836ms\r\nStep 3894, loss: 0.05550018697977066, step time: 17.65894889831543ms\r\n",,terminal_output +18125,13646548,"TERMINAL",0,0,"Step 3895, loss: 0.05251133069396019, step time: 17.238616943359375ms\r\n",,terminal_output +18126,13646611,"TERMINAL",0,0,"Step 3896, loss: 0.09443274140357971, step time: 17.478227615356445ms\r\n",,terminal_output +18127,13646675,"TERMINAL",0,0,"Step 3897, loss: 1.3663201332092285, step time: 17.52924919128418ms\r\n",,terminal_output +18128,13646737,"TERMINAL",0,0,"Step 3898, loss: 0.2479545772075653, step time: 17.343759536743164ms\r\n",,terminal_output +18129,13646801,"TERMINAL",0,0,"Step 3899, loss: 0.06938696652650833, step time: 18.670320510864258ms\r\n",,terminal_output +18130,13646862,"TERMINAL",0,0,"Step 3900, loss: 0.39579153060913086, step time: 17.841339111328125ms\r\n",,terminal_output +18131,13646926,"TERMINAL",0,0,"Step 3901, loss: 0.10573699325323105, step time: 17.09914207458496ms\r\n",,terminal_output +18132,13647021,"TERMINAL",0,0,"Step 3902, loss: 0.07449322938919067, step time: 17.25625991821289ms\r\n",,terminal_output +18133,13647074,"TERMINAL",0,0,"Step 3903, loss: 0.10102036595344543, step time: 17.551660537719727ms\r\n",,terminal_output +18134,13647182,"TERMINAL",0,0,"Step 3904, loss: 0.1604223996400833, step time: 17.326831817626953ms\r\nStep 3905, loss: 0.4859030246734619, step time: 17.07315444946289ms\r\n",,terminal_output +18135,13647247,"TERMINAL",0,0,"Step 3906, loss: 0.2341996282339096, step time: 17.75646209716797ms\r\n",,terminal_output 
+18136,13647306,"TERMINAL",0,0,"Step 3907, loss: 0.13025233149528503, step time: 17.192363739013672ms\r\n",,terminal_output +18137,13647368,"TERMINAL",0,0,"Step 3908, loss: 0.0472966767847538, step time: 17.15683937072754ms\r\n",,terminal_output +18138,13647431,"TERMINAL",0,0,"Step 3909, loss: 0.08139786869287491, step time: 17.567873001098633ms\r\n",,terminal_output +18139,13647493,"TERMINAL",0,0,"Step 3910, loss: 0.07109209150075912, step time: 17.21048355102539ms\r\n",,terminal_output +18140,13647558,"TERMINAL",0,0,"Step 3911, loss: 0.4431733191013336, step time: 17.020225524902344ms\r\n",,terminal_output +18141,13647657,"TERMINAL",0,0,"Step 3912, loss: 0.11059299111366272, step time: 17.830848693847656ms\r\n",,terminal_output +18142,13647724,"TERMINAL",0,0,"Step 3913, loss: 0.05434218421578407, step time: 17.215251922607422ms\r\n",,terminal_output +18143,13647785,"TERMINAL",0,0,"Step 3914, loss: 0.043904177844524384, step time: 18.573760986328125ms\r\n",,terminal_output +18144,13647846,"TERMINAL",0,0,"Step 3915, loss: 0.05429980531334877, step time: 17.693758010864258ms\r\n",,terminal_output +18145,13647906,"TERMINAL",0,0,"Step 3916, loss: 0.08113034069538116, step time: 17.586469650268555ms\r\n",,terminal_output +18146,13648012,"TERMINAL",0,0,"Step 3917, loss: 0.049453794956207275, step time: 17.077922821044922ms\r\nStep 3918, loss: 0.06658731400966644, step time: 17.761707305908203ms\r\n",,terminal_output +18147,13648075,"TERMINAL",0,0,"Step 3919, loss: 0.07331275939941406, step time: 16.97230339050293ms\r\n",,terminal_output +18148,13648138,"TERMINAL",0,0,"Step 3920, loss: 0.044173676520586014, step time: 17.157793045043945ms\r\n",,terminal_output +18149,13648202,"TERMINAL",0,0,"Step 3921, loss: 0.19801436364650726, step time: 17.374753952026367ms\r\n",,terminal_output +18150,13648266,"TERMINAL",0,0,"Step 3922, loss: 0.04634373262524605, step time: 17.51422882080078ms\r\n",,terminal_output +18151,13648362,"genie.py",3053,0,"",python,selection_mouse +18152,13648413,"TERMINAL",0,0,"Step 3923, loss: 0.048334136605262756, step time: 17.25006103515625ms\r\nStep 3924, loss: 0.06854210048913956, step time: 17.99154281616211ms\r\n",,terminal_output +18153,13648466,"TERMINAL",0,0,"Step 3925, loss: 0.11945939064025879, step time: 17.342329025268555ms\r\n",,terminal_output +18154,13648600,"TERMINAL",0,0,"Step 3926, loss: 0.06770391017198563, step time: 17.305612564086914ms\r\nStep 3927, loss: 0.0647689625620842, step time: 17.582178115844727ms\r\n",,terminal_output +18155,13648651,"TERMINAL",0,0,"Step 3928, loss: 0.048496443778276443, step time: 17.362356185913086ms\r\n",,terminal_output +18156,13648714,"TERMINAL",0,0,"Step 3929, loss: 0.07271784543991089, step time: 17.071247100830078ms\r\n",,terminal_output +18157,13648814,"TERMINAL",0,0,"Step 3930, loss: 0.11901318281888962, step time: 17.7154541015625ms\r\n",,terminal_output +18158,13648866,"TERMINAL",0,0,"Step 3931, loss: 0.3343139588832855, step time: 17.055034637451172ms\r\n",,terminal_output +18159,13648985,"TERMINAL",0,0,"Step 3932, loss: 0.13684985041618347, step time: 17.190217971801758ms\r\nStep 3933, loss: 0.04029408469796181, step time: 17.380952835083008ms\r\n",,terminal_output +18160,13649051,"TERMINAL",0,0,"Step 3934, loss: 0.028736000880599022, step time: 17.511367797851562ms\r\n",,terminal_output +18161,13649112,"TERMINAL",0,0,"Step 3935, loss: 0.04169397056102753, step time: 17.333269119262695ms\r\n",,terminal_output +18162,13649218,"TERMINAL",0,0,"Step 3936, loss: 0.056311801075935364, step time: 
17.779827117919922ms\r\nStep 3937, loss: 0.0668037161231041, step time: 17.071247100830078ms\r\n",,terminal_output +18163,13649284,"TERMINAL",0,0,"Step 3938, loss: 0.18465259671211243, step time: 17.314672470092773ms\r\n",,terminal_output +18164,13649646,"TERMINAL",0,0,"Step 3939, loss: 0.031239137053489685, step time: 295.055627822876ms\r\nStep 3940, loss: 0.040842294692993164, step time: 25.423765182495117ms\r\n",,terminal_output +18165,13649712,"TERMINAL",0,0,"Step 3941, loss: 0.6091691851615906, step time: 19.496440887451172ms\r\n",,terminal_output +18166,13649774,"TERMINAL",0,0,"Step 3942, loss: 0.0347108468413353, step time: 18.47982406616211ms\r\n",,terminal_output +18167,13649837,"TERMINAL",0,0,"Step 3943, loss: 0.11037921160459518, step time: 17.69852638244629ms\r\n",,terminal_output +18168,13649902,"TERMINAL",0,0,"Step 3944, loss: 0.10188170522451401, step time: 17.710447311401367ms\r\n",,terminal_output +18169,13649964,"TERMINAL",0,0,"Step 3945, loss: 0.46538129448890686, step time: 17.97652244567871ms\r\n",,terminal_output +18170,13650027,"TERMINAL",0,0,"Step 3946, loss: 0.0433000884950161, step time: 17.673730850219727ms\r\n",,terminal_output +18171,13650090,"TERMINAL",0,0,"Step 3947, loss: 0.06054797023534775, step time: 17.393827438354492ms\r\n",,terminal_output +18172,13650155,"TERMINAL",0,0,"Step 3948, loss: 0.027922261506319046, step time: 18.027067184448242ms\r\n",,terminal_output +18173,13650212,"TERMINAL",0,0,"Step 3949, loss: 0.033090345561504364, step time: 17.134428024291992ms\r\n",,terminal_output +18174,13650318,"TERMINAL",0,0,"Step 3950, loss: 0.08162756264209747, step time: 17.354726791381836ms\r\n",,terminal_output +18175,13650379,"TERMINAL",0,0,"Step 3951, loss: 0.037944018840789795, step time: 17.52448081970215ms\r\n",,terminal_output +18176,13650494,"TERMINAL",0,0,"Step 3952, loss: 0.10658597201108932, step time: 17.71068572998047ms\r\nStep 3953, loss: 0.14953239262104034, step time: 17.118453979492188ms\r\n",,terminal_output +18177,13650602,"TERMINAL",0,0,"Step 3954, loss: 0.0838698074221611, step time: 17.735719680786133ms\r\nStep 3955, loss: 0.034461576491594315, step time: 17.125606536865234ms\r\n",,terminal_output +18178,13650664,"TERMINAL",0,0,"Step 3956, loss: 0.060507819056510925, step time: 17.479896545410156ms\r\n",,terminal_output +18179,13650726,"TERMINAL",0,0,"Step 3957, loss: 0.04933617636561394, step time: 17.67563819885254ms\r\n",,terminal_output +18180,13650790,"TERMINAL",0,0,"Step 3958, loss: 0.036010079085826874, step time: 17.660140991210938ms\r\n",,terminal_output +18181,13650852,"TERMINAL",0,0,"Step 3959, loss: 0.05176273360848427, step time: 17.252206802368164ms\r\n",,terminal_output +18182,13650919,"TERMINAL",0,0,"Step 3960, loss: 0.041347358375787735, step time: 17.999887466430664ms\r\n",,terminal_output +18183,13650979,"TERMINAL",0,0,"Step 3961, loss: 0.04295455291867256, step time: 17.266035079956055ms\r\n",,terminal_output +18184,13651064,"TERMINAL",0,0,"Step 3962, loss: 0.022409647703170776, step time: 17.19498634338379ms\r\n",,terminal_output +18185,13651165,"TERMINAL",0,0,"Step 3963, loss: 0.06093477085232735, step time: 17.529010772705078ms\r\nStep 3964, loss: 0.02953966334462166, step time: 17.441511154174805ms\r\n",,terminal_output +18186,13651229,"TERMINAL",0,0,"Step 3965, loss: 0.21719516813755035, step time: 17.20428466796875ms\r\n",,terminal_output +18187,13651294,"TERMINAL",0,0,"Step 3966, loss: 0.03126898780465126, step time: 17.915725708007812ms\r\n",,terminal_output +18188,13651357,"TERMINAL",0,0,"Step 3967, loss: 
0.042152754962444305, step time: 17.2879695892334ms\r\n",,terminal_output +18189,13651418,"TERMINAL",0,0,"Step 3968, loss: 0.09392309933900833, step time: 17.255306243896484ms\r\n",,terminal_output +18190,13651482,"TERMINAL",0,0,"Step 3969, loss: 0.03624449297785759, step time: 17.64225959777832ms\r\n",,terminal_output +18191,13651545,"TERMINAL",0,0,"Step 3970, loss: 0.19216208159923553, step time: 17.508983612060547ms\r\n",,terminal_output +18192,13651610,"TERMINAL",0,0,"Step 3971, loss: 0.030885469168424606, step time: 17.28081703186035ms\r\n",,terminal_output +18193,13651673,"TERMINAL",0,0,"Step 3972, loss: 0.15212216973304749, step time: 18.073081970214844ms\r\n",,terminal_output +18194,13651740,"TERMINAL",0,0,"Step 3973, loss: 0.10287471115589142, step time: 18.80168914794922ms\r\n",,terminal_output +18195,13651801,"TERMINAL",0,0,"Step 3974, loss: 0.026196187362074852, step time: 17.80223846435547ms\r\n",,terminal_output +18196,13651863,"TERMINAL",0,0,"Step 3975, loss: 0.038270410150289536, step time: 17.550945281982422ms\r\n",,terminal_output +18197,13651927,"TERMINAL",0,0,"Step 3976, loss: 0.03683210164308548, step time: 17.853736877441406ms\r\n",,terminal_output +18198,13651992,"TERMINAL",0,0,"Step 3977, loss: 0.09192385524511337, step time: 17.069578170776367ms\r\n",,terminal_output +18199,13652055,"TERMINAL",0,0,"Step 3978, loss: 0.09176577627658844, step time: 17.937660217285156ms\r\n",,terminal_output +18200,13652129,"TERMINAL",0,0,"Step 3979, loss: 0.04557204246520996, step time: 17.02570915222168ms\r\n",,terminal_output +18201,13652183,"TERMINAL",0,0,"Step 3980, loss: 0.034704480320215225, step time: 17.125368118286133ms\r\n",,terminal_output +18202,13652278,"TERMINAL",0,0,"Step 3981, loss: 0.04423714801669121, step time: 17.388105392456055ms\r\n",,terminal_output +18203,13652341,"TERMINAL",0,0,"Step 3982, loss: 0.09286506474018097, step time: 17.337322235107422ms\r\n",,terminal_output +18204,13652448,"TERMINAL",0,0,"Step 3983, loss: 0.02978423796594143, step time: 17.138004302978516ms\r\nStep 3984, loss: 0.14952488243579865, step time: 18.201589584350586ms\r\n",,terminal_output +18205,13652545,"TERMINAL",0,0,"Step 3985, loss: 0.03840387985110283, step time: 17.14301109313965ms\r\n",,terminal_output +18206,13652622,"TERMINAL",0,0,"Step 3986, loss: 0.026398733258247375, step time: 17.47727394104004ms\r\nStep 3987, loss: 0.03778335824608803, step time: 17.508506774902344ms\r\n",,terminal_output +18207,13652742,"TERMINAL",0,0,"Step 3988, loss: 0.1001957580447197, step time: 17.48967170715332ms\r\n",,terminal_output +18208,13652798,"TERMINAL",0,0,"Step 3989, loss: 0.03308841958642006, step time: 17.095327377319336ms\r\n",,terminal_output +18209,13652851,"TERMINAL",0,0,"Step 3990, loss: 0.12943729758262634, step time: 17.83013343811035ms\r\n",,terminal_output +18210,13652905,"TERMINAL",0,0,"Step 3991, loss: 0.03383132070302963, step time: 17.058849334716797ms\r\n",,terminal_output +18211,13653010,"TERMINAL",0,0,"Step 3992, loss: 0.03908586502075195, step time: 17.100811004638672ms\r\nStep 3993, loss: 0.04626398906111717, step time: 17.464637756347656ms\r\n",,terminal_output +18212,13653126,"TERMINAL",0,0,"Step 3994, loss: 0.24412892758846283, step time: 17.471790313720703ms\r\nStep 3995, loss: 0.03144589811563492, step time: 17.222881317138672ms\r\n",,terminal_output +18213,13653198,"TERMINAL",0,0,"Step 3996, loss: 0.3976896107196808, step time: 17.868757247924805ms\r\n",,terminal_output +18214,13653252,"TERMINAL",0,0,"Step 3997, loss: 0.03173267841339111, step time: 
17.067670822143555ms\r\n",,terminal_output +18215,13653347,"TERMINAL",0,0,"Step 3998, loss: 0.03704097867012024, step time: 17.39788055419922ms\r\n",,terminal_output +18216,13653404,"TERMINAL",0,0,"Step 3999, loss: 0.03805752471089363, step time: 17.730236053466797ms\r\n",,terminal_output +18217,13654226,"genie.py",3053,0,"\n ",python,content +18218,13655421,"genie.py",3062,0,"o",python,content +18219,13655422,"genie.py",3063,0,"",python,selection_keyboard +18220,13655523,"genie.py",3063,0,"u",python,content +18221,13655525,"genie.py",3064,0,"",python,selection_keyboard +18222,13655626,"genie.py",3064,0,"t",python,content +18223,13655628,"genie.py",3065,0,"",python,selection_keyboard +18224,13656223,"TERMINAL",0,0,"Step 4000, loss: 0.03972824290394783, step time: 26.381969451904297ms\r\n",,terminal_output +18225,13656312,"TERMINAL",0,0,"Step 4001, loss: 0.08346590399742126, step time: 24.328231811523438ms\r\n",,terminal_output +18226,13656351,"TERMINAL",0,0,"Step 4002, loss: 0.09397011250257492, step time: 19.375085830688477ms\r\n",,terminal_output +18227,13656374,"genie.py",3062,3,"outputs",python,content +18228,13656478,"TERMINAL",0,0,"Step 4003, loss: 0.03229666128754616, step time: 18.918514251708984ms\r\nStep 4004, loss: 0.03627511486411095, step time: 18.295764923095703ms\r\n",,terminal_output +18229,13656544,"TERMINAL",0,0,"Step 4005, loss: 0.03919322416186333, step time: 18.385887145996094ms\r\n",,terminal_output +18230,13656612,"TERMINAL",0,0,"Step 4006, loss: 0.056797683238983154, step time: 17.91858673095703ms\r\n",,terminal_output +18231,13656674,"TERMINAL",0,0,"Step 4007, loss: 0.05460456386208534, step time: 18.616437911987305ms\r\n",,terminal_output +18232,13656810,"TERMINAL",0,0,"Step 4008, loss: 0.05121134594082832, step time: 17.837047576904297ms\r\nStep 4009, loss: 0.0335366427898407, step time: 18.033981323242188ms\r\n",,terminal_output +18233,13656943,"TERMINAL",0,0,"Step 4010, loss: 0.20736795663833618, step time: 18.069028854370117ms\r\nStep 4011, loss: 0.04400738701224327, step time: 18.232345581054688ms\r\n",,terminal_output +18234,13657013,"TERMINAL",0,0,"Step 4012, loss: 0.04470236599445343, step time: 17.74144172668457ms\r\n",,terminal_output +18235,13657073,"TERMINAL",0,0,"Step 4013, loss: 0.0518047921359539, step time: 18.32270622253418ms\r\n",,terminal_output +18236,13657142,"TERMINAL",0,0,"Step 4014, loss: 0.03820471838116646, step time: 17.926692962646484ms\r\n",,terminal_output +18237,13657227,"TERMINAL",0,0,"Step 4015, loss: 0.09007281810045242, step time: 17.968177795410156ms\r\n",,terminal_output +18238,13657310,"TERMINAL",0,0,"Step 4016, loss: 0.038154128938913345, step time: 17.920494079589844ms\r\nStep 4017, loss: 0.15093101561069489, step time: 18.285751342773438ms\r\n",,terminal_output +18239,13657387,"genie.py",3069,0,"[]",python,content +18240,13657388,"genie.py",3070,0,"",python,selection_keyboard +18241,13657389,"TERMINAL",0,0,"Step 4018, loss: 0.06271184235811234, step time: 17.726659774780273ms\r\n",,terminal_output +18242,13657443,"TERMINAL",0,0,"Step 4019, loss: 0.0536326989531517, step time: 18.22829246520996ms\r\n",,terminal_output +18243,13657513,"TERMINAL",0,0,"Step 4020, loss: 0.0840483084321022, step time: 27.454614639282227ms\r\n",,terminal_output +18244,13657651,"TERMINAL",0,0,"Step 4021, loss: 0.02455201745033264, step time: 19.521474838256836ms\r\nStep 4022, loss: 0.06207016482949257, step time: 18.050670623779297ms\r\n",,terminal_output +18245,13657701,"genie.py",3070,0,"2",python,content 
+18246,13657702,"genie.py",3071,0,"",python,selection_keyboard +18247,13657703,"TERMINAL",0,0,"Step 4023, loss: 0.07359367609024048, step time: 18.079042434692383ms\r\n",,terminal_output +18248,13657769,"TERMINAL",0,0,"Step 4024, loss: 0.05326316878199577, step time: 17.874479293823242ms\r\n",,terminal_output +18249,13657852,"TERMINAL",0,0,"Step 4025, loss: 0.15054503083229065, step time: 28.705120086669922ms\r\n",,terminal_output +18250,13657920,"TERMINAL",0,0,"Step 4026, loss: 0.6999757885932922, step time: 21.488428115844727ms\r\n",,terminal_output +18251,13657972,"TERMINAL",0,0,"Step 4027, loss: 0.15248273313045502, step time: 27.895689010620117ms\r\n",,terminal_output +18252,13658054,"TERMINAL",0,0,"Step 4028, loss: 0.5320363640785217, step time: 18.784284591674805ms\r\n",,terminal_output +18253,13658112,"TERMINAL",0,0,"Step 4029, loss: 0.04266516864299774, step time: 18.248796463012695ms\r\n",,terminal_output +18254,13658177,"TERMINAL",0,0,"Step 4030, loss: 0.04433344677090645, step time: 17.901897430419922ms\r\n",,terminal_output +18255,13658222,"genie.py",3070,1,"",python,content +18256,13658296,"TERMINAL",0,0,"Step 4031, loss: 0.052464697510004044, step time: 18.323421478271484ms\r\nStep 4032, loss: 0.026596445590257645, step time: 17.713308334350586ms\r\n",,terminal_output +18257,13658348,"TERMINAL",0,0,"Step 4033, loss: 0.03441577032208443, step time: 18.368005752563477ms\r\n",,terminal_output +18258,13658437,"TERMINAL",0,0,"Step 4034, loss: 0.048172127455472946, step time: 18.274307250976562ms\r\n",,terminal_output +18259,13658487,"TERMINAL",0,0,"Step 4035, loss: 0.038195401430130005, step time: 29.74390983581543ms\r\n",,terminal_output +18260,13658545,"genie.py",3070,0,"""""",python,content +18261,13658546,"genie.py",3071,0,"",python,selection_keyboard +18262,13658547,"TERMINAL",0,0,"Step 4036, loss: 0.028373898938298225, step time: 21.879196166992188ms\r\n",,terminal_output +18263,13658624,"TERMINAL",0,0,"Step 4037, loss: 0.0679936558008194, step time: 18.52560043334961ms\r\n",,terminal_output +18264,13658677,"TERMINAL",0,0,"Step 4038, loss: 0.22855928540229797, step time: 18.036365509033203ms\r\n",,terminal_output +18265,13658761,"TERMINAL",0,0,"Step 4039, loss: 0.09155765175819397, step time: 18.183231353759766ms\r\n",,terminal_output +18266,13658825,"TERMINAL",0,0,"Step 4040, loss: 0.22235335409641266, step time: 18.34893226623535ms\r\n",,terminal_output +18267,13658891,"TERMINAL",0,0,"Step 4041, loss: 0.10626866668462753, step time: 18.0203914642334ms\r\n",,terminal_output +18268,13658953,"TERMINAL",0,0,"Step 4042, loss: 0.13510911166667938, step time: 18.001794815063477ms\r\n",,terminal_output +18269,13658996,"TERMINAL",0,0,"Step 4043, loss: 0.07783808559179306, step time: 18.349170684814453ms\r\n",,terminal_output +18270,13659076,"TERMINAL",0,0,"Step 4044, loss: 0.045909859240055084, step time: 17.950773239135742ms\r\n",,terminal_output +18271,13659129,"TERMINAL",0,0,"Step 4045, loss: 0.023674389347434044, step time: 17.96746253967285ms\r\n",,terminal_output +18272,13659182,"TERMINAL",0,0,"Step 4046, loss: 0.06243437901139259, step time: 18.146753311157227ms\r\n",,terminal_output +18273,13659255,"genie.py",3071,0,"g",python,content +18274,13659257,"genie.py",3072,0,"",python,selection_keyboard +18275,13659326,"TERMINAL",0,0,"Step 4047, loss: 0.08006569743156433, step time: 17.819643020629883ms\r\nStep 4048, loss: 0.3036661148071289, step time: 17.893075942993164ms\r\n",,terminal_output +18276,13659374,"genie.py",3072,0,"t",python,content 
+18277,13659375,"genie.py",3073,0,"",python,selection_keyboard +18278,13659397,"TERMINAL",0,0,"Step 4049, loss: 0.05380317196249962, step time: 18.038034439086914ms\r\n",,terminal_output +18279,13659461,"TERMINAL",0,0,"Step 4050, loss: 0.05540928989648819, step time: 17.9135799407959ms\r\n",,terminal_output +18280,13659501,"TERMINAL",0,0,"Step 4051, loss: 0.07299911975860596, step time: 17.96412467956543ms\r\n",,terminal_output +18281,13659600,"TERMINAL",0,0,"Step 4052, loss: 0.10071957856416702, step time: 18.068313598632812ms\r\n",,terminal_output +18282,13659709,"TERMINAL",0,0,"Step 4053, loss: 0.07796245068311691, step time: 17.753124237060547ms\r\nStep 4054, loss: 0.23007066547870636, step time: 17.727136611938477ms\r\n",,terminal_output +18283,13659773,"genie.py",3073,0,"_",python,content +18284,13659774,"genie.py",3074,0,"",python,selection_keyboard +18285,13659774,"TERMINAL",0,0,"Step 4055, loss: 0.07115837186574936, step time: 17.956972122192383ms\r\n",,terminal_output +18286,13659834,"TERMINAL",0,0,"Step 4056, loss: 0.06075073406100273, step time: 17.780542373657227ms\r\n",,terminal_output +18287,13659887,"TERMINAL",0,0,"Step 4057, loss: 0.7871822714805603, step time: 17.668724060058594ms\r\n",,terminal_output +18288,13660008,"TERMINAL",0,0,"Step 4058, loss: 0.020574476569890976, step time: 18.077373504638672ms\r\nStep 4059, loss: 0.039584532380104065, step time: 17.66371726989746ms\r\n",,terminal_output +18289,13660150,"TERMINAL",0,0,"Step 4060, loss: 0.03860118240118027, step time: 17.930984497070312ms\r\nStep 4061, loss: 0.5052409768104553, step time: 18.07260513305664ms\r\n",,terminal_output +18290,13660203,"TERMINAL",0,0,"Step 4062, loss: 0.025212088599801064, step time: 17.840862274169922ms\r\n",,terminal_output +18291,13660252,"genie.py",3074,0,"d",python,content +18292,13660253,"genie.py",3075,0,"",python,selection_keyboard +18293,13660278,"TERMINAL",0,0,"Step 4063, loss: 0.12298676371574402, step time: 17.59171485900879ms\r\n",,terminal_output +18294,13660339,"TERMINAL",0,0,"Step 4064, loss: 0.034629736095666885, step time: 18.12291145324707ms\r\n",,terminal_output +18295,13660413,"genie.py",3075,0,"e",python,content +18296,13660414,"genie.py",3076,0,"",python,selection_keyboard +18297,13660414,"TERMINAL",0,0,"Step 4065, loss: 0.14415349066257477, step time: 17.517805099487305ms\r\n",,terminal_output +18298,13660482,"TERMINAL",0,0,"Step 4066, loss: 0.020156430080533028, step time: 17.9293155670166ms\r\n",,terminal_output +18299,13660487,"genie.py",3076,0,"b",python,content +18300,13660490,"genie.py",3077,0,"",python,selection_keyboard +18301,13660541,"TERMINAL",0,0,"Step 4067, loss: 0.044801127165555954, step time: 17.974376678466797ms\r\n",,terminal_output +18302,13660590,"genie.py",3077,0,"u",python,content +18303,13660591,"genie.py",3078,0,"",python,selection_keyboard +18304,13660655,"TERMINAL",0,0,"Step 4068, loss: 0.11133576929569244, step time: 18.76044273376465ms\r\nStep 4069, loss: 0.2273930311203003, step time: 17.853975296020508ms\r\n",,terminal_output +18305,13660734,"genie.py",3078,0,"g",python,content +18306,13660735,"genie.py",3079,0,"",python,selection_keyboard +18307,13660793,"TERMINAL",0,0,"Step 4070, loss: 0.03592671453952789, step time: 18.152236938476562ms\r\nStep 4071, loss: 0.11523677408695221, step time: 17.681360244750977ms\r\n",,terminal_output +18308,13660848,"TERMINAL",0,0,"Step 4072, loss: 0.04081272706389427, step time: 17.816543579101562ms\r\n",,terminal_output +18309,13660916,"TERMINAL",0,0,"Step 4073, loss: 0.03570961207151413, step time: 
17.911195755004883ms\r\n",,terminal_output +18310,13660969,"TERMINAL",0,0,"Step 4074, loss: 0.08453010022640228, step time: 18.082857131958008ms\r\n",,terminal_output +18311,13661055,"TERMINAL",0,0,"Step 4075, loss: 0.06264753639698029, step time: 17.85564422607422ms\r\n",,terminal_output +18312,13661125,"TERMINAL",0,0,"Step 4076, loss: 0.08122266829013824, step time: 20.182132720947266ms\r\n",,terminal_output +18313,13661170,"TERMINAL",0,0,"Step 4077, loss: 0.07944168895483017, step time: 18.625974655151367ms\r\n",,terminal_output +18314,13661195,"genie.py",3080,0,"",python,selection_command +18315,13661286,"TERMINAL",0,0,"Step 4078, loss: 0.040984075516462326, step time: 18.136024475097656ms\r\n",,terminal_output +18316,13661298,"TERMINAL",0,0,"Step 4079, loss: 0.04331531375646591, step time: 18.228530883789062ms\r\n",,terminal_output +18317,13661379,"genie.py",3081,0,"",python,selection_command +18318,13661437,"TERMINAL",0,0,"Step 4080, loss: 0.47893649339675903, step time: 17.943859100341797ms\r\nStep 4081, loss: 0.05293111503124237, step time: 24.128198623657227ms\r\n",,terminal_output +18319,13661489,"TERMINAL",0,0,"Step 4082, loss: 0.055413197726011276, step time: 19.826650619506836ms\r\n",,terminal_output +18320,13661555,"TERMINAL",0,0,"Step 4083, loss: 0.034190189093351364, step time: 17.886877059936523ms\r\n",,terminal_output +18321,13661622,"TERMINAL",0,0,"Step 4084, loss: 0.12490278482437134, step time: 18.07856559753418ms\r\n",,terminal_output +18322,13661681,"TERMINAL",0,0,"Step 4085, loss: 0.029041951522231102, step time: 29.66451644897461ms\r\n",,terminal_output +18323,13661751,"TERMINAL",0,0,"Step 4086, loss: 0.10506752133369446, step time: 28.83458137512207ms\r\n",,terminal_output +18324,13661836,"TERMINAL",0,0,"Step 4087, loss: 0.11689282208681107, step time: 19.52528953552246ms\r\n",,terminal_output +18325,13661888,"TERMINAL",0,0,"Step 4088, loss: 0.0555868074297905, step time: 18.576860427856445ms\r\n",,terminal_output +18326,13661921,"genie.py",3081,0," ",python,content +18327,13661922,"genie.py",3082,0,"",python,selection_keyboard +18328,13661969,"TERMINAL",0,0,"Step 4089, loss: 0.03795759379863739, step time: 18.318891525268555ms\r\n",,terminal_output +18329,13662028,"TERMINAL",0,0,"Step 4090, loss: 0.04608698561787605, step time: 18.207311630249023ms\r\n",,terminal_output +18330,13662108,"TERMINAL",0,0,"Step 4091, loss: 0.05689748004078865, step time: 18.095016479492188ms\r\n",,terminal_output +18331,13662215,"TERMINAL",0,0,"Step 4092, loss: 0.04434172436594963, step time: 17.847537994384766ms\r\nStep 4093, loss: 0.03255311772227287, step time: 17.852067947387695ms\r\n",,terminal_output +18332,13662250,"genie.py",3082,0,"=",python,content +18333,13662251,"genie.py",3083,0,"",python,selection_keyboard +18334,13662277,"TERMINAL",0,0,"Step 4094, loss: 0.047785017639398575, step time: 18.246173858642578ms\r\n",,terminal_output +18335,13662331,"TERMINAL",0,0,"Step 4095, loss: 0.040544845163822174, step time: 17.697572708129883ms\r\n",,terminal_output +18336,13662382,"genie.py",3083,0," ",python,content +18337,13662383,"genie.py",3084,0,"",python,selection_keyboard +18338,13662467,"TERMINAL",0,0,"Step 4096, loss: 0.02514517866075039, step time: 18.056392669677734ms\r\nStep 4097, loss: 0.033126652240753174, step time: 18.17941665649414ms\r\n",,terminal_output +18339,13662535,"TERMINAL",0,0,"Step 4098, loss: nan, step time: 17.78888702392578ms\r\n",,terminal_output +18340,13662582,"TERMINAL",0,0,"Step 4099, loss: 0.07574394345283508, step time: 
17.77362823486328ms\r\n",,terminal_output +18341,13662670,"TERMINAL",0,0,"Step 4100, loss: 0.04754437506198883, step time: 18.16105842590332ms\r\n",,terminal_output +18342,13662757,"TERMINAL",0,0,"Step 4101, loss: 0.03894432634115219, step time: 17.77338981628418ms\r\n",,terminal_output +18343,13662768,"genie.py",3084,0,"s",python,content +18344,13662769,"genie.py",3085,0,"",python,selection_keyboard +18345,13662770,"TERMINAL",0,0,"Step 4102, loss: 0.043270647525787354, step time: 17.87424087524414ms\r\n",,terminal_output +18346,13662831,"TERMINAL",0,0,"Step 4103, loss: 0.06838712841272354, step time: 18.296003341674805ms\r\n",,terminal_output +18347,13662927,"TERMINAL",0,0,"Step 4104, loss: 0.024426976218819618, step time: 18.04375648498535ms\r\n",,terminal_output +18348,13662984,"genie.py",3085,0,"e",python,content +18349,13662985,"genie.py",3086,0,"",python,selection_keyboard +18350,13663042,"genie.py",3086,0,"l",python,content +18351,13663044,"genie.py",3087,0,"",python,selection_keyboard +18352,13663054,"TERMINAL",0,0,"Step 4105, loss: 0.027037184685468674, step time: 17.746686935424805ms\r\nStep 4106, loss: 0.03948485106229782, step time: 18.185853958129883ms\r\n",,terminal_output +18353,13663156,"genie.py",3087,0,"f",python,content +18354,13663158,"genie.py",3088,0,"",python,selection_keyboard +18355,13663158,"TERMINAL",0,0,"Step 4107, loss: 0.04810047894716263, step time: 18.072843551635742ms\r\nStep 4108, loss: 0.0415397509932518, step time: 17.815351486206055ms\r\n",,terminal_output +18356,13663256,"TERMINAL",0,0,"Step 4109, loss: 0.07667215168476105, step time: 18.28455924987793ms\r\n",,terminal_output +18357,13663257,"genie.py",3088,0,".",python,content +18358,13663258,"genie.py",3089,0,"",python,selection_keyboard +18359,13663350,"TERMINAL",0,0,"Step 4110, loss: 0.024972859770059586, step time: 17.97652244567871ms\r\nStep 4111, loss: 0.027776524424552917, step time: 18.330812454223633ms\r\n",,terminal_output +18360,13663416,"TERMINAL",0,0,"Step 4112, loss: 0.02737913280725479, step time: 18.07546615600586ms\r\n",,terminal_output +18361,13663497,"TERMINAL",0,0,"Step 4113, loss: 0.026763999834656715, step time: 17.980337142944336ms\r\n",,terminal_output +18362,13663532,"TERMINAL",0,0,"Step 4114, loss: 0.14742960035800934, step time: 17.838239669799805ms\r\n",,terminal_output +18363,13663657,"TERMINAL",0,0,"Step 4115, loss: 0.03714382275938988, step time: 18.98980140686035ms\r\nStep 4116, loss: 0.07154050469398499, step time: 18.074989318847656ms\r\n",,terminal_output +18364,13663725,"TERMINAL",0,0,"Step 4117, loss: 0.7015092372894287, step time: 18.11814308166504ms\r\n",,terminal_output +18365,13663789,"TERMINAL",0,0,"Step 4118, loss: 0.07204525917768478, step time: 18.10741424560547ms\r\n",,terminal_output +18366,13663855,"TERMINAL",0,0,"Step 4119, loss: 0.1163753792643547, step time: 17.949581146240234ms\r\n",,terminal_output +18367,13663922,"TERMINAL",0,0,"Step 4120, loss: 0.04165048897266388, step time: 17.854928970336914ms\r\n",,terminal_output +18368,13663974,"TERMINAL",0,0,"Step 4121, loss: 0.05807608366012573, step time: 18.205642700195312ms\r\n",,terminal_output +18369,13664042,"TERMINAL",0,0,"Step 4122, loss: 0.08730101585388184, step time: 17.84825325012207ms\r\n",,terminal_output +18370,13664099,"genie.py",3089,0,"t",python,content +18371,13664100,"genie.py",3090,0,"",python,selection_keyboard +18372,13664111,"TERMINAL",0,0,"Step 4123, loss: 0.4143831431865692, step time: 18.27836036682129ms\r\n",,terminal_output +18373,13664136,"genie.py",3090,0,"o",python,content 
+18374,13664138,"genie.py",3091,0,"",python,selection_keyboard +18375,13664201,"TERMINAL",0,0,"Step 4124, loss: 0.039829809218645096, step time: 18.11051368713379ms\r\n",,terminal_output +18376,13664244,"genie.py",3091,0,"k",python,content +18377,13664245,"genie.py",3092,0,"",python,selection_keyboard +18378,13664245,"TERMINAL",0,0,"Step 4125, loss: 0.06657454371452332, step time: 17.834901809692383ms\r\n",,terminal_output +18379,13664299,"TERMINAL",0,0,"Step 4126, loss: 0.05121534690260887, step time: 17.722129821777344ms\r\n",,terminal_output +18380,13664367,"TERMINAL",0,0,"Step 4127, loss: 0.046024125069379807, step time: 18.271446228027344ms\r\n",,terminal_output +18381,13664441,"TERMINAL",0,0,"Step 4128, loss: 0.06195204332470894, step time: 21.802186965942383ms\r\n",,terminal_output +18382,13664515,"TERMINAL",0,0,"Step 4129, loss: 0.04779425635933876, step time: 18.154382705688477ms\r\n",,terminal_output +18383,13664565,"TERMINAL",0,0,"Step 4130, loss: 0.09397254139184952, step time: 17.897605895996094ms\r\n",,terminal_output +18384,13664632,"TERMINAL",0,0,"Step 4131, loss: 0.024276118725538254, step time: 17.884492874145508ms\r\n",,terminal_output +18385,13664684,"TERMINAL",0,0,"Step 4132, loss: 0.04703877493739128, step time: 17.7609920501709ms\r\n",,terminal_output +18386,13664801,"TERMINAL",0,0,"Step 4133, loss: 0.047692976891994476, step time: 18.232345581054688ms\r\nStep 4134, loss: 0.07588748633861542, step time: 17.724275588989258ms\r\n",,terminal_output +18387,13664868,"TERMINAL",0,0,"Step 4135, loss: 0.09997750073671341, step time: 17.777681350708008ms\r\n",,terminal_output +18388,13664931,"TERMINAL",0,0,"Step 4136, loss: 0.036796171218156815, step time: 17.934322357177734ms\r\n",,terminal_output +18389,13665014,"TERMINAL",0,0,"Step 4137, loss: 0.08060634136199951, step time: 17.943859100341797ms\r\n",,terminal_output +18390,13665067,"TERMINAL",0,0,"Step 4138, loss: 0.025980155915021896, step time: 17.737388610839844ms\r\n",,terminal_output +18391,13665198,"TERMINAL",0,0,"Step 4139, loss: 0.09381282329559326, step time: 18.51511001586914ms\r\nStep 4140, loss: 0.048117876052856445, step time: 18.0051326751709ms\r\n",,terminal_output +18392,13665260,"TERMINAL",0,0,"Step 4141, loss: 0.025116251781582832, step time: 17.989397048950195ms\r\n",,terminal_output +18393,13665313,"TERMINAL",0,0,"Step 4142, loss: 0.06698276102542877, step time: 17.944812774658203ms\r\n",,terminal_output +18394,13665360,"genie.py",3089,3,"tokenizer",python,content +18395,13665455,"TERMINAL",0,0,"Step 4143, loss: 0.029046785086393356, step time: 17.966508865356445ms\r\nStep 4144, loss: 0.09701050072908401, step time: 17.914295196533203ms\r\n",,terminal_output +18396,13665509,"TERMINAL",0,0,"Step 4145, loss: 0.02792823687195778, step time: 17.911911010742188ms\r\n",,terminal_output +18397,13665579,"TERMINAL",0,0,"Step 4146, loss: 0.04451574385166168, step time: 17.821073532104492ms\r\n",,terminal_output +18398,13665653,"TERMINAL",0,0,"Step 4147, loss: 0.05360904335975647, step time: 17.95482635498047ms\r\n",,terminal_output +18399,13665762,"TERMINAL",0,0,"Step 4148, loss: 0.021225659176707268, step time: 17.93837547302246ms\r\nStep 4149, loss: 0.03622829541563988, step time: 17.83442497253418ms\r\n",,terminal_output +18400,13665832,"TERMINAL",0,0,"Step 4150, loss: 0.04466960206627846, step time: 17.81773567199707ms\r\n",,terminal_output +18401,13665899,"TERMINAL",0,0,"Step 4151, loss: 0.04896533116698265, step time: 18.297195434570312ms\r\n",,terminal_output +18402,13665952,"TERMINAL",0,0,"Step 4152, 
loss: 0.02238420955836773, step time: 17.825841903686523ms\r\n",,terminal_output +18403,13666033,"TERMINAL",0,0,"Step 4153, loss: 0.09468071162700653, step time: 17.923593521118164ms\r\n",,terminal_output +18404,13666092,"TERMINAL",0,0,"Step 4154, loss: 0.02063724771142006, step time: 17.854690551757812ms\r\n",,terminal_output +18405,13666222,"TERMINAL",0,0,"Step 4155, loss: 0.04265521094202995, step time: 29.628515243530273ms\r\nStep 4156, loss: 0.05528818070888519, step time: 22.397279739379883ms\r\n",,terminal_output +18406,13666360,"TERMINAL",0,0,"Step 4157, loss: 0.04596329852938652, step time: 20.232677459716797ms\r\nStep 4158, loss: 0.022213207557797432, step time: 19.256114959716797ms\r\n",,terminal_output +18407,13666413,"TERMINAL",0,0,"Step 4159, loss: 0.024932436645030975, step time: 19.430875778198242ms\r\n",,terminal_output +18408,13666494,"TERMINAL",0,0,"Step 4160, loss: 0.06909719854593277, step time: 26.844263076782227ms\r\n",,terminal_output +18409,13666522,"genie.py",3098,0,".",python,content +18410,13666523,"genie.py",3099,0,"",python,selection_keyboard +18411,13666617,"TERMINAL",0,0,"Step 4161, loss: 0.029571939259767532, step time: 18.537521362304688ms\r\nStep 4162, loss: 0.03981076925992966, step time: 18.039464950561523ms\r\n",,terminal_output +18412,13666668,"genie.py",3099,0,"d",python,content +18413,13666669,"genie.py",3100,0,"",python,selection_keyboard +18414,13666690,"TERMINAL",0,0,"Step 4163, loss: 0.016610631719231606, step time: 18.215179443359375ms\r\n",,terminal_output +18415,13666743,"TERMINAL",0,0,"Step 4164, loss: 0.02081826888024807, step time: 17.838001251220703ms\r\n",,terminal_output +18416,13666820,"genie.py",3100,0,"e",python,content +18417,13666820,"genie.py",3101,0,"",python,selection_keyboard +18418,13666821,"TERMINAL",0,0,"Step 4165, loss: 0.12074429541826248, step time: 17.731904983520508ms\r\n",,terminal_output +18419,13666879,"TERMINAL",0,0,"Step 4166, loss: 0.04830783233046532, step time: 18.14436912536621ms\r\n",,terminal_output +18420,13666944,"TERMINAL",0,0,"Step 4167, loss: 0.06577951461076736, step time: 18.847942352294922ms\r\n",,terminal_output +18421,13666987,"genie.py",3101,0,"c",python,content +18422,13666988,"genie.py",3102,0,"",python,selection_keyboard +18423,13667053,"TERMINAL",0,0,"Step 4168, loss: 0.014894644729793072, step time: 17.81773567199707ms\r\nStep 4169, loss: 0.01720399037003517, step time: 18.050432205200195ms\r\n",,terminal_output +18424,13667088,"genie.py",3102,0,"o",python,content +18425,13667089,"genie.py",3103,0,"",python,selection_keyboard +18426,13667187,"TERMINAL",0,0,"Step 4170, loss: 0.18489207327365875, step time: 17.838001251220703ms\r\nStep 4171, loss: 0.03858424350619316, step time: 17.66657829284668ms\r\n",,terminal_output +18427,13667229,"genie.py",3103,0,"d",python,content +18428,13667229,"genie.py",3104,0,"",python,selection_keyboard +18429,13667259,"TERMINAL",0,0,"Step 4172, loss: 0.059911418706178665, step time: 18.03731918334961ms\r\n",,terminal_output +18430,13667335,"genie.py",3104,0,"e",python,content +18431,13667336,"genie.py",3105,0,"",python,selection_keyboard +18432,13667336,"TERMINAL",0,0,"Step 4173, loss: 0.01639031246304512, step time: 17.9443359375ms\r\n",,terminal_output +18433,13667435,"TERMINAL",0,0,"Step 4174, loss: 0.04247983545064926, step time: 17.755985260009766ms\r\nStep 4175, loss: 0.5524902939796448, step time: 17.980337142944336ms\r\n",,terminal_output +18434,13667531,"TERMINAL",0,0,"Step 4176, loss: 0.170089989900589, step time: 
17.800331115722656ms\r\n",,terminal_output +18435,13667644,"TERMINAL",0,0,"Step 4177, loss: 0.1471829116344452, step time: 17.63772964477539ms\r\nStep 4178, loss: 0.02027304656803608, step time: 18.06807518005371ms\r\n",,terminal_output +18436,13667714,"TERMINAL",0,0,"Step 4179, loss: 0.02735428512096405, step time: 17.982006072998047ms\r\n",,terminal_output +18437,13667766,"TERMINAL",0,0,"Step 4180, loss: 0.2689780592918396, step time: 17.940759658813477ms\r\n",,terminal_output +18438,13667901,"TERMINAL",0,0,"Step 4181, loss: 0.0579708032310009, step time: 18.20206642150879ms\r\nStep 4182, loss: 0.09627731144428253, step time: 17.795801162719727ms\r\n",,terminal_output +18439,13667973,"TERMINAL",0,0,"Step 4183, loss: 0.03515956923365593, step time: 17.9750919342041ms\r\n",,terminal_output +18440,13668253,"TERMINAL",0,0,"Step 4184, loss: 0.1743556559085846, step time: 295.33863067626953ms\r\n",,terminal_output +18441,13668318,"TERMINAL",0,0,"Step 4185, loss: 0.05963095650076866, step time: 25.330305099487305ms\r\n",,terminal_output +18442,13668389,"TERMINAL",0,0,"Step 4186, loss: 0.03986816108226776, step time: 20.577669143676758ms\r\n",,terminal_output +18443,13668426,"genie.py",3105,0,"()",python,content +18444,13668427,"genie.py",3106,0,"",python,selection_keyboard +18445,13668454,"TERMINAL",0,0,"Step 4187, loss: 0.05765143409371376, step time: 19.423723220825195ms\r\n",,terminal_output +18446,13668540,"TERMINAL",0,0,"Step 4188, loss: 0.1355210840702057, step time: 19.96612548828125ms\r\n",,terminal_output +18447,13668575,"TERMINAL",0,0,"Step 4189, loss: 0.052971284836530685, step time: 17.769336700439453ms\r\n",,terminal_output +18448,13668646,"genie.py",3106,0,"\n \n ",python,content +18449,13668669,"TERMINAL",0,0,"Step 4190, loss: 0.027931710705161095, step time: 18.44000816345215ms\r\n",,terminal_output +18450,13668715,"TERMINAL",0,0,"Step 4191, loss: 0.08360714465379715, step time: 17.901182174682617ms\r\n",,terminal_output +18451,13668781,"TERMINAL",0,0,"Step 4192, loss: 0.07745812088251114, step time: 18.019437789916992ms\r\n",,terminal_output +18452,13668866,"TERMINAL",0,0,"Step 4193, loss: 0.12771177291870117, step time: 18.347501754760742ms\r\n",,terminal_output +18453,13668959,"TERMINAL",0,0,"Step 4194, loss: 0.01591557450592518, step time: 17.97795295715332ms\r\nStep 4195, loss: 0.18664850294589996, step time: 17.639875411987305ms\r\n",,terminal_output +18454,13669024,"TERMINAL",0,0,"Step 4196, loss: 0.02991621010005474, step time: 18.25547218322754ms\r\n",,terminal_output +18455,13669088,"TERMINAL",0,0,"Step 4197, loss: 0.5840113162994385, step time: 18.02802085876465ms\r\n",,terminal_output +18456,13669154,"TERMINAL",0,0,"Step 4198, loss: 0.02281113713979721, step time: 17.933368682861328ms\r\n",,terminal_output +18457,13669221,"TERMINAL",0,0,"Step 4199, loss: 0.032001346349716187, step time: 18.274545669555664ms\r\n",,terminal_output +18458,13669283,"TERMINAL",0,0,"Step 4200, loss: 0.03383150324225426, step time: 17.944812774658203ms\r\n",,terminal_output +18459,13669345,"TERMINAL",0,0,"Step 4201, loss: 0.05919165909290314, step time: 17.437219619750977ms\r\n",,terminal_output +18460,13669409,"TERMINAL",0,0,"Step 4202, loss: 0.0593799389898777, step time: 18.194198608398438ms\r\n",,terminal_output +18461,13669475,"TERMINAL",0,0,"Step 4203, loss: 0.09034851938486099, step time: 17.700910568237305ms\r\n",,terminal_output +18462,13669539,"TERMINAL",0,0,"Step 4204, loss: 0.12196104973554611, step time: 18.0203914642334ms\r\n",,terminal_output 
+18463,13669603,"TERMINAL",0,0,"Step 4205, loss: 0.04393595829606056, step time: 18.070459365844727ms\r\n",,terminal_output +18464,13669661,"TERMINAL",0,0,"Step 4206, loss: 0.02347577176988125, step time: 17.845869064331055ms\r\n",,terminal_output +18465,13669724,"TERMINAL",0,0,"Step 4207, loss: 0.06014546751976013, step time: 17.583608627319336ms\r\n",,terminal_output +18466,13669788,"TERMINAL",0,0,"Step 4208, loss: 0.4921204149723053, step time: 18.16725730895996ms\r\n",,terminal_output +18467,13669853,"TERMINAL",0,0,"Step 4209, loss: 0.04420752078294754, step time: 17.815589904785156ms\r\n",,terminal_output +18468,13669918,"TERMINAL",0,0,"Step 4210, loss: 0.07781821489334106, step time: 17.83585548400879ms\r\n",,terminal_output +18469,13669986,"TERMINAL",0,0,"Step 4211, loss: 0.07101920247077942, step time: 18.201589584350586ms\r\n",,terminal_output +18470,13670053,"genie.py",3129,0,"",python,selection_mouse +18471,13670117,"TERMINAL",0,0,"Step 4212, loss: 0.08967224508523941, step time: 17.803430557250977ms\r\nStep 4213, loss: 0.02439744397997856, step time: 17.659902572631836ms\r\n",,terminal_output +18472,13670160,"TERMINAL",0,0,"Step 4214, loss: 0.09643279016017914, step time: 18.206357955932617ms\r\n",,terminal_output +18473,13670272,"TERMINAL",0,0,"Step 4215, loss: 0.04934614524245262, step time: 17.566442489624023ms\r\n",,terminal_output +18474,13670324,"TERMINAL",0,0,"Step 4216, loss: 0.02352992072701454, step time: 17.899036407470703ms\r\n",,terminal_output +18475,13670380,"TERMINAL",0,0,"Step 4217, loss: 0.05593489855527878, step time: 18.26000213623047ms\r\n",,terminal_output +18476,13670432,"TERMINAL",0,0,"Step 4218, loss: 0.0554899126291275, step time: 17.933368682861328ms\r\n",,terminal_output +18477,13670536,"genie.py",3119,0,"",python,selection_mouse +18478,13670552,"TERMINAL",0,0,"Step 4219, loss: 0.06987584382295609, step time: 17.547607421875ms\r\n",,terminal_output +18479,13670607,"TERMINAL",0,0,"Step 4220, loss: 0.06978557258844376, step time: 18.27239990234375ms\r\nStep 4221, loss: 0.03468921035528183, step time: 18.014907836914062ms\r\n",,terminal_output +18480,13670696,"TERMINAL",0,0,"Step 4222, loss: 0.8360735774040222, step time: 17.740726470947266ms\r\n",,terminal_output +18481,13670736,"TERMINAL",0,0,"Step 4223, loss: 0.04411252960562706, step time: 18.19586753845215ms\r\n",,terminal_output +18482,13670802,"TERMINAL",0,0,"Step 4224, loss: 0.07868745923042297, step time: 17.845869064331055ms\r\n",,terminal_output +18483,13670908,"TERMINAL",0,0,"Step 4225, loss: 0.0770244374871254, step time: 32.39870071411133ms\r\n",,terminal_output +18484,13670978,"TERMINAL",0,0,"Step 4226, loss: 0.10056493431329727, step time: 22.658109664916992ms\r\n",,terminal_output +18485,13671036,"TERMINAL",0,0,"Step 4227, loss: 0.048129260540008545, step time: 18.26167106628418ms\r\n",,terminal_output +18486,13671100,"TERMINAL",0,0,"Step 4228, loss: 0.2326572835445404, step time: 17.546653747558594ms\r\n",,terminal_output +18487,13671164,"TERMINAL",0,0,"Step 4229, loss: 0.16580869257450104, step time: 18.08953285217285ms\r\n",,terminal_output +18488,13671227,"TERMINAL",0,0,"Step 4230, loss: 0.27417871356010437, step time: 17.752408981323242ms\r\n",,terminal_output +18489,13671290,"TERMINAL",0,0,"Step 4231, loss: 0.052587103098630905, step time: 18.0511474609375ms\r\n",,terminal_output +18490,13671353,"TERMINAL",0,0,"Step 4232, loss: 0.05834707245230675, step time: 24.936914443969727ms\r\n",,terminal_output +18491,13671418,"TERMINAL",0,0,"Step 4233, loss: 0.08081825822591782, step time: 
20.976781845092773ms\r\n",,terminal_output +18492,13671485,"TERMINAL",0,0,"Step 4234, loss: 0.1269955039024353, step time: 18.92685890197754ms\r\n",,terminal_output +18493,13671547,"TERMINAL",0,0,"Step 4235, loss: 0.10905846953392029, step time: 18.691539764404297ms\r\n",,terminal_output +18494,13671664,"TERMINAL",0,0,"Step 4236, loss: 0.06254551559686661, step time: 18.18394660949707ms\r\nStep 4237, loss: 0.024409839883446693, step time: 18.47076416015625ms\r\n",,terminal_output +18495,13671772,"TERMINAL",0,0,"Step 4238, loss: 0.24419106543064117, step time: 18.13673973083496ms\r\nStep 4239, loss: 0.1011347770690918, step time: 18.007516860961914ms\r\n",,terminal_output +18496,13671833,"TERMINAL",0,0,"Step 4240, loss: 0.11841300874948502, step time: 17.82679557800293ms\r\n",,terminal_output +18497,13671898,"TERMINAL",0,0,"Step 4241, loss: 0.036036085337400436, step time: 18.33319664001465ms\r\n",,terminal_output +18498,13671961,"TERMINAL",0,0,"Step 4242, loss: 0.042845677584409714, step time: 17.63176918029785ms\r\n",,terminal_output +18499,13672035,"TERMINAL",0,0,"Step 4243, loss: 0.43319210410118103, step time: 18.197059631347656ms\r\n",,terminal_output +18500,13672089,"TERMINAL",0,0,"Step 4244, loss: 0.05192861706018448, step time: 18.152713775634766ms\r\n",,terminal_output +18501,13672155,"TERMINAL",0,0,"Step 4245, loss: 0.12694065272808075, step time: 19.232988357543945ms\r\n",,terminal_output +18502,13672221,"TERMINAL",0,0,"Step 4246, loss: 0.07152368128299713, step time: 18.31674575805664ms\r\n",,terminal_output +18503,13672284,"TERMINAL",0,0,"Step 4247, loss: 0.025279324501752853, step time: 18.230438232421875ms\r\n",,terminal_output +18504,13672351,"TERMINAL",0,0,"Step 4248, loss: 0.6334125399589539, step time: 17.9290771484375ms\r\n",,terminal_output +18505,13672472,"TERMINAL",0,0,"Step 4249, loss: 0.030758211389183998, step time: 17.976760864257812ms\r\nStep 4250, loss: 0.04577477648854256, step time: 18.082141876220703ms\r\n",,terminal_output +18506,13672524,"TERMINAL",0,0,"Step 4251, loss: 0.044850606471300125, step time: 17.913341522216797ms\r\n",,terminal_output +18507,13672624,"TERMINAL",0,0,"Step 4252, loss: 0.04046149551868439, step time: 17.742395401000977ms\r\n",,terminal_output +18508,13672702,"TERMINAL",0,0,"Step 4253, loss: 0.025450410321354866, step time: 18.202543258666992ms\r\n",,terminal_output +18509,13672756,"TERMINAL",0,0,"Step 4254, loss: 0.052427660673856735, step time: 17.81773567199707ms\r\n",,terminal_output +18510,13672785,"genie.py",3012,0,"",python,selection_mouse +18511,13672809,"TERMINAL",0,0,"Step 4255, loss: 0.058988336473703384, step time: 18.06163787841797ms\r\n",,terminal_output +18512,13672861,"TERMINAL",0,0,"Step 4256, loss: 0.14949966967105865, step time: 18.25237274169922ms\r\n",,terminal_output +18513,13672926,"genie.py",3004,11,"mle_indices",python,selection_mouse +18514,13672981,"TERMINAL",0,0,"Step 4257, loss: 0.06175236776471138, step time: 17.876148223876953ms\r\nStep 4258, loss: 0.03574543446302414, step time: 17.937421798706055ms\r\n",,terminal_output +18515,13673042,"TERMINAL",0,0,"Step 4259, loss: 0.027136029675602913, step time: 18.336772918701172ms\r\n",,terminal_output +18516,13673105,"TERMINAL",0,0,"Step 4260, loss: 0.038234274834394455, step time: 17.973899841308594ms\r\n",,terminal_output +18517,13673174,"TERMINAL",0,0,"Step 4261, loss: 0.0434839092195034, step time: 17.949581146240234ms\r\n",,terminal_output +18518,13673226,"TERMINAL",0,0,"Step 4262, loss: 0.06956340372562408, step time: 
19.423961639404297ms\r\n",,terminal_output +18519,13673319,"TERMINAL",0,0,"Step 4263, loss: 0.05742737650871277, step time: 17.897605895996094ms\r\n",,terminal_output +18520,13673381,"TERMINAL",0,0,"Step 4264, loss: 0.03697634115815163, step time: 17.636775970458984ms\r\n",,terminal_output +18521,13673444,"TERMINAL",0,0,"Step 4265, loss: 0.03954865038394928, step time: 18.51940155029297ms\r\n",,terminal_output +18522,13673497,"TERMINAL",0,0,"Step 4266, loss: 0.0589907206594944, step time: 17.834186553955078ms\r\n",,terminal_output +18523,13673676,"genie.py",3011,0,"",python,selection_mouse +18524,13673677,"genie.py",3004,11,"mle_indices",python,selection_mouse +18525,13673684,"TERMINAL",0,0,"Step 4267, loss: 0.020581649616360664, step time: 18.047571182250977ms\r\nStep 4268, loss: 0.022466709837317467, step time: 18.281936645507812ms\r\nStep 4269, loss: 0.022842353209853172, step time: 17.911672592163086ms\r\n",,terminal_output +18526,13673750,"TERMINAL",0,0,"Step 4270, loss: 0.2582893967628479, step time: 17.975807189941406ms\r\n",,terminal_output +18527,13673803,"TERMINAL",0,0,"Step 4271, loss: 0.020639779046177864, step time: 17.992496490478516ms\r\n",,terminal_output +18528,13673911,"TERMINAL",0,0,"Step 4272, loss: 0.2537883222103119, step time: 17.864704132080078ms\r\n",,terminal_output +18529,13673964,"TERMINAL",0,0,"Step 4273, loss: 0.038600753992795944, step time: 17.84515380859375ms\r\n",,terminal_output +18530,13674030,"TERMINAL",0,0,"Step 4274, loss: 0.03878900781273842, step time: 17.943620681762695ms\r\n",,terminal_output +18531,13674091,"TERMINAL",0,0,"Step 4275, loss: 0.1112871989607811, step time: 17.87090301513672ms\r\n",,terminal_output +18532,13674185,"TERMINAL",0,0,"Step 4276, loss: 0.08653423190116882, step time: 17.716646194458008ms\r\nStep 4277, loss: 0.03161781281232834, step time: 18.094778060913086ms\r\n",,terminal_output +18533,13674278,"TERMINAL",0,0,"Step 4278, loss: 0.06258749216794968, step time: 17.716169357299805ms\r\n",,terminal_output +18534,13674384,"TERMINAL",0,0,"Step 4279, loss: 0.11405176669359207, step time: 17.885208129882812ms\r\nStep 4280, loss: 0.03219975531101227, step time: 18.403291702270508ms\r\n",,terminal_output +18535,13674501,"TERMINAL",0,0,"Step 4281, loss: 0.03268260508775711, step time: 17.79937744140625ms\r\nStep 4282, loss: 0.11640390008687973, step time: 18.05734634399414ms\r\n",,terminal_output +18536,13674569,"TERMINAL",0,0,"Step 4283, loss: 0.03516204282641411, step time: 18.001079559326172ms\r\n",,terminal_output +18537,13674627,"TERMINAL",0,0,"Step 4284, loss: 0.04341237619519234, step time: 18.054962158203125ms\r\n",,terminal_output +18538,13674693,"TERMINAL",0,0,"Step 4285, loss: 0.03724456578493118, step time: 17.79031753540039ms\r\n",,terminal_output +18539,13674756,"TERMINAL",0,0,"Step 4286, loss: 0.06137319281697273, step time: 18.143177032470703ms\r\n",,terminal_output +18540,13674824,"TERMINAL",0,0,"Step 4287, loss: 0.054950013756752014, step time: 17.72165298461914ms\r\n",,terminal_output +18541,13674886,"TERMINAL",0,0,"Step 4288, loss: 0.030102256685495377, step time: 17.671585083007812ms\r\n",,terminal_output +18542,13674955,"TERMINAL",0,0,"Step 4289, loss: 0.057048410177230835, step time: 17.927169799804688ms\r\n",,terminal_output +18543,13675011,"TERMINAL",0,0,"Step 4290, loss: 0.04770369827747345, step time: 17.868995666503906ms\r\n",,terminal_output +18544,13675073,"TERMINAL",0,0,"Step 4291, loss: 0.04143303632736206, step time: 17.756223678588867ms\r\n",,terminal_output +18545,13675147,"TERMINAL",0,0,"Step 
4292, loss: 0.0884486511349678, step time: 18.1427001953125ms\r\n",,terminal_output +18546,13675202,"TERMINAL",0,0,"Step 4293, loss: 0.04607652872800827, step time: 17.494678497314453ms\r\n",,terminal_output +18547,13675312,"genie.py",2926,0,"",python,selection_mouse +18548,13675335,"TERMINAL",0,0,"Step 4294, loss: 0.03113619051873684, step time: 17.887353897094727ms\r\nStep 4295, loss: 0.06011462211608887, step time: 18.13530921936035ms\r\n",,terminal_output +18549,13675469,"genie.py",2917,12,"token_logits",python,selection_mouse +18550,13675470,"TERMINAL",0,0,"Step 4296, loss: 0.03146107494831085, step time: 17.99464225769043ms\r\nStep 4297, loss: 0.041015058755874634, step time: 17.869234085083008ms\r\n",,terminal_output +18551,13675522,"TERMINAL",0,0,"Step 4298, loss: 0.052409086376428604, step time: 18.79739761352539ms\r\n",,terminal_output +18552,13675616,"TERMINAL",0,0,"Step 4299, loss: 0.12721841037273407, step time: 18.221139907836914ms\r\n",,terminal_output +18553,13675725,"TERMINAL",0,0,"Step 4300, loss: 0.0342937596142292, step time: 17.931699752807617ms\r\nStep 4301, loss: 0.06013995036482811, step time: 18.39447021484375ms\r\n",,terminal_output +18554,13675783,"TERMINAL",0,0,"Step 4302, loss: 0.1558273285627365, step time: 18.330812454223633ms\r\n",,terminal_output +18555,13675843,"TERMINAL",0,0,"Step 4303, loss: 0.03912133723497391, step time: 17.817974090576172ms\r\n",,terminal_output +18556,13675905,"TERMINAL",0,0,"Step 4304, loss: 0.023844685405492783, step time: 19.35291290283203ms\r\n",,terminal_output +18557,13675968,"TERMINAL",0,0,"Step 4305, loss: 0.07783295214176178, step time: 18.013477325439453ms\r\n",,terminal_output +18558,13676035,"TERMINAL",0,0,"Step 4306, loss: 0.20488795638084412, step time: 18.07093620300293ms\r\n",,terminal_output +18559,13676100,"TERMINAL",0,0,"Step 4307, loss: 0.024866268038749695, step time: 18.29218864440918ms\r\n",,terminal_output +18560,13676122,"genie.py",2912,0,"",python,selection_mouse +18561,13676167,"TERMINAL",0,0,"Step 4308, loss: 0.051119428128004074, step time: 18.07689666748047ms\r\n",,terminal_output +18562,13676262,"TERMINAL",0,0,"Step 4309, loss: 0.02797521837055683, step time: 17.727136611938477ms\r\n",,terminal_output +18563,13676275,"genie.py",2908,7,"outputs",python,selection_mouse +18564,13676354,"TERMINAL",0,0,"Step 4310, loss: 0.22303621470928192, step time: 18.25881004333496ms\r\nStep 4311, loss: 0.07280068099498749, step time: 17.828702926635742ms\r\n",,terminal_output +18565,13676452,"TERMINAL",0,0,"Step 4312, loss: 0.07505200058221817, step time: 17.5325870513916ms\r\n",,terminal_output +18566,13676513,"TERMINAL",0,0,"Step 4313, loss: 0.11877824366092682, step time: 17.940521240234375ms\r\n",,terminal_output +18567,13676583,"TERMINAL",0,0,"Step 4314, loss: 0.03647258132696152, step time: 17.818927764892578ms\r\n",,terminal_output +18568,13676637,"TERMINAL",0,0,"Step 4315, loss: 0.02253578044474125, step time: 17.776012420654297ms\r\n",,terminal_output +18569,13676744,"TERMINAL",0,0,"Step 4316, loss: 0.027239402756094933, step time: 18.11385154724121ms\r\nStep 4317, loss: 0.11647890508174896, step time: 17.499685287475586ms\r\n",,terminal_output +18570,13676805,"TERMINAL",0,0,"Step 4318, loss: 0.01884496957063675, step time: 17.87877082824707ms\r\n",,terminal_output +18571,13676821,"genie.py",2901,0,"",python,selection_mouse +18572,13676880,"TERMINAL",0,0,"Step 4319, loss: 0.03597100079059601, step time: 17.972230911254883ms\r\n",,terminal_output +18573,13676934,"TERMINAL",0,0,"Step 4320, loss: 
0.07378122955560684, step time: 17.999887466430664ms\r\n",,terminal_output +18574,13676980,"genie.py",2901,6,"argmax",python,selection_mouse +18575,13677025,"TERMINAL",0,0,"Step 4321, loss: 0.060999173671007156, step time: 17.72475242614746ms\r\n",,terminal_output +18576,13677059,"TERMINAL",0,0,"Step 4322, loss: 0.05509796366095543, step time: 18.189668655395508ms\r\n",,terminal_output +18577,13677154,"TERMINAL",0,0,"Step 4323, loss: 0.028954794630408287, step time: 17.986297607421875ms\r\n",,terminal_output +18578,13677207,"TERMINAL",0,0,"Step 4324, loss: 0.021444624289870262, step time: 17.621755599975586ms\r\n",,terminal_output +18579,13677311,"TERMINAL",0,0,"Step 4325, loss: 0.027607889845967293, step time: 18.02229881286621ms\r\nStep 4326, loss: 0.019561443477869034, step time: 17.83466339111328ms\r\n",,terminal_output +18580,13677382,"TERMINAL",0,0,"Step 4327, loss: 0.11802242696285248, step time: 17.766475677490234ms\r\n",,terminal_output +18581,13677439,"TERMINAL",0,0,"Step 4328, loss: 0.02433089353144169, step time: 18.134117126464844ms\r\n",,terminal_output +18582,13677501,"TERMINAL",0,0,"Step 4329, loss: 0.03821435570716858, step time: 17.47870445251465ms\r\n",,terminal_output +18583,13677598,"TERMINAL",0,0,"Step 4330, loss: 0.15000800788402557, step time: 17.766714096069336ms\r\n",,terminal_output +18584,13677653,"genie.py",2905,0,"",python,selection_mouse +18585,13677654,"genie.py",2901,6,"argmax",python,selection_mouse +18586,13677705,"TERMINAL",0,0,"Step 4331, loss: 0.046427171677351, step time: 18.20659637451172ms\r\nStep 4332, loss: 0.016588779166340828, step time: 17.93813705444336ms\r\n",,terminal_output +18587,13677759,"TERMINAL",0,0,"Step 4333, loss: 0.02944347821176052, step time: 17.822265625ms\r\n",,terminal_output +18588,13677854,"TERMINAL",0,0,"Step 4334, loss: 0.06991307437419891, step time: 18.204212188720703ms\r\n",,terminal_output +18589,13677947,"TERMINAL",0,0,"Step 4335, loss: 0.07831496745347977, step time: 17.978429794311523ms\r\nStep 4336, loss: 0.04416492208838463, step time: 17.693758010864258ms\r\n",,terminal_output +18590,13678045,"TERMINAL",0,0,"Step 4337, loss: 0.043188802897930145, step time: 17.92001724243164ms\r\n",,terminal_output +18591,13678108,"TERMINAL",0,0,"Step 4338, loss: 0.10771728307008743, step time: 17.68326759338379ms\r\n",,terminal_output +18592,13678178,"genie.py",2898,0,"",python,selection_mouse +18593,13678215,"TERMINAL",0,0,"Step 4339, loss: 0.03457290679216385, step time: 17.731428146362305ms\r\nStep 4340, loss: 0.043782349675893784, step time: 18.11695098876953ms\r\n",,terminal_output +18594,13678268,"TERMINAL",0,0,"Step 4341, loss: 0.02744997665286064, step time: 17.457008361816406ms\r\n",,terminal_output +18595,13678319,"genie.py",2897,3,"jnp",python,selection_mouse +18596,13678374,"TERMINAL",0,0,"Step 4342, loss: 0.021460069343447685, step time: 17.907142639160156ms\r\n",,terminal_output +18597,13678637,"TERMINAL",0,0,"Step 4343, loss: 0.017106324434280396, step time: 296.0679531097412ms\r\n",,terminal_output +18598,13678712,"TERMINAL",0,0,"Step 4344, loss: 0.017276650294661522, step time: 25.772571563720703ms\r\n",,terminal_output +18599,13678767,"TERMINAL",0,0,"Step 4345, loss: 0.039901431649923325, step time: 20.0808048248291ms\r\n",,terminal_output +18600,13678786,"genie.py",2904,0,"",python,selection_mouse +18601,13678831,"TERMINAL",0,0,"Step 4346, loss: 0.06971583515405655, step time: 19.17576789855957ms\r\n",,terminal_output +18602,13678895,"TERMINAL",0,0,"Step 4347, loss: 0.028806589543819427, step time: 
18.222808837890625ms\r\n",,terminal_output +18603,13678931,"genie.py",2901,6,"argmax",python,selection_mouse +18604,13678974,"TERMINAL",0,0,"Step 4348, loss: 0.023911645635962486, step time: 17.915725708007812ms\r\n",,terminal_output +18605,13679027,"TERMINAL",0,0,"Step 4349, loss: 0.04788820818066597, step time: 18.15199851989746ms\r\n",,terminal_output +18606,13679134,"TERMINAL",0,0,"Step 4350, loss: 0.07038373500108719, step time: 18.114805221557617ms\r\n",,terminal_output +18607,13679192,"TERMINAL",0,0,"Step 4351, loss: 0.2539207339286804, step time: 19.06609535217285ms\r\n",,terminal_output +18608,13679255,"TERMINAL",0,0,"Step 4352, loss: 0.08072483539581299, step time: 18.59307289123535ms\r\n",,terminal_output +18609,13679318,"TERMINAL",0,0,"Step 4353, loss: 0.17521634697914124, step time: 17.62843132019043ms\r\n",,terminal_output +18610,13679381,"TERMINAL",0,0,"Step 4354, loss: 0.021573364734649658, step time: 18.13673973083496ms\r\n",,terminal_output +18611,13679439,"TERMINAL",0,0,"Step 4355, loss: 0.09411711245775223, step time: 31.940221786499023ms\r\n",,terminal_output +18612,13679516,"genie.py",2961,0,"",python,selection_mouse +18613,13679526,"TERMINAL",0,0,"Step 4356, loss: 0.040326617658138275, step time: 29.3123722076416ms\r\n",,terminal_output +18614,13679578,"TERMINAL",0,0,"Step 4357, loss: 0.016571741551160812, step time: 19.094228744506836ms\r\n",,terminal_output +18615,13679685,"TERMINAL",0,0,"Step 4358, loss: 0.06477898359298706, step time: 18.58353614807129ms\r\nStep 4359, loss: 0.017552141100168228, step time: 18.229007720947266ms\r\n",,terminal_output +18616,13679746,"TERMINAL",0,0,"Step 4360, loss: 0.01979835331439972, step time: 17.633676528930664ms\r\n",,terminal_output +18617,13679807,"TERMINAL",0,0,"Step 4361, loss: 0.048372913151979446, step time: 18.113374710083008ms\r\n",,terminal_output +18618,13679871,"TERMINAL",0,0,"Step 4362, loss: 0.03430592268705368, step time: 17.945051193237305ms\r\n",,terminal_output +18619,13679936,"TERMINAL",0,0,"Step 4363, loss: 0.27774322032928467, step time: 17.8682804107666ms\r\n",,terminal_output +18620,13679997,"genie.py",2890,0,"",python,selection_mouse +18621,13680019,"TERMINAL",0,0,"Step 4364, loss: 0.0667380839586258, step time: 18.34702491760254ms\r\n",,terminal_output +18622,13680073,"TERMINAL",0,0,"Step 4365, loss: 0.028867483139038086, step time: 17.920732498168945ms\r\n",,terminal_output +18623,13680142,"genie.py",2883,11,"mle_indices",python,selection_mouse +18624,13680194,"TERMINAL",0,0,"Step 4366, loss: 0.2743188738822937, step time: 17.896652221679688ms\r\nStep 4367, loss: 0.04601320996880531, step time: 18.150806427001953ms\r\n",,terminal_output +18625,13680297,"TERMINAL",0,0,"Step 4368, loss: 0.04268490523099899, step time: 17.693758010864258ms\r\n",,terminal_output +18626,13680354,"TERMINAL",0,0,"Step 4369, loss: 0.23296725749969482, step time: 17.879486083984375ms\r\n",,terminal_output +18627,13680417,"TERMINAL",0,0,"Step 4370, loss: 0.0389355830848217, step time: 18.288373947143555ms\r\n",,terminal_output +18628,13680481,"TERMINAL",0,0,"Step 4371, loss: 0.0674959123134613, step time: 17.843008041381836ms\r\n",,terminal_output +18629,13680544,"TERMINAL",0,0,"Step 4372, loss: 0.03202277421951294, step time: 17.774581909179688ms\r\n",,terminal_output +18630,13680609,"TERMINAL",0,0,"Step 4373, loss: 0.03183240070939064, step time: 18.027782440185547ms\r\n",,terminal_output +18631,13680668,"TERMINAL",0,0,"Step 4374, loss: 0.09391630440950394, step time: 17.74740219116211ms\r\n",,terminal_output 
+18632,13680732,"TERMINAL",0,0,"Step 4375, loss: 0.15030157566070557, step time: 18.050432205200195ms\r\n",,terminal_output +18633,13680793,"TERMINAL",0,0,"Step 4376, loss: 0.040187984704971313, step time: 17.937660217285156ms\r\n",,terminal_output +18634,13680855,"TERMINAL",0,0,"Step 4377, loss: 0.0257129929959774, step time: 17.85421371459961ms\r\n",,terminal_output +18635,13680963,"TERMINAL",0,0,"Step 4378, loss: 0.05291461944580078, step time: 17.67277717590332ms\r\nStep 4379, loss: 0.14330622553825378, step time: 17.95339584350586ms\r\n",,terminal_output +18636,13681086,"TERMINAL",0,0,"Step 4380, loss: 0.052350372076034546, step time: 17.754077911376953ms\r\nStep 4381, loss: 0.028508882969617844, step time: 17.9290771484375ms\r\n",,terminal_output +18637,13681152,"TERMINAL",0,0,"Step 4382, loss: 0.03510627895593643, step time: 18.319129943847656ms\r\n",,terminal_output +18638,13681251,"TERMINAL",0,0,"Step 4383, loss: 0.034178994596004486, step time: 17.85445213317871ms\r\n",,terminal_output +18639,13681313,"TERMINAL",0,0,"Step 4384, loss: 0.02636502869427204, step time: 17.606496810913086ms\r\n",,terminal_output +18640,13681379,"TERMINAL",0,0,"Step 4385, loss: 0.029789548367261887, step time: 18.11814308166504ms\r\n",,terminal_output +18641,13681440,"TERMINAL",0,0,"Step 4386, loss: 0.030062096193432808, step time: 18.019914627075195ms\r\n",,terminal_output +18642,13681505,"TERMINAL",0,0,"Step 4387, loss: 0.02341979369521141, step time: 17.537593841552734ms\r\n",,terminal_output +18643,13681568,"TERMINAL",0,0,"Step 4388, loss: 0.2731618583202362, step time: 18.107891082763672ms\r\n",,terminal_output +18644,13681633,"TERMINAL",0,0,"Step 4389, loss: 0.03854874148964882, step time: 19.87481117248535ms\r\n",,terminal_output +18645,13681695,"TERMINAL",0,0,"Step 4390, loss: 0.13711771368980408, step time: 17.94123649597168ms\r\n",,terminal_output +18646,13681760,"TERMINAL",0,0,"Step 4391, loss: 0.11941812932491302, step time: 18.250703811645508ms\r\n",,terminal_output +18647,13681821,"TERMINAL",0,0,"Step 4392, loss: 0.045393723994493484, step time: 17.818212509155273ms\r\n",,terminal_output +18648,13681883,"TERMINAL",0,0,"Step 4393, loss: 0.05241644009947777, step time: 18.04804801940918ms\r\n",,terminal_output +18649,13681946,"TERMINAL",0,0,"Step 4394, loss: 0.10884950309991837, step time: 18.115997314453125ms\r\n",,terminal_output +18650,13682008,"TERMINAL",0,0,"Step 4395, loss: 0.035068780183792114, step time: 17.871618270874023ms\r\n",,terminal_output +18651,13682071,"TERMINAL",0,0,"Step 4396, loss: 0.024112721905112267, step time: 17.766714096069336ms\r\n",,terminal_output +18652,13682132,"TERMINAL",0,0,"Step 4397, loss: 0.028322715312242508, step time: 18.30434799194336ms\r\n",,terminal_output +18653,13682228,"TERMINAL",0,0,"Step 4398, loss: 0.03397464007139206, step time: 17.544269561767578ms\r\nStep 4399, loss: 0.028702152892947197, step time: 18.009424209594727ms\r\n",,terminal_output +18654,13682321,"TERMINAL",0,0,"Step 4400, loss: 0.030127499252557755, step time: 18.349647521972656ms\r\n",,terminal_output +18655,13682426,"TERMINAL",0,0,"Step 4401, loss: 0.06155658885836601, step time: 17.63129234313965ms\r\nStep 4402, loss: 0.10572722554206848, step time: 17.87400245666504ms\r\n",,terminal_output +18656,13682483,"TERMINAL",0,0,"Step 4403, loss: 0.017547322437167168, step time: 18.28455924987793ms\r\n",,terminal_output +18657,13682544,"TERMINAL",0,0,"Step 4404, loss: 0.04235391691327095, step time: 17.788410186767578ms\r\n",,terminal_output +18658,13682652,"TERMINAL",0,0,"Step 
4405, loss: 0.05210278928279877, step time: 17.813444137573242ms\r\n",,terminal_output +18659,13682703,"TERMINAL",0,0,"Step 4406, loss: 0.02532692439854145, step time: 18.026113510131836ms\r\n",,terminal_output +18660,13682755,"TERMINAL",0,0,"Step 4407, loss: 0.023208467289805412, step time: 17.815828323364258ms\r\n",,terminal_output +18661,13682900,"TERMINAL",0,0,"Step 4408, loss: 0.17163591086864471, step time: 17.873764038085938ms\r\nStep 4409, loss: 0.022520968690514565, step time: 17.956018447875977ms\r\n",,terminal_output +18662,13682953,"TERMINAL",0,0,"Step 4410, loss: 0.024533871561288834, step time: 17.869234085083008ms\r\n",,terminal_output +18663,13682994,"genie.py",2189,0,"",python,selection_mouse +18664,13683059,"TERMINAL",0,0,"Step 4411, loss: 0.023054905235767365, step time: 17.796039581298828ms\r\nStep 4412, loss: 0.04447579011321068, step time: 18.235445022583008ms\r\n",,terminal_output +18665,13683153,"genie.py",2178,17,"tokenizer_outputs",python,selection_mouse +18666,13683174,"TERMINAL",0,0,"Step 4413, loss: 0.21840378642082214, step time: 18.008708953857422ms\r\n",,terminal_output +18667,13683255,"TERMINAL",0,0,"Step 4414, loss: 0.06783416122198105, step time: 17.750024795532227ms\r\nStep 4415, loss: 0.04733968526124954, step time: 18.253803253173828ms\r\n",,terminal_output +18668,13683347,"TERMINAL",0,0,"Step 4416, loss: 0.02328081801533699, step time: 17.758846282958984ms\r\n",,terminal_output +18669,13683454,"TERMINAL",0,0,"Step 4417, loss: 0.06823677569627762, step time: 17.87114143371582ms\r\nStep 4418, loss: 0.2837775945663452, step time: 18.665313720703125ms\r\n",,terminal_output +18670,13683520,"TERMINAL",0,0,"Step 4419, loss: 0.08732791990041733, step time: 17.67134666442871ms\r\n",,terminal_output +18671,13683593,"TERMINAL",0,0,"Step 4420, loss: 0.26287320256233215, step time: 17.89236068725586ms\r\n",,terminal_output +18672,13683647,"TERMINAL",0,0,"Step 4421, loss: 0.016219424083828926, step time: 18.436431884765625ms\r\n",,terminal_output +18673,13683712,"TERMINAL",0,0,"Step 4422, loss: 0.0292995423078537, step time: 17.73214340209961ms\r\n",,terminal_output +18674,13683772,"TERMINAL",0,0,"Step 4423, loss: 0.09485110640525818, step time: 17.941951751708984ms\r\n",,terminal_output +18675,13683834,"TERMINAL",0,0,"Step 4424, loss: 0.06200942024588585, step time: 18.21732521057129ms\r\n",,terminal_output +18676,13683893,"TERMINAL",0,0,"Step 4425, loss: 0.07316862046718597, step time: 17.939329147338867ms\r\n",,terminal_output +18677,13683955,"TERMINAL",0,0,"Step 4426, loss: 0.031237196177244186, step time: 21.564483642578125ms\r\n",,terminal_output +18678,13684016,"TERMINAL",0,0,"Step 4427, loss: 0.1264091432094574, step time: 18.38064193725586ms\r\n",,terminal_output +18679,13684109,"TERMINAL",0,0,"Step 4428, loss: 0.029801364988088608, step time: 17.995357513427734ms\r\n",,terminal_output +18680,13684204,"TERMINAL",0,0,"Step 4429, loss: 0.02900909259915352, step time: 17.8983211517334ms\r\nStep 4430, loss: 0.38842254877090454, step time: 18.141508102416992ms\r\n",,terminal_output +18681,13684288,"TERMINAL",0,0,"Step 4431, loss: 0.04117513820528984, step time: 17.93050765991211ms\r\n",,terminal_output +18682,13684395,"TERMINAL",0,0,"Step 4432, loss: 0.05046917125582695, step time: 17.830848693847656ms\r\nStep 4433, loss: 0.045086320489645004, step time: 17.998218536376953ms\r\n",,terminal_output +18683,13684458,"TERMINAL",0,0,"Step 4434, loss: 0.027940109372138977, step time: 17.83466339111328ms\r\n",,terminal_output +18684,13684518,"TERMINAL",0,0,"Step 4435, 
loss: 0.0741688534617424, step time: 17.849206924438477ms\r\n",,terminal_output +18685,13684611,"TERMINAL",0,0,"Step 4436, loss: 0.057863011956214905, step time: 18.05734634399414ms\r\n",,terminal_output +18686,13684666,"TERMINAL",0,0,"Step 4437, loss: 0.6065502166748047, step time: 17.546653747558594ms\r\n",,terminal_output +18687,13684772,"TERMINAL",0,0,"Step 4438, loss: 0.3239509165287018, step time: 17.84968376159668ms\r\nStep 4439, loss: 0.10363578051328659, step time: 17.97938346862793ms\r\n",,terminal_output +18688,13684818,"genie.py",3119,0,"",python,selection_mouse +18689,13684836,"TERMINAL",0,0,"Step 4440, loss: 0.0461147278547287, step time: 17.80414581298828ms\r\n",,terminal_output +18690,13684901,"TERMINAL",0,0,"Step 4441, loss: 0.1020880788564682, step time: 17.60101318359375ms\r\n",,terminal_output +18691,13684999,"TERMINAL",0,0,"Step 4442, loss: 0.3204096257686615, step time: 18.294334411621094ms\r\n",,terminal_output +18692,13685060,"TERMINAL",0,0,"Step 4443, loss: 0.028100300580263138, step time: 17.37833023071289ms\r\n",,terminal_output +18693,13685122,"TERMINAL",0,0,"Step 4444, loss: 0.029004927724599838, step time: 18.048763275146484ms\r\n",,terminal_output +18694,13685221,"TERMINAL",0,0,"Step 4445, loss: 0.026344124227762222, step time: 18.193483352661133ms\r\nStep 4446, loss: 0.09689120948314667, step time: 18.033266067504883ms\r\n",,terminal_output +18695,13685318,"TERMINAL",0,0,"Step 4447, loss: 0.0884636864066124, step time: 17.5931453704834ms\r\n",,terminal_output +18696,13685379,"TERMINAL",0,0,"Step 4448, loss: 0.12480474263429642, step time: 18.26310157775879ms\r\n",,terminal_output +18697,13685441,"TERMINAL",0,0,"Step 4449, loss: 0.15770553052425385, step time: 17.5473690032959ms\r\n",,terminal_output +18698,13685548,"TERMINAL",0,0,"Step 4450, loss: 0.0928022637963295, step time: 17.830610275268555ms\r\nStep 4451, loss: 0.06581258028745651, step time: 18.115758895874023ms\r\n",,terminal_output +18699,13685612,"TERMINAL",0,0,"Step 4452, loss: 0.060929473489522934, step time: 17.587900161743164ms\r\n",,terminal_output +18700,13685677,"TERMINAL",0,0,"Step 4453, loss: 0.046013206243515015, step time: 17.541170120239258ms\r\n",,terminal_output +18701,13685737,"genie.py",3119,0,"r",python,content +18702,13685739,"genie.py",3120,0,"",python,selection_keyboard +18703,13685759,"TERMINAL",0,0,"Step 4454, loss: 0.039510924369096756, step time: 18.43404769897461ms\r\n",,terminal_output +18704,13685793,"genie.py",3120,0,"o",python,content +18705,13685795,"genie.py",3121,0,"",python,selection_keyboard +18706,13685814,"TERMINAL",0,0,"Step 4455, loss: 0.07430168241262436, step time: 17.543554306030273ms\r\n",,terminal_output +18707,13685874,"TERMINAL",0,0,"Step 4456, loss: 0.2614211142063141, step time: 17.737627029418945ms\r\n",,terminal_output +18708,13685926,"TERMINAL",0,0,"Step 4457, loss: 0.06086878478527069, step time: 18.025875091552734ms\r\n",,terminal_output +18709,13685961,"genie.py",3121,0,"k",python,content +18710,13685962,"genie.py",3122,0,"",python,selection_keyboard +18711,13686054,"TERMINAL",0,0,"Step 4458, loss: 0.2104986011981964, step time: 17.83132553100586ms\r\nStep 4459, loss: 0.04154632240533829, step time: 17.6694393157959ms\r\n",,terminal_output +18712,13686150,"TERMINAL",0,0,"Step 4460, loss: 0.0685887336730957, step time: 18.31221580505371ms\r\n",,terminal_output +18713,13686237,"genie.py",3121,1,"",python,content +18714,13686260,"TERMINAL",0,0,"Step 4461, loss: 0.14560584723949432, step time: 17.629623413085938ms\r\nStep 4462, loss: 
0.02595585025846958, step time: 17.79460906982422ms\r\n",,terminal_output +18715,13686355,"genie.py",3120,1,"",python,content +18716,13686376,"TERMINAL",0,0,"Step 4463, loss: 0.04514671489596367, step time: 18.04375648498535ms\r\nStep 4464, loss: 0.036656465381383896, step time: 17.866849899291992ms\r\n",,terminal_output +18717,13686455,"TERMINAL",0,0,"Step 4465, loss: 0.027145951986312866, step time: 18.033266067504883ms\r\n",,terminal_output +18718,13686477,"genie.py",3119,1,"",python,content +18719,13686499,"TERMINAL",0,0,"Step 4466, loss: 0.04234306886792183, step time: 18.399477005004883ms\r\n",,terminal_output +18720,13686565,"TERMINAL",0,0,"Step 4467, loss: 0.07782670855522156, step time: 17.714262008666992ms\r\n",,terminal_output +18721,13686616,"genie.py",3119,0,"t",python,content +18722,13686617,"genie.py",3120,0,"",python,selection_keyboard +18723,13686641,"TERMINAL",0,0,"Step 4468, loss: 0.44855085015296936, step time: 17.739057540893555ms\r\n",,terminal_output +18724,13686695,"TERMINAL",0,0,"Step 4469, loss: 0.037446487694978714, step time: 18.160343170166016ms\r\n",,terminal_output +18725,13686753,"genie.py",3120,0,"k",python,content +18726,13686754,"genie.py",3121,0,"",python,selection_keyboard +18727,13686817,"TERMINAL",0,0,"Step 4470, loss: 0.040062565356492996, step time: 18.053531646728516ms\r\nStep 4471, loss: 0.04831644892692566, step time: 17.661094665527344ms\r\n",,terminal_output +18728,13686946,"TERMINAL",0,0,"Step 4472, loss: 0.15145301818847656, step time: 17.978191375732422ms\r\nStep 4473, loss: 0.14138232171535492, step time: 17.666101455688477ms\r\n",,terminal_output +18729,13687076,"genie.py",3120,1,"",python,content +18730,13687079,"TERMINAL",0,0,"Step 4474, loss: 0.05581926926970482, step time: 17.935991287231445ms\r\nStep 4475, loss: 0.032707344740629196, step time: 18.032550811767578ms\r\n",,terminal_output +18731,13687140,"TERMINAL",0,0,"Step 4476, loss: 0.057502392679452896, step time: 17.708301544189453ms\r\n",,terminal_output +18732,13687214,"TERMINAL",0,0,"Step 4477, loss: 0.0738583505153656, step time: 17.606735229492188ms\r\n",,terminal_output +18733,13687568,"TERMINAL",0,0,"Step 4478, loss: 0.2352808564901352, step time: 330.1665782928467ms\r\n",,terminal_output +18734,13687621,"TERMINAL",0,0,"Step 4479, loss: 0.02348640374839306, step time: 25.275707244873047ms\r\n",,terminal_output +18735,13687728,"TERMINAL",0,0,"Step 4480, loss: 0.024382302537560463, step time: 20.48182487487793ms\r\nStep 4481, loss: 0.021164702251553535, step time: 19.10686492919922ms\r\n",,terminal_output +18736,13687794,"TERMINAL",0,0,"Step 4482, loss: 0.028028085827827454, step time: 18.488168716430664ms\r\n",,terminal_output +18737,13687859,"TERMINAL",0,0,"Step 4483, loss: 0.09109722822904587, step time: 17.760753631591797ms\r\n",,terminal_output +18738,13687925,"TERMINAL",0,0,"Step 4484, loss: 0.027561703696846962, step time: 19.420623779296875ms\r\n",,terminal_output +18739,13688001,"TERMINAL",0,0,"Step 4485, loss: 0.5457495450973511, step time: 17.693281173706055ms\r\n",,terminal_output +18740,13688005,"genie.py",3120,0,"o",python,content +18741,13688006,"genie.py",3121,0,"",python,selection_keyboard +18742,13688059,"TERMINAL",0,0,"Step 4486, loss: 0.040742963552474976, step time: 17.940521240234375ms\r\n",,terminal_output +18743,13688111,"TERMINAL",0,0,"Step 4487, loss: 0.038389500230550766, step time: 18.22519302368164ms\r\n",,terminal_output +18744,13688127,"genie.py",3121,0,"k",python,content +18745,13688128,"genie.py",3122,0,"",python,selection_keyboard 
+18746,13688240,"TERMINAL",0,0,"Step 4488, loss: 0.06446133553981781, step time: 17.833471298217773ms\r\nStep 4489, loss: 0.08586053550243378, step time: 17.668724060058594ms\r\n",,terminal_output +18747,13688392,"TERMINAL",0,0,"Step 4490, loss: 0.10921638458967209, step time: 18.479108810424805ms\r\nStep 4491, loss: 0.06061836704611778, step time: 17.745494842529297ms\r\n",,terminal_output +18748,13688434,"TERMINAL",0,0,"Step 4492, loss: 0.044860415160655975, step time: 17.87567138671875ms\r\n",,terminal_output +18749,13688529,"TERMINAL",0,0,"Step 4493, loss: 0.06906341761350632, step time: 18.299341201782227ms\r\n",,terminal_output +18750,13688685,"TERMINAL",0,0,"Step 4494, loss: 0.03745188191533089, step time: 17.873048782348633ms\r\nStep 4495, loss: 0.03585641458630562, step time: 17.78721809387207ms\r\nStep 4496, loss: 0.06402173638343811, step time: 18.247604370117188ms\r\n",,terminal_output +18751,13688750,"TERMINAL",0,0,"Step 4497, loss: 0.0827370211482048, step time: 17.6699161529541ms\r\n",,terminal_output +18752,13688794,"genie.py",3119,3,"tokenizer_outputs",python,content +18753,13688873,"TERMINAL",0,0,"Step 4498, loss: 0.5536215305328369, step time: 17.869949340820312ms\r\nStep 4499, loss: 0.05125775188207626, step time: 18.05591583251953ms\r\n",,terminal_output +18754,13689034,"genie.py",3136,0,",",python,content +18755,13689035,"genie.py",3137,0,"",python,selection_keyboard +18756,13689193,"genie.py",3137,0," ",python,content +18757,13689194,"genie.py",3138,0,"",python,selection_keyboard +18758,13689703,"genie.py",3138,0,"b",python,content +18759,13689704,"genie.py",3139,0,"",python,selection_keyboard +18760,13689801,"genie.py",3139,0,"a",python,content +18761,13689802,"genie.py",3140,0,"",python,selection_keyboard +18762,13689994,"genie.py",3140,0,"t",python,content +18763,13689995,"genie.py",3141,0,"",python,selection_keyboard +18764,13690153,"genie.py",3141,0,"c",python,content +18765,13690154,"genie.py",3142,0,"",python,selection_keyboard +18766,13690253,"genie.py",3142,0,"h",python,content +18767,13690254,"genie.py",3143,0,"",python,selection_keyboard +18768,13690482,"genie.py",3138,5,"batch",python,content +18769,13691326,"genie.py",3143,0,"[]",python,content +18770,13691327,"genie.py",3144,0,"",python,selection_keyboard +18771,13691505,"TERMINAL",0,0,"Step 4500, loss: 0.07376089692115784, step time: 37.412166595458984ms\r\n",,terminal_output +18772,13691568,"TERMINAL",0,0,"Step 4501, loss: 0.05841459333896637, step time: 26.59749984741211ms\r\n",,terminal_output +18773,13691634,"TERMINAL",0,0,"Step 4502, loss: 0.0513916090130806, step time: 20.644664764404297ms\r\n",,terminal_output +18774,13691693,"genie.py",3144,0,"""""",python,content +18775,13691694,"genie.py",3145,0,"",python,selection_keyboard +18776,13691774,"TERMINAL",0,0,"Step 4503, loss: 0.0140187107026577, step time: 20.142316818237305ms\r\nStep 4504, loss: 0.07580235600471497, step time: 19.448280334472656ms\r\n",,terminal_output +18777,13691838,"TERMINAL",0,0,"Step 4505, loss: 0.023517955094575882, step time: 18.82767677307129ms\r\n",,terminal_output +18778,13691957,"TERMINAL",0,0,"Step 4506, loss: 0.05193924158811569, step time: 18.48745346069336ms\r\nStep 4507, loss: 0.06359686702489853, step time: 19.412517547607422ms\r\n",,terminal_output +18779,13692005,"genie.py",3145,0,"v",python,content +18780,13692006,"genie.py",3146,0,"",python,selection_keyboard +18781,13692029,"TERMINAL",0,0,"Step 4508, loss: 0.020375164225697517, step time: 19.102096557617188ms\r\n",,terminal_output 
+18782,13692108,"genie.py",3146,0,"i",python,content +18783,13692109,"genie.py",3147,0,"",python,selection_keyboard +18784,13692109,"TERMINAL",0,0,"Step 4509, loss: 0.06601263582706451, step time: 23.804187774658203ms\r\n",,terminal_output +18785,13692229,"TERMINAL",0,0,"Step 4510, loss: 0.0361262708902359, step time: 20.325660705566406ms\r\nStep 4511, loss: 0.6683502793312073, step time: 19.126415252685547ms\r\n",,terminal_output +18786,13692242,"genie.py",3147,0,"d",python,content +18787,13692243,"genie.py",3148,0,"",python,selection_keyboard +18788,13692295,"TERMINAL",0,0,"Step 4512, loss: 0.029559338465332985, step time: 21.0721492767334ms\r\n",,terminal_output +18789,13692342,"genie.py",3148,0,"e",python,content +18790,13692343,"genie.py",3149,0,"",python,selection_keyboard +18791,13692365,"TERMINAL",0,0,"Step 4513, loss: 0.24134030938148499, step time: 20.543336868286133ms\r\n",,terminal_output +18792,13692439,"TERMINAL",0,0,"Step 4514, loss: 0.06961069256067276, step time: 18.97120475769043ms\r\n",,terminal_output +18793,13692464,"genie.py",3149,0,"o",python,content +18794,13692465,"genie.py",3150,0,"",python,selection_keyboard +18795,13692475,"TERMINAL",0,0,"Step 4515, loss: 0.03747688606381416, step time: 19.602060317993164ms\r\n",,terminal_output +18796,13692560,"genie.py",3150,0,"s",python,content +18797,13692561,"genie.py",3151,0,"",python,selection_keyboard +18798,13692561,"TERMINAL",0,0,"Step 4516, loss: 0.04996934533119202, step time: 18.66459846496582ms\r\n",,terminal_output +18799,13692629,"TERMINAL",0,0,"Step 4517, loss: 0.2022048383951187, step time: 17.97318458557129ms\r\n",,terminal_output +18800,13692693,"TERMINAL",0,0,"Step 4518, loss: 0.04133624956011772, step time: 17.292022705078125ms\r\n",,terminal_output +18801,13692787,"TERMINAL",0,0,"Step 4519, loss: 0.0321330726146698, step time: 19.245624542236328ms\r\n",,terminal_output +18802,13692879,"TERMINAL",0,0,"Step 4520, loss: 0.0508335642516613, step time: 18.45264434814453ms\r\nStep 4521, loss: 0.11113493144512177, step time: 18.38850975036621ms\r\n",,terminal_output +18803,13692948,"TERMINAL",0,0,"Step 4522, loss: 0.140909805893898, step time: 18.0051326751709ms\r\n",,terminal_output +18804,13692961,"genie.py",3152,0,"",python,selection_command +18805,13693256,"TERMINAL",0,0,"Step 4523, loss: 0.023445583879947662, step time: 320.9822177886963ms\r\n",,terminal_output +18806,13693312,"TERMINAL",0,0,"Step 4524, loss: 0.028816021978855133, step time: 28.513669967651367ms\r\n",,terminal_output +18807,13693378,"TERMINAL",0,0,"Step 4525, loss: 0.03824533149600029, step time: 20.268678665161133ms\r\n",,terminal_output +18808,13693415,"genie.py",3153,0,"",python,selection_command +18809,13693479,"TERMINAL",0,0,"Step 4526, loss: 0.03355708718299866, step time: 18.517017364501953ms\r\n",,terminal_output +18810,13693536,"TERMINAL",0,0,"Step 4527, loss: 0.030034678056836128, step time: 19.753456115722656ms\r\n",,terminal_output +18811,13693650,"TERMINAL",0,0,"Step 4528, loss: 0.022862058132886887, step time: 17.86494255065918ms\r\nStep 4529, loss: 0.13414163887500763, step time: 18.08953285217285ms\r\n",,terminal_output +18812,13693716,"TERMINAL",0,0,"Step 4530, loss: 0.05391319841146469, step time: 17.351865768432617ms\r\n",,terminal_output +18813,13693825,"TERMINAL",0,0,"Step 4531, loss: 0.023877397179603577, step time: 17.51875877380371ms\r\nStep 4532, loss: 0.034697167575359344, step time: 17.688512802124023ms\r\n",,terminal_output +18814,13693896,"TERMINAL",0,0,"Step 4533, loss: 0.021711628884077072, step time: 
17.59195327758789ms\r\n",,terminal_output +18815,13693957,"TERMINAL",0,0,"Step 4534, loss: 0.06657078862190247, step time: 18.200397491455078ms\r\n",,terminal_output +18816,13694022,"TERMINAL",0,0,"Step 4535, loss: 0.027462223544716835, step time: 18.43547821044922ms\r\n",,terminal_output +18817,13694087,"TERMINAL",0,0,"Step 4536, loss: 0.030558550730347633, step time: 17.94266700744629ms\r\n",,terminal_output +18818,13694150,"genie.py",3153,0,".",python,content +18819,13694151,"genie.py",3154,0,"",python,selection_keyboard +18820,13694218,"TERMINAL",0,0,"Step 4537, loss: 0.032663315534591675, step time: 17.244338989257812ms\r\nStep 4538, loss: 0.058979883790016174, step time: 17.204999923706055ms\r\n",,terminal_output +18821,13694281,"TERMINAL",0,0,"Step 4539, loss: 0.07289502024650574, step time: 18.186092376708984ms\r\n",,terminal_output +18822,13694322,"genie.py",3154,0,"s",python,content +18823,13694323,"genie.py",3155,0,"",python,selection_keyboard +18824,13694348,"TERMINAL",0,0,"Step 4540, loss: 0.04956209287047386, step time: 17.23647117614746ms\r\n",,terminal_output +18825,13694407,"genie.py",3155,0,"h",python,content +18826,13694409,"genie.py",3156,0,"",python,selection_keyboard +18827,13694409,"TERMINAL",0,0,"Step 4541, loss: 0.06234150752425194, step time: 17.638683319091797ms\r\n",,terminal_output +18828,13694471,"TERMINAL",0,0,"Step 4542, loss: 0.014618773013353348, step time: 17.09127426147461ms\r\n",,terminal_output +18829,13694526,"genie.py",3156,0,"a",python,content +18830,13694527,"genie.py",3157,0,"",python,selection_keyboard +18831,13694538,"TERMINAL",0,0,"Step 4543, loss: 0.025593971833586693, step time: 17.41814613342285ms\r\n",,terminal_output +18832,13694601,"genie.py",3157,0,"p",python,content +18833,13694604,"genie.py",3158,0,"",python,selection_keyboard +18834,13694604,"TERMINAL",0,0,"Step 4544, loss: 0.06364988535642624, step time: 17.59791374206543ms\r\n",,terminal_output +18835,13694638,"genie.py",3158,0,"e",python,content +18836,13694640,"genie.py",3159,0,"",python,selection_keyboard +18837,13694665,"TERMINAL",0,0,"Step 4545, loss: 0.01868741773068905, step time: 18.358230590820312ms\r\n",,terminal_output +18838,13694778,"TERMINAL",0,0,"Step 4546, loss: 0.07113111764192581, step time: 17.067432403564453ms\r\nStep 4547, loss: 0.02303859032690525, step time: 17.876863479614258ms\r\n",,terminal_output +18839,13694856,"TERMINAL",0,0,"Step 4548, loss: 0.019489392638206482, step time: 18.48888397216797ms\r\n",,terminal_output +18840,13694912,"TERMINAL",0,0,"Step 4549, loss: 0.03774670511484146, step time: 18.253087997436523ms\r\n",,terminal_output +18841,13694983,"TERMINAL",0,0,"Step 4550, loss: 0.015231556259095669, step time: 17.52614974975586ms\r\n",,terminal_output +18842,13695044,"TERMINAL",0,0,"Step 4551, loss: 0.028586024418473244, step time: 17.704486846923828ms\r\n",,terminal_output +18843,13695123,"TERMINAL",0,0,"Step 4552, loss: 0.02239743247628212, step time: 18.172025680541992ms\r\n",,terminal_output +18844,13695175,"TERMINAL",0,0,"Step 4553, loss: 0.04946644604206085, step time: 18.865346908569336ms\r\n",,terminal_output +18845,13695230,"genie.py",3159,0,"[]",python,content +18846,13695232,"genie.py",3160,0,"",python,selection_keyboard +18847,13695253,"TERMINAL",0,0,"Step 4554, loss: 0.0684007778763771, step time: 18.094539642333984ms\r\n",,terminal_output +18848,13695307,"TERMINAL",0,0,"Step 4555, loss: 0.016696734353899956, step time: 17.528772354125977ms\r\n",,terminal_output +18849,13695374,"TERMINAL",0,0,"Step 4556, loss: 0.05456195026636124, 
step time: 17.33851432800293ms\r\n",,terminal_output +18850,13695453,"TERMINAL",0,0,"Step 4557, loss: 0.02199694886803627, step time: 18.02968978881836ms\r\n",,terminal_output +18851,13695505,"TERMINAL",0,0,"Step 4558, loss: 0.26760372519493103, step time: 17.062664031982422ms\r\n",,terminal_output +18852,13695559,"TERMINAL",0,0,"Step 4559, loss: 0.04156109690666199, step time: 18.131017684936523ms\r\n",,terminal_output +18853,13695630,"TERMINAL",0,0,"Step 4560, loss: 0.023914922028779984, step time: 17.106294631958008ms\r\n",,terminal_output +18854,13695682,"TERMINAL",0,0,"Step 4561, loss: 0.22817492485046387, step time: 17.34304428100586ms\r\n",,terminal_output +18855,13695750,"TERMINAL",0,0,"Step 4562, loss: 0.0252772755920887, step time: 17.632007598876953ms\r\n",,terminal_output +18856,13695843,"TERMINAL",0,0,"Step 4563, loss: 0.014278505928814411, step time: 17.177104949951172ms\r\n",,terminal_output +18857,13695896,"TERMINAL",0,0,"Step 4564, loss: 0.02949564717710018, step time: 17.13085174560547ms\r\n",,terminal_output +18858,13696044,"TERMINAL",0,0,"Step 4565, loss: 0.10470878332853317, step time: 17.55237579345703ms\r\nStep 4566, loss: 0.11051762104034424, step time: 17.32611656188965ms\r\n",,terminal_output +18859,13696068,"genie.py",3160,0,"2",python,content +18860,13696068,"genie.py",3161,0,"",python,selection_keyboard +18861,13696145,"TERMINAL",0,0,"Step 4567, loss: 0.011632089503109455, step time: 17.294645309448242ms\r\nStep 4568, loss: 0.019379181787371635, step time: 17.464876174926758ms\r\n",,terminal_output +18862,13696186,"TERMINAL",0,0,"Step 4569, loss: 0.08167386800050735, step time: 17.205238342285156ms\r\n",,terminal_output +18863,13696227,"genie.py",3161,0,":",python,content +18864,13696228,"genie.py",3162,0,"",python,selection_keyboard +18865,13696279,"TERMINAL",0,0,"Step 4570, loss: 0.022830989211797714, step time: 17.125606536865234ms\r\n",,terminal_output +18866,13696345,"TERMINAL",0,0,"Step 4571, loss: 0.027796871960163116, step time: 17.64202117919922ms\r\n",,terminal_output +18867,13696397,"TERMINAL",0,0,"Step 4572, loss: 0.015521862544119358, step time: 17.161130905151367ms\r\n",,terminal_output +18868,13696505,"TERMINAL",0,0,"Step 4573, loss: 0.019365686923265457, step time: 17.124414443969727ms\r\nStep 4574, loss: 0.03940365090966225, step time: 17.33231544494629ms\r\n",,terminal_output +18869,13696541,"genie.py",3162,0,"4",python,content +18870,13696542,"genie.py",3163,0,"",python,selection_keyboard +18871,13696644,"TERMINAL",0,0,"Step 4575, loss: 0.11116106063127518, step time: 17.229795455932617ms\r\nStep 4576, loss: 0.05275200679898262, step time: 17.749309539794922ms\r\n",,terminal_output +18872,13696697,"TERMINAL",0,0,"Step 4577, loss: 0.030404534190893173, step time: 17.490863800048828ms\r\n",,terminal_output +18873,13696819,"TERMINAL",0,0,"Step 4578, loss: 0.197404146194458, step time: 17.01951026916504ms\r\nStep 4579, loss: 0.02109258808195591, step time: 16.948699951171875ms\r\n",,terminal_output +18874,13696900,"TERMINAL",0,0,"Step 4580, loss: 0.027465419843792915, step time: 17.316341400146484ms\r\n",,terminal_output +18875,13696966,"TERMINAL",0,0,"Step 4581, loss: 0.09945764392614365, step time: 18.06783676147461ms\r\n",,terminal_output +18876,13697031,"TERMINAL",0,0,"Step 4582, loss: 0.07331497967243195, step time: 17.73834228515625ms\r\n",,terminal_output +18877,13697050,"genie.py",3162,0,"",python,selection_command +18878,13697145,"TERMINAL",0,0,"Step 4583, loss: 0.03562031686306, step time: 17.328262329101562ms\r\nStep 4584, loss: 
0.014350375160574913, step time: 16.967296600341797ms\r\n",,terminal_output +18879,13697198,"TERMINAL",0,0,"Step 4585, loss: 0.02413172833621502, step time: 17.87090301513672ms\r\n",,terminal_output +18880,13697261,"TERMINAL",0,0,"Step 4586, loss: 0.0311094019562006, step time: 18.10169219970703ms\r\n",,terminal_output +18881,13697326,"TERMINAL",0,0,"Step 4587, loss: 0.028426188975572586, step time: 18.056631088256836ms\r\n",,terminal_output +18882,13697389,"TERMINAL",0,0,"Step 4588, loss: 0.017695758491754532, step time: 17.676353454589844ms\r\n",,terminal_output +18883,13697487,"TERMINAL",0,0,"Step 4589, loss: 0.06937628984451294, step time: 17.3947811126709ms\r\n",,terminal_output +18884,13697598,"TERMINAL",0,0,"Step 4590, loss: 0.026440568268299103, step time: 17.345666885375977ms\r\nStep 4591, loss: 0.09398607164621353, step time: 17.430543899536133ms\r\n",,terminal_output +18885,13697652,"TERMINAL",0,0,"Step 4592, loss: 0.02171591855585575, step time: 17.243385314941406ms\r\n",,terminal_output +18886,13697698,"genie.py",3174,0,"",python,selection_mouse +18887,13697712,"genie.py",3173,0,"",python,selection_command +18888,13697739,"TERMINAL",0,0,"Step 4593, loss: 0.019768841564655304, step time: 17.32611656188965ms\r\n",,terminal_output +18889,13697796,"TERMINAL",0,0,"Step 4594, loss: 0.025428611785173416, step time: 17.23313331604004ms\r\n",,terminal_output +18890,13697851,"TERMINAL",0,0,"Step 4595, loss: 0.04554114118218422, step time: 30.114173889160156ms\r\n",,terminal_output +18891,13697915,"TERMINAL",0,0,"Step 4596, loss: 0.04426058381795883, step time: 21.175384521484375ms\r\n",,terminal_output +18892,13698008,"TERMINAL",0,0,"Step 4597, loss: 0.030035987496376038, step time: 19.539594650268555ms\r\n",,terminal_output +18893,13698071,"TERMINAL",0,0,"Step 4598, loss: 0.04150446876883507, step time: 18.968820571899414ms\r\n",,terminal_output +18894,13698177,"TERMINAL",0,0,"Step 4599, loss: 0.019731150940060616, step time: 18.60976219177246ms\r\nStep 4600, loss: 0.019393041729927063, step time: 19.066333770751953ms\r\n",,terminal_output +18895,13698243,"TERMINAL",0,0,"Step 4601, loss: 0.1632884442806244, step time: 18.715381622314453ms\r\n",,terminal_output +18896,13698257,"genie.py",3068,0,"",python,selection_mouse +18897,13698309,"TERMINAL",0,0,"Step 4602, loss: 0.05652501806616783, step time: 18.162250518798828ms\r\n",,terminal_output +18898,13698402,"TERMINAL",0,0,"Step 4603, loss: 0.01631302759051323, step time: 18.303394317626953ms\r\n",,terminal_output +18899,13698456,"TERMINAL",0,0,"Step 4604, loss: 0.021710999310016632, step time: 18.355131149291992ms\r\n",,terminal_output +18900,13698517,"TERMINAL",0,0,"Step 4605, loss: 0.02340180054306984, step time: 20.444869995117188ms\r\n",,terminal_output +18901,13698574,"TERMINAL",0,0,"Step 4606, loss: 0.012426722794771194, step time: 18.59903335571289ms\r\n",,terminal_output +18902,13698639,"TERMINAL",0,0,"Step 4607, loss: 0.09966474026441574, step time: 19.441604614257812ms\r\n",,terminal_output +18903,13698700,"TERMINAL",0,0,"Step 4608, loss: 0.043018702417612076, step time: 18.367290496826172ms\r\n",,terminal_output +18904,13698765,"genie.py",3121,0,"",python,selection_mouse +18905,13698776,"TERMINAL",0,0,"Step 4609, loss: 0.022168224677443504, step time: 18.34869384765625ms\r\n",,terminal_output +18906,13698826,"TERMINAL",0,0,"Step 4610, loss: 0.057213231921195984, step time: 18.34893226623535ms\r\n",,terminal_output +18907,13698935,"genie.py",3119,17,"tokenizer_outputs",python,selection_mouse 
+18908,13698948,"TERMINAL",0,0,"Step 4611, loss: 0.19704531133174896, step time: 18.369674682617188ms\r\nStep 4612, loss: 0.010079842060804367, step time: 18.151283264160156ms\r\n",,terminal_output +18909,13699001,"TERMINAL",0,0,"Step 4613, loss: 0.01935405470430851, step time: 18.49818229675293ms\r\n",,terminal_output +18910,13699097,"TERMINAL",0,0,"Step 4614, loss: 0.02639273926615715, step time: 18.042802810668945ms\r\n",,terminal_output +18911,13699200,"TERMINAL",0,0,"Step 4615, loss: 0.04627253860235214, step time: 25.93374252319336ms\r\nStep 4616, loss: 0.20965497195720673, step time: 19.001245498657227ms\r\n",,terminal_output +18912,13699262,"TERMINAL",0,0,"Step 4617, loss: 0.049051325768232346, step time: 17.623424530029297ms\r\n",,terminal_output +18913,13699323,"TERMINAL",0,0,"Step 4618, loss: 0.14356796443462372, step time: 17.222881317138672ms\r\n",,terminal_output +18914,13699447,"TERMINAL",0,0,"Step 4619, loss: 0.07079633325338364, step time: 17.59171485900879ms\r\nStep 4620, loss: 0.04061407595872879, step time: 17.11440086364746ms\r\n",,terminal_output +18915,13699513,"TERMINAL",0,0,"Step 4621, loss: 0.026758532971143723, step time: 17.519235610961914ms\r\n",,terminal_output +18916,13699579,"genie.py",3062,0,"",python,selection_mouse +18917,13699590,"TERMINAL",0,0,"Step 4622, loss: 0.019078010693192482, step time: 17.60101318359375ms\r\n",,terminal_output +18918,13699651,"TERMINAL",0,0,"Step 4623, loss: 0.05301319435238838, step time: 17.192363739013672ms\r\n",,terminal_output +18919,13699718,"TERMINAL",0,0,"Step 4624, loss: 0.5822445154190063, step time: 17.006874084472656ms\r\n",,terminal_output +18920,13699828,"TERMINAL",0,0,"Step 4625, loss: 0.023327646777033806, step time: 17.45772361755371ms\r\nStep 4626, loss: 0.04254286736249924, step time: 17.197370529174805ms\r\n",,terminal_output +18921,13699901,"TERMINAL",0,0,"Step 4627, loss: 0.052274249494075775, step time: 17.417430877685547ms\r\n",,terminal_output +18922,13699965,"TERMINAL",0,0,"Step 4628, loss: 0.09500151872634888, step time: 17.305612564086914ms\r\n",,terminal_output +18923,13700030,"TERMINAL",0,0,"Step 4629, loss: 0.03334158658981323, step time: 17.204999923706055ms\r\n",,terminal_output +18924,13700081,"TERMINAL",0,0,"Step 4630, loss: 0.3835216164588928, step time: 17.24863052368164ms\r\n",,terminal_output +18925,13700155,"TERMINAL",0,0,"Step 4631, loss: 0.06873726844787598, step time: 17.580747604370117ms\r\n",,terminal_output +18926,13700211,"TERMINAL",0,0,"Step 4632, loss: 0.037452008575201035, step time: 17.101526260375977ms\r\n",,terminal_output +18927,13700246,"genie.py",3184,0,"",python,selection_mouse +18928,13700341,"TERMINAL",0,0,"Step 4633, loss: 0.04749777540564537, step time: 17.30632781982422ms\r\nStep 4634, loss: 0.02626035548746586, step time: 17.408370971679688ms\r\n",,terminal_output +18929,13700398,"TERMINAL",0,0,"Step 4635, loss: 0.17254067957401276, step time: 17.47870445251465ms\r\n",,terminal_output +18930,13700494,"TERMINAL",0,0,"Step 4636, loss: 0.03673764318227768, step time: 16.97826385498047ms\r\n",,terminal_output +18931,13700558,"TERMINAL",0,0,"Step 4637, loss: 0.05219882354140282, step time: 17.707347869873047ms\r\n",,terminal_output +18932,13700621,"TERMINAL",0,0,"Step 4638, loss: 0.13011197745800018, step time: 16.994237899780273ms\r\n",,terminal_output +18933,13700681,"TERMINAL",0,0,"Step 4639, loss: 0.2081819772720337, step time: 17.561674118041992ms\r\n",,terminal_output +18934,13700744,"TERMINAL",0,0,"Step 4640, loss: 0.0431046225130558, step time: 
17.508983612060547ms\r\n",,terminal_output +18935,13700804,"TERMINAL",0,0,"Step 4641, loss: 0.042130209505558014, step time: 17.27437973022461ms\r\n",,terminal_output +18936,13700864,"TERMINAL",0,0,"Step 4642, loss: 0.024207457900047302, step time: 17.151355743408203ms\r\n",,terminal_output +18937,13700925,"TERMINAL",0,0,"Step 4643, loss: 0.02356293983757496, step time: 17.281770706176758ms\r\n",,terminal_output +18938,13700987,"TERMINAL",0,0,"Step 4644, loss: 0.1465020328760147, step time: 17.201662063598633ms\r\n",,terminal_output +18939,13701048,"TERMINAL",0,0,"Step 4645, loss: 0.02203219383955002, step time: 17.23766326904297ms\r\n",,terminal_output +18940,13701103,"genie.py",3174,0,"",python,selection_mouse +18941,13701113,"genie.py",3173,0,"",python,selection_command +18942,13701174,"TERMINAL",0,0,"Step 4646, loss: 0.03559804707765579, step time: 17.267465591430664ms\r\nStep 4647, loss: 0.06372172385454178, step time: 17.400026321411133ms\r\n",,terminal_output +18943,13701227,"TERMINAL",0,0,"Step 4648, loss: 0.03202967345714569, step time: 17.28653907775879ms\r\n",,terminal_output +18944,13701345,"TERMINAL",0,0,"Step 4649, loss: 0.011453899554908276, step time: 21.884441375732422ms\r\n",,terminal_output +18945,13701425,"TERMINAL",0,0,"Step 4650, loss: 0.02848002128303051, step time: 27.96316146850586ms\r\nStep 4651, loss: 0.03328419849276543, step time: 21.433353424072266ms\r\n",,terminal_output +18946,13701484,"TERMINAL",0,0,"Step 4652, loss: 0.01887330412864685, step time: 18.706560134887695ms\r\n",,terminal_output +18947,13701550,"TERMINAL",0,0,"Step 4653, loss: 0.03823872655630112, step time: 26.136398315429688ms\r\n",,terminal_output +18948,13701614,"TERMINAL",0,0,"Step 4654, loss: 0.04390246048569679, step time: 21.155118942260742ms\r\n",,terminal_output +18949,13701679,"TERMINAL",0,0,"Step 4655, loss: 0.02577955275774002, step time: 18.579483032226562ms\r\n",,terminal_output +18950,13701774,"TERMINAL",0,0,"Step 4656, loss: 0.017136365175247192, step time: 17.49420166015625ms\r\n",,terminal_output +18951,13701828,"TERMINAL",0,0,"Step 4657, loss: 0.013174585998058319, step time: 17.7156925201416ms\r\n",,terminal_output +18952,13701893,"TERMINAL",0,0,"Step 4658, loss: 0.1151573657989502, step time: 17.40407943725586ms\r\n",,terminal_output +18953,13701957,"TERMINAL",0,0,"Step 4659, loss: 0.017284315079450607, step time: 17.5015926361084ms\r\n",,terminal_output +18954,13702019,"TERMINAL",0,0,"Step 4660, loss: 0.048020582646131516, step time: 17.162561416625977ms\r\n",,terminal_output +18955,13702084,"TERMINAL",0,0,"Step 4661, loss: 0.021127022802829742, step time: 17.451763153076172ms\r\n",,terminal_output +18956,13702188,"TERMINAL",0,0,"Step 4662, loss: 0.014284586533904076, step time: 17.47870445251465ms\r\nStep 4663, loss: 0.1336282193660736, step time: 17.483949661254883ms\r\n",,terminal_output +18957,13702254,"TERMINAL",0,0,"Step 4664, loss: 0.012461810372769833, step time: 17.287731170654297ms\r\n",,terminal_output +18958,13702315,"TERMINAL",0,0,"Step 4665, loss: 0.046822287142276764, step time: 17.315387725830078ms\r\n",,terminal_output +18959,13702379,"TERMINAL",0,0,"Step 4666, loss: 0.04198841005563736, step time: 17.274856567382812ms\r\n",,terminal_output +18960,13702442,"TERMINAL",0,0,"Step 4667, loss: 0.06790140271186829, step time: 17.589330673217773ms\r\n",,terminal_output +18961,13702505,"TERMINAL",0,0,"Step 4668, loss: 0.06680664420127869, step time: 17.140626907348633ms\r\n",,terminal_output +18962,13702567,"TERMINAL",0,0,"Step 4669, loss: 0.0303210336714983, step 
time: 17.304420471191406ms\r\n",,terminal_output +18963,13702630,"TERMINAL",0,0,"Step 4670, loss: 0.028447434306144714, step time: 17.516613006591797ms\r\n",,terminal_output +18964,13702690,"TERMINAL",0,0,"Step 4671, loss: 0.013557269237935543, step time: 17.74764060974121ms\r\n",,terminal_output +18965,13702756,"TERMINAL",0,0,"Step 4672, loss: 0.027345694601535797, step time: 17.29869842529297ms\r\n",,terminal_output +18966,13702814,"TERMINAL",0,0,"Step 4673, loss: 0.02083444409072399, step time: 17.697572708129883ms\r\n",,terminal_output +18967,13702879,"TERMINAL",0,0,"Step 4674, loss: 0.02579883672297001, step time: 17.0590877532959ms\r\n",,terminal_output +18968,13703062,"TERMINAL",0,0,"Step 4675, loss: 0.055257171392440796, step time: 17.379283905029297ms\r\nStep 4676, loss: 0.04046333208680153, step time: 17.35854148864746ms\r\n",,terminal_output +18969,13703127,"TERMINAL",0,0,"Step 4677, loss: 0.08772727847099304, step time: 17.3187255859375ms\r\nStep 4678, loss: 0.009355244226753712, step time: 17.232179641723633ms\r\n",,terminal_output +18970,13703240,"TERMINAL",0,0,"Step 4679, loss: 0.02456810511648655, step time: 17.277002334594727ms\r\n",,terminal_output +18971,13703318,"TERMINAL",0,0,"Step 4680, loss: 0.04156578332185745, step time: 17.4562931060791ms\r\nStep 4681, loss: 0.048645660281181335, step time: 17.47727394104004ms\r\n",,terminal_output +18972,13703382,"TERMINAL",0,0,"Step 4682, loss: 0.020726623013615608, step time: 17.355918884277344ms\r\n",,terminal_output +18973,13703447,"TERMINAL",0,0,"Step 4683, loss: 0.015488633885979652, step time: 17.36617088317871ms\r\n",,terminal_output +18974,13703512,"TERMINAL",0,0,"Step 4684, loss: 0.19507388770580292, step time: 17.557621002197266ms\r\n",,terminal_output +18975,13703578,"TERMINAL",0,0,"Step 4685, loss: 0.036790631711483, step time: 17.707109451293945ms\r\n",,terminal_output +18976,13703643,"TERMINAL",0,0,"Step 4686, loss: 0.09512657672166824, step time: 17.198801040649414ms\r\n",,terminal_output +18977,13703697,"TERMINAL",0,0,"Step 4687, loss: 0.11629343777894974, step time: 17.277240753173828ms\r\n",,terminal_output +18978,13703822,"TERMINAL",0,0,"Step 4688, loss: 0.016268158331513405, step time: 17.26055145263672ms\r\nStep 4689, loss: 0.04535285010933876, step time: 17.667531967163086ms\r\n",,terminal_output +18979,13703918,"TERMINAL",0,0,"Step 4690, loss: 0.019943075254559517, step time: 17.073631286621094ms\r\n",,terminal_output +18980,13703969,"TERMINAL",0,0,"Step 4691, loss: 0.014538003131747246, step time: 17.3187255859375ms\r\n",,terminal_output +18981,13704061,"TERMINAL",0,0,"Step 4692, loss: 0.048476025462150574, step time: 17.265796661376953ms\r\n",,terminal_output +18982,13704112,"TERMINAL",0,0,"Step 4693, loss: 0.01486129965633154, step time: 17.498254776000977ms\r\n",,terminal_output +18983,13704164,"TERMINAL",0,0,"Step 4694, loss: 0.020682798698544502, step time: 17.29750633239746ms\r\n",,terminal_output +18984,13704268,"TERMINAL",0,0,"Step 4695, loss: 0.01921418309211731, step time: 17.2116756439209ms\r\nStep 4696, loss: 0.020669015124440193, step time: 17.015933990478516ms\r\n",,terminal_output +18985,13704362,"TERMINAL",0,0,"Step 4697, loss: 0.3495972156524658, step time: 17.34328269958496ms\r\n",,terminal_output +18986,13704468,"TERMINAL",0,0,"Step 4698, loss: 0.013201150111854076, step time: 17.251253128051758ms\r\nStep 4699, loss: 0.015559986233711243, step time: 17.49730110168457ms\r\n",,terminal_output +18987,13704527,"TERMINAL",0,0,"Step 4700, loss: 0.029413342475891113, step time: 
17.363786697387695ms\r\n",,terminal_output +18988,13704588,"TERMINAL",0,0,"Step 4701, loss: 0.042652059346437454, step time: 17.342805862426758ms\r\n",,terminal_output +18989,13704648,"TERMINAL",0,0,"Step 4702, loss: 0.02662975899875164, step time: 17.374753952026367ms\r\n",,terminal_output +18990,13704709,"TERMINAL",0,0,"Step 4703, loss: 0.08209333568811417, step time: 17.735958099365234ms\r\n",,terminal_output +18991,13704776,"TERMINAL",0,0,"Step 4704, loss: 0.027464494109153748, step time: 17.275094985961914ms\r\n",,terminal_output +18992,13704837,"TERMINAL",0,0,"Step 4705, loss: 0.022243129089474678, step time: 17.34328269958496ms\r\n",,terminal_output +18993,13704900,"TERMINAL",0,0,"Step 4706, loss: 0.019664399325847626, step time: 17.508745193481445ms\r\n",,terminal_output +18994,13704966,"TERMINAL",0,0,"Step 4707, loss: 0.018674718216061592, step time: 17.50493049621582ms\r\n",,terminal_output +18995,13705027,"TERMINAL",0,0,"Step 4708, loss: 0.023236066102981567, step time: 17.001628875732422ms\r\n",,terminal_output +18996,13705327,"TERMINAL",0,0,"Step 4709, loss: 0.040900640189647675, step time: 294.86823081970215ms\r\n",,terminal_output +18997,13705399,"TERMINAL",0,0,"Step 4710, loss: 0.03426727280020714, step time: 24.8110294342041ms\r\n",,terminal_output +18998,13705484,"TERMINAL",0,0,"Step 4711, loss: 0.09097254276275635, step time: 20.103931427001953ms\r\n",,terminal_output +18999,13705550,"TERMINAL",0,0,"Step 4712, loss: 0.052800364792346954, step time: 18.035411834716797ms\r\n",,terminal_output +19000,13705615,"TERMINAL",0,0,"Step 4713, loss: 0.0182831808924675, step time: 17.865419387817383ms\r\n",,terminal_output +19001,13705671,"TERMINAL",0,0,"Step 4714, loss: 0.12820173799991608, step time: 17.34018325805664ms\r\n",,terminal_output +19002,13705776,"TERMINAL",0,0,"Step 4715, loss: 0.026394691318273544, step time: 17.579078674316406ms\r\nStep 4716, loss: 0.02196844480931759, step time: 17.380952835083008ms\r\n",,terminal_output +19003,13705837,"TERMINAL",0,0,"Step 4717, loss: 0.02358555980026722, step time: 17.613887786865234ms\r\n",,terminal_output +19004,13705903,"TERMINAL",0,0,"Step 4718, loss: 0.026886925101280212, step time: 17.53520965576172ms\r\n",,terminal_output +19005,13705964,"TERMINAL",0,0,"Step 4719, loss: 0.02447250857949257, step time: 17.540693283081055ms\r\n",,terminal_output +19006,13706058,"TERMINAL",0,0,"Step 4720, loss: 0.014190953224897385, step time: 17.423152923583984ms\r\n",,terminal_output +19007,13706109,"TERMINAL",0,0,"Step 4721, loss: 0.03095237910747528, step time: 17.861127853393555ms\r\n",,terminal_output +19008,13706211,"TERMINAL",0,0,"Step 4722, loss: 0.01750711165368557, step time: 17.160654067993164ms\r\nStep 4723, loss: 0.02200012467801571, step time: 17.684221267700195ms\r\n",,terminal_output +19009,13706304,"TERMINAL",0,0,"Step 4724, loss: 0.06842813640832901, step time: 17.405033111572266ms\r\n",,terminal_output +19010,13706355,"TERMINAL",0,0,"Step 4725, loss: 0.19643272459506989, step time: 17.290830612182617ms\r\n",,terminal_output +19011,13706448,"TERMINAL",0,0,"Step 4726, loss: 0.07731731981039047, step time: 17.12965965270996ms\r\n",,terminal_output +19012,13706501,"TERMINAL",0,0,"Step 4727, loss: 0.029484767466783524, step time: 17.897605895996094ms\r\n",,terminal_output +19013,13706561,"TERMINAL",0,0,"Step 4728, loss: 0.059325478971004486, step time: 17.039060592651367ms\r\n",,terminal_output +19014,13706622,"TERMINAL",0,0,"Step 4729, loss: 0.0443112850189209, step time: 17.32635498046875ms\r\n",,terminal_output 
+19015,13706736,"TERMINAL",0,0,"Step 4730, loss: 0.031214378774166107, step time: 17.46654510498047ms\r\nStep 4731, loss: 0.02100488170981407, step time: 17.542362213134766ms\r\n",,terminal_output +19016,13706791,"TERMINAL",0,0,"Step 4732, loss: 0.03470154106616974, step time: 17.229318618774414ms\r\n",,terminal_output +19017,13706857,"TERMINAL",0,0,"Step 4733, loss: 0.01345166563987732, step time: 17.696380615234375ms\r\n",,terminal_output +19018,13706921,"TERMINAL",0,0,"Step 4734, loss: 0.0266855601221323, step time: 17.307043075561523ms\r\n",,terminal_output +19019,13706984,"TERMINAL",0,0,"Step 4735, loss: 0.015992553904652596, step time: 17.37380027770996ms\r\n",,terminal_output +19020,13707050,"TERMINAL",0,0,"Step 4736, loss: 0.09561073780059814, step time: 17.47870445251465ms\r\n",,terminal_output +19021,13707113,"TERMINAL",0,0,"Step 4737, loss: 0.02789095602929592, step time: 17.426729202270508ms\r\n",,terminal_output +19022,13707176,"TERMINAL",0,0,"Step 4738, loss: 0.020713569596409798, step time: 18.620967864990234ms\r\n",,terminal_output +19023,13707228,"TERMINAL",0,0,"Step 4739, loss: 0.020519301295280457, step time: 17.680883407592773ms\r\n",,terminal_output +19024,13707321,"TERMINAL",0,0,"Step 4740, loss: 0.031318556517362595, step time: 17.10796356201172ms\r\n",,terminal_output +19025,13707376,"TERMINAL",0,0,"Step 4741, loss: 0.09070497751235962, step time: 17.542362213134766ms\r\n",,terminal_output +19026,13707468,"TERMINAL",0,0,"Step 4742, loss: 0.02509642019867897, step time: 17.77815818786621ms\r\n",,terminal_output +19027,13707519,"TERMINAL",0,0,"Step 4743, loss: 0.13508151471614838, step time: 17.668724060058594ms\r\n",,terminal_output +19028,13707572,"TERMINAL",0,0,"Step 4744, loss: 0.09598848223686218, step time: 17.07601547241211ms\r\n",,terminal_output +19029,13707678,"TERMINAL",0,0,"Step 4745, loss: 0.03584587574005127, step time: 17.67873764038086ms\r\nStep 4746, loss: 0.052946947515010834, step time: 17.060518264770508ms\r\n",,terminal_output +19030,13707773,"TERMINAL",0,0,"Step 4747, loss: 0.02055225893855095, step time: 17.295122146606445ms\r\n",,terminal_output +19031,13707826,"TERMINAL",0,0,"Step 4748, loss: 0.013825779780745506, step time: 17.49396324157715ms\r\n",,terminal_output +19032,13707932,"TERMINAL",0,0,"Step 4749, loss: 0.020711062476038933, step time: 17.243146896362305ms\r\nStep 4750, loss: 0.030004942789673805, step time: 17.261028289794922ms\r\n",,terminal_output +19033,13708026,"TERMINAL",0,0,"Step 4751, loss: 0.031072594225406647, step time: 17.498493194580078ms\r\n",,terminal_output +19034,13708080,"TERMINAL",0,0,"Step 4752, loss: 0.027015255764126778, step time: 17.23957061767578ms\r\n",,terminal_output +19035,13708185,"TERMINAL",0,0,"Step 4753, loss: 0.00830400176346302, step time: 17.399311065673828ms\r\nStep 4754, loss: 0.03043510764837265, step time: 17.408370971679688ms\r\n",,terminal_output +19036,13708259,"TERMINAL",0,0,"Step 4755, loss: 0.042225271463394165, step time: 17.25935935974121ms\r\n",,terminal_output +19037,13708316,"TERMINAL",0,0,"Step 4756, loss: 0.017457932233810425, step time: 17.25459098815918ms\r\n",,terminal_output +19038,13708408,"TERMINAL",0,0,"Step 4757, loss: 0.09533512592315674, step time: 17.796754837036133ms\r\n",,terminal_output +19039,13708476,"TERMINAL",0,0,"Step 4758, loss: 0.014665447175502777, step time: 17.114877700805664ms\r\n",,terminal_output +19040,13708578,"TERMINAL",0,0,"Step 4759, loss: 0.04911096394062042, step time: 17.659902572631836ms\r\nStep 4760, loss: 0.05023117735981941, step time: 
17.549753189086914ms\r\n",,terminal_output +19041,13708647,"TERMINAL",0,0,"Step 4761, loss: 0.022010410204529762, step time: 17.652034759521484ms\r\n",,terminal_output +19042,13708714,"TERMINAL",0,0,"Step 4762, loss: 0.01283633429557085, step time: 17.106056213378906ms\r\n",,terminal_output +19043,13708776,"TERMINAL",0,0,"Step 4763, loss: 0.029165351763367653, step time: 17.449617385864258ms\r\n",,terminal_output +19044,13708840,"TERMINAL",0,0,"Step 4764, loss: 0.013482480309903622, step time: 17.154693603515625ms\r\n",,terminal_output +19045,13708904,"TERMINAL",0,0,"Step 4765, loss: 0.3585037291049957, step time: 17.313480377197266ms\r\n",,terminal_output +19046,13708968,"TERMINAL",0,0,"Step 4766, loss: 0.02403714880347252, step time: 17.481088638305664ms\r\n",,terminal_output +19047,13709032,"TERMINAL",0,0,"Step 4767, loss: 0.05183880031108856, step time: 17.494678497314453ms\r\n",,terminal_output +19048,13709162,"TERMINAL",0,0,"Step 4768, loss: 0.05286276340484619, step time: 17.023563385009766ms\r\nStep 4769, loss: 0.02951470948755741, step time: 17.748117446899414ms\r\n",,terminal_output +19049,13709216,"TERMINAL",0,0,"Step 4770, loss: 0.020256472751498222, step time: 29.420852661132812ms\r\n",,terminal_output +19050,13709311,"TERMINAL",0,0,"Step 4771, loss: 0.019616059958934784, step time: 21.833181381225586ms\r\n",,terminal_output +19051,13709363,"TERMINAL",0,0,"Step 4772, loss: 0.01712963730096817, step time: 18.962860107421875ms\r\n",,terminal_output +19052,13709428,"TERMINAL",0,0,"Step 4773, loss: 0.01746850460767746, step time: 18.739700317382812ms\r\n",,terminal_output +19053,13709492,"TERMINAL",0,0,"Step 4774, loss: 0.01823548786342144, step time: 18.34273338317871ms\r\n",,terminal_output +19054,13709552,"TERMINAL",0,0,"Step 4775, loss: 0.26281121373176575, step time: 18.743038177490234ms\r\n",,terminal_output +19055,13709657,"TERMINAL",0,0,"Step 4776, loss: 0.02999188005924225, step time: 18.10312271118164ms\r\nStep 4777, loss: 0.04729612171649933, step time: 18.13673973083496ms\r\n",,terminal_output +19056,13709721,"TERMINAL",0,0,"Step 4778, loss: 0.04183916747570038, step time: 18.401384353637695ms\r\n",,terminal_output +19057,13709783,"TERMINAL",0,0,"Step 4779, loss: 0.06689148396253586, step time: 17.946720123291016ms\r\n",,terminal_output +19058,13709848,"TERMINAL",0,0,"Step 4780, loss: 0.05989507585763931, step time: 17.751216888427734ms\r\n",,terminal_output +19059,13709916,"TERMINAL",0,0,"Step 4781, loss: 0.04457063600420952, step time: 18.499374389648438ms\r\n",,terminal_output +19060,13709989,"TERMINAL",0,0,"Step 4782, loss: 0.03506653383374214, step time: 17.60101318359375ms\r\n",,terminal_output +19061,13710061,"TERMINAL",0,0,"Step 4783, loss: 0.02272852137684822, step time: 17.981529235839844ms\r\n",,terminal_output +19062,13710128,"TERMINAL",0,0,"Step 4784, loss: 0.1160159781575203, step time: 18.294572830200195ms\r\n",,terminal_output +19063,13710205,"TERMINAL",0,0,"Step 4785, loss: 0.013938220217823982, step time: 18.002986907958984ms\r\n",,terminal_output +19064,13710258,"TERMINAL",0,0,"Step 4786, loss: 0.022991042584180832, step time: 26.160001754760742ms\r\n",,terminal_output +19065,13710363,"TERMINAL",0,0,"Step 4787, loss: 0.02988114207983017, step time: 21.77262306213379ms\r\nStep 4788, loss: 0.007485724985599518, step time: 18.25237274169922ms\r\n",,terminal_output +19066,13710458,"TERMINAL",0,0,"Step 4789, loss: 0.01778941974043846, step time: 18.975496292114258ms\r\n",,terminal_output +19067,13710523,"TERMINAL",0,0,"Step 4790, loss: 
0.01835792325437069, step time: 18.034934997558594ms\r\n",,terminal_output +19068,13710587,"TERMINAL",0,0,"Step 4791, loss: 0.059367939829826355, step time: 17.728328704833984ms\r\n",,terminal_output +19069,13710669,"TERMINAL",0,0,"Step 4792, loss: 0.05255413427948952, step time: 17.386198043823242ms\r\n",,terminal_output +19070,13710732,"TERMINAL",0,0,"Step 4793, loss: 0.23429492115974426, step time: 17.889022827148438ms\r\n",,terminal_output +19071,13710795,"TERMINAL",0,0,"Step 4794, loss: 0.05778619274497032, step time: 17.39335060119629ms\r\n",,terminal_output +19072,13710859,"TERMINAL",0,0,"Step 4795, loss: 0.01574757695198059, step time: 17.56143569946289ms\r\n",,terminal_output +19073,13710923,"TERMINAL",0,0,"Step 4796, loss: 0.03135178983211517, step time: 17.446041107177734ms\r\n",,terminal_output +19074,13711017,"TERMINAL",0,0,"Step 4797, loss: 0.02859264425933361, step time: 17.415285110473633ms\r\nStep 4798, loss: 0.18165479600429535, step time: 17.145872116088867ms\r\n",,terminal_output +19075,13711078,"TERMINAL",0,0,"Step 4799, loss: 0.019466405734419823, step time: 17.602205276489258ms\r\n",,terminal_output +19076,13711146,"TERMINAL",0,0,"Step 4800, loss: 0.22839199006557465, step time: 17.58599281311035ms\r\n",,terminal_output +19077,13711208,"TERMINAL",0,0,"Step 4801, loss: 0.023071197792887688, step time: 18.13483238220215ms\r\n",,terminal_output +19078,13711269,"TERMINAL",0,0,"Step 4802, loss: 0.02616375871002674, step time: 17.854928970336914ms\r\n",,terminal_output +19079,13711336,"TERMINAL",0,0,"Step 4803, loss: 0.01946154423058033, step time: 17.333507537841797ms\r\n",,terminal_output +19080,13711399,"TERMINAL",0,0,"Step 4804, loss: 0.030989261344075203, step time: 17.215967178344727ms\r\n",,terminal_output +19081,13711462,"TERMINAL",0,0,"Step 4805, loss: 0.030328741297125816, step time: 17.562389373779297ms\r\n",,terminal_output +19082,13711526,"TERMINAL",0,0,"Step 4806, loss: 0.04525235295295715, step time: 17.27461814880371ms\r\n",,terminal_output +19083,13711589,"TERMINAL",0,0,"Step 4807, loss: 0.01984217017889023, step time: 17.253875732421875ms\r\n",,terminal_output +19084,13711654,"TERMINAL",0,0,"Step 4808, loss: 0.03881760314106941, step time: 17.4407958984375ms\r\n",,terminal_output +19085,13711716,"TERMINAL",0,0,"Step 4809, loss: 0.12443803250789642, step time: 17.277240753173828ms\r\n",,terminal_output +19086,13711781,"TERMINAL",0,0,"Step 4810, loss: 0.03279035910964012, step time: 17.365217208862305ms\r\n",,terminal_output +19087,13711857,"TERMINAL",0,0,"Step 4811, loss: 0.031275007873773575, step time: 17.67754554748535ms\r\n",,terminal_output +19088,13711919,"TERMINAL",0,0,"Step 4812, loss: 0.017041707411408424, step time: 17.03500747680664ms\r\n",,terminal_output +19089,13712050,"TERMINAL",0,0,"Step 4813, loss: 0.03547954559326172, step time: 17.206668853759766ms\r\nStep 4814, loss: 0.044125787913799286, step time: 17.641305923461914ms\r\n",,terminal_output +19090,13712113,"TERMINAL",0,0,"Step 4815, loss: 0.042909178882837296, step time: 17.346858978271484ms\r\n",,terminal_output +19091,13712231,"TERMINAL",0,0,"Step 4816, loss: 0.11103203892707825, step time: 17.085552215576172ms\r\nStep 4817, loss: 0.02410638891160488, step time: 17.59028434753418ms\r\n",,terminal_output +19092,13712301,"TERMINAL",0,0,"Step 4818, loss: 0.04579517990350723, step time: 17.24982261657715ms\r\n",,terminal_output +19093,13712375,"TERMINAL",0,0,"Step 4819, loss: 0.01972738467156887, step time: 17.19355583190918ms\r\n",,terminal_output +19094,13712450,"TERMINAL",0,0,"Step 
4820, loss: 0.014786266721785069, step time: 17.470359802246094ms\r\n",,terminal_output +19095,13712517,"TERMINAL",0,0,"Step 4821, loss: 0.013958224095404148, step time: 17.344951629638672ms\r\n",,terminal_output +19096,13712629,"TERMINAL",0,0,"Step 4822, loss: 0.04449781775474548, step time: 17.07005500793457ms\r\nStep 4823, loss: 0.14782801270484924, step time: 17.443418502807617ms\r\n",,terminal_output +19097,13712755,"TERMINAL",0,0,"Step 4824, loss: 0.019405750557780266, step time: 17.42720603942871ms\r\nStep 4825, loss: 0.012747402302920818, step time: 17.348289489746094ms\r\n",,terminal_output +19098,13712812,"TERMINAL",0,0,"Step 4826, loss: 0.018119944259524345, step time: 17.31419563293457ms\r\n",,terminal_output +19099,13712884,"TERMINAL",0,0,"Step 4827, loss: 0.026831576600670815, step time: 17.27747917175293ms\r\n",,terminal_output +19100,13712945,"TERMINAL",0,0,"Step 4828, loss: 0.017547808587551117, step time: 17.21358299255371ms\r\n",,terminal_output +19101,13713026,"TERMINAL",0,0,"Step 4829, loss: 0.026968445628881454, step time: 17.58742332458496ms\r\n",,terminal_output +19102,13713083,"TERMINAL",0,0,"Step 4830, loss: 0.01794588379561901, step time: 17.11273193359375ms\r\n",,terminal_output +19103,13713134,"TERMINAL",0,0,"Step 4831, loss: 0.03504886105656624, step time: 17.376422882080078ms\r\n",,terminal_output +19104,13713233,"TERMINAL",0,0,"Step 4832, loss: 0.03467246890068054, step time: 17.440080642700195ms\r\nStep 4833, loss: 0.11704512685537338, step time: 17.756938934326172ms\r\n",,terminal_output +19105,13713320,"TERMINAL",0,0,"Step 4834, loss: 0.013721819967031479, step time: 17.23027229309082ms\r\n",,terminal_output +19106,13713396,"TERMINAL",0,0,"Step 4835, loss: 0.024122819304466248, step time: 17.458200454711914ms\r\n",,terminal_output +19107,13713442,"TERMINAL",0,0,"Step 4836, loss: 0.03693164885044098, step time: 17.21477508544922ms\r\n",,terminal_output +19108,13713494,"TERMINAL",0,0,"Step 4837, loss: 0.0442817322909832, step time: 17.284631729125977ms\r\n",,terminal_output +19109,13713600,"TERMINAL",0,0,"Step 4838, loss: 0.016907015815377235, step time: 17.61460304260254ms\r\n",,terminal_output +19110,13713680,"TERMINAL",0,0,"Step 4839, loss: 0.15942934155464172, step time: 17.342090606689453ms\r\nStep 4840, loss: 0.01897023245692253, step time: 17.218351364135742ms\r\n",,terminal_output +19111,13713742,"TERMINAL",0,0,"Step 4841, loss: 0.02612845040857792, step time: 17.411231994628906ms\r\n",,terminal_output +19112,13713806,"TERMINAL",0,0,"Step 4842, loss: 0.016955409198999405, step time: 17.278194427490234ms\r\n",,terminal_output +19113,13713866,"TERMINAL",0,0,"Step 4843, loss: 0.021819619461894035, step time: 17.26818084716797ms\r\n",,terminal_output +19114,13713931,"TERMINAL",0,0,"Step 4844, loss: 0.020933806896209717, step time: 17.507553100585938ms\r\n",,terminal_output +19115,13713996,"TERMINAL",0,0,"Step 4845, loss: 0.01783205196261406, step time: 17.282724380493164ms\r\n",,terminal_output +19116,13714063,"TERMINAL",0,0,"Step 4846, loss: 0.3069881796836853, step time: 17.23194122314453ms\r\n",,terminal_output +19117,13714141,"TERMINAL",0,0,"Step 4847, loss: 0.02742147445678711, step time: 17.861366271972656ms\r\n",,terminal_output +19118,13714199,"TERMINAL",0,0,"Step 4848, loss: 0.024674592539668083, step time: 17.309188842773438ms\r\n",,terminal_output +19119,13714273,"TERMINAL",0,0,"Step 4849, loss: 0.03351066634058952, step time: 17.14468002319336ms\r\n",,terminal_output +19120,13714335,"TERMINAL",0,0,"Step 4850, loss: 0.07593059539794922, step 
time: 17.573118209838867ms\r\n",,terminal_output +19121,13714411,"TERMINAL",0,0,"Step 4851, loss: 0.22863665223121643, step time: 17.24696159362793ms\r\n",,terminal_output +19122,13714472,"TERMINAL",0,0,"Step 4852, loss: 0.028010575100779533, step time: 17.36307144165039ms\r\n",,terminal_output +19123,13714535,"TERMINAL",0,0,"Step 4853, loss: 0.014333977364003658, step time: 17.53997802734375ms\r\n",,terminal_output +19124,13714600,"TERMINAL",0,0,"Step 4854, loss: 0.023771662265062332, step time: 17.007112503051758ms\r\n",,terminal_output +19125,13714662,"TERMINAL",0,0,"Step 4855, loss: 0.05591171234846115, step time: 17.25149154663086ms\r\n",,terminal_output +19126,13714768,"TERMINAL",0,0,"Step 4856, loss: 0.03716927021741867, step time: 17.398834228515625ms\r\nStep 4857, loss: 0.07706110179424286, step time: 17.200708389282227ms\r\n",,terminal_output +19127,13714831,"TERMINAL",0,0,"Step 4858, loss: 0.10137838870286942, step time: 17.128467559814453ms\r\n",,terminal_output +19128,13714893,"TERMINAL",0,0,"Step 4859, loss: 0.02450765110552311, step time: 17.370939254760742ms\r\n",,terminal_output +19129,13714956,"TERMINAL",0,0,"Step 4860, loss: 0.017587760463356972, step time: 17.407655715942383ms\r\n",,terminal_output +19130,13715021,"TERMINAL",0,0,"Step 4861, loss: 0.024154599756002426, step time: 17.194032669067383ms\r\n",,terminal_output +19131,13715101,"TERMINAL",0,0,"Step 4862, loss: 0.05166942626237869, step time: 17.586946487426758ms\r\n",,terminal_output +19132,13715149,"TERMINAL",0,0,"Step 4863, loss: 0.05290824547410011, step time: 17.37356185913086ms\r\n",,terminal_output +19133,13715216,"TERMINAL",0,0,"Step 4864, loss: 0.07551393657922745, step time: 17.400264739990234ms\r\n",,terminal_output +19134,13715291,"TERMINAL",0,0,"Step 4865, loss: 0.018589314073324203, step time: 17.54164695739746ms\r\n",,terminal_output +19135,13715341,"TERMINAL",0,0,"Step 4866, loss: 0.015424379147589207, step time: 17.22574234008789ms\r\n",,terminal_output +19136,13715404,"TERMINAL",0,0,"Step 4867, loss: 0.032421309500932693, step time: 17.243623733520508ms\r\n",,terminal_output +19137,13715686,"TERMINAL",0,0,"Step 4868, loss: 0.026201486587524414, step time: 291.5306091308594ms\r\n",,terminal_output +19138,13715791,"TERMINAL",0,0,"Step 4869, loss: 0.01881386898458004, step time: 25.29740333557129ms\r\n",,terminal_output +19139,13715842,"TERMINAL",0,0,"Step 4870, loss: 0.05780132859945297, step time: 19.240379333496094ms\r\n",,terminal_output +19140,13715946,"TERMINAL",0,0,"Step 4871, loss: 0.45811617374420166, step time: 18.35322380065918ms\r\nStep 4872, loss: 0.01798858679831028, step time: 17.415761947631836ms\r\n",,terminal_output +19141,13716040,"TERMINAL",0,0,"Step 4873, loss: 0.17491228878498077, step time: 17.542600631713867ms\r\n",,terminal_output +19142,13716091,"TERMINAL",0,0,"Step 4874, loss: 0.025338567793369293, step time: 17.699480056762695ms\r\n",,terminal_output +19143,13716207,"TERMINAL",0,0,"Step 4875, loss: 0.0721268579363823, step time: 17.431974411010742ms\r\nStep 4876, loss: 0.03817936033010483, step time: 17.264604568481445ms\r\n",,terminal_output +19144,13716263,"TERMINAL",0,0,"Step 4877, loss: 0.026089124381542206, step time: 17.519235610961914ms\r\n",,terminal_output +19145,13716325,"TERMINAL",0,0,"Step 4878, loss: 0.04683190584182739, step time: 17.447471618652344ms\r\n",,terminal_output +19146,13716387,"TERMINAL",0,0,"Step 4879, loss: 0.04143248498439789, step time: 17.354249954223633ms\r\n",,terminal_output +19147,13716451,"TERMINAL",0,0,"Step 4880, loss: 
0.03764025866985321, step time: 17.656803131103516ms\r\n",,terminal_output +19148,13716515,"TERMINAL",0,0,"Step 4881, loss: 0.012028571218252182, step time: 17.266273498535156ms\r\n",,terminal_output +19149,13716577,"TERMINAL",0,0,"Step 4882, loss: 0.027421128004789352, step time: 17.299890518188477ms\r\n",,terminal_output +19150,13716643,"TERMINAL",0,0,"Step 4883, loss: 0.014883660711348057, step time: 17.89069175720215ms\r\n",,terminal_output +19151,13716711,"TERMINAL",0,0,"Step 4884, loss: 0.010421092621982098, step time: 17.210960388183594ms\r\n",,terminal_output +19152,13716770,"TERMINAL",0,0,"Step 4885, loss: 0.08262357860803604, step time: 17.287015914916992ms\r\n",,terminal_output +19153,13716839,"TERMINAL",0,0,"Step 4886, loss: 0.023143619298934937, step time: 17.711400985717773ms\r\n",,terminal_output +19154,13716902,"TERMINAL",0,0,"Step 4887, loss: 0.03369683772325516, step time: 17.34638214111328ms\r\n",,terminal_output +19155,13716968,"TERMINAL",0,0,"Step 4888, loss: 0.15907609462738037, step time: 17.06218719482422ms\r\n",,terminal_output +19156,13717032,"TERMINAL",0,0,"Step 4889, loss: 0.034425023943185806, step time: 17.865657806396484ms\r\n",,terminal_output +19157,13717092,"TERMINAL",0,0,"Step 4890, loss: 0.05646156519651413, step time: 17.22097396850586ms\r\n",,terminal_output +19158,13717156,"TERMINAL",0,0,"Step 4891, loss: 0.025499427691102028, step time: 17.345905303955078ms\r\n",,terminal_output +19159,13717215,"TERMINAL",0,0,"Step 4892, loss: 0.027152612805366516, step time: 17.581701278686523ms\r\n",,terminal_output +19160,13717277,"TERMINAL",0,0,"Step 4893, loss: 0.05245926231145859, step time: 17.2731876373291ms\r\n",,terminal_output +19161,13717341,"TERMINAL",0,0,"Step 4894, loss: 0.020354285836219788, step time: 17.10963249206543ms\r\n",,terminal_output +19162,13717406,"TERMINAL",0,0,"Step 4895, loss: 0.03551708534359932, step time: 29.484272003173828ms\r\n",,terminal_output +19163,13717512,"TERMINAL",0,0,"Step 4896, loss: 0.03038772940635681, step time: 21.152734756469727ms\r\n",,terminal_output +19164,13717625,"TERMINAL",0,0,"Step 4897, loss: 0.030033063143491745, step time: 27.272939682006836ms\r\nStep 4898, loss: 0.009091218933463097, step time: 18.174409866333008ms\r\n",,terminal_output +19165,13717732,"TERMINAL",0,0,"Step 4899, loss: 0.012622270733118057, step time: 17.531394958496094ms\r\nStep 4900, loss: 0.06809093058109283, step time: 17.36736297607422ms\r\n",,terminal_output +19166,13717827,"TERMINAL",0,0,"Step 4901, loss: 0.029838120564818382, step time: 17.951488494873047ms\r\n",,terminal_output +19167,13717936,"TERMINAL",0,0,"Step 4902, loss: 0.012053826823830605, step time: 17.15397834777832ms\r\nStep 4903, loss: 0.014601842500269413, step time: 17.272233963012695ms\r\n",,terminal_output +19168,13718007,"TERMINAL",0,0,"Step 4904, loss: 0.016062647104263306, step time: 17.705678939819336ms\r\n",,terminal_output +19169,13718065,"TERMINAL",0,0,"Step 4905, loss: 0.008230880834162235, step time: 17.218828201293945ms\r\n",,terminal_output +19170,13718135,"TERMINAL",0,0,"Step 4906, loss: 0.03914906084537506, step time: 17.169713973999023ms\r\n",,terminal_output +19171,13718230,"TERMINAL",0,0,"Step 4907, loss: 0.047293100506067276, step time: 17.56763458251953ms\r\nStep 4908, loss: 0.022067006677389145, step time: 16.94178581237793ms\r\n",,terminal_output +19172,13718325,"TERMINAL",0,0,"Step 4909, loss: 0.0916624516248703, step time: 17.327547073364258ms\r\n",,terminal_output +19173,13718386,"TERMINAL",0,0,"Step 4910, loss: 0.0230603888630867, step time: 
17.547607421875ms\r\n",,terminal_output +19174,13718447,"TERMINAL",0,0,"Step 4911, loss: 0.018659472465515137, step time: 17.450809478759766ms\r\n",,terminal_output +19175,13718511,"TERMINAL",0,0,"Step 4912, loss: 0.013281709514558315, step time: 17.09771156311035ms\r\n",,terminal_output +19176,13718568,"TERMINAL",0,0,"Step 4913, loss: 0.013203117996454239, step time: 17.743349075317383ms\r\n",,terminal_output +19177,13718633,"TERMINAL",0,0,"Step 4914, loss: 0.03958656266331673, step time: 17.471790313720703ms\r\n",,terminal_output +19178,13718683,"TERMINAL",0,0,"Step 4915, loss: 0.015351192094385624, step time: 17.347335815429688ms\r\n",,terminal_output +19179,13718778,"TERMINAL",0,0,"Step 4916, loss: 0.04776659607887268, step time: 21.10147476196289ms\r\n",,terminal_output +19180,13718840,"TERMINAL",0,0,"Step 4917, loss: 0.05686773732304573, step time: 17.348527908325195ms\r\n",,terminal_output +19181,13718903,"TERMINAL",0,0,"Step 4918, loss: 0.017742682248353958, step time: 17.248153686523438ms\r\n",,terminal_output +19182,13718963,"TERMINAL",0,0,"Step 4919, loss: 0.07289701700210571, step time: 17.931222915649414ms\r\n",,terminal_output +19183,13719069,"TERMINAL",0,0,"Step 4920, loss: 0.045102477073669434, step time: 17.18449592590332ms\r\nStep 4921, loss: 0.024349825456738472, step time: 17.25029945373535ms\r\n",,terminal_output +19184,13719182,"TERMINAL",0,0,"Step 4922, loss: 0.010773763060569763, step time: 17.725706100463867ms\r\nStep 4923, loss: 0.09314003586769104, step time: 17.47584342956543ms\r\n",,terminal_output +19185,13719241,"TERMINAL",0,0,"Step 4924, loss: 0.006784906145185232, step time: 17.322301864624023ms\r\n",,terminal_output +19186,13719375,"TERMINAL",0,0,"Step 4925, loss: 0.012557662092149258, step time: 17.878293991088867ms\r\nStep 4926, loss: 0.012502366676926613, step time: 17.118215560913086ms\r\n",,terminal_output +19187,13719502,"TERMINAL",0,0,"Step 4927, loss: 0.03458268940448761, step time: 17.431259155273438ms\r\nStep 4928, loss: 0.06034914776682854, step time: 17.521142959594727ms\r\n",,terminal_output +19188,13719563,"TERMINAL",0,0,"Step 4929, loss: 0.009363198652863503, step time: 17.279386520385742ms\r\n",,terminal_output +19189,13719656,"TERMINAL",0,0,"Step 4930, loss: 0.01568761095404625, step time: 17.140626907348633ms\r\n",,terminal_output +19190,13719707,"TERMINAL",0,0,"Step 4931, loss: 0.020661266520619392, step time: 17.458200454711914ms\r\n",,terminal_output +19191,13719815,"TERMINAL",0,0,"Step 4932, loss: 0.026464644819498062, step time: 17.23623275756836ms\r\nStep 4933, loss: 0.02832135558128357, step time: 17.24529266357422ms\r\n",,terminal_output +19192,13719879,"TERMINAL",0,0,"Step 4934, loss: 0.015630697831511497, step time: 17.5015926361084ms\r\n",,terminal_output +19193,13719937,"TERMINAL",0,0,"Step 4935, loss: 0.0125731211155653, step time: 17.334938049316406ms\r\n",,terminal_output +19194,13720005,"TERMINAL",0,0,"Step 4936, loss: 0.07904144376516342, step time: 17.266511917114258ms\r\n",,terminal_output +19195,13720066,"TERMINAL",0,0,"Step 4937, loss: 0.04517186060547829, step time: 17.76742935180664ms\r\n",,terminal_output +19196,13720188,"TERMINAL",0,0,"Step 4938, loss: 0.012563200667500496, step time: 17.188072204589844ms\r\nStep 4939, loss: 0.03849826380610466, step time: 17.38595962524414ms\r\n",,terminal_output +19197,13720253,"TERMINAL",0,0,"Step 4940, loss: 0.017133520916104317, step time: 17.457962036132812ms\r\n",,terminal_output +19198,13720318,"TERMINAL",0,0,"Step 4941, loss: 0.03852930665016174, step time: 
17.200708389282227ms\r\n",,terminal_output +19199,13720413,"TERMINAL",0,0,"Step 4942, loss: 0.018667645752429962, step time: 17.23003387451172ms\r\n",,terminal_output +19200,13720466,"TERMINAL",0,0,"Step 4943, loss: 0.05124862119555473, step time: 17.5168514251709ms\r\n",,terminal_output +19201,13720575,"TERMINAL",0,0,"Step 4944, loss: 0.10234338045120239, step time: 17.09580421447754ms\r\nStep 4945, loss: 0.01847854256629944, step time: 17.36927032470703ms\r\n",,terminal_output +19202,13720698,"TERMINAL",0,0,"Step 4946, loss: 0.04013970494270325, step time: 17.508745193481445ms\r\nStep 4947, loss: 0.03086160495877266, step time: 17.262935638427734ms\r\n",,terminal_output +19203,13720762,"TERMINAL",0,0,"Step 4948, loss: 0.024030715227127075, step time: 17.08507537841797ms\r\n",,terminal_output +19204,13720823,"TERMINAL",0,0,"Step 4949, loss: 0.019797377288341522, step time: 17.635107040405273ms\r\n",,terminal_output +19205,13720887,"TERMINAL",0,0,"Step 4950, loss: 0.00868980586528778, step time: 17.22097396850586ms\r\n",,terminal_output +19206,13720951,"TERMINAL",0,0,"Step 4951, loss: 0.024664828553795815, step time: 17.330169677734375ms\r\n",,terminal_output +19207,13721016,"TERMINAL",0,0,"Step 4952, loss: 0.020276764407753944, step time: 17.48824119567871ms\r\n",,terminal_output +19208,13721077,"TERMINAL",0,0,"Step 4953, loss: 0.007167883217334747, step time: 17.91095733642578ms\r\n",,terminal_output +19209,13721139,"TERMINAL",0,0,"Step 4954, loss: 0.03782843053340912, step time: 19.109249114990234ms\r\n",,terminal_output +19210,13721204,"TERMINAL",0,0,"Step 4955, loss: 0.03019074909389019, step time: 18.236160278320312ms\r\n",,terminal_output +19211,13721267,"TERMINAL",0,0,"Step 4956, loss: 0.039218202233314514, step time: 17.248868942260742ms\r\n",,terminal_output +19212,13721329,"TERMINAL",0,0,"Step 4957, loss: 0.022308846935629845, step time: 17.551660537719727ms\r\n",,terminal_output +19213,13721393,"TERMINAL",0,0,"Step 4958, loss: 0.04290838912129402, step time: 17.90761947631836ms\r\n",,terminal_output +19214,13721454,"TERMINAL",0,0,"Step 4959, loss: 0.00437437929213047, step time: 17.581701278686523ms\r\n",,terminal_output +19215,13721519,"TERMINAL",0,0,"Step 4960, loss: 0.10257747769355774, step time: 17.35377311706543ms\r\n",,terminal_output +19216,13721589,"TERMINAL",0,0,"Step 4961, loss: 0.028989143669605255, step time: 17.747163772583008ms\r\n",,terminal_output +19217,13721654,"TERMINAL",0,0,"Step 4962, loss: 0.042518001049757004, step time: 17.131567001342773ms\r\n",,terminal_output +19218,13721715,"TERMINAL",0,0,"Step 4963, loss: 0.0199444442987442, step time: 17.45772361755371ms\r\n",,terminal_output +19219,13721776,"TERMINAL",0,0,"Step 4964, loss: 0.041753076016902924, step time: 17.518997192382812ms\r\n",,terminal_output +19220,13721868,"TERMINAL",0,0,"Step 4965, loss: 0.014194688759744167, step time: 17.169475555419922ms\r\n",,terminal_output +19221,13721921,"TERMINAL",0,0,"Step 4966, loss: 0.012220301665365696, step time: 17.05765724182129ms\r\n",,terminal_output +19222,13722028,"TERMINAL",0,0,"Step 4967, loss: 0.01076488010585308, step time: 17.678260803222656ms\r\nStep 4968, loss: 0.022023309022188187, step time: 17.293453216552734ms\r\n",,terminal_output +19223,13722095,"TERMINAL",0,0,"Step 4969, loss: 0.019929692149162292, step time: 17.94290542602539ms\r\n",,terminal_output +19224,13722159,"TERMINAL",0,0,"Step 4970, loss: 0.04432964324951172, step time: 17.58861541748047ms\r\n",,terminal_output +19225,13722227,"TERMINAL",0,0,"Step 4971, loss: 
0.011205174960196018, step time: 17.277956008911133ms\r\n",,terminal_output +19226,13722292,"TERMINAL",0,0,"Step 4972, loss: 0.03725292906165123, step time: 17.24720001220703ms\r\n",,terminal_output +19227,13722350,"TERMINAL",0,0,"Step 4973, loss: 0.022992897778749466, step time: 17.670869827270508ms\r\n",,terminal_output +19228,13722413,"TERMINAL",0,0,"Step 4974, loss: 0.01446596346795559, step time: 17.15707778930664ms\r\n",,terminal_output +19229,13722475,"TERMINAL",0,0,"Step 4975, loss: 0.01139292772859335, step time: 17.25006103515625ms\r\n",,terminal_output +19230,13722543,"TERMINAL",0,0,"Step 4976, loss: 0.11798083037137985, step time: 17.480134963989258ms\r\n",,terminal_output +19231,13722607,"TERMINAL",0,0,"Step 4977, loss: 0.026773512363433838, step time: 17.15683937072754ms\r\n",,terminal_output +19232,13722671,"TERMINAL",0,0,"Step 4978, loss: 0.008753935806453228, step time: 17.043113708496094ms\r\n",,terminal_output +19233,13722735,"TERMINAL",0,0,"Step 4979, loss: 0.025972671806812286, step time: 17.45748519897461ms\r\n",,terminal_output +19234,13722799,"TERMINAL",0,0,"Step 4980, loss: 0.014868441969156265, step time: 17.163753509521484ms\r\n",,terminal_output +19235,13722864,"TERMINAL",0,0,"Step 4981, loss: 0.011767719872295856, step time: 17.427444458007812ms\r\n",,terminal_output +19236,13722926,"TERMINAL",0,0,"Step 4982, loss: 0.016865847632288933, step time: 17.62557029724121ms\r\n",,terminal_output +19237,13722990,"TERMINAL",0,0,"Step 4983, loss: 0.015104218386113644, step time: 17.361879348754883ms\r\n",,terminal_output +19238,13723052,"TERMINAL",0,0,"Step 4984, loss: 0.012100364081561565, step time: 17.064332962036133ms\r\n",,terminal_output +19239,13723119,"TERMINAL",0,0,"Step 4985, loss: 0.014297104440629482, step time: 17.395973205566406ms\r\n",,terminal_output +19240,13723194,"TERMINAL",0,0,"Step 4986, loss: 0.08820021897554398, step time: 17.193317413330078ms\r\n",,terminal_output +19241,13723232,"TERMINAL",0,0,"Step 4987, loss: 0.00736694410443306, step time: 17.20881462097168ms\r\n",,terminal_output +19242,13723297,"TERMINAL",0,0,"Step 4988, loss: 0.01581256464123726, step time: 17.454147338867188ms\r\n",,terminal_output +19243,13723373,"TERMINAL",0,0,"Step 4989, loss: 0.034146662801504135, step time: 17.351150512695312ms\r\n",,terminal_output +19244,13723425,"TERMINAL",0,0,"Step 4990, loss: 0.013365688733756542, step time: 17.333030700683594ms\r\n",,terminal_output +19245,13723492,"TERMINAL",0,0,"Step 4991, loss: 0.016149116680026054, step time: 17.59815216064453ms\r\n",,terminal_output +19246,13723553,"TERMINAL",0,0,"Step 4992, loss: 0.05948382988572121, step time: 17.731189727783203ms\r\n",,terminal_output +19247,13723670,"TERMINAL",0,0,"Step 4993, loss: 0.05770909786224365, step time: 17.775535583496094ms\r\n",,terminal_output +19248,13723679,"TERMINAL",0,0,"Step 4994, loss: 0.020783323794603348, step time: 17.78435707092285ms\r\n",,terminal_output +19249,13723735,"TERMINAL",0,0,"Step 4995, loss: 0.01680692844092846, step time: 17.48490333557129ms\r\n",,terminal_output +19250,13723800,"TERMINAL",0,0,"Step 4996, loss: 0.11025453358888626, step time: 17.25006103515625ms\r\n",,terminal_output +19251,13723925,"TERMINAL",0,0,"Step 4997, loss: 0.028463270515203476, step time: 17.62533187866211ms\r\nStep 4998, loss: 0.11072088778018951, step time: 16.97230339050293ms\r\n",,terminal_output +19252,13723987,"TERMINAL",0,0,"Step 4999, loss: 0.017842182889580727, step time: 17.374753952026367ms\r\n",,terminal_output +19253,13726865,"TERMINAL",0,0,"Step 5000, loss: 
0.17330150306224823, step time: 34.09838676452637ms\r\nStep 5001, loss: 0.007777343038469553, step time: 26.760578155517578ms\r\n",,terminal_output +19254,13726935,"TERMINAL",0,0,"Step 5002, loss: 0.0127739692106843, step time: 19.736766815185547ms\r\n",,terminal_output +19255,13726992,"TERMINAL",0,0,"Step 5003, loss: 0.012031378224492073, step time: 19.79970932006836ms\r\n",,terminal_output +19256,13727056,"TERMINAL",0,0,"Step 5004, loss: 0.07535131275653839, step time: 18.13960075378418ms\r\n",,terminal_output +19257,13727119,"TERMINAL",0,0,"Step 5005, loss: 0.02445358596742153, step time: 18.196582794189453ms\r\n",,terminal_output +19258,13727194,"TERMINAL",0,0,"Step 5006, loss: 0.026020728051662445, step time: 18.30577850341797ms\r\n",,terminal_output +19259,13727246,"TERMINAL",0,0,"Step 5007, loss: 0.025545431300997734, step time: 19.066810607910156ms\r\n",,terminal_output +19260,13727312,"TERMINAL",0,0,"Step 5008, loss: 0.023736605420708656, step time: 17.986297607421875ms\r\n",,terminal_output +19261,13727415,"TERMINAL",0,0,"Step 5009, loss: 0.042882490903139114, step time: 18.6312198638916ms\r\n",,terminal_output +19262,13727467,"TERMINAL",0,0,"Step 5010, loss: 0.09383391588926315, step time: 17.87590980529785ms\r\n",,terminal_output +19263,13727571,"TERMINAL",0,0,"Step 5011, loss: 0.013921452686190605, step time: 18.159151077270508ms\r\nStep 5012, loss: 0.02924274653196335, step time: 18.205881118774414ms\r\n",,terminal_output +19264,13727689,"TERMINAL",0,0,"Step 5013, loss: 0.0218832865357399, step time: 17.91667938232422ms\r\nStep 5014, loss: 0.048048969358205795, step time: 17.767667770385742ms\r\n",,terminal_output +19265,13727786,"TERMINAL",0,0,"Step 5015, loss: 0.02265859581530094, step time: 18.66912841796875ms\r\n",,terminal_output +19266,13727836,"TERMINAL",0,0,"Step 5016, loss: 0.01227337121963501, step time: 17.832517623901367ms\r\n",,terminal_output +19267,13727928,"TERMINAL",0,0,"Step 5017, loss: 0.017844058573246002, step time: 17.824411392211914ms\r\n",,terminal_output +19268,13727981,"TERMINAL",0,0,"Step 5018, loss: 0.043992891907691956, step time: 18.182992935180664ms\r\n",,terminal_output +19269,13728035,"TERMINAL",0,0,"Step 5019, loss: 0.015130188316106796, step time: 17.933368682861328ms\r\n",,terminal_output +19270,13728101,"TERMINAL",0,0,"Step 5020, loss: 0.02196982130408287, step time: 17.800092697143555ms\r\n",,terminal_output +19271,13728198,"TERMINAL",0,0,"Step 5021, loss: 0.0222727470099926, step time: 18.54228973388672ms\r\nStep 5022, loss: 0.02341858111321926, step time: 17.670154571533203ms\r\n",,terminal_output +19272,13728293,"TERMINAL",0,0,"Step 5023, loss: 0.016628576442599297, step time: 17.82822608947754ms\r\n",,terminal_output +19273,13728361,"TERMINAL",0,0,"Step 5024, loss: 0.01346933376044035, step time: 18.120765686035156ms\r\n",,terminal_output +19274,13728467,"TERMINAL",0,0,"Step 5025, loss: 0.011330676265060902, step time: 18.05853843688965ms\r\nStep 5026, loss: 0.020061112940311432, step time: 17.702102661132812ms\r\n",,terminal_output +19275,13728527,"TERMINAL",0,0,"Step 5027, loss: 0.023943431675434113, step time: 18.451690673828125ms\r\n",,terminal_output +19276,13728596,"TERMINAL",0,0,"Step 5028, loss: 0.01341933198273182, step time: 18.02992820739746ms\r\n",,terminal_output +19277,13728649,"TERMINAL",0,0,"Step 5029, loss: 0.009484464302659035, step time: 18.40066909790039ms\r\n",,terminal_output +19278,13728744,"TERMINAL",0,0,"Step 5030, loss: 0.05064363405108452, step time: 18.344640731811523ms\r\n",,terminal_output 
+19279,13728807,"TERMINAL",0,0,"Step 5031, loss: 0.010613336227834225, step time: 18.13054084777832ms\r\n",,terminal_output +19280,13728868,"TERMINAL",0,0,"Step 5032, loss: 0.07297520339488983, step time: 17.803668975830078ms\r\n",,terminal_output +19281,13728936,"TERMINAL",0,0,"Step 5033, loss: 0.24583746492862701, step time: 19.240856170654297ms\r\n",,terminal_output +19282,13729046,"TERMINAL",0,0,"Step 5034, loss: 0.009743784554302692, step time: 17.69256591796875ms\r\nStep 5035, loss: 0.03246741369366646, step time: 17.945051193237305ms\r\n",,terminal_output +19283,13729106,"TERMINAL",0,0,"Step 5036, loss: 0.03190695121884346, step time: 18.426179885864258ms\r\n",,terminal_output +19284,13729170,"TERMINAL",0,0,"Step 5037, loss: 0.019417908042669296, step time: 18.137454986572266ms\r\n",,terminal_output +19285,13729232,"TERMINAL",0,0,"Step 5038, loss: 0.04349455237388611, step time: 17.76909828186035ms\r\n",,terminal_output +19286,13729292,"TERMINAL",0,0,"Step 5039, loss: 0.12810395658016205, step time: 18.560409545898438ms\r\n",,terminal_output +19287,13729355,"TERMINAL",0,0,"Step 5040, loss: 0.01999729499220848, step time: 17.879009246826172ms\r\n",,terminal_output +19288,13729418,"TERMINAL",0,0,"Step 5041, loss: 0.023553919047117233, step time: 18.007516860961914ms\r\n",,terminal_output +19289,13729530,"TERMINAL",0,0,"Step 5042, loss: 0.04146023094654083, step time: 18.14723014831543ms\r\nStep 5043, loss: 0.06798727810382843, step time: 18.056154251098633ms\r\n",,terminal_output +19290,13729599,"TERMINAL",0,0,"Step 5044, loss: 0.019836973398923874, step time: 17.966032028198242ms\r\n",,terminal_output +19291,13729662,"TERMINAL",0,0,"Step 5045, loss: 0.025128666311502457, step time: 18.594026565551758ms\r\n",,terminal_output +19292,13729725,"TERMINAL",0,0,"Step 5046, loss: 0.03911333531141281, step time: 17.889022827148438ms\r\n",,terminal_output +19293,13729815,"TERMINAL",0,0,"Step 5047, loss: 0.011684478260576725, step time: 18.07570457458496ms\r\n",,terminal_output +19294,13729868,"TERMINAL",0,0,"Step 5048, loss: 0.01703440025448799, step time: 18.196582794189453ms\r\n",,terminal_output +19295,13729933,"TERMINAL",0,0,"Step 5049, loss: 0.03249392658472061, step time: 18.068552017211914ms\r\n",,terminal_output +19296,13729996,"TERMINAL",0,0,"Step 5050, loss: 0.023260829970240593, step time: 17.641305923461914ms\r\n",,terminal_output +19297,13730058,"TERMINAL",0,0,"Step 5051, loss: 0.01195209939032793, step time: 18.482446670532227ms\r\n",,terminal_output +19298,13730334,"TERMINAL",0,0,"Step 5052, loss: 0.2979929745197296, step time: 289.73889350891113ms\r\n",,terminal_output +19299,13730400,"TERMINAL",0,0,"Step 5053, loss: 0.02345210500061512, step time: 26.064634323120117ms\r\n",,terminal_output +19300,13730509,"TERMINAL",0,0,"Step 5054, loss: 0.014112630859017372, step time: 20.248889923095703ms\r\n",,terminal_output +19301,13730616,"TERMINAL",0,0,"Step 5055, loss: 0.024365315213799477, step time: 19.240617752075195ms\r\nStep 5056, loss: 0.2531431317329407, step time: 18.240690231323242ms\r\n",,terminal_output +19302,13730723,"TERMINAL",0,0,"Step 5057, loss: 0.04296828433871269, step time: 18.11981201171875ms\r\nStep 5058, loss: 0.019117413088679314, step time: 18.16391944885254ms\r\n",,terminal_output +19303,13730789,"TERMINAL",0,0,"Step 5059, loss: 0.03271098434925079, step time: 17.884492874145508ms\r\n",,terminal_output +19304,13730852,"TERMINAL",0,0,"Step 5060, loss: 0.011607772670686245, step time: 18.01013946533203ms\r\n",,terminal_output +19305,13730947,"TERMINAL",0,0,"Step 
5061, loss: 0.051729705184698105, step time: 18.218994140625ms\r\n",,terminal_output +19306,13730999,"TERMINAL",0,0,"Step 5062, loss: 0.03212013840675354, step time: 17.972946166992188ms\r\n",,terminal_output +19307,13731092,"TERMINAL",0,0,"Step 5063, loss: 0.03363073617219925, step time: 17.977476119995117ms\r\n",,terminal_output +19308,13731145,"TERMINAL",0,0,"Step 5064, loss: 0.014330415986478329, step time: 18.390178680419922ms\r\n",,terminal_output +19309,13731197,"TERMINAL",0,0,"Step 5065, loss: 0.011931711807847023, step time: 17.911434173583984ms\r\n",,terminal_output +19310,13731390,"TERMINAL",0,0,"Step 5066, loss: 0.07353320717811584, step time: 17.928123474121094ms\r\nStep 5067, loss: 0.010653280653059483, step time: 23.406267166137695ms\r\n",,terminal_output +19311,13731443,"TERMINAL",0,0,"Step 5068, loss: 0.02093968540430069, step time: 20.849227905273438ms\r\nStep 5069, loss: 0.015955990180373192, step time: 19.220352172851562ms\r\n",,terminal_output +19312,13731559,"TERMINAL",0,0,"Step 5070, loss: 0.01291000097990036, step time: 18.655776977539062ms\r\nStep 5071, loss: 0.018292035907506943, step time: 18.29218864440918ms\r\n",,terminal_output +19313,13731621,"TERMINAL",0,0,"Step 5072, loss: 0.043687909841537476, step time: 18.15485954284668ms\r\n",,terminal_output +19314,13731687,"TERMINAL",0,0,"Step 5073, loss: 0.04770456627011299, step time: 18.501758575439453ms\r\n",,terminal_output +19315,13731759,"TERMINAL",0,0,"Step 5074, loss: 0.029420025646686554, step time: 18.065452575683594ms\r\n",,terminal_output +19316,13731815,"TERMINAL",0,0,"Step 5075, loss: 0.035241659730672836, step time: 19.50669288635254ms\r\n",,terminal_output +19317,13731878,"TERMINAL",0,0,"Step 5076, loss: 0.016797620803117752, step time: 18.384456634521484ms\r\n",,terminal_output +19318,13731973,"TERMINAL",0,0,"Step 5077, loss: 0.12975460290908813, step time: 18.126487731933594ms\r\n",,terminal_output +19319,13732026,"TERMINAL",0,0,"Step 5078, loss: 0.012797481380403042, step time: 17.96436309814453ms\r\n",,terminal_output +19320,13732131,"TERMINAL",0,0,"Step 5079, loss: 0.01847049407660961, step time: 18.27263832092285ms\r\nStep 5080, loss: 0.011472644284367561, step time: 18.400192260742188ms\r\n",,terminal_output +19321,13732259,"TERMINAL",0,0,"Step 5081, loss: 0.013416117988526821, step time: 18.09549331665039ms\r\nStep 5082, loss: 0.02205868437886238, step time: 18.252849578857422ms\r\n",,terminal_output +19322,13732327,"TERMINAL",0,0,"Step 5083, loss: 0.020175542682409286, step time: 18.03874969482422ms\r\n",,terminal_output +19323,13732388,"TERMINAL",0,0,"Step 5084, loss: 0.03464549034833908, step time: 18.048763275146484ms\r\n",,terminal_output +19324,13732455,"TERMINAL",0,0,"Step 5085, loss: 0.07528547942638397, step time: 18.25118064880371ms\r\n",,terminal_output +19325,13732514,"TERMINAL",0,0,"Step 5086, loss: 0.018801644444465637, step time: 17.98534393310547ms\r\n",,terminal_output +19326,13732577,"TERMINAL",0,0,"Step 5087, loss: 0.0058975061401724815, step time: 18.080472946166992ms\r\n",,terminal_output +19327,13732640,"TERMINAL",0,0,"Step 5088, loss: 0.012657552026212215, step time: 18.37778091430664ms\r\n",,terminal_output +19328,13732705,"TERMINAL",0,0,"Step 5089, loss: 0.01509940717369318, step time: 18.600940704345703ms\r\n",,terminal_output +19329,13732768,"TERMINAL",0,0,"Step 5090, loss: 0.03684161975979805, step time: 18.1882381439209ms\r\n",,terminal_output +19330,13732831,"TERMINAL",0,0,"Step 5091, loss: 0.0740358754992485, step time: 18.331527709960938ms\r\n",,terminal_output 
+19331,13732893,"TERMINAL",0,0,"Step 5092, loss: 0.009766141884028912, step time: 18.068790435791016ms\r\n",,terminal_output +19332,13732957,"TERMINAL",0,0,"Step 5093, loss: 0.013173318468034267, step time: 18.038034439086914ms\r\n",,terminal_output +19333,13733020,"TERMINAL",0,0,"Step 5094, loss: 0.014205081388354301, step time: 18.291711807250977ms\r\n",,terminal_output +19334,13733083,"TERMINAL",0,0,"Step 5095, loss: 0.14351098239421844, step time: 17.820358276367188ms\r\n",,terminal_output +19335,13733155,"TERMINAL",0,0,"Step 5096, loss: 0.1734871119260788, step time: 18.006324768066406ms\r\n",,terminal_output +19336,13733207,"TERMINAL",0,0,"Step 5097, loss: 0.015505324117839336, step time: 18.192291259765625ms\r\n",,terminal_output +19337,13733302,"TERMINAL",0,0,"Step 5098, loss: 0.011248835362493992, step time: 18.01300048828125ms\r\n",,terminal_output +19338,13733368,"TERMINAL",0,0,"Step 5099, loss: 0.16287751495838165, step time: 18.012285232543945ms\r\n",,terminal_output +19339,13733428,"TERMINAL",0,0,"Step 5100, loss: 0.014645320363342762, step time: 18.194198608398438ms\r\n",,terminal_output +19340,13733540,"TERMINAL",0,0,"Step 5101, loss: 0.3051011264324188, step time: 18.043041229248047ms\r\nStep 5102, loss: 0.01075430866330862, step time: 18.01586151123047ms\r\n",,terminal_output +19341,13733646,"TERMINAL",0,0,"Step 5103, loss: 0.009815080091357231, step time: 19.446372985839844ms\r\n",,terminal_output +19342,13733659,"TERMINAL",0,0,"Step 5104, loss: 0.06623981893062592, step time: 18.345355987548828ms\r\n",,terminal_output +19343,13733757,"TERMINAL",0,0,"Step 5105, loss: 0.022213920950889587, step time: 18.031835556030273ms\r\n",,terminal_output +19344,13733819,"TERMINAL",0,0,"Step 5106, loss: 0.1285407394170761, step time: 18.35489273071289ms\r\n",,terminal_output +19345,13733883,"TERMINAL",0,0,"Step 5107, loss: 0.014742512255907059, step time: 18.070459365844727ms\r\n",,terminal_output +19346,13733946,"TERMINAL",0,0,"Step 5108, loss: 0.030433379113674164, step time: 18.042802810668945ms\r\n",,terminal_output +19347,13734007,"TERMINAL",0,0,"Step 5109, loss: 0.010118755511939526, step time: 18.273353576660156ms\r\n",,terminal_output +19348,13734069,"TERMINAL",0,0,"Step 5110, loss: 0.04958736523985863, step time: 18.02992820739746ms\r\n",,terminal_output +19349,13734130,"TERMINAL",0,0,"Step 5111, loss: 0.013654563575983047, step time: 18.165111541748047ms\r\n",,terminal_output +19350,13734237,"TERMINAL",0,0,"Step 5112, loss: 0.008754156529903412, step time: 18.19324493408203ms\r\nStep 5113, loss: 0.026334194466471672, step time: 18.037080764770508ms\r\n",,terminal_output +19351,13734302,"TERMINAL",0,0,"Step 5114, loss: 0.05709864944219589, step time: 17.97175407409668ms\r\n",,terminal_output +19352,13734361,"TERMINAL",0,0,"Step 5115, loss: 0.020975010469555855, step time: 18.093585968017578ms\r\n",,terminal_output +19353,13734425,"TERMINAL",0,0,"Step 5116, loss: 0.019382182508707047, step time: 17.937421798706055ms\r\n",,terminal_output +19354,13734487,"TERMINAL",0,0,"Step 5117, loss: 0.04939350113272667, step time: 17.928123474121094ms\r\n",,terminal_output +19355,13734550,"TERMINAL",0,0,"Step 5118, loss: 0.030684873461723328, step time: 18.420696258544922ms\r\n",,terminal_output +19356,13734612,"TERMINAL",0,0,"Step 5119, loss: 0.03252409026026726, step time: 18.021583557128906ms\r\n",,terminal_output +19357,13734677,"TERMINAL",0,0,"Step 5120, loss: 0.028185220435261726, step time: 18.04184913635254ms\r\n",,terminal_output +19358,13734738,"TERMINAL",0,0,"Step 5121, loss: 
0.018450379371643066, step time: 18.33653450012207ms\r\n",,terminal_output +19359,13734836,"TERMINAL",0,0,"Step 5122, loss: 0.025697721168398857, step time: 18.065452575683594ms\r\n",,terminal_output +19360,13734888,"TERMINAL",0,0,"Step 5123, loss: 0.22171151638031006, step time: 17.87853240966797ms\r\n",,terminal_output +19361,13734994,"TERMINAL",0,0,"Step 5124, loss: 0.023223988711833954, step time: 18.404483795166016ms\r\nStep 5125, loss: 0.017771873623132706, step time: 17.818689346313477ms\r\n",,terminal_output +19362,13735055,"TERMINAL",0,0,"Step 5126, loss: 0.016633624210953712, step time: 18.02515983581543ms\r\n",,terminal_output +19363,13735118,"TERMINAL",0,0,"Step 5127, loss: 0.037012334913015366, step time: 18.088102340698242ms\r\n",,terminal_output +19364,13735181,"TERMINAL",0,0,"Step 5128, loss: 0.041114699095487595, step time: 18.19586753845215ms\r\n",,terminal_output +19365,13735233,"TERMINAL",0,0,"Step 5129, loss: 0.02475428581237793, step time: 17.872095108032227ms\r\n",,terminal_output +19366,13735365,"TERMINAL",0,0,"Step 5130, loss: 0.018357552587985992, step time: 18.352746963500977ms\r\nStep 5131, loss: 0.012408431619405746, step time: 17.886877059936523ms\r\n",,terminal_output +19367,13735428,"TERMINAL",0,0,"Step 5132, loss: 0.01242532767355442, step time: 19.763469696044922ms\r\n",,terminal_output +19368,13735492,"TERMINAL",0,0,"Step 5133, loss: 0.020843572914600372, step time: 18.259286880493164ms\r\n",,terminal_output +19369,13735587,"TERMINAL",0,0,"Step 5134, loss: 0.020988894626498222, step time: 18.027305603027344ms\r\n",,terminal_output +19370,13735639,"TERMINAL",0,0,"Step 5135, loss: 0.01413227804005146, step time: 18.005847930908203ms\r\n",,terminal_output +19371,13735745,"TERMINAL",0,0,"Step 5136, loss: 0.07148472219705582, step time: 18.07856559753418ms\r\nStep 5137, loss: 0.05270678922533989, step time: 18.085479736328125ms\r\n",,terminal_output +19372,13735840,"TERMINAL",0,0,"Step 5138, loss: 0.012036861851811409, step time: 17.879486083984375ms\r\n",,terminal_output +19373,13735896,"TERMINAL",0,0,"Step 5139, loss: 0.011394145898520947, step time: 18.300771713256836ms\r\n",,terminal_output +19374,13736001,"TERMINAL",0,0,"Step 5140, loss: 0.029335815459489822, step time: 17.993450164794922ms\r\nStep 5141, loss: 0.040943846106529236, step time: 18.174171447753906ms\r\n",,terminal_output +19375,13736065,"TERMINAL",0,0,"Step 5142, loss: 0.06993645429611206, step time: 18.074750900268555ms\r\n",,terminal_output +19376,13736132,"TERMINAL",0,0,"Step 5143, loss: 0.013982564210891724, step time: 18.00847053527832ms\r\n",,terminal_output +19377,13736192,"TERMINAL",0,0,"Step 5144, loss: 0.002636091550812125, step time: 17.77052879333496ms\r\n",,terminal_output +19378,13736256,"TERMINAL",0,0,"Step 5145, loss: 0.02644926868379116, step time: 18.12005043029785ms\r\n",,terminal_output +19379,13736346,"TERMINAL",0,0,"Step 5146, loss: 0.017215458676218987, step time: 17.876148223876953ms\r\n",,terminal_output +19380,13736400,"TERMINAL",0,0,"Step 5147, loss: 0.017976833507418633, step time: 17.7462100982666ms\r\n",,terminal_output +19381,13736508,"TERMINAL",0,0,"Step 5148, loss: 0.01467815786600113, step time: 18.243789672851562ms\r\nStep 5149, loss: 0.0184103325009346, step time: 17.5933837890625ms\r\n",,terminal_output +19382,13736602,"TERMINAL",0,0,"Step 5150, loss: 0.0104052210226655, step time: 17.82679557800293ms\r\n",,terminal_output +19383,13736708,"TERMINAL",0,0,"Step 5151, loss: 0.009364771656692028, step time: 17.88496971130371ms\r\nStep 5152, loss: 
0.01676999218761921, step time: 17.84515380859375ms\r\n",,terminal_output +19384,13736773,"TERMINAL",0,0,"Step 5153, loss: 0.03826966881752014, step time: 17.589807510375977ms\r\n",,terminal_output +19385,13736836,"TERMINAL",0,0,"Step 5154, loss: 0.3702954947948456, step time: 18.229246139526367ms\r\n",,terminal_output +19386,13736900,"TERMINAL",0,0,"Step 5155, loss: 0.014978704042732716, step time: 17.546415328979492ms\r\n",,terminal_output +19387,13736961,"TERMINAL",0,0,"Step 5156, loss: 0.03589587286114693, step time: 17.953157424926758ms\r\n",,terminal_output +19388,13737026,"TERMINAL",0,0,"Step 5157, loss: 0.021488824859261513, step time: 18.30601692199707ms\r\n",,terminal_output +19389,13737089,"TERMINAL",0,0,"Step 5158, loss: 0.05885899066925049, step time: 18.128156661987305ms\r\n",,terminal_output +19390,13737149,"TERMINAL",0,0,"Step 5159, loss: 0.0647876113653183, step time: 17.879962921142578ms\r\n",,terminal_output +19391,13737202,"TERMINAL",0,0,"Step 5160, loss: 0.023414211347699165, step time: 18.283367156982422ms\r\n",,terminal_output +19392,13737296,"TERMINAL",0,0,"Step 5161, loss: 0.022017154842615128, step time: 17.8377628326416ms\r\n",,terminal_output +19393,13737400,"TERMINAL",0,0,"Step 5162, loss: 0.044707074761390686, step time: 17.872333526611328ms\r\nStep 5163, loss: 0.3982422649860382, step time: 18.058061599731445ms\r\n",,terminal_output +19394,13737465,"TERMINAL",0,0,"Step 5164, loss: 0.011152728460729122, step time: 17.84348487854004ms\r\n",,terminal_output +19395,13737529,"TERMINAL",0,0,"Step 5165, loss: 0.017471928149461746, step time: 17.636537551879883ms\r\n",,terminal_output +19396,13737641,"TERMINAL",0,0,"Step 5166, loss: 0.0440986342728138, step time: 18.247127532958984ms\r\nStep 5167, loss: 0.03355227783322334, step time: 18.01013946533203ms\r\n",,terminal_output +19397,13737706,"TERMINAL",0,0,"Step 5168, loss: 0.05282852426171303, step time: 17.824172973632812ms\r\n",,terminal_output +19398,13737800,"TERMINAL",0,0,"Step 5169, loss: 0.062128931283950806, step time: 18.179655075073242ms\r\n",,terminal_output +19399,13737853,"TERMINAL",0,0,"Step 5170, loss: 0.01904517039656639, step time: 17.899036407470703ms\r\n",,terminal_output +19400,13737951,"TERMINAL",0,0,"Step 5171, loss: 0.2306627482175827, step time: 17.636775970458984ms\r\n",,terminal_output +19401,13738021,"TERMINAL",0,0,"Step 5172, loss: 0.014497206546366215, step time: 18.09382438659668ms\r\nStep 5173, loss: 0.016015972942113876, step time: 17.71998405456543ms\r\n",,terminal_output +19402,13738128,"TERMINAL",0,0,"Step 5174, loss: 0.01683075726032257, step time: 17.772674560546875ms\r\n",,terminal_output +19403,13738216,"TERMINAL",0,0,"Step 5175, loss: 0.03530731424689293, step time: 17.94290542602539ms\r\nStep 5176, loss: 0.03392854705452919, step time: 18.099069595336914ms\r\n",,terminal_output +19404,13738312,"TERMINAL",0,0,"Step 5177, loss: 0.04164551943540573, step time: 17.5321102142334ms\r\n",,terminal_output +19405,13738424,"TERMINAL",0,0,"Step 5178, loss: 0.018109820783138275, step time: 18.25547218322754ms\r\nStep 5179, loss: 0.15618374943733215, step time: 17.691850662231445ms\r\n",,terminal_output +19406,13738487,"TERMINAL",0,0,"Step 5180, loss: 0.03401503339409828, step time: 17.93646812438965ms\r\n",,terminal_output +19407,13738551,"TERMINAL",0,0,"Step 5181, loss: 0.032283853739500046, step time: 17.836809158325195ms\r\n",,terminal_output +19408,13738634,"TERMINAL",0,0,"Step 5182, loss: 0.012270411476492882, step time: 17.957687377929688ms\r\n",,terminal_output 
+19409,13738687,"TERMINAL",0,0,"Step 5183, loss: 0.01743411272764206, step time: 17.747879028320312ms\r\n",,terminal_output +19410,13738791,"TERMINAL",0,0,"Step 5184, loss: 0.07099767029285431, step time: 18.143415451049805ms\r\nStep 5185, loss: 0.04388427734375, step time: 17.653226852416992ms\r\n",,terminal_output +19411,13738856,"TERMINAL",0,0,"Step 5186, loss: 0.13145044445991516, step time: 17.88020133972168ms\r\n",,terminal_output +19412,13738918,"TERMINAL",0,0,"Step 5187, loss: 0.013050459325313568, step time: 17.798185348510742ms\r\n",,terminal_output +19413,13738978,"TERMINAL",0,0,"Step 5188, loss: 0.01379175391048193, step time: 18.051862716674805ms\r\n",,terminal_output +19414,13739042,"TERMINAL",0,0,"Step 5189, loss: 0.016318874433636665, step time: 17.65275001525879ms\r\n",,terminal_output +19415,13739105,"TERMINAL",0,0,"Step 5190, loss: 0.15976421535015106, step time: 19.24586296081543ms\r\n",,terminal_output +19416,13739172,"TERMINAL",0,0,"Step 5191, loss: 0.055140022188425064, step time: 17.671823501586914ms\r\n",,terminal_output +19417,13739223,"TERMINAL",0,0,"Step 5192, loss: 0.03146800771355629, step time: 17.988204956054688ms\r\n",,terminal_output +19418,13739317,"TERMINAL",0,0,"Step 5193, loss: 0.024279747158288956, step time: 17.7762508392334ms\r\n",,terminal_output +19419,13739375,"TERMINAL",0,0,"Step 5194, loss: 0.013073572888970375, step time: 17.92001724243164ms\r\n",,terminal_output +19420,13739450,"TERMINAL",0,0,"Step 5195, loss: 0.023028653115034103, step time: 17.63772964477539ms\r\n",,terminal_output +19421,13739513,"TERMINAL",0,0,"Step 5196, loss: 0.011361886747181416, step time: 18.121719360351562ms\r\n",,terminal_output +19422,13739571,"TERMINAL",0,0,"Step 5197, loss: 0.038816727697849274, step time: 17.791271209716797ms\r\n",,terminal_output +19423,13739631,"TERMINAL",0,0,"Step 5198, loss: 0.03422614187002182, step time: 17.813444137573242ms\r\n",,terminal_output +19424,13739692,"TERMINAL",0,0,"Step 5199, loss: 0.021087951958179474, step time: 17.79007911682129ms\r\n",,terminal_output +19425,13739755,"TERMINAL",0,0,"Step 5200, loss: 0.04928404092788696, step time: 17.882108688354492ms\r\n",,terminal_output +19426,13739818,"TERMINAL",0,0,"Step 5201, loss: 0.01806257851421833, step time: 17.493724822998047ms\r\n",,terminal_output +19427,13739880,"TERMINAL",0,0,"Step 5202, loss: 0.011230085045099258, step time: 18.088340759277344ms\r\n",,terminal_output +19428,13739944,"TERMINAL",0,0,"Step 5203, loss: 0.026246745139360428, step time: 17.699241638183594ms\r\n",,terminal_output +19429,13740008,"TERMINAL",0,0,"Step 5204, loss: 0.022860752418637276, step time: 17.75050163269043ms\r\n",,terminal_output +19430,13740072,"TERMINAL",0,0,"Step 5205, loss: 0.006772558204829693, step time: 17.826080322265625ms\r\n",,terminal_output +19431,13740143,"TERMINAL",0,0,"Step 5206, loss: 0.016066065058112144, step time: 17.832279205322266ms\r\n",,terminal_output +19432,13740197,"TERMINAL",0,0,"Step 5207, loss: 0.011478920467197895, step time: 17.612934112548828ms\r\n",,terminal_output +19433,13740263,"TERMINAL",0,0,"Step 5208, loss: 0.0364815816283226, step time: 18.070220947265625ms\r\n",,terminal_output +19434,13740324,"TERMINAL",0,0,"Step 5209, loss: 0.01633179560303688, step time: 17.624616622924805ms\r\n",,terminal_output +19435,13740386,"TERMINAL",0,0,"Step 5210, loss: 0.022720880806446075, step time: 17.89546012878418ms\r\n",,terminal_output +19436,13740447,"TERMINAL",0,0,"Step 5211, loss: 0.007627464365214109, step time: 17.86065101623535ms\r\n",,terminal_output 
+19437,13740509,"TERMINAL",0,0,"Step 5212, loss: 0.009981177747249603, step time: 17.882108688354492ms\r\n",,terminal_output +19438,13740616,"TERMINAL",0,0,"Step 5213, loss: 0.1042935848236084, step time: 17.56572723388672ms\r\nStep 5214, loss: 0.04962460324168205, step time: 18.118619918823242ms\r\n",,terminal_output +19439,13740708,"TERMINAL",0,0,"Step 5215, loss: 0.04705379158258438, step time: 17.592191696166992ms\r\n",,terminal_output +19440,13740763,"TERMINAL",0,0,"Step 5216, loss: 0.05307561159133911, step time: 17.86494255065918ms\r\n",,terminal_output +19441,13740868,"TERMINAL",0,0,"Step 5217, loss: 0.013377074152231216, step time: 17.75336265563965ms\r\nStep 5218, loss: 0.013270573690533638, step time: 17.816543579101562ms\r\n",,terminal_output +19442,13740928,"TERMINAL",0,0,"Step 5219, loss: 0.03137979656457901, step time: 17.515182495117188ms\r\n",,terminal_output +19443,13740995,"TERMINAL",0,0,"Step 5220, loss: 0.016490694135427475, step time: 18.269777297973633ms\r\n",,terminal_output +19444,13741058,"TERMINAL",0,0,"Step 5221, loss: 0.016416259109973907, step time: 17.528533935546875ms\r\n",,terminal_output +19445,13741122,"TERMINAL",0,0,"Step 5222, loss: 0.12171168625354767, step time: 17.902851104736328ms\r\n",,terminal_output +19446,13741249,"TERMINAL",0,0,"Step 5223, loss: 0.023301493376493454, step time: 17.884254455566406ms\r\nStep 5224, loss: 0.013006819412112236, step time: 18.10932159423828ms\r\n",,terminal_output +19447,13741315,"TERMINAL",0,0,"Step 5225, loss: 0.024419495835900307, step time: 17.65275001525879ms\r\n",,terminal_output +19448,13741427,"TERMINAL",0,0,"Step 5226, loss: 0.014733772724866867, step time: 18.102645874023438ms\r\n",,terminal_output +19449,13741492,"TERMINAL",0,0,"Step 5227, loss: 0.012157071381807327, step time: 18.991708755493164ms\r\n",,terminal_output +19450,13741555,"TERMINAL",0,0,"Step 5228, loss: 0.025646550580859184, step time: 18.077611923217773ms\r\n",,terminal_output +19451,13741618,"TERMINAL",0,0,"Step 5229, loss: 0.07385333627462387, step time: 18.122196197509766ms\r\n",,terminal_output +19452,13741681,"TERMINAL",0,0,"Step 5230, loss: 0.011901508085429668, step time: 17.958402633666992ms\r\n",,terminal_output +19453,13741787,"TERMINAL",0,0,"Step 5231, loss: 0.012531355023384094, step time: 17.549753189086914ms\r\nStep 5232, loss: 0.01067714486271143, step time: 18.09382438659668ms\r\n",,terminal_output +19454,13741850,"TERMINAL",0,0,"Step 5233, loss: 0.02087266556918621, step time: 17.757654190063477ms\r\n",,terminal_output +19455,13741912,"TERMINAL",0,0,"Step 5234, loss: 0.03146607428789139, step time: 17.781734466552734ms\r\n",,terminal_output +19456,13741976,"TERMINAL",0,0,"Step 5235, loss: 0.01671520061790943, step time: 17.78554916381836ms\r\n",,terminal_output +19457,13742040,"TERMINAL",0,0,"Step 5236, loss: 0.14215236902236938, step time: 17.904281616210938ms\r\n",,terminal_output +19458,13742105,"TERMINAL",0,0,"Step 5237, loss: 0.02536696568131447, step time: 17.680644989013672ms\r\n",,terminal_output +19459,13742412,"TERMINAL",0,0,"Step 5238, loss: 0.02009993977844715, step time: 296.0519790649414ms\r\n",,terminal_output +19460,13742475,"TERMINAL",0,0,"Step 5239, loss: 0.03254863992333412, step time: 25.538921356201172ms\r\n",,terminal_output +19461,13742538,"TERMINAL",0,0,"Step 5240, loss: 0.03387363627552986, step time: 20.21932601928711ms\r\n",,terminal_output +19462,13742635,"TERMINAL",0,0,"Step 5241, loss: 0.012618103995919228, step time: 18.666982650756836ms\r\n",,terminal_output 
+19463,13742717,"TERMINAL",0,0,"Step 5242, loss: 0.28676745295524597, step time: 18.177270889282227ms\r\nStep 5243, loss: 0.017769023776054382, step time: 17.81487464904785ms\r\n",,terminal_output +19464,13742842,"TERMINAL",0,0,"Step 5244, loss: 0.025447972118854523, step time: 18.392324447631836ms\r\nStep 5245, loss: 0.02164282277226448, step time: 17.90595054626465ms\r\n",,terminal_output +19465,13742906,"TERMINAL",0,0,"Step 5246, loss: 0.11545497924089432, step time: 17.935752868652344ms\r\n",,terminal_output +19466,13743007,"TERMINAL",0,0,"Step 5247, loss: 0.020192855969071388, step time: 18.00251007080078ms\r\n",,terminal_output +19467,13743059,"TERMINAL",0,0,"Step 5248, loss: 0.012315794825553894, step time: 17.90142059326172ms\r\n",,terminal_output +19468,13743164,"TERMINAL",0,0,"Step 5249, loss: 0.01013458427041769, step time: 17.690658569335938ms\r\nStep 5250, loss: 0.20778138935565948, step time: 18.35775375366211ms\r\n",,terminal_output +19469,13743231,"TERMINAL",0,0,"Step 5251, loss: 0.0097816102206707, step time: 17.72451400756836ms\r\n",,terminal_output +19470,13743289,"TERMINAL",0,0,"Step 5252, loss: 0.013187156990170479, step time: 17.930984497070312ms\r\n",,terminal_output +19471,13743352,"TERMINAL",0,0,"Step 5253, loss: 0.010484007187187672, step time: 18.024682998657227ms\r\n",,terminal_output +19472,13743451,"TERMINAL",0,0,"Step 5254, loss: 0.009769274853169918, step time: 17.931461334228516ms\r\n",,terminal_output +19473,13743600,"TERMINAL",0,0,"Step 5255, loss: 0.028378190472722054, step time: 17.746686935424805ms\r\n",,terminal_output +19474,13743671,"TERMINAL",0,0,"Step 5256, loss: 0.015595020726323128, step time: 18.4175968170166ms\r\nStep 5257, loss: 0.02049844153225422, step time: 17.640113830566406ms\r\nStep 5258, loss: 0.1439659595489502, step time: 18.16725730895996ms\r\n",,terminal_output +19475,13743733,"TERMINAL",0,0,"Step 5259, loss: 0.016060490161180496, step time: 18.3260440826416ms\r\n",,terminal_output +19476,13743838,"TERMINAL",0,0,"Step 5260, loss: 0.012799966149032116, step time: 18.085956573486328ms\r\n",,terminal_output +19477,13743893,"TERMINAL",0,0,"Step 5261, loss: 0.02549861930310726, step time: 17.540931701660156ms\r\n",,terminal_output +19478,13744002,"TERMINAL",0,0,"Step 5262, loss: 0.0090017756447196, step time: 18.164634704589844ms\r\nStep 5263, loss: 0.011455581523478031, step time: 17.55380630493164ms\r\n",,terminal_output +19479,13744062,"TERMINAL",0,0,"Step 5264, loss: 0.052088573575019836, step time: 17.84491539001465ms\r\n",,terminal_output +19480,13744124,"TERMINAL",0,0,"Step 5265, loss: 0.02797313965857029, step time: 18.114089965820312ms\r\n",,terminal_output +19481,13744184,"TERMINAL",0,0,"Step 5266, loss: 0.024682054296135902, step time: 17.87734031677246ms\r\n",,terminal_output +19482,13744243,"TERMINAL",0,0,"Step 5267, loss: 0.01547474879771471, step time: 17.731904983520508ms\r\n",,terminal_output +19483,13744308,"TERMINAL",0,0,"Step 5268, loss: 0.0330561138689518, step time: 18.39756965637207ms\r\n",,terminal_output +19484,13744371,"TERMINAL",0,0,"Step 5269, loss: 0.013785220682621002, step time: 17.603158950805664ms\r\n",,terminal_output +19485,13744438,"TERMINAL",0,0,"Step 5270, loss: 0.03633062168955803, step time: 17.901897430419922ms\r\n",,terminal_output +19486,13744500,"TERMINAL",0,0,"Step 5271, loss: 0.010101266205310822, step time: 17.823457717895508ms\r\n",,terminal_output +19487,13744563,"TERMINAL",0,0,"Step 5272, loss: 0.00793937686830759, step time: 17.841339111328125ms\r\n",,terminal_output 
+19488,13744630,"TERMINAL",0,0,"Step 5273, loss: 0.020131221041083336, step time: 17.468929290771484ms\r\n",,terminal_output +19489,13744689,"TERMINAL",0,0,"Step 5274, loss: 0.008676158264279366, step time: 18.143892288208008ms\r\n",,terminal_output +19490,13744754,"TERMINAL",0,0,"Step 5275, loss: 0.010933026671409607, step time: 17.59624481201172ms\r\n",,terminal_output +19491,13744818,"TERMINAL",0,0,"Step 5276, loss: 0.009537514299154282, step time: 17.85421371459961ms\r\n",,terminal_output +19492,13744881,"TERMINAL",0,0,"Step 5277, loss: 0.03378339856863022, step time: 17.83013343811035ms\r\n",,terminal_output +19493,13744943,"TERMINAL",0,0,"Step 5278, loss: 0.010178656317293644, step time: 17.675161361694336ms\r\n",,terminal_output +19494,13745007,"TERMINAL",0,0,"Step 5279, loss: 0.021905092522501945, step time: 17.549753189086914ms\r\n",,terminal_output +19495,13745078,"TERMINAL",0,0,"Step 5280, loss: 0.013526096940040588, step time: 18.241167068481445ms\r\n",,terminal_output +19496,13745165,"TERMINAL",0,0,"Step 5281, loss: 0.009605872444808483, step time: 17.678260803222656ms\r\n",,terminal_output +19497,13745219,"TERMINAL",0,0,"Step 5282, loss: 0.034846384078264236, step time: 17.819643020629883ms\r\n",,terminal_output +19498,13745313,"TERMINAL",0,0,"Step 5283, loss: 0.030782243236899376, step time: 18.08762550354004ms\r\n",,terminal_output +19499,13745367,"TERMINAL",0,0,"Step 5284, loss: 0.008965284563601017, step time: 17.872333526611328ms\r\n",,terminal_output +19500,13745418,"TERMINAL",0,0,"Step 5285, loss: 0.043406892567873, step time: 17.704248428344727ms\r\n",,terminal_output +19501,13745522,"TERMINAL",0,0,"Step 5286, loss: 0.014402231201529503, step time: 18.071413040161133ms\r\nStep 5287, loss: 0.02372872829437256, step time: 17.731189727783203ms\r\n",,terminal_output +19502,13745592,"TERMINAL",0,0,"Step 5288, loss: 0.007744533941149712, step time: 17.927169799804688ms\r\n",,terminal_output +19503,13745655,"TERMINAL",0,0,"Step 5289, loss: 0.011331171728670597, step time: 17.916202545166016ms\r\n",,terminal_output +19504,13745718,"TERMINAL",0,0,"Step 5290, loss: 0.01516174990683794, step time: 19.097328186035156ms\r\n",,terminal_output +19505,13745785,"TERMINAL",0,0,"Step 5291, loss: 0.006163225043565035, step time: 17.568111419677734ms\r\n",,terminal_output +19506,13745875,"TERMINAL",0,0,"Step 5292, loss: 0.0670885294675827, step time: 18.154144287109375ms\r\n",,terminal_output +19507,13745981,"TERMINAL",0,0,"Step 5293, loss: 0.018532464280724525, step time: 17.709732055664062ms\r\nStep 5294, loss: 0.01800740882754326, step time: 18.003463745117188ms\r\n",,terminal_output +19508,13746046,"TERMINAL",0,0,"Step 5295, loss: 0.01330488733947277, step time: 18.01919937133789ms\r\n",,terminal_output +19509,13746162,"TERMINAL",0,0,"Step 5296, loss: 0.008818920701742172, step time: 18.086671829223633ms\r\nStep 5297, loss: 0.12934894859790802, step time: 17.55809783935547ms\r\n",,terminal_output +19510,13746214,"TERMINAL",0,0,"Step 5298, loss: 0.016173793002963066, step time: 18.241405487060547ms\r\n",,terminal_output +19511,13746311,"TERMINAL",0,0,"Step 5299, loss: 0.012550389394164085, step time: 17.613649368286133ms\r\n",,terminal_output +19512,13746375,"TERMINAL",0,0,"Step 5300, loss: 0.05579081550240517, step time: 18.009424209594727ms\r\n",,terminal_output +19513,13746431,"TERMINAL",0,0,"Step 5301, loss: 0.01665777899324894, step time: 18.27263832092285ms\r\n",,terminal_output +19514,13746492,"TERMINAL",0,0,"Step 5302, loss: 0.006852281279861927, step time: 
17.838716506958008ms\r\n",,terminal_output +19515,13746556,"TERMINAL",0,0,"Step 5303, loss: 0.011619647964835167, step time: 17.748594284057617ms\r\n",,terminal_output +19516,13746617,"TERMINAL",0,0,"Step 5304, loss: 0.043497033417224884, step time: 18.179655075073242ms\r\n",,terminal_output +19517,13746680,"TERMINAL",0,0,"Step 5305, loss: 0.008553929626941681, step time: 17.657041549682617ms\r\n",,terminal_output +19518,13746751,"TERMINAL",0,0,"Step 5306, loss: 0.020251793786883354, step time: 17.886877059936523ms\r\n",,terminal_output +19519,13746801,"TERMINAL",0,0,"Step 5307, loss: 0.034627970308065414, step time: 18.023967742919922ms\r\n",,terminal_output +19520,13746915,"TERMINAL",0,0,"Step 5308, loss: 0.011824481189250946, step time: 17.838239669799805ms\r\nStep 5309, loss: 0.02790432795882225, step time: 17.68326759338379ms\r\n",,terminal_output +19521,13747024,"TERMINAL",0,0,"Step 5310, loss: 0.013224964961409569, step time: 18.100976943969727ms\r\n",,terminal_output +19522,13747077,"TERMINAL",0,0,"Step 5311, loss: 0.011732314713299274, step time: 17.62700080871582ms\r\n",,terminal_output +19523,13747184,"TERMINAL",0,0,"Step 5312, loss: 0.008425184525549412, step time: 17.87710189819336ms\r\nStep 5313, loss: 0.0038520973175764084, step time: 17.9445743560791ms\r\n",,terminal_output +19524,13747249,"TERMINAL",0,0,"Step 5314, loss: 0.0076780072413384914, step time: 18.006324768066406ms\r\n",,terminal_output +19525,13747310,"TERMINAL",0,0,"Step 5315, loss: 0.014136557467281818, step time: 17.724990844726562ms\r\n",,terminal_output +19526,13747378,"TERMINAL",0,0,"Step 5316, loss: 0.015120262280106544, step time: 18.134117126464844ms\r\n",,terminal_output +19527,13747434,"TERMINAL",0,0,"Step 5317, loss: 0.020436344668269157, step time: 17.79341697692871ms\r\n",,terminal_output +19528,13747497,"TERMINAL",0,0,"Step 5318, loss: 0.021257849410176277, step time: 18.04971694946289ms\r\n",,terminal_output +19529,13747560,"TERMINAL",0,0,"Step 5319, loss: 0.010843860916793346, step time: 18.067359924316406ms\r\n",,terminal_output +19530,13747632,"TERMINAL",0,0,"Step 5320, loss: 0.012197970412671566, step time: 17.973899841308594ms\r\n",,terminal_output +19531,13747686,"TERMINAL",0,0,"Step 5321, loss: 0.00975796952843666, step time: 17.543315887451172ms\r\n",,terminal_output +19532,13747748,"TERMINAL",0,0,"Step 5322, loss: 0.013801168650388718, step time: 18.27836036682129ms\r\n",,terminal_output +19533,13747842,"TERMINAL",0,0,"Step 5323, loss: 0.13211041688919067, step time: 17.722129821777344ms\r\n",,terminal_output +19534,13747893,"TERMINAL",0,0,"Step 5324, loss: 0.02194143459200859, step time: 17.773866653442383ms\r\n",,terminal_output +19535,13747945,"TERMINAL",0,0,"Step 5325, loss: 0.01054996345192194, step time: 18.1424617767334ms\r\n",,terminal_output +19536,13748040,"TERMINAL",0,0,"Step 5326, loss: 0.03766516223549843, step time: 17.853260040283203ms\r\n",,terminal_output +19537,13748093,"TERMINAL",0,0,"Step 5327, loss: 0.030767828226089478, step time: 17.48800277709961ms\r\n",,terminal_output +19538,13748203,"TERMINAL",0,0,"Step 5328, loss: 0.013411350548267365, step time: 18.094778060913086ms\r\nStep 5329, loss: 0.021024614572525024, step time: 17.60077476501465ms\r\n",,terminal_output +19539,13748267,"TERMINAL",0,0,"Step 5330, loss: 0.0718965008854866, step time: 29.73461151123047ms\r\n",,terminal_output +19540,13748331,"TERMINAL",0,0,"Step 5331, loss: 0.015439392067492008, step time: 22.87888526916504ms\r\n",,terminal_output +19541,13748394,"TERMINAL",0,0,"Step 5332, loss: 
0.019217927008867264, step time: 19.82569694519043ms\r\n",,terminal_output +19542,13748464,"TERMINAL",0,0,"Step 5333, loss: 0.022172601893544197, step time: 19.36936378479004ms\r\n",,terminal_output +19543,13748523,"TERMINAL",0,0,"Step 5334, loss: 0.0178520530462265, step time: 28.19538116455078ms\r\n",,terminal_output +19544,13748609,"TERMINAL",0,0,"Step 5335, loss: 0.023520711809396744, step time: 18.50128173828125ms\r\n",,terminal_output +19545,13748686,"TERMINAL",0,0,"Step 5336, loss: 0.010747777298092842, step time: 18.237590789794922ms\r\n",,terminal_output +19546,13748715,"TERMINAL",0,0,"Step 5337, loss: 0.23279206454753876, step time: 18.129348754882812ms\r\n",,terminal_output +19547,13748808,"TERMINAL",0,0,"Step 5338, loss: 0.021748291328549385, step time: 17.83132553100586ms\r\n",,terminal_output +19548,13748927,"TERMINAL",0,0,"Step 5339, loss: 0.08491604775190353, step time: 19.729137420654297ms\r\nStep 5340, loss: 0.012086068280041218, step time: 18.409252166748047ms\r\n",,terminal_output +19549,13749006,"genie.py",3031,0,"",python,selection_mouse +19550,13749040,"TERMINAL",0,0,"Step 5341, loss: 0.01999293826520443, step time: 17.624378204345703ms\r\nStep 5342, loss: 0.023341046646237373, step time: 18.025636672973633ms\r\n",,terminal_output +19551,13749112,"TERMINAL",0,0,"Step 5343, loss: 0.011366014368832111, step time: 18.05710792541504ms\r\n",,terminal_output +19552,13749185,"TERMINAL",0,0,"Step 5344, loss: 0.011960928328335285, step time: 17.834901809692383ms\r\n",,terminal_output +19553,13749251,"TERMINAL",0,0,"Step 5345, loss: 0.014798193238675594, step time: 17.835140228271484ms\r\n",,terminal_output +19554,13749311,"TERMINAL",0,0,"Step 5346, loss: 0.031282052397727966, step time: 18.16844940185547ms\r\n",,terminal_output +19555,13749375,"TERMINAL",0,0,"Step 5347, loss: 0.039060525596141815, step time: 17.723798751831055ms\r\n",,terminal_output +19556,13749438,"TERMINAL",0,0,"Step 5348, loss: 0.030854666605591774, step time: 17.746448516845703ms\r\n",,terminal_output +19557,13749531,"TERMINAL",0,0,"Step 5349, loss: 0.04591314122080803, step time: 17.989635467529297ms\r\n",,terminal_output +19558,13749582,"TERMINAL",0,0,"Step 5350, loss: 0.011372853070497513, step time: 17.82512664794922ms\r\n",,terminal_output +19559,13749634,"TERMINAL",0,0,"Step 5351, loss: 0.04783255606889725, step time: 17.54927635192871ms\r\n",,terminal_output +19560,13749771,"TERMINAL",0,0,"Step 5352, loss: 0.018948392942547798, step time: 18.042564392089844ms\r\nStep 5353, loss: 0.034129947423934937, step time: 17.606019973754883ms\r\n",,terminal_output +19561,13749819,"TERMINAL",0,0,"Step 5354, loss: 0.03738715872168541, step time: 17.818450927734375ms\r\n",,terminal_output +19562,13749978,"TERMINAL",0,0,"Step 5355, loss: 0.027546213939785957, step time: 17.989158630371094ms\r\n",,terminal_output +19563,13750013,"TERMINAL",0,0,"Step 5356, loss: 0.019809825345873833, step time: 17.676830291748047ms\r\nStep 5357, loss: 0.014915336854755878, step time: 17.60697364807129ms\r\n",,terminal_output +19564,13750077,"TERMINAL",0,0,"Step 5358, loss: 0.015579327940940857, step time: 18.01609992980957ms\r\n",,terminal_output +19565,13750137,"TERMINAL",0,0,"Step 5359, loss: 0.2558949291706085, step time: 17.549514770507812ms\r\n",,terminal_output +19566,13750245,"TERMINAL",0,0,"Step 5360, loss: 0.01323616597801447, step time: 18.009185791015625ms\r\nStep 5361, loss: 0.01357141975313425, step time: 18.1732177734375ms\r\n",,terminal_output +19567,13750308,"TERMINAL",0,0,"Step 5362, loss: 0.015369600616395473, 
step time: 17.951250076293945ms\r\n",,terminal_output +19568,13750367,"TERMINAL",0,0,"Step 5363, loss: 0.022993803024291992, step time: 17.537593841552734ms\r\n",,terminal_output +19569,13750430,"TERMINAL",0,0,"Step 5364, loss: 0.03739353269338608, step time: 18.04208755493164ms\r\n",,terminal_output +19570,13750537,"TERMINAL",0,0,"Step 5365, loss: 0.025252601131796837, step time: 17.647266387939453ms\r\n",,terminal_output +19571,13750598,"TERMINAL",0,0,"Step 5366, loss: 0.012517636641860008, step time: 17.769813537597656ms\r\n",,terminal_output +19572,13750661,"TERMINAL",0,0,"Step 5367, loss: 0.03228290379047394, step time: 17.90642738342285ms\r\n",,terminal_output +19573,13750720,"TERMINAL",0,0,"Step 5368, loss: 0.023758819326758385, step time: 17.800331115722656ms\r\n",,terminal_output +19574,13750782,"TERMINAL",0,0,"Step 5369, loss: 0.013680624775588512, step time: 17.5931453704834ms\r\n",,terminal_output +19575,13750893,"TERMINAL",0,0,"Step 5370, loss: 0.03547028452157974, step time: 18.26620101928711ms\r\nStep 5371, loss: 0.01607070304453373, step time: 17.61007308959961ms\r\n",,terminal_output +19576,13750955,"TERMINAL",0,0,"Step 5372, loss: 0.011005527339875698, step time: 17.78721809387207ms\r\n",,terminal_output +19577,13751068,"TERMINAL",0,0,"Step 5373, loss: 0.016995636746287346, step time: 17.86041259765625ms\r\nStep 5374, loss: 0.027417991310358047, step time: 17.89569854736328ms\r\n",,terminal_output +19578,13751201,"TERMINAL",0,0,"Step 5375, loss: 0.015090705826878548, step time: 22.311925888061523ms\r\nStep 5376, loss: 0.020426493138074875, step time: 19.59061622619629ms\r\n",,terminal_output +19579,13751264,"TERMINAL",0,0,"Step 5377, loss: 0.023830536752939224, step time: 18.125295639038086ms\r\n",,terminal_output +19580,13751388,"TERMINAL",0,0,"Step 5378, loss: 0.00836919341236353, step time: 17.994403839111328ms\r\nStep 5379, loss: 0.018198709934949875, step time: 18.2039737701416ms\r\n",,terminal_output +19581,13751451,"TERMINAL",0,0,"Step 5380, loss: 0.03744989633560181, step time: 18.010377883911133ms\r\n",,terminal_output +19582,13751515,"TERMINAL",0,0,"Step 5381, loss: 0.017616089433431625, step time: 18.065929412841797ms\r\n",,terminal_output +19583,13751576,"TERMINAL",0,0,"Step 5382, loss: 0.007054446265101433, step time: 17.941951751708984ms\r\n",,terminal_output +19584,13751638,"TERMINAL",0,0,"Step 5383, loss: 0.012392787262797356, step time: 18.1581974029541ms\r\n",,terminal_output +19585,13751704,"TERMINAL",0,0,"Step 5384, loss: 0.015600663609802723, step time: 18.32890510559082ms\r\n",,terminal_output +19586,13751767,"TERMINAL",0,0,"Step 5385, loss: 0.017624743282794952, step time: 18.440961837768555ms\r\n",,terminal_output +19587,13751830,"TERMINAL",0,0,"Step 5386, loss: 0.008235535584390163, step time: 18.379688262939453ms\r\n",,terminal_output +19588,13751895,"TERMINAL",0,0,"Step 5387, loss: 0.27065137028694153, step time: 20.082950592041016ms\r\n",,terminal_output +19589,13751958,"TERMINAL",0,0,"Step 5388, loss: 0.009602942503988743, step time: 19.16980743408203ms\r\n",,terminal_output +19590,13752022,"TERMINAL",0,0,"Step 5389, loss: 0.004343512933701277, step time: 18.39756965637207ms\r\n",,terminal_output +19591,13752165,"TERMINAL",0,0,"Step 5390, loss: 0.05632614716887474, step time: 21.821260452270508ms\r\nStep 5391, loss: 0.11313087493181229, step time: 18.447399139404297ms\r\n",,terminal_output +19592,13752215,"TERMINAL",0,0,"Step 5392, loss: 0.021542688831686974, step time: 17.571210861206055ms\r\n",,terminal_output 
+19593,13752280,"TERMINAL",0,0,"Step 5393, loss: 0.03444012627005577, step time: 18.14413070678711ms\r\n",,terminal_output +19594,13752384,"TERMINAL",0,0,"Step 5394, loss: 0.012236193753778934, step time: 18.351316452026367ms\r\n",,terminal_output +19595,13752435,"TERMINAL",0,0,"Step 5395, loss: 0.021131806075572968, step time: 17.80080795288086ms\r\n",,terminal_output +19596,13752528,"TERMINAL",0,0,"Step 5396, loss: 0.015604428946971893, step time: 17.93050765991211ms\r\n",,terminal_output +19597,13752799,"TERMINAL",0,0,"Step 5397, loss: 0.012724628672003746, step time: 325.3288269042969ms\r\n",,terminal_output +19598,13752865,"TERMINAL",0,0,"Step 5398, loss: 0.016530826687812805, step time: 25.613784790039062ms\r\n",,terminal_output +19599,13752929,"TERMINAL",0,0,"Step 5399, loss: 0.015674395486712456, step time: 20.681142807006836ms\r\n",,terminal_output +19600,13752995,"TERMINAL",0,0,"Step 5400, loss: 0.06615748256444931, step time: 18.79119873046875ms\r\n",,terminal_output +19601,13753059,"TERMINAL",0,0,"Step 5401, loss: 0.031902920454740524, step time: 18.403291702270508ms\r\n",,terminal_output +19602,13753129,"TERMINAL",0,0,"Step 5402, loss: 0.010993948206305504, step time: 18.13220977783203ms\r\n",,terminal_output +19603,13753190,"TERMINAL",0,0,"Step 5403, loss: 0.0447332039475441, step time: 18.287181854248047ms\r\n",,terminal_output +19604,13753261,"TERMINAL",0,0,"Step 5404, loss: 0.01721804402768612, step time: 17.824411392211914ms\r\n",,terminal_output +19605,13753314,"TERMINAL",0,0,"Step 5405, loss: 0.01766921766102314, step time: 18.103361129760742ms\r\n",,terminal_output +19606,13753378,"TERMINAL",0,0,"Step 5406, loss: 0.02679062820971012, step time: 18.452167510986328ms\r\n",,terminal_output +19607,13753478,"TERMINAL",0,0,"Step 5407, loss: 0.03504477068781853, step time: 18.00227165222168ms\r\n",,terminal_output +19608,13753530,"TERMINAL",0,0,"Step 5408, loss: 0.025219477713108063, step time: 18.110036849975586ms\r\n",,terminal_output +19609,13753646,"TERMINAL",0,0,"Step 5409, loss: 0.009144973941147327, step time: 18.459320068359375ms\r\nStep 5410, loss: 0.011132639832794666, step time: 17.960071563720703ms\r\n",,terminal_output +19610,13753698,"TERMINAL",0,0,"Step 5411, loss: 0.007962514646351337, step time: 18.15009117126465ms\r\n",,terminal_output +19611,13753791,"TERMINAL",0,0,"Step 5412, loss: 0.06332582980394363, step time: 18.013715744018555ms\r\n",,terminal_output +19612,13753895,"TERMINAL",0,0,"Step 5413, loss: 0.00834096223115921, step time: 18.00537109375ms\r\nStep 5414, loss: 0.01779824309051037, step time: 17.773151397705078ms\r\n",,terminal_output +19613,13753987,"TERMINAL",0,0,"Step 5415, loss: 0.024914586916565895, step time: 18.372535705566406ms\r\n",,terminal_output +19614,13754039,"TERMINAL",0,0,"Step 5416, loss: 0.012319792993366718, step time: 17.765522003173828ms\r\n",,terminal_output +19615,13754134,"TERMINAL",0,0,"Step 5417, loss: 0.0277470164000988, step time: 17.83919334411621ms\r\n",,terminal_output +19616,13754209,"TERMINAL",0,0,"Step 5418, loss: 0.02463287115097046, step time: 18.193960189819336ms\r\nStep 5419, loss: 0.018231317400932312, step time: 18.082857131958008ms\r\n",,terminal_output +19617,13754333,"TERMINAL",0,0,"Step 5420, loss: 0.007272712886333466, step time: 17.572879791259766ms\r\n",,terminal_output +19618,13754391,"TERMINAL",0,0,"Step 5421, loss: 0.01721245050430298, step time: 18.31221580505371ms\r\n",,terminal_output +19619,13754451,"TERMINAL",0,0,"Step 5422, loss: 0.011507921852171421, step time: 
17.86494255065918ms\r\n",,terminal_output +19620,13754494,"TERMINAL",0,0,"Step 5423, loss: 0.013273520395159721, step time: 17.92287826538086ms\r\n",,terminal_output +19621,13754550,"TERMINAL",0,0,"Step 5424, loss: 0.08862084895372391, step time: 18.08476448059082ms\r\n",,terminal_output +19622,13754656,"TERMINAL",0,0,"Step 5425, loss: 0.010515321046113968, step time: 18.07999610900879ms\r\nStep 5426, loss: 0.008796758018434048, step time: 17.798423767089844ms\r\n",,terminal_output +19623,13754785,"TERMINAL",0,0,"Step 5427, loss: 0.08577404916286469, step time: 18.14436912536621ms\r\nStep 5428, loss: 0.02133229933679104, step time: 17.703771591186523ms\r\n",,terminal_output +19624,13754853,"TERMINAL",0,0,"Step 5429, loss: 0.01331783551722765, step time: 18.413305282592773ms\r\n",,terminal_output +19625,13754941,"TERMINAL",0,0,"Step 5430, loss: 0.01191044133156538, step time: 18.30458641052246ms\r\n",,terminal_output +19626,13755051,"TERMINAL",0,0,"Step 5431, loss: 0.1683548539876938, step time: 18.018007278442383ms\r\nStep 5432, loss: 0.025226326659321785, step time: 17.817974090576172ms\r\n",,terminal_output +19627,13755112,"TERMINAL",0,0,"Step 5433, loss: 0.01851964369416237, step time: 18.68891716003418ms\r\n",,terminal_output +19628,13755175,"TERMINAL",0,0,"Step 5434, loss: 0.011048399843275547, step time: 17.52161979675293ms\r\n",,terminal_output +19629,13755233,"TERMINAL",0,0,"Step 5435, loss: 0.01801833137869835, step time: 17.841100692749023ms\r\n",,terminal_output +19630,13755306,"TERMINAL",0,0,"Step 5436, loss: 0.27655184268951416, step time: 18.25547218322754ms\r\n",,terminal_output +19631,13755370,"TERMINAL",0,0,"Step 5437, loss: 0.020228654146194458, step time: 17.833232879638672ms\r\n",,terminal_output +19632,13755488,"TERMINAL",0,0,"Step 5438, loss: 0.04598535969853401, step time: 17.82059669494629ms\r\nStep 5439, loss: 0.01112079806625843, step time: 18.28908920288086ms\r\n",,terminal_output +19633,13755620,"TERMINAL",0,0,"Step 5440, loss: 0.0329892672598362, step time: 17.755508422851562ms\r\nStep 5441, loss: 0.036190617829561234, step time: 17.958641052246094ms\r\n",,terminal_output +19634,13755680,"TERMINAL",0,0,"Step 5442, loss: 0.1697213053703308, step time: 18.23878288269043ms\r\n",,terminal_output +19635,13755742,"TERMINAL",0,0,"Step 5443, loss: 0.10174473375082016, step time: 17.873287200927734ms\r\n",,terminal_output +19636,13755808,"TERMINAL",0,0,"Step 5444, loss: 0.012785423547029495, step time: 17.92621612548828ms\r\n",,terminal_output +19637,13755875,"TERMINAL",0,0,"Step 5445, loss: 0.013850376941263676, step time: 18.531084060668945ms\r\n",,terminal_output +19638,13755939,"TERMINAL",0,0,"Step 5446, loss: 0.013919208198785782, step time: 17.519474029541016ms\r\n",,terminal_output +19639,13756001,"TERMINAL",0,0,"Step 5447, loss: 0.018514102324843407, step time: 17.940044403076172ms\r\n",,terminal_output +19640,13756065,"TERMINAL",0,0,"Step 5448, loss: 0.03918633237481117, step time: 18.158435821533203ms\r\n",,terminal_output +19641,13756148,"TERMINAL",0,0,"Step 5449, loss: 0.03120892122387886, step time: 17.8983211517334ms\r\n",,terminal_output +19642,13756201,"TERMINAL",0,0,"Step 5450, loss: 0.09574813395738602, step time: 17.842769622802734ms\r\n",,terminal_output +19643,13756266,"TERMINAL",0,0,"Step 5451, loss: 0.01861434057354927, step time: 18.35155487060547ms\r\n",,terminal_output +19644,13756326,"TERMINAL",0,0,"Step 5452, loss: 0.12222213298082352, step time: 17.78244972229004ms\r\n",,terminal_output +19645,13756418,"TERMINAL",0,0,"Step 5453, loss: 
0.00953061692416668, step time: 17.92454719543457ms\r\n",,terminal_output +19646,13756470,"TERMINAL",0,0,"Step 5454, loss: 0.015336829237639904, step time: 17.971038818359375ms\r\n",,terminal_output +19647,13756575,"TERMINAL",0,0,"Step 5455, loss: 0.0258531142026186, step time: 17.934322357177734ms\r\nStep 5456, loss: 0.11305073648691177, step time: 17.745494842529297ms\r\n",,terminal_output +19648,13756642,"TERMINAL",0,0,"Step 5457, loss: 0.03064778633415699, step time: 18.250703811645508ms\r\n",,terminal_output +19649,13756703,"TERMINAL",0,0,"Step 5458, loss: 0.022920040413737297, step time: 17.78554916381836ms\r\n",,terminal_output +19650,13756766,"TERMINAL",0,0,"Step 5459, loss: 0.012386760674417019, step time: 17.847776412963867ms\r\n",,terminal_output +19651,13756840,"TERMINAL",0,0,"Step 5460, loss: 0.033513303846120834, step time: 18.093585968017578ms\r\n",,terminal_output +19652,13756893,"TERMINAL",0,0,"Step 5461, loss: 0.014766141772270203, step time: 17.982959747314453ms\r\n",,terminal_output +19653,13756958,"TERMINAL",0,0,"Step 5462, loss: 0.019696732982993126, step time: 17.641782760620117ms\r\n",,terminal_output +19654,13757018,"TERMINAL",0,0,"Step 5463, loss: 0.007816003635525703, step time: 18.2645320892334ms\r\n",,terminal_output +19655,13757113,"TERMINAL",0,0,"Step 5464, loss: 0.048275310546159744, step time: 17.7609920501709ms\r\n",,terminal_output +19656,13757206,"TERMINAL",0,0,"Step 5465, loss: 0.023133784532546997, step time: 18.033981323242188ms\r\nStep 5466, loss: 0.05077407509088516, step time: 17.960786819458008ms\r\n",,terminal_output +19657,13757297,"TERMINAL",0,0,"Step 5467, loss: 0.014218974858522415, step time: 17.894744873046875ms\r\n",,terminal_output +19658,13757404,"TERMINAL",0,0,"Step 5468, loss: 0.011508583091199398, step time: 17.77958869934082ms\r\nStep 5469, loss: 0.03217599540948868, step time: 18.489837646484375ms\r\n",,terminal_output +19659,13757466,"TERMINAL",0,0,"Step 5470, loss: 0.009580034762620926, step time: 17.815351486206055ms\r\n",,terminal_output +19660,13757560,"TERMINAL",0,0,"Step 5471, loss: 0.07935754209756851, step time: 17.99774169921875ms\r\n",,terminal_output +19661,13757609,"TERMINAL",0,0,"Step 5472, loss: 0.009880243800580502, step time: 18.248319625854492ms\r\n",,terminal_output +19662,13757713,"TERMINAL",0,0,"Step 5473, loss: 0.025977469980716705, step time: 18.052339553833008ms\r\nStep 5474, loss: 0.006544924341142178, step time: 18.414020538330078ms\r\n",,terminal_output +19663,13757776,"TERMINAL",0,0,"Step 5475, loss: 0.021894194185733795, step time: 18.938064575195312ms\r\n",,terminal_output +19664,13757840,"TERMINAL",0,0,"Step 5476, loss: 0.01671583391726017, step time: 17.955780029296875ms\r\n",,terminal_output +19665,13757941,"TERMINAL",0,0,"Step 5477, loss: 0.021381404250860214, step time: 18.10741424560547ms\r\n",,terminal_output +19666,13758006,"TERMINAL",0,0,"Step 5478, loss: 0.03847798705101013, step time: 18.181324005126953ms\r\n",,terminal_output +19667,13758068,"TERMINAL",0,0,"Step 5479, loss: 0.013736648485064507, step time: 19.100666046142578ms\r\n",,terminal_output +19668,13758127,"TERMINAL",0,0,"Step 5480, loss: 0.013123179785907269, step time: 17.808198928833008ms\r\n",,terminal_output +19669,13758226,"TERMINAL",0,0,"Step 5481, loss: 0.017133286222815514, step time: 18.43714714050293ms\r\nStep 5482, loss: 0.010351142846047878, step time: 17.913341522216797ms\r\n",,terminal_output +19670,13758289,"TERMINAL",0,0,"Step 5483, loss: 0.007723754737526178, step time: 18.238306045532227ms\r\n",,terminal_output 
+19671,13758394,"TERMINAL",0,0,"Step 5484, loss: 0.008844698779284954, step time: 18.15962791442871ms\r\n",,terminal_output +19672,13758446,"TERMINAL",0,0,"Step 5485, loss: 0.013480513356626034, step time: 17.991304397583008ms\r\n",,terminal_output +19673,13758550,"TERMINAL",0,0,"Step 5486, loss: 0.030375981703400612, step time: 17.485618591308594ms\r\nStep 5487, loss: 0.014896354638040066, step time: 18.387556076049805ms\r\n",,terminal_output +19674,13758659,"TERMINAL",0,0,"Step 5488, loss: 0.015692122280597687, step time: 17.755985260009766ms\r\n",,terminal_output +19675,13758668,"TERMINAL",0,0,"Step 5489, loss: 0.1042068749666214, step time: 18.097400665283203ms\r\n",,terminal_output +19676,13758773,"TERMINAL",0,0,"Step 5490, loss: 0.015305324457585812, step time: 18.41282844543457ms\r\n",,terminal_output +19677,13758835,"TERMINAL",0,0,"Step 5491, loss: 0.015859344974160194, step time: 18.042564392089844ms\r\n",,terminal_output +19678,13758899,"TERMINAL",0,0,"Step 5492, loss: 0.00884029921144247, step time: 18.088817596435547ms\r\n",,terminal_output +19679,13758963,"TERMINAL",0,0,"Step 5493, loss: 0.015908148139715195, step time: 18.501996994018555ms\r\n",,terminal_output +19680,13759025,"TERMINAL",0,0,"Step 5494, loss: 0.021510757505893707, step time: 17.620325088500977ms\r\n",,terminal_output +19681,13759089,"TERMINAL",0,0,"Step 5495, loss: 0.013081752695143223, step time: 18.04804801940918ms\r\n",,terminal_output +19682,13759151,"TERMINAL",0,0,"Step 5496, loss: 0.2575446665287018, step time: 18.266916275024414ms\r\n",,terminal_output +19683,13759213,"TERMINAL",0,0,"Step 5497, loss: 0.01581595465540886, step time: 18.016815185546875ms\r\n",,terminal_output +19684,13759273,"TERMINAL",0,0,"Step 5498, loss: 0.048784561455249786, step time: 17.75813102722168ms\r\n",,terminal_output +19685,13759332,"TERMINAL",0,0,"Step 5499, loss: 0.02694488689303398, step time: 18.41592788696289ms\r\n",,terminal_output +19686,13762177,"TERMINAL",0,0,"Step 5500, loss: 0.07356499880552292, step time: 26.966333389282227ms\r\n",,terminal_output +19687,13762229,"TERMINAL",0,0,"Step 5501, loss: 0.034660570323467255, step time: 25.42400360107422ms\r\n",,terminal_output +19688,13762296,"TERMINAL",0,0,"Step 5502, loss: 0.09464531391859055, step time: 20.2789306640625ms\r\n",,terminal_output +19689,13762399,"TERMINAL",0,0,"Step 5503, loss: 0.026871897280216217, step time: 19.821643829345703ms\r\n",,terminal_output +19690,13762449,"TERMINAL",0,0,"Step 5504, loss: 0.13958778977394104, step time: 18.960237503051758ms\r\n",,terminal_output +19691,13762596,"TERMINAL",0,0,"Step 5505, loss: 0.0071374839171767235, step time: 30.92646598815918ms\r\nStep 5506, loss: 0.014326613396406174, step time: 19.759654998779297ms\r\n",,terminal_output +19692,13762653,"TERMINAL",0,0,"Step 5507, loss: 0.02720562554895878, step time: 19.177675247192383ms\r\n",,terminal_output +19693,13762764,"TERMINAL",0,0,"Step 5508, loss: 0.03795439749956131, step time: 18.49651336669922ms\r\nStep 5509, loss: 0.02824249677360058, step time: 18.639326095581055ms\r\n",,terminal_output +19694,13762826,"TERMINAL",0,0,"Step 5510, loss: 0.05101252347230911, step time: 19.04582977294922ms\r\n",,terminal_output +19695,13762886,"TERMINAL",0,0,"Step 5511, loss: 0.02514774538576603, step time: 18.31817626953125ms\r\n",,terminal_output +19696,13762948,"TERMINAL",0,0,"Step 5512, loss: 0.05556436628103256, step time: 18.390893936157227ms\r\n",,terminal_output +19697,13763013,"TERMINAL",0,0,"Step 5513, loss: 0.012522541917860508, step time: 
18.54681968688965ms\r\n",,terminal_output +19698,13763076,"TERMINAL",0,0,"Step 5514, loss: 0.1082475483417511, step time: 17.307758331298828ms\r\n",,terminal_output +19699,13763149,"TERMINAL",0,0,"Step 5515, loss: 0.034167829900979996, step time: 17.771244049072266ms\r\n",,terminal_output +19700,13763203,"TERMINAL",0,0,"Step 5516, loss: 0.01422104425728321, step time: 18.834590911865234ms\r\n",,terminal_output +19701,13763269,"TERMINAL",0,0,"Step 5517, loss: 0.011991411447525024, step time: 18.048763275146484ms\r\n",,terminal_output +19702,13763335,"TERMINAL",0,0,"Step 5518, loss: 0.027690395712852478, step time: 17.329931259155273ms\r\n",,terminal_output +19703,13763396,"TERMINAL",0,0,"Step 5519, loss: 0.013006237335503101, step time: 17.781734466552734ms\r\n",,terminal_output +19704,13763459,"TERMINAL",0,0,"Step 5520, loss: 0.025653289631009102, step time: 17.116069793701172ms\r\n",,terminal_output +19705,13763522,"TERMINAL",0,0,"Step 5521, loss: 0.02352718450129032, step time: 17.92287826538086ms\r\n",,terminal_output +19706,13763596,"TERMINAL",0,0,"Step 5522, loss: 0.034217678010463715, step time: 17.569541931152344ms\r\n",,terminal_output +19707,13763653,"TERMINAL",0,0,"Step 5523, loss: 0.08295369148254395, step time: 17.737150192260742ms\r\n",,terminal_output +19708,13763720,"TERMINAL",0,0,"Step 5524, loss: 0.02002834714949131, step time: 17.678260803222656ms\r\n",,terminal_output +19709,13763826,"TERMINAL",0,0,"Step 5525, loss: 0.055847521871328354, step time: 18.2187557220459ms\r\nStep 5526, loss: 0.07293367385864258, step time: 17.12799072265625ms\r\n",,terminal_output +19710,13763890,"TERMINAL",0,0,"Step 5527, loss: 0.021493081003427505, step time: 17.61484146118164ms\r\n",,terminal_output +19711,13763953,"TERMINAL",0,0,"Step 5528, loss: 0.0199380312114954, step time: 17.454147338867188ms\r\n",,terminal_output +19712,13764021,"TERMINAL",0,0,"Step 5529, loss: 0.028107617050409317, step time: 17.424583435058594ms\r\n",,terminal_output +19713,13764083,"TERMINAL",0,0,"Step 5530, loss: 0.12399167567491531, step time: 17.266511917114258ms\r\n",,terminal_output +19714,13764157,"TERMINAL",0,0,"Step 5531, loss: 0.10044573247432709, step time: 17.780065536499023ms\r\n",,terminal_output +19715,13764210,"TERMINAL",0,0,"Step 5532, loss: 0.007255226839333773, step time: 17.20714569091797ms\r\n",,terminal_output +19716,13764275,"TERMINAL",0,0,"Step 5533, loss: 0.010677040554583073, step time: 17.504453659057617ms\r\n",,terminal_output +19717,13764343,"TERMINAL",0,0,"Step 5534, loss: 0.015038671903312206, step time: 17.38882064819336ms\r\n",,terminal_output +19718,13764405,"TERMINAL",0,0,"Step 5535, loss: 0.08883988857269287, step time: 17.36140251159668ms\r\n",,terminal_output +19719,13764468,"TERMINAL",0,0,"Step 5536, loss: 0.012230345979332924, step time: 17.213106155395508ms\r\n",,terminal_output +19720,13764531,"TERMINAL",0,0,"Step 5537, loss: 0.017100080847740173, step time: 17.629146575927734ms\r\n",,terminal_output +19721,13764597,"TERMINAL",0,0,"Step 5538, loss: 0.00886175874620676, step time: 17.129898071289062ms\r\n",,terminal_output +19722,13764655,"TERMINAL",0,0,"Step 5539, loss: 0.014047089964151382, step time: 17.728328704833984ms\r\n",,terminal_output +19723,13764716,"TERMINAL",0,0,"Step 5540, loss: 0.010235725902020931, step time: 17.40407943725586ms\r\n",,terminal_output +19724,13764813,"TERMINAL",0,0,"Step 5541, loss: 0.009301330894231796, step time: 17.603397369384766ms\r\n",,terminal_output +19725,13764862,"TERMINAL",0,0,"Step 5542, loss: 0.014176961965858936, step time: 
17.241954803466797ms\r\n",,terminal_output +19726,13764966,"TERMINAL",0,0,"Step 5543, loss: 0.01532732043415308, step time: 17.507076263427734ms\r\nStep 5544, loss: 0.041278328746557236, step time: 17.143726348876953ms\r\n",,terminal_output +19727,13765058,"TERMINAL",0,0,"Step 5545, loss: 0.026968393474817276, step time: 17.714977264404297ms\r\n",,terminal_output +19728,13765109,"TERMINAL",0,0,"Step 5546, loss: 0.016887502744793892, step time: 17.391443252563477ms\r\n",,terminal_output +19729,13765213,"TERMINAL",0,0,"Step 5547, loss: 0.02676093392074108, step time: 17.499446868896484ms\r\nStep 5548, loss: 0.015887046232819557, step time: 17.181873321533203ms\r\n",,terminal_output +19730,13765276,"TERMINAL",0,0,"Step 5549, loss: 0.018485242500901222, step time: 17.429351806640625ms\r\n",,terminal_output +19731,13765378,"TERMINAL",0,0,"Step 5550, loss: 0.020795535296201706, step time: 16.924381256103516ms\r\n",,terminal_output +19732,13765431,"TERMINAL",0,0,"Step 5551, loss: 0.01896549202501774, step time: 17.474651336669922ms\r\n",,terminal_output +19733,13765538,"TERMINAL",0,0,"Step 5552, loss: 0.033910490572452545, step time: 17.494916915893555ms\r\nStep 5553, loss: 0.02587098255753517, step time: 17.378807067871094ms\r\n",,terminal_output +19734,13765605,"TERMINAL",0,0,"Step 5554, loss: 0.0276764128357172, step time: 17.10963249206543ms\r\n",,terminal_output +19735,13765677,"TERMINAL",0,0,"Step 5555, loss: 0.04282856360077858, step time: 17.594337463378906ms\r\n",,terminal_output +19736,13765734,"TERMINAL",0,0,"Step 5556, loss: 0.021761685609817505, step time: 16.963481903076172ms\r\n",,terminal_output +19737,13765853,"TERMINAL",0,0,"Step 5557, loss: 0.029191305860877037, step time: 17.575740814208984ms\r\nStep 5558, loss: 0.01865094155073166, step time: 17.246723175048828ms\r\n",,terminal_output +19738,13765909,"TERMINAL",0,0,"Step 5559, loss: 0.1609281301498413, step time: 24.52707290649414ms\r\n",,terminal_output +19739,13766002,"TERMINAL",0,0,"Step 5560, loss: 0.016514141112565994, step time: 20.592451095581055ms\r\n",,terminal_output +19740,13766054,"TERMINAL",0,0,"Step 5561, loss: 0.038126975297927856, step time: 18.563032150268555ms\r\n",,terminal_output +19741,13766166,"TERMINAL",0,0,"Step 5562, loss: 0.037129104137420654, step time: 18.2950496673584ms\r\nStep 5563, loss: 0.025376463308930397, step time: 17.4868106842041ms\r\n",,terminal_output +19742,13766228,"TERMINAL",0,0,"Step 5564, loss: 0.04428863152861595, step time: 17.241239547729492ms\r\n",,terminal_output +19743,13766325,"TERMINAL",0,0,"Step 5565, loss: 0.023980828002095222, step time: 18.031597137451172ms\r\n",,terminal_output +19744,13766372,"TERMINAL",0,0,"Step 5566, loss: 0.023675302043557167, step time: 17.258167266845703ms\r\n",,terminal_output +19745,13766469,"TERMINAL",0,0,"Step 5567, loss: 0.023420531302690506, step time: 17.604827880859375ms\r\n",,terminal_output +19746,13766542,"TERMINAL",0,0,"Step 5568, loss: 0.019186805933713913, step time: 17.009735107421875ms\r\nStep 5569, loss: 0.01294358354061842, step time: 17.52758026123047ms\r\n",,terminal_output +19747,13766613,"TERMINAL",0,0,"Step 5570, loss: 0.011000714264810085, step time: 17.389297485351562ms\r\n",,terminal_output +19748,13766669,"TERMINAL",0,0,"Step 5571, loss: 0.009006302803754807, step time: 17.23480224609375ms\r\n",,terminal_output +19749,13766764,"TERMINAL",0,0,"Step 5572, loss: 0.022554151713848114, step time: 17.137527465820312ms\r\n",,terminal_output +19750,13766869,"TERMINAL",0,0,"Step 5573, loss: 0.03826499730348587, step time: 
17.579317092895508ms\r\nStep 5574, loss: 0.03033473715186119, step time: 17.498254776000977ms\r\n",,terminal_output +19751,13766962,"TERMINAL",0,0,"Step 5575, loss: 0.01228359341621399, step time: 17.338991165161133ms\r\n",,terminal_output +19752,13767069,"TERMINAL",0,0,"Step 5576, loss: 0.07821918278932571, step time: 17.499923706054688ms\r\nStep 5577, loss: 0.012658332474529743, step time: 17.14324951171875ms\r\n",,terminal_output +19753,13767134,"TERMINAL",0,0,"Step 5578, loss: 0.14580582082271576, step time: 17.308950424194336ms\r\n",,terminal_output +19754,13767244,"TERMINAL",0,0,"Step 5579, loss: 0.026091696694493294, step time: 17.612457275390625ms\r\nStep 5580, loss: 0.0270373672246933, step time: 17.064332962036133ms\r\n",,terminal_output +19755,13767309,"TERMINAL",0,0,"Step 5581, loss: 0.03215956687927246, step time: 17.555952072143555ms\r\n",,terminal_output +19756,13767372,"TERMINAL",0,0,"Step 5582, loss: 0.01724582351744175, step time: 17.568111419677734ms\r\n",,terminal_output +19757,13767436,"TERMINAL",0,0,"Step 5583, loss: 0.03313853219151497, step time: 17.365694046020508ms\r\n",,terminal_output +19758,13767500,"TERMINAL",0,0,"Step 5584, loss: 0.14572618901729584, step time: 17.263174057006836ms\r\n",,terminal_output +19759,13767565,"TERMINAL",0,0,"Step 5585, loss: 0.024958331137895584, step time: 17.461776733398438ms\r\n",,terminal_output +19760,13767628,"TERMINAL",0,0,"Step 5586, loss: 0.01851024478673935, step time: 17.003774642944336ms\r\n",,terminal_output +19761,13767693,"TERMINAL",0,0,"Step 5587, loss: 0.027282126247882843, step time: 17.405033111572266ms\r\n",,terminal_output +19762,13767758,"TERMINAL",0,0,"Step 5588, loss: 0.01538369245827198, step time: 17.293214797973633ms\r\n",,terminal_output +19763,13767822,"TERMINAL",0,0,"Step 5589, loss: 0.03718757629394531, step time: 17.638206481933594ms\r\n",,terminal_output +19764,13767882,"TERMINAL",0,0,"Step 5590, loss: 0.03457063436508179, step time: 17.12775230407715ms\r\n",,terminal_output +19765,13767946,"TERMINAL",0,0,"Step 5591, loss: 0.016730112954974174, step time: 17.622947692871094ms\r\n",,terminal_output +19766,13768009,"TERMINAL",0,0,"Step 5592, loss: 0.025843245908617973, step time: 17.200231552124023ms\r\n",,terminal_output +19767,13768072,"TERMINAL",0,0,"Step 5593, loss: 0.012060093693435192, step time: 17.184019088745117ms\r\n",,terminal_output +19768,13768133,"TERMINAL",0,0,"Step 5594, loss: 0.02887802943587303, step time: 17.309188842773438ms\r\n",,terminal_output +19769,13768196,"TERMINAL",0,0,"Step 5595, loss: 0.029254937544465065, step time: 17.290353775024414ms\r\n",,terminal_output +19770,13768263,"TERMINAL",0,0,"Step 5596, loss: 0.01634005457162857, step time: 17.185211181640625ms\r\n",,terminal_output +19771,13768347,"TERMINAL",0,0,"Step 5597, loss: 0.04262896999716759, step time: 17.65751838684082ms\r\n",,terminal_output +19772,13768410,"TERMINAL",0,0,"Step 5598, loss: 0.035163987427949905, step time: 17.14944839477539ms\r\n",,terminal_output +19773,13768475,"TERMINAL",0,0,"Step 5599, loss: 0.08084402978420258, step time: 17.484188079833984ms\r\n",,terminal_output +19774,13768538,"TERMINAL",0,0,"Step 5600, loss: 0.009165480732917786, step time: 17.319440841674805ms\r\n",,terminal_output +19775,13768693,"TERMINAL",0,0,"Step 5601, loss: 0.015154344961047173, step time: 17.4405574798584ms\r\nStep 5602, loss: 0.020841170102357864, step time: 17.29106903076172ms\r\nStep 5603, loss: 0.021502457559108734, step time: 17.43340492248535ms\r\n",,terminal_output +19776,13768789,"TERMINAL",0,0,"Step 5604, 
loss: 0.013896901160478592, step time: 17.446279525756836ms\r\n",,terminal_output +19777,13768852,"TERMINAL",0,0,"Step 5605, loss: 0.04397463798522949, step time: 17.478466033935547ms\r\n",,terminal_output +19778,13768913,"TERMINAL",0,0,"Step 5606, loss: 0.0067050522193312645, step time: 17.569541931152344ms\r\n",,terminal_output +19779,13768977,"TERMINAL",0,0,"Step 5607, loss: 0.019040154293179512, step time: 17.516374588012695ms\r\n",,terminal_output +19780,13769040,"TERMINAL",0,0,"Step 5608, loss: 0.00929632131010294, step time: 17.075300216674805ms\r\n",,terminal_output +19781,13769103,"TERMINAL",0,0,"Step 5609, loss: 0.03719063475728035, step time: 17.42839813232422ms\r\n",,terminal_output +19782,13769196,"TERMINAL",0,0,"Step 5610, loss: 0.013670879416167736, step time: 17.088890075683594ms\r\nStep 5611, loss: 0.011849862523376942, step time: 17.339706420898438ms\r\n",,terminal_output +19783,13769264,"TERMINAL",0,0,"Step 5612, loss: 0.01846487820148468, step time: 17.336606979370117ms\r\n",,terminal_output +19784,13769328,"TERMINAL",0,0,"Step 5613, loss: 0.021263308823108673, step time: 17.608165740966797ms\r\n",,terminal_output +19785,13769391,"TERMINAL",0,0,"Step 5614, loss: 0.1260278970003128, step time: 17.27581024169922ms\r\n",,terminal_output +19786,13769454,"TERMINAL",0,0,"Step 5615, loss: 0.013209672644734383, step time: 22.984743118286133ms\r\n",,terminal_output +19787,13769520,"TERMINAL",0,0,"Step 5616, loss: 0.017628628760576248, step time: 17.39501953125ms\r\n",,terminal_output +19788,13769640,"TERMINAL",0,0,"Step 5617, loss: 0.02180410735309124, step time: 17.490386962890625ms\r\nStep 5618, loss: 0.005553800147026777, step time: 17.5778865814209ms\r\n",,terminal_output +19789,13769704,"TERMINAL",0,0,"Step 5619, loss: 0.013393031433224678, step time: 17.189741134643555ms\r\n",,terminal_output +19790,13769765,"TERMINAL",0,0,"Step 5620, loss: 0.013711106963455677, step time: 17.21048355102539ms\r\n",,terminal_output +19791,13769830,"TERMINAL",0,0,"Step 5621, loss: 0.01653081737458706, step time: 17.25602149963379ms\r\n",,terminal_output +19792,13769893,"TERMINAL",0,0,"Step 5622, loss: 0.0246021319180727, step time: 17.2579288482666ms\r\n",,terminal_output +19793,13769958,"TERMINAL",0,0,"Step 5623, loss: 0.010969718918204308, step time: 17.193078994750977ms\r\n",,terminal_output +19794,13770272,"TERMINAL",0,0,"Step 5624, loss: 0.01101649459451437, step time: 312.960147857666ms\r\n",,terminal_output +19795,13770334,"TERMINAL",0,0,"Step 5625, loss: 0.011943675577640533, step time: 25.141000747680664ms\r\n",,terminal_output +19796,13770402,"TERMINAL",0,0,"Step 5626, loss: 0.02850319817662239, step time: 19.538164138793945ms\r\n",,terminal_output +19797,13770466,"TERMINAL",0,0,"Step 5627, loss: 0.02251744270324707, step time: 18.11385154724121ms\r\n",,terminal_output +19798,13770561,"TERMINAL",0,0,"Step 5628, loss: 0.008964824490249157, step time: 17.757415771484375ms\r\n",,terminal_output +19799,13770615,"TERMINAL",0,0,"Step 5629, loss: 0.013683382421731949, step time: 17.45438575744629ms\r\n",,terminal_output +19800,13770720,"TERMINAL",0,0,"Step 5630, loss: 0.01409896183758974, step time: 17.76909828186035ms\r\nStep 5631, loss: 0.026351576671004295, step time: 17.375469207763672ms\r\n",,terminal_output +19801,13770813,"TERMINAL",0,0,"Step 5632, loss: 0.0147890979424119, step time: 17.3647403717041ms\r\n",,terminal_output +19802,13770866,"TERMINAL",0,0,"Step 5633, loss: 0.030091237276792526, step time: 17.47727394104004ms\r\n",,terminal_output 
+19803,13770972,"TERMINAL",0,0,"Step 5634, loss: 0.012228338979184628, step time: 17.4863338470459ms\r\nStep 5635, loss: 0.010754644870758057, step time: 17.528057098388672ms\r\n",,terminal_output +19804,13771065,"TERMINAL",0,0,"Step 5636, loss: 0.007175459060817957, step time: 17.691373825073242ms\r\n",,terminal_output +19805,13771118,"TERMINAL",0,0,"Step 5637, loss: 0.015541541390120983, step time: 17.24529266357422ms\r\n",,terminal_output +19806,13771224,"TERMINAL",0,0,"Step 5638, loss: 0.011528322473168373, step time: 17.651081085205078ms\r\nStep 5639, loss: 0.00948350690305233, step time: 17.46845245361328ms\r\n",,terminal_output +19807,13771304,"TERMINAL",0,0,"Step 5640, loss: 0.009125922806560993, step time: 17.507314682006836ms\r\n",,terminal_output +19808,13771370,"TERMINAL",0,0,"Step 5641, loss: 0.013517855666577816, step time: 17.280101776123047ms\r\n",,terminal_output +19809,13771435,"TERMINAL",0,0,"Step 5642, loss: 0.015275244601070881, step time: 17.437458038330078ms\r\n",,terminal_output +19810,13771499,"TERMINAL",0,0,"Step 5643, loss: 0.017964273691177368, step time: 17.31562614440918ms\r\n",,terminal_output +19811,13771561,"TERMINAL",0,0,"Step 5644, loss: 0.008265967480838299, step time: 17.26388931274414ms\r\n",,terminal_output +19812,13771626,"TERMINAL",0,0,"Step 5645, loss: 0.012317144311964512, step time: 17.450571060180664ms\r\n",,terminal_output +19813,13771689,"TERMINAL",0,0,"Step 5646, loss: 0.010983056388795376, step time: 17.270326614379883ms\r\n",,terminal_output +19814,13771754,"TERMINAL",0,0,"Step 5647, loss: 0.007260530721396208, step time: 17.329692840576172ms\r\n",,terminal_output +19815,13771819,"TERMINAL",0,0,"Step 5648, loss: 0.01566964015364647, step time: 17.784833908081055ms\r\n",,terminal_output +19816,13771883,"TERMINAL",0,0,"Step 5649, loss: 0.007756256498396397, step time: 17.23623275756836ms\r\n",,terminal_output +19817,13771946,"TERMINAL",0,0,"Step 5650, loss: 0.00931733101606369, step time: 17.412424087524414ms\r\n",,terminal_output +19818,13772013,"TERMINAL",0,0,"Step 5651, loss: 0.01803733967244625, step time: 17.359256744384766ms\r\n",,terminal_output +19819,13772078,"TERMINAL",0,0,"Step 5652, loss: 0.04615071043372154, step time: 17.22264289855957ms\r\n",,terminal_output +19820,13772143,"TERMINAL",0,0,"Step 5653, loss: 0.04573296010494232, step time: 17.28677749633789ms\r\n",,terminal_output +19821,13772235,"TERMINAL",0,0,"Step 5654, loss: 0.007723833434283733, step time: 17.575502395629883ms\r\nStep 5655, loss: 0.010564676485955715, step time: 17.264604568481445ms\r\n",,terminal_output +19822,13772319,"TERMINAL",0,0,"Step 5656, loss: 0.007314503658562899, step time: 17.293691635131836ms\r\n",,terminal_output +19823,13772378,"TERMINAL",0,0,"Step 5657, loss: 0.013447761535644531, step time: 17.428874969482422ms\r\n",,terminal_output +19824,13772441,"TERMINAL",0,0,"Step 5658, loss: 0.011871875263750553, step time: 17.17066764831543ms\r\n",,terminal_output +19825,13772505,"TERMINAL",0,0,"Step 5659, loss: 0.014019784517586231, step time: 17.35401153564453ms\r\n",,terminal_output +19826,13772569,"TERMINAL",0,0,"Step 5660, loss: 0.015172518789768219, step time: 17.598867416381836ms\r\n",,terminal_output +19827,13772632,"TERMINAL",0,0,"Step 5661, loss: 0.010979250073432922, step time: 17.133235931396484ms\r\n",,terminal_output +19828,13772696,"TERMINAL",0,0,"Step 5662, loss: 0.013601144775748253, step time: 17.311573028564453ms\r\n",,terminal_output +19829,13772762,"TERMINAL",0,0,"Step 5663, loss: 0.008069748058915138, step time: 
17.367124557495117ms\r\n",,terminal_output +19830,13772827,"TERMINAL",0,0,"Step 5664, loss: 0.021069757640361786, step time: 17.30966567993164ms\r\n",,terminal_output +19831,13772889,"TERMINAL",0,0,"Step 5665, loss: 0.02251341938972473, step time: 18.62335205078125ms\r\n",,terminal_output +19832,13772952,"TERMINAL",0,0,"Step 5666, loss: 0.01410629041492939, step time: 17.793893814086914ms\r\n",,terminal_output +19833,13773014,"TERMINAL",0,0,"Step 5667, loss: 0.031287726014852524, step time: 17.479419708251953ms\r\n",,terminal_output +19834,13773077,"TERMINAL",0,0,"Step 5668, loss: 0.020747974514961243, step time: 17.238378524780273ms\r\n",,terminal_output +19835,13773136,"TERMINAL",0,0,"Step 5669, loss: 0.011749938130378723, step time: 18.030881881713867ms\r\n",,terminal_output +19836,13773261,"TERMINAL",0,0,"Step 5670, loss: 0.01080230250954628, step time: 17.308950424194336ms\r\nStep 5671, loss: 0.012324697338044643, step time: 17.170429229736328ms\r\n",,terminal_output +19837,13773323,"TERMINAL",0,0,"Step 5672, loss: 0.04521201178431511, step time: 17.559051513671875ms\r\n",,terminal_output +19838,13773449,"TERMINAL",0,0,"Step 5673, loss: 0.014137945137917995, step time: 17.188072204589844ms\r\nStep 5674, loss: 0.008268013596534729, step time: 17.39978790283203ms\r\n",,terminal_output +19839,13773557,"TERMINAL",0,0,"Step 5675, loss: 0.008091595955193043, step time: 30.17711639404297ms\r\n",,terminal_output +19840,13773658,"TERMINAL",0,0,"Step 5676, loss: 0.06441562622785568, step time: 21.44145965576172ms\r\nStep 5677, loss: 0.06947770714759827, step time: 24.706125259399414ms\r\n",,terminal_output +19841,13773754,"TERMINAL",0,0,"Step 5678, loss: 0.010310936719179153, step time: 18.093347549438477ms\r\n",,terminal_output +19842,13773816,"TERMINAL",0,0,"Step 5679, loss: 0.10946796834468842, step time: 17.605304718017578ms\r\n",,terminal_output +19843,13773878,"TERMINAL",0,0,"Step 5680, loss: 0.009530757553875446, step time: 17.531633377075195ms\r\n",,terminal_output +19844,13773940,"TERMINAL",0,0,"Step 5681, loss: 0.008461124263703823, step time: 17.53711700439453ms\r\n",,terminal_output +19845,13774000,"TERMINAL",0,0,"Step 5682, loss: 0.007414706517010927, step time: 17.141103744506836ms\r\n",,terminal_output +19846,13774062,"TERMINAL",0,0,"Step 5683, loss: 0.021549217402935028, step time: 17.405033111572266ms\r\n",,terminal_output +19847,13774137,"TERMINAL",0,0,"Step 5684, loss: 0.025285108014941216, step time: 17.729520797729492ms\r\n",,terminal_output +19848,13774191,"TERMINAL",0,0,"Step 5685, loss: 0.00932369939982891, step time: 17.32635498046875ms\r\n",,terminal_output +19849,13774259,"TERMINAL",0,0,"Step 5686, loss: 0.012529011815786362, step time: 17.515182495117188ms\r\n",,terminal_output +19850,13774373,"TERMINAL",0,0,"Step 5687, loss: 0.009346289560198784, step time: 17.345666885375977ms\r\nStep 5688, loss: 0.02076738141477108, step time: 17.252683639526367ms\r\n",,terminal_output +19851,13774478,"TERMINAL",0,0,"Step 5689, loss: 0.032787904143333435, step time: 17.185449600219727ms\r\nStep 5690, loss: 0.01850195787847042, step time: 17.515182495117188ms\r\n",,terminal_output +19852,13774572,"TERMINAL",0,0,"Step 5691, loss: 0.007445528171956539, step time: 17.225027084350586ms\r\n",,terminal_output +19853,13774624,"TERMINAL",0,0,"Step 5692, loss: 0.014741706661880016, step time: 17.2421932220459ms\r\n",,terminal_output +19854,13774728,"TERMINAL",0,0,"Step 5693, loss: 0.00801505520939827, step time: 17.25029945373535ms\r\nStep 5694, loss: 0.01445706095546484, step time: 
17.08221435546875ms\r\n",,terminal_output +19855,13774822,"TERMINAL",0,0,"Step 5695, loss: 0.008783360943198204, step time: 17.474651336669922ms\r\n",,terminal_output +19856,13774872,"TERMINAL",0,0,"Step 5696, loss: 0.03145911917090416, step time: 17.719030380249023ms\r\n",,terminal_output +19857,13774968,"TERMINAL",0,0,"Step 5697, loss: 0.006207508035004139, step time: 17.11130142211914ms\r\n",,terminal_output +19858,13775042,"TERMINAL",0,0,"Step 5698, loss: 0.010786977596580982, step time: 17.475366592407227ms\r\nStep 5699, loss: 0.031197309494018555, step time: 17.398357391357422ms\r\n",,terminal_output +19859,13775135,"TERMINAL",0,0,"Step 5700, loss: 0.010005761869251728, step time: 17.37499237060547ms\r\n",,terminal_output +19860,13775234,"TERMINAL",0,0,"Step 5701, loss: 0.011136024259030819, step time: 17.249584197998047ms\r\nStep 5702, loss: 0.011595751158893108, step time: 17.787456512451172ms\r\n",,terminal_output +19861,13775328,"TERMINAL",0,0,"Step 5703, loss: 0.03338053077459335, step time: 17.251014709472656ms\r\n",,terminal_output +19862,13775434,"TERMINAL",0,0,"Step 5704, loss: 0.00507366843521595, step time: 17.27128028869629ms\r\nStep 5705, loss: 0.0035149026662111282, step time: 17.388105392456055ms\r\n",,terminal_output +19863,13775493,"TERMINAL",0,0,"Step 5706, loss: 0.124626025557518, step time: 17.446279525756836ms\r\n",,terminal_output +19864,13775554,"TERMINAL",0,0,"Step 5707, loss: 0.014891601167619228, step time: 17.131328582763672ms\r\n",,terminal_output +19865,13775615,"TERMINAL",0,0,"Step 5708, loss: 0.012913617305457592, step time: 18.44644546508789ms\r\n",,terminal_output +19866,13775677,"TERMINAL",0,0,"Step 5709, loss: 0.016286609694361687, step time: 17.624855041503906ms\r\n",,terminal_output +19867,13775741,"TERMINAL",0,0,"Step 5710, loss: 0.004271031357347965, step time: 17.7614688873291ms\r\n",,terminal_output +19868,13775798,"TERMINAL",0,0,"Step 5711, loss: 0.051912546157836914, step time: 17.409324645996094ms\r\n",,terminal_output +19869,13775863,"TERMINAL",0,0,"Step 5712, loss: 0.010475937277078629, step time: 17.32802391052246ms\r\n",,terminal_output +19870,13775926,"TERMINAL",0,0,"Step 5713, loss: 0.012020708061754704, step time: 17.20404624938965ms\r\n",,terminal_output +19871,13775989,"TERMINAL",0,0,"Step 5714, loss: 0.019093800336122513, step time: 17.523765563964844ms\r\n",,terminal_output +19872,13776052,"TERMINAL",0,0,"Step 5715, loss: 0.014901193790137768, step time: 17.167091369628906ms\r\n",,terminal_output +19873,13776117,"TERMINAL",0,0,"Step 5716, loss: 0.012073996476829052, step time: 18.32747459411621ms\r\n",,terminal_output +19874,13776186,"TERMINAL",0,0,"Step 5717, loss: 0.024022674188017845, step time: 17.432451248168945ms\r\n",,terminal_output +19875,13776318,"TERMINAL",0,0,"Step 5718, loss: 0.028170352801680565, step time: 17.26055145263672ms\r\nStep 5719, loss: 0.02699095569550991, step time: 17.196178436279297ms\r\n",,terminal_output +19876,13776371,"TERMINAL",0,0,"Step 5720, loss: 0.008486716076731682, step time: 17.44699478149414ms\r\n",,terminal_output +19877,13776464,"TERMINAL",0,0,"Step 5721, loss: 0.008949311450123787, step time: 17.259597778320312ms\r\n",,terminal_output +19878,13776516,"TERMINAL",0,0,"Step 5722, loss: 0.016098566353321075, step time: 17.438173294067383ms\r\n",,terminal_output +19879,13776612,"TERMINAL",0,0,"Step 5723, loss: 0.010236222296953201, step time: 17.353534698486328ms\r\n",,terminal_output +19880,13776663,"TERMINAL",0,0,"Step 5724, loss: 0.022913441061973572, step time: 
17.420530319213867ms\r\n",,terminal_output +19881,13776714,"TERMINAL",0,0,"Step 5725, loss: 0.009329251945018768, step time: 17.23456382751465ms\r\n",,terminal_output +19882,13776819,"TERMINAL",0,0,"Step 5726, loss: 0.007378733716905117, step time: 17.706871032714844ms\r\nStep 5727, loss: 0.01638750173151493, step time: 17.301321029663086ms\r\n",,terminal_output +19883,13776914,"TERMINAL",0,0,"Step 5728, loss: 0.011175232008099556, step time: 17.113447189331055ms\r\n",,terminal_output +19884,13776966,"TERMINAL",0,0,"Step 5729, loss: 0.0066719199530780315, step time: 17.432212829589844ms\r\n",,terminal_output +19885,13777071,"TERMINAL",0,0,"Step 5730, loss: 0.009034003131091595, step time: 17.28987693786621ms\r\nStep 5731, loss: 0.0069747380912303925, step time: 17.289161682128906ms\r\n",,terminal_output +19886,13777189,"TERMINAL",0,0,"Step 5732, loss: 0.010638223960995674, step time: 17.634153366088867ms\r\nStep 5733, loss: 0.027728429064154625, step time: 17.202138900756836ms\r\n",,terminal_output +19887,13777254,"TERMINAL",0,0,"Step 5734, loss: 0.004924426320940256, step time: 17.52161979675293ms\r\n",,terminal_output +19888,13777317,"TERMINAL",0,0,"Step 5735, loss: 0.053647082298994064, step time: 17.400741577148438ms\r\n",,terminal_output +19889,13777380,"TERMINAL",0,0,"Step 5736, loss: 0.010360972955822945, step time: 17.424345016479492ms\r\n",,terminal_output +19890,13777441,"TERMINAL",0,0,"Step 5737, loss: 0.02856934256851673, step time: 17.15397834777832ms\r\n",,terminal_output +19891,13777505,"TERMINAL",0,0,"Step 5738, loss: 0.011091506108641624, step time: 17.34757423400879ms\r\n",,terminal_output +19892,13777598,"TERMINAL",0,0,"Step 5739, loss: 0.014741598628461361, step time: 17.771482467651367ms\r\n",,terminal_output +19893,13777650,"TERMINAL",0,0,"Step 5740, loss: 0.03265185281634331, step time: 17.162799835205078ms\r\n",,terminal_output +19894,13777795,"TERMINAL",0,0,"Step 5741, loss: 0.005667772609740496, step time: 17.54903793334961ms\r\nStep 5742, loss: 0.04022567346692085, step time: 17.247915267944336ms\r\n",,terminal_output +19895,13777846,"TERMINAL",0,0,"Step 5743, loss: 0.01095268689095974, step time: 17.342567443847656ms\r\n",,terminal_output +19896,13777897,"TERMINAL",0,0,"Step 5744, loss: 0.020272251218557358, step time: 17.54617691040039ms\r\n",,terminal_output +19897,13777993,"TERMINAL",0,0,"Step 5745, loss: 0.016439300030469894, step time: 29.847145080566406ms\r\n",,terminal_output +19898,13778104,"TERMINAL",0,0,"Step 5746, loss: 0.01158793643116951, step time: 21.654605865478516ms\r\nStep 5747, loss: 0.0053068059496581554, step time: 25.144577026367188ms\r\n",,terminal_output +19899,13778165,"TERMINAL",0,0,"Step 5748, loss: 0.007474659010767937, step time: 17.865896224975586ms\r\n",,terminal_output +19900,13778228,"TERMINAL",0,0,"Step 5749, loss: 0.008977905847132206, step time: 17.374277114868164ms\r\n",,terminal_output +19901,13778288,"TERMINAL",0,0,"Step 5750, loss: 0.008807072415947914, step time: 17.760515213012695ms\r\n",,terminal_output +19902,13778384,"TERMINAL",0,0,"Step 5751, loss: 0.006909030955284834, step time: 17.269611358642578ms\r\n",,terminal_output +19903,13778435,"TERMINAL",0,0,"Step 5752, loss: 0.07079307734966278, step time: 17.379283905029297ms\r\n",,terminal_output +19904,13778539,"TERMINAL",0,0,"Step 5753, loss: 0.010287338867783546, step time: 17.43316650390625ms\r\nStep 5754, loss: 0.011605111882090569, step time: 17.339229583740234ms\r\n",,terminal_output +19905,13778665,"TERMINAL",0,0,"Step 5755, loss: 0.019062204286456108, step 
time: 17.203569412231445ms\r\nStep 5756, loss: 0.009190679527819157, step time: 17.466306686401367ms\r\n",,terminal_output +19906,13778730,"TERMINAL",0,0,"Step 5757, loss: 0.030445916578173637, step time: 17.292499542236328ms\r\n",,terminal_output +19907,13778795,"TERMINAL",0,0,"Step 5758, loss: 0.013037567026913166, step time: 17.737627029418945ms\r\n",,terminal_output +19908,13778855,"TERMINAL",0,0,"Step 5759, loss: 0.007867304608225822, step time: 17.495393753051758ms\r\n",,terminal_output +19909,13778922,"TERMINAL",0,0,"Step 5760, loss: 0.013159355148673058, step time: 17.403125762939453ms\r\n",,terminal_output +19910,13778977,"TERMINAL",0,0,"Step 5761, loss: 0.011452949605882168, step time: 17.22431182861328ms\r\n",,terminal_output +19911,13779047,"TERMINAL",0,0,"Step 5762, loss: 0.009620165452361107, step time: 17.864227294921875ms\r\n",,terminal_output +19912,13779111,"TERMINAL",0,0,"Step 5763, loss: 0.012246107682585716, step time: 17.43340492248535ms\r\n",,terminal_output +19913,13779171,"TERMINAL",0,0,"Step 5764, loss: 0.005411808844655752, step time: 18.99123191833496ms\r\n",,terminal_output +19914,13779232,"TERMINAL",0,0,"Step 5765, loss: 0.008841060101985931, step time: 17.315149307250977ms\r\n",,terminal_output +19915,13779294,"TERMINAL",0,0,"Step 5766, loss: 0.007155742030590773, step time: 17.028331756591797ms\r\n",,terminal_output +19916,13779357,"TERMINAL",0,0,"Step 5767, loss: 0.021215952932834625, step time: 17.232656478881836ms\r\n",,terminal_output +19917,13779418,"TERMINAL",0,0,"Step 5768, loss: 0.012385659851133823, step time: 17.588138580322266ms\r\n",,terminal_output +19918,13779480,"TERMINAL",0,0,"Step 5769, loss: 0.014077330939471722, step time: 17.185449600219727ms\r\n",,terminal_output +19919,13779610,"TERMINAL",0,0,"Step 5770, loss: 0.008149614557623863, step time: 17.49396324157715ms\r\nStep 5771, loss: 0.008085410110652447, step time: 17.51995086669922ms\r\n",,terminal_output +19920,13779673,"TERMINAL",0,0,"Step 5772, loss: 0.005150997545570135, step time: 17.246484756469727ms\r\n",,terminal_output +19921,13779735,"TERMINAL",0,0,"Step 5773, loss: 0.009255463257431984, step time: 17.155170440673828ms\r\n",,terminal_output +19922,13779797,"TERMINAL",0,0,"Step 5774, loss: 0.006836972665041685, step time: 17.666339874267578ms\r\n",,terminal_output +19923,13779861,"TERMINAL",0,0,"Step 5775, loss: 0.01103690080344677, step time: 17.245769500732422ms\r\n",,terminal_output +19924,13779922,"TERMINAL",0,0,"Step 5776, loss: 0.04056693613529205, step time: 17.14038848876953ms\r\n",,terminal_output +19925,13779982,"TERMINAL",0,0,"Step 5777, loss: 0.015579300932586193, step time: 17.46082305908203ms\r\n",,terminal_output +19926,13780045,"TERMINAL",0,0,"Step 5778, loss: 0.0023690860252827406, step time: 17.183303833007812ms\r\n",,terminal_output +19927,13780105,"TERMINAL",0,0,"Step 5779, loss: 0.012138069607317448, step time: 17.187118530273438ms\r\n",,terminal_output +19928,13780167,"TERMINAL",0,0,"Step 5780, loss: 0.016293587163090706, step time: 17.65918731689453ms\r\n",,terminal_output +19929,13780238,"TERMINAL",0,0,"Step 5781, loss: 0.0163732897490263, step time: 17.144203186035156ms\r\n",,terminal_output +19930,13780331,"TERMINAL",0,0,"Step 5782, loss: 0.014561686664819717, step time: 17.466068267822266ms\r\n",,terminal_output +19931,13780584,"TERMINAL",0,0,"Step 5783, loss: 0.013930154033005238, step time: 293.7281131744385ms\r\n",,terminal_output +19932,13780649,"TERMINAL",0,0,"Step 5784, loss: 0.05958933010697365, step time: 
25.036096572875977ms\r\n",,terminal_output +19933,13780716,"TERMINAL",0,0,"Step 5785, loss: 0.007944783195853233, step time: 19.73724365234375ms\r\n",,terminal_output +19934,13780779,"TERMINAL",0,0,"Step 5786, loss: 0.030544141307473183, step time: 18.428325653076172ms\r\n",,terminal_output +19935,13780842,"TERMINAL",0,0,"Step 5787, loss: 0.012795697897672653, step time: 17.588376998901367ms\r\n",,terminal_output +19936,13780938,"TERMINAL",0,0,"Step 5788, loss: 0.010739002376794815, step time: 17.673730850219727ms\r\n",,terminal_output +19937,13780990,"TERMINAL",0,0,"Step 5789, loss: 0.01399721298366785, step time: 17.623186111450195ms\r\n",,terminal_output +19938,13781095,"TERMINAL",0,0,"Step 5790, loss: 0.01611446589231491, step time: 17.60101318359375ms\r\nStep 5791, loss: 0.0062481979839503765, step time: 18.865585327148438ms\r\n",,terminal_output +19939,13781221,"TERMINAL",0,0,"Step 5792, loss: 0.022059964016079903, step time: 19.75417137145996ms\r\nStep 5793, loss: 0.005819179117679596, step time: 18.088817596435547ms\r\n",,terminal_output +19940,13781284,"TERMINAL",0,0,"Step 5794, loss: 0.008946146816015244, step time: 18.11838150024414ms\r\n",,terminal_output +19941,13781349,"TERMINAL",0,0,"Step 5795, loss: 0.008240146562457085, step time: 20.917654037475586ms\r\n",,terminal_output +19942,13781413,"TERMINAL",0,0,"Step 5796, loss: 0.0166114903986454, step time: 18.444061279296875ms\r\n",,terminal_output +19943,13781506,"TERMINAL",0,0,"Step 5797, loss: 0.06779611855745316, step time: 17.711639404296875ms\r\n",,terminal_output +19944,13781611,"TERMINAL",0,0,"Step 5798, loss: 0.0137544646859169, step time: 19.586801528930664ms\r\nStep 5799, loss: 0.014294552616775036, step time: 18.772363662719727ms\r\n",,terminal_output +19945,13781707,"TERMINAL",0,0,"Step 5800, loss: 0.012772008776664734, step time: 17.66204833984375ms\r\n",,terminal_output +19946,13781757,"TERMINAL",0,0,"Step 5801, loss: 0.03396109491586685, step time: 17.697811126708984ms\r\n",,terminal_output +19947,13781861,"TERMINAL",0,0,"Step 5802, loss: 0.006240403279662132, step time: 18.019437789916992ms\r\nStep 5803, loss: 0.013236130587756634, step time: 17.787933349609375ms\r\n",,terminal_output +19948,13781955,"TERMINAL",0,0,"Step 5804, loss: 0.0052922796458005905, step time: 17.67253875732422ms\r\n",,terminal_output +19949,13782007,"TERMINAL",0,0,"Step 5805, loss: 0.03696489706635475, step time: 17.496824264526367ms\r\n",,terminal_output +19950,13782110,"TERMINAL",0,0,"Step 5806, loss: 0.004811955150216818, step time: 17.350196838378906ms\r\nStep 5807, loss: 0.0173281729221344, step time: 17.638683319091797ms\r\n",,terminal_output +19951,13782237,"TERMINAL",0,0,"Step 5808, loss: 0.005775343626737595, step time: 17.424345016479492ms\r\nStep 5809, loss: 0.01177183911204338, step time: 17.29440689086914ms\r\n",,terminal_output +19952,13782304,"TERMINAL",0,0,"Step 5810, loss: 0.016735922545194626, step time: 17.711877822875977ms\r\n",,terminal_output +19953,13782364,"TERMINAL",0,0,"Step 5811, loss: 0.008460603654384613, step time: 17.23623275756836ms\r\n",,terminal_output +19954,13782428,"TERMINAL",0,0,"Step 5812, loss: 0.008619041182100773, step time: 17.3187255859375ms\r\n",,terminal_output +19955,13782490,"TERMINAL",0,0,"Step 5813, loss: 0.009053845889866352, step time: 17.470121383666992ms\r\n",,terminal_output +19956,13782590,"TERMINAL",0,0,"Step 5814, loss: 0.003750742645934224, step time: 17.032861709594727ms\r\n",,terminal_output +19957,13782643,"TERMINAL",0,0,"Step 5815, loss: 0.011551843956112862, step time: 
17.203569412231445ms\r\n",,terminal_output +19958,13782749,"TERMINAL",0,0,"Step 5816, loss: 0.01047738641500473, step time: 17.583370208740234ms\r\nStep 5817, loss: 0.014107068069279194, step time: 17.184734344482422ms\r\n",,terminal_output +19959,13782866,"TERMINAL",0,0,"Step 5818, loss: 0.008431344293057919, step time: 17.362117767333984ms\r\nStep 5819, loss: 0.00528256269171834, step time: 17.69280433654785ms\r\n",,terminal_output +19960,13782943,"TERMINAL",0,0,"Step 5820, loss: 0.009398490190505981, step time: 17.515182495117188ms\r\n",,terminal_output +19961,13783006,"TERMINAL",0,0,"Step 5821, loss: 0.013421278446912766, step time: 17.53377914428711ms\r\n",,terminal_output +19962,13783070,"TERMINAL",0,0,"Step 5822, loss: 0.008176367729902267, step time: 17.79651641845703ms\r\n",,terminal_output +19963,13783180,"TERMINAL",0,0,"Step 5823, loss: 0.0026785284280776978, step time: 17.25935935974121ms\r\nStep 5824, loss: 0.05230417847633362, step time: 17.248153686523438ms\r\n",,terminal_output +19964,13783242,"TERMINAL",0,0,"Step 5825, loss: 0.012670719996094704, step time: 17.488956451416016ms\r\n",,terminal_output +19965,13783305,"TERMINAL",0,0,"Step 5826, loss: 0.005359445232897997, step time: 17.0743465423584ms\r\n",,terminal_output +19966,13783430,"TERMINAL",0,0,"Step 5827, loss: 0.00830972008407116, step time: 17.16136932373047ms\r\nStep 5828, loss: 0.015605738386511803, step time: 17.61150360107422ms\r\n",,terminal_output +19967,13783496,"TERMINAL",0,0,"Step 5829, loss: 0.006800110451877117, step time: 17.14801788330078ms\r\n",,terminal_output +19968,13783559,"TERMINAL",0,0,"Step 5830, loss: 0.08364129811525345, step time: 17.48347282409668ms\r\n",,terminal_output +19969,13783670,"TERMINAL",0,0,"Step 5831, loss: 0.020791105926036835, step time: 18.29385757446289ms\r\n",,terminal_output +19970,13783681,"TERMINAL",0,0,"Step 5832, loss: 0.009772480465471745, step time: 17.482757568359375ms\r\n",,terminal_output +19971,13783779,"TERMINAL",0,0,"Step 5833, loss: 0.018776172772049904, step time: 17.360925674438477ms\r\n",,terminal_output +19972,13783844,"TERMINAL",0,0,"Step 5834, loss: 0.029376499354839325, step time: 17.731189727783203ms\r\n",,terminal_output +19973,13783906,"TERMINAL",0,0,"Step 5835, loss: 0.012523760087788105, step time: 17.306804656982422ms\r\n",,terminal_output +19974,13783966,"TERMINAL",0,0,"Step 5836, loss: 0.013392068445682526, step time: 17.403364181518555ms\r\n",,terminal_output +19975,13784030,"TERMINAL",0,0,"Step 5837, loss: 0.00881533045321703, step time: 17.528533935546875ms\r\n",,terminal_output +19976,13784093,"TERMINAL",0,0,"Step 5838, loss: 0.007974393665790558, step time: 17.238855361938477ms\r\n",,terminal_output +19977,13784151,"TERMINAL",0,0,"Step 5839, loss: 0.006518093403428793, step time: 17.409801483154297ms\r\n",,terminal_output +19978,13784251,"TERMINAL",0,0,"Step 5840, loss: 0.00848216749727726, step time: 17.689228057861328ms\r\nStep 5841, loss: 0.007472889497876167, step time: 17.08817481994629ms\r\n",,terminal_output +19979,13784348,"TERMINAL",0,0,"Step 5842, loss: 0.015093029476702213, step time: 17.637252807617188ms\r\n",,terminal_output +19980,13784409,"TERMINAL",0,0,"Step 5843, loss: 0.008254645392298698, step time: 17.412662506103516ms\r\n",,terminal_output +19981,13784469,"TERMINAL",0,0,"Step 5844, loss: 0.010077184997498989, step time: 17.380714416503906ms\r\n",,terminal_output +19982,13784529,"TERMINAL",0,0,"Step 5845, loss: 0.015530950389802456, step time: 17.15683937072754ms\r\n",,terminal_output 
+19983,13784590,"TERMINAL",0,0,"Step 5846, loss: 0.01093097310513258, step time: 17.684221267700195ms\r\n",,terminal_output +19984,13784650,"TERMINAL",0,0,"Step 5847, loss: 0.006316009443253279, step time: 17.170429229736328ms\r\n",,terminal_output +19985,13784712,"TERMINAL",0,0,"Step 5848, loss: 0.027857990935444832, step time: 17.940998077392578ms\r\n",,terminal_output +19986,13784776,"TERMINAL",0,0,"Step 5849, loss: 0.017859933897852898, step time: 17.397642135620117ms\r\n",,terminal_output +19987,13784838,"TERMINAL",0,0,"Step 5850, loss: 0.011835480108857155, step time: 17.064809799194336ms\r\n",,terminal_output +19988,13784901,"TERMINAL",0,0,"Step 5851, loss: 0.007672801148146391, step time: 17.377614974975586ms\r\n",,terminal_output +19989,13784962,"TERMINAL",0,0,"Step 5852, loss: 0.013947742059826851, step time: 17.690658569335938ms\r\n",,terminal_output +19990,13785024,"TERMINAL",0,0,"Step 5853, loss: 0.021430667489767075, step time: 17.085790634155273ms\r\n",,terminal_output +19991,13785085,"TERMINAL",0,0,"Step 5854, loss: 0.012513280846178532, step time: 17.370939254760742ms\r\n",,terminal_output +19992,13785185,"TERMINAL",0,0,"Step 5855, loss: 0.08880745619535446, step time: 17.507553100585938ms\r\n",,terminal_output +19993,13785264,"TERMINAL",0,0,"Step 5856, loss: 0.005106585565954447, step time: 17.24982261657715ms\r\nStep 5857, loss: 0.015388768166303635, step time: 17.12656021118164ms\r\n",,terminal_output +19994,13785325,"TERMINAL",0,0,"Step 5858, loss: 0.014979030936956406, step time: 17.66180992126465ms\r\n",,terminal_output +19995,13785387,"TERMINAL",0,0,"Step 5859, loss: 0.018060119822621346, step time: 17.034292221069336ms\r\n",,terminal_output +19996,13785450,"TERMINAL",0,0,"Step 5860, loss: 0.10716351866722107, step time: 17.242431640625ms\r\n",,terminal_output +19997,13785544,"TERMINAL",0,0,"Step 5861, loss: 0.00990928616374731, step time: 17.40431785583496ms\r\n",,terminal_output +19998,13785653,"TERMINAL",0,0,"Step 5862, loss: 0.006517311092466116, step time: 17.15826988220215ms\r\nStep 5863, loss: 0.012221469543874264, step time: 17.310142517089844ms\r\n",,terminal_output +19999,13785716,"TERMINAL",0,0,"Step 5864, loss: 0.07085331529378891, step time: 17.647981643676758ms\r\n",,terminal_output +20000,13785830,"TERMINAL",0,0,"Step 5865, loss: 0.16200897097587585, step time: 17.287731170654297ms\r\nStep 5866, loss: 0.014690570533275604, step time: 17.448902130126953ms\r\n",,terminal_output +20001,13785891,"TERMINAL",0,0,"Step 5867, loss: 0.004408558364957571, step time: 17.67110824584961ms\r\n",,terminal_output +20002,13785954,"TERMINAL",0,0,"Step 5868, loss: 0.14170058071613312, step time: 17.36903190612793ms\r\n",,terminal_output +20003,13786017,"TERMINAL",0,0,"Step 5869, loss: 0.013724643737077713, step time: 17.122983932495117ms\r\n",,terminal_output +20004,13786082,"TERMINAL",0,0,"Step 5870, loss: 0.04461270943284035, step time: 17.574548721313477ms\r\n",,terminal_output +20005,13786145,"TERMINAL",0,0,"Step 5871, loss: 0.012452859431505203, step time: 17.392635345458984ms\r\n",,terminal_output +20006,13786210,"TERMINAL",0,0,"Step 5872, loss: 0.02114657685160637, step time: 17.06719398498535ms\r\n",,terminal_output +20007,13786275,"TERMINAL",0,0,"Step 5873, loss: 0.006158777512609959, step time: 17.519474029541016ms\r\n",,terminal_output +20008,13786339,"TERMINAL",0,0,"Step 5874, loss: 0.008777530863881111, step time: 17.256736755371094ms\r\n",,terminal_output +20009,13786401,"TERMINAL",0,0,"Step 5875, loss: 0.014242049306631088, step time: 
17.1053409576416ms\r\n",,terminal_output +20010,13786497,"TERMINAL",0,0,"Step 5876, loss: 0.004430924076586962, step time: 17.574548721313477ms\r\n",,terminal_output +20011,13786601,"TERMINAL",0,0,"Step 5877, loss: 0.10688838362693787, step time: 17.079591751098633ms\r\nStep 5878, loss: 0.0050656492821872234, step time: 17.32182502746582ms\r\n",,terminal_output +20012,13786663,"TERMINAL",0,0,"Step 5879, loss: 0.007118727546185255, step time: 17.324447631835938ms\r\n",,terminal_output +20013,13786724,"TERMINAL",0,0,"Step 5880, loss: 0.01321541890501976, step time: 17.46988296508789ms\r\n",,terminal_output +20014,13786872,"TERMINAL",0,0,"Step 5881, loss: 0.024899642914533615, step time: 17.02880859375ms\r\nStep 5882, loss: 0.014620599336922169, step time: 17.766952514648438ms\r\n",,terminal_output +20015,13786925,"TERMINAL",0,0,"Step 5883, loss: 0.01435197051614523, step time: 17.17209815979004ms\r\n",,terminal_output +20016,13786977,"TERMINAL",0,0,"Step 5884, loss: 0.0063562639988958836, step time: 17.15850830078125ms\r\n",,terminal_output +20017,13787070,"TERMINAL",0,0,"Step 5885, loss: 0.05454662814736366, step time: 17.4863338470459ms\r\n",,terminal_output +20018,13787111,"genie.py",3103,0,"",python,selection_mouse +20019,13787133,"TERMINAL",0,0,"Step 5886, loss: 0.009831337258219719, step time: 17.471790313720703ms\r\n",,terminal_output +20020,13787173,"TERMINAL",0,0,"Step 5887, loss: 0.04314025118947029, step time: 17.283201217651367ms\r\n",,terminal_output +20021,13787228,"TERMINAL",0,0,"Step 5888, loss: 0.015879284590482712, step time: 17.51399040222168ms\r\n",,terminal_output +20022,13787322,"TERMINAL",0,0,"Step 5889, loss: 0.008401652798056602, step time: 17.352819442749023ms\r\n",,terminal_output +20023,13787444,"TERMINAL",0,0,"Step 5890, loss: 0.004789966624230146, step time: 17.618179321289062ms\r\nStep 5891, loss: 0.010792248882353306, step time: 17.342090606689453ms\r\n",,terminal_output +20024,13787496,"TERMINAL",0,0,"Step 5892, loss: 0.010243106633424759, step time: 17.198562622070312ms\r\n",,terminal_output +20025,13787559,"TERMINAL",0,0,"Step 5893, loss: 0.012799395248293877, step time: 17.12179183959961ms\r\n",,terminal_output +20026,13787620,"TERMINAL",0,0,"Step 5894, loss: 0.011681596748530865, step time: 17.66514778137207ms\r\n",,terminal_output +20027,13787683,"TERMINAL",0,0,"Step 5895, loss: 0.010124491527676582, step time: 17.589807510375977ms\r\n",,terminal_output +20028,13787749,"genie.py",3150,0,"",python,selection_mouse +20029,13787760,"TERMINAL",0,0,"Step 5896, loss: 0.11921599507331848, step time: 17.608642578125ms\r\n",,terminal_output +20030,13787801,"TERMINAL",0,0,"Step 5897, loss: 0.015523347072303295, step time: 18.19133758544922ms\r\n",,terminal_output +20031,13787896,"TERMINAL",0,0,"Step 5898, loss: 0.021993644535541534, step time: 17.525672912597656ms\r\n",,terminal_output +20032,13787946,"TERMINAL",0,0,"Step 5899, loss: 0.052522096782922745, step time: 17.170429229736328ms\r\n",,terminal_output +20033,13788079,"TERMINAL",0,0,"Step 5900, loss: 0.007551178336143494, step time: 17.862319946289062ms\r\nStep 5901, loss: 0.03288984298706055, step time: 17.075300216674805ms\r\n",,terminal_output +20034,13788130,"TERMINAL",0,0,"Step 5902, loss: 0.013506893068552017, step time: 17.474889755249023ms\r\n",,terminal_output +20035,13788194,"TERMINAL",0,0,"Step 5903, loss: 0.10717842727899551, step time: 17.610788345336914ms\r\n",,terminal_output +20036,13788264,"genie.py",3138,0,"",python,selection_mouse +20037,13788297,"TERMINAL",0,0,"Step 5904, loss: 
0.018734270706772804, step time: 17.499208450317383ms\r\nStep 5905, loss: 0.015703368932008743, step time: 17.22884178161621ms\r\n",,terminal_output +20038,13788370,"TERMINAL",0,0,"Step 5906, loss: 0.009271607734262943, step time: 17.735004425048828ms\r\n",,terminal_output +20039,13788429,"TERMINAL",0,0,"Step 5907, loss: 0.016569243744015694, step time: 17.271995544433594ms\r\n",,terminal_output +20040,13788491,"TERMINAL",0,0,"Step 5908, loss: 0.02766311727464199, step time: 17.435789108276367ms\r\n",,terminal_output +20041,13788552,"TERMINAL",0,0,"Step 5909, loss: 0.029673483222723007, step time: 17.56429672241211ms\r\n",,terminal_output +20042,13788660,"TERMINAL",0,0,"Step 5910, loss: 0.03483327850699425, step time: 17.476320266723633ms\r\n",,terminal_output +20043,13788675,"TERMINAL",0,0,"Step 5911, loss: 0.010786245577037334, step time: 17.19355583190918ms\r\n",,terminal_output +20044,13788771,"genie.py",3174,0,"",python,selection_mouse +20045,13788783,"genie.py",3173,0,"",python,selection_command +20046,13788784,"TERMINAL",0,0,"Step 5912, loss: 0.062274254858493805, step time: 17.6239013671875ms\r\n",,terminal_output +20047,13788864,"TERMINAL",0,0,"Step 5913, loss: 0.014970164746046066, step time: 17.605304718017578ms\r\nStep 5914, loss: 0.021569132804870605, step time: 17.438888549804688ms\r\n",,terminal_output +20048,13788923,"TERMINAL",0,0,"Step 5915, loss: 0.019922835752367973, step time: 17.611980438232422ms\r\n",,terminal_output +20049,13789060,"TERMINAL",0,0,"Step 5916, loss: 0.00837754737585783, step time: 17.28057861328125ms\r\nStep 5917, loss: 0.009318366646766663, step time: 17.190217971801758ms\r\n",,terminal_output +20050,13789312,"genie.py",3174,0,"",python,selection_mouse +20051,13789314,"genie.py",3173,0,"",python,selection_command +20052,13789365,"TERMINAL",0,0,"Step 5918, loss: 0.015017181634902954, step time: 289.05344009399414ms\r\n",,terminal_output +20053,13789417,"TERMINAL",0,0,"Step 5919, loss: 0.04280999302864075, step time: 25.0546932220459ms\r\n",,terminal_output +20054,13789513,"TERMINAL",0,0,"Step 5920, loss: 0.008632716722786427, step time: 19.7141170501709ms\r\n",,terminal_output +20055,13789618,"TERMINAL",0,0,"Step 5921, loss: 0.011578979901969433, step time: 18.30768585205078ms\r\nStep 5922, loss: 0.009304882027208805, step time: 17.67873764038086ms\r\n",,terminal_output +20056,13789682,"TERMINAL",0,0,"Step 5923, loss: 0.010909081436693668, step time: 17.39192008972168ms\r\n",,terminal_output +20057,13789746,"TERMINAL",0,0,"Step 5924, loss: 0.011755570769309998, step time: 17.724275588989258ms\r\n",,terminal_output +20058,13789813,"TERMINAL",0,0,"Step 5925, loss: 0.004334998317062855, step time: 29.53052520751953ms\r\n",,terminal_output +20059,13789857,"genie.py",3174,0,"",python,selection_mouse +20060,13789877,"genie.py",3173,0,"",python,selection_command +20061,13789889,"TERMINAL",0,0,"Step 5926, loss: 0.030315380543470383, step time: 21.61097526550293ms\r\n",,terminal_output +20062,13789942,"TERMINAL",0,0,"Step 5927, loss: 0.01675458438694477, step time: 19.451141357421875ms\r\n",,terminal_output +20063,13789996,"TERMINAL",0,0,"Step 5928, loss: 0.025563886389136314, step time: 18.514633178710938ms\r\n",,terminal_output +20064,13790090,"TERMINAL",0,0,"Step 5929, loss: 0.004316333215683699, step time: 19.495487213134766ms\r\n",,terminal_output +20065,13790153,"TERMINAL",0,0,"Step 5930, loss: 0.01223578117787838, step time: 26.11708641052246ms\r\n",,terminal_output +20066,13790205,"TERMINAL",0,0,"Step 5931, loss: 0.03128573298454285, step time: 
19.92011070251465ms\r\n",,terminal_output +20067,13790303,"TERMINAL",0,0,"Step 5932, loss: 0.023704154416918755, step time: 17.3647403717041ms\r\n",,terminal_output +20068,13790386,"TERMINAL",0,0,"Step 5933, loss: 0.006200434640049934, step time: 17.72904396057129ms\r\nStep 5934, loss: 0.025477278977632523, step time: 17.305612564086914ms\r\n",,terminal_output +20069,13790425,"genie.py",3064,0,"",python,selection_mouse +20070,13790449,"TERMINAL",0,0,"Step 5935, loss: 0.007268658373504877, step time: 17.360210418701172ms\r\n",,terminal_output +20071,13790502,"TERMINAL",0,0,"Step 5936, loss: 0.01088944636285305, step time: 17.859220504760742ms\r\n",,terminal_output +20072,13790602,"TERMINAL",0,0,"Step 5937, loss: 0.010454714298248291, step time: 17.229795455932617ms\r\n",,terminal_output +20073,13790665,"TERMINAL",0,0,"Step 5938, loss: 0.005517264828085899, step time: 17.57025718688965ms\r\n",,terminal_output +20074,13790733,"TERMINAL",0,0,"Step 5939, loss: 0.02380884811282158, step time: 17.702817916870117ms\r\n",,terminal_output +20075,13790785,"TERMINAL",0,0,"Step 5940, loss: 0.02248242124915123, step time: 17.283916473388672ms\r\n",,terminal_output +20076,13790898,"TERMINAL",0,0,"Step 5941, loss: 0.027348576113581657, step time: 18.159866333007812ms\r\nStep 5942, loss: 0.03646715730428696, step time: 18.044233322143555ms\r\n",,terminal_output +20077,13790910,"genie.py",3061,0,"",python,selection_mouse +20078,13790950,"TERMINAL",0,0,"Step 5943, loss: 0.005933803040534258, step time: 17.2271728515625ms\r\n",,terminal_output +20079,13791016,"TERMINAL",0,0,"Step 5944, loss: 0.009500270709395409, step time: 17.080068588256836ms\r\n",,terminal_output +20080,13791068,"genie.py",3054,8," ",python,selection_mouse +20081,13791128,"TERMINAL",0,0,"Step 5945, loss: 0.008703186176717281, step time: 17.460107803344727ms\r\nStep 5946, loss: 0.0253685861825943, step time: 17.25482940673828ms\r\n",,terminal_output +20082,13791183,"genie.py",3054,62," outputs[""gt_debug""] = self.tokenizer.decode(\n ",python,selection_mouse +20083,13791197,"TERMINAL",0,0,"Step 5947, loss: 0.00737031502649188, step time: 17.1358585357666ms\r\n",,terminal_output +20084,13791209,"genie.py",3054,63," outputs[""gt_debug""] = self.tokenizer.decode(\n ",python,selection_mouse +20085,13791232,"genie.py",3054,65," outputs[""gt_debug""] = self.tokenizer.decode(\n ",python,selection_mouse +20086,13791260,"genie.py",3054,120," outputs[""gt_debug""] = self.tokenizer.decode(\n tokenizer_outputs, batch[""videos""].shape[2:4]\n )",python,selection_mouse +20087,13791283,"TERMINAL",0,0,"Step 5948, loss: 0.006977023556828499, step time: 17.611026763916016ms\r\n",,terminal_output +20088,13791331,"TERMINAL",0,0,"Step 5949, loss: 0.012228471226990223, step time: 24.03855323791504ms\r\n",,terminal_output +20089,13791385,"TERMINAL",0,0,"Step 5950, loss: 0.013712438754737377, step time: 20.142793655395508ms\r\n",,terminal_output +20090,13791476,"genie.py",3174,0,"",python,selection_mouse +20091,13791488,"genie.py",3173,0,"",python,selection_command +20092,13791499,"TERMINAL",0,0,"Step 5951, loss: 0.01949082314968109, step time: 18.55921745300293ms\r\n",,terminal_output +20093,13791596,"TERMINAL",0,0,"Step 5952, loss: 0.009903782978653908, step time: 17.6999568939209ms\r\nStep 5953, loss: 0.005458616651594639, step time: 17.473697662353516ms\r\n",,terminal_output +20094,13791635,"TERMINAL",0,0,"Step 5954, loss: 0.007100140210241079, step time: 18.173933029174805ms\r\n",,terminal_output +20095,13791652,"genie.py",3174,0,"",python,selection_mouse 
+20096,13791663,"genie.py",3173,0,"",python,selection_command +20097,13791713,"TERMINAL",0,0,"Step 5955, loss: 0.007064465899020433, step time: 23.28944206237793ms\r\n",,terminal_output +20098,13791766,"TERMINAL",0,0,"Step 5956, loss: 0.004041961394250393, step time: 24.42789077758789ms\r\n",,terminal_output +20099,13791824,"genie.py",3173,1,")",python,selection_mouse +20100,13791839,"genie.py",3174,0,"",python,selection_command +20101,13791851,"genie.py",3119,55,"tokenizer_outputs, batch[""videos""].shape[2:4]\n )",python,selection_mouse +20102,13791889,"genie.py",3062,112,"outputs[""gt_debug""] = self.tokenizer.decode(\n tokenizer_outputs, batch[""videos""].shape[2:4]\n )",python,selection_mouse +20103,13791916,"TERMINAL",0,0,"Step 5957, loss: 0.004796494264155626, step time: 24.827957153320312ms\r\nStep 5958, loss: 0.03124994970858097, step time: 24.385690689086914ms\r\n",,terminal_output +20104,13791967,"TERMINAL",0,0,"Step 5959, loss: 0.02138643153011799, step time: 25.2532958984375ms\r\n",,terminal_output +20105,13792031,"TERMINAL",0,0,"Step 5960, loss: 0.0049311514012515545, step time: 25.422334671020508ms\r\n",,terminal_output +20106,13792132,"TERMINAL",0,0,"Step 5961, loss: 0.024560032412409782, step time: 24.965524673461914ms\r\n",,terminal_output +20107,13792186,"TERMINAL",0,0,"Step 5962, loss: 0.014611504971981049, step time: 25.247573852539062ms\r\n",,terminal_output +20108,13792237,"TERMINAL",0,0,"Step 5963, loss: 0.017316360026597977, step time: 24.747610092163086ms\r\n",,terminal_output +20109,13792333,"TERMINAL",0,0,"Step 5964, loss: 0.007801870349794626, step time: 19.38652992248535ms\r\n",,terminal_output +20110,13792457,"genie.py",3062,0,"",python,selection_mouse +20111,13792457,"genie.py",3062,7,"outputs",python,selection_mouse +20112,13792476,"TERMINAL",0,0,"Step 5965, loss: 0.007963907904922962, step time: 18.015623092651367ms\r\nStep 5966, loss: 0.018492843955755234, step time: 18.36228370666504ms\r\n",,terminal_output +20113,13792540,"TERMINAL",0,0,"Step 5967, loss: 0.00342882564291358, step time: 17.39954948425293ms\r\nStep 5968, loss: 0.008098095655441284, step time: 17.23623275756836ms\r\n",,terminal_output +20114,13792606,"TERMINAL",0,0,"Step 5969, loss: 0.1267315298318863, step time: 17.586469650268555ms\r\n",,terminal_output +20115,13792669,"genie.py",3062,57,"outputs[""gt_debug""] = self.tokenizer.decode(\n ",python,selection_mouse +20116,13792677,"TERMINAL",0,0,"Step 5970, loss: 0.004286782816052437, step time: 17.443418502807617ms\r\n",,terminal_output +20117,13792689,"genie.py",3062,112,"outputs[""gt_debug""] = self.tokenizer.decode(\n tokenizer_outputs, batch[""videos""].shape[2:4]\n )",python,selection_mouse +20118,13792742,"TERMINAL",0,0,"Step 5971, loss: 0.018032493069767952, step time: 17.340660095214844ms\r\n",,terminal_output +20119,13792795,"TERMINAL",0,0,"Step 5972, loss: 0.008000928908586502, step time: 18.05901527404785ms\r\n",,terminal_output +20120,13792861,"TERMINAL",0,0,"Step 5973, loss: 0.013384540565311909, step time: 17.38262176513672ms\r\n",,terminal_output +20121,13792887,"genie.py",3174,0,"",python,selection_mouse +20122,13792902,"genie.py",3173,0,"",python,selection_command +20123,13792987,"TERMINAL",0,0,"Step 5974, loss: 0.009290657937526703, step time: 17.52448081970215ms\r\nStep 5975, loss: 0.008973256684839725, step time: 17.64845848083496ms\r\n",,terminal_output +20124,13793043,"TERMINAL",0,0,"Step 5976, loss: 0.03150199353694916, step time: 17.391204833984375ms\r\n",,terminal_output +20125,13793107,"TERMINAL",0,0,"Step 5977, 
loss: 0.027568090707063675, step time: 17.362356185913086ms\r\n",,terminal_output +20126,13793173,"TERMINAL",0,0,"Step 5978, loss: 0.0643811747431755, step time: 17.853975296020508ms\r\n",,terminal_output +20127,13793232,"TERMINAL",0,0,"Step 5979, loss: 0.010360709391534328, step time: 20.699024200439453ms\r\n",,terminal_output +20128,13793345,"TERMINAL",0,0,"Step 5980, loss: 0.05793686956167221, step time: 17.566442489624023ms\r\n",,terminal_output +20129,13793398,"TERMINAL",0,0,"Step 5981, loss: 0.031157739460468292, step time: 17.565250396728516ms\r\n",,terminal_output +20130,13793449,"TERMINAL",0,0,"Step 5982, loss: 0.00958402268588543, step time: 17.19832420349121ms\r\n",,terminal_output +20131,13793554,"TERMINAL",0,0,"Step 5983, loss: 0.03890518099069595, step time: 17.42696762084961ms\r\nStep 5984, loss: 0.06943874806165695, step time: 17.877817153930664ms\r\n",,terminal_output +20132,13793680,"genie.py",3053,0,"",python,selection_mouse +20133,13793681,"genie.py",3052,0,"",python,selection_command +20134,13793681,"TERMINAL",0,0,"Step 5985, loss: 0.03138073906302452, step time: 17.39215850830078ms\r\nStep 5986, loss: 0.024281714111566544, step time: 17.369985580444336ms\r\n",,terminal_output +20135,13793754,"TERMINAL",0,0,"Step 5987, loss: 0.005164744798094034, step time: 17.66180992126465ms\r\n",,terminal_output +20136,13793802,"TERMINAL",0,0,"Step 5988, loss: 0.02922438271343708, step time: 17.52758026123047ms\r\n",,terminal_output +20137,13793869,"TERMINAL",0,0,"Step 5989, loss: 0.028137752786278725, step time: 17.322301864624023ms\r\n",,terminal_output +20138,13793940,"TERMINAL",0,0,"Step 5990, loss: 0.015666378661990166, step time: 17.735958099365234ms\r\n",,terminal_output +20139,13793993,"TERMINAL",0,0,"Step 5991, loss: 0.0142561886459589, step time: 17.197370529174805ms\r\n",,terminal_output +20140,13794061,"TERMINAL",0,0,"Step 5992, loss: 0.016783373430371284, step time: 17.165660858154297ms\r\n",,terminal_output +20141,13794118,"TERMINAL",0,0,"Step 5993, loss: 0.013436011038720608, step time: 17.51255989074707ms\r\n",,terminal_output +20142,13794241,"TERMINAL",0,0,"Step 5994, loss: 0.013317612931132317, step time: 17.17209815979004ms\r\nStep 5995, loss: 0.04905766248703003, step time: 17.302989959716797ms\r\n",,terminal_output +20143,13794304,"TERMINAL",0,0,"Step 5996, loss: 0.02619769610464573, step time: 17.636775970458984ms\r\n",,terminal_output +20144,13794369,"TERMINAL",0,0,"Step 5997, loss: 0.04961520805954933, step time: 17.12632179260254ms\r\n",,terminal_output +20145,13794413,"genie.py",3053,0,"\n ",python,content +20146,13794447,"TERMINAL",0,0,"Step 5998, loss: 0.24325548112392426, step time: 17.40860939025879ms\r\n",,terminal_output +20147,13794505,"TERMINAL",0,0,"Step 5999, loss: 0.0531868077814579, step time: 17.739057540893555ms\r\n",,terminal_output +20148,13794649,"genie.py",3062,0,"b",python,content +20149,13794651,"genie.py",3063,0,"",python,selection_keyboard +20150,13794940,"genie.py",3062,1,"",python,content +20151,13795166,"genie.py",3062,0,"j",python,content +20152,13795167,"genie.py",3063,0,"",python,selection_keyboard +20153,13795183,"genie.py",3063,0,"a",python,content +20154,13795184,"genie.py",3064,0,"",python,selection_keyboard +20155,13795334,"genie.py",3064,0,"x",python,content +20156,13795336,"genie.py",3065,0,"",python,selection_keyboard +20157,13795470,"genie.py",3065,0,".",python,content +20158,13795472,"genie.py",3066,0,"",python,selection_keyboard +20159,13795781,"genie.py",3066,0,"d",python,content 
+20160,13795782,"genie.py",3067,0,"",python,selection_keyboard +20161,13795914,"genie.py",3067,0,"e",python,content +20162,13795916,"genie.py",3068,0,"",python,selection_keyboard +20163,13795987,"genie.py",3068,0,"b",python,content +20164,13795988,"genie.py",3069,0,"",python,selection_keyboard +20165,13796112,"genie.py",3069,0,"u",python,content +20166,13796113,"genie.py",3070,0,"",python,selection_keyboard +20167,13796199,"genie.py",3070,0,"g",python,content +20168,13796200,"genie.py",3071,0,"",python,selection_keyboard +20169,13796358,"genie.py",3071,0,".",python,content +20170,13796359,"genie.py",3072,0,"",python,selection_keyboard +20171,13796632,"genie.py",3072,0,"b",python,content +20172,13796634,"genie.py",3073,0,"",python,selection_keyboard +20173,13796728,"genie.py",3073,0,"r",python,content +20174,13796729,"genie.py",3074,0,"",python,selection_keyboard +20175,13797010,"genie.py",3074,0,"a",python,content +20176,13797013,"genie.py",3075,0,"",python,selection_keyboard +20177,13797364,"genie.py",3072,3,"breakpoint",python,content +20178,13797456,"TERMINAL",0,0,"Step 6000, loss: 0.03281889483332634, step time: 25.756120681762695ms\r\n",,terminal_output +20179,13797521,"TERMINAL",0,0,"Step 6001, loss: 0.03803735971450806, step time: 24.784088134765625ms\r\n",,terminal_output +20180,13797585,"TERMINAL",0,0,"Step 6002, loss: 0.02973628044128418, step time: 19.362926483154297ms\r\n",,terminal_output +20181,13797664,"TERMINAL",0,0,"Step 6003, loss: 0.10651902109384537, step time: 18.95761489868164ms\r\n",,terminal_output +20182,13797735,"TERMINAL",0,0,"Step 6004, loss: 0.03680801764130592, step time: 18.354177474975586ms\r\n",,terminal_output +20183,13797800,"TERMINAL",0,0,"Step 6005, loss: 0.03325972333550453, step time: 18.199920654296875ms\r\n",,terminal_output +20184,13797896,"TERMINAL",0,0,"Step 6006, loss: 0.02638055570423603, step time: 17.96865463256836ms\r\n",,terminal_output +20185,13797974,"TERMINAL",0,0,"Step 6007, loss: 0.06773509830236435, step time: 17.718076705932617ms\r\nStep 6008, loss: 0.01052129827439785, step time: 16.924142837524414ms\r\n",,terminal_output +20186,13798034,"genie.py",3082,0,"()",python,content +20187,13798035,"genie.py",3083,0,"",python,selection_keyboard +20188,13798035,"TERMINAL",0,0,"Step 6009, loss: 0.027287309989333153, step time: 18.072843551635742ms\r\n",,terminal_output +20189,13798098,"genie.py",3083,1,")",python,content +20190,13798099,"genie.py",3084,0,"",python,selection_keyboard +20191,13798160,"TERMINAL",0,0,"Step 6010, loss: 0.07303223758935928, step time: 17.32492446899414ms\r\nStep 6011, loss: 0.004962676204741001, step time: 17.34757423400879ms\r\n",,terminal_output +20192,13798212,"TERMINAL",0,0,"Step 6012, loss: 0.01234908401966095, step time: 16.888141632080078ms\r\n",,terminal_output +20193,13798278,"TERMINAL",0,0,"Step 6013, loss: 0.012088642455637455, step time: 17.569541931152344ms\r\n",,terminal_output +20194,13798344,"TERMINAL",0,0,"Step 6014, loss: 0.0197502039372921, step time: 16.78013801574707ms\r\n",,terminal_output +20195,13798411,"TERMINAL",0,0,"Step 6015, loss: 0.013219320215284824, step time: 17.35377311706543ms\r\n",,terminal_output +20196,13798483,"TERMINAL",0,0,"Step 6016, loss: 0.01943977363407612, step time: 17.37499237060547ms\r\n",,terminal_output +20197,13798606,"TERMINAL",0,0,"Step 6017, loss: 0.011429278180003166, step time: 17.150163650512695ms\r\nStep 6018, loss: 0.020741969347000122, step time: 16.855239868164062ms\r\n",,terminal_output +20198,13798657,"TERMINAL",0,0,"Step 6019, loss: 
7.748572897980921e-06, step time: 17.40407943725586ms\r\n",,terminal_output +20199,13798723,"TERMINAL",0,0,"Step 6020, loss: 0.009623986668884754, step time: 16.850948333740234ms\r\n",,terminal_output +20200,13798789,"TERMINAL",0,0,"Step 6021, loss: 0.019052553921937943, step time: 17.230749130249023ms\r\n",,terminal_output +20201,13798864,"TERMINAL",0,0,"Step 6022, loss: 0.006767864804714918, step time: 17.798900604248047ms\r\n",,terminal_output +20202,13798918,"TERMINAL",0,0,"Step 6023, loss: 0.013302640989422798, step time: 17.433881759643555ms\r\n",,terminal_output +20203,13798977,"TERMINAL",0,0,"Step 6024, loss: 0.009682838805019855, step time: 17.05789566040039ms\r\n",,terminal_output +20204,13799041,"TERMINAL",0,0,"Step 6025, loss: 0.008907072246074677, step time: 17.5173282623291ms\r\n",,terminal_output +20205,13799057,"TERMINAL",0,0,"bash",,terminal_focus +20206,13799109,"TERMINAL",0,0,"Step 6026, loss: 0.02798321098089218, step time: 16.83640480041504ms\r\n",,terminal_output +20207,13799161,"TERMINAL",0,0,"Step 6027, loss: 0.028247425332665443, step time: 17.046213150024414ms\r\n",,terminal_output +20208,13799226,"TERMINAL",0,0,"Step 6028, loss: 0.014190238900482655, step time: 17.386674880981445ms\r\n",,terminal_output +20209,13799301,"TERMINAL",0,0,"Step 6029, loss: 0.009401786141097546, step time: 17.099380493164062ms\r\n",,terminal_output +20210,13799365,"TERMINAL",0,0,"Step 6030, loss: 0.0070915366522967815, step time: 16.817331314086914ms\r\n",,terminal_output +20211,13799419,"TERMINAL",0,0,"Step 6031, loss: 0.03999242186546326, step time: 17.444372177124023ms\r\n",,terminal_output +20212,13799484,"TERMINAL",0,0,"Step 6032, loss: 0.05517091974616051, step time: 17.034053802490234ms\r\n",,terminal_output +20213,13799550,"TERMINAL",0,0,"Step 6033, loss: 0.005752784200012684, step time: 17.217159271240234ms\r\n",,terminal_output +20214,13799615,"TERMINAL",0,0,"Step 6034, loss: 0.03819815441966057, step time: 17.476797103881836ms\r\n",,terminal_output +20215,13799680,"TERMINAL",0,0,"Step 6035, loss: 0.03286392614245415, step time: 17.274856567382812ms\r\n",,terminal_output +20216,13799743,"TERMINAL",0,0,"Step 6036, loss: 0.0038955623749643564, step time: 17.002582550048828ms\r\n",,terminal_output +20217,13799808,"TERMINAL",0,0,"Step 6037, loss: 0.007500573992729187, step time: 17.686843872070312ms\r\n",,terminal_output +20218,13799875,"TERMINAL",0,0,"Step 6038, loss: 0.0034855881240218878, step time: 16.9222354888916ms\r\n",,terminal_output +20219,13799939,"TERMINAL",0,0,"Step 6039, loss: 0.006167997606098652, step time: 17.081022262573242ms\r\n",,terminal_output +20220,13799999,"TERMINAL",0,0,"Step 6040, loss: 0.009051129221916199, step time: 17.332792282104492ms\r\n",,terminal_output +20221,13800131,"TERMINAL",0,0,"Step 6041, loss: 0.009214970283210278, step time: 17.162561416625977ms\r\n",,terminal_output +20222,13800172,"TERMINAL",0,0,"Step 6042, loss: 0.013004808686673641, step time: 16.930341720581055ms\r\nStep 6043, loss: 0.007362025324255228, step time: 17.389774322509766ms\r\n",,terminal_output +20223,13800238,"TERMINAL",0,0,"Step 6044, loss: 0.026455732062458992, step time: 16.866445541381836ms\r\n",,terminal_output +20224,13800312,"TERMINAL",0,0,"Step 6045, loss: 0.0011763926595449448, step time: 17.142295837402344ms\r\n",,terminal_output +20225,13800368,"TERMINAL",0,0,"Step 6046, loss: 0.005056116729974747, step time: 17.2882080078125ms\r\n",,terminal_output +20226,13800489,"TERMINAL",0,0,"Step 6047, loss: 0.021704604849219322, step time: 
17.094135284423828ms\r\nStep 6048, loss: 0.013177989982068539, step time: 16.889095306396484ms\r\n",,terminal_output +20227,13800549,"TERMINAL",0,0,"Step 6049, loss: 0.01969807781279087, step time: 17.517566680908203ms\r\n",,terminal_output +20228,13800613,"TERMINAL",0,0,"Step 6050, loss: 0.048165202140808105, step time: 16.827106475830078ms\r\n",,terminal_output +20229,13800705,"TERMINAL",0,0,"Step 6051, loss: 0.010030205361545086, step time: 17.22574234008789ms\r\n",,terminal_output +20230,13800812,"TERMINAL",0,0,"Step 6052, loss: 0.009823277592658997, step time: 17.226219177246094ms\r\nStep 6053, loss: 0.009045226499438286, step time: 17.33851432800293ms\r\n",,terminal_output +20231,13800863,"TERMINAL",0,0,"Step 6054, loss: 0.011947626248002052, step time: 16.88385009765625ms\r\n",,terminal_output +20232,13800952,"TERMINAL",0,0,"Step 6055, loss: 0.0112826619297266, step time: 17.763137817382812ms\r\n",,terminal_output +20233,13801047,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +20234,13801063,"TERMINAL",0,0,"Step 6056, loss: 0.0103284427896142, step time: 16.952991485595703ms\r\n",,terminal_output +20235,13801115,"TERMINAL",0,0,"Step 6057, loss: 0.006632729433476925, step time: 17.130374908447266ms\r\nStep 6058, loss: 0.005012488458305597, step time: 17.357349395751953ms\r\n",,terminal_output +20236,13801191,"TERMINAL",0,0,"Step 6059, loss: 0.0063479007221758366, step time: 17.21668243408203ms\r\n",,terminal_output +20237,13801273,"TERMINAL",0,0,"Step 6060, loss: 0.008230818435549736, step time: 19.23203468322754ms\r\n",,terminal_output +20238,13801383,"TERMINAL",0,0,"Step 6061, loss: 0.012935138307511806, step time: 18.00704002380371ms\r\nStep 6062, loss: 0.004664822015911341, step time: 17.08817481994629ms\r\n",,terminal_output +20239,13801436,"TERMINAL",0,0,"Step 6063, loss: 0.013239205814898014, step time: 17.157554626464844ms\r\n",,terminal_output +20240,13801555,"TERMINAL",0,0,"Step 6064, loss: 0.030062733218073845, step time: 17.276763916015625ms\r\n",,terminal_output +20241,13801602,"TERMINAL",0,0,"Step 6065, loss: 0.0070245652459561825, step time: 17.03357696533203ms\r\n",,terminal_output +20242,13801660,"TERMINAL",0,0,"Step 6066, loss: 0.033075373619794846, step time: 17.088890075683594ms\r\n",,terminal_output +20243,13801722,"TERMINAL",0,0,"Step 6067, loss: 0.013363007456064224, step time: 17.59815216064453ms\r\n",,terminal_output +20244,13801827,"TERMINAL",0,0,"Step 6068, loss: 0.011618330143392086, step time: 17.058372497558594ms\r\nStep 6069, loss: 0.00973579566925764, step time: 17.028093338012695ms\r\n",,terminal_output +20245,13801890,"TERMINAL",0,0,"Step 6070, loss: 0.02976190112531185, step time: 17.477750778198242ms\r\n",,terminal_output +20246,13801981,"TERMINAL",0,0,"Step 6071, loss: 0.022078704088926315, step time: 17.12346076965332ms\r\n",,terminal_output +20247,13802027,"TERMINAL",0,0,"srun",,terminal_focus +20248,13802041,"TERMINAL",0,0,"Step 6072, loss: 0.006136651150882244, step time: 16.9985294342041ms\r\n",,terminal_output +20249,13802091,"TERMINAL",0,0,"Step 6073, loss: 0.01717192307114601, step time: 17.576217651367188ms\r\n",,terminal_output +20250,13802142,"TERMINAL",0,0,"Step 6074, loss: 0.004770131316035986, step time: 16.974210739135742ms\r\n",,terminal_output +20251,13802208,"TERMINAL",0,0,"Step 6075, loss: 0.015425723046064377, step time: 17.171144485473633ms\r\n",,terminal_output +20252,13802326,"TERMINAL",0,0,"Step 6076, loss: 0.0054726870730519295, step time: 17.3492431640625ms\r\nStep 6077, loss: 0.014085324481129646, 
step time: 17.149686813354492ms\r\n",,terminal_output +20253,13802389,"TERMINAL",0,0,"Step 6078, loss: 0.0022805596236139536, step time: 16.92366600036621ms\r\n",,terminal_output +20254,13802456,"TERMINAL",0,0,"Step 6079, loss: 0.033322758972644806, step time: 17.449617385864258ms\r\n",,terminal_output +20255,13802523,"TERMINAL",0,0,"Step 6080, loss: 0.010584118776023388, step time: 16.966819763183594ms\r\n",,terminal_output +20256,13802583,"TERMINAL",0,0,"Step 6081, loss: 0.009689110331237316, step time: 17.03047752380371ms\r\n",,terminal_output +20257,13802724,"TERMINAL",0,0,"Step 6082, loss: 0.03918740153312683, step time: 17.416000366210938ms\r\nStep 6083, loss: 0.009909478016197681, step time: 17.09580421447754ms\r\n^C",,terminal_output +20258,13802788,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 241, in <module>\r\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 341, in __format__\r\n return format(self._value[()], format_spec)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 641, in _value\r\n npy_value, did_copy = self._single_device_array_to_np_array_did_copy()\r\nKeyboardInterrupt\r\n",,terminal_output +20259,13802936,"TERMINAL",0,0,"^CException ignored in atexit callback: .teardown_atexit at 0x1482340b25f0>\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 94, in teardown_atexit\r\n conn.teardown(hooks.exit_code)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 226, in teardown\r\n self._router.join()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/interface/router.py"", line 75, in join\r\n self._thread.join()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1096, in join\r\n self._wait_for_tstate_lock()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1116, in _wait_for_tstate_lock\r\n if lock.acquire(block, timeout):\r\nKeyboardInterrupt: \r\n",,terminal_output +20260,13803206,"TERMINAL",0,0,"^CException ignored in: <function WeakKeyDictionary.__init__.<locals>.remove at 0x14830cc7e710>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +20261,13803336,"TERMINAL",0,0,"^CException ignored in: <function WeakKeyDictionary.__init__.<locals>.remove at 0x14830cc7e710>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: 
\r\n",,terminal_output +20262,13803837,"TERMINAL",0,0,"^CException ignored in atexit callback: \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3169, in clean_up\r\n clear_caches()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3196, in clear_caches\r\n xc._xla.PjitFunctionCache.clear_all()\r\nKeyboardInterrupt: \r\n",,terminal_output +20263,13804036,"TERMINAL",0,0,"^C",,terminal_output +20264,13804243,"TERMINAL",0,0,"^C",,terminal_output +20265,13804357,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +20266,13804977,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +20267,13805653,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +20268,13805725,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3458179\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0408\r\nSLURM_JOB_START_TIME=1751296933\r\nSLURM_STEP_NODELIST=hkn0408\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300533\r\nSLURM_PMI2_SRUN_PORT=38655\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306855\r\nSLURM_PTY_PORT=36241\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e12.hkn0408\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0408\r\nSLURM_SRUN_COMM_PORT=42547\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306855\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0408\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=42547\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0408\r\n",,terminal_output +20269,13808560,"TERMINAL",0,0,"^C",,terminal_output +20270,13808800,"TERMINAL",0,0,"2025-06-30 18:14:13.159237: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751300053.172590 3509266 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751300053.176791 3509266 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751300053.189058 3509266 computation_placer.cc:177] computation 
placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751300053.189076 3509266 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751300053.189078 3509266 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751300053.189080 3509266 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 22, in <module>\r\n from utils.dataloader import get_dataloader\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/dataloader.py"", line 4, in <module>\r\n import tensorflow as tf\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/__init__.py"", line 40, in <module>\r\n from tensorflow.python import pywrap_tensorflow as _pywrap_tensorflow # pylint: disable=unused-import\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/pywrap_tensorflow.py"", line 37, in <module>\r\n self_check.preload_check()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/platform/self_check.py"", line 63, in preload_check\r\n from tensorflow.python.platform import _pywrap_cpu_feature_guard\r\nKeyboardInterrupt\r\n",,terminal_output +20271,13808955,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +20272,13809587,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +20273,13810189,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output
+20274,13810302,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3458179\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0408\r\nSLURM_JOB_START_TIME=1751296933\r\nSLURM_STEP_NODELIST=hkn0408\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751300533\r\nSLURM_PMI2_SRUN_PORT=38655\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3306855\r\nSLURM_PTY_PORT=36241\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e12.hkn0408\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=202\r\nSLURM_NODELIST=hkn0408\r\nSLURM_SRUN_COMM_PORT=42547\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3306855\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0408\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=42547\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0408\r\n",,terminal_output +20275,13812255,"TERMINAL",0,0,"2025-06-30 18:14:16.671587: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751300056.684678 3509297 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751300056.689004 3509297 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n",,terminal_output +20276,13812311,"TERMINAL",0,0,"W0000 00:00:1751300056.700637 3509297 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751300056.700657 3509297 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751300056.700659 3509297 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751300056.700661 3509297 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +20277,13812581,"genie.py",0,0,"",python,tab +20278,13814006,"genie.py",3053,0,"",python,selection_mouse +20279,13814541,"genie.py",2733,0,"",python,selection_mouse +20280,13815150,"genie.py",2742,0,"",python,selection_mouse +20281,13815710,"genie.py",2721,0,"",python,selection_mouse +20282,13817679,"TERMINAL",0,0,"W0000 00:00:1751300062.057815 3509297 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +20283,13818089,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +20284,13819011,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +20285,13819776,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_181423-5hu6xzzt\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-tiny-overfit-big-lr-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/5hu6xzzt\r\n",,terminal_output +20286,13821112,"TERMINAL",0,0,"2025-06-30 18:14:25.525307: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +20287,13833960,"TERMINAL",0,0,"2025-06-30 18:14:38.361931: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +20288,13849231,"TERMINAL",0,0,"2025-06-30 18:14:53.596503: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +20289,13856201,"TERMINAL",0,0,"2025-06-30 18:15:00.627786: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +20290,13859972,"TERMINAL",0,0,"Entering jdb:\r\n(jdb) ",,terminal_output +20291,13875855,"TERMINAL",0,0,"l",,terminal_output +20292,13875980,"TERMINAL",0,0,"\r\n> /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py(93)\r\n dyna_outputs = self.dynamics(outputs, training)\r\n outputs.update(dyna_outputs)\r\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\r\n outputs[""recon""] = self.tokenizer.decode(\r\n mle_indices, batch[""videos""].shape[2:4]\r\n )\r\n-> jax.debug.breakpoint()\r\n outputs[""gt_debug""] = self.tokenizer.decode(\r\n tokenizer_outputs, batch[""videos""].shape[2:4]\r\n )\r\n return outputs\r\n \r\n(jdb) ",,terminal_output +20293,13877982,"TERMINAL",0,0,"m",,terminal_output +20294,13878207,"TERMINAL",0,0,"l",,terminal_output +20295,13878285,"TERMINAL",0,0,"e",,terminal_output +20296,13878629,"TERMINAL",0,0,"_",,terminal_output +20297,13878963,"TERMINAL",0,0,"i",,terminal_output +20298,13879031,"TERMINAL",0,0,"n",,terminal_output +20299,13879191,"TERMINAL",0,0,"d",,terminal_output +20300,13879754,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +20301,13879913,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +20302,13880087,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20303,13880272,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +20304,13880385,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +20305,13880672,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +20306,13880729,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +20307,13880869,"TERMINAL",0,0,"[?25la[?25h[?25lp[?25h",,terminal_output +20308,13881090,"TERMINAL",0,0,"[?25le[?25h\r\n(1, 16, 920)\r\n(jdb) ",,terminal_output +20309,13882110,"TERMINAL",0,0,"\rmle_indices.shape",,terminal_output +20310,13889018,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +20311,13889283,"TERMINAL",0,0,"[?25lp\r[?25h",,terminal_output +20312,13890072,"TERMINAL",0,0,"[?25lha\r[?25h[?25lh\r[?25h[?25ls\r[?25h[?25l.\r[?25h[?25ls\r[?25h[?25le\r[?25h[?25lc\r[?25h[?25li\r[?25h[?25ld\r[?25h[?25ln\r[?25h[?25li\r[?25h[?25l_\r[?25h\r",,terminal_output +20313,13890330,"TERMINAL",0,0,"[?25ll\r[?25h",,terminal_output +20314,13890442,"TERMINAL",0,0,"[?25lm\r[?25h",,terminal_output +20315,13890590,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +20316,13890894,"TERMINAL",0,0,"[?25l9[?25h[?25l0[?25h",,terminal_output +20317,13891896,"TERMINAL",0,0,"[?25l*[?25h",,terminal_output +20318,13892773,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +20319,13893735,"TERMINAL",0,0,"[?25l6[?25h",,terminal_output +20320,13893786,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +20321,13894071,"TERMINAL",0,0,"\r\n14400\r\n(jdb) ",,terminal_output +20322,13904790,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +20323,13904964,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +20324,13905087,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +20325,13905270,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +20326,13905421,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +20327,13905487,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +20328,13905730,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +20329,13906078,"TERMINAL",0,0,"[?25l[[?25h",,terminal_output +20330,13906457,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +20331,13906882,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +20332,13907047,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20333,13907491,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output 
+20334,13907617,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +20335,13907794,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +20336,13908180,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +20337,13908459,"TERMINAL",0,0,"[?25l][?25h",,terminal_output +20338,13908705,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +20339,13908982,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +20340,13909220,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +20341,13909285,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +20342,13909416,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +20343,13909488,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20344,13909597,"TERMINAL",0,0,"\r\n(1, 16, 90, 160, 3)\r\n(jdb) ",,terminal_output +20345,13912999,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +20346,13913114,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +20347,13913239,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +20348,13913305,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20349,13913507,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +20350,13913669,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +20351,13913900,"TERMINAL",0,0,"[?25lz[?25h",,terminal_output +20352,13914072,"TERMINAL",0,0,"[?25le[?25h[?25lr[?25h",,terminal_output +20353,13914447,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +20354,13914930,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +20355,13915142,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +20356,13915266,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +20357,13915390,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +20358,13915642,"TERMINAL",0,0,"[?25lu[?25h[?25lt[?25h",,terminal_output +20359,13915927,"TERMINAL",0,0,"[?25ls[?25h[?25l.[?25h",,terminal_output +20360,13916241,"TERMINAL",0,0,"[?25ls[?25h[?25lh[?25h",,terminal_output +20361,13916460,"TERMINAL",0,0,"[?25la[?25h[?25lp[?25h",,terminal_output +20362,13916606,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20363,13916674,"TERMINAL",0,0,"\r\n*** AttributeError: 'dict' object has no attribute 'shape'\r\n(jdb) ",,terminal_output +20364,13918044,"TERMINAL",0,0,"\rtokenizer_outputs.shape",,terminal_output +20365,13918365,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +20366,13918558,"TERMINAL",0,0,"[?25lp\r[?25h",,terminal_output +20367,13918667,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +20368,13918936,"TERMINAL",0,0,"[?25lsh\r[?25h\r",,terminal_output +20369,13919110,"TERMINAL",0,0,"[?25l.\r[?25h",,terminal_output +20370,13919717,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +20371,13919871,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +20372,13920013,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20373,13920219,"TERMINAL",0,0,"[?25ly[?25h",,terminal_output +20374,13920581,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +20375,13921160,"TERMINAL",0,0,"[?25l([?25h",,terminal_output +20376,13921161,"TERMINAL",0,0,"[?25l)[?25h",,terminal_output +20377,13921452,"TERMINAL",0,0,"\r\ndict_keys(['emb', 'indices', 'z', 'z_q'])\r\n(jdb) ",,terminal_output +20378,13922897,"TERMINAL",0,0,"\rtokenizer_outputs.keys()",,terminal_output +20379,13923701,"TERMINAL",0,0,"[?25l)\r[?25h",,terminal_output +20380,13924358,"TERMINAL",0,0,"[?25ls(\r[?25h[?25ls\r[?25h[?25ly\r[?25h[?25le\r[?25h[?25lk\r[?25h[?25l.\r[?25h\r",,terminal_output +20381,13924514,"TERMINAL",0,0,"[?25lt\r[?25h",,terminal_output +20382,13925344,"TERMINAL",0,0,"\r",,terminal_output +20383,13925923,"TERMINAL",0,0,"[?25l\r[?25h",,terminal_output +20384,13926818,"TERMINAL",0,0,"[?25lt[?25h[?25ls[?25h",,terminal_output +20385,13927279,"TERMINAL",0,0,"[?25l[[?25h",,terminal_output 
+20386,13927762,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +20387,13928463,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20388,13928788,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +20389,13929026,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +20390,13929418,"TERMINAL",0,0,"[?25l""[?25h",,terminal_output +20391,13929977,"TERMINAL",0,0,"[?25l][?25h",,terminal_output +20392,13930220,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +20393,13930435,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +20394,13930543,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +20395,13930609,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +20396,13930833,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20397,13931179,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +20398,13931332,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +20399,13931398,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20400,13931513,"TERMINAL",0,0,"\r\n(14720, 32)\r\n(jdb) ",,terminal_output +20401,13933626,"TERMINAL",0,0,"\rtokenizer_outputs[""emb""].shape",,terminal_output +20402,13936113,"TERMINAL",0,0,"[?25lb\r""[?25h",,terminal_output +20403,13936269,"TERMINAL",0,0,"[?25lm\r""[?25h",,terminal_output +20404,13936394,"TERMINAL",0,0,"[?25le\r""[?25h",,terminal_output +20405,13936919,"TERMINAL",0,0,"[?25l""\r[1@u""[?25h",,terminal_output +20406,13937306,"TERMINAL",0,0,"[?25lu\r""[?25h",,terminal_output +20407,13937510,"TERMINAL",0,0,"[?25l""\r[1@z""[?25h",,terminal_output +20408,13937865,"TERMINAL",0,0,"[?25l""\r[1@_""[?25h",,terminal_output +20409,13938121,"TERMINAL",0,0,"[?25l""\r[1@q""[?25h",,terminal_output +20410,13938375,"TERMINAL",0,0,"\r\n(1, 16, 920, 32)\r\n(jdb) ",,terminal_output +20411,13952210,"TERMINAL",0,0,"\rtokenizer_outputs[""z_q""].shape",,terminal_output +20412,13954939,"TERMINAL",0,0,"[?25lq\r""[?25h",,terminal_output +20413,13955163,"TERMINAL",0,0,"[?25l_\r""[?25h",,terminal_output +20414,13955281,"TERMINAL",0,0,"[?25lz\r""[?25h",,terminal_output +20415,13955603,"TERMINAL",0,0,"[?25l""\r[1@i""[?25h",,terminal_output +20416,13955717,"TERMINAL",0,0,"[?25l""\r[1@n""[?25h",,terminal_output +20417,13955911,"TERMINAL",0,0,"[?25l""\r[1@d""[?25h",,terminal_output +20418,13956077,"TERMINAL",0,0,"[?25l""\r[1@i""[?25h",,terminal_output +20419,13956234,"TERMINAL",0,0,"[?25l""\r[1@c""[?25h",,terminal_output +20420,13956357,"TERMINAL",0,0,"[?25l""\r[1@e""[?25h",,terminal_output +20421,13956547,"TERMINAL",0,0,"[?25l""\r[1@s""[?25h",,terminal_output +20422,13956612,"TERMINAL",0,0,"\r\n(1, 16, 920)\r\n(jdb) ",,terminal_output +20423,13968774,"train_dynamics.py",0,0,"",python,tab +20424,13971949,"genie.py",0,0,"",python,tab +20425,13973644,"genie.py",3167,0,"",python,selection_mouse +20426,13973997,"genie.py",3167,0,"tokenizer_outputs[""indices""]",python,content +20427,13975565,"genie.py",3167,0,"",python,selection_mouse +20428,13975725,"genie.py",3166,1,"s",python,selection_mouse +20429,13975741,"genie.py",3164,3,"uts",python,selection_mouse +20430,13975756,"genie.py",3162,5,"tputs",python,selection_mouse +20431,13975778,"genie.py",3159,8,"_outputs",python,selection_mouse +20432,13975794,"genie.py",3158,9,"r_outputs",python,selection_mouse +20433,13975805,"genie.py",3157,10,"er_outputs",python,selection_mouse +20434,13975821,"genie.py",3156,11,"zer_outputs",python,selection_mouse +20435,13975856,"genie.py",3155,12,"izer_outputs",python,selection_mouse +20436,13975886,"genie.py",3154,13,"nizer_outputs",python,selection_mouse +20437,13975913,"genie.py",3153,14,"enizer_outputs",python,selection_mouse 
+20438,13975936,"genie.py",3152,15,"kenizer_outputs",python,selection_mouse +20439,13975960,"genie.py",3151,16,"okenizer_outputs",python,selection_mouse +20440,13975986,"genie.py",3150,17,"tokenizer_outputs",python,selection_mouse +20441,13976604,"genie.py",3150,17,"",python,content +20442,13977643,"genie.py",3216,0,"",python,selection_mouse +20443,13978168,"genie.py",3152,0,"",python,selection_mouse +20444,13978665,"genie.py",3101,0,"",python,selection_mouse +20445,13978821,"genie.py",3100,2,"[""",python,selection_mouse +20446,13979455,"genie.py",3068,0,"",python,selection_mouse +20447,13980096,"genie.py",3084,0,"",python,selection_mouse +20448,13980245,"genie.py",3083,1,")",python,selection_mouse +20449,13980258,"genie.py",3081,3,"t()",python,selection_mouse +20450,13980316,"genie.py",3078,6,"oint()",python,selection_mouse +20451,13980317,"genie.py",3074,10,"eakpoint()",python,selection_mouse +20452,13980317,"genie.py",3069,15,"ug.breakpoint()",python,selection_mouse +20453,13980323,"genie.py",3067,17,"ebug.breakpoint()",python,selection_mouse +20454,13980340,"genie.py",3064,20,"x.debug.breakpoint()",python,selection_mouse +20455,13980396,"genie.py",3062,22,"jax.debug.breakpoint()",python,selection_mouse +20456,13980397,"genie.py",3061,23," jax.debug.breakpoint()",python,selection_mouse +20457,13980397,"genie.py",3060,24," jax.debug.breakpoint()",python,selection_mouse +20458,13980457,"genie.py",3059,25," jax.debug.breakpoint()",python,selection_mouse +20459,13980637,"genie.py",3060,24," jax.debug.breakpoint()",python,selection_mouse +20460,13980694,"genie.py",3061,23," jax.debug.breakpoint()",python,selection_mouse +20461,13980714,"genie.py",3062,22,"jax.debug.breakpoint()",python,selection_mouse +20462,13980823,"genie.py",3063,21,"ax.debug.breakpoint()",python,selection_mouse +20463,13981261,"genie.py",3063,0,"",python,selection_mouse +20464,13981261,"genie.py",3062,3,"jax",python,selection_mouse +20465,13981462,"genie.py",3062,4,"jax.",python,selection_mouse +20466,13981476,"genie.py",3062,9,"jax.debug",python,selection_mouse +20467,13981533,"genie.py",3062,10,"jax.debug.",python,selection_mouse +20468,13981533,"genie.py",3062,20,"jax.debug.breakpoint",python,selection_mouse +20469,13981624,"genie.py",3062,22,"jax.debug.breakpoint()",python,selection_mouse +20470,13982068,"genie.py",3084,0,"",python,selection_mouse +20471,13982522,"genie.py",3083,0,"",python,selection_command +20472,13983036,"genie.py",3054,31,"",python,content +20473,13983053,"genie.py",3062,0,"",python,selection_command +20474,13984780,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +20475,13985850,".gitignore",0,0,"",ignore,tab +20476,13986439,"scripts_horeka/train_lam.sh",0,0,"",shellscript,tab +20477,13987677,"utils/dataloader.py",0,0,"",python,tab +20478,13988498,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +20479,13990339,"train_dynamics.py",0,0,"",python,tab +20480,13991387,"train_dynamics.py",2793,0,"",python,selection_mouse +20481,13991933,"train_dynamics.py",2594,0,"",python,selection_mouse +20482,13994695,"train_dynamics.py",2216,0,"",python,selection_mouse +20483,13994830,"train_dynamics.py",2212,7,"outputs",python,selection_mouse +20484,14000329,"train_dynamics.py",2233,0,"",python,selection_mouse +20485,14000492,"train_dynamics.py",2232,7,"ce_loss",python,selection_mouse +20486,14001002,"train_dynamics.py",2207,0,"",python,selection_mouse +20487,14001152,"train_dynamics.py",2205,4,"mask",python,selection_mouse 
+20488,14006522,"train_dynamics.py",2247,0,"",python,selection_mouse +20489,14007120,"train_dynamics.py",2214,0,"",python,selection_mouse +20490,14009476,"genie.py",0,0,"",python,tab +20491,14011047,"genie.py",2760,0,"",python,selection_mouse +20492,14011549,"genie.py",2755,0,"",python,selection_mouse +20493,14011692,"genie.py",2751,8,"mask_rng",python,selection_mouse +20494,14012759,"genie.py",2777,0,"",python,selection_mouse +20495,14012893,"genie.py",2771,8,"mask_rng",python,selection_mouse +20496,14014294,"genie.py",2767,0,"",python,selection_mouse +20497,14014464,"genie.py",2764,5,"batch",python,selection_mouse +20498,14017393,"train_dynamics.py",0,0,"",python,tab +20499,14021002,"train_dynamics.py",2110,0,"",python,selection_mouse +20500,14021188,"train_dynamics.py",2106,8,"training",python,selection_mouse +20501,14022108,"train_dynamics.py",2092,0,"",python,selection_mouse +20502,14022287,"train_dynamics.py",2090,6,"inputs",python,selection_mouse +20503,14045476,"train_dynamics.py",8309,0,"",python,selection_mouse +20504,14045612,"train_dynamics.py",8307,6,"inputs",python,selection_mouse +20505,14046175,"train_dynamics.py",8301,0,"",python,selection_mouse +20506,14046337,"train_dynamics.py",8298,6,"gt_seq",python,selection_mouse +20507,14046662,"train_dynamics.py",8298,8,"gt_seq =",python,selection_mouse +20508,14046723,"train_dynamics.py",8298,9,"gt_seq = ",python,selection_mouse +20509,14046724,"train_dynamics.py",8298,15,"gt_seq = inputs",python,selection_mouse +20510,14047121,"train_dynamics.py",8308,0,"",python,selection_mouse +20511,14047122,"train_dynamics.py",8307,6,"inputs",python,selection_mouse +20512,14047809,"train_dynamics.py",8375,0,"",python,selection_mouse +20513,14048011,"train_dynamics.py",8374,1,",",python,selection_mouse +20514,14048030,"train_dynamics.py",8373,2,"0,",python,selection_mouse +20515,14048043,"train_dynamics.py",8371,4,"p(0,",python,selection_mouse +20516,14048058,"train_dynamics.py",8368,7,"clip(0,",python,selection_mouse +20517,14048113,"train_dynamics.py",8364,11,"[0].clip(0,",python,selection_mouse +20518,14048114,"train_dynamics.py",8360,15,"econ[0].clip(0,",python,selection_mouse +20519,14048120,"train_dynamics.py",8309,66,"puts[""videos""][0]\n recon_seq = recon[0].clip(0,",python,selection_mouse +20520,14048134,"train_dynamics.py",8306,69," inputs[""videos""][0]\n recon_seq = recon[0].clip(0,",python,selection_mouse +20521,14048199,"train_dynamics.py",8304,71," = inputs[""videos""][0]\n recon_seq = recon[0].clip(0,",python,selection_mouse +20522,14048200,"train_dynamics.py",8303,72,"q = inputs[""videos""][0]\n recon_seq = recon[0].clip(0,",python,selection_mouse +20523,14048200,"train_dynamics.py",8302,73,"eq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0,",python,selection_mouse +20524,14048571,"train_dynamics.py",8302,0,"",python,selection_mouse +20525,14048572,"train_dynamics.py",8298,6,"gt_seq",python,selection_mouse +20526,14048764,"train_dynamics.py",8298,7,"gt_seq ",python,selection_mouse +20527,14048782,"train_dynamics.py",8298,15,"gt_seq = inputs",python,selection_mouse +20528,14048837,"train_dynamics.py",8298,17,"gt_seq = inputs[""",python,selection_mouse +20529,14048838,"train_dynamics.py",8298,23,"gt_seq = inputs[""videos",python,selection_mouse +20530,14048897,"train_dynamics.py",8298,26,"gt_seq = inputs[""videos""][",python,selection_mouse +20531,14048897,"train_dynamics.py",8298,28,"gt_seq = inputs[""videos""][0]",python,selection_mouse +20532,14049124,"train_dynamics.py",8326,0,"",python,selection_mouse 
+20533,14049132,"train_dynamics.py",8325,0,"",python,selection_command +20534,14049306,"train_dynamics.py",8326,0,"",python,selection_mouse +20535,14049321,"train_dynamics.py",8325,0,"",python,selection_command +20536,14049479,"train_dynamics.py",8325,1,"]",python,selection_mouse +20537,14049481,"train_dynamics.py",8326,0,"",python,selection_command +20538,14049502,"train_dynamics.py",8325,1,"]",python,selection_mouse +20539,14049570,"train_dynamics.py",8323,3,"[0]",python,selection_mouse +20540,14049570,"train_dynamics.py",8315,11,"videos""][0]",python,selection_mouse +20541,14049962,"train_dynamics.py",8316,0,"",python,selection_mouse +20542,14055779,"train_dynamics.py",8499,0,"",python,selection_mouse +20543,14056363,"train_dynamics.py",8606,0,"",python,selection_mouse +20544,14056364,"train_dynamics.py",8605,0,"",python,selection_command +20545,14056512,"train_dynamics.py",8606,0,"",python,selection_mouse +20546,14056513,"train_dynamics.py",8605,0,"",python,selection_command +20547,14057014,"train_dynamics.py",8552,0,"",python,selection_mouse +20548,14057158,"train_dynamics.py",8540,14,"comparison_seq",python,selection_mouse +20549,14063048,"train_dynamics.py",8446,0,"",python,selection_mouse +20550,14063186,"train_dynamics.py",8441,9,"recon_seq",python,selection_mouse +20551,14064237,"train_dynamics.py",8361,0,"",python,selection_mouse +20552,14064373,"train_dynamics.py",8359,5,"recon",python,selection_mouse +20553,14067955,"train_dynamics.py",7594,0,"",python,selection_mouse +20554,14068116,"train_dynamics.py",7592,5,"recon",python,selection_mouse +20555,14070519,"train_dynamics.py",7617,0,"",python,selection_mouse +20556,14073473,"train_dynamics.py",3358,0,"",python,selection_mouse +20557,14073629,"train_dynamics.py",3356,5,"recon",python,selection_mouse +20558,14075839,"train_dynamics.py",3174,0,"",python,selection_mouse +20559,14076373,"train_dynamics.py",3104,0,"",python,selection_mouse +20560,14076547,"train_dynamics.py",3102,7,"grad_fn",python,selection_mouse +20561,14080098,"train_dynamics.py",3027,0,"",python,selection_mouse +20562,14082095,"train_dynamics.py",2876,0,"",python,selection_mouse +20563,14082270,"train_dynamics.py",2871,7,"outputs",python,selection_mouse +20564,14082884,"train_dynamics.py",2888,0,"",python,selection_mouse +20565,14083449,"train_dynamics.py",2887,0,"",python,selection_mouse +20566,14085402,"train_dynamics.py",2898,0,"",python,selection_mouse +20567,14086101,"train_dynamics.py",2849,0,"",python,selection_mouse +20568,14086103,"train_dynamics.py",2848,0,"",python,selection_command +20569,14086676,"train_dynamics.py",2893,0,"",python,selection_mouse +20570,14090206,"train_dynamics.py",2693,0,"",python,selection_mouse +20571,14090903,"train_dynamics.py",2800,0,"",python,selection_mouse +20572,14091518,"train_dynamics.py",2805,0,"",python,selection_mouse +20573,14092193,"train_dynamics.py",2894,0,"",python,selection_mouse +20574,14092810,"train_dynamics.py",2896,0,"",python,selection_mouse +20575,14107709,"genie.py",0,0,"",python,tab +20576,14111725,"train_dynamics.py",0,0,"",python,tab +20577,14112378,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +20578,14113559,"train_dynamics.py",0,0,"",python,tab +20579,14115671,"train_dynamics.py",2885,0,"",python,selection_mouse +20580,14116339,"train_dynamics.py",2886,0,"",python,selection_mouse +20581,14116936,"train_dynamics.py",2887,0,"",python,selection_mouse +20582,14118644,"train_dynamics.py",2887,0,",",python,content 
+20583,14118646,"train_dynamics.py",2888,0,"",python,selection_keyboard +20584,14118739,"train_dynamics.py",2888,0," ",python,content +20585,14118740,"train_dynamics.py",2889,0,"",python,selection_keyboard +20586,14119156,"train_dynamics.py",2889,0,"o",python,content +20587,14119157,"train_dynamics.py",2890,0,"",python,selection_keyboard +20588,14119343,"train_dynamics.py",2890,0,"u",python,content +20589,14119344,"train_dynamics.py",2891,0,"",python,selection_keyboard +20590,14119937,"train_dynamics.py",2889,2,"outputs",python,content +20591,14120499,"train_tokenizer.py",0,0,"",python,tab +20592,14121515,"train_tokenizer.py",2517,0,"",python,selection_command +20593,14128238,"train_dynamics.py",0,0,"",python,tab +20594,14130456,"train_dynamics.py",2896,0,"[]",python,content +20595,14130457,"train_dynamics.py",2897,0,"",python,selection_keyboard +20596,14131573,"train_dynamics.py",2897,0,"""""",python,content +20597,14131575,"train_dynamics.py",2898,0,"",python,selection_keyboard +20598,14131839,"train_dynamics.py",2898,0,"g",python,content +20599,14131839,"train_dynamics.py",2899,0,"",python,selection_keyboard +20600,14131969,"train_dynamics.py",2899,0,"t",python,content +20601,14131970,"train_dynamics.py",2900,0,"",python,selection_keyboard +20602,14132132,"train_dynamics.py",2900,0,"_",python,content +20603,14132133,"train_dynamics.py",2901,0,"",python,selection_keyboard +20604,14132580,"train_dynamics.py",2901,0,"e",python,content +20605,14132581,"train_dynamics.py",2902,0,"",python,selection_keyboard +20606,14132970,"train_dynamics.py",2901,1,"",python,content +20607,14133051,"train_dynamics.py",2901,0,"d",python,content +20608,14133052,"train_dynamics.py",2902,0,"",python,selection_keyboard +20609,14133151,"train_dynamics.py",2902,0,"e",python,content +20610,14133152,"train_dynamics.py",2903,0,"",python,selection_keyboard +20611,14133240,"train_dynamics.py",2903,0,"b",python,content +20612,14133243,"train_dynamics.py",2904,0,"",python,selection_keyboard +20613,14133388,"train_dynamics.py",2904,0,"u",python,content +20614,14133389,"train_dynamics.py",2905,0,"",python,selection_keyboard +20615,14133454,"train_dynamics.py",2905,0,"g",python,content +20616,14133455,"train_dynamics.py",2906,0,"",python,selection_keyboard +20617,14134058,"train_dynamics.py",2905,0,"",python,selection_command +20618,14134752,"train_dynamics.py",2891,0,"",python,selection_mouse +20619,14134901,"train_dynamics.py",2889,7,"outputs",python,selection_mouse +20620,14135100,"train_dynamics.py",2889,9,"outputs[""",python,selection_mouse +20621,14135127,"train_dynamics.py",2889,17,"outputs[""gt_debug",python,selection_mouse +20622,14135270,"train_dynamics.py",2889,18,"outputs[""gt_debug""",python,selection_mouse +20623,14135557,"train_dynamics.py",2889,19,"outputs[""gt_debug""]",python,selection_mouse +20624,14141812,"train_dynamics.py",3103,0,"",python,selection_mouse +20625,14142556,"train_dynamics.py",3102,0,"",python,selection_mouse +20626,14143533,"train_dynamics.py",3102,0,",",python,content +20627,14143536,"train_dynamics.py",3103,0,"",python,selection_keyboard +20628,14143621,"train_dynamics.py",3103,0," ",python,content +20629,14143622,"train_dynamics.py",3104,0,"",python,selection_keyboard +20630,14143838,"train_dynamics.py",3104,0,"d",python,content +20631,14143839,"train_dynamics.py",3105,0,"",python,selection_keyboard +20632,14144291,"train_dynamics.py",3105,0,"t",python,content +20633,14144292,"train_dynamics.py",3106,0,"",python,selection_keyboard 
+20634,14144510,"train_dynamics.py",3106,0,"_",python,content +20635,14144511,"train_dynamics.py",3107,0,"",python,selection_keyboard +20636,14144721,"train_dynamics.py",3107,0,"d",python,content +20637,14144722,"train_dynamics.py",3108,0,"",python,selection_keyboard +20638,14144891,"train_dynamics.py",3108,0,"e",python,content +20639,14144892,"train_dynamics.py",3109,0,"",python,selection_keyboard +20640,14145015,"train_dynamics.py",3109,0,"b",python,content +20641,14145016,"train_dynamics.py",3110,0,"",python,selection_keyboard +20642,14145117,"train_dynamics.py",3110,0,"u",python,content +20643,14145118,"train_dynamics.py",3111,0,"",python,selection_keyboard +20644,14145213,"train_dynamics.py",3111,0,"g",python,content +20645,14145213,"train_dynamics.py",3112,0,"",python,selection_keyboard +20646,14146357,"train_dynamics.py",3107,0,"",python,selection_mouse +20647,14146931,"train_dynamics.py",3098,0,"",python,selection_mouse +20648,14147062,"train_dynamics.py",3097,5,"recon",python,selection_mouse +20649,14147843,"train_dynamics.py",3105,0,"",python,selection_mouse +20650,14148301,"train_dynamics.py",3104,1,"",python,content +20651,14148639,"train_dynamics.py",3104,0,"g",python,content +20652,14148640,"train_dynamics.py",3105,0,"",python,selection_keyboard +20653,14149421,"train_dynamics.py",3403,0,"",python,selection_mouse +20654,14150188,"train_dynamics.py",3392,0,"",python,selection_mouse +20655,14150816,"train_dynamics.py",3392,0," ",python,content +20656,14150818,"train_dynamics.py",3393,0,"",python,selection_keyboard +20657,14151180,"train_dynamics.py",3392,1,"",python,content +20658,14151400,"train_dynamics.py",3392,0,",",python,content +20659,14151401,"train_dynamics.py",3393,0,"",python,selection_keyboard +20660,14151459,"train_dynamics.py",3393,0," ",python,content +20661,14151460,"train_dynamics.py",3394,0,"",python,selection_keyboard +20662,14151663,"train_dynamics.py",3394,0,"g",python,content +20663,14151664,"train_dynamics.py",3395,0,"",python,selection_keyboard +20664,14151815,"train_dynamics.py",3395,0,"t",python,content +20665,14151815,"train_dynamics.py",3396,0,"",python,selection_keyboard +20666,14152079,"train_dynamics.py",3396,0,"_",python,content +20667,14152080,"train_dynamics.py",3397,0,"",python,selection_keyboard +20668,14152281,"train_dynamics.py",3397,0,"d",python,content +20669,14152282,"train_dynamics.py",3398,0,"",python,selection_keyboard +20670,14152436,"train_dynamics.py",3398,0,"e",python,content +20671,14152437,"train_dynamics.py",3399,0,"",python,selection_keyboard +20672,14152564,"train_dynamics.py",3399,0,"b",python,content +20673,14152565,"train_dynamics.py",3400,0,"",python,selection_keyboard +20674,14152635,"train_dynamics.py",3400,0,"u",python,content +20675,14152636,"train_dynamics.py",3401,0,"",python,selection_keyboard +20676,14152747,"train_dynamics.py",3401,0,"g",python,content +20677,14152748,"train_dynamics.py",3402,0,"",python,selection_keyboard +20678,14156505,"train_dynamics.py",2941,0,"",python,selection_mouse +20679,14159124,"train_dynamics.py",7638,0,"",python,selection_mouse +20680,14160254,"train_dynamics.py",7638,0,",",python,content +20681,14160256,"train_dynamics.py",7639,0,"",python,selection_keyboard +20682,14160349,"train_dynamics.py",7639,0," ",python,content +20683,14160350,"train_dynamics.py",7640,0,"",python,selection_keyboard +20684,14160586,"train_dynamics.py",7640,0,"g",python,content +20685,14160587,"train_dynamics.py",7641,0,"",python,selection_keyboard +20686,14160756,"train_dynamics.py",7641,0,"t",python,content 
+20687,14160758,"train_dynamics.py",7642,0,"",python,selection_keyboard +20688,14161080,"train_dynamics.py",7642,0,"_",python,content +20689,14161081,"train_dynamics.py",7643,0,"",python,selection_keyboard +20690,14161426,"train_dynamics.py",7643,0,"d",python,content +20691,14161427,"train_dynamics.py",7644,0,"",python,selection_keyboard +20692,14161544,"train_dynamics.py",7644,0,"e",python,content +20693,14161546,"train_dynamics.py",7645,0,"",python,selection_keyboard +20694,14161684,"train_dynamics.py",7645,0,"b",python,content +20695,14161686,"train_dynamics.py",7646,0,"",python,selection_keyboard +20696,14161785,"train_dynamics.py",7646,0,"u",python,content +20697,14161786,"train_dynamics.py",7647,0,"",python,selection_keyboard +20698,14161868,"train_dynamics.py",7647,0,"g",python,content +20699,14161869,"train_dynamics.py",7648,0,"",python,selection_keyboard +20700,14162636,"train_dynamics.py",7647,0,"",python,selection_command +20701,14164162,"train_dynamics.py",7636,0,"",python,selection_mouse +20702,14168074,"train_dynamics.py",8794,0,"",python,selection_mouse +20703,14168630,"train_dynamics.py",8870,0,"",python,selection_mouse +20704,14169259,"train_dynamics.py",8887,0,"",python,selection_mouse +20705,14169876,"train_dynamics.py",8967,0,"",python,selection_mouse +20706,14170412,"train_dynamics.py",8959,0,"",python,selection_mouse +20707,14171072,"train_dynamics.py",9017,0,"",python,selection_mouse +20708,14171783,"train_dynamics.py",9011,0,"",python,selection_mouse +20709,14172372,"train_dynamics.py",8966,0,"",python,selection_mouse +20710,14178888,"train_dynamics.py",9024,0,"",python,selection_mouse +20711,14179070,"train_dynamics.py",9021,14,"comparison_seq",python,selection_mouse +20712,14189375,"train_dynamics.py",8891,0,"",python,selection_mouse +20713,14190106,"train_dynamics.py",8892,0,"",python,selection_mouse +20714,14190657,"train_dynamics.py",8890,0,"",python,selection_mouse +20715,14192446,"train_dynamics.py",9110,0,"",python,selection_mouse +20716,14192448,"train_dynamics.py",9109,0,"",python,selection_command +20717,14194473,"train_dynamics.py",9156,0,"",python,selection_mouse +20718,14194476,"train_dynamics.py",9155,0,"",python,selection_command +20719,14194625,"train_dynamics.py",9155,1,")",python,selection_mouse +20720,14194640,"train_dynamics.py",9156,0,"",python,selection_command +20721,14194703,"train_dynamics.py",9110,46,"\n wandb.log(log_images)",python,selection_mouse +20722,14194704,"train_dynamics.py",9084,72,"\n )\n wandb.log(log_images)",python,selection_mouse +20723,14194719,"train_dynamics.py",9012,144,".asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20724,14194782,"train_dynamics.py",8954,202,"e_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20725,14194782,"train_dynamics.py",8863,293,"recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20726,14194783,"train_dynamics.py",8776,380," image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20727,14194788,"train_dynamics.py",8775,381," image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 
1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20728,14194804,"train_dynamics.py",8774,382," image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20729,14194860,"train_dynamics.py",8731,425,"log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20730,14194868,"train_dynamics.py",8730,426," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20731,14195683,"train_dynamics.py",8730,0,"",python,selection_mouse +20732,14195684,"train_dynamics.py",8707,24," ",python,selection_mouse +20733,14195906,"train_dynamics.py",8707,68," log_images = dict(\n ",python,selection_mouse +20734,14195950,"train_dynamics.py",8707,71," log_images = dict(\n ",python,selection_mouse +20735,14195951,"train_dynamics.py",8707,161," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon",python,selection_mouse +20736,14196013,"train_dynamics.py",8707,257," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon",python,selection_mouse +20737,14196013,"train_dynamics.py",8707,313," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray",python,selection_mouse +20738,14196035,"train_dynamics.py",8707,377," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),",python,selection_mouse +20739,14196093,"train_dynamics.py",8707,403," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )",python,selection_mouse +20740,14196400,"train_dynamics.py",8707,449," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20741,14196845,"train_dynamics.py",9156,0,"",python,selection_mouse +20742,14196848,"train_dynamics.py",9155,0,"",python,selection_command +20743,14197796,"train_dynamics.py",9111,45," wandb.log(log_images)",python,selection_command +20744,14198171,"train_dynamics.py",9085,71," )\n wandb.log(log_images)",python,selection_command +20745,14198347,"train_dynamics.py",9054,102," ),\n )\n wandb.log(log_images)",python,selection_command +20746,14198462,"train_dynamics.py",8978,178," np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_command 
+20747,14198607,"train_dynamics.py",8923,233," true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_command +20748,14198891,"train_dynamics.py",8835,321," recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_command +20749,14199094,"train_dynamics.py",8750,406," image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_command +20750,14199495,"train_dynamics.py",8707,449," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_command +20751,14199804,"train_dynamics.py",8707,0,"",python,selection_command +20752,14200158,"train_dynamics.py",8750,0,"",python,selection_command +20753,14200669,"train_dynamics.py",8835,0,"",python,selection_command +20754,14200682,"train_dynamics.py",8923,0,"",python,selection_command +20755,14200740,"train_dynamics.py",8978,0,"",python,selection_command +20756,14200741,"train_dynamics.py",9054,0,"",python,selection_command +20757,14200919,"train_dynamics.py",9085,0,"",python,selection_command +20758,14201198,"train_dynamics.py",9111,0,"",python,selection_command +20759,14201747,"train_dynamics.py",9156,0,"\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,content +20760,14201749,"train_dynamics.py",9181,0,"",python,selection_command +20761,14202814,"train_dynamics.py",9135,0,"",python,selection_command +20762,14203010,"train_dynamics.py",9109,0,"",python,selection_command +20763,14203211,"train_dynamics.py",9078,0,"",python,selection_command +20764,14203378,"train_dynamics.py",9002,0,"",python,selection_command +20765,14203663,"train_dynamics.py",8947,0,"",python,selection_command +20766,14204705,"train_dynamics.py",8969,0,"",python,selection_mouse +20767,14204864,"train_dynamics.py",8965,5,"wandb",python,selection_mouse +20768,14205434,"train_dynamics.py",8973,0,"",python,selection_mouse +20769,14205599,"train_dynamics.py",8971,5,"Image",python,selection_mouse +20770,14206486,"train_dynamics.py",8962,0,"",python,selection_mouse +20771,14207022,"train_dynamics.py",8967,0,"",python,selection_mouse +20772,14207189,"train_dynamics.py",8965,5,"wandb",python,selection_mouse +20773,14207689,"train_dynamics.py",8974,0,"",python,selection_mouse +20774,14207848,"train_dynamics.py",8971,5,"Image",python,selection_mouse +20775,14210040,"train_dynamics.py",9342,0,"",python,selection_mouse +20776,14212726,"train_dynamics.py",9169,450,"",python,content +20777,14212744,"train_dynamics.py",9111,0,"",python,selection_command +20778,14214261,"train_dynamics.py",8356,0,"",python,selection_mouse +20779,14214482,"train_dynamics.py",8355,1," ",python,selection_mouse +20780,14214513,"train_dynamics.py",8356,48,"= inputs[""videos""][0]\n recon_",python,selection_mouse +20781,14214532,"train_dynamics.py",8356,47,"= inputs[""videos""][0]\n recon",python,selection_mouse 
+20782,14214850,"train_dynamics.py",8403,0,"",python,selection_mouse +20783,14214851,"train_dynamics.py",8398,9,"recon_seq",python,selection_mouse +20784,14215701,"train_dynamics.py",8429,0,"",python,selection_mouse +20785,14215709,"train_dynamics.py",8428,0,"",python,selection_command +20786,14216561,"train_dynamics.py",8408,0,"",python,selection_mouse +20787,14217166,"train_dynamics.py",8404,0,"",python,selection_mouse +20788,14217307,"train_dynamics.py",8398,9,"recon_seq",python,selection_mouse +20789,14218053,"train_dynamics.py",8429,0,"",python,selection_mouse +20790,14218055,"train_dynamics.py",8428,0,"",python,selection_command +20791,14219687,"train_dynamics.py",8429,0,"\n ",python,content +20792,14221996,"train_dynamics.py",8450,0,"d",python,content +20793,14221997,"train_dynamics.py",8451,0,"",python,selection_keyboard +20794,14222118,"train_dynamics.py",8451,0,"e",python,content +20795,14222120,"train_dynamics.py",8452,0,"",python,selection_keyboard +20796,14222302,"train_dynamics.py",8452,0,"b",python,content +20797,14222303,"train_dynamics.py",8453,0,"",python,selection_keyboard +20798,14222305,"train_dynamics.py",8453,0,"u",python,content +20799,14222306,"train_dynamics.py",8454,0,"",python,selection_keyboard +20800,14222452,"train_dynamics.py",8454,0,"g",python,content +20801,14222454,"train_dynamics.py",8455,0,"",python,selection_keyboard +20802,14222814,"train_dynamics.py",8455,0,"_",python,content +20803,14222815,"train_dynamics.py",8456,0,"",python,selection_keyboard +20804,14222982,"train_dynamics.py",8456,0,"s",python,content +20805,14222984,"train_dynamics.py",8457,0,"",python,selection_keyboard +20806,14223293,"train_dynamics.py",8457,0,"e",python,content +20807,14223294,"train_dynamics.py",8458,0,"",python,selection_keyboard +20808,14223789,"train_dynamics.py",8458,0,"q",python,content +20809,14223791,"train_dynamics.py",8459,0,"",python,selection_keyboard +20810,14224110,"train_dynamics.py",8459,0," ",python,content +20811,14224111,"train_dynamics.py",8460,0,"",python,selection_keyboard +20812,14224379,"train_dynamics.py",8460,0," ",python,content +20813,14224380,"train_dynamics.py",8461,0,"",python,selection_keyboard +20814,14224655,"train_dynamics.py",8460,1,"",python,content +20815,14224849,"train_dynamics.py",8460,0,"=",python,content +20816,14224850,"train_dynamics.py",8461,0,"",python,selection_keyboard +20817,14224959,"train_dynamics.py",8461,0," ",python,content +20818,14224961,"train_dynamics.py",8462,0,"",python,selection_keyboard +20819,14225985,"train_dynamics.py",8462,0,"g",python,content +20820,14225986,"train_dynamics.py",8463,0,"",python,selection_keyboard +20821,14226133,"train_dynamics.py",8463,0,"t",python,content +20822,14226135,"train_dynamics.py",8464,0,"",python,selection_keyboard +20823,14226628,"train_dynamics.py",8464,0,"_",python,content +20824,14226629,"train_dynamics.py",8465,0,"",python,selection_keyboard +20825,14227298,"train_dynamics.py",8462,3,"gt_debug",python,content +20826,14228290,"train_dynamics.py",8470,0,"[]",python,content +20827,14228291,"train_dynamics.py",8471,0,"",python,selection_keyboard +20828,14228558,"train_dynamics.py",8471,0,"0",python,content +20829,14228560,"train_dynamics.py",8472,0,"",python,selection_keyboard +20830,14228751,"train_dynamics.py",8472,1,"]",python,content +20831,14228752,"train_dynamics.py",8473,0,"",python,selection_keyboard +20832,14229577,"train_dynamics.py",8473,0,".",python,content +20833,14229578,"train_dynamics.py",8474,0,"",python,selection_keyboard 
+20834,14229942,"train_dynamics.py",8474,0,"y",python,content +20835,14229943,"train_dynamics.py",8475,0,"",python,selection_keyboard +20836,14230036,"train_dynamics.py",8475,0,"l",python,content +20837,14230037,"train_dynamics.py",8476,0,"",python,selection_keyboard +20838,14230287,"train_dynamics.py",8476,0,"i",python,content +20839,14230288,"train_dynamics.py",8477,0,"",python,selection_keyboard +20840,14230599,"train_dynamics.py",8476,1,"",python,content +20841,14230762,"train_dynamics.py",8475,1,"",python,content +20842,14230852,"train_dynamics.py",8474,1,"",python,content +20843,14231001,"train_dynamics.py",8474,0,"c",python,content +20844,14231002,"train_dynamics.py",8475,0,"",python,selection_keyboard +20845,14231061,"train_dynamics.py",8475,0,"l",python,content +20846,14231062,"train_dynamics.py",8476,0,"",python,selection_keyboard +20847,14231330,"train_dynamics.py",8476,0,"i",python,content +20848,14231331,"train_dynamics.py",8477,0,"",python,selection_keyboard +20849,14231503,"train_dynamics.py",8477,0,"p",python,content +20850,14231504,"train_dynamics.py",8478,0,"",python,selection_keyboard +20851,14232401,"train_dynamics.py",8478,0,"()",python,content +20852,14232402,"train_dynamics.py",8479,0,"",python,selection_keyboard +20853,14232704,"train_dynamics.py",8479,0,"0",python,content +20854,14232704,"train_dynamics.py",8480,0,"",python,selection_keyboard +20855,14232932,"train_dynamics.py",8480,0,",",python,content +20856,14232933,"train_dynamics.py",8481,0,"",python,selection_keyboard +20857,14233040,"train_dynamics.py",8481,0,"1",python,content +20858,14233041,"train_dynamics.py",8482,0,"",python,selection_keyboard +20859,14234317,"train_dynamics.py",8455,0,"",python,selection_mouse +20860,14234477,"train_dynamics.py",8450,9,"debug_seq",python,selection_mouse +20861,14235553,"train_dynamics.py",8543,0,"",python,selection_mouse +20862,14236036,"train_dynamics.py",8548,0,"",python,selection_mouse +20863,14236208,"train_dynamics.py",8546,9,"recon_seq",python,selection_mouse +20864,14237263,"train_dynamics.py",8620,0,"",python,selection_mouse +20865,14238099,"train_dynamics.py",8594,0,"",python,selection_mouse +20866,14238253,"train_dynamics.py",8586,14,"comparison_seq",python,selection_mouse +20867,14238801,"train_dynamics.py",8660,0,"",python,selection_mouse +20868,14239393,"train_dynamics.py",8711,0,"",python,selection_mouse +20869,14241216,"train_dynamics.py",8506,0,"",python,selection_mouse +20870,14241767,"train_dynamics.py",8504,0,"",python,selection_mouse +20871,14241953,"train_dynamics.py",8504,82,"comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n ",python,selection_mouse +20872,14241962,"train_dynamics.py",8504,138,"comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n ",python,selection_mouse +20873,14241981,"train_dynamics.py",8504,282,"comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n l",python,selection_mouse +20874,14242040,"train_dynamics.py",8504,502,"comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n t",python,selection_mouse +20875,14242040,"train_dynamics.py",8504,559,"comparison_seq = 
jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n ",python,selection_mouse +20876,14242046,"train_dynamics.py",8504,634,"comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),",python,selection_mouse +20877,14242060,"train_dynamics.py",8504,660,"comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )",python,selection_mouse +20878,14242334,"train_dynamics.py",8504,701,"comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_im",python,selection_mouse +20879,14242351,"train_dynamics.py",8504,702,"comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_ima",python,selection_mouse +20880,14242396,"train_dynamics.py",8504,703,"comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_imag",python,selection_mouse +20881,14242397,"train_dynamics.py",8504,705,"comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images",python,selection_mouse +20882,14242410,"train_dynamics.py",8504,706,"comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n 
log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20883,14244799,"train_dynamics.py",9210,0,"",python,selection_mouse +20884,14245146,"train_dynamics.py",9209,1,")",python,selection_mouse +20885,14245162,"train_dynamics.py",9199,11,"log_images)",python,selection_mouse +20886,14245220,"train_dynamics.py",9162,48," )\n wandb.log(log_images)",python,selection_mouse +20887,14245227,"train_dynamics.py",9047,163," np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20888,14245245,"train_dynamics.py",8896,314," recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20889,14245303,"train_dynamics.py",8805,405," image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20890,14245304,"train_dynamics.py",8761,449," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20891,14245330,"train_dynamics.py",8712,498," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20892,14245386,"train_dynamics.py",8690,520," )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20893,14245391,"train_dynamics.py",8621,589," comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20894,14245447,"train_dynamics.py",8566,644," comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20895,14246350,"train_dynamics.py",8484,726," comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n 
np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)",python,selection_mouse +20896,14247392,"train_dynamics.py",8504,0,"",python,selection_command +20897,14257283,"train_dynamics.py",8484,0,"",python,selection_command +20898,14259998,"train_dynamics.py",8712,0," debug_comparison_seq = jnp.concatenate((gt_seq, debug_seq), axis=1)\n",python,content +20899,14260093,"train_dynamics.py",8800,0," debug_comparison_seq = einops.rearrange(\n",python,content +20900,14260171,"train_dynamics.py",8861,0," debug_comparison_seq * 255, ""t h w c -> h (t w) c""\n",python,content +20901,14260203,"train_dynamics.py",8936,0," )\n",python,content +20902,14260662,"train_dynamics.py",9385,0," true_vs_debug=wandb.Image(\n",python,content +20903,14260693,"train_dynamics.py",9440,0," np.asarray(debug_comparison_seq.astype(np.uint8))\n",python,content +20904,14260694,"train_dynamics.py",9522,0," ),\n",python,content +20905,14264584,"TERMINAL",0,0,"bash",,terminal_focus +20906,14266349,"TERMINAL",0,0,"srun",,terminal_focus +20907,14268005,"TERMINAL",0,0,"^DERROR:2025-06-30 18:21:52,397:jax._src.debugging:96: jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/exit_hooks.py"", line 36, in exit\r\n self._orig_exit(orig_code) # type: ignore\r\nSystemExit: 0\r\nERROR:jax._src.debugging:jax.debug.callback failed\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 94, in debug_callback_impl\r\n callback(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugging.py"", line 334, in _flat_callback\r\n callback(*args, **kwargs)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/core.py"", line 220, in _breakpoint_callback\r\n debugger(frames, thread_id, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 167, in run_debugger\r\n CliDebugger(frames, thread_id, **kwargs).run()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 160, in run\r\n self.cmdloop()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 138, in cmdloop\r\n stop = self.onecmd(line)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/cmd.py"", line 217, in onecmd\r\n return func(arg)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/debugger/cli_debugger.py"", line 146, in do_quit\r\n sys.exit(0)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/exit_hooks.py"", line 36, in exit\r\n self._orig_exit(orig_code) # type: ignore\r\nSystemExit: 0\r\n",,terminal_output +20908,14269877,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-tiny-overfit-big-lr-0000 at: https://wandb.ai/instant-uv/jafar/runs/5hu6xzzt\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_181423-5hu6xzzt/logs\r\n",,terminal_output +20909,14271419,"TERMINAL",0,0,"]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +20910,14271935,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +20911,14272002,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +20912,14272125,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20913,14272192,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +20914,14272257,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20915,14272463,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0408.localdomain: Mon Jun 30 18:21:56 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3306855 accelerat interact tum_cte0 R59:43\t 1 hkn0408",,terminal_output +20916,14273363,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0408:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0408 jafar]$ ",,terminal_output +20917,14273481,"TERMINAL",0,0,"queue",,terminal_output +20918,14273864,"TERMINAL",0,0,"",,terminal_output +20919,14274532,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\nsalloc: Relinquishing job allocation 3306855\r\nsalloc: Job allocation 3306855 has been revoked.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +20920,14276541,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G",,terminal_command +20921,14276592,"TERMINAL",0,0,"]633;E;2025-06-30 18:22:00 salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G;4d11dbdc-690b-4257-b927-bbd493ebfa56]633;Csalloc: Granted job allocation 3307154\r\n",,terminal_output +20922,14276702,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +20923,14280802,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output 
+20924,14281005,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +20925,14281081,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +20926,14281189,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +20927,14281412,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +20928,14281597,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20929,14281777,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +20930,14281842,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +20931,14282460,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +20932,14282663,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20933,14282728,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +20934,14282846,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +20935,14283623,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +20936,14283856,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +20937,14284063,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +20938,14284187,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +20939,14284757,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +20940,14284930,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +20941,14285074,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +20942,14285318,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +20943,14285384,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +20944,14285475,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +20945,14285731,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +20946,14285796,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +20947,14285934,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +20948,14286412,"TERMINAL",0,0,"\r\n",,terminal_output +20949,14303774,"TERMINAL",0,0,"salloc: Nodes hkn0531 are ready for job\r\n",,terminal_output +20950,14304076,"TERMINAL",0,0,"source .venv/bin/activate\r\n",,terminal_output +20951,14304639,"TERMINAL",0,0,"]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h[tum_cte0515@hkn0531 jafar]$ source .venv/bin/activate\r\n[?2004l\r",,terminal_output +20952,14304693,"TERMINAL",0,0,"]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +20953,14322907,"TERMINAL",0,0,"bash",,terminal_focus +20954,14324322,"TERMINAL",0,0,"srun",,terminal_focus +20955,14328635,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +20956,14328859,"TERMINAL",0,0,"queue",,terminal_output +20957,14329209,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +20958,14330170,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+20959,14330322,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3921049\r\nSLURM_JOB_GPUS=3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0531\r\nSLURM_JOB_START_TIME=1751300521\r\nSLURM_STEP_NODELIST=hkn0531\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751304121\r\nSLURM_PMI2_SRUN_PORT=37937\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3307154\r\nSLURM_PTY_PORT=38317\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0531\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=93\r\nSLURM_NODELIST=hkn0531\r\nSLURM_SRUN_COMM_PORT=43825\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3307154\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0531\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=43825\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0531\r\n",,terminal_output +20960,14348493,"utils/dataloader.py",0,0,"",python,tab +20961,14351855,"train_dynamics.py",0,0,"",python,tab +20962,14355807,"train_dynamics.py",8654,0,"",python,selection_mouse +20963,14356254,"train_dynamics.py",8538,0,"",python,selection_mouse +20964,14356795,"TERMINAL",0,0,"2025-06-30 18:23:21.222206: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n",,terminal_output +20965,14356923,"TERMINAL",0,0,"WARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751300601.345224 3921720 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751300601.350542 3921720 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n",,terminal_output +20966,14357282,"TERMINAL",0,0,"W0000 00:00:1751300601.685966 3921720 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751300601.686013 3921720 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751300601.686015 3921720 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751300601.686017 3921720 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +20967,14360811,"train_dynamics.py",0,0,"",python,tab +20968,14369250,"train_dynamics.py",2534,0,"",python,selection_mouse +20969,14369933,"train_dynamics.py",2200,0,"",python,selection_mouse +20970,14370429,"train_dynamics.py",2218,0,"",python,selection_mouse +20971,14370559,"train_dynamics.py",2212,7,"outputs",python,selection_mouse +20972,14371245,"train_dynamics.py",2207,0,"",python,selection_mouse +20973,14371393,"train_dynamics.py",2205,4,"mask",python,selection_mouse +20974,14381411,"train_dynamics.py",2217,0,"",python,selection_mouse +20975,14381572,"train_dynamics.py",2212,7,"outputs",python,selection_mouse +20976,14381919,"train_dynamics.py",2227,0,"",python,selection_mouse +20977,14382230,"train_dynamics.py",2226,1,"]",python,selection_mouse +20978,14382239,"train_dynamics.py",2221,6,"mask""]",python,selection_mouse +20979,14382300,"train_dynamics.py",2220,7,"""mask""]",python,selection_mouse +20980,14382300,"train_dynamics.py",2212,15,"outputs[""mask""]",python,selection_mouse +20981,14383084,"train_dynamics.py",2217,0,"",python,selection_mouse +20982,14383085,"train_dynamics.py",2212,7,"outputs",python,selection_mouse +20983,14383300,"train_dynamics.py",2212,8,"outputs[",python,selection_mouse +20984,14383312,"train_dynamics.py",2212,13,"outputs[""mask",python,selection_mouse +20985,14383372,"train_dynamics.py",2212,14,"outputs[""mask""",python,selection_mouse +20986,14383373,"train_dynamics.py",2212,15,"outputs[""mask""]",python,selection_mouse +20987,14383594,"train_dynamics.py",2227,0,"",python,selection_mouse +20988,14383964,"train_dynamics.py",2225,2,"""]",python,selection_mouse +20989,14383972,"train_dynamics.py",2221,6,"mask""]",python,selection_mouse +20990,14384033,"train_dynamics.py",2219,8,"[""mask""]",python,selection_mouse +20991,14384033,"train_dynamics.py",2212,15,"outputs[""mask""]",python,selection_mouse +20992,14384556,"train_dynamics.py",2215,0,"",python,selection_mouse +20993,14384556,"train_dynamics.py",2212,7,"outputs",python,selection_mouse +20994,14384819,"train_dynamics.py",2212,9,"outputs[""",python,selection_mouse +20995,14384820,"train_dynamics.py",2212,13,"outputs[""mask",python,selection_mouse +20996,14384825,"train_dynamics.py",2212,15,"outputs[""mask""]",python,selection_mouse +20997,14385344,"train_dynamics.py",2227,0,"",python,selection_mouse +20998,14388582,"train_dynamics.py",2216,0,"",python,selection_mouse +20999,14388708,"train_dynamics.py",2212,7,"outputs",python,selection_mouse +21000,14388853,"train_dynamics.py",2201,27," mask = outputs[""mask""]\n",python,selection_mouse +21001,14390174,"TERMINAL",0,0,"W0000 00:00:1751300634.600539 3921720 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +21002,14391106,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +21003,14392356,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +21004,14393038,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_182356-x8r50yi7\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-tiny-overfit-big-lr-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/x8r50yi7\r\n",,terminal_output +21005,14395610,"TERMINAL",0,0,"2025-06-30 18:24:00.024797: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +21006,14408429,"TERMINAL",0,0,"2025-06-30 18:24:12.766738: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +21007,14423375,"TERMINAL",0,0,"2025-06-30 18:24:27.778904: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +21008,14430155,"TERMINAL",0,0,"2025-06-30 18:24:34.580459: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +21009,14449185,"TERMINAL",0,0,"batch shape: (1, 16, 90, 160, 3)\r\n",,terminal_output +21010,14461942,"TERMINAL",0,0,"2025-06-30 18:25:06.361309: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:25:06.361869: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:25:06.361974: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:25:06.362600: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:25:06.363635: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +21011,14508572,"TERMINAL",0,0,"Step 0, loss: 8.689549446105957, step time: 59244.48490142822ms\r\n",,terminal_output +21012,14508668,"TERMINAL",0,0,"Step 1, loss: 8.141585350036621, step time: 29.60348129272461ms\r\n",,terminal_output +21013,14508773,"TERMINAL",0,0,"Step 2, loss: nan, step time: 33.92744064331055ms\r\n",,terminal_output +21014,14508882,"TERMINAL",0,0,"Step 3, loss: 7.575160503387451, step time: 20.24698257446289ms\r\n",,terminal_output +21015,14508934,"TERMINAL",0,0,"Step 4, loss: 7.437915325164795, step time: 20.17498016357422ms\r\n",,terminal_output +21016,14509028,"TERMINAL",0,0,"Step 5, loss: 7.132429599761963, step time: 19.972562789916992ms\r\n",,terminal_output +21017,14509135,"TERMINAL",0,0,"Step 6, loss: 6.929563045501709, step time: 19.469022750854492ms\r\n",,terminal_output +21018,14509194,"TERMINAL",0,0,"Step 7, loss: 6.766757011413574, step time: 19.44589614868164ms\r\n",,terminal_output +21019,14509304,"TERMINAL",0,0,"Step 8, loss: 6.593115329742432, step time: 19.931316375732422ms\r\n",,terminal_output +21020,14509370,"TERMINAL",0,0,"Step 9, loss: 6.652842998504639, step time: 19.588947296142578ms\r\n",,terminal_output +21021,14509478,"TERMINAL",0,0,"Step 10, loss: 6.40163516998291, step time: 19.72818374633789ms\r\n",,terminal_output +21022,14509638,"TERMINAL",0,0,"Step 11, loss: 6.411810874938965, step time: 19.98448371887207ms\r\nStep 12, loss: 6.155960559844971, step time: 19.851207733154297ms\r\n",,terminal_output +21023,14509749,"TERMINAL",0,0,"Step 13, loss: 6.087944030761719, step time: 19.64116096496582ms\r\n",,terminal_output +21024,14509807,"TERMINAL",0,0,"Step 14, loss: 5.984745979309082, step time: 20.12920379638672ms\r\n",,terminal_output +21025,14509914,"TERMINAL",0,0,"Step 15, loss: 5.949819087982178, step time: 19.963741302490234ms\r\n",,terminal_output +21026,14510025,"TERMINAL",0,0,"Step 16, loss: 5.8898234367370605, step time: 20.195960998535156ms\r\n",,terminal_output +21027,14510076,"TERMINAL",0,0,"Step 17, loss: 5.8404951095581055, step time: 19.993305206298828ms\r\n",,terminal_output +21028,14510182,"TERMINAL",0,0,"Step 18, loss: 6.039356231689453, step time: 19.906282424926758ms\r\n",,terminal_output +21029,14510246,"TERMINAL",0,0,"Step 19, loss: 5.658321380615234, step time: 19.770383834838867ms\r\n",,terminal_output +21030,14510355,"TERMINAL",0,0,"Step 20, loss: 5.579622268676758, step time: 20.175457000732422ms\r\n",,terminal_output +21031,14510419,"TERMINAL",0,0,"Step 21, loss: 5.600977420806885, step time: 19.606351852416992ms\r\n",,terminal_output +21032,14510526,"TERMINAL",0,0,"Step 22, loss: 5.566794395446777, step time: 19.586563110351562ms\r\n",,terminal_output +21033,14510632,"TERMINAL",0,0,"Step 23, loss: 5.421724796295166, step time: 19.84548568725586ms\r\n",,terminal_output +21034,14510686,"TERMINAL",0,0,"Step 24, loss: 5.36187744140625, step time: 19.68216896057129ms\r\n",,terminal_output +21035,14510873,"TERMINAL",0,0,"Step 25, loss: 5.370827674865723, step time: 19.658803939819336ms\r\nStep 26, loss: 5.342113971710205, step time: 
20.151138305664062ms\r\n",,terminal_output +21036,14510950,"TERMINAL",0,0,"Step 27, loss: 5.286186695098877, step time: 19.589900970458984ms\r\n",,terminal_output +21037,14511056,"TERMINAL",0,0,"Step 28, loss: 5.165322780609131, step time: 19.62137222290039ms\r\n",,terminal_output +21038,14511126,"TERMINAL",0,0,"Step 29, loss: 5.111928939819336, step time: 19.8976993560791ms\r\n",,terminal_output +21039,14511234,"TERMINAL",0,0,"Step 30, loss: 5.064871311187744, step time: 19.901275634765625ms\r\n",,terminal_output +21040,14511302,"TERMINAL",0,0,"Step 31, loss: 5.192669868469238, step time: 19.70362663269043ms\r\n",,terminal_output +21041,14511415,"TERMINAL",0,0,"Step 32, loss: 5.17686128616333, step time: 19.99831199645996ms\r\n",,terminal_output +21042,14511477,"TERMINAL",0,0,"Step 33, loss: 5.023236274719238, step time: 19.663333892822266ms\r\n",,terminal_output +21043,14511595,"TERMINAL",0,0,"Step 34, loss: 4.9295759201049805, step time: 19.597291946411133ms\r\n",,terminal_output +21044,14511662,"TERMINAL",0,0,"Step 35, loss: 4.955498695373535, step time: 19.97232437133789ms\r\n",,terminal_output +21045,14511775,"TERMINAL",0,0,"Step 36, loss: 4.884079933166504, step time: 19.66714859008789ms\r\n",,terminal_output +21046,14511833,"TERMINAL",0,0,"Step 37, loss: 4.922764301300049, step time: 19.550561904907227ms\r\n",,terminal_output +21047,14511943,"TERMINAL",0,0,"Step 38, loss: 4.729457378387451, step time: 19.979476928710938ms\r\n",,terminal_output +21048,14512000,"TERMINAL",0,0,"Step 39, loss: 4.814234256744385, step time: 19.547224044799805ms\r\n",,terminal_output +21049,14512101,"TERMINAL",0,0,"Step 40, loss: 4.714190483093262, step time: 19.411325454711914ms\r\n",,terminal_output +21050,14512216,"TERMINAL",0,0,"Step 41, loss: 4.76311731338501, step time: 19.90818977355957ms\r\n",,terminal_output +21051,14512267,"TERMINAL",0,0,"Step 42, loss: 4.6620683670043945, step time: 19.400596618652344ms\r\n",,terminal_output +21052,14512376,"TERMINAL",0,0,"Step 43, loss: 4.7732696533203125, step time: 19.600391387939453ms\r\n",,terminal_output +21053,14512441,"TERMINAL",0,0,"Step 44, loss: 4.548834800720215, step time: 19.86074447631836ms\r\n",,terminal_output +21054,14512551,"TERMINAL",0,0,"Step 45, loss: 4.621319770812988, step time: 19.54174041748047ms\r\n",,terminal_output +21055,14512622,"TERMINAL",0,0,"Step 46, loss: 4.533157825469971, step time: 19.423961639404297ms\r\n",,terminal_output +21056,14512740,"TERMINAL",0,0,"Step 47, loss: 4.456870079040527, step time: 19.916057586669922ms\r\n",,terminal_output +21057,14512801,"TERMINAL",0,0,"Step 48, loss: 4.505385398864746, step time: 19.405841827392578ms\r\n",,terminal_output +21058,14512907,"TERMINAL",0,0,"Step 49, loss: 4.382493495941162, step time: 19.515514373779297ms\r\n",,terminal_output +21059,14512972,"TERMINAL",0,0,"Step 50, loss: 4.2450714111328125, step time: 19.858837127685547ms\r\n",,terminal_output +21060,14513079,"TERMINAL",0,0,"Step 51, loss: 4.340328693389893, step time: 19.505977630615234ms\r\n",,terminal_output +21061,14513144,"TERMINAL",0,0,"Step 52, loss: 4.6074419021606445, step time: 19.707202911376953ms\r\n",,terminal_output +21062,14513256,"TERMINAL",0,0,"Step 53, loss: 4.538056373596191, step time: 19.84691619873047ms\r\n",,terminal_output +21063,14513319,"TERMINAL",0,0,"Step 54, loss: 4.338731288909912, step time: 19.51289176940918ms\r\n",,terminal_output +21064,14513425,"TERMINAL",0,0,"Step 55, loss: 4.565731525421143, step time: 19.599199295043945ms\r\n",,terminal_output +21065,14513536,"TERMINAL",0,0,"Step 56, 
loss: 4.22885274887085, step time: 19.871234893798828ms\r\n",,terminal_output +21066,14513593,"TERMINAL",0,0,"Step 57, loss: 4.443627834320068, step time: 19.611120223999023ms\r\n",,terminal_output +21067,14513700,"TERMINAL",0,0,"Step 58, loss: 4.221002101898193, step time: 19.457340240478516ms\r\n",,terminal_output +21068,14513766,"TERMINAL",0,0,"Step 59, loss: 4.176154136657715, step time: 19.87743377685547ms\r\n",,terminal_output +21069,14513876,"TERMINAL",0,0,"Step 60, loss: 4.4228835105896, step time: 19.408464431762695ms\r\n",,terminal_output +21070,14513947,"TERMINAL",0,0,"Step 61, loss: 4.4303083419799805, step time: 21.53468132019043ms\r\n",,terminal_output +21071,14514075,"TERMINAL",0,0,"Step 62, loss: 4.457806587219238, step time: 22.00007438659668ms\r\n",,terminal_output +21072,14514127,"TERMINAL",0,0,"Step 63, loss: 4.142475605010986, step time: 20.190954208374023ms\r\n",,terminal_output +21073,14514220,"TERMINAL",0,0,"Step 64, loss: 4.222617149353027, step time: 19.826650619506836ms\r\n",,terminal_output +21074,14514332,"TERMINAL",0,0,"Step 65, loss: 4.004863262176514, step time: 19.92201805114746ms\r\n",,terminal_output +21075,14514395,"TERMINAL",0,0,"Step 66, loss: 4.144213676452637, step time: 27.384281158447266ms\r\n",,terminal_output +21076,14514509,"TERMINAL",0,0,"Step 67, loss: 3.96016788482666, step time: 21.012067794799805ms\r\n",,terminal_output +21077,14514562,"TERMINAL",0,0,"Step 68, loss: 4.048344612121582, step time: 19.854068756103516ms\r\n",,terminal_output +21078,14514683,"TERMINAL",0,0,"Step 69, loss: 4.0442728996276855, step time: 19.561052322387695ms\r\n",,terminal_output +21079,14514735,"TERMINAL",0,0,"Step 70, loss: 4.032415390014648, step time: 19.408702850341797ms\r\n",,terminal_output +21080,14514845,"TERMINAL",0,0,"Step 71, loss: 3.885061502456665, step time: 19.771575927734375ms\r\n",,terminal_output +21081,14514913,"TERMINAL",0,0,"Step 72, loss: 4.062239170074463, step time: 19.577980041503906ms\r\n",,terminal_output +21082,14515019,"TERMINAL",0,0,"Step 73, loss: 3.9714138507843018, step time: 19.501924514770508ms\r\n",,terminal_output +21083,14515083,"TERMINAL",0,0,"Step 74, loss: 3.998093366622925, step time: 19.792795181274414ms\r\n",,terminal_output +21084,14515196,"TERMINAL",0,0,"Step 75, loss: 4.213776111602783, step time: 19.475936889648438ms\r\n",,terminal_output +21085,14515263,"TERMINAL",0,0,"Step 76, loss: 3.9538779258728027, step time: 19.29759979248047ms\r\n",,terminal_output +21086,14515373,"TERMINAL",0,0,"Step 77, loss: 4.09425687789917, step time: 19.81067657470703ms\r\n",,terminal_output +21087,14515435,"TERMINAL",0,0,"Step 78, loss: 3.9316024780273438, step time: 19.377470016479492ms\r\n",,terminal_output +21088,14515545,"TERMINAL",0,0,"Step 79, loss: 3.839538335800171, step time: 19.543886184692383ms\r\n",,terminal_output +21089,14515613,"TERMINAL",0,0,"Step 80, loss: 4.026239395141602, step time: 19.78611946105957ms\r\n",,terminal_output +21090,14515723,"TERMINAL",0,0,"Step 81, loss: 3.9452385902404785, step time: 19.366741180419922ms\r\n",,terminal_output +21091,14515785,"TERMINAL",0,0,"Step 82, loss: 3.94709849357605, step time: 19.379138946533203ms\r\n",,terminal_output +21092,14515896,"TERMINAL",0,0,"Step 83, loss: 3.866347312927246, step time: 19.74964141845703ms\r\n",,terminal_output +21093,14515966,"TERMINAL",0,0,"Step 84, loss: 3.847072124481201, step time: 19.505023956298828ms\r\n",,terminal_output +21094,14516079,"TERMINAL",0,0,"Step 85, loss: 3.79050350189209, step time: 19.583940505981445ms\r\n",,terminal_output 
+21095,14516135,"TERMINAL",0,0,"Step 86, loss: 4.401532173156738, step time: 19.96612548828125ms\r\n",,terminal_output +21096,14516241,"TERMINAL",0,0,"Step 87, loss: 3.978780746459961, step time: 19.5772647857666ms\r\n",,terminal_output +21097,14516351,"TERMINAL",0,0,"Step 88, loss: 3.840876579284668, step time: 19.517898559570312ms\r\n",,terminal_output +21098,14516404,"TERMINAL",0,0,"Step 89, loss: 3.814903736114502, step time: 19.885540008544922ms\r\n",,terminal_output +21099,14516512,"TERMINAL",0,0,"Step 90, loss: 3.8038461208343506, step time: 19.543170928955078ms\r\n",,terminal_output +21100,14516664,"TERMINAL",0,0,"Step 91, loss: 3.7273175716400146, step time: 19.510507583618164ms\r\nStep 92, loss: 3.8727643489837646, step time: 19.855499267578125ms\r\n",,terminal_output +21101,14516771,"TERMINAL",0,0,"Step 93, loss: 4.219363212585449, step time: 19.446849822998047ms\r\n",,terminal_output +21102,14516884,"TERMINAL",0,0,"Step 94, loss: 3.7291347980499268, step time: 19.556045532226562ms\r\n",,terminal_output +21103,14516936,"TERMINAL",0,0,"Step 95, loss: 3.887514591217041, step time: 19.92058753967285ms\r\n",,terminal_output +21104,14517044,"TERMINAL",0,0,"Step 96, loss: 3.716327428817749, step time: 19.623994827270508ms\r\n",,terminal_output +21105,14517106,"TERMINAL",0,0,"Step 97, loss: 3.71360445022583, step time: 19.443750381469727ms\r\n",,terminal_output +21106,14517215,"TERMINAL",0,0,"Step 98, loss: 3.6644015312194824, step time: 19.81377601623535ms\r\n",,terminal_output +21107,14517275,"TERMINAL",0,0,"Step 99, loss: 3.8030824661254883, step time: 19.72651481628418ms\r\n",,terminal_output +21108,14517382,"TERMINAL",0,0,"Step 100, loss: 3.6369287967681885, step time: 19.717931747436523ms\r\n",,terminal_output +21109,14517503,"TERMINAL",0,0,"Step 101, loss: 3.7098796367645264, step time: 19.927501678466797ms\r\n",,terminal_output +21110,14517554,"TERMINAL",0,0,"Step 102, loss: 3.685777425765991, step time: 19.502878189086914ms\r\n",,terminal_output +21111,14517629,"TERMINAL",0,0,"Step 103, loss: 3.946397066116333, step time: 19.692420959472656ms\r\n",,terminal_output +21112,14517734,"TERMINAL",0,0,"Step 104, loss: 3.6903510093688965, step time: 19.965648651123047ms\r\n",,terminal_output +21113,14517843,"TERMINAL",0,0,"Step 105, loss: 3.8447704315185547, step time: 19.772768020629883ms\r\n",,terminal_output +21114,14517896,"TERMINAL",0,0,"Step 106, loss: 3.672220230102539, step time: 19.45018768310547ms\r\n",,terminal_output +21115,14518001,"TERMINAL",0,0,"Step 107, loss: 3.883784294128418, step time: 19.843101501464844ms\r\n",,terminal_output +21116,14518113,"TERMINAL",0,0,"Step 108, loss: 4.015435695648193, step time: 19.532203674316406ms\r\n",,terminal_output +21117,14518163,"TERMINAL",0,0,"Step 109, loss: 3.862706422805786, step time: 19.582033157348633ms\r\n",,terminal_output +21118,14518271,"TERMINAL",0,0,"Step 110, loss: 3.7384393215179443, step time: 19.99187469482422ms\r\n",,terminal_output +21119,14518331,"TERMINAL",0,0,"Step 111, loss: 3.844627857208252, step time: 19.601821899414062ms\r\n",,terminal_output +21120,14518457,"TERMINAL",0,0,"Step 112, loss: 3.665616035461426, step time: 19.472360610961914ms\r\n",,terminal_output +21121,14518509,"TERMINAL",0,0,"Step 113, loss: 3.7141823768615723, step time: 19.936323165893555ms\r\n",,terminal_output +21122,14518617,"TERMINAL",0,0,"Step 114, loss: 3.724351406097412, step time: 19.372224807739258ms\r\n",,terminal_output +21123,14518724,"TERMINAL",0,0,"Step 115, loss: 3.7540664672851562, step time: 
19.780635833740234ms\r\n",,terminal_output +21124,14518786,"TERMINAL",0,0,"Step 116, loss: 3.78991961479187, step time: 19.902706146240234ms\r\n",,terminal_output +21125,14518881,"TERMINAL",0,0,"Step 117, loss: 3.6018004417419434, step time: 19.509077072143555ms\r\n",,terminal_output +21126,14518953,"TERMINAL",0,0,"Step 118, loss: 3.5994815826416016, step time: 19.616365432739258ms\r\n",,terminal_output +21127,14519063,"TERMINAL",0,0,"Step 119, loss: 3.686807155609131, step time: 19.876956939697266ms\r\n",,terminal_output +21128,14519126,"TERMINAL",0,0,"Step 120, loss: 3.5096230506896973, step time: 19.485950469970703ms\r\n",,terminal_output +21129,14519236,"TERMINAL",0,0,"Step 121, loss: 3.6908254623413086, step time: 19.542932510375977ms\r\n",,terminal_output +21130,14519298,"TERMINAL",0,0,"Step 122, loss: 3.781309127807617, step time: 19.77396011352539ms\r\n",,terminal_output +21131,14519408,"TERMINAL",0,0,"Step 123, loss: 3.531367301940918, step time: 19.47164535522461ms\r\n",,terminal_output +21132,14519473,"TERMINAL",0,0,"Step 124, loss: 3.5179569721221924, step time: 19.38629150390625ms\r\n",,terminal_output +21133,14519581,"TERMINAL",0,0,"Step 125, loss: 3.478496789932251, step time: 19.82736587524414ms\r\n",,terminal_output +21134,14519688,"TERMINAL",0,0,"Step 126, loss: 3.514430284500122, step time: 19.47951316833496ms\r\n",,terminal_output +21135,14519749,"TERMINAL",0,0,"Step 127, loss: 3.4955809116363525, step time: 19.649028778076172ms\r\n",,terminal_output +21136,14519844,"TERMINAL",0,0,"Step 128, loss: 3.446234703063965, step time: 19.797801971435547ms\r\n",,terminal_output +21137,14519952,"TERMINAL",0,0,"Step 129, loss: 3.6973416805267334, step time: 19.54174041748047ms\r\n",,terminal_output +21138,14520005,"TERMINAL",0,0,"Step 130, loss: 3.6640126705169678, step time: 19.47617530822754ms\r\n",,terminal_output +21139,14520112,"TERMINAL",0,0,"Step 131, loss: 3.501034736633301, step time: 20.380020141601562ms\r\n",,terminal_output +21140,14520175,"TERMINAL",0,0,"Step 132, loss: 3.516157865524292, step time: 19.69146728515625ms\r\n",,terminal_output +21141,14520280,"TERMINAL",0,0,"Step 133, loss: 3.7430944442749023, step time: 19.48380470275879ms\r\n",,terminal_output +21142,14520390,"TERMINAL",0,0,"Step 134, loss: 3.4835474491119385, step time: 19.95372772216797ms\r\n",,terminal_output +21143,14520455,"TERMINAL",0,0,"Step 135, loss: 3.4699172973632812, step time: 19.526958465576172ms\r\n",,terminal_output +21144,14520561,"TERMINAL",0,0,"Step 136, loss: 3.479644775390625, step time: 19.372224807739258ms\r\n",,terminal_output +21145,14520613,"TERMINAL",0,0,"Step 137, loss: 3.53615403175354, step time: 19.71602439880371ms\r\n",,terminal_output +21146,14520722,"TERMINAL",0,0,"Step 138, loss: 3.8550913333892822, step time: 19.304752349853516ms\r\n",,terminal_output +21147,14520788,"TERMINAL",0,0,"Step 139, loss: 3.3624913692474365, step time: 19.347667694091797ms\r\n",,terminal_output +21148,14520895,"TERMINAL",0,0,"Step 140, loss: 3.465573787689209, step time: 21.433353424072266ms\r\n",,terminal_output +21149,14521001,"TERMINAL",0,0,"Step 141, loss: 3.428746461868286, step time: 20.1566219329834ms\r\n",,terminal_output +21150,14521052,"TERMINAL",0,0,"Step 142, loss: 3.5771825313568115, step time: 19.93870735168457ms\r\n",,terminal_output +21151,14521415,"TERMINAL",0,0,"Step 143, loss: 3.382736921310425, step time: 357.6343059539795ms\r\n",,terminal_output +21152,14521535,"TERMINAL",0,0,"Step 144, loss: 3.4101648330688477, step time: 27.50253677368164ms\r\n",,terminal_output 
+21153,14521593,"TERMINAL",0,0,"Step 145, loss: 3.4205572605133057, step time: 22.214412689208984ms\r\n",,terminal_output +21154,14521703,"TERMINAL",0,0,"Step 146, loss: 3.42496657371521, step time: 20.747900009155273ms\r\n",,terminal_output +21155,14521769,"TERMINAL",0,0,"Step 147, loss: 3.43257212638855, step time: 19.914865493774414ms\r\n",,terminal_output +21156,14521877,"TERMINAL",0,0,"Step 148, loss: 3.3707263469696045, step time: 19.684791564941406ms\r\n",,terminal_output +21157,14521988,"TERMINAL",0,0,"Step 149, loss: 3.462343692779541, step time: 20.232677459716797ms\r\n",,terminal_output +21158,14522041,"TERMINAL",0,0,"Step 150, loss: 3.3532040119171143, step time: 19.68097686767578ms\r\n",,terminal_output +21159,14522146,"TERMINAL",0,0,"Step 151, loss: 3.258737564086914, step time: 19.76466178894043ms\r\n",,terminal_output +21160,14522209,"TERMINAL",0,0,"Step 152, loss: 3.2395389080047607, step time: 20.148754119873047ms\r\n",,terminal_output +21161,14522315,"TERMINAL",0,0,"Step 153, loss: 3.4569027423858643, step time: 19.83642578125ms\r\n",,terminal_output +21162,14522421,"TERMINAL",0,0,"Step 154, loss: 3.3230762481689453, step time: 19.599199295043945ms\r\n",,terminal_output +21163,14522474,"TERMINAL",0,0,"Step 155, loss: 3.480625867843628, step time: 20.158767700195312ms\r\n",,terminal_output +21164,14522568,"TERMINAL",0,0,"Step 156, loss: 3.3582355976104736, step time: 19.559383392333984ms\r\n",,terminal_output +21165,14522674,"TERMINAL",0,0,"Step 157, loss: 3.4689230918884277, step time: 20.049571990966797ms\r\n",,terminal_output +21166,14522738,"TERMINAL",0,0,"Step 158, loss: 3.3808789253234863, step time: 20.149946212768555ms\r\n",,terminal_output +21167,14522845,"TERMINAL",0,0,"Step 159, loss: 3.446992874145508, step time: 19.694089889526367ms\r\n",,terminal_output +21168,14522963,"TERMINAL",0,0,"Step 160, loss: 3.321545362472534, step time: 19.636154174804688ms\r\n",,terminal_output +21169,14523015,"TERMINAL",0,0,"Step 161, loss: 3.5419626235961914, step time: 20.145654678344727ms\r\n",,terminal_output +21170,14523121,"TERMINAL",0,0,"Step 162, loss: 3.2821097373962402, step time: 19.53291893005371ms\r\n",,terminal_output +21171,14523181,"TERMINAL",0,0,"Step 163, loss: 3.3535594940185547, step time: 19.54936981201172ms\r\n",,terminal_output +21172,14523290,"TERMINAL",0,0,"Step 164, loss: 3.2774722576141357, step time: 20.008087158203125ms\r\n",,terminal_output +21173,14523386,"TERMINAL",0,0,"Step 165, loss: 3.878875494003296, step time: 19.665002822875977ms\r\n",,terminal_output +21174,14523481,"TERMINAL",0,0,"Step 166, loss: 3.492800235748291, step time: 19.51742172241211ms\r\n",,terminal_output +21175,14523539,"TERMINAL",0,0,"Step 167, loss: 3.3742525577545166, step time: 20.04265785217285ms\r\n",,terminal_output +21176,14523742,"TERMINAL",0,0,"Step 168, loss: 3.3851799964904785, step time: 19.501924514770508ms\r\nStep 169, loss: 3.3242528438568115, step time: 19.498825073242188ms\r\n",,terminal_output +21177,14523893,"TERMINAL",0,0,"Step 170, loss: 3.285797357559204, step time: 20.1723575592041ms\r\n",,terminal_output +21178,14523979,"TERMINAL",0,0,"Step 171, loss: 3.2570879459381104, step time: 19.546985626220703ms\r\n",,terminal_output +21179,14524139,"TERMINAL",0,0,"Step 172, loss: 3.2583439350128174, step time: 22.69887924194336ms\r\nStep 173, loss: 3.629587411880493, step time: 20.778894424438477ms\r\n",,terminal_output +21180,14524227,"TERMINAL",0,0,"Step 174, loss: 3.166846990585327, step time: 20.26081085205078ms\r\n",,terminal_output 
+21181,14524293,"TERMINAL",0,0,"Step 175, loss: 3.2830944061279297, step time: 19.234180450439453ms\r\n",,terminal_output +21182,14524508,"TERMINAL",0,0,"Step 176, loss: 3.1662447452545166, step time: 19.551753997802734ms\r\nStep 177, loss: 3.2616310119628906, step time: 18.949031829833984ms\r\n",,terminal_output +21183,14524561,"TERMINAL",0,0,"Step 178, loss: 3.4413797855377197, step time: 18.903493881225586ms\r\n",,terminal_output +21184,14524714,"TERMINAL",0,0,"Step 179, loss: 3.241933584213257, step time: 19.38462257385254ms\r\n",,terminal_output +21185,14524765,"TERMINAL",0,0,"Step 180, loss: 3.0829503536224365, step time: 18.971681594848633ms\r\nStep 181, loss: 3.415151357650757, step time: 19.07634735107422ms\r\n",,terminal_output +21186,14524921,"TERMINAL",0,0,"Step 182, loss: 3.234758138656616, step time: 19.478321075439453ms\r\n",,terminal_output +21187,14524976,"TERMINAL",0,0,"Step 183, loss: 3.624000072479248, step time: 18.98813247680664ms\r\n",,terminal_output +21188,14525099,"TERMINAL",0,0,"Step 184, loss: 3.199063777923584, step time: 18.923044204711914ms\r\n",,terminal_output +21189,14525152,"TERMINAL",0,0,"Step 185, loss: 3.329582452774048, step time: 19.513607025146484ms\r\n",,terminal_output +21190,14525254,"TERMINAL",0,0,"Step 186, loss: 3.3113274574279785, step time: 18.934965133666992ms\r\n",,terminal_output +21191,14525395,"TERMINAL",0,0,"Step 187, loss: 3.7223410606384277, step time: 18.973350524902344ms\r\n",,terminal_output +21192,14525538,"TERMINAL",0,0,"Step 188, loss: 3.2517096996307373, step time: 19.465208053588867ms\r\n",,terminal_output +21193,14525539,"TERMINAL",0,0,"Step 189, loss: 3.13250470161438, step time: 18.964529037475586ms\r\n",,terminal_output +21194,14525594,"TERMINAL",0,0,"Step 190, loss: 3.2743091583251953, step time: 18.916845321655273ms\r\n",,terminal_output +21195,14525737,"TERMINAL",0,0,"Step 191, loss: 3.2310709953308105, step time: 19.319534301757812ms\r\n",,terminal_output +21196,14525790,"TERMINAL",0,0,"Step 192, loss: 3.190556287765503, step time: 18.85247230529785ms\r\n",,terminal_output +21197,14525901,"TERMINAL",0,0,"Step 193, loss: 3.1715917587280273, step time: 18.856048583984375ms\r\nStep 194, loss: 3.2694783210754395, step time: 19.3173885345459ms\r\n",,terminal_output +21198,14526012,"TERMINAL",0,0,"Step 195, loss: 3.159907817840576, step time: 18.964290618896484ms\r\n",,terminal_output +21199,14526075,"TERMINAL",0,0,"Step 196, loss: 3.3674581050872803, step time: 18.912792205810547ms\r\n",,terminal_output +21200,14526181,"TERMINAL",0,0,"Step 197, loss: 3.2050209045410156, step time: 19.39868927001953ms\r\n",,terminal_output +21201,14526290,"TERMINAL",0,0,"Step 198, loss: 3.15499210357666, step time: 18.918991088867188ms\r\n",,terminal_output +21202,14526343,"TERMINAL",0,0,"Step 199, loss: 3.144782781600952, step time: 18.87226104736328ms\r\n",,terminal_output +21203,14526449,"TERMINAL",0,0,"Step 200, loss: 3.4584927558898926, step time: 19.376039505004883ms\r\n",,terminal_output +21204,14526514,"TERMINAL",0,0,"Step 201, loss: 3.0923268795013428, step time: 19.06895637512207ms\r\n",,terminal_output +21205,14526622,"TERMINAL",0,0,"Step 202, loss: 3.2021472454071045, step time: 18.841028213500977ms\r\n",,terminal_output +21206,14526729,"TERMINAL",0,0,"Step 203, loss: 3.1412742137908936, step time: 19.36650276184082ms\r\n",,terminal_output +21207,14526792,"TERMINAL",0,0,"Step 204, loss: 3.153463363647461, step time: 18.8291072845459ms\r\n",,terminal_output +21208,14526899,"TERMINAL",0,0,"Step 205, loss: 3.1016154289245605, step 
time: 18.960952758789062ms\r\n",,terminal_output +21209,14526952,"TERMINAL",0,0,"Step 206, loss: 3.1695868968963623, step time: 19.406557083129883ms\r\n",,terminal_output +21210,14527059,"TERMINAL",0,0,"Step 207, loss: 3.1042888164520264, step time: 19.01841163635254ms\r\n",,terminal_output +21211,14527166,"TERMINAL",0,0,"Step 208, loss: 3.2059507369995117, step time: 18.794775009155273ms\r\n",,terminal_output +21212,14527218,"TERMINAL",0,0,"Step 209, loss: 3.204463005065918, step time: 19.28091049194336ms\r\n",,terminal_output +21213,14527325,"TERMINAL",0,0,"Step 210, loss: 3.2359414100646973, step time: 18.901824951171875ms\r\n",,terminal_output +21214,14527388,"TERMINAL",0,0,"Step 211, loss: 3.426626682281494, step time: 18.982648849487305ms\r\n",,terminal_output +21215,14527496,"TERMINAL",0,0,"Step 212, loss: 3.0842514038085938, step time: 19.32239532470703ms\r\n",,terminal_output +21216,14527651,"TERMINAL",0,0,"Step 213, loss: 3.051473379135132, step time: 18.98503303527832ms\r\nStep 214, loss: 3.1215505599975586, step time: 18.827199935913086ms\r\n",,terminal_output +21217,14527745,"TERMINAL",0,0,"Step 215, loss: 3.1927387714385986, step time: 19.715070724487305ms\r\n",,terminal_output +21218,14527853,"TERMINAL",0,0,"Step 216, loss: 3.0945656299591064, step time: 18.99099349975586ms\r\n",,terminal_output +21219,14527917,"TERMINAL",0,0,"Step 217, loss: 3.1178207397460938, step time: 18.9211368560791ms\r\n",,terminal_output +21220,14528026,"TERMINAL",0,0,"Step 218, loss: 3.0790059566497803, step time: 19.39702033996582ms\r\n",,terminal_output +21221,14528093,"TERMINAL",0,0,"Step 219, loss: 3.0688345432281494, step time: 18.948793411254883ms\r\n",,terminal_output +21222,14528202,"TERMINAL",0,0,"Step 220, loss: 3.1079182624816895, step time: 19.071578979492188ms\r\n",,terminal_output +21223,14528310,"TERMINAL",0,0,"Step 221, loss: 3.0797605514526367, step time: 19.492626190185547ms\r\n",,terminal_output +21224,14528364,"TERMINAL",0,0,"Step 222, loss: 3.1931850910186768, step time: 18.909454345703125ms\r\n",,terminal_output +21225,14528471,"TERMINAL",0,0,"Step 223, loss: 2.9837679862976074, step time: 18.981456756591797ms\r\n",,terminal_output +21226,14528532,"TERMINAL",0,0,"Step 224, loss: 3.117368698120117, step time: 19.36793327331543ms\r\n",,terminal_output +21227,14528631,"TERMINAL",0,0,"Step 225, loss: 3.1119468212127686, step time: 19.19245719909668ms\r\n",,terminal_output +21228,14528739,"TERMINAL",0,0,"Step 226, loss: 2.9864425659179688, step time: 18.94378662109375ms\r\n",,terminal_output +21229,14528800,"TERMINAL",0,0,"Step 227, loss: 3.088259696960449, step time: 19.47498321533203ms\r\n",,terminal_output +21230,14528911,"TERMINAL",0,0,"Step 228, loss: 2.9447696208953857, step time: 18.90110969543457ms\r\n",,terminal_output +21231,14528969,"TERMINAL",0,0,"Step 229, loss: 3.0166373252868652, step time: 19.031047821044922ms\r\n",,terminal_output +21232,14529080,"TERMINAL",0,0,"Step 230, loss: 3.06498122215271, step time: 25.15864372253418ms\r\n",,terminal_output +21233,14529199,"TERMINAL",0,0,"Step 231, loss: 3.2787036895751953, step time: 22.598981857299805ms\r\n",,terminal_output +21234,14529252,"TERMINAL",0,0,"Step 232, loss: 3.022174119949341, step time: 20.406246185302734ms\r\n",,terminal_output +21235,14529359,"TERMINAL",0,0,"Step 233, loss: 3.0815136432647705, step time: 20.204544067382812ms\r\n",,terminal_output +21236,14529426,"TERMINAL",0,0,"Step 234, loss: 2.9855446815490723, step time: 19.78445053100586ms\r\n",,terminal_output +21237,14529538,"TERMINAL",0,0,"Step 235, 
loss: 2.98958683013916, step time: 20.229339599609375ms\r\n",,terminal_output +21238,14529603,"TERMINAL",0,0,"Step 236, loss: 2.966106653213501, step time: 20.45464515686035ms\r\n",,terminal_output +21239,14529714,"TERMINAL",0,0,"Step 237, loss: 3.2559773921966553, step time: 19.865989685058594ms\r\n",,terminal_output +21240,14529771,"TERMINAL",0,0,"Step 238, loss: 3.0881340503692627, step time: 19.774675369262695ms\r\n",,terminal_output +21241,14529882,"TERMINAL",0,0,"Step 239, loss: 2.9517555236816406, step time: 20.287752151489258ms\r\n",,terminal_output +21242,14529942,"TERMINAL",0,0,"Step 240, loss: 2.9688174724578857, step time: 20.036935806274414ms\r\n",,terminal_output +21243,14530055,"TERMINAL",0,0,"Step 241, loss: 2.97637939453125, step time: 19.863367080688477ms\r\n",,terminal_output +21244,14530119,"TERMINAL",0,0,"Step 242, loss: 3.0114808082580566, step time: 20.2333927154541ms\r\n",,terminal_output +21245,14530228,"TERMINAL",0,0,"Step 243, loss: 2.946291208267212, step time: 19.832372665405273ms\r\n",,terminal_output +21246,14530290,"TERMINAL",0,0,"Step 244, loss: 2.9340314865112305, step time: 19.715309143066406ms\r\n",,terminal_output +21247,14530385,"TERMINAL",0,0,"Step 245, loss: 2.932636260986328, step time: 20.223379135131836ms\r\n",,terminal_output +21248,14530493,"TERMINAL",0,0,"Step 246, loss: 3.1271371841430664, step time: 19.597530364990234ms\r\n",,terminal_output +21249,14530555,"TERMINAL",0,0,"Step 247, loss: 2.943358898162842, step time: 19.63329315185547ms\r\n",,terminal_output +21250,14530659,"TERMINAL",0,0,"Step 248, loss: 3.0235848426818848, step time: 20.133018493652344ms\r\n",,terminal_output +21251,14530764,"TERMINAL",0,0,"Step 249, loss: 2.9425172805786133, step time: 19.690513610839844ms\r\n",,terminal_output +21252,14530830,"TERMINAL",0,0,"Step 250, loss: 3.0146865844726562, step time: 19.14238929748535ms\r\n",,terminal_output +21253,14530938,"TERMINAL",0,0,"Step 251, loss: 3.0928330421447754, step time: 19.530057907104492ms\r\n",,terminal_output +21254,14530994,"TERMINAL",0,0,"Step 252, loss: 2.9449009895324707, step time: 19.249916076660156ms\r\n",,terminal_output +21255,14531104,"TERMINAL",0,0,"Step 253, loss: 2.887584686279297, step time: 19.244670867919922ms\r\n",,terminal_output +21256,14531171,"TERMINAL",0,0,"Step 254, loss: 3.1832656860351562, step time: 19.44565773010254ms\r\n",,terminal_output +21257,14531281,"TERMINAL",0,0,"Step 255, loss: 2.97038197517395, step time: 19.083499908447266ms\r\n",,terminal_output +21258,14531345,"TERMINAL",0,0,"Step 256, loss: 3.0479936599731445, step time: 19.066810607910156ms\r\n",,terminal_output +21259,14531451,"TERMINAL",0,0,"Step 257, loss: 3.052544593811035, step time: 19.505023956298828ms\r\n",,terminal_output +21260,14531558,"TERMINAL",0,0,"Step 258, loss: 2.862173318862915, step time: 19.029617309570312ms\r\n",,terminal_output +21261,14531615,"TERMINAL",0,0,"Step 259, loss: 2.852703332901001, step time: 19.1195011138916ms\r\n",,terminal_output +21262,14531723,"TERMINAL",0,0,"Step 260, loss: 2.927851676940918, step time: 19.533634185791016ms\r\n",,terminal_output +21263,14531784,"TERMINAL",0,0,"Step 261, loss: 3.2014708518981934, step time: 19.169330596923828ms\r\n",,terminal_output +21264,14531929,"TERMINAL",0,0,"Step 262, loss: 3.3060216903686523, step time: 19.086837768554688ms\r\n",,terminal_output +21265,14531959,"TERMINAL",0,0,"Step 263, loss: 2.9030377864837646, step time: 19.478321075439453ms\r\n",,terminal_output +21266,14532069,"TERMINAL",0,0,"Step 264, loss: 2.949582815170288, step time: 
24.168968200683594ms\r\n",,terminal_output +21267,14532136,"TERMINAL",0,0,"Step 265, loss: 2.9040286540985107, step time: 21.19898796081543ms\r\n",,terminal_output +21268,14532245,"TERMINAL",0,0,"Step 266, loss: 3.014430522918701, step time: 19.659996032714844ms\r\n",,terminal_output +21269,14532352,"TERMINAL",0,0,"Step 267, loss: 3.256373643875122, step time: 19.121170043945312ms\r\n",,terminal_output +21270,14532403,"TERMINAL",0,0,"Step 268, loss: 2.8812174797058105, step time: 19.00339126586914ms\r\n",,terminal_output +21271,14532510,"TERMINAL",0,0,"Step 269, loss: 2.8626725673675537, step time: 19.518375396728516ms\r\n",,terminal_output +21272,14532613,"TERMINAL",0,0,"Step 270, loss: 2.837279796600342, step time: 19.028425216674805ms\r\n",,terminal_output +21273,14532662,"TERMINAL",0,0,"Step 271, loss: 2.848116874694824, step time: 19.092082977294922ms\r\n",,terminal_output +21274,14532768,"TERMINAL",0,0,"Step 272, loss: 2.936079502105713, step time: 19.68216896057129ms\r\n",,terminal_output +21275,14532877,"TERMINAL",0,0,"Step 273, loss: 2.858917474746704, step time: 19.05083656311035ms\r\n",,terminal_output +21276,14532929,"TERMINAL",0,0,"Step 274, loss: 2.848142147064209, step time: 19.09613609313965ms\r\n",,terminal_output +21277,14533038,"TERMINAL",0,0,"Step 275, loss: 2.856344699859619, step time: 19.598960876464844ms\r\n",,terminal_output +21278,14533103,"TERMINAL",0,0,"Step 276, loss: 3.118135690689087, step time: 19.010305404663086ms\r\n",,terminal_output +21279,14533211,"TERMINAL",0,0,"Step 277, loss: 3.1649749279022217, step time: 19.074678421020508ms\r\n",,terminal_output +21280,14533280,"TERMINAL",0,0,"Step 278, loss: 2.855476140975952, step time: 19.54054832458496ms\r\n",,terminal_output +21281,14533385,"TERMINAL",0,0,"Step 279, loss: 3.0817551612854004, step time: 19.059181213378906ms\r\n",,terminal_output +21282,14533491,"TERMINAL",0,0,"Step 280, loss: 2.9797885417938232, step time: 19.059181213378906ms\r\n",,terminal_output +21283,14533552,"TERMINAL",0,0,"Step 281, loss: 2.8481009006500244, step time: 19.576549530029297ms\r\n",,terminal_output +21284,14533628,"TERMINAL",0,0,"Step 282, loss: 3.00461745262146, step time: 19.133567810058594ms\r\n",,terminal_output +21285,14533734,"TERMINAL",0,0,"Step 283, loss: 2.846144199371338, step time: 19.11187171936035ms\r\n",,terminal_output +21286,14534018,"TERMINAL",0,0,"Step 284, loss: 3.515678644180298, step time: 299.391508102417ms\r\n",,terminal_output +21287,14534126,"TERMINAL",0,0,"Step 285, loss: 2.849332332611084, step time: 26.95298194885254ms\r\n",,terminal_output +21288,14534234,"TERMINAL",0,0,"Step 286, loss: 2.9443717002868652, step time: 22.092819213867188ms\r\n",,terminal_output +21289,14534292,"TERMINAL",0,0,"Step 287, loss: 2.822627305984497, step time: 20.487070083618164ms\r\n",,terminal_output +21290,14534401,"TERMINAL",0,0,"Step 288, loss: 2.933814525604248, step time: 19.491910934448242ms\r\n",,terminal_output +21291,14534463,"TERMINAL",0,0,"Step 289, loss: 3.0248827934265137, step time: 19.345998764038086ms\r\n",,terminal_output +21292,14534584,"TERMINAL",0,0,"Step 290, loss: 2.8222556114196777, step time: 19.841909408569336ms\r\n",,terminal_output +21293,14534641,"TERMINAL",0,0,"Step 291, loss: 2.828418731689453, step time: 19.306182861328125ms\r\n",,terminal_output +21294,14534751,"TERMINAL",0,0,"Step 292, loss: 2.9555318355560303, step time: 19.13619041442871ms\r\n",,terminal_output +21295,14534814,"TERMINAL",0,0,"Step 293, loss: 3.0344955921173096, step time: 19.700288772583008ms\r\n",,terminal_output 
+21296,14534924,"TERMINAL",0,0,"Step 294, loss: 2.913677930831909, step time: 18.57781410217285ms\r\n",,terminal_output +21297,14535033,"TERMINAL",0,0,"Step 295, loss: 2.822075366973877, step time: 18.782615661621094ms\r\n",,terminal_output +21298,14535088,"TERMINAL",0,0,"Step 296, loss: 3.0923893451690674, step time: 19.13928985595703ms\r\n",,terminal_output +21299,14535195,"TERMINAL",0,0,"Step 297, loss: 2.822324752807617, step time: 18.538951873779297ms\r\n",,terminal_output +21300,14535260,"TERMINAL",0,0,"Step 298, loss: 2.804050922393799, step time: 18.545150756835938ms\r\n",,terminal_output +21301,14535368,"TERMINAL",0,0,"Step 299, loss: 2.7627158164978027, step time: 19.142866134643555ms\r\n",,terminal_output +21302,14535428,"TERMINAL",0,0,"Step 300, loss: 2.8021817207336426, step time: 18.65077018737793ms\r\n",,terminal_output +21303,14535539,"TERMINAL",0,0,"Step 301, loss: 2.854158639907837, step time: 18.414735794067383ms\r\n",,terminal_output +21304,14535606,"TERMINAL",0,0,"Step 302, loss: 2.806255578994751, step time: 19.028425216674805ms\r\n",,terminal_output +21305,14535714,"TERMINAL",0,0,"Step 303, loss: 2.8452303409576416, step time: 18.529176712036133ms\r\n",,terminal_output +21306,14535776,"TERMINAL",0,0,"Step 304, loss: 2.8479385375976562, step time: 18.48292350769043ms\r\n",,terminal_output +21307,14535887,"TERMINAL",0,0,"Step 305, loss: 2.950176239013672, step time: 19.017696380615234ms\r\n",,terminal_output +21308,14535953,"TERMINAL",0,0,"Step 306, loss: 2.7230873107910156, step time: 18.210172653198242ms\r\n",,terminal_output +21309,14536064,"TERMINAL",0,0,"Step 307, loss: 2.9250524044036865, step time: 18.36705207824707ms\r\n",,terminal_output +21310,14536124,"TERMINAL",0,0,"Step 308, loss: 2.7267661094665527, step time: 18.503904342651367ms\r\n",,terminal_output +21311,14536234,"TERMINAL",0,0,"Step 309, loss: 2.897493600845337, step time: 18.118619918823242ms\r\n",,terminal_output +21312,14536299,"TERMINAL",0,0,"Step 310, loss: 2.675137758255005, step time: 18.072843551635742ms\r\n",,terminal_output +21313,14536411,"TERMINAL",0,0,"Step 311, loss: 2.6126487255096436, step time: 18.630027770996094ms\r\n",,terminal_output +21314,14536469,"TERMINAL",0,0,"Step 312, loss: 2.6799376010894775, step time: 18.070459365844727ms\r\n",,terminal_output +21315,14536585,"TERMINAL",0,0,"Step 313, loss: 2.9781301021575928, step time: 18.766403198242188ms\r\n",,terminal_output +21316,14536641,"TERMINAL",0,0,"Step 314, loss: 3.1536200046539307, step time: 18.450260162353516ms\r\n",,terminal_output +21317,14536746,"TERMINAL",0,0,"Step 315, loss: 2.8802921772003174, step time: 18.255233764648438ms\r\n",,terminal_output +21318,14536851,"TERMINAL",0,0,"Step 316, loss: 2.8315300941467285, step time: 18.020153045654297ms\r\n",,terminal_output +21319,14536902,"TERMINAL",0,0,"Step 317, loss: 2.7534186840057373, step time: 19.344091415405273ms\r\n",,terminal_output +21320,14537012,"TERMINAL",0,0,"Step 318, loss: 2.9054622650146484, step time: 17.81916618347168ms\r\n",,terminal_output +21321,14537078,"TERMINAL",0,0,"Step 319, loss: 2.758765459060669, step time: 18.01776885986328ms\r\n",,terminal_output +21322,14537185,"TERMINAL",0,0,"Step 320, loss: 3.1916749477386475, step time: 18.433570861816406ms\r\n",,terminal_output +21323,14537253,"TERMINAL",0,0,"Step 321, loss: 2.824740171432495, step time: 17.98224449157715ms\r\n",,terminal_output +21324,14537360,"TERMINAL",0,0,"Step 322, loss: 2.665999412536621, step time: 18.103361129760742ms\r\n",,terminal_output +21325,14537421,"TERMINAL",0,0,"Step 
323, loss: 2.6740710735321045, step time: 19.37556266784668ms\r\n",,terminal_output +21326,14537531,"TERMINAL",0,0,"Step 324, loss: 2.7510807514190674, step time: 17.877817153930664ms\r\n",,terminal_output +21327,14537593,"TERMINAL",0,0,"Step 325, loss: 2.6769182682037354, step time: 17.98081398010254ms\r\n",,terminal_output +21328,14537702,"TERMINAL",0,0,"Step 326, loss: 3.3431942462921143, step time: 18.444538116455078ms\r\n",,terminal_output +21329,14537810,"TERMINAL",0,0,"Step 327, loss: 2.68115234375, step time: 18.018722534179688ms\r\n",,terminal_output +21330,14537861,"TERMINAL",0,0,"Step 328, loss: 2.6503546237945557, step time: 17.827749252319336ms\r\n",,terminal_output +21331,14537967,"TERMINAL",0,0,"Step 329, loss: 2.879232406616211, step time: 18.464326858520508ms\r\n",,terminal_output +21332,14538028,"TERMINAL",0,0,"Step 330, loss: 3.0551810264587402, step time: 17.810344696044922ms\r\n",,terminal_output +21333,14538135,"TERMINAL",0,0,"Step 331, loss: 2.681903839111328, step time: 18.03135871887207ms\r\n",,terminal_output +21334,14538242,"TERMINAL",0,0,"Step 332, loss: 2.686215877532959, step time: 18.757104873657227ms\r\n",,terminal_output +21335,14538294,"TERMINAL",0,0,"Step 333, loss: 2.75303053855896, step time: 17.907142639160156ms\r\n",,terminal_output +21336,14538398,"TERMINAL",0,0,"Step 334, loss: 2.7349984645843506, step time: 17.869949340820312ms\r\n",,terminal_output +21337,14538462,"TERMINAL",0,0,"Step 335, loss: 2.808271884918213, step time: 18.632173538208008ms\r\n",,terminal_output +21338,14538568,"TERMINAL",0,0,"Step 336, loss: 2.648538589477539, step time: 17.94266700744629ms\r\n",,terminal_output +21339,14538648,"TERMINAL",0,0,"Step 337, loss: 2.978921413421631, step time: 18.751859664916992ms\r\n",,terminal_output +21340,14538754,"TERMINAL",0,0,"Step 338, loss: 2.6258392333984375, step time: 18.589258193969727ms\r\n",,terminal_output +21341,14538821,"TERMINAL",0,0,"Step 339, loss: 2.7346251010894775, step time: 18.76974105834961ms\r\n",,terminal_output +21342,14538933,"TERMINAL",0,0,"Step 340, loss: 2.933403968811035, step time: 18.81885528564453ms\r\n",,terminal_output +21343,14538985,"TERMINAL",0,0,"Step 341, loss: 2.721266746520996, step time: 19.419193267822266ms\r\n",,terminal_output +21344,14539078,"TERMINAL",0,0,"Step 342, loss: 2.6001882553100586, step time: 18.645286560058594ms\r\n",,terminal_output +21345,14539187,"TERMINAL",0,0,"Step 343, loss: 2.742321014404297, step time: 18.311262130737305ms\r\n",,terminal_output +21346,14539245,"TERMINAL",0,0,"Step 344, loss: 2.665865421295166, step time: 18.585681915283203ms\r\n",,terminal_output +21347,14539355,"TERMINAL",0,0,"Step 345, loss: 2.835319757461548, step time: 18.91636848449707ms\r\n",,terminal_output +21348,14539461,"TERMINAL",0,0,"Step 346, loss: 2.987732172012329, step time: 18.840789794921875ms\r\n",,terminal_output +21349,14539513,"TERMINAL",0,0,"Step 347, loss: 2.583937168121338, step time: 19.0122127532959ms\r\n",,terminal_output +21350,14539608,"TERMINAL",0,0,"Step 348, loss: 2.7929160594940186, step time: 17.99154281616211ms\r\n",,terminal_output +21351,14539714,"TERMINAL",0,0,"Step 349, loss: 2.728228807449341, step time: 18.084287643432617ms\r\n",,terminal_output +21352,14539785,"TERMINAL",0,0,"Step 350, loss: 2.671706438064575, step time: 18.471717834472656ms\r\n",,terminal_output +21353,14539892,"TERMINAL",0,0,"Step 351, loss: 2.642350912094116, step time: 18.39470863342285ms\r\n",,terminal_output +21354,14539944,"TERMINAL",0,0,"Step 352, loss: 2.680935859680176, step time: 
18.908262252807617ms\r\n",,terminal_output +21355,14540050,"TERMINAL",0,0,"Step 353, loss: 2.7113265991210938, step time: 18.715620040893555ms\r\n",,terminal_output +21356,14540113,"TERMINAL",0,0,"Step 354, loss: 2.590346336364746, step time: 18.117427825927734ms\r\n",,terminal_output +21357,14540218,"TERMINAL",0,0,"Step 355, loss: 2.991598129272461, step time: 18.639087677001953ms\r\n",,terminal_output +21358,14540326,"TERMINAL",0,0,"Step 356, loss: 2.833113670349121, step time: 18.60809326171875ms\r\n",,terminal_output +21359,14540386,"TERMINAL",0,0,"Step 357, loss: 2.6451759338378906, step time: 18.19300651550293ms\r\n",,terminal_output +21360,14540496,"TERMINAL",0,0,"Step 358, loss: 2.8143765926361084, step time: 18.836021423339844ms\r\n",,terminal_output +21361,14540550,"TERMINAL",0,0,"Step 359, loss: 2.7220304012298584, step time: 19.41704750061035ms\r\n",,terminal_output +21362,14540655,"TERMINAL",0,0,"Step 360, loss: 2.8275415897369385, step time: 18.02682876586914ms\r\n",,terminal_output +21363,14540762,"TERMINAL",0,0,"Step 361, loss: 2.669779062271118, step time: 18.5549259185791ms\r\n",,terminal_output +21364,14540813,"TERMINAL",0,0,"Step 362, loss: 2.623091220855713, step time: 19.00792121887207ms\r\n",,terminal_output +21365,14540920,"TERMINAL",0,0,"Step 363, loss: 2.722055673599243, step time: 17.847776412963867ms\r\n",,terminal_output +21366,14540986,"TERMINAL",0,0,"Step 364, loss: 2.583688974380493, step time: 17.84491539001465ms\r\n",,terminal_output +21367,14541082,"TERMINAL",0,0,"Step 365, loss: 2.521902322769165, step time: 18.56851577758789ms\r\n",,terminal_output +21368,14541190,"TERMINAL",0,0,"Step 366, loss: 2.6951024532318115, step time: 17.8987979888916ms\r\n",,terminal_output +21369,14541247,"TERMINAL",0,0,"Step 367, loss: 2.709315776824951, step time: 17.857789993286133ms\r\n",,terminal_output +21370,14541355,"TERMINAL",0,0,"Step 368, loss: 2.5749893188476562, step time: 18.360137939453125ms\r\n",,terminal_output +21371,14541418,"TERMINAL",0,0,"Step 369, loss: 2.56538462638855, step time: 17.835617065429688ms\r\n",,terminal_output +21372,14541532,"TERMINAL",0,0,"Step 370, loss: 3.342763900756836, step time: 17.832040786743164ms\r\n",,terminal_output +21373,14541594,"TERMINAL",0,0,"Step 371, loss: 2.5963597297668457, step time: 18.42331886291504ms\r\n",,terminal_output +21374,14541702,"TERMINAL",0,0,"Step 372, loss: 2.8145127296447754, step time: 17.781734466552734ms\r\n",,terminal_output +21375,14541765,"TERMINAL",0,0,"Step 373, loss: 2.908660411834717, step time: 17.762184143066406ms\r\n",,terminal_output +21376,14541915,"TERMINAL",0,0,"Step 374, loss: 3.08823823928833, step time: 18.35179328918457ms\r\n",,terminal_output +21377,14541966,"TERMINAL",0,0,"Step 375, loss: 3.21951961517334, step time: 17.782926559448242ms\r\n",,terminal_output +21378,14542058,"TERMINAL",0,0,"Step 376, loss: 2.863464832305908, step time: 17.8525447845459ms\r\n",,terminal_output +21379,14542111,"TERMINAL",0,0,"Step 377, loss: 2.7087481021881104, step time: 18.37754249572754ms\r\n",,terminal_output +21380,14542203,"TERMINAL",0,0,"Step 378, loss: 2.668276071548462, step time: 17.847299575805664ms\r\n",,terminal_output +21381,14542315,"TERMINAL",0,0,"Step 379, loss: 2.673659086227417, step time: 17.88496971130371ms\r\n",,terminal_output +21382,14542370,"TERMINAL",0,0,"Step 380, loss: 2.647724151611328, step time: 18.444299697875977ms\r\n",,terminal_output +21383,14542462,"TERMINAL",0,0,"Step 381, loss: 3.0266716480255127, step time: 17.859220504760742ms\r\n",,terminal_output 
+21384,14542568,"TERMINAL",0,0,"Step 382, loss: 2.57649302482605, step time: 17.785072326660156ms\r\n",,terminal_output +21385,14542637,"TERMINAL",0,0,"Step 383, loss: 3.0395827293395996, step time: 18.40519905090332ms\r\n",,terminal_output +21386,14542769,"TERMINAL",0,0,"Step 384, loss: 2.687225818634033, step time: 17.912864685058594ms\r\n",,terminal_output +21387,14542822,"TERMINAL",0,0,"Step 385, loss: 2.6989552974700928, step time: 17.947912216186523ms\r\n",,terminal_output +21388,14542931,"TERMINAL",0,0,"Step 386, loss: 2.671882390975952, step time: 18.38231086730957ms\r\n",,terminal_output +21389,14542992,"TERMINAL",0,0,"Step 387, loss: 2.9720492362976074, step time: 17.896175384521484ms\r\n",,terminal_output +21390,14543102,"TERMINAL",0,0,"Step 388, loss: 2.6439080238342285, step time: 17.746686935424805ms\r\n",,terminal_output +21391,14543154,"TERMINAL",0,0,"Step 389, loss: 2.7584311962127686, step time: 18.516063690185547ms\r\n",,terminal_output +21392,14543262,"TERMINAL",0,0,"Step 390, loss: 2.728781223297119, step time: 17.781734466552734ms\r\n",,terminal_output +21393,14543326,"TERMINAL",0,0,"Step 391, loss: 2.534911870956421, step time: 17.836809158325195ms\r\n",,terminal_output +21394,14543435,"TERMINAL",0,0,"Step 392, loss: 2.586387872695923, step time: 18.398284912109375ms\r\n",,terminal_output +21395,14543497,"TERMINAL",0,0,"Step 393, loss: 2.631317377090454, step time: 17.838716506958008ms\r\n",,terminal_output +21396,14543612,"TERMINAL",0,0,"Step 394, loss: 2.482503890991211, step time: 17.82846450805664ms\r\n",,terminal_output +21397,14543670,"TERMINAL",0,0,"Step 395, loss: 2.819063663482666, step time: 18.39470863342285ms\r\n",,terminal_output +21398,14543779,"TERMINAL",0,0,"Step 396, loss: 2.539086103439331, step time: 17.813920974731445ms\r\n",,terminal_output +21399,14543888,"TERMINAL",0,0,"Step 397, loss: 2.4070370197296143, step time: 17.789363861083984ms\r\n",,terminal_output +21400,14543948,"TERMINAL",0,0,"Step 398, loss: 2.4994113445281982, step time: 22.93992042541504ms\r\n",,terminal_output +21401,14544060,"TERMINAL",0,0,"Step 399, loss: 2.5281686782836914, step time: 19.750118255615234ms\r\n",,terminal_output +21402,14544109,"TERMINAL",0,0,"Step 400, loss: 2.514575481414795, step time: 18.507957458496094ms\r\n",,terminal_output +21403,14544213,"TERMINAL",0,0,"Step 401, loss: 2.5058202743530273, step time: 18.589258193969727ms\r\n",,terminal_output +21404,14544324,"TERMINAL",0,0,"Step 402, loss: 2.5577971935272217, step time: 18.157482147216797ms\r\n",,terminal_output +21405,14544376,"TERMINAL",0,0,"Step 403, loss: 2.6223247051239014, step time: 17.943620681762695ms\r\n",,terminal_output +21406,14544485,"TERMINAL",0,0,"Step 404, loss: 2.535443067550659, step time: 18.533706665039062ms\r\n",,terminal_output +21407,14544550,"TERMINAL",0,0,"Step 405, loss: 2.6920113563537598, step time: 17.960309982299805ms\r\n",,terminal_output +21408,14544626,"TERMINAL",0,0,"Step 406, loss: 2.78810715675354, step time: 17.79651641845703ms\r\n",,terminal_output +21409,14544736,"TERMINAL",0,0,"Step 407, loss: 2.5241777896881104, step time: 18.40949058532715ms\r\n",,terminal_output +21410,14544799,"TERMINAL",0,0,"Step 408, loss: 2.492311716079712, step time: 17.79460906982422ms\r\n",,terminal_output +21411,14544910,"TERMINAL",0,0,"Step 409, loss: 2.5717761516571045, step time: 17.79937744140625ms\r\n",,terminal_output +21412,14544979,"TERMINAL",0,0,"Step 410, loss: 2.4727351665496826, step time: 19.83928680419922ms\r\n",,terminal_output +21413,14545368,"TERMINAL",0,0,"Step 411, 
loss: 3.034705400466919, step time: 297.8391647338867ms\r\nStep 412, loss: 2.532329797744751, step time: 25.702476501464844ms\r\n",,terminal_output +21414,14545476,"TERMINAL",0,0,"Step 413, loss: 3.2210912704467773, step time: 20.366430282592773ms\r\n",,terminal_output +21415,14545534,"TERMINAL",0,0,"Step 414, loss: 2.547912359237671, step time: 18.82147789001465ms\r\n",,terminal_output +21416,14545683,"TERMINAL",0,0,"Step 415, loss: 2.496286153793335, step time: 18.26310157775879ms\r\n",,terminal_output +21417,14545794,"TERMINAL",0,0,"Step 416, loss: 2.552553653717041, step time: 18.04041862487793ms\r\nStep 417, loss: 2.8966972827911377, step time: 18.29075813293457ms\r\n",,terminal_output +21418,14545908,"TERMINAL",0,0,"Step 418, loss: 2.7767844200134277, step time: 19.36173439025879ms\r\n",,terminal_output +21419,14545971,"TERMINAL",0,0,"Step 419, loss: 2.5043349266052246, step time: 19.002199172973633ms\r\n",,terminal_output +21420,14546081,"TERMINAL",0,0,"Step 420, loss: 2.551682472229004, step time: 18.729209899902344ms\r\n",,terminal_output +21421,14546147,"TERMINAL",0,0,"Step 421, loss: 2.642526626586914, step time: 18.17631721496582ms\r\n",,terminal_output +21422,14546262,"TERMINAL",0,0,"Step 422, loss: 2.6801717281341553, step time: 18.08452606201172ms\r\n",,terminal_output +21423,14546315,"TERMINAL",0,0,"Step 423, loss: 2.7681851387023926, step time: 18.374919891357422ms\r\n",,terminal_output +21424,14546427,"TERMINAL",0,0,"Step 424, loss: 2.6522769927978516, step time: 18.001317977905273ms\r\n",,terminal_output +21425,14546484,"TERMINAL",0,0,"Step 425, loss: 2.571186065673828, step time: 18.15342903137207ms\r\n",,terminal_output +21426,14546600,"TERMINAL",0,0,"Step 426, loss: 2.7599332332611084, step time: 18.262863159179688ms\r\n",,terminal_output +21427,14546664,"TERMINAL",0,0,"Step 427, loss: 2.5876305103302, step time: 18.473148345947266ms\r\n",,terminal_output +21428,14546771,"TERMINAL",0,0,"Step 428, loss: 2.430723190307617, step time: 20.029067993164062ms\r\n",,terminal_output +21429,14546835,"TERMINAL",0,0,"Step 429, loss: 2.433932065963745, step time: 18.971920013427734ms\r\n",,terminal_output +21430,14546941,"TERMINAL",0,0,"Step 430, loss: 2.4554836750030518, step time: 18.31507682800293ms\r\n",,terminal_output +21431,14547004,"TERMINAL",0,0,"Step 431, loss: 2.4424400329589844, step time: 18.85676383972168ms\r\n",,terminal_output +21432,14547112,"TERMINAL",0,0,"Step 432, loss: 2.3700995445251465, step time: 18.338680267333984ms\r\n",,terminal_output +21433,14547219,"TERMINAL",0,0,"Step 433, loss: 2.5248730182647705, step time: 17.957210540771484ms\r\n",,terminal_output +21434,14547270,"TERMINAL",0,0,"Step 434, loss: 2.3824124336242676, step time: 17.954111099243164ms\r\n",,terminal_output +21435,14547382,"TERMINAL",0,0,"Step 435, loss: 2.465885877609253, step time: 18.22662353515625ms\r\n",,terminal_output +21436,14547443,"TERMINAL",0,0,"Step 436, loss: 2.696932554244995, step time: 17.90642738342285ms\r\n",,terminal_output +21437,14547559,"TERMINAL",0,0,"Step 437, loss: 2.400345802307129, step time: 17.961740493774414ms\r\n",,terminal_output +21438,14547613,"TERMINAL",0,0,"Step 438, loss: 2.362680673599243, step time: 18.14723014831543ms\r\n",,terminal_output +21439,14547724,"TERMINAL",0,0,"Step 439, loss: 2.4565179347991943, step time: 17.980575561523438ms\r\n",,terminal_output +21440,14547794,"TERMINAL",0,0,"Step 440, loss: 3.291703462600708, step time: 18.001794815063477ms\r\n",,terminal_output +21441,14547905,"TERMINAL",0,0,"Step 441, loss: 2.3341758251190186, 
step time: 18.375873565673828ms\r\n",,terminal_output +21442,14547957,"TERMINAL",0,0,"Step 442, loss: 2.482790946960449, step time: 17.93217658996582ms\r\n",,terminal_output +21443,14548069,"TERMINAL",0,0,"Step 443, loss: 3.0422513484954834, step time: 17.943382263183594ms\r\n",,terminal_output +21444,14548133,"TERMINAL",0,0,"Step 444, loss: 2.5492539405822754, step time: 19.441843032836914ms\r\n",,terminal_output +21445,14548241,"TERMINAL",0,0,"Step 445, loss: 2.3984615802764893, step time: 17.961978912353516ms\r\n",,terminal_output +21446,14548351,"TERMINAL",0,0,"Step 446, loss: 2.4359395503997803, step time: 17.919540405273438ms\r\n",,terminal_output +21447,14548403,"TERMINAL",0,0,"Step 447, loss: 2.35347056388855, step time: 18.219709396362305ms\r\n",,terminal_output +21448,14548497,"TERMINAL",0,0,"Step 448, loss: 2.403672456741333, step time: 17.880678176879883ms\r\n",,terminal_output +21449,14548580,"TERMINAL",0,0,"Step 449, loss: 2.4292564392089844, step time: 17.899274826049805ms\r\n",,terminal_output +21450,14548659,"TERMINAL",0,0,"Step 450, loss: 2.3130345344543457, step time: 18.159151077270508ms\r\n",,terminal_output +21451,14548769,"TERMINAL",0,0,"Step 451, loss: 2.583115816116333, step time: 17.90595054626465ms\r\n",,terminal_output +21452,14548832,"TERMINAL",0,0,"Step 452, loss: 2.391223192214966, step time: 17.89712905883789ms\r\n",,terminal_output +21453,14548939,"TERMINAL",0,0,"Step 453, loss: 2.4011967182159424, step time: 18.282175064086914ms\r\n",,terminal_output +21454,14548994,"TERMINAL",0,0,"Step 454, loss: 2.6385581493377686, step time: 17.8372859954834ms\r\n",,terminal_output +21455,14549088,"TERMINAL",0,0,"Step 455, loss: 2.421907901763916, step time: 17.61007308959961ms\r\n",,terminal_output +21456,14549199,"TERMINAL",0,0,"Step 456, loss: 2.553825616836548, step time: 17.9440975189209ms\r\n",,terminal_output +21457,14549256,"TERMINAL",0,0,"Step 457, loss: 2.640059471130371, step time: 17.536163330078125ms\r\n",,terminal_output +21458,14549353,"TERMINAL",0,0,"Step 458, loss: 2.4675018787384033, step time: 17.437458038330078ms\r\n",,terminal_output +21459,14549464,"TERMINAL",0,0,"Step 459, loss: 2.303697347640991, step time: 17.919301986694336ms\r\n",,terminal_output +21460,14549519,"TERMINAL",0,0,"Step 460, loss: 2.347501039505005, step time: 17.494916915893555ms\r\n",,terminal_output +21461,14549625,"TERMINAL",0,0,"Step 461, loss: 2.510676383972168, step time: 17.55547523498535ms\r\n",,terminal_output +21462,14549733,"TERMINAL",0,0,"Step 462, loss: 2.385775089263916, step time: 17.807722091674805ms\r\n",,terminal_output +21463,14549795,"TERMINAL",0,0,"Step 463, loss: 2.367784023284912, step time: 17.537593841552734ms\r\n",,terminal_output +21464,14549860,"TERMINAL",0,0,"Step 464, loss: 2.353942632675171, step time: 17.525672912597656ms\r\n",,terminal_output +21465,14549971,"TERMINAL",0,0,"Step 465, loss: 2.561039686203003, step time: 17.876148223876953ms\r\n",,terminal_output +21466,14550032,"TERMINAL",0,0,"Step 466, loss: 2.3428738117218018, step time: 17.795801162719727ms\r\n",,terminal_output +21467,14550144,"TERMINAL",0,0,"Step 467, loss: 2.307762861251831, step time: 17.885208129882812ms\r\n",,terminal_output +21468,14550204,"TERMINAL",0,0,"Step 468, loss: 2.4365217685699463, step time: 18.079757690429688ms\r\n",,terminal_output +21469,14550311,"TERMINAL",0,0,"Step 469, loss: 2.459980010986328, step time: 17.607688903808594ms\r\n",,terminal_output +21470,14550380,"TERMINAL",0,0,"Step 470, loss: 2.340949058532715, step time: 
17.954111099243164ms\r\n",,terminal_output +21471,14550485,"TERMINAL",0,0,"Step 471, loss: 2.265066385269165, step time: 18.20850372314453ms\r\n",,terminal_output +21472,14550548,"TERMINAL",0,0,"Step 472, loss: 2.439807415008545, step time: 17.764806747436523ms\r\n",,terminal_output +21473,14550655,"TERMINAL",0,0,"Step 473, loss: 2.29506778717041, step time: 17.807960510253906ms\r\n",,terminal_output +21474,14550719,"TERMINAL",0,0,"Step 474, loss: 2.2449443340301514, step time: 18.111705780029297ms\r\n",,terminal_output +21475,14550825,"TERMINAL",0,0,"Step 475, loss: 2.7778818607330322, step time: 19.32549476623535ms\r\n",,terminal_output +21476,14550936,"TERMINAL",0,0,"Step 476, loss: 2.490309715270996, step time: 18.438339233398438ms\r\n",,terminal_output +21477,14550989,"TERMINAL",0,0,"Step 477, loss: 2.2144081592559814, step time: 18.538713455200195ms\r\n",,terminal_output +21478,14551097,"TERMINAL",0,0,"Step 478, loss: 2.574019432067871, step time: 18.014907836914062ms\r\n",,terminal_output +21479,14551158,"TERMINAL",0,0,"Step 479, loss: 2.6253762245178223, step time: 17.970561981201172ms\r\n",,terminal_output +21480,14551265,"TERMINAL",0,0,"Step 480, loss: 2.341279983520508, step time: 18.15962791442871ms\r\n",,terminal_output +21481,14551330,"TERMINAL",0,0,"Step 481, loss: 2.6213572025299072, step time: 18.016576766967773ms\r\n",,terminal_output +21482,14551440,"TERMINAL",0,0,"Step 482, loss: 2.563610553741455, step time: 17.981529235839844ms\r\n",,terminal_output +21483,14551499,"TERMINAL",0,0,"Step 483, loss: 2.318735361099243, step time: 18.17178726196289ms\r\n",,terminal_output +21484,14551611,"TERMINAL",0,0,"Step 484, loss: 2.3892836570739746, step time: 17.923355102539062ms\r\n",,terminal_output +21485,14551710,"TERMINAL",0,0,"Step 485, loss: 2.608436107635498, step time: 17.53520965576172ms\r\n",,terminal_output +21486,14551772,"TERMINAL",0,0,"Step 486, loss: 2.466407060623169, step time: 17.72284507751465ms\r\n",,terminal_output +21487,14551885,"TERMINAL",0,0,"Step 487, loss: 2.284334897994995, step time: 17.54283905029297ms\r\n",,terminal_output +21488,14551936,"TERMINAL",0,0,"Step 488, loss: 2.224853277206421, step time: 17.443418502807617ms\r\n",,terminal_output +21489,14552053,"TERMINAL",0,0,"Step 489, loss: 2.43365216255188, step time: 17.73524284362793ms\r\n",,terminal_output +21490,14552107,"TERMINAL",0,0,"Step 490, loss: 2.5979390144348145, step time: 17.567157745361328ms\r\n",,terminal_output +21491,14552203,"TERMINAL",0,0,"Step 491, loss: 2.30374813079834, step time: 18.543243408203125ms\r\n",,terminal_output +21492,14552313,"TERMINAL",0,0,"Step 492, loss: 2.276710033416748, step time: 18.184423446655273ms\r\n",,terminal_output +21493,14552366,"TERMINAL",0,0,"Step 493, loss: 2.181220293045044, step time: 17.537832260131836ms\r\n",,terminal_output +21494,14552474,"TERMINAL",0,0,"Step 494, loss: 2.3230373859405518, step time: 17.489910125732422ms\r\n",,terminal_output +21495,14552536,"TERMINAL",0,0,"Step 495, loss: 2.341641664505005, step time: 17.82965660095215ms\r\n",,terminal_output +21496,14552622,"TERMINAL",0,0,"Step 496, loss: 2.266765832901001, step time: 17.45152473449707ms\r\n",,terminal_output +21497,14552731,"TERMINAL",0,0,"Step 497, loss: 2.4475629329681396, step time: 17.50040054321289ms\r\n",,terminal_output +21498,14552796,"TERMINAL",0,0,"Step 498, loss: 2.160003900527954, step time: 17.761945724487305ms\r\n",,terminal_output +21499,14552894,"TERMINAL",0,0,"Step 499, loss: 2.3211958408355713, step time: 17.581701278686523ms\r\n",,terminal_output 
+21500,14556528,"TERMINAL",0,0,"Step 500, loss: 2.2660505771636963, step time: 25.188922882080078ms\r\n",,terminal_output +21501,14556635,"TERMINAL",0,0,"Step 501, loss: 2.224977970123291, step time: 26.087045669555664ms\r\n",,terminal_output +21502,14556744,"TERMINAL",0,0,"Step 502, loss: 2.515143394470215, step time: 21.168231964111328ms\r\n",,terminal_output +21503,14556806,"TERMINAL",0,0,"Step 503, loss: 2.929034471511841, step time: 19.715309143066406ms\r\n",,terminal_output +21504,14556917,"TERMINAL",0,0,"Step 504, loss: 2.266451358795166, step time: 19.09017562866211ms\r\n",,terminal_output +21505,14556978,"TERMINAL",0,0,"Step 505, loss: 2.4017465114593506, step time: 20.101070404052734ms\r\n",,terminal_output +21506,14557086,"TERMINAL",0,0,"Step 506, loss: 2.1677989959716797, step time: 19.367456436157227ms\r\n",,terminal_output +21507,14557153,"TERMINAL",0,0,"Step 507, loss: 2.4220969676971436, step time: 18.29075813293457ms\r\n",,terminal_output +21508,14557265,"TERMINAL",0,0,"Step 508, loss: 2.4084203243255615, step time: 17.83585548400879ms\r\n",,terminal_output +21509,14557324,"TERMINAL",0,0,"Step 509, loss: 2.620168447494507, step time: 18.153905868530273ms\r\n",,terminal_output +21510,14557436,"TERMINAL",0,0,"Step 510, loss: 2.377239227294922, step time: 19.330978393554688ms\r\n",,terminal_output +21511,14557500,"TERMINAL",0,0,"Step 511, loss: 2.3611981868743896, step time: 18.941640853881836ms\r\n",,terminal_output +21512,14557611,"TERMINAL",0,0,"Step 512, loss: 2.2168338298797607, step time: 18.197059631347656ms\r\n",,terminal_output +21513,14557714,"TERMINAL",0,0,"Step 513, loss: 2.2057549953460693, step time: 17.913103103637695ms\r\n",,terminal_output +21514,14557778,"TERMINAL",0,0,"Step 514, loss: 2.2270169258117676, step time: 17.76409149169922ms\r\n",,terminal_output +21515,14557887,"TERMINAL",0,0,"Step 515, loss: 2.876786231994629, step time: 18.162965774536133ms\r\n",,terminal_output +21516,14557940,"TERMINAL",0,0,"Step 516, loss: 2.208085775375366, step time: 17.97628402709961ms\r\n",,terminal_output +21517,14558050,"TERMINAL",0,0,"Step 517, loss: 2.954848051071167, step time: 17.853975296020508ms\r\n",,terminal_output +21518,14558118,"TERMINAL",0,0,"Step 518, loss: 2.4315273761749268, step time: 23.186922073364258ms\r\n",,terminal_output +21519,14558225,"TERMINAL",0,0,"Step 519, loss: 2.2854597568511963, step time: 20.099401473999023ms\r\n",,terminal_output +21520,14558290,"TERMINAL",0,0,"Step 520, loss: 2.394923686981201, step time: 19.031047821044922ms\r\n",,terminal_output +21521,14558487,"TERMINAL",0,0,"Step 521, loss: 2.2604706287384033, step time: 18.535137176513672ms\r\nStep 522, loss: 2.246321201324463, step time: 18.025875091552734ms\r\n",,terminal_output +21522,14558540,"TERMINAL",0,0,"Step 523, loss: 2.239332437515259, step time: 18.02515983581543ms\r\n",,terminal_output +21523,14558645,"TERMINAL",0,0,"Step 524, loss: 2.3453595638275146, step time: 18.264055252075195ms\r\n",,terminal_output +21524,14558755,"TERMINAL",0,0,"Step 525, loss: 2.6227023601531982, step time: 17.866849899291992ms\r\n",,terminal_output +21525,14558818,"TERMINAL",0,0,"Step 526, loss: 2.6802186965942383, step time: 17.736196517944336ms\r\n",,terminal_output +21526,14558927,"TERMINAL",0,0,"Step 527, loss: 2.3012266159057617, step time: 17.917871475219727ms\r\n",,terminal_output +21527,14558980,"TERMINAL",0,0,"Step 528, loss: 2.212200880050659, step time: 17.94147491455078ms\r\n",,terminal_output +21528,14559090,"TERMINAL",0,0,"Step 529, loss: 2.272155284881592, step time: 
23.045063018798828ms\r\n",,terminal_output +21529,14559185,"TERMINAL",0,0,"Step 530, loss: 2.3127553462982178, step time: 22.247791290283203ms\r\n",,terminal_output +21530,14559238,"TERMINAL",0,0,"Step 531, loss: 2.1619155406951904, step time: 20.63131332397461ms\r\n",,terminal_output +21531,14559347,"TERMINAL",0,0,"Step 532, loss: 2.4160664081573486, step time: 19.938945770263672ms\r\n",,terminal_output +21532,14559412,"TERMINAL",0,0,"Step 533, loss: 2.252814292907715, step time: 20.603656768798828ms\r\n",,terminal_output +21533,14559524,"TERMINAL",0,0,"Step 534, loss: 2.102902412414551, step time: 21.416425704956055ms\r\n",,terminal_output +21534,14559602,"TERMINAL",0,0,"Step 535, loss: 2.56292986869812, step time: 24.270296096801758ms\r\n",,terminal_output +21535,14559711,"TERMINAL",0,0,"Step 536, loss: 2.268681764602661, step time: 20.916223526000977ms\r\n",,terminal_output +21536,14559783,"TERMINAL",0,0,"Step 537, loss: 2.2670600414276123, step time: 18.689632415771484ms\r\n",,terminal_output +21537,14559848,"TERMINAL",0,0,"Step 538, loss: 2.2454612255096436, step time: 18.165111541748047ms\r\n",,terminal_output +21538,14559957,"TERMINAL",0,0,"Step 539, loss: 2.232544422149658, step time: 18.33057403564453ms\r\n",,terminal_output +21539,14560022,"TERMINAL",0,0,"Step 540, loss: 2.1917405128479004, step time: 18.27406883239746ms\r\n",,terminal_output +21540,14560133,"TERMINAL",0,0,"Step 541, loss: 2.301953077316284, step time: 18.044710159301758ms\r\n",,terminal_output +21541,14560195,"TERMINAL",0,0,"Step 542, loss: 2.181654691696167, step time: 18.49985122680664ms\r\n",,terminal_output +21542,14560303,"TERMINAL",0,0,"Step 543, loss: 2.1455459594726562, step time: 17.937421798706055ms\r\n",,terminal_output +21543,14560366,"TERMINAL",0,0,"Step 544, loss: 2.190117835998535, step time: 17.81916618347168ms\r\n",,terminal_output +21544,14560478,"TERMINAL",0,0,"Step 545, loss: 2.1959590911865234, step time: 18.2034969329834ms\r\n",,terminal_output +21545,14560542,"TERMINAL",0,0,"Step 546, loss: 2.143418788909912, step time: 18.105030059814453ms\r\n",,terminal_output +21546,14560626,"TERMINAL",0,0,"Step 547, loss: 2.6775014400482178, step time: 17.917156219482422ms\r\n",,terminal_output +21547,14560738,"TERMINAL",0,0,"Step 548, loss: 2.1895217895507812, step time: 18.344640731811523ms\r\n",,terminal_output +21548,14560799,"TERMINAL",0,0,"Step 549, loss: 2.390045404434204, step time: 17.884016036987305ms\r\n",,terminal_output +21549,14560911,"TERMINAL",0,0,"Step 550, loss: 2.60726261138916, step time: 17.83013343811035ms\r\n",,terminal_output +21550,14560972,"TERMINAL",0,0,"Step 551, loss: 2.1473491191864014, step time: 18.36419105529785ms\r\n",,terminal_output +21551,14561084,"TERMINAL",0,0,"Step 552, loss: 2.321769952774048, step time: 18.056869506835938ms\r\n",,terminal_output +21552,14561143,"TERMINAL",0,0,"Step 553, loss: 2.0926990509033203, step time: 17.851591110229492ms\r\n",,terminal_output +21553,14561253,"TERMINAL",0,0,"Step 554, loss: 2.4763376712799072, step time: 18.2192325592041ms\r\n",,terminal_output +21554,14561319,"TERMINAL",0,0,"Step 555, loss: 2.0874061584472656, step time: 18.892526626586914ms\r\n",,terminal_output +21555,14561431,"TERMINAL",0,0,"Step 556, loss: 2.0617928504943848, step time: 17.316341400146484ms\r\n",,terminal_output +21556,14561490,"TERMINAL",0,0,"Step 557, loss: 2.11367130279541, step time: 17.59195327758789ms\r\n",,terminal_output +21557,14561599,"TERMINAL",0,0,"Step 558, loss: 2.2820451259613037, step time: 17.53377914428711ms\r\n",,terminal_output 
+21558,14561701,"TERMINAL",0,0,"Step 559, loss: 2.2730488777160645, step time: 17.347335815429688ms\r\n",,terminal_output +21559,14561762,"TERMINAL",0,0,"Step 560, loss: 2.12557315826416, step time: 17.664432525634766ms\r\n",,terminal_output +21560,14561873,"TERMINAL",0,0,"Step 561, loss: 2.3845303058624268, step time: 17.25459098815918ms\r\n",,terminal_output +21561,14561922,"TERMINAL",0,0,"Step 562, loss: 2.116220235824585, step time: 17.154693603515625ms\r\n",,terminal_output +21562,14562032,"TERMINAL",0,0,"Step 563, loss: 2.2289562225341797, step time: 17.524003982543945ms\r\n",,terminal_output +21563,14562095,"TERMINAL",0,0,"Step 564, loss: 2.2593295574188232, step time: 17.522573471069336ms\r\n",,terminal_output +21564,14562208,"TERMINAL",0,0,"Step 565, loss: 2.201464891433716, step time: 17.32611656188965ms\r\n",,terminal_output +21565,14562263,"TERMINAL",0,0,"Step 566, loss: 2.087202548980713, step time: 17.734289169311523ms\r\n",,terminal_output +21566,14562372,"TERMINAL",0,0,"Step 567, loss: 2.1014063358306885, step time: 17.32945442199707ms\r\n",,terminal_output +21567,14562436,"TERMINAL",0,0,"Step 568, loss: 2.2403974533081055, step time: 17.183780670166016ms\r\n",,terminal_output +21568,14562530,"TERMINAL",0,0,"Step 569, loss: 2.1354453563690186, step time: 17.488956451416016ms\r\n",,terminal_output +21569,14562610,"TERMINAL",0,0,"Step 570, loss: 2.273487091064453, step time: 17.38452911376953ms\r\n",,terminal_output +21570,14562716,"TERMINAL",0,0,"Step 571, loss: 2.1532208919525146, step time: 17.31729507446289ms\r\n",,terminal_output +21571,14562786,"TERMINAL",0,0,"Step 572, loss: 2.0843167304992676, step time: 17.780542373657227ms\r\n",,terminal_output +21572,14562892,"TERMINAL",0,0,"Step 573, loss: 2.4018821716308594, step time: 17.360687255859375ms\r\n",,terminal_output +21573,14562957,"TERMINAL",0,0,"Step 574, loss: 2.1211373805999756, step time: 17.316102981567383ms\r\n",,terminal_output +21574,14563065,"TERMINAL",0,0,"Step 575, loss: 2.039125919342041, step time: 17.575979232788086ms\r\n",,terminal_output +21575,14563128,"TERMINAL",0,0,"Step 576, loss: 2.7354519367218018, step time: 17.521142959594727ms\r\n",,terminal_output +21576,14563238,"TERMINAL",0,0,"Step 577, loss: 2.0766403675079346, step time: 17.452478408813477ms\r\n",,terminal_output +21577,14563298,"TERMINAL",0,0,"Step 578, loss: 2.3654658794403076, step time: 17.6849365234375ms\r\n",,terminal_output +21578,14563404,"TERMINAL",0,0,"Step 579, loss: 2.0361273288726807, step time: 17.2884464263916ms\r\n",,terminal_output +21579,14563517,"TERMINAL",0,0,"Step 580, loss: 2.056330919265747, step time: 18.10479164123535ms\r\n",,terminal_output +21580,14563568,"TERMINAL",0,0,"Step 581, loss: 2.4283246994018555, step time: 17.548561096191406ms\r\n",,terminal_output +21581,14563653,"TERMINAL",0,0,"Step 582, loss: 2.026965379714966, step time: 24.60336685180664ms\r\n",,terminal_output +21582,14563759,"TERMINAL",0,0,"Step 583, loss: 2.0227582454681396, step time: 20.09415626525879ms\r\n",,terminal_output +21583,14563822,"TERMINAL",0,0,"Step 584, loss: 2.038283348083496, step time: 19.63043212890625ms\r\n",,terminal_output +21584,14563934,"TERMINAL",0,0,"Step 585, loss: 2.444772481918335, step time: 20.028114318847656ms\r\n",,terminal_output +21585,14564006,"TERMINAL",0,0,"Step 586, loss: 2.088186740875244, step time: 25.479793548583984ms\r\n",,terminal_output +21586,14564116,"TERMINAL",0,0,"Step 587, loss: 1.861857533454895, step time: 21.084308624267578ms\r\n",,terminal_output +21587,14564179,"TERMINAL",0,0,"Step 588, 
loss: 2.128904104232788, step time: 19.337177276611328ms\r\n",,terminal_output +21588,14564286,"TERMINAL",0,0,"Step 589, loss: 2.335122585296631, step time: 18.805980682373047ms\r\n",,terminal_output +21589,14564352,"TERMINAL",0,0,"Step 590, loss: 2.244600534439087, step time: 19.76466178894043ms\r\n",,terminal_output +21590,14564460,"TERMINAL",0,0,"Step 591, loss: 1.8806730508804321, step time: 28.140783309936523ms\r\n",,terminal_output +21591,14564569,"TERMINAL",0,0,"Step 592, loss: 2.1196794509887695, step time: 23.361682891845703ms\r\n",,terminal_output +21592,14564622,"TERMINAL",0,0,"Step 593, loss: 2.2853667736053467, step time: 21.85845375061035ms\r\n",,terminal_output +21593,14564730,"TERMINAL",0,0,"Step 594, loss: 2.525702953338623, step time: 24.58643913269043ms\r\n",,terminal_output +21594,14564811,"TERMINAL",0,0,"Step 595, loss: 2.2132327556610107, step time: 23.982524871826172ms\r\n",,terminal_output +21595,14564920,"TERMINAL",0,0,"Step 596, loss: 2.2684199810028076, step time: 23.54717254638672ms\r\n",,terminal_output +21596,14564972,"TERMINAL",0,0,"Step 597, loss: 2.091583251953125, step time: 21.212100982666016ms\r\n",,terminal_output +21597,14565081,"TERMINAL",0,0,"Step 598, loss: 2.411144733428955, step time: 21.453142166137695ms\r\n",,terminal_output +21598,14565189,"TERMINAL",0,0,"Step 599, loss: 2.415395736694336, step time: 22.578954696655273ms\r\n",,terminal_output +21599,14565242,"TERMINAL",0,0,"Step 600, loss: 1.99610435962677, step time: 23.723125457763672ms\r\n",,terminal_output +21600,14565352,"TERMINAL",0,0,"Step 601, loss: 2.0543124675750732, step time: 20.56121826171875ms\r\n",,terminal_output +21601,14565415,"TERMINAL",0,0,"Step 602, loss: 2.2168707847595215, step time: 20.056962966918945ms\r\n",,terminal_output +21602,14565526,"TERMINAL",0,0,"Step 603, loss: 2.40262508392334, step time: 19.188880920410156ms\r\n",,terminal_output +21603,14565592,"TERMINAL",0,0,"Step 604, loss: 2.1020827293395996, step time: 19.480228424072266ms\r\n",,terminal_output +21604,14565701,"TERMINAL",0,0,"Step 605, loss: 2.057415723800659, step time: 20.380735397338867ms\r\n",,terminal_output +21605,14565764,"TERMINAL",0,0,"Step 606, loss: 1.9874591827392578, step time: 27.281522750854492ms\r\n",,terminal_output +21606,14565880,"TERMINAL",0,0,"Step 607, loss: 2.225353956222534, step time: 22.03202247619629ms\r\n",,terminal_output +21607,14565937,"TERMINAL",0,0,"Step 608, loss: 2.1686694622039795, step time: 20.98250389099121ms\r\n",,terminal_output +21608,14566049,"TERMINAL",0,0,"Step 609, loss: 2.4443397521972656, step time: 19.742488861083984ms\r\n",,terminal_output +21609,14566154,"TERMINAL",0,0,"Step 610, loss: 2.1279666423797607, step time: 21.70395851135254ms\r\n",,terminal_output +21610,14566207,"TERMINAL",0,0,"Step 611, loss: 2.043492317199707, step time: 20.466089248657227ms\r\n",,terminal_output +21611,14566314,"TERMINAL",0,0,"Step 612, loss: 2.04325532913208, step time: 19.947528839111328ms\r\n",,terminal_output +21612,14566384,"TERMINAL",0,0,"Step 613, loss: 2.208047866821289, step time: 25.497913360595703ms\r\n",,terminal_output +21613,14566492,"TERMINAL",0,0,"Step 614, loss: 2.0309009552001953, step time: 25.406837463378906ms\r\n",,terminal_output +21614,14566556,"TERMINAL",0,0,"Step 615, loss: 2.405477285385132, step time: 21.187782287597656ms\r\n",,terminal_output +21615,14566642,"TERMINAL",0,0,"Step 616, loss: 2.0543580055236816, step time: 19.613265991210938ms\r\n",,terminal_output +21616,14566749,"TERMINAL",0,0,"Step 617, loss: 2.0431835651397705, step time: 
20.706892013549805ms\r\n",,terminal_output +21617,14566856,"TERMINAL",0,0,"Step 618, loss: 2.056277275085449, step time: 20.652055740356445ms\r\n",,terminal_output +21618,14566907,"TERMINAL",0,0,"Step 619, loss: 1.9896118640899658, step time: 19.739151000976562ms\r\n",,terminal_output +21619,14567014,"TERMINAL",0,0,"Step 620, loss: 2.1181886196136475, step time: 22.164106369018555ms\r\n",,terminal_output +21620,14567123,"TERMINAL",0,0,"Step 621, loss: 2.1196446418762207, step time: 27.868270874023438ms\r\n",,terminal_output +21621,14567178,"TERMINAL",0,0,"Step 622, loss: 1.9130288362503052, step time: 23.073196411132812ms\r\n",,terminal_output +21622,14567289,"TERMINAL",0,0,"Step 623, loss: 3.2833828926086426, step time: 22.501230239868164ms\r\n",,terminal_output +21623,14567348,"TERMINAL",0,0,"Step 624, loss: 2.2133431434631348, step time: 22.100210189819336ms\r\n",,terminal_output +21624,14567464,"TERMINAL",0,0,"Step 625, loss: 2.2009637355804443, step time: 21.91758155822754ms\r\n",,terminal_output +21625,14567528,"TERMINAL",0,0,"Step 626, loss: 2.0830490589141846, step time: 21.32415771484375ms\r\n",,terminal_output +21626,14567633,"TERMINAL",0,0,"Step 627, loss: 2.1391897201538086, step time: 20.508289337158203ms\r\n",,terminal_output +21627,14567739,"TERMINAL",0,0,"Step 628, loss: 1.8601107597351074, step time: 19.67334747314453ms\r\n",,terminal_output +21628,14567806,"TERMINAL",0,0,"Step 629, loss: 2.386368751525879, step time: 20.25580406188965ms\r\n",,terminal_output +21629,14567916,"TERMINAL",0,0,"Step 630, loss: 1.9954097270965576, step time: 21.460771560668945ms\r\n",,terminal_output +21630,14567969,"TERMINAL",0,0,"Step 631, loss: 2.113739013671875, step time: 27.318716049194336ms\r\n",,terminal_output +21631,14568062,"TERMINAL",0,0,"Step 632, loss: 2.017411708831787, step time: 27.374744415283203ms\r\n",,terminal_output +21632,14568168,"TERMINAL",0,0,"Step 633, loss: 2.3269426822662354, step time: 22.12691307067871ms\r\n",,terminal_output +21633,14568278,"TERMINAL",0,0,"Step 634, loss: 1.9848741292953491, step time: 20.090579986572266ms\r\n",,terminal_output +21634,14568329,"TERMINAL",0,0,"Step 635, loss: 1.9674216508865356, step time: 19.547700881958008ms\r\n",,terminal_output +21635,14568422,"TERMINAL",0,0,"Step 636, loss: 2.0000791549682617, step time: 19.211292266845703ms\r\n",,terminal_output +21636,14568531,"TERMINAL",0,0,"Step 637, loss: 1.9240508079528809, step time: 18.86439323425293ms\r\n",,terminal_output +21637,14568598,"TERMINAL",0,0,"Step 638, loss: 2.1519572734832764, step time: 20.079612731933594ms\r\n",,terminal_output +21638,14568680,"TERMINAL",0,0,"Step 639, loss: 1.9261873960494995, step time: 20.157337188720703ms\r\n",,terminal_output +21639,14568786,"TERMINAL",0,0,"Step 640, loss: 2.2508132457733154, step time: 20.004749298095703ms\r\n",,terminal_output +21640,14568844,"TERMINAL",0,0,"Step 641, loss: 2.1670751571655273, step time: 19.368410110473633ms\r\n",,terminal_output +21641,14568953,"TERMINAL",0,0,"Step 642, loss: 2.240596294403076, step time: 18.937349319458008ms\r\n",,terminal_output +21642,14569018,"TERMINAL",0,0,"Step 643, loss: 1.8922268152236938, step time: 18.671274185180664ms\r\n",,terminal_output +21643,14569125,"TERMINAL",0,0,"Step 644, loss: 2.222456932067871, step time: 20.223140716552734ms\r\n",,terminal_output +21644,14569233,"TERMINAL",0,0,"Step 645, loss: 2.9805233478546143, step time: 21.695852279663086ms\r\n",,terminal_output +21645,14569283,"TERMINAL",0,0,"Step 646, loss: 2.188194990158081, step time: 
20.982742309570312ms\r\n",,terminal_output +21646,14569391,"TERMINAL",0,0,"Step 647, loss: 2.510835886001587, step time: 19.878864288330078ms\r\n",,terminal_output +21647,14569454,"TERMINAL",0,0,"Step 648, loss: 2.054201602935791, step time: 19.211769104003906ms\r\n",,terminal_output +21648,14569562,"TERMINAL",0,0,"Step 649, loss: 2.031336784362793, step time: 19.652128219604492ms\r\n",,terminal_output +21649,14569636,"TERMINAL",0,0,"Step 650, loss: 1.893275260925293, step time: 19.894838333129883ms\r\n",,terminal_output +21650,14569750,"TERMINAL",0,0,"Step 651, loss: 2.0286693572998047, step time: 24.76787567138672ms\r\n",,terminal_output +21651,14569816,"TERMINAL",0,0,"Step 652, loss: 2.0113143920898438, step time: 27.998685836791992ms\r\n",,terminal_output +21652,14569922,"TERMINAL",0,0,"Step 653, loss: 2.212231159210205, step time: 22.922992706298828ms\r\n",,terminal_output +21653,14569985,"TERMINAL",0,0,"Step 654, loss: 2.053992986679077, step time: 21.27671241760254ms\r\n",,terminal_output +21654,14570097,"TERMINAL",0,0,"Step 655, loss: 1.8931323289871216, step time: 20.215988159179688ms\r\n",,terminal_output +21655,14570162,"TERMINAL",0,0,"Step 656, loss: 2.4993505477905273, step time: 20.808935165405273ms\r\n",,terminal_output +21656,14570273,"TERMINAL",0,0,"Step 657, loss: 2.0537192821502686, step time: 19.211530685424805ms\r\n",,terminal_output +21657,14570333,"TERMINAL",0,0,"Step 658, loss: 1.9447169303894043, step time: 18.4781551361084ms\r\n",,terminal_output +21658,14570421,"TERMINAL",0,0,"Step 659, loss: 2.0569963455200195, step time: 19.195079803466797ms\r\n",,terminal_output +21659,14570518,"TERMINAL",0,0,"Step 660, loss: 2.199883460998535, step time: 21.00682258605957ms\r\n",,terminal_output +21660,14570633,"TERMINAL",0,0,"Step 661, loss: 2.019115924835205, step time: 19.056081771850586ms\r\n",,terminal_output +21661,14570732,"TERMINAL",0,0,"Step 662, loss: 1.9675586223602295, step time: 24.875402450561523ms\r\n",,terminal_output +21662,14570793,"TERMINAL",0,0,"Step 663, loss: 1.902235984802246, step time: 20.911216735839844ms\r\n",,terminal_output +21663,14570903,"TERMINAL",0,0,"Step 664, loss: 2.017315626144409, step time: 19.346952438354492ms\r\n",,terminal_output +21664,14570955,"TERMINAL",0,0,"Step 665, loss: 1.9197795391082764, step time: 19.1037654876709ms\r\n",,terminal_output +21665,14571071,"TERMINAL",0,0,"Step 666, loss: 1.815674066543579, step time: 18.76688003540039ms\r\n",,terminal_output +21666,14571136,"TERMINAL",0,0,"Step 667, loss: 1.8787696361541748, step time: 18.89657974243164ms\r\n",,terminal_output +21667,14571250,"TERMINAL",0,0,"Step 668, loss: 2.5786147117614746, step time: 18.921852111816406ms\r\n",,terminal_output +21668,14571303,"TERMINAL",0,0,"Step 669, loss: 2.0435550212860107, step time: 18.163442611694336ms\r\n",,terminal_output +21669,14571412,"TERMINAL",0,0,"Step 670, loss: 2.674518346786499, step time: 19.1802978515625ms\r\n",,terminal_output +21670,14571484,"TERMINAL",0,0,"Step 671, loss: 2.9656805992126465, step time: 18.48125457763672ms\r\n",,terminal_output +21671,14571648,"TERMINAL",0,0,"Step 672, loss: 1.8287547826766968, step time: 21.43383026123047ms\r\nStep 673, loss: 2.4449453353881836, step time: 19.762277603149414ms\r\n",,terminal_output +21672,14571756,"TERMINAL",0,0,"Step 674, loss: 1.9557970762252808, step time: 18.931865692138672ms\r\n",,terminal_output +21673,14571865,"TERMINAL",0,0,"Step 675, loss: 2.2708423137664795, step time: 25.951385498046875ms\r\n",,terminal_output +21674,14571919,"TERMINAL",0,0,"Step 676, loss: 
2.5863962173461914, step time: 27.405261993408203ms\r\n",,terminal_output +21675,14572017,"TERMINAL",0,0,"Step 677, loss: 1.8463644981384277, step time: 20.014286041259766ms\r\n",,terminal_output +21676,14572150,"TERMINAL",0,0,"Step 678, loss: 1.9172234535217285, step time: 24.681568145751953ms\r\n",,terminal_output +21677,14572188,"TERMINAL",0,0,"Step 679, loss: 2.231382131576538, step time: 27.312040328979492ms\r\n",,terminal_output +21678,14572295,"TERMINAL",0,0,"Step 680, loss: 2.254434108734131, step time: 23.43297004699707ms\r\n",,terminal_output +21679,14572401,"TERMINAL",0,0,"Step 681, loss: 2.177565574645996, step time: 20.099878311157227ms\r\n",,terminal_output +21680,14572498,"TERMINAL",0,0,"Step 682, loss: 1.9869319200515747, step time: 27.23217010498047ms\r\n",,terminal_output +21681,14572649,"TERMINAL",0,0,"Step 683, loss: 1.8330729007720947, step time: 28.599262237548828ms\r\nStep 684, loss: 1.8752888441085815, step time: 28.490781784057617ms\r\n",,terminal_output +21682,14572738,"TERMINAL",0,0,"Step 685, loss: 2.1384410858154297, step time: 27.332544326782227ms\r\n",,terminal_output +21683,14572849,"TERMINAL",0,0,"Step 686, loss: 1.9406520128250122, step time: 28.604507446289062ms\r\n",,terminal_output +21684,14572912,"TERMINAL",0,0,"Step 687, loss: 1.864851474761963, step time: 27.714967727661133ms\r\n",,terminal_output +21685,14573011,"TERMINAL",0,0,"Step 688, loss: 1.8532058000564575, step time: 25.47430992126465ms\r\n",,terminal_output +21686,14573121,"TERMINAL",0,0,"Step 689, loss: 1.9036210775375366, step time: 24.039745330810547ms\r\n",,terminal_output +21687,14573184,"TERMINAL",0,0,"Step 690, loss: 1.9151198863983154, step time: 28.64360809326172ms\r\n",,terminal_output +21688,14573289,"TERMINAL",0,0,"Step 691, loss: 1.88888680934906, step time: 27.61077880859375ms\r\n",,terminal_output +21689,14573410,"TERMINAL",0,0,"Step 692, loss: 1.8524470329284668, step time: 27.804136276245117ms\r\n",,terminal_output +21690,14573465,"TERMINAL",0,0,"Step 693, loss: 2.1236581802368164, step time: 22.940874099731445ms\r\n",,terminal_output +21691,14573630,"TERMINAL",0,0,"Step 694, loss: 2.013444662094116, step time: 23.25749397277832ms\r\nStep 695, loss: 1.8610365390777588, step time: 23.625612258911133ms\r\n",,terminal_output +21692,14573739,"TERMINAL",0,0,"Step 696, loss: 1.9272067546844482, step time: 23.337364196777344ms\r\n",,terminal_output +21693,14573852,"TERMINAL",0,0,"Step 697, loss: 1.8979145288467407, step time: 22.749662399291992ms\r\n",,terminal_output +21694,14573916,"TERMINAL",0,0,"Step 698, loss: 1.8520634174346924, step time: 22.86815643310547ms\r\n",,terminal_output +21695,14574021,"TERMINAL",0,0,"Step 699, loss: 2.059269905090332, step time: 25.20012855529785ms\r\n",,terminal_output +21696,14574082,"TERMINAL",0,0,"Step 700, loss: 1.8424679040908813, step time: 23.79465103149414ms\r\n",,terminal_output +21697,14574421,"TERMINAL",0,0,"Step 701, loss: 2.104999303817749, step time: 329.2887210845947ms\r\n",,terminal_output +21698,14574529,"TERMINAL",0,0,"Step 702, loss: 2.086913824081421, step time: 25.130033493041992ms\r\n",,terminal_output +21699,14574600,"TERMINAL",0,0,"Step 703, loss: 1.9824140071868896, step time: 21.679401397705078ms\r\n",,terminal_output +21700,14574705,"TERMINAL",0,0,"Step 704, loss: 1.7466697692871094, step time: 21.268129348754883ms\r\n",,terminal_output +21701,14574768,"TERMINAL",0,0,"Step 705, loss: 1.7740389108657837, step time: 20.025968551635742ms\r\n",,terminal_output +21702,14574881,"TERMINAL",0,0,"Step 706, loss: 
1.7837210893630981, step time: 19.524097442626953ms\r\n",,terminal_output +21703,14574934,"TERMINAL",0,0,"Step 707, loss: 2.024108648300171, step time: 19.89889144897461ms\r\n",,terminal_output +21704,14575033,"TERMINAL",0,0,"Step 708, loss: 1.9557026624679565, step time: 19.85931396484375ms\r\n",,terminal_output +21705,14575154,"TERMINAL",0,0,"Step 709, loss: 1.9374432563781738, step time: 20.610332489013672ms\r\n",,terminal_output +21706,14575206,"TERMINAL",0,0,"Step 710, loss: 1.8458425998687744, step time: 20.645856857299805ms\r\n",,terminal_output +21707,14575313,"TERMINAL",0,0,"Step 711, loss: 1.8709062337875366, step time: 19.951581954956055ms\r\n",,terminal_output +21708,14575371,"TERMINAL",0,0,"Step 712, loss: 2.6407501697540283, step time: 19.707918167114258ms\r\n",,terminal_output +21709,14575486,"TERMINAL",0,0,"Step 713, loss: 1.8495628833770752, step time: 19.75393295288086ms\r\n",,terminal_output +21710,14575541,"TERMINAL",0,0,"Step 714, loss: 2.2257704734802246, step time: 20.760536193847656ms\r\n",,terminal_output +21711,14575650,"TERMINAL",0,0,"Step 715, loss: 2.009754180908203, step time: 29.086589813232422ms\r\n",,terminal_output +21712,14575760,"TERMINAL",0,0,"Step 716, loss: 1.8493479490280151, step time: 26.60369873046875ms\r\n",,terminal_output +21713,14575824,"TERMINAL",0,0,"Step 717, loss: 1.8581918478012085, step time: 22.878170013427734ms\r\n",,terminal_output +21714,14575931,"TERMINAL",0,0,"Step 718, loss: 1.9928882122039795, step time: 24.898290634155273ms\r\n",,terminal_output +21715,14575987,"TERMINAL",0,0,"Step 719, loss: 2.0824427604675293, step time: 22.853612899780273ms\r\n",,terminal_output +21716,14576089,"TERMINAL",0,0,"Step 720, loss: 2.1954874992370605, step time: 27.549028396606445ms\r\n",,terminal_output +21717,14576202,"TERMINAL",0,0,"Step 721, loss: 1.8024853467941284, step time: 29.478788375854492ms\r\n",,terminal_output +21718,14576265,"TERMINAL",0,0,"Step 722, loss: 1.8498687744140625, step time: 28.539180755615234ms\r\n",,terminal_output +21719,14576374,"TERMINAL",0,0,"Step 723, loss: 1.8581217527389526, step time: 25.035381317138672ms\r\n",,terminal_output +21720,14576484,"TERMINAL",0,0,"Step 724, loss: 1.8405625820159912, step time: 27.900218963623047ms\r\n",,terminal_output +21721,14576544,"TERMINAL",0,0,"Step 725, loss: 2.2766666412353516, step time: 24.145841598510742ms\r\n",,terminal_output +21722,14576650,"TERMINAL",0,0,"Step 726, loss: 2.097959518432617, step time: 23.03910255432129ms\r\n",,terminal_output +21723,14576753,"TERMINAL",0,0,"Step 727, loss: 1.8074065446853638, step time: 26.741981506347656ms\r\n",,terminal_output +21724,14576813,"TERMINAL",0,0,"Step 728, loss: 1.7732049226760864, step time: 22.948503494262695ms\r\n",,terminal_output +21725,14576924,"TERMINAL",0,0,"Step 729, loss: 1.8591694831848145, step time: 23.85997772216797ms\r\n",,terminal_output +21726,14576981,"TERMINAL",0,0,"Step 730, loss: 1.8837732076644897, step time: 21.880388259887695ms\r\n",,terminal_output +21727,14577078,"TERMINAL",0,0,"Step 731, loss: 1.8586238622665405, step time: 21.474838256835938ms\r\n",,terminal_output +21728,14577191,"TERMINAL",0,0,"Step 732, loss: 2.3358495235443115, step time: 22.159576416015625ms\r\n",,terminal_output +21729,14577242,"TERMINAL",0,0,"Step 733, loss: 2.0050363540649414, step time: 20.914316177368164ms\r\n",,terminal_output +21730,14577351,"TERMINAL",0,0,"Step 734, loss: 2.0654568672180176, step time: 21.992921829223633ms\r\n",,terminal_output +21731,14577459,"TERMINAL",0,0,"Step 735, loss: 1.89279043674469, step 
time: 21.874189376831055ms\r\n",,terminal_output +21732,14577512,"TERMINAL",0,0,"Step 736, loss: 1.7120720148086548, step time: 21.24500274658203ms\r\n",,terminal_output +21733,14577618,"TERMINAL",0,0,"Step 737, loss: 1.931971549987793, step time: 20.428180694580078ms\r\n",,terminal_output +21734,14577678,"TERMINAL",0,0,"Step 738, loss: 2.089684009552002, step time: 19.511699676513672ms\r\n",,terminal_output +21735,14577789,"TERMINAL",0,0,"Step 739, loss: 1.8341305255889893, step time: 18.540620803833008ms\r\n",,terminal_output +21736,14577856,"TERMINAL",0,0,"Step 740, loss: 1.8319387435913086, step time: 19.289493560791016ms\r\n",,terminal_output +21737,14577970,"TERMINAL",0,0,"Step 741, loss: 1.7853566408157349, step time: 18.91160011291504ms\r\n",,terminal_output +21738,14578027,"TERMINAL",0,0,"Step 742, loss: 1.7587547302246094, step time: 18.436908721923828ms\r\n",,terminal_output +21739,14578121,"TERMINAL",0,0,"Step 743, loss: 1.857669711112976, step time: 18.691062927246094ms\r\n",,terminal_output +21740,14578234,"TERMINAL",0,0,"Step 744, loss: 1.7975631952285767, step time: 18.558502197265625ms\r\n",,terminal_output +21741,14578288,"TERMINAL",0,0,"Step 745, loss: 1.851003646850586, step time: 18.25547218322754ms\r\n",,terminal_output +21742,14578396,"TERMINAL",0,0,"Step 746, loss: 1.838360071182251, step time: 19.990205764770508ms\r\n",,terminal_output +21743,14578460,"TERMINAL",0,0,"Step 747, loss: 2.238600969314575, step time: 18.912315368652344ms\r\n",,terminal_output +21744,14578620,"TERMINAL",0,0,"Step 748, loss: 2.0560529232025146, step time: 18.306732177734375ms\r\n",,terminal_output +21745,14578632,"TERMINAL",0,0,"Step 749, loss: 1.7887206077575684, step time: 18.937110900878906ms\r\n",,terminal_output +21746,14578741,"TERMINAL",0,0,"Step 750, loss: 1.8673992156982422, step time: 21.04043960571289ms\r\n",,terminal_output +21747,14578806,"TERMINAL",0,0,"Step 751, loss: 1.7880637645721436, step time: 20.500898361206055ms\r\n",,terminal_output +21748,14578916,"TERMINAL",0,0,"Step 752, loss: 1.9284706115722656, step time: 29.67691421508789ms\r\n",,terminal_output +21749,14579022,"TERMINAL",0,0,"Step 753, loss: 1.7574785947799683, step time: 27.673721313476562ms\r\n",,terminal_output +21750,14579077,"TERMINAL",0,0,"Step 754, loss: 2.197005271911621, step time: 23.01955223083496ms\r\n",,terminal_output +21751,14579169,"TERMINAL",0,0,"Step 755, loss: 1.7682125568389893, step time: 21.645784378051758ms\r\n",,terminal_output +21752,14579276,"TERMINAL",0,0,"Step 756, loss: 1.6971871852874756, step time: 26.217937469482422ms\r\n",,terminal_output +21753,14579385,"TERMINAL",0,0,"Step 757, loss: 1.846564769744873, step time: 23.820161819458008ms\r\n",,terminal_output +21754,14579436,"TERMINAL",0,0,"Step 758, loss: 1.6676422357559204, step time: 29.22344207763672ms\r\n",,terminal_output +21755,14579543,"TERMINAL",0,0,"Step 759, loss: 2.1849405765533447, step time: 29.06489372253418ms\r\n",,terminal_output +21756,14579619,"TERMINAL",0,0,"Step 760, loss: 1.77091646194458, step time: 26.020288467407227ms\r\n",,terminal_output +21757,14579728,"TERMINAL",0,0,"Step 761, loss: 1.7237293720245361, step time: 24.827957153320312ms\r\n",,terminal_output +21758,14579836,"TERMINAL",0,0,"Step 762, loss: 1.6161357164382935, step time: 27.49466896057129ms\r\n",,terminal_output +21759,14579891,"TERMINAL",0,0,"Step 763, loss: 2.2850096225738525, step time: 24.248838424682617ms\r\n",,terminal_output +21760,14580003,"TERMINAL",0,0,"Step 764, loss: 1.7486337423324585, step time: 
28.22566032409668ms\r\n",,terminal_output +21761,14580112,"TERMINAL",0,0,"Step 765, loss: 1.7844046354293823, step time: 23.8037109375ms\r\n",,terminal_output +21762,14580165,"TERMINAL",0,0,"Step 766, loss: 1.7785519361495972, step time: 21.504640579223633ms\r\n",,terminal_output +21763,14580272,"TERMINAL",0,0,"Step 767, loss: 2.1702537536621094, step time: 26.246070861816406ms\r\n",,terminal_output +21764,14580337,"TERMINAL",0,0,"Step 768, loss: 1.7368555068969727, step time: 24.801015853881836ms\r\n",,terminal_output +21765,14580447,"TERMINAL",0,0,"Step 769, loss: 1.921446681022644, step time: 24.04618263244629ms\r\n",,terminal_output +21766,14580560,"TERMINAL",0,0,"Step 770, loss: 0.0006385194719769061, step time: 24.34539794921875ms\r\n",,terminal_output +21767,14580624,"TERMINAL",0,0,"Step 771, loss: 1.5858744382858276, step time: 24.025440216064453ms\r\n",,terminal_output +21768,14580731,"TERMINAL",0,0,"Step 772, loss: 1.6833946704864502, step time: 26.546478271484375ms\r\n",,terminal_output +21769,14580794,"TERMINAL",0,0,"Step 773, loss: 1.7387224435806274, step time: 24.164676666259766ms\r\n",,terminal_output +21770,14580904,"TERMINAL",0,0,"Step 774, loss: 2.0943005084991455, step time: 23.95486831665039ms\r\n",,terminal_output +21771,14580963,"TERMINAL",0,0,"Step 775, loss: 2.0505971908569336, step time: 21.468162536621094ms\r\n",,terminal_output +21772,14581072,"TERMINAL",0,0,"Step 776, loss: 1.8769408464431763, step time: 23.469209671020508ms\r\n",,terminal_output +21773,14581180,"TERMINAL",0,0,"Step 777, loss: 1.9489238262176514, step time: 23.225784301757812ms\r\n",,terminal_output +21774,14581234,"TERMINAL",0,0,"Step 778, loss: 1.7706996202468872, step time: 33.34689140319824ms\r\n",,terminal_output +21775,14581342,"TERMINAL",0,0,"Step 779, loss: 2.0361664295196533, step time: 21.19755744934082ms\r\n",,terminal_output +21776,14581454,"TERMINAL",0,0,"Step 780, loss: 1.6744625568389893, step time: 21.22044563293457ms\r\n",,terminal_output +21777,14581509,"TERMINAL",0,0,"Step 781, loss: 1.6159961223602295, step time: 20.505189895629883ms\r\n",,terminal_output +21778,14581619,"TERMINAL",0,0,"Step 782, loss: 1.6785283088684082, step time: 21.242141723632812ms\r\n",,terminal_output +21779,14581671,"TERMINAL",0,0,"Step 783, loss: 2.0658926963806152, step time: 21.398067474365234ms\r\n",,terminal_output +21780,14581773,"TERMINAL",0,0,"Step 784, loss: 1.5797141790390015, step time: 20.59149742126465ms\r\n",,terminal_output +21781,14581882,"TERMINAL",0,0,"Step 785, loss: 1.6901087760925293, step time: 20.839452743530273ms\r\n",,terminal_output +21782,14581937,"TERMINAL",0,0,"Step 786, loss: 1.671708345413208, step time: 21.678447723388672ms\r\n",,terminal_output +21783,14582042,"TERMINAL",0,0,"Step 787, loss: 1.8556345701217651, step time: 20.520687103271484ms\r\n",,terminal_output +21784,14582107,"TERMINAL",0,0,"Step 788, loss: 1.7068541049957275, step time: 21.108627319335938ms\r\n",,terminal_output +21785,14582216,"TERMINAL",0,0,"Step 789, loss: 2.1796412467956543, step time: 29.337167739868164ms\r\n",,terminal_output +21786,14582350,"TERMINAL",0,0,"Step 790, loss: 1.6576735973358154, step time: 28.02109718322754ms\r\n",,terminal_output +21787,14582403,"TERMINAL",0,0,"Step 791, loss: 1.7028344869613647, step time: 23.85401725769043ms\r\n",,terminal_output +21788,14582508,"TERMINAL",0,0,"Step 792, loss: 2.355846643447876, step time: 22.634506225585938ms\r\n",,terminal_output +21789,14582595,"TERMINAL",0,0,"Step 793, loss: 1.6767042875289917, step time: 
24.261474609375ms\r\n",,terminal_output +21790,14582648,"TERMINAL",0,0,"Step 794, loss: 1.7113841772079468, step time: 24.43981170654297ms\r\n",,terminal_output +21791,14582760,"TERMINAL",0,0,"Step 795, loss: 1.7454293966293335, step time: 29.181241989135742ms\r\n",,terminal_output +21792,14582869,"TERMINAL",0,0,"Step 796, loss: 1.5912566184997559, step time: 27.90999412536621ms\r\n",,terminal_output +21793,14582921,"TERMINAL",0,0,"Step 797, loss: 1.671118140220642, step time: 26.117801666259766ms\r\n",,terminal_output +21794,14583020,"TERMINAL",0,0,"Step 798, loss: 1.6822991371154785, step time: 24.812936782836914ms\r\n",,terminal_output +21795,14583128,"TERMINAL",0,0,"Step 799, loss: 1.8544434309005737, step time: 27.428150177001953ms\r\n",,terminal_output +21796,14583237,"TERMINAL",0,0,"Step 800, loss: 2.127930164337158, step time: 23.94843101501465ms\r\n",,terminal_output +21797,14583288,"TERMINAL",0,0,"Step 801, loss: 1.623116374015808, step time: 26.262998580932617ms\r\n",,terminal_output +21798,14583394,"TERMINAL",0,0,"Step 802, loss: 3.3078079223632812, step time: 21.868467330932617ms\r\n",,terminal_output +21799,14583460,"TERMINAL",0,0,"Step 803, loss: 1.6087524890899658, step time: 22.000789642333984ms\r\n",,terminal_output +21800,14583565,"TERMINAL",0,0,"Step 804, loss: 1.823505163192749, step time: 23.508071899414062ms\r\n",,terminal_output +21801,14583637,"TERMINAL",0,0,"Step 805, loss: 2.120060920715332, step time: 22.913217544555664ms\r\n",,terminal_output +21802,14583747,"TERMINAL",0,0,"Step 806, loss: 2.3030641078948975, step time: 24.549245834350586ms\r\n",,terminal_output +21803,14583857,"TERMINAL",0,0,"Step 807, loss: 1.6138174533843994, step time: 24.161338806152344ms\r\n",,terminal_output +21804,14583916,"TERMINAL",0,0,"Step 808, loss: 1.8529579639434814, step time: 23.328781127929688ms\r\n",,terminal_output +21805,14584010,"TERMINAL",0,0,"Step 809, loss: 1.8165868520736694, step time: 26.77774429321289ms\r\n",,terminal_output +21806,14584124,"TERMINAL",0,0,"Step 810, loss: 1.795684814453125, step time: 24.37901496887207ms\r\n",,terminal_output +21807,14584177,"TERMINAL",0,0,"Step 811, loss: 1.7854279279708862, step time: 24.218082427978516ms\r\n",,terminal_output +21808,14584282,"TERMINAL",0,0,"Step 812, loss: 1.6937263011932373, step time: 24.31511878967285ms\r\n",,terminal_output +21809,14584399,"TERMINAL",0,0,"Step 813, loss: 1.6475893259048462, step time: 21.335124969482422ms\r\n",,terminal_output +21810,14584449,"TERMINAL",0,0,"Step 814, loss: 1.9200328588485718, step time: 22.24421501159668ms\r\n",,terminal_output +21811,14584554,"TERMINAL",0,0,"Step 815, loss: 1.829902172088623, step time: 21.959781646728516ms\r\n",,terminal_output +21812,14584615,"TERMINAL",0,0,"Step 816, loss: 1.8992154598236084, step time: 21.18515968322754ms\r\n",,terminal_output +21813,14584721,"TERMINAL",0,0,"Step 817, loss: 1.6028668880462646, step time: 20.51544189453125ms\r\n",,terminal_output +21814,14584829,"TERMINAL",0,0,"Step 818, loss: 1.6114537715911865, step time: 21.792888641357422ms\r\n",,terminal_output +21815,14584880,"TERMINAL",0,0,"Step 819, loss: 2.011821985244751, step time: 20.56884765625ms\r\n",,terminal_output +21816,14584985,"TERMINAL",0,0,"Step 820, loss: 1.8416752815246582, step time: 20.306110382080078ms\r\n",,terminal_output +21817,14585092,"TERMINAL",0,0,"Step 821, loss: 1.63699471950531, step time: 21.209716796875ms\r\n",,terminal_output +21818,14585144,"TERMINAL",0,0,"Step 822, loss: 1.6796388626098633, step time: 21.82483673095703ms\r\n",,terminal_output 
+21819,14585250,"TERMINAL",0,0,"Step 823, loss: 1.67470383644104, step time: 20.633220672607422ms\r\n",,terminal_output +21820,14585312,"TERMINAL",0,0,"Step 824, loss: 1.616377592086792, step time: 20.965576171875ms\r\n",,terminal_output +21821,14585422,"TERMINAL",0,0,"Step 825, loss: 1.6055108308792114, step time: 21.30579948425293ms\r\n",,terminal_output +21822,14585531,"TERMINAL",0,0,"Step 826, loss: 1.7222373485565186, step time: 26.428937911987305ms\r\n",,terminal_output +21823,14585676,"TERMINAL",0,0,"Step 827, loss: 1.8064117431640625, step time: 30.36808967590332ms\r\nStep 828, loss: 1.6687754392623901, step time: 24.750709533691406ms\r\n",,terminal_output +21824,14585769,"TERMINAL",0,0,"Step 829, loss: 2.099017858505249, step time: 21.58212661743164ms\r\n",,terminal_output +21825,14585876,"TERMINAL",0,0,"Step 830, loss: 1.849155068397522, step time: 25.454998016357422ms\r\n",,terminal_output +21826,14585942,"TERMINAL",0,0,"Step 831, loss: 1.70132315158844, step time: 23.884057998657227ms\r\n",,terminal_output +21827,14586047,"TERMINAL",0,0,"Step 832, loss: 1.604337215423584, step time: 26.53980255126953ms\r\n",,terminal_output +21828,14586156,"TERMINAL",0,0,"Step 833, loss: 1.6286606788635254, step time: 29.791593551635742ms\r\n",,terminal_output +21829,14586216,"TERMINAL",0,0,"Step 834, loss: 1.7303874492645264, step time: 28.25188636779785ms\r\n",,terminal_output +21830,14586326,"TERMINAL",0,0,"Step 835, loss: 1.6995269060134888, step time: 24.8720645904541ms\r\n",,terminal_output +21831,14586437,"TERMINAL",0,0,"Step 836, loss: 1.7717515230178833, step time: 28.53083610534668ms\r\n",,terminal_output +21832,14586490,"TERMINAL",0,0,"Step 837, loss: 1.6392688751220703, step time: 25.113582611083984ms\r\n",,terminal_output +21833,14586595,"TERMINAL",0,0,"Step 838, loss: 2.078636884689331, step time: 24.558544158935547ms\r\n",,terminal_output +21834,14586702,"TERMINAL",0,0,"Step 839, loss: 1.926476001739502, step time: 27.270078659057617ms\r\n",,terminal_output +21835,14586763,"TERMINAL",0,0,"Step 840, loss: 1.5351002216339111, step time: 25.974750518798828ms\r\n",,terminal_output +21836,14586871,"TERMINAL",0,0,"Step 841, loss: 1.600570559501648, step time: 26.051044464111328ms\r\n",,terminal_output +21837,14586979,"TERMINAL",0,0,"Step 842, loss: 2.2250499725341797, step time: 24.401426315307617ms\r\n",,terminal_output +21838,14587031,"TERMINAL",0,0,"Step 843, loss: 1.5234321355819702, step time: 23.09584617614746ms\r\n",,terminal_output +21839,14587138,"TERMINAL",0,0,"Step 844, loss: 1.8657275438308716, step time: 23.164033889770508ms\r\n",,terminal_output +21840,14587247,"TERMINAL",0,0,"Step 845, loss: 2.1320960521698, step time: 23.7274169921875ms\r\n",,terminal_output +21841,14587303,"TERMINAL",0,0,"Step 846, loss: 1.95160973072052, step time: 25.081157684326172ms\r\n",,terminal_output +21842,14587416,"TERMINAL",0,0,"Step 847, loss: 1.5673539638519287, step time: 23.002147674560547ms\r\n",,terminal_output +21843,14587524,"TERMINAL",0,0,"Step 848, loss: 1.7723572254180908, step time: 24.61838722229004ms\r\n",,terminal_output +21844,14587606,"TERMINAL",0,0,"Step 849, loss: 1.8375295400619507, step time: 21.330595016479492ms\r\n",,terminal_output +21845,14587659,"TERMINAL",0,0,"Step 850, loss: 1.5578891038894653, step time: 21.764516830444336ms\r\n",,terminal_output +21846,14587767,"TERMINAL",0,0,"Step 851, loss: 1.9371581077575684, step time: 20.938873291015625ms\r\n",,terminal_output +21847,14587877,"TERMINAL",0,0,"Step 852, loss: 2.552222728729248, step time: 
21.175146102905273ms\r\n",,terminal_output +21848,14587931,"TERMINAL",0,0,"Step 853, loss: 1.6031489372253418, step time: 21.452903747558594ms\r\n",,terminal_output +21849,14588039,"TERMINAL",0,0,"Step 854, loss: 1.6381338834762573, step time: 21.376371383666992ms\r\n",,terminal_output +21850,14588102,"TERMINAL",0,0,"Step 855, loss: 2.0966227054595947, step time: 20.33686637878418ms\r\n",,terminal_output +21851,14588214,"TERMINAL",0,0,"Step 856, loss: 1.64289391040802, step time: 20.435810089111328ms\r\n",,terminal_output +21852,14588271,"TERMINAL",0,0,"Step 857, loss: 1.8181464672088623, step time: 21.40974998474121ms\r\n",,terminal_output +21853,14588365,"TERMINAL",0,0,"Step 858, loss: 1.6082409620285034, step time: 20.911216735839844ms\r\n",,terminal_output +21854,14588476,"TERMINAL",0,0,"Step 859, loss: 1.4471722841262817, step time: 20.204782485961914ms\r\n",,terminal_output +21855,14588531,"TERMINAL",0,0,"Step 860, loss: 1.5774279832839966, step time: 21.117687225341797ms\r\n",,terminal_output +21856,14588633,"TERMINAL",0,0,"Step 861, loss: 1.5631698369979858, step time: 20.97344398498535ms\r\n",,terminal_output +21857,14588739,"TERMINAL",0,0,"Step 862, loss: 1.6015545129776, step time: 20.89834213256836ms\r\n",,terminal_output +21858,14588820,"TERMINAL",0,0,"Step 863, loss: 1.9191291332244873, step time: 29.043197631835938ms\r\n",,terminal_output +21859,14588931,"TERMINAL",0,0,"Step 864, loss: 1.6097925901412964, step time: 28.080463409423828ms\r\n",,terminal_output +21860,14588984,"TERMINAL",0,0,"Step 865, loss: 1.6589285135269165, step time: 23.828506469726562ms\r\n",,terminal_output +21861,14589091,"TERMINAL",0,0,"Step 866, loss: 2.008711576461792, step time: 25.97951889038086ms\r\n",,terminal_output +21862,14589200,"TERMINAL",0,0,"Step 867, loss: 1.487949013710022, step time: 27.87613868713379ms\r\n",,terminal_output +21863,14589262,"TERMINAL",0,0,"Step 868, loss: 1.8377865552902222, step time: 28.725147247314453ms\r\n",,terminal_output +21864,14589637,"TERMINAL",0,0,"Step 869, loss: 1.8713520765304565, step time: 382.2305202484131ms\r\n",,terminal_output +21865,14589748,"TERMINAL",0,0,"Step 870, loss: 1.6573498249053955, step time: 25.412559509277344ms\r\n",,terminal_output +21866,14589854,"TERMINAL",0,0,"Step 871, loss: 1.8690860271453857, step time: 23.78535270690918ms\r\n",,terminal_output +21867,14589910,"TERMINAL",0,0,"Step 872, loss: 1.572749137878418, step time: 28.226613998413086ms\r\n",,terminal_output +21868,14590018,"TERMINAL",0,0,"Step 873, loss: 1.565833330154419, step time: 22.309303283691406ms\r\n",,terminal_output +21869,14590084,"TERMINAL",0,0,"Step 874, loss: 1.769325613975525, step time: 21.406173706054688ms\r\n",,terminal_output +21870,14590195,"TERMINAL",0,0,"Step 875, loss: 1.7285898923873901, step time: 24.54853057861328ms\r\n",,terminal_output +21871,14590302,"TERMINAL",0,0,"Step 876, loss: 1.8243556022644043, step time: 23.769617080688477ms\r\n",,terminal_output +21872,14590354,"TERMINAL",0,0,"Step 877, loss: 1.5372482538223267, step time: 23.621320724487305ms\r\n",,terminal_output +21873,14590462,"TERMINAL",0,0,"Step 878, loss: 1.5422391891479492, step time: 24.063825607299805ms\r\n",,terminal_output +21874,14590525,"TERMINAL",0,0,"Step 879, loss: 1.6698799133300781, step time: 22.782325744628906ms\r\n",,terminal_output +21875,14590629,"TERMINAL",0,0,"Step 880, loss: 1.881911277770996, step time: 23.2236385345459ms\r\n",,terminal_output +21876,14590736,"TERMINAL",0,0,"Step 881, loss: 1.6279555559158325, step time: 
23.4224796295166ms\r\n",,terminal_output +21877,14590797,"TERMINAL",0,0,"Step 882, loss: 1.4164834022521973, step time: 23.488283157348633ms\r\n",,terminal_output +21878,14590905,"TERMINAL",0,0,"Step 883, loss: 1.6657967567443848, step time: 21.176576614379883ms\r\n",,terminal_output +21879,14590966,"TERMINAL",0,0,"Step 884, loss: 1.6414532661437988, step time: 21.488666534423828ms\r\n",,terminal_output +21880,14591074,"TERMINAL",0,0,"Step 885, loss: 1.540024757385254, step time: 22.556066513061523ms\r\n",,terminal_output +21881,14591186,"TERMINAL",0,0,"Step 886, loss: 1.646831750869751, step time: 21.0874080657959ms\r\n",,terminal_output +21882,14591237,"TERMINAL",0,0,"Step 887, loss: 1.5923454761505127, step time: 21.0878849029541ms\r\n",,terminal_output +21883,14591344,"TERMINAL",0,0,"Step 888, loss: 1.488388180732727, step time: 21.38543128967285ms\r\n",,terminal_output +21884,14591409,"TERMINAL",0,0,"Step 889, loss: 2.471071481704712, step time: 21.837949752807617ms\r\n",,terminal_output +21885,14591521,"TERMINAL",0,0,"Step 890, loss: 1.8038533926010132, step time: 21.80171012878418ms\r\n",,terminal_output +21886,14591597,"TERMINAL",0,0,"Step 891, loss: 3.1717445850372314, step time: 20.64967155456543ms\r\n",,terminal_output +21887,14591707,"TERMINAL",0,0,"Step 892, loss: 1.451690673828125, step time: 20.29561996459961ms\r\n",,terminal_output +21888,14591772,"TERMINAL",0,0,"Step 893, loss: 1.4280787706375122, step time: 22.327423095703125ms\r\n",,terminal_output +21889,14591882,"TERMINAL",0,0,"Step 894, loss: 1.5166950225830078, step time: 22.330522537231445ms\r\n",,terminal_output +21890,14591944,"TERMINAL",0,0,"Step 895, loss: 2.239125967025757, step time: 20.97940444946289ms\r\n",,terminal_output +21891,14592053,"TERMINAL",0,0,"Step 896, loss: 1.5660490989685059, step time: 22.38178253173828ms\r\n",,terminal_output +21892,14592111,"TERMINAL",0,0,"Step 897, loss: 2.0644025802612305, step time: 28.689146041870117ms\r\n",,terminal_output +21893,14592220,"TERMINAL",0,0,"Step 898, loss: 1.4782843589782715, step time: 28.17559242248535ms\r\n",,terminal_output +21894,14592332,"TERMINAL",0,0,"Step 899, loss: 1.6619906425476074, step time: 24.05834197998047ms\r\n",,terminal_output +21895,14592386,"TERMINAL",0,0,"Step 900, loss: 1.4942547082901, step time: 21.490812301635742ms\r\n",,terminal_output +21896,14592492,"TERMINAL",0,0,"Step 901, loss: 1.585818886756897, step time: 25.229454040527344ms\r\n",,terminal_output +21897,14592558,"TERMINAL",0,0,"Step 902, loss: 1.6515259742736816, step time: 24.0323543548584ms\r\n",,terminal_output +21898,14592649,"TERMINAL",0,0,"Step 903, loss: 1.4373817443847656, step time: 27.8165340423584ms\r\n",,terminal_output +21899,14592757,"TERMINAL",0,0,"Step 904, loss: 1.952797293663025, step time: 29.870033264160156ms\r\n",,terminal_output +21900,14592866,"TERMINAL",0,0,"Step 905, loss: 1.4394340515136719, step time: 27.74524688720703ms\r\n",,terminal_output +21901,14592919,"TERMINAL",0,0,"Step 906, loss: 1.4898964166641235, step time: 25.118112564086914ms\r\n",,terminal_output +21902,14593015,"TERMINAL",0,0,"Step 907, loss: 1.5626362562179565, step time: 26.96394920349121ms\r\n",,terminal_output +21903,14593129,"TERMINAL",0,0,"Step 908, loss: 1.911927342414856, step time: 25.382518768310547ms\r\n",,terminal_output +21904,14593189,"TERMINAL",0,0,"Step 909, loss: 2.1285765171051025, step time: 23.303508758544922ms\r\n",,terminal_output +21905,14593301,"TERMINAL",0,0,"Step 910, loss: 1.448134422302246, step time: 26.070117950439453ms\r\n",,terminal_output 
+21906,14593407,"TERMINAL",0,0,"Step 911, loss: 1.4465932846069336, step time: 23.12636375427246ms\r\n",,terminal_output +21907,14593468,"TERMINAL",0,0,"Step 912, loss: 1.4535781145095825, step time: 23.070812225341797ms\r\n",,terminal_output +21908,14593581,"TERMINAL",0,0,"Step 913, loss: 1.561029076576233, step time: 23.615598678588867ms\r\n",,terminal_output +21909,14593638,"TERMINAL",0,0,"Step 914, loss: 1.6381378173828125, step time: 24.278879165649414ms\r\n",,terminal_output +21910,14593745,"TERMINAL",0,0,"Step 915, loss: 1.5103644132614136, step time: 22.26400375366211ms\r\n",,terminal_output +21911,14593811,"TERMINAL",0,0,"Step 916, loss: 1.7298305034637451, step time: 22.392749786376953ms\r\n",,terminal_output +21912,14593925,"TERMINAL",0,0,"Step 917, loss: 2.0900630950927734, step time: 22.56321907043457ms\r\n",,terminal_output +21913,14594031,"TERMINAL",0,0,"Step 918, loss: 1.7905588150024414, step time: 26.090383529663086ms\r\n",,terminal_output +21914,14594083,"TERMINAL",0,0,"Step 919, loss: 2.718235969543457, step time: 23.64516258239746ms\r\n",,terminal_output +21915,14594191,"TERMINAL",0,0,"Step 920, loss: 1.4599988460540771, step time: 23.27561378479004ms\r\n",,terminal_output +21916,14594258,"TERMINAL",0,0,"Step 921, loss: 1.9477390050888062, step time: 21.346092224121094ms\r\n",,terminal_output +21917,14594367,"TERMINAL",0,0,"Step 922, loss: 1.8212504386901855, step time: 22.395610809326172ms\r\n",,terminal_output +21918,14594432,"TERMINAL",0,0,"Step 923, loss: 1.6248655319213867, step time: 21.609067916870117ms\r\n",,terminal_output +21919,14594545,"TERMINAL",0,0,"Step 924, loss: 1.6097851991653442, step time: 21.843433380126953ms\r\n",,terminal_output +21920,14594607,"TERMINAL",0,0,"Step 925, loss: 1.5779714584350586, step time: 20.849227905273438ms\r\n",,terminal_output +21921,14594718,"TERMINAL",0,0,"Step 926, loss: 1.6799358129501343, step time: 23.078203201293945ms\r\n",,terminal_output +21922,14594784,"TERMINAL",0,0,"Step 927, loss: 1.6604325771331787, step time: 21.23570442199707ms\r\n",,terminal_output +21923,14594909,"TERMINAL",0,0,"Step 928, loss: 1.472287654876709, step time: 20.804882049560547ms\r\n",,terminal_output +21924,14594961,"TERMINAL",0,0,"Step 929, loss: 1.441198468208313, step time: 20.80059051513672ms\r\n",,terminal_output +21925,14595069,"TERMINAL",0,0,"Step 930, loss: 1.8533260822296143, step time: 22.169113159179688ms\r\n",,terminal_output +21926,14595134,"TERMINAL",0,0,"Step 931, loss: 2.0670413970947266, step time: 21.25406265258789ms\r\n",,terminal_output +21927,14595244,"TERMINAL",0,0,"Step 932, loss: 1.4946064949035645, step time: 21.767854690551758ms\r\n",,terminal_output +21928,14595312,"TERMINAL",0,0,"Step 933, loss: 3.1222362518310547, step time: 21.533966064453125ms\r\n",,terminal_output +21929,14595432,"TERMINAL",0,0,"Step 934, loss: 1.5715290307998657, step time: 20.68638801574707ms\r\n",,terminal_output +21930,14595500,"TERMINAL",0,0,"Step 935, loss: 1.7191025018692017, step time: 28.652191162109375ms\r\n",,terminal_output +21931,14595608,"TERMINAL",0,0,"Step 936, loss: 1.8424623012542725, step time: 28.420686721801758ms\r\n",,terminal_output +21932,14595713,"TERMINAL",0,0,"Step 937, loss: 1.5191712379455566, step time: 23.1473445892334ms\r\n",,terminal_output +21933,14595773,"TERMINAL",0,0,"Step 938, loss: 1.480445384979248, step time: 21.416902542114258ms\r\n",,terminal_output +21934,14595897,"TERMINAL",0,0,"Step 939, loss: 1.4931966066360474, step time: 26.546478271484375ms\r\n",,terminal_output 
+21935,14595968,"TERMINAL",0,0,"Step 940, loss: 1.465695858001709, step time: 23.4682559967041ms\r\n",,terminal_output +21936,14596063,"TERMINAL",0,0,"Step 941, loss: 2.3391997814178467, step time: 27.85325050354004ms\r\n",,terminal_output +21937,14596129,"TERMINAL",0,0,"Step 942, loss: 1.6186834573745728, step time: 30.06434440612793ms\r\n",,terminal_output +21938,14596235,"TERMINAL",0,0,"Step 943, loss: 1.5381741523742676, step time: 27.508020401000977ms\r\n",,terminal_output +21939,14596299,"TERMINAL",0,0,"Step 944, loss: 1.6513631343841553, step time: 25.08997917175293ms\r\n",,terminal_output +21940,14596404,"TERMINAL",0,0,"Step 945, loss: 2.07820987701416, step time: 27.653932571411133ms\r\n",,terminal_output +21941,14596512,"TERMINAL",0,0,"Step 946, loss: 1.9788775444030762, step time: 25.59661865234375ms\r\n",,terminal_output +21942,14596572,"TERMINAL",0,0,"Step 947, loss: 1.8005878925323486, step time: 25.355100631713867ms\r\n",,terminal_output +21943,14596678,"TERMINAL",0,0,"Step 948, loss: 1.4771836996078491, step time: 27.032136917114258ms\r\n",,terminal_output +21944,14596787,"TERMINAL",0,0,"Step 949, loss: 1.4741876125335693, step time: 23.43297004699707ms\r\n",,terminal_output +21945,14596850,"TERMINAL",0,0,"Step 950, loss: 1.7063194513320923, step time: 26.63588523864746ms\r\n",,terminal_output +21946,14596958,"TERMINAL",0,0,"Step 951, loss: 2.1675784587860107, step time: 23.99921417236328ms\r\n",,terminal_output +21947,14597018,"TERMINAL",0,0,"Step 952, loss: 1.6071984767913818, step time: 24.013757705688477ms\r\n",,terminal_output +21948,14597126,"TERMINAL",0,0,"Step 953, loss: 1.970880150794983, step time: 24.3837833404541ms\r\n",,terminal_output +21949,14597239,"TERMINAL",0,0,"Step 954, loss: 1.6261237859725952, step time: 23.685455322265625ms\r\n",,terminal_output +21950,14597292,"TERMINAL",0,0,"Step 955, loss: 1.5107356309890747, step time: 24.461746215820312ms\r\n",,terminal_output +21951,14597399,"TERMINAL",0,0,"Step 956, loss: 2.1077980995178223, step time: 23.920774459838867ms\r\n",,terminal_output +21952,14597469,"TERMINAL",0,0,"Step 957, loss: 1.673296332359314, step time: 23.27585220336914ms\r\n",,terminal_output +21953,14597627,"TERMINAL",0,0,"Step 958, loss: 1.431923747062683, step time: 21.979570388793945ms\r\n",,terminal_output +21954,14597639,"TERMINAL",0,0,"Step 959, loss: 1.4791268110275269, step time: 22.142648696899414ms\r\n",,terminal_output +21955,14597744,"TERMINAL",0,0,"Step 960, loss: 1.7885745763778687, step time: 22.20439910888672ms\r\n",,terminal_output +21956,14597854,"TERMINAL",0,0,"Step 961, loss: 1.5092066526412964, step time: 21.616458892822266ms\r\n",,terminal_output +21957,14597906,"TERMINAL",0,0,"Step 962, loss: 1.5800219774246216, step time: 22.44424819946289ms\r\n",,terminal_output +21958,14598011,"TERMINAL",0,0,"Step 963, loss: 1.5975391864776611, step time: 20.570993423461914ms\r\n",,terminal_output +21959,14598124,"TERMINAL",0,0,"Step 964, loss: 1.575425148010254, step time: 22.579669952392578ms\r\n",,terminal_output +21960,14598173,"TERMINAL",0,0,"Step 965, loss: 1.4216562509536743, step time: 21.840572357177734ms\r\n",,terminal_output +21961,14598279,"TERMINAL",0,0,"Step 966, loss: 2.046335220336914, step time: 21.7740535736084ms\r\n",,terminal_output +21962,14598382,"TERMINAL",0,0,"Step 967, loss: 1.7115981578826904, step time: 22.38941192626953ms\r\n",,terminal_output +21963,14598433,"TERMINAL",0,0,"Step 968, loss: 1.424301266670227, step time: 21.74687385559082ms\r\n",,terminal_output +21964,14598538,"TERMINAL",0,0,"Step 969, 
loss: 2.0446808338165283, step time: 21.448612213134766ms\r\n",,terminal_output +21965,14598623,"TERMINAL",0,0,"Step 970, loss: 1.8473881483078003, step time: 21.58975601196289ms\r\n",,terminal_output +21966,14598728,"TERMINAL",0,0,"Step 971, loss: 1.592020034790039, step time: 22.115468978881836ms\r\n",,terminal_output +21967,14598791,"TERMINAL",0,0,"Step 972, loss: 1.6142109632492065, step time: 29.001951217651367ms\r\n",,terminal_output +21968,14598898,"TERMINAL",0,0,"Step 973, loss: 1.3752890825271606, step time: 27.64105796813965ms\r\n",,terminal_output +21969,14599009,"TERMINAL",0,0,"Step 974, loss: 1.3985425233840942, step time: 24.322509765625ms\r\n",,terminal_output +21970,14599060,"TERMINAL",0,0,"Step 975, loss: 1.5008363723754883, step time: 21.23117446899414ms\r\n",,terminal_output +21971,14599165,"TERMINAL",0,0,"Step 976, loss: 1.924322485923767, step time: 26.643753051757812ms\r\n",,terminal_output +21972,14599231,"TERMINAL",0,0,"Step 977, loss: 1.5745172500610352, step time: 23.556232452392578ms\r\n",,terminal_output +21973,14599338,"TERMINAL",0,0,"Step 978, loss: 1.3970261812210083, step time: 29.120445251464844ms\r\n",,terminal_output +21974,14599445,"TERMINAL",0,0,"Step 979, loss: 2.001965045928955, step time: 29.64186668395996ms\r\n",,terminal_output +21975,14599508,"TERMINAL",0,0,"Step 980, loss: 1.7343422174453735, step time: 26.856660842895508ms\r\n",,terminal_output +21976,14599615,"TERMINAL",0,0,"Step 981, loss: 1.530301570892334, step time: 24.32394027709961ms\r\n",,terminal_output +21977,14599720,"TERMINAL",0,0,"Step 982, loss: 1.5528926849365234, step time: 27.173995971679688ms\r\n",,terminal_output +21978,14599782,"TERMINAL",0,0,"Step 983, loss: 1.5072354078292847, step time: 24.07383918762207ms\r\n",,terminal_output +21979,14599888,"TERMINAL",0,0,"Step 984, loss: 1.72908616065979, step time: 26.82042121887207ms\r\n",,terminal_output +21980,14599995,"TERMINAL",0,0,"Step 985, loss: 1.3728950023651123, step time: 24.445533752441406ms\r\n",,terminal_output +21981,14600046,"TERMINAL",0,0,"Step 986, loss: 1.6157188415527344, step time: 22.56488800048828ms\r\n",,terminal_output +21982,14600151,"TERMINAL",0,0,"Step 987, loss: 2.0376312732696533, step time: 23.15664291381836ms\r\n",,terminal_output +21983,14600257,"TERMINAL",0,0,"Step 988, loss: 1.8644883632659912, step time: 22.5067138671875ms\r\n",,terminal_output +21984,14600309,"TERMINAL",0,0,"Step 989, loss: 1.3828831911087036, step time: 21.760225296020508ms\r\n",,terminal_output +21985,14600414,"TERMINAL",0,0,"Step 990, loss: 2.5997419357299805, step time: 22.733688354492188ms\r\n",,terminal_output +21986,14600521,"TERMINAL",0,0,"Step 991, loss: 1.4097166061401367, step time: 23.96392822265625ms\r\n",,terminal_output +21987,14600595,"TERMINAL",0,0,"Step 992, loss: 1.720334768295288, step time: 24.677515029907227ms\r\n",,terminal_output +21988,14600702,"TERMINAL",0,0,"Step 993, loss: 1.3755149841308594, step time: 23.65875244140625ms\r\n",,terminal_output +21989,14600763,"TERMINAL",0,0,"Step 994, loss: 1.6134023666381836, step time: 23.409128189086914ms\r\n",,terminal_output +21990,14600872,"TERMINAL",0,0,"Step 995, loss: 1.5449988842010498, step time: 22.564411163330078ms\r\n",,terminal_output +21991,14600936,"TERMINAL",0,0,"Step 996, loss: 1.3805607557296753, step time: 21.5761661529541ms\r\n",,terminal_output +21992,14601051,"TERMINAL",0,0,"Step 997, loss: 1.313269853591919, step time: 22.386789321899414ms\r\n",,terminal_output +21993,14601106,"TERMINAL",0,0,"Step 998, loss: 1.5664719343185425, step time: 
22.85599708557129ms\r\n",,terminal_output +21994,14601202,"TERMINAL",0,0,"Step 999, loss: 1.4943255186080933, step time: 20.902633666992188ms\r\n",,terminal_output +21995,14604044,"TERMINAL",0,0,"Step 1000, loss: 1.5871800184249878, step time: 25.13885498046875ms\r\n",,terminal_output +21996,14604152,"TERMINAL",0,0,"Step 1001, loss: 1.4035148620605469, step time: 29.168128967285156ms\r\n",,terminal_output +21997,14604267,"TERMINAL",0,0,"Step 1002, loss: 1.3455983400344849, step time: 24.548053741455078ms\r\n",,terminal_output +21998,14604315,"TERMINAL",0,0,"Step 1003, loss: 1.2890344858169556, step time: 24.020671844482422ms\r\n",,terminal_output +21999,14604420,"TERMINAL",0,0,"Step 1004, loss: 1.7562764883041382, step time: 22.750139236450195ms\r\n",,terminal_output +22000,14604529,"TERMINAL",0,0,"Step 1005, loss: 1.6214540004730225, step time: 22.454023361206055ms\r\n",,terminal_output +22001,14604603,"TERMINAL",0,0,"Step 1006, loss: 1.5144726037979126, step time: 23.564815521240234ms\r\n",,terminal_output +22002,14604666,"TERMINAL",0,0,"Step 1007, loss: 1.4650273323059082, step time: 23.1168270111084ms\r\n",,terminal_output +22003,14604786,"TERMINAL",0,0,"Step 1008, loss: 1.4000153541564941, step time: 22.185325622558594ms\r\n",,terminal_output +22004,14604854,"TERMINAL",0,0,"Step 1009, loss: 1.3660616874694824, step time: 23.439645767211914ms\r\n",,terminal_output +22005,14604965,"TERMINAL",0,0,"Step 1010, loss: 1.3279510736465454, step time: 23.63729476928711ms\r\n",,terminal_output +22006,14605025,"TERMINAL",0,0,"Step 1011, loss: 1.6090415716171265, step time: 22.184133529663086ms\r\n",,terminal_output +22007,14605133,"TERMINAL",0,0,"Step 1012, loss: 1.3281214237213135, step time: 21.827220916748047ms\r\n",,terminal_output +22008,14605203,"TERMINAL",0,0,"Step 1013, loss: 1.2339893579483032, step time: 22.281169891357422ms\r\n",,terminal_output +22009,14605312,"TERMINAL",0,0,"Step 1014, loss: 1.4592334032058716, step time: 22.021055221557617ms\r\n",,terminal_output +22010,14605421,"TERMINAL",0,0,"Step 1015, loss: 1.438565969467163, step time: 28.191566467285156ms\r\n",,terminal_output +22011,14605472,"TERMINAL",0,0,"Step 1016, loss: 1.3755526542663574, step time: 32.02080726623535ms\r\n",,terminal_output +22012,14605590,"TERMINAL",0,0,"Step 1017, loss: 1.4543778896331787, step time: 25.402545928955078ms\r\n",,terminal_output +22013,14605652,"TERMINAL",0,0,"Step 1018, loss: 1.640671968460083, step time: 22.19700813293457ms\r\n",,terminal_output +22014,14605763,"TERMINAL",0,0,"Step 1019, loss: 1.797390103340149, step time: 27.452468872070312ms\r\n",,terminal_output +22015,14605881,"TERMINAL",0,0,"Step 1020, loss: 2.37400221824646, step time: 25.44713020324707ms\r\n",,terminal_output +22016,14605933,"TERMINAL",0,0,"Step 1021, loss: 1.2838653326034546, step time: 29.12139892578125ms\r\n",,terminal_output +22017,14606044,"TERMINAL",0,0,"Step 1022, loss: 1.7927197217941284, step time: 30.993938446044922ms\r\n",,terminal_output +22018,14606111,"TERMINAL",0,0,"Step 1023, loss: 2.0943922996520996, step time: 30.679941177368164ms\r\n",,terminal_output +22019,14606219,"TERMINAL",0,0,"Step 1024, loss: 1.485493779182434, step time: 28.461694717407227ms\r\n",,terminal_output +22020,14606326,"TERMINAL",0,0,"Step 1025, loss: 1.3713936805725098, step time: 26.383638381958008ms\r\n",,terminal_output +22021,14606386,"TERMINAL",0,0,"Step 1026, loss: 1.4256412982940674, step time: 28.4879207611084ms\r\n",,terminal_output +22022,14606499,"TERMINAL",0,0,"Step 1027, loss: 1.329973816871643, step time: 
25.47430992126465ms\r\n",,terminal_output +22023,14606571,"TERMINAL",0,0,"Step 1028, loss: 1.9062196016311646, step time: 23.651599884033203ms\r\n",,terminal_output +22024,14606679,"TERMINAL",0,0,"Step 1029, loss: 1.4536737203598022, step time: 29.195785522460938ms\r\n",,terminal_output +22025,14606788,"TERMINAL",0,0,"Step 1030, loss: 1.93605375289917, step time: 23.843050003051758ms\r\n",,terminal_output +22026,14606848,"TERMINAL",0,0,"Step 1031, loss: 1.273969054222107, step time: 21.303176879882812ms\r\n",,terminal_output +22027,14606965,"TERMINAL",0,0,"Step 1032, loss: 1.314873456954956, step time: 20.580053329467773ms\r\n",,terminal_output +22028,14607017,"TERMINAL",0,0,"Step 1033, loss: 1.535938024520874, step time: 19.698619842529297ms\r\n",,terminal_output +22029,14607129,"TERMINAL",0,0,"Step 1034, loss: 1.4347620010375977, step time: 20.222187042236328ms\r\n",,terminal_output +22030,14607191,"TERMINAL",0,0,"Step 1035, loss: 2.425485372543335, step time: 19.58632469177246ms\r\n",,terminal_output +22031,14607301,"TERMINAL",0,0,"Step 1036, loss: 1.3325629234313965, step time: 19.505977630615234ms\r\n",,terminal_output +22032,14607363,"TERMINAL",0,0,"Step 1037, loss: 1.924299955368042, step time: 19.905567169189453ms\r\n",,terminal_output +22033,14607471,"TERMINAL",0,0,"Step 1038, loss: 1.247762680053711, step time: 19.351720809936523ms\r\n",,terminal_output +22034,14607546,"TERMINAL",0,0,"Step 1039, loss: 1.3378791809082031, step time: 19.155263900756836ms\r\n",,terminal_output +22035,14607651,"TERMINAL",0,0,"Step 1040, loss: 1.378947138786316, step time: 20.28512954711914ms\r\n",,terminal_output +22036,14607716,"TERMINAL",0,0,"Step 1041, loss: 1.3298263549804688, step time: 19.197463989257812ms\r\n",,terminal_output +22037,14607820,"TERMINAL",0,0,"Step 1042, loss: 1.4399205446243286, step time: 19.50979232788086ms\r\n",,terminal_output +22038,14607928,"TERMINAL",0,0,"Step 1043, loss: 1.5418426990509033, step time: 19.991636276245117ms\r\n",,terminal_output +22039,14607979,"TERMINAL",0,0,"Step 1044, loss: 1.4844766855239868, step time: 19.621610641479492ms\r\n",,terminal_output +22040,14608086,"TERMINAL",0,0,"Step 1045, loss: 1.421523928642273, step time: 21.25239372253418ms\r\n",,terminal_output +22041,14608155,"TERMINAL",0,0,"^C",,terminal_output +22042,14608212,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 241, in \r\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 341, in __format__\r\n return format(self._value[()], format_spec)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 641, in _value\r\n npy_value, did_copy = self._single_device_array_to_np_array_did_copy()\r\nKeyboardInterrupt\r\n",,terminal_output +22043,14608351,"TERMINAL",0,0,"^CException ignored in atexit callback: .teardown_atexit at 0x14cc580ee830>\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 94, in teardown_atexit\r\n 
conn.teardown(hooks.exit_code)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 226, in teardown\r\n self._router.join()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/interface/router.py"", line 75, in join\r\n self._thread.join()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1096, in join\r\n self._wait_for_tstate_lock()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1116, in _wait_for_tstate_lock\r\n if lock.acquire(block, timeout):\r\nKeyboardInterrupt: \r\n",,terminal_output +22044,14608545,"TERMINAL",0,0,"^CException ignored in: .remove at 0x14cd308e2710>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +22045,14608715,"TERMINAL",0,0,"^CException ignored in: .remove at 0x14cd308e2710>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +22046,14609687,"TERMINAL",0,0,"^C",,terminal_output +22047,14610068,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +22048,14610998,"train_dynamics.py",0,0,"",python,tab +22049,14611524,"train_dynamics.py",2960,0,"",python,selection_mouse +22050,14613977,"train_dynamics.py",9522,31,"",python,content +22051,14613978,"train_dynamics.py",9440,82,"",python,content +22052,14613979,"train_dynamics.py",9385,55,"",python,content +22053,14613980,"train_dynamics.py",8936,22,"",python,content +22054,14613980,"train_dynamics.py",8861,75,"",python,content +22055,14613984,"train_dynamics.py",8800,61,"",python,content +22056,14613984,"train_dynamics.py",8712,88,"",python,content +22057,14622215,"train_dynamics.py",8712,0," debug_comparison_seq = jnp.concatenate((gt_seq, debug_seq), axis=1)\n",python,content +22058,14622216,"train_dynamics.py",8800,0," debug_comparison_seq = einops.rearrange(\n",python,content +22059,14622217,"train_dynamics.py",8861,0," debug_comparison_seq * 255, ""t h w c -> h (t w) c""\n",python,content +22060,14622220,"train_dynamics.py",8936,0," )\n",python,content +22061,14622221,"train_dynamics.py",9385,0," true_vs_debug=wandb.Image(\n",python,content +22062,14622222,"train_dynamics.py",9440,0," np.asarray(debug_comparison_seq.astype(np.uint8))\n",python,content +22063,14622222,"train_dynamics.py",9522,0," ),\n",python,content +22064,14626708,"genie.py",0,0,"",python,tab +22065,14629170,"genie.py",3035,0,"",python,selection_mouse +22066,14629765,"genie.py",2981,0,"",python,selection_mouse +22067,14629914,"genie.py",2974,9,"tokenizer",python,selection_mouse +22068,14630557,"genie.py",3025,0,"",python,selection_mouse +22069,14631223,"genie.py",3140,0,"",python,selection_mouse +22070,14631820,"genie.py",3133,0,"",python,selection_mouse +22071,14631967,"genie.py",3119,17,"tokenizer_outputs",python,selection_mouse 
+22072,14632633,"genie.py",3012,0,"",python,selection_mouse +22073,14632795,"genie.py",3004,11,"mle_indices",python,selection_mouse +22074,14633412,"genie.py",3129,0,"",python,selection_mouse +22075,14633597,"genie.py",3119,17,"tokenizer_outputs",python,selection_mouse +22076,14633814,"genie.py",3119,18,"tokenizer_outputs[",python,selection_mouse +22077,14633840,"genie.py",3119,26,"tokenizer_outputs[""indices",python,selection_mouse +22078,14634012,"genie.py",3119,27,"tokenizer_outputs[""indices""",python,selection_mouse +22079,14634245,"genie.py",3119,28,"tokenizer_outputs[""indices""]",python,selection_mouse +22080,14634927,"genie.py",3147,0,"",python,selection_mouse +22081,14635431,"genie.py",3146,0,"",python,selection_mouse +22082,14635593,"genie.py",3145,3,"""],",python,selection_mouse +22083,14635779,"genie.py",3145,3,"""],",python,selection_mouse +22084,14635801,"genie.py",3138,10,"indices""],",python,selection_mouse +22085,14635857,"genie.py",3137,11,"""indices""],",python,selection_mouse +22086,14635865,"genie.py",3082,66,"= self.tokenizer.decode(\n tokenizer_outputs[""indices""],",python,selection_mouse +22087,14635881,"genie.py",3080,68,"] = self.tokenizer.decode(\n tokenizer_outputs[""indices""],",python,selection_mouse +22088,14635937,"genie.py",3079,69,"""] = self.tokenizer.decode(\n tokenizer_outputs[""indices""],",python,selection_mouse +22089,14635937,"genie.py",3119,29,"tokenizer_outputs[""indices""],",python,selection_mouse +22090,14636627,"genie.py",3126,0,"",python,selection_mouse +22091,14636695,"genie.py",3119,17,"tokenizer_outputs",python,selection_mouse +22092,14636981,"genie.py",3119,18,"tokenizer_outputs[",python,selection_mouse +22093,14636981,"genie.py",3119,26,"tokenizer_outputs[""indices",python,selection_mouse +22094,14637296,"genie.py",3119,27,"tokenizer_outputs[""indices""",python,selection_mouse +22095,14637360,"genie.py",3119,28,"tokenizer_outputs[""indices""]",python,selection_mouse +22096,14638025,"genie.py",3147,0,"",python,selection_mouse +22097,14638452,"genie.py",3146,1,"]",python,selection_mouse +22098,14638479,"genie.py",3144,3,"s""]",python,selection_mouse +22099,14638493,"genie.py",3142,5,"ces""]",python,selection_mouse +22100,14638510,"genie.py",3140,7,"dices""]",python,selection_mouse +22101,14638523,"genie.py",3138,9,"indices""]",python,selection_mouse +22102,14638538,"genie.py",3136,11,"[""indices""]",python,selection_mouse +22103,14638552,"genie.py",3134,13,"ts[""indices""]",python,selection_mouse +22104,14638576,"genie.py",3133,14,"uts[""indices""]",python,selection_mouse +22105,14638589,"genie.py",3132,15,"puts[""indices""]",python,selection_mouse +22106,14638602,"genie.py",3131,16,"tputs[""indices""]",python,selection_mouse +22107,14638660,"genie.py",3130,17,"utputs[""indices""]",python,selection_mouse +22108,14638661,"genie.py",3129,18,"outputs[""indices""]",python,selection_mouse +22109,14638661,"genie.py",3128,19,"_outputs[""indices""]",python,selection_mouse +22110,14638784,"genie.py",3127,20,"r_outputs[""indices""]",python,selection_mouse +22111,14638785,"genie.py",3126,21,"er_outputs[""indices""]",python,selection_mouse +22112,14638786,"genie.py",3125,22,"zer_outputs[""indices""]",python,selection_mouse +22113,14639119,"genie.py",3125,0,"",python,selection_mouse +22114,14639120,"genie.py",3119,17,"tokenizer_outputs",python,selection_mouse +22115,14639399,"genie.py",3119,19,"tokenizer_outputs[""",python,selection_mouse +22116,14639427,"genie.py",3119,26,"tokenizer_outputs[""indices",python,selection_mouse 
+22117,14639804,"genie.py",3119,27,"tokenizer_outputs[""indices""",python,selection_mouse +22118,14640118,"genie.py",3119,28,"tokenizer_outputs[""indices""]",python,selection_mouse +22119,14640709,"genie.py",3147,0,"",python,selection_mouse +22120,14641476,"genie.py",3185,0,"",python,selection_mouse +22121,14641477,"genie.py",3184,0,"",python,selection_command +22122,14642204,"genie.py",3133,0,"",python,selection_mouse +22123,14642354,"genie.py",3119,17,"tokenizer_outputs",python,selection_mouse +22124,14648424,"train_dynamics.py",0,0,"",python,tab +22125,14649876,"genie.py",0,0,"",python,tab +22126,14651858,"train_dynamics.py",0,0,"",python,tab +22127,14655327,"train_dynamics.py",2504,0,"",python,selection_mouse +22128,14656200,"train_dynamics.py",2242,0,"",python,selection_mouse +22129,14656370,"train_dynamics.py",2242,5,"optax",python,selection_mouse +22130,14656887,"train_dynamics.py",2216,0,"",python,selection_mouse +22131,14657050,"train_dynamics.py",2212,7,"outputs",python,selection_mouse +22132,14671375,"train_dynamics.py",2244,0,"",python,selection_mouse +22133,14671926,"train_dynamics.py",2217,0,"",python,selection_mouse +22134,14673134,"train_dynamics.py",2210,0,"",python,selection_mouse +22135,14673667,"train_dynamics.py",2198,0,"",python,selection_mouse +22136,14674179,"train_dynamics.py",2208,0,"",python,selection_mouse +22137,14674800,"train_dynamics.py",2217,0,"",python,selection_mouse +22138,14675345,"train_dynamics.py",2213,0,"",python,selection_mouse +22139,14675513,"train_dynamics.py",2212,7,"outputs",python,selection_mouse +22140,14676153,"train_dynamics.py",2221,0,"",python,selection_mouse +22141,14676704,"train_dynamics.py",2227,0,"",python,selection_mouse +22142,14676917,"train_dynamics.py",2225,2,"""]",python,selection_mouse +22143,14676930,"train_dynamics.py",2223,4,"sk""]",python,selection_mouse +22144,14676962,"train_dynamics.py",2221,6,"mask""]",python,selection_mouse +22145,14676979,"train_dynamics.py",2218,9,"s[""mask""]",python,selection_mouse +22146,14676993,"train_dynamics.py",2217,10,"ts[""mask""]",python,selection_mouse +22147,14677008,"train_dynamics.py",2215,12,"puts[""mask""]",python,selection_mouse +22148,14677022,"train_dynamics.py",2214,13,"tputs[""mask""]",python,selection_mouse +22149,14677081,"train_dynamics.py",2213,14,"utputs[""mask""]",python,selection_mouse +22150,14677606,"train_dynamics.py",2213,0,"",python,selection_mouse +22151,14677899,"train_dynamics.py",2227,0,"",python,selection_mouse +22152,14805309,"scripts_horeka/train_tokenizer.sh",0,0,"#!/usr/bin/env bash\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=""debug""\nslurm_job_id=""0000""\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=16 \\n --min_lr=4.24e-4 \\n --max_lr=4.24e-4 \\n --log_image_interval=100 \\n --log \\n --name=test-wandb-tags-$slurm_job_id \\n --tags test tokenizer debug \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir",shellscript,tab +22153,14807245,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +22154,14808358,"scripts_horeka/train_dynamics.sh",1645,0,"",shellscript,selection_mouse +22155,14808517,"scripts_horeka/train_dynamics.sh",1643,2,".0",shellscript,selection_mouse 
+22156,14808532,"scripts_horeka/train_dynamics.sh",1638,7,"mit=0.0",shellscript,selection_mouse +22157,14808591,"scripts_horeka/train_dynamics.sh",1600,45,"--num_latent_actions=1 \\n --mask_limit=0.0",shellscript,selection_mouse +22158,14808591,"scripts_horeka/train_dynamics.sh",1596,49," --num_latent_actions=1 \\n --mask_limit=0.0",shellscript,selection_mouse +22159,14808855,"scripts_horeka/train_dynamics.sh",1625,20," --mask_limit=0.0",shellscript,selection_mouse +22160,14808938,"scripts_horeka/train_dynamics.sh",1645,1,"\n",shellscript,selection_mouse +22161,14808994,"scripts_horeka/train_dynamics.sh",1628,17," --mask_limit=0.0",shellscript,selection_mouse +22162,14809006,"scripts_horeka/train_dynamics.sh",1631,14,"mask_limit=0.0",shellscript,selection_mouse +22163,14809022,"scripts_horeka/train_dynamics.sh",1645,1,"\n",shellscript,selection_mouse +22164,14809224,"scripts_horeka/train_dynamics.sh",1643,2,".0",shellscript,selection_mouse +22165,14809240,"scripts_horeka/train_dynamics.sh",1644,1,"0",shellscript,selection_mouse +22166,14809262,"scripts_horeka/train_dynamics.sh",1645,0,"",shellscript,selection_mouse +22167,14809458,"scripts_horeka/train_dynamics.sh",1644,1,"0",shellscript,selection_mouse +22168,14809528,"scripts_horeka/train_dynamics.sh",1643,2,".0",shellscript,selection_mouse +22169,14809614,"scripts_horeka/train_dynamics.sh",1642,3,"0.0",shellscript,selection_mouse +22170,14810384,"scripts_horeka/train_dynamics.sh",1642,3,"1",shellscript,content +22171,14810385,"scripts_horeka/train_dynamics.sh",1643,0,"",shellscript,selection_keyboard +22172,14810802,"scripts_horeka/train_dynamics.sh",1643,0,".",shellscript,content +22173,14810803,"scripts_horeka/train_dynamics.sh",1644,0,"",shellscript,selection_keyboard +22174,14812084,"scripts_horeka/train_dynamics.sh",1644,0,"0",shellscript,content +22175,14812085,"scripts_horeka/train_dynamics.sh",1645,0,"",shellscript,selection_keyboard +22176,14814674,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +22177,14815404,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+22178,14815529,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3921049\r\nSLURM_JOB_GPUS=3\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0531\r\nSLURM_JOB_START_TIME=1751300521\r\nSLURM_STEP_NODELIST=hkn0531\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751304121\r\nSLURM_PMI2_SRUN_PORT=37937\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3307154\r\nSLURM_PTY_PORT=38317\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0531\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=93\r\nSLURM_NODELIST=hkn0531\r\nSLURM_SRUN_COMM_PORT=43825\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3307154\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0531\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=43825\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0531\r\n",,terminal_output +22179,14818475,"TERMINAL",0,0,"2025-06-30 18:31:02.888933: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751301062.902248 3930185 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751301062.906634 3930185 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n",,terminal_output +22180,14818528,"TERMINAL",0,0,"W0000 00:00:1751301062.918774 3930185 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751301062.918792 3930185 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751301062.918795 3930185 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751301062.918797 3930185 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +22181,14823678,"TERMINAL",0,0,"W0000 00:00:1751301068.105438 3930185 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +22182,14824090,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +22183,14824914,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +22184,14825789,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_183109-k6pfalr3\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-tiny-overfit-big-lr-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/k6pfalr3\r\n",,terminal_output +22185,14827264,"TERMINAL",0,0,"2025-06-30 18:31:11.628356: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +22186,14839875,"TERMINAL",0,0,"2025-06-30 18:31:24.299101: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +22187,14855312,"TERMINAL",0,0,"2025-06-30 18:31:39.697671: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +22188,14862281,"TERMINAL",0,0,"2025-06-30 18:31:46.707861: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +22189,14881632,"TERMINAL",0,0,"batch shape: (1, 16, 90, 160, 3)\r\n",,terminal_output +22190,14894735,"TERMINAL",0,0,"2025-06-30 18:32:19.066792: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:32:19.067384: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:32:19.067502: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:32:19.068162: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-06-30 18:32:19.069209: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +22191,14903741,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +22192,14906337,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +22193,14906863,"train_dynamics.py",0,0,"",python,tab +22194,14909430,"scripts_horeka/train_tokenizer.sh",0,0,"",shellscript,tab +22195,14912511,"genie.py",0,0,"",python,tab +22196,14913074,"train_dynamics.py",0,0,"",python,tab +22197,14916336,"train_dynamics.py",2328,0,"",python,selection_mouse +22198,14916476,"train_dynamics.py",2324,7,"outputs",python,selection_mouse +22199,14916624,"train_dynamics.py",2323,8," outputs",python,selection_mouse +22200,14916643,"train_dynamics.py",2320,11,"""], outputs",python,selection_mouse +22201,14916657,"train_dynamics.py",2308,23,"token_logits""], outputs",python,selection_mouse +22202,14916717,"train_dynamics.py",2307,24,"""token_logits""], outputs",python,selection_mouse +22203,14916727,"train_dynamics.py",2306,25,"[""token_logits""], outputs",python,selection_mouse +22204,14916744,"train_dynamics.py",2299,32,"outputs[""token_logits""], outputs",python,selection_mouse +22205,14916860,"train_dynamics.py",2298,33," outputs[""token_logits""], outputs",python,selection_mouse +22206,14916945,"train_dynamics.py",2297,34," outputs[""token_logits""], outputs",python,selection_mouse +22207,14917013,"train_dynamics.py",2232,99,"ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs",python,selection_mouse +22208,14917414,"train_dynamics.py",2235,0,"",python,selection_mouse +22209,14917415,"train_dynamics.py",2232,7,"ce_loss",python,selection_mouse +22210,14917604,"train_dynamics.py",2232,66,"ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n ",python,selection_mouse +22211,14917661,"train_dynamics.py",2232,121,"ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22212,14918041,"train_dynamics.py",2353,0,"",python,selection_mouse +22213,14918362,"train_dynamics.py",2297,56," outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22214,14918377,"train_dynamics.py",2295,58," outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22215,14918437,"train_dynamics.py",2294,59," outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22216,14918438,"train_dynamics.py",2230,123," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22217,14918494,"train_dynamics.py",2229,124," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse 
+22218,14918942,"train_dynamics.py",2228,125," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22219,14919487,"train_dynamics.py",2228,0,"",python,selection_mouse +22220,14919488,"train_dynamics.py",2228,4," ",python,selection_mouse +22221,14919672,"train_dynamics.py",2228,64," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n ",python,selection_mouse +22222,14919689,"train_dynamics.py",2228,66," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n ",python,selection_mouse +22223,14919741,"train_dynamics.py",2228,124," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n ",python,selection_mouse +22224,14919742,"train_dynamics.py",2228,125," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22225,14919797,"train_dynamics.py",2228,137," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss",python,selection_mouse +22226,14920673,"train_dynamics.py",2364,0,"",python,selection_mouse +22227,14921180,"train_dynamics.py",2353,0,"",python,selection_mouse +22228,14921468,"train_dynamics.py",2298,55," outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22229,14921482,"train_dynamics.py",2296,57," outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22230,14921499,"train_dynamics.py",2295,58," outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22231,14921560,"train_dynamics.py",2293,60," outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22232,14921561,"train_dynamics.py",2292,61," outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22233,14921566,"train_dynamics.py",2229,124," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22234,14921581,"train_dynamics.py",2228,125," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22235,14934814,"train_dynamics.py",2370,0,"",python,selection_mouse +22236,14935474,"train_dynamics.py",2300,0,"",python,selection_mouse +22237,14935681,"train_dynamics.py",2299,1,"o",python,selection_mouse +22238,14935681,"train_dynamics.py",2298,2," o",python,selection_mouse +22239,14935696,"train_dynamics.py",2295,5," o",python,selection_mouse +22240,14935747,"train_dynamics.py",2293,7," o",python,selection_mouse +22241,14935748,"train_dynamics.py",2292,8," o",python,selection_mouse +22242,14935814,"train_dynamics.py",2291,9," o",python,selection_mouse +22243,14936754,"train_dynamics.py",2297,0,"",python,selection_mouse +22244,14937115,"train_dynamics.py",2353,0,"",python,selection_mouse +22245,14937323,"train_dynamics.py",2297,56," outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22246,14937329,"train_dynamics.py",2295,58," outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22247,14937390,"train_dynamics.py",2294,59," outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22248,14937391,"train_dynamics.py",2231,122," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], 
outputs[""video_tokens""]\n )",python,selection_mouse +22249,14937391,"train_dynamics.py",2230,123," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22250,14937461,"train_dynamics.py",2229,124," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22251,14938091,"train_dynamics.py",2228,125," ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )",python,selection_mouse +22252,14938666,"train_dynamics.py",2228,0,"",python,selection_mouse +22253,14939182,"train_dynamics.py",2291,0,"",python,selection_mouse +22254,14939346,"train_dynamics.py",2291,8," ",python,selection_mouse +22255,14940271,"train_dynamics.py",2299,0,"",python,selection_mouse +22256,14940336,"train_dynamics.py",2299,7,"outputs",python,selection_mouse +22257,14940978,"train_dynamics.py",2295,0,"",python,selection_mouse +22258,14941132,"train_dynamics.py",2291,8," ",python,selection_mouse +22259,14941148,"TERMINAL",0,0,"Step 0, loss: 9.507311820983887, step time: 59234.67421531677ms\r\n",,terminal_output +22260,14941212,"TERMINAL",0,0,"Step 1, loss: 8.612935066223145, step time: 28.935909271240234ms\r\n",,terminal_output +22261,14941320,"TERMINAL",0,0,"Step 2, loss: 8.0076322555542, step time: 22.5830078125ms\r\n",,terminal_output +22262,14941383,"TERMINAL",0,0,"Step 3, loss: 7.5659356117248535, step time: 21.3010311126709ms\r\n",,terminal_output +22263,14941492,"TERMINAL",0,0,"Step 4, loss: 7.2345170974731445, step time: 20.259380340576172ms\r\n",,terminal_output +22264,14941556,"TERMINAL",0,0,"Step 5, loss: 6.995401382446289, step time: 20.677566528320312ms\r\n",,terminal_output +22265,14941668,"TERMINAL",0,0,"Step 6, loss: 6.817324161529541, step time: 21.724700927734375ms\r\n",,terminal_output +22266,14941728,"TERMINAL",0,0,"Step 7, loss: 6.672580718994141, step time: 20.334482192993164ms\r\n",,terminal_output +22267,14941842,"TERMINAL",0,0,"Step 8, loss: 6.540624141693115, step time: 24.741649627685547ms\r\n",,terminal_output +22268,14941944,"TERMINAL",0,0,"Step 9, loss: 6.413122177124023, step time: 21.787643432617188ms\r\n",,terminal_output +22269,14942006,"TERMINAL",0,0,"Step 10, loss: 6.296413898468018, step time: 20.740509033203125ms\r\n",,terminal_output +22270,14942114,"TERMINAL",0,0,"Step 11, loss: 6.199769973754883, step time: 30.214548110961914ms\r\n",,terminal_output +22271,14942203,"TERMINAL",0,0,"Step 12, loss: 6.118111610412598, step time: 25.707483291625977ms\r\n",,terminal_output +22272,14942268,"TERMINAL",0,0,"Step 13, loss: 6.040252685546875, step time: 24.485111236572266ms\r\n",,terminal_output +22273,14942375,"TERMINAL",0,0,"Step 14, loss: 5.965729713439941, step time: 31.232357025146484ms\r\n",,terminal_output +22274,14942486,"TERMINAL",0,0,"Step 15, loss: 5.900126934051514, step time: 28.872251510620117ms\r\n",,terminal_output +22275,14942548,"TERMINAL",0,0,"Step 16, loss: 5.8440046310424805, step time: 25.450468063354492ms\r\n",,terminal_output +22276,14942656,"TERMINAL",0,0,"Step 17, loss: 5.791394233703613, step time: 28.687477111816406ms\r\n",,terminal_output +22277,14942807,"TERMINAL",0,0,"Step 18, loss: 5.7364373207092285, step time: 31.962871551513672ms\r\n",,terminal_output +22278,14942812,"TERMINAL",0,0,"Step 19, loss: 5.680272102355957, step time: 30.61842918395996ms\r\n",,terminal_output +22279,14942921,"TERMINAL",0,0,"Step 20, loss: 
5.627078056335449, step time: 29.992341995239258ms\r\n",,terminal_output +22280,14943036,"TERMINAL",0,0,"Step 21, loss: 5.576468467712402, step time: 28.67865562438965ms\r\n",,terminal_output +22281,14943094,"TERMINAL",0,0,"Step 22, loss: 5.526310443878174, step time: 31.517744064331055ms\r\n",,terminal_output +22282,14943200,"TERMINAL",0,0,"Step 23, loss: 5.476001739501953, step time: 28.41806411743164ms\r\n",,terminal_output +22283,14943309,"TERMINAL",0,0,"Step 24, loss: 5.426018238067627, step time: 27.734756469726562ms\r\n",,terminal_output +22284,14943402,"TERMINAL",0,0,"Step 25, loss: 5.376683235168457, step time: 29.152631759643555ms\r\n",,terminal_output +22285,14943460,"TERMINAL",0,0,"Step 26, loss: 5.328715801239014, step time: 29.031753540039062ms\r\n",,terminal_output +22286,14943570,"TERMINAL",0,0,"Step 27, loss: 5.2822346687316895, step time: 29.710054397583008ms\r\n",,terminal_output +22287,14943640,"TERMINAL",0,0,"Step 28, loss: 5.237625598907471, step time: 25.838136672973633ms\r\n",,terminal_output +22288,14943752,"TERMINAL",0,0,"Step 29, loss: 5.195811748504639, step time: 25.733232498168945ms\r\n",,terminal_output +22289,14943828,"TERMINAL",0,0,"Step 30, loss: 5.156928062438965, step time: 26.615619659423828ms\r\n",,terminal_output +22290,14943939,"TERMINAL",0,0,"Step 31, loss: 5.119490623474121, step time: 24.151325225830078ms\r\n",,terminal_output +22291,14944021,"TERMINAL",0,0,"Step 32, loss: 5.082259178161621, step time: 28.177976608276367ms\r\n",,terminal_output +22292,14944131,"TERMINAL",0,0,"Step 33, loss: 5.045712947845459, step time: 26.201963424682617ms\r\n",,terminal_output +22293,14944194,"TERMINAL",0,0,"Step 34, loss: 5.010613918304443, step time: 25.12645721435547ms\r\n",,terminal_output +22294,14944299,"TERMINAL",0,0,"Step 35, loss: 4.977578163146973, step time: 24.88088607788086ms\r\n",,terminal_output +22295,14944364,"TERMINAL",0,0,"Step 36, loss: 4.946214199066162, step time: 26.772737503051758ms\r\n",,terminal_output +22296,14944469,"TERMINAL",0,0,"Step 37, loss: 4.915106773376465, step time: 25.380373001098633ms\r\n",,terminal_output +22297,14944579,"TERMINAL",0,0,"Step 38, loss: 4.883896350860596, step time: 25.699138641357422ms\r\n",,terminal_output +22298,14944629,"TERMINAL",0,0,"Step 39, loss: 4.854182243347168, step time: 24.37734603881836ms\r\n",,terminal_output +22299,14944733,"TERMINAL",0,0,"Step 40, loss: 4.825870037078857, step time: 23.242712020874023ms\r\n",,terminal_output +22300,14944844,"TERMINAL",0,0,"Step 41, loss: 4.797774314880371, step time: 22.694110870361328ms\r\n",,terminal_output +22301,14944906,"TERMINAL",0,0,"Step 42, loss: 4.769989967346191, step time: 22.9799747467041ms\r\n",,terminal_output +22302,14945017,"TERMINAL",0,0,"Step 43, loss: 4.7437744140625, step time: 21.984338760375977ms\r\n",,terminal_output +22303,14945083,"TERMINAL",0,0,"Step 44, loss: 4.719491958618164, step time: 22.57061004638672ms\r\n",,terminal_output +22304,14945189,"TERMINAL",0,0,"Step 45, loss: 4.695949077606201, step time: 23.164033889770508ms\r\n",,terminal_output +22305,14945249,"TERMINAL",0,0,"Step 46, loss: 4.672296524047852, step time: 23.0255126953125ms\r\n",,terminal_output +22306,14945435,"TERMINAL",0,0,"Step 47, loss: 4.648569107055664, step time: 22.742033004760742ms\r\nStep 48, loss: 4.625061511993408, step time: 23.197412490844727ms\r\n",,terminal_output +22307,14945552,"TERMINAL",0,0,"Step 49, loss: 4.601585865020752, step time: 23.4527587890625ms\r\n",,terminal_output +22308,14945612,"TERMINAL",0,0,"Step 50, loss: 4.57826042175293, 
step time: 22.90821075439453ms\r\n",,terminal_output +22309,14945691,"TERMINAL",0,0,"Step 51, loss: 4.555135726928711, step time: 23.44965934753418ms\r\n",,terminal_output +22310,14945794,"TERMINAL",0,0,"Step 52, loss: 4.531438827514648, step time: 22.630691528320312ms\r\n",,terminal_output +22311,14945902,"TERMINAL",0,0,"Step 53, loss: 4.507411479949951, step time: 31.10814094543457ms\r\n",,terminal_output +22312,14945964,"TERMINAL",0,0,"Step 54, loss: 4.483611583709717, step time: 30.838966369628906ms\r\n",,terminal_output +22313,14946073,"TERMINAL",0,0,"Step 55, loss: 4.4600958824157715, step time: 25.559425354003906ms\r\n",,terminal_output +22314,14946187,"TERMINAL",0,0,"Step 56, loss: 4.436676025390625, step time: 24.487972259521484ms\r\n",,terminal_output +22315,14946240,"TERMINAL",0,0,"Step 57, loss: 4.41303014755249, step time: 26.198387145996094ms\r\n",,terminal_output +22316,14946348,"TERMINAL",0,0,"Step 58, loss: 4.388967990875244, step time: 28.318405151367188ms\r\n",,terminal_output +22317,14946470,"TERMINAL",0,0,"Step 59, loss: 4.364536762237549, step time: 30.739784240722656ms\r\n",,terminal_output +22318,14946522,"TERMINAL",0,0,"Step 60, loss: 4.339911937713623, step time: 32.17911720275879ms\r\n",,terminal_output +22319,14946629,"TERMINAL",0,0,"Step 61, loss: 4.315260887145996, step time: 28.54180335998535ms\r\n",,terminal_output +22320,14946737,"TERMINAL",0,0,"Step 62, loss: 4.2907514572143555, step time: 28.157472610473633ms\r\n",,terminal_output +22321,14946800,"TERMINAL",0,0,"Step 63, loss: 4.266241550445557, step time: 29.85692024230957ms\r\n",,terminal_output +22322,14946911,"TERMINAL",0,0,"Step 64, loss: 4.241824626922607, step time: 25.94733238220215ms\r\n",,terminal_output +22323,14946971,"TERMINAL",0,0,"Step 65, loss: 4.217482089996338, step time: 28.78260612487793ms\r\n",,terminal_output +22324,14947081,"TERMINAL",0,0,"Step 66, loss: 4.193249702453613, step time: 26.015520095825195ms\r\n",,terminal_output +22325,14947191,"TERMINAL",0,0,"Step 67, loss: 4.169187068939209, step time: 25.727033615112305ms\r\n",,terminal_output +22326,14947244,"TERMINAL",0,0,"Step 68, loss: 4.145247459411621, step time: 25.244474411010742ms\r\n",,terminal_output +22327,14947351,"TERMINAL",0,0,"Step 69, loss: 4.121421813964844, step time: 24.63388442993164ms\r\n",,terminal_output +22328,14947418,"TERMINAL",0,0,"Step 70, loss: 4.097927570343018, step time: 24.998188018798828ms\r\n",,terminal_output +22329,14947528,"TERMINAL",0,0,"Step 71, loss: 4.074723720550537, step time: 23.626327514648438ms\r\n",,terminal_output +22330,14947595,"TERMINAL",0,0,"Step 72, loss: 4.052008152008057, step time: 25.031566619873047ms\r\n",,terminal_output +22331,14947702,"TERMINAL",0,0,"Step 73, loss: 4.029799461364746, step time: 23.046493530273438ms\r\n",,terminal_output +22332,14947867,"TERMINAL",0,0,"Step 74, loss: 4.007883548736572, step time: 24.127483367919922ms\r\nStep 75, loss: 3.986321449279785, step time: 23.79775047302246ms\r\n",,terminal_output +22333,14947959,"TERMINAL",0,0,"Step 76, loss: 3.9649617671966553, step time: 21.762371063232422ms\r\n",,terminal_output +22334,14948071,"TERMINAL",0,0,"Step 77, loss: 3.943984031677246, step time: 20.827054977416992ms\r\n",,terminal_output +22335,14948126,"TERMINAL",0,0,"Step 78, loss: 3.9236669540405273, step time: 21.24476432800293ms\r\n",,terminal_output +22336,14948221,"TERMINAL",0,0,"Step 79, loss: 3.9043726921081543, step time: 21.346330642700195ms\r\n",,terminal_output +22337,14948331,"TERMINAL",0,0,"Step 80, loss: 3.8850250244140625, step time: 
20.92885971069336ms\r\n",,terminal_output +22338,14948426,"TERMINAL",0,0,"Step 81, loss: 3.8646352291107178, step time: 21.88277244567871ms\r\n",,terminal_output +22339,14948479,"TERMINAL",0,0,"Step 82, loss: 3.846709966659546, step time: 20.918846130371094ms\r\n",,terminal_output +22340,14948598,"TERMINAL",0,0,"Step 83, loss: 3.82886004447937, step time: 21.729469299316406ms\r\n",,terminal_output +22341,14948651,"TERMINAL",0,0,"Step 84, loss: 3.8095438480377197, step time: 22.23515510559082ms\r\n",,terminal_output +22342,14948764,"TERMINAL",0,0,"Step 85, loss: 3.7925143241882324, step time: 21.176576614379883ms\r\n",,terminal_output +22343,14948828,"TERMINAL",0,0,"Step 86, loss: 3.7750725746154785, step time: 21.906614303588867ms\r\n",,terminal_output +22344,14948936,"TERMINAL",0,0,"Step 87, loss: 3.756690263748169, step time: 21.871328353881836ms\r\n",,terminal_output +22345,14949047,"TERMINAL",0,0,"Step 88, loss: 3.740264654159546, step time: 21.761655807495117ms\r\n",,terminal_output +22346,14949101,"TERMINAL",0,0,"Step 89, loss: 3.72322940826416, step time: 27.81224250793457ms\r\n",,terminal_output +22347,14949211,"TERMINAL",0,0,"Step 90, loss: 3.7056872844696045, step time: 30.59220314025879ms\r\n",,terminal_output +22348,14949321,"TERMINAL",0,0,"Step 91, loss: 3.689795732498169, step time: 31.151771545410156ms\r\n",,terminal_output +22349,14949374,"TERMINAL",0,0,"Step 92, loss: 3.6734204292297363, step time: 27.11629867553711ms\r\n",,terminal_output +22350,14949483,"TERMINAL",0,0,"Step 93, loss: 3.656757354736328, step time: 24.6584415435791ms\r\n",,terminal_output +22351,14949594,"TERMINAL",0,0,"Step 94, loss: 3.6412148475646973, step time: 30.82418441772461ms\r\n",,terminal_output +22352,14949649,"TERMINAL",0,0,"Step 95, loss: 3.6256070137023926, step time: 26.98802947998047ms\r\n",,terminal_output +22353,14949744,"TERMINAL",0,0,"Step 96, loss: 3.609631061553955, step time: 31.27288818359375ms\r\n",,terminal_output +22354,14949835,"TERMINAL",0,0,"Step 97, loss: 3.5942890644073486, step time: 31.75067901611328ms\r\n",,terminal_output +22355,14949945,"TERMINAL",0,0,"Step 98, loss: 3.5791618824005127, step time: 30.245065689086914ms\r\n",,terminal_output +22356,14950054,"TERMINAL",0,0,"Step 99, loss: 3.5641379356384277, step time: 28.1984806060791ms\r\n",,terminal_output +22357,14950113,"TERMINAL",0,0,"Step 100, loss: 3.548877239227295, step time: 30.138015747070312ms\r\n",,terminal_output +22358,14950224,"TERMINAL",0,0,"Step 101, loss: 3.534043073654175, step time: 29.009342193603516ms\r\n",,terminal_output +22359,14950334,"TERMINAL",0,0,"Step 102, loss: 3.5197222232818604, step time: 31.313180923461914ms\r\n",,terminal_output +22360,14950387,"TERMINAL",0,0,"Step 103, loss: 3.504894256591797, step time: 27.56476402282715ms\r\n",,terminal_output +22361,14950495,"TERMINAL",0,0,"Step 104, loss: 3.490032911300659, step time: 26.085615158081055ms\r\n",,terminal_output +22362,14950604,"TERMINAL",0,0,"Step 105, loss: 3.476072072982788, step time: 29.231548309326172ms\r\n",,terminal_output +22363,14950700,"TERMINAL",0,0,"Step 106, loss: 3.4627645015716553, step time: 26.380300521850586ms\r\n",,terminal_output +22364,14950753,"TERMINAL",0,0,"Step 107, loss: 3.4496617317199707, step time: 25.29168128967285ms\r\n",,terminal_output +22365,14950858,"TERMINAL",0,0,"Step 108, loss: 3.437633752822876, step time: 26.630878448486328ms\r\n",,terminal_output +22366,14950965,"TERMINAL",0,0,"Step 109, loss: 3.423285961151123, step time: 26.501178741455078ms\r\n",,terminal_output 
+22367,14951026,"TERMINAL",0,0,"Step 110, loss: 3.4104373455047607, step time: 26.447296142578125ms\r\n",,terminal_output +22368,14951135,"TERMINAL",0,0,"Step 111, loss: 3.39892578125, step time: 25.55251121520996ms\r\n",,terminal_output +22369,14951204,"TERMINAL",0,0,"Step 112, loss: 3.385221004486084, step time: 25.1004695892334ms\r\n",,terminal_output +22370,14951314,"TERMINAL",0,0,"Step 113, loss: 3.3741700649261475, step time: 23.15998077392578ms\r\n",,terminal_output +22371,14951379,"TERMINAL",0,0,"Step 114, loss: 3.361137866973877, step time: 25.093555450439453ms\r\n",,terminal_output +22372,14951772,"TERMINAL",0,0,"Step 115, loss: 3.349860906600952, step time: 376.4317035675049ms\r\n",,terminal_output +22373,14951850,"TERMINAL",0,0,"Step 116, loss: 3.3367931842803955, step time: 28.492450714111328ms\r\n",,terminal_output +22374,14951960,"TERMINAL",0,0,"Step 117, loss: 3.326042890548706, step time: 24.501800537109375ms\r\n",,terminal_output +22375,14952029,"TERMINAL",0,0,"Step 118, loss: 3.3127527236938477, step time: 23.77629280090332ms\r\n",,terminal_output +22376,14952137,"TERMINAL",0,0,"Step 119, loss: 3.3018746376037598, step time: 22.795677185058594ms\r\n",,terminal_output +22377,14952248,"TERMINAL",0,0,"Step 120, loss: 3.2894997596740723, step time: 25.54154396057129ms\r\n",,terminal_output +22378,14952302,"TERMINAL",0,0,"Step 121, loss: 3.2775073051452637, step time: 23.44655990600586ms\r\n",,terminal_output +22379,14952411,"TERMINAL",0,0,"Step 122, loss: 3.26566481590271, step time: 23.159027099609375ms\r\n",,terminal_output +22380,14952476,"TERMINAL",0,0,"Step 123, loss: 3.253458023071289, step time: 28.575420379638672ms\r\n",,terminal_output +22381,14952587,"TERMINAL",0,0,"Step 124, loss: 3.2420096397399902, step time: 33.318281173706055ms\r\n",,terminal_output +22382,14952698,"TERMINAL",0,0,"Step 125, loss: 3.2294178009033203, step time: 28.809785842895508ms\r\n",,terminal_output +22383,14952753,"TERMINAL",0,0,"Step 126, loss: 3.218095064163208, step time: 25.679349899291992ms\r\n",,terminal_output +22384,14952861,"TERMINAL",0,0,"Step 127, loss: 3.2068698406219482, step time: 28.23162078857422ms\r\n",,terminal_output +22385,14952969,"TERMINAL",0,0,"Step 128, loss: 3.198315382003784, step time: 26.46803855895996ms\r\n",,terminal_output +22386,14953030,"TERMINAL",0,0,"Step 129, loss: 3.185802459716797, step time: 29.436349868774414ms\r\n",,terminal_output +22387,14953140,"TERMINAL",0,0,"Step 130, loss: 3.1729581356048584, step time: 32.58705139160156ms\r\n",,terminal_output +22388,14953309,"TERMINAL",0,0,"Step 131, loss: 3.163830041885376, step time: 29.819726943969727ms\r\nStep 132, loss: 3.152167797088623, step time: 27.982711791992188ms\r\n",,terminal_output +22389,14953425,"TERMINAL",0,0,"Step 133, loss: 3.140949010848999, step time: 29.652833938598633ms\r\n",,terminal_output +22390,14953487,"TERMINAL",0,0,"Step 134, loss: 3.1315464973449707, step time: 26.819229125976562ms\r\n",,terminal_output +22391,14953620,"TERMINAL",0,0,"Step 135, loss: 3.119676351547241, step time: 26.257753372192383ms\r\n",,terminal_output +22392,14953690,"TERMINAL",0,0,"Step 136, loss: 3.1092567443847656, step time: 28.026819229125977ms\r\n",,terminal_output +22393,14953852,"TERMINAL",0,0,"Step 137, loss: 3.0993876457214355, step time: 24.767398834228516ms\r\nStep 138, loss: 3.0875444412231445, step time: 25.89583396911621ms\r\n",,terminal_output +22394,14953964,"TERMINAL",0,0,"Step 139, loss: 3.0779850482940674, step time: 26.628971099853516ms\r\n",,terminal_output 
+22395,14954074,"TERMINAL",0,0,"Step 140, loss: 3.0664916038513184, step time: 25.476455688476562ms\r\n",,terminal_output +22396,14954128,"TERMINAL",0,0,"Step 141, loss: 3.0567498207092285, step time: 24.460792541503906ms\r\n",,terminal_output +22397,14954236,"TERMINAL",0,0,"Step 142, loss: 3.046973705291748, step time: 24.93453025817871ms\r\n",,terminal_output +22398,14954300,"TERMINAL",0,0,"Step 143, loss: 3.0378599166870117, step time: 24.129390716552734ms\r\n",,terminal_output +22399,14954411,"TERMINAL",0,0,"Step 144, loss: 3.0291762351989746, step time: 23.838043212890625ms\r\n",,terminal_output +22400,14954520,"TERMINAL",0,0,"Step 145, loss: 3.0166096687316895, step time: 24.100780487060547ms\r\n",,terminal_output +22401,14954574,"TERMINAL",0,0,"Step 146, loss: 3.0062344074249268, step time: 23.451805114746094ms\r\n",,terminal_output +22402,14954685,"TERMINAL",0,0,"Step 147, loss: 2.9986660480499268, step time: 21.825551986694336ms\r\n",,terminal_output +22403,14954759,"TERMINAL",0,0,"Step 148, loss: 2.98846435546875, step time: 22.31884002685547ms\r\n",,terminal_output +22404,14954833,"TERMINAL",0,0,"Step 149, loss: 2.9767818450927734, step time: 21.410703659057617ms\r\n",,terminal_output +22405,14954941,"TERMINAL",0,0,"Step 150, loss: 2.968921184539795, step time: 24.06001091003418ms\r\n",,terminal_output +22406,14955053,"TERMINAL",0,0,"Step 151, loss: 2.958876371383667, step time: 24.05834197998047ms\r\n",,terminal_output +22407,14955102,"TERMINAL",0,0,"Step 152, loss: 2.9486136436462402, step time: 24.463653564453125ms\r\n",,terminal_output +22408,14955211,"TERMINAL",0,0,"Step 153, loss: 2.9391844272613525, step time: 22.997379302978516ms\r\n",,terminal_output +22409,14955278,"TERMINAL",0,0,"Step 154, loss: 2.929898262023926, step time: 24.065494537353516ms\r\n",,terminal_output +22410,14955391,"TERMINAL",0,0,"Step 155, loss: 2.920175313949585, step time: 23.87547492980957ms\r\n",,terminal_output +22411,14955458,"TERMINAL",0,0,"Step 156, loss: 2.909623146057129, step time: 24.408817291259766ms\r\n",,terminal_output +22412,14955568,"TERMINAL",0,0,"Step 157, loss: 2.9018378257751465, step time: 23.476839065551758ms\r\n",,terminal_output +22413,14955675,"TERMINAL",0,0,"Step 158, loss: 2.8946170806884766, step time: 24.330615997314453ms\r\n",,terminal_output +22414,14955738,"TERMINAL",0,0,"Step 159, loss: 2.887887477874756, step time: 24.24168586730957ms\r\n",,terminal_output +22415,14955843,"TERMINAL",0,0,"Step 160, loss: 2.8765451908111572, step time: 27.205944061279297ms\r\n",,terminal_output +22416,14955938,"TERMINAL",0,0,"Step 161, loss: 2.8653409481048584, step time: 30.991315841674805ms\r\n",,terminal_output +22417,14956002,"TERMINAL",0,0,"Step 162, loss: 2.859830856323242, step time: 30.212879180908203ms\r\n",,terminal_output +22418,14956098,"TERMINAL",0,0,"Step 163, loss: 2.8506007194519043, step time: 25.59661865234375ms\r\n",,terminal_output +22419,14956207,"TERMINAL",0,0,"Step 164, loss: 2.838876962661743, step time: 26.66306495666504ms\r\n",,terminal_output +22420,14956272,"TERMINAL",0,0,"Step 165, loss: 2.8358404636383057, step time: 26.46470069885254ms\r\n",,terminal_output +22421,14956401,"TERMINAL",0,0,"Step 166, loss: 2.8231539726257324, step time: 29.765844345092773ms\r\n",,terminal_output +22422,14956481,"TERMINAL",0,0,"Step 167, loss: 2.815808057785034, step time: 30.961990356445312ms\r\n",,terminal_output +22423,14956589,"TERMINAL",0,0,"Step 168, loss: 2.808018207550049, step time: 31.186342239379883ms\r\n",,terminal_output +22424,14956651,"TERMINAL",0,0,"Step 
169, loss: 2.7996952533721924, step time: 27.722597122192383ms\r\n",,terminal_output +22425,14956756,"TERMINAL",0,0,"Step 170, loss: 2.789252519607544, step time: 31.1129093170166ms\r\n",,terminal_output +22426,14956837,"TERMINAL",0,0,"Step 171, loss: 2.7834794521331787, step time: 27.323484420776367ms\r\n",,terminal_output +22427,14956955,"TERMINAL",0,0,"Step 172, loss: 2.774394989013672, step time: 25.724172592163086ms\r\n",,terminal_output +22428,14957016,"TERMINAL",0,0,"Step 173, loss: 2.7647831439971924, step time: 29.307842254638672ms\r\n",,terminal_output +22429,14957102,"TERMINAL",0,0,"Step 174, loss: 2.7563300132751465, step time: 24.709224700927734ms\r\n",,terminal_output +22430,14957207,"TERMINAL",0,0,"Step 175, loss: 2.749628782272339, step time: 25.384902954101562ms\r\n",,terminal_output +22431,14957315,"TERMINAL",0,0,"Step 176, loss: 2.7394473552703857, step time: 23.89383316040039ms\r\n",,terminal_output +22432,14957368,"TERMINAL",0,0,"Step 177, loss: 2.7305996417999268, step time: 23.48923683166504ms\r\n",,terminal_output +22433,14957476,"TERMINAL",0,0,"Step 178, loss: 2.7226264476776123, step time: 23.96225929260254ms\r\n",,terminal_output +22434,14957593,"TERMINAL",0,0,"Step 179, loss: 2.7135746479034424, step time: 23.56576919555664ms\r\n",,terminal_output +22435,14957640,"TERMINAL",0,0,"Step 180, loss: 2.7070722579956055, step time: 23.161888122558594ms\r\n",,terminal_output +22436,14957747,"TERMINAL",0,0,"Step 181, loss: 2.7000792026519775, step time: 23.635149002075195ms\r\n",,terminal_output +22437,14957814,"TERMINAL",0,0,"Step 182, loss: 2.6963634490966797, step time: 23.209333419799805ms\r\n",,terminal_output +22438,14957926,"TERMINAL",0,0,"Step 183, loss: 2.684445381164551, step time: 22.052764892578125ms\r\n",,terminal_output +22439,14957987,"TERMINAL",0,0,"Step 184, loss: 2.676784038543701, step time: 21.726369857788086ms\r\n",,terminal_output +22440,14958098,"TERMINAL",0,0,"Step 185, loss: 2.673312187194824, step time: 20.808696746826172ms\r\n",,terminal_output +22441,14958207,"TERMINAL",0,0,"Step 186, loss: 2.6605725288391113, step time: 21.424293518066406ms\r\n",,terminal_output +22442,14958257,"TERMINAL",0,0,"Step 187, loss: 2.6533756256103516, step time: 21.178483963012695ms\r\n",,terminal_output +22443,14958364,"TERMINAL",0,0,"Step 188, loss: 2.649083137512207, step time: 21.613597869873047ms\r\n",,terminal_output +22444,14958430,"TERMINAL",0,0,"Step 189, loss: 2.6368520259857178, step time: 20.6296443939209ms\r\n",,terminal_output +22445,14958538,"TERMINAL",0,0,"Step 190, loss: 2.634791135787964, step time: 21.06451988220215ms\r\n",,terminal_output +22446,14958637,"TERMINAL",0,0,"Step 191, loss: 2.6217825412750244, step time: 20.319700241088867ms\r\n",,terminal_output +22447,14958691,"TERMINAL",0,0,"Step 192, loss: 2.6183462142944336, step time: 20.23911476135254ms\r\n",,terminal_output +22448,14958797,"TERMINAL",0,0,"Step 193, loss: 2.6096293926239014, step time: 20.739316940307617ms\r\n",,terminal_output +22449,14958907,"TERMINAL",0,0,"Step 194, loss: 2.6027145385742188, step time: 20.142555236816406ms\r\n",,terminal_output +22450,14958969,"TERMINAL",0,0,"Step 195, loss: 2.595121383666992, step time: 20.89667320251465ms\r\n",,terminal_output +22451,14959079,"TERMINAL",0,0,"Step 196, loss: 2.5882158279418945, step time: 20.76554298400879ms\r\n",,terminal_output +22452,14959133,"TERMINAL",0,0,"Step 197, loss: 2.578895092010498, step time: 22.169828414916992ms\r\n",,terminal_output +22453,14959246,"TERMINAL",0,0,"Step 198, loss: 2.572849750518799, step 
time: 20.966768264770508ms\r\n",,terminal_output +22454,14959356,"TERMINAL",0,0,"Step 199, loss: 2.56436824798584, step time: 31.466960906982422ms\r\n",,terminal_output +22455,14959409,"TERMINAL",0,0,"Step 200, loss: 2.5575742721557617, step time: 27.252912521362305ms\r\n",,terminal_output +22456,14959523,"TERMINAL",0,0,"Step 201, loss: 2.548992395401001, step time: 23.588180541992188ms\r\n",,terminal_output +22457,14959588,"TERMINAL",0,0,"Step 202, loss: 2.5426080226898193, step time: 25.278091430664062ms\r\n",,terminal_output +22458,14959696,"TERMINAL",0,0,"Step 203, loss: 2.536529779434204, step time: 22.957563400268555ms\r\n",,terminal_output +22459,14959760,"TERMINAL",0,0,"Step 204, loss: 2.5341758728027344, step time: 22.5064754486084ms\r\n",,terminal_output +22460,14959849,"TERMINAL",0,0,"Step 205, loss: 2.5361945629119873, step time: 21.251916885375977ms\r\n",,terminal_output +22461,14959962,"TERMINAL",0,0,"Step 206, loss: 2.515373468399048, step time: 23.764371871948242ms\r\n",,terminal_output +22462,14960028,"TERMINAL",0,0,"Step 207, loss: 2.513566732406616, step time: 21.112442016601562ms\r\n",,terminal_output +22463,14960139,"TERMINAL",0,0,"Step 208, loss: 2.509768486022949, step time: 20.657777786254883ms\r\n",,terminal_output +22464,14960203,"TERMINAL",0,0,"Step 209, loss: 2.4949326515197754, step time: 24.712800979614258ms\r\n",,terminal_output +22465,14960317,"TERMINAL",0,0,"Step 210, loss: 2.497114658355713, step time: 24.50871467590332ms\r\n",,terminal_output +22466,14960425,"TERMINAL",0,0,"Step 211, loss: 2.4821276664733887, step time: 24.13010597229004ms\r\n",,terminal_output +22467,14960487,"TERMINAL",0,0,"Step 212, loss: 2.4813179969787598, step time: 24.579286575317383ms\r\n",,terminal_output +22468,14960592,"TERMINAL",0,0,"Step 213, loss: 2.471050977706909, step time: 25.09593963623047ms\r\n",,terminal_output +22469,14960656,"TERMINAL",0,0,"Step 214, loss: 2.465702772140503, step time: 24.817228317260742ms\r\n",,terminal_output +22470,14960766,"TERMINAL",0,0,"Step 215, loss: 2.4580674171447754, step time: 24.02210235595703ms\r\n",,terminal_output +22471,14960834,"TERMINAL",0,0,"Step 216, loss: 2.451810836791992, step time: 23.706436157226562ms\r\n",,terminal_output +22472,14960941,"TERMINAL",0,0,"Step 217, loss: 2.4443066120147705, step time: 26.143550872802734ms\r\n",,terminal_output +22473,14961051,"TERMINAL",0,0,"Step 218, loss: 2.4386816024780273, step time: 24.985074996948242ms\r\n",,terminal_output +22474,14961103,"TERMINAL",0,0,"Step 219, loss: 2.4303596019744873, step time: 25.234222412109375ms\r\n",,terminal_output +22475,14961209,"TERMINAL",0,0,"Step 220, loss: 2.425036668777466, step time: 25.286197662353516ms\r\n",,terminal_output +22476,14961316,"TERMINAL",0,0,"Step 221, loss: 2.4173998832702637, step time: 24.32084083557129ms\r\n",,terminal_output +22477,14961372,"TERMINAL",0,0,"Step 222, loss: 2.41560697555542, step time: 23.846864700317383ms\r\n",,terminal_output +22478,14961484,"TERMINAL",0,0,"Step 223, loss: 2.4109230041503906, step time: 23.42391014099121ms\r\n",,terminal_output +22479,14961549,"TERMINAL",0,0,"Step 224, loss: 2.4057297706604004, step time: 23.607969284057617ms\r\n",,terminal_output +22480,14961641,"TERMINAL",0,0,"Step 225, loss: 2.3906755447387695, step time: 23.832321166992188ms\r\n",,terminal_output +22481,14961748,"TERMINAL",0,0,"Step 226, loss: 2.3895041942596436, step time: 23.5288143157959ms\r\n",,terminal_output +22482,14961824,"TERMINAL",0,0,"Step 227, loss: 2.3822476863861084, step time: 
24.018049240112305ms\r\n",,terminal_output +22483,14961932,"TERMINAL",0,0,"Step 228, loss: 2.373295545578003, step time: 23.78535270690918ms\r\n",,terminal_output +22484,14961992,"TERMINAL",0,0,"Step 229, loss: 2.3682568073272705, step time: 24.395465850830078ms\r\n",,terminal_output +22485,14962107,"TERMINAL",0,0,"Step 230, loss: 2.364257335662842, step time: 23.955821990966797ms\r\n",,terminal_output +22486,14962178,"TERMINAL",0,0,"Step 231, loss: 2.3527579307556152, step time: 23.450851440429688ms\r\n",,terminal_output +22487,14962290,"TERMINAL",0,0,"Step 232, loss: 2.35003399848938, step time: 24.341583251953125ms\r\n",,terminal_output +22488,14962347,"TERMINAL",0,0,"Step 233, loss: 2.3418681621551514, step time: 24.026870727539062ms\r\n",,terminal_output +22489,14962458,"TERMINAL",0,0,"Step 234, loss: 2.336901903152466, step time: 24.60169792175293ms\r\n",,terminal_output +22490,14962567,"TERMINAL",0,0,"Step 235, loss: 2.327061653137207, step time: 25.434494018554688ms\r\n",,terminal_output +22491,14962618,"TERMINAL",0,0,"Step 236, loss: 2.3231282234191895, step time: 31.445026397705078ms\r\n",,terminal_output +22492,14962713,"TERMINAL",0,0,"Step 237, loss: 2.3184328079223633, step time: 29.964685440063477ms\r\n",,terminal_output +22493,14962826,"TERMINAL",0,0,"Step 238, loss: 2.314175605773926, step time: 26.2296199798584ms\r\n",,terminal_output +22494,14962933,"TERMINAL",0,0,"Step 239, loss: 2.312859296798706, step time: 25.942325592041016ms\r\n",,terminal_output +22495,14962993,"TERMINAL",0,0,"Step 240, loss: 2.303056001663208, step time: 26.918649673461914ms\r\n",,terminal_output +22496,14963102,"TERMINAL",0,0,"Step 241, loss: 2.2934303283691406, step time: 28.154850006103516ms\r\n",,terminal_output +22497,14963171,"TERMINAL",0,0,"Step 242, loss: 2.291609287261963, step time: 31.42523765563965ms\r\n",,terminal_output +22498,14963276,"TERMINAL",0,0,"Step 243, loss: 2.2826287746429443, step time: 31.573057174682617ms\r\n",,terminal_output +22499,14963384,"TERMINAL",0,0,"Step 244, loss: 2.277472734451294, step time: 29.27398681640625ms\r\n",,terminal_output +22500,14963451,"TERMINAL",0,0,"Step 245, loss: 2.270986557006836, step time: 28.081178665161133ms\r\n",,terminal_output +22501,14963561,"TERMINAL",0,0,"Step 246, loss: 2.2661612033843994, step time: 30.263662338256836ms\r\n",,terminal_output +22502,14963636,"TERMINAL",0,0,"Step 247, loss: 2.257148265838623, step time: 26.98040008544922ms\r\n",,terminal_output +22503,14963744,"TERMINAL",0,0,"Step 248, loss: 2.2531886100769043, step time: 29.16693687438965ms\r\n",,terminal_output +22504,14963822,"TERMINAL",0,0,"Step 249, loss: 2.2474844455718994, step time: 24.625778198242188ms\r\n",,terminal_output +22505,14963929,"TERMINAL",0,0,"Step 250, loss: 2.2399168014526367, step time: 29.565095901489258ms\r\n",,terminal_output +22506,14964035,"TERMINAL",0,0,"Step 251, loss: 2.234557867050171, step time: 26.33976936340332ms\r\n",,terminal_output +22507,14964088,"TERMINAL",0,0,"Step 252, loss: 2.228756904602051, step time: 24.4748592376709ms\r\n",,terminal_output +22508,14964194,"TERMINAL",0,0,"Step 253, loss: 2.2220966815948486, step time: 24.185895919799805ms\r\n",,terminal_output +22509,14964306,"TERMINAL",0,0,"Step 254, loss: 2.2184646129608154, step time: 23.75173568725586ms\r\n",,terminal_output +22510,14964359,"TERMINAL",0,0,"Step 255, loss: 2.210136890411377, step time: 24.005413055419922ms\r\n",,terminal_output +22511,14964470,"TERMINAL",0,0,"Step 256, loss: 2.2079548835754395, step time: 24.086713790893555ms\r\n",,terminal_output 
+22512,14964542,"TERMINAL",0,0,"Step 257, loss: 2.203686237335205, step time: 23.349761962890625ms\r\n",,terminal_output +22513,14964654,"TERMINAL",0,0,"Step 258, loss: 2.199003219604492, step time: 23.593664169311523ms\r\n",,terminal_output +22514,14964712,"TERMINAL",0,0,"Step 259, loss: 2.1911823749542236, step time: 22.732019424438477ms\r\n",,terminal_output +22515,14964808,"TERMINAL",0,0,"Step 260, loss: 2.182438373565674, step time: 21.36850357055664ms\r\n",,terminal_output +22516,14964915,"TERMINAL",0,0,"Step 261, loss: 2.1803691387176514, step time: 21.142005920410156ms\r\n",,terminal_output +22517,14964979,"TERMINAL",0,0,"Step 262, loss: 2.1756319999694824, step time: 21.918535232543945ms\r\n",,terminal_output +22518,14965091,"TERMINAL",0,0,"Step 263, loss: 2.163356304168701, step time: 21.20804786682129ms\r\n",,terminal_output +22519,14965151,"TERMINAL",0,0,"Step 264, loss: 2.1632845401763916, step time: 21.768808364868164ms\r\n",,terminal_output +22520,14965261,"TERMINAL",0,0,"Step 265, loss: 2.1525795459747314, step time: 21.08597755432129ms\r\n",,terminal_output +22521,14965371,"TERMINAL",0,0,"Step 266, loss: 2.1536543369293213, step time: 22.39251136779785ms\r\n",,terminal_output +22522,14965434,"TERMINAL",0,0,"Step 267, loss: 2.143221616744995, step time: 23.3461856842041ms\r\n",,terminal_output +22523,14965543,"TERMINAL",0,0,"Step 268, loss: 2.1400110721588135, step time: 23.66948127746582ms\r\n",,terminal_output +22524,14965595,"TERMINAL",0,0,"Step 269, loss: 2.1398160457611084, step time: 23.8802433013916ms\r\n",,terminal_output +22525,14965695,"TERMINAL",0,0,"Step 270, loss: 2.131819486618042, step time: 24.45507049560547ms\r\n",,terminal_output +22526,14965804,"TERMINAL",0,0,"Step 271, loss: 2.1238853931427, step time: 23.71358871459961ms\r\n",,terminal_output +22527,14965904,"TERMINAL",0,0,"Step 272, loss: 2.116722583770752, step time: 24.512290954589844ms\r\n",,terminal_output +22528,14965970,"TERMINAL",0,0,"Step 273, loss: 2.112462282180786, step time: 30.342578887939453ms\r\n",,terminal_output +22529,14966075,"TERMINAL",0,0,"Step 274, loss: 2.1093451976776123, step time: 30.32994270324707ms\r\n",,terminal_output +22530,14966182,"TERMINAL",0,0,"Step 275, loss: 2.0991148948669434, step time: 25.89583396911621ms\r\n",,terminal_output +22531,14966235,"TERMINAL",0,0,"Step 276, loss: 2.0970325469970703, step time: 23.341894149780273ms\r\n",,terminal_output +22532,14966343,"TERMINAL",0,0,"Step 277, loss: 2.088059663772583, step time: 28.779029846191406ms\r\n",,terminal_output +22533,14966501,"TERMINAL",0,0,"Step 278, loss: 2.0874924659729004, step time: 27.17304229736328ms\r\n",,terminal_output +22534,14966549,"TERMINAL",0,0,"Step 279, loss: 2.080381393432617, step time: 30.025482177734375ms\r\n",,terminal_output +22535,14966609,"TERMINAL",0,0,"Step 280, loss: 2.0788142681121826, step time: 32.622337341308594ms\r\n",,terminal_output +22536,14966706,"TERMINAL",0,0,"Step 281, loss: 2.076169490814209, step time: 29.884815216064453ms\r\n",,terminal_output +22537,14966822,"TERMINAL",0,0,"Step 282, loss: 2.0696887969970703, step time: 27.596473693847656ms\r\n",,terminal_output +22538,14966886,"TERMINAL",0,0,"Step 283, loss: 2.0630056858062744, step time: 29.767990112304688ms\r\n",,terminal_output +22539,14966992,"TERMINAL",0,0,"Step 284, loss: 2.0541954040527344, step time: 29.315471649169922ms\r\n",,terminal_output +22540,14967103,"TERMINAL",0,0,"Step 285, loss: 2.0531694889068604, step time: 29.200077056884766ms\r\n",,terminal_output +22541,14967167,"TERMINAL",0,0,"Step 286, 
loss: 2.044839859008789, step time: 28.295516967773438ms\r\n",,terminal_output +22542,14967259,"TERMINAL",0,0,"Step 287, loss: 2.0370125770568848, step time: 25.679826736450195ms\r\n",,terminal_output +22543,14967365,"TERMINAL",0,0,"Step 288, loss: 2.0368196964263916, step time: 28.130054473876953ms\r\n",,terminal_output +22544,14967425,"TERMINAL",0,0,"Step 289, loss: 2.0277740955352783, step time: 26.31688117980957ms\r\n",,terminal_output +22545,14967534,"TERMINAL",0,0,"Step 290, loss: 2.0247700214385986, step time: 26.328325271606445ms\r\n",,terminal_output +22546,14967650,".gitignore",0,0,"",ignore,tab +22547,14967709,"TERMINAL",0,0,"Step 291, loss: 2.0143864154815674, step time: 25.390148162841797ms\r\nStep 292, loss: 2.0139894485473633, step time: 26.461362838745117ms\r\n",,terminal_output +22548,14967816,"TERMINAL",0,0,"Step 293, loss: 2.0088539123535156, step time: 26.060104370117188ms\r\n",,terminal_output +22549,14967896,"TERMINAL",0,0,"Step 294, loss: 2.0013463497161865, step time: 26.08942985534668ms\r\n",,terminal_output +22550,14968008,"TERMINAL",0,0,"Step 295, loss: 1.9987679719924927, step time: 27.00352668762207ms\r\n",,terminal_output +22551,14968058,"TERMINAL",0,0,"Step 296, loss: 1.9936459064483643, step time: 25.004863739013672ms\r\n",,terminal_output +22552,14968163,"TERMINAL",0,0,"Step 297, loss: 1.9947872161865234, step time: 23.558378219604492ms\r\n",,terminal_output +22553,14968268,"TERMINAL",0,0,"Step 298, loss: 1.9827895164489746, step time: 24.91903305053711ms\r\n",,terminal_output +22554,14968327,"TERMINAL",0,0,"Step 299, loss: 1.9736448526382446, step time: 24.0323543548584ms\r\n",,terminal_output +22555,14968434,"TERMINAL",0,0,"Step 300, loss: 1.9749618768692017, step time: 24.43838119506836ms\r\n",,terminal_output +22556,14968543,"TERMINAL",0,0,"Step 301, loss: 1.9687206745147705, step time: 23.83565902709961ms\r\n",,terminal_output +22557,14968631,"TERMINAL",0,0,"Step 302, loss: 1.9611330032348633, step time: 24.01900291442871ms\r\n",,terminal_output +22558,14968682,"TERMINAL",0,0,"Step 303, loss: 1.9542256593704224, step time: 23.312807083129883ms\r\n",,terminal_output +22559,14968810,"TERMINAL",0,0,"Step 304, loss: 1.9503086805343628, step time: 24.808883666992188ms\r\n",,terminal_output +22560,14968864,"TERMINAL",0,0,"Step 305, loss: 1.946618914604187, step time: 25.440692901611328ms\r\n",,terminal_output +22561,14968960,"TERMINAL",0,0,"Step 306, loss: 1.9372565746307373, step time: 23.38695526123047ms\r\n",,terminal_output +22562,14969384,"TERMINAL",0,0,"Step 307, loss: 1.935705304145813, step time: 363.1758689880371ms\r\n",,terminal_output +22563,14969435,"TERMINAL",0,0,"Step 308, loss: 1.9276556968688965, step time: 30.220508575439453ms\r\n",,terminal_output +22564,14969533,"TERMINAL",0,0,"Step 309, loss: 1.9260207414627075, step time: 25.797367095947266ms\r\n",,terminal_output +22565,14969591,"TERMINAL",0,0,"Step 310, loss: 1.9232734441757202, step time: 24.370193481445312ms\r\n",,terminal_output +22566,14969686,"TERMINAL",0,0,"Step 311, loss: 1.9287058115005493, step time: 24.80459213256836ms\r\n",,terminal_output +22567,14969830,"TERMINAL",0,0,"Step 312, loss: 1.933699607849121, step time: 23.49710464477539ms\r\n",,terminal_output +22568,14969904,"TERMINAL",0,0,"Step 313, loss: 1.9090187549591064, step time: 21.948814392089844ms\r\n",,terminal_output +22569,14969964,"TERMINAL",0,0,"Step 314, loss: 1.9099225997924805, step time: 20.197629928588867ms\r\n",,terminal_output +22570,14970073,"TERMINAL",0,0,"Step 315, loss: 1.908752202987671, step time: 
28.827667236328125ms\r\n",,terminal_output +22571,14970138,"TERMINAL",0,0,"Step 316, loss: 1.889748454093933, step time: 23.404836654663086ms\r\n",,terminal_output +22572,14970243,"TERMINAL",0,0,"Step 317, loss: 1.9000216722488403, step time: 21.43573760986328ms\r\n",,terminal_output +22573,14970310,"TERMINAL",0,0,"Step 318, loss: 1.8806573152542114, step time: 26.098966598510742ms\r\n",,terminal_output +22574,14970402,"TERMINAL",0,0,"Step 319, loss: 1.8862649202346802, step time: 24.927854537963867ms\r\n",,terminal_output +22575,14970508,"TERMINAL",0,0,"Step 320, loss: 1.8762736320495605, step time: 26.162385940551758ms\r\n",,terminal_output +22576,14970573,"TERMINAL",0,0,"Step 321, loss: 1.8707338571548462, step time: 23.848533630371094ms\r\n",,terminal_output +22577,14970684,"TERMINAL",0,0,"Step 322, loss: 1.8664977550506592, step time: 24.41716194152832ms\r\n",,terminal_output +22578,14970759,"TERMINAL",0,0,"Step 323, loss: 1.8617593050003052, step time: 23.619651794433594ms\r\n",,terminal_output +22579,14970843,"TERMINAL",0,0,"Step 324, loss: 1.85305917263031, step time: 22.958993911743164ms\r\n",,terminal_output +22580,14970949,"TERMINAL",0,0,"Step 325, loss: 1.8539050817489624, step time: 40.079593658447266ms\r\n",,terminal_output +22581,14971058,"TERMINAL",0,0,"Step 326, loss: 1.842026948928833, step time: 36.54026985168457ms\r\n",,terminal_output +22582,14971168,"TERMINAL",0,0,"Step 327, loss: 1.8411537408828735, step time: 24.255752563476562ms\r\n",,terminal_output +22583,14971218,"TERMINAL",0,0,"Step 328, loss: 1.8359006643295288, step time: 25.22134780883789ms\r\n",,terminal_output +22584,14971324,"TERMINAL",0,0,"Step 329, loss: 1.8328015804290771, step time: 23.533344268798828ms\r\n",,terminal_output +22585,14971435,"TERMINAL",0,0,"Step 330, loss: 1.8379309177398682, step time: 24.54090118408203ms\r\n",,terminal_output +22586,14971488,"TERMINAL",0,0,"Step 331, loss: 1.8349276781082153, step time: 24.476289749145508ms\r\n",,terminal_output +22587,14971594,"TERMINAL",0,0,"Step 332, loss: 1.8157826662063599, step time: 25.34651756286621ms\r\n",,terminal_output +22588,14971701,"TERMINAL",0,0,"Step 333, loss: 1.8148834705352783, step time: 23.914813995361328ms\r\n",,terminal_output +22589,14971761,"TERMINAL",0,0,"Step 334, loss: 1.8114131689071655, step time: 24.283409118652344ms\r\n",,terminal_output +22590,14971870,"TERMINAL",0,0,"Step 335, loss: 1.804710865020752, step time: 22.4912166595459ms\r\n",,terminal_output +22591,14971968,"TERMINAL",0,0,"Step 336, loss: 1.7972182035446167, step time: 22.17578887939453ms\r\n",,terminal_output +22592,14972030,"TERMINAL",0,0,"Step 337, loss: 1.7976598739624023, step time: 27.41837501525879ms\r\n",,terminal_output +22593,14972135,"TERMINAL",0,0,"Step 338, loss: 1.7889975309371948, step time: 24.25217628479004ms\r\n",,terminal_output +22594,14972199,"TERMINAL",0,0,"Step 339, loss: 1.7815519571304321, step time: 23.643016815185547ms\r\n",,terminal_output +22595,14972305,"TERMINAL",0,0,"Step 340, loss: 1.7831528186798096, step time: 25.394678115844727ms\r\n",,terminal_output +22596,14972414,"TERMINAL",0,0,"Step 341, loss: 1.774870753288269, step time: 23.485422134399414ms\r\n",,terminal_output +22597,14972466,"TERMINAL",0,0,"Step 342, loss: 1.7688610553741455, step time: 23.926734924316406ms\r\n",,terminal_output +22598,14972563,"TERMINAL",0,0,"Step 343, loss: 1.765661358833313, step time: 22.620201110839844ms\r\n",,terminal_output +22599,14972676,"TERMINAL",0,0,"Step 344, loss: 1.76467764377594, step time: 
28.515338897705078ms\r\n",,terminal_output +22600,14972737,"TERMINAL",0,0,"Step 345, loss: 1.7697638273239136, step time: 30.726194381713867ms\r\n",,terminal_output +22601,14972827,"TERMINAL",0,0,"Step 346, loss: 1.768739938735962, step time: 28.67746353149414ms\r\n",,terminal_output +22602,14972937,"TERMINAL",0,0,"Step 347, loss: 1.7608920335769653, step time: 23.458242416381836ms\r\n",,terminal_output +22603,14973045,"TERMINAL",0,0,"Step 348, loss: 1.7474069595336914, step time: 26.784420013427734ms\r\n",,terminal_output +22604,14973096,"TERMINAL",0,0,"Step 349, loss: 1.7483028173446655, step time: 24.92833137512207ms\r\n",,terminal_output +22605,14973205,"TERMINAL",0,0,"Step 350, loss: 1.7398681640625, step time: 29.16741371154785ms\r\n",,terminal_output +22606,14973316,"TERMINAL",0,0,"Step 351, loss: 1.7340863943099976, step time: 29.66165542602539ms\r\n",,terminal_output +22607,14973426,"TERMINAL",0,0,"Step 352, loss: 1.7352701425552368, step time: 29.792308807373047ms\r\n",,terminal_output +22608,14973467,"TERMINAL",0,0,"Step 353, loss: 1.7200307846069336, step time: 25.67148208618164ms\r\n",,terminal_output +22609,14973573,"TERMINAL",0,0,"Step 354, loss: 1.7251263856887817, step time: 29.575824737548828ms\r\n",,terminal_output +22610,14973647,"TERMINAL",0,0,"Step 355, loss: 1.7134332656860352, step time: 25.50339698791504ms\r\n",,terminal_output +22611,14973780,"genie.py",0,0,"",python,tab +22612,14973867,"TERMINAL",0,0,"Step 356, loss: 1.7115235328674316, step time: 24.520397186279297ms\r\nStep 357, loss: 1.704771637916565, step time: 27.371644973754883ms\r\n",,terminal_output +22613,14973939,"TERMINAL",0,0,"Step 358, loss: 1.7025079727172852, step time: 25.27332305908203ms\r\n",,terminal_output +22614,14974046,"TERMINAL",0,0,"Step 359, loss: 1.6948442459106445, step time: 27.498960494995117ms\r\n",,terminal_output +22615,14974113,"TERMINAL",0,0,"Step 360, loss: 1.6919516324996948, step time: 23.298978805541992ms\r\n",,terminal_output +22616,14974192,"TERMINAL",0,0,"Step 361, loss: 1.6861127614974976, step time: 24.105548858642578ms\r\n",,terminal_output +22617,14974300,"TERMINAL",0,0,"Step 362, loss: 1.6819181442260742, step time: 22.861003875732422ms\r\n",,terminal_output +22618,14974368,"TERMINAL",0,0,"Step 363, loss: 1.6798443794250488, step time: 22.25327491760254ms\r\n",,terminal_output +22619,14974475,"TERMINAL",0,0,"Step 364, loss: 1.6796128749847412, step time: 22.377490997314453ms\r\n",,terminal_output +22620,14974540,"TERMINAL",0,0,"Step 365, loss: 1.677086591720581, step time: 22.006750106811523ms\r\n",,terminal_output +22621,14974640,"TERMINAL",0,0,"Step 366, loss: 1.6753015518188477, step time: 23.08177947998047ms\r\n",,terminal_output +22622,14974748,"TERMINAL",0,0,"Step 367, loss: 1.6628443002700806, step time: 22.068500518798828ms\r\n",,terminal_output +22623,14974810,"TERMINAL",0,0,"Step 368, loss: 1.6612424850463867, step time: 20.36762237548828ms\r\n",,terminal_output +22624,14974924,"TERMINAL",0,0,"Step 369, loss: 1.65904700756073, step time: 19.26565170288086ms\r\n",,terminal_output +22625,14974981,"TERMINAL",0,0,"Step 370, loss: 1.6527385711669922, step time: 20.81918716430664ms\r\n",,terminal_output +22626,14975007,"genie.py",3053,0,"",python,selection_mouse +22627,14975010,"genie.py",3052,0,"",python,selection_command +22628,14975077,"TERMINAL",0,0,"Step 371, loss: 1.64058518409729, step time: 19.913911819458008ms\r\n",,terminal_output +22629,14975189,"TERMINAL",0,0,"Step 372, loss: 1.6485143899917603, step time: 19.531965255737305ms\r\n",,terminal_output 
+22630,14975252,"TERMINAL",0,0,"Step 373, loss: 1.6346646547317505, step time: 20.178556442260742ms\r\n",,terminal_output +22631,14975360,"TERMINAL",0,0,"Step 374, loss: 1.6354538202285767, step time: 19.909381866455078ms\r\n",,terminal_output +22632,14975411,"TERMINAL",0,0,"Step 375, loss: 1.6324361562728882, step time: 19.690513610839844ms\r\n",,terminal_output +22633,14975517,"TERMINAL",0,0,"Step 376, loss: 1.6290884017944336, step time: 19.943714141845703ms\r\n",,terminal_output +22634,14975532,"genie.py",3151,0,"",python,selection_mouse +22635,14975583,"TERMINAL",0,0,"Step 377, loss: 1.6195703744888306, step time: 20.1718807220459ms\r\n",,terminal_output +22636,14975692,"TERMINAL",0,0,"Step 378, loss: 1.6226003170013428, step time: 20.68638801574707ms\r\n",,terminal_output +22637,14975755,"TERMINAL",0,0,"Step 379, loss: 1.6148349046707153, step time: 20.032167434692383ms\r\n",,terminal_output +22638,14975831,"genie.py",3149,5,"batch",python,selection_mouse +22639,14975845,"TERMINAL",0,0,"Step 380, loss: 1.6084556579589844, step time: 18.842458724975586ms\r\n",,terminal_output +22640,14975928,"TERMINAL",0,0,"Step 381, loss: 1.606419324874878, step time: 19.0582275390625ms\r\n",,terminal_output +22641,14976037,"TERMINAL",0,0,"Step 382, loss: 1.598508358001709, step time: 19.11449432373047ms\r\n",,terminal_output +22642,14976105,"TERMINAL",0,0,"Step 383, loss: 1.5989664793014526, step time: 24.137496948242188ms\r\n",,terminal_output +22643,14976215,"TERMINAL",0,0,"Step 384, loss: 1.59083092212677, step time: 27.716636657714844ms\r\n",,terminal_output +22644,14976325,"TERMINAL",0,0,"Step 385, loss: 1.588877558708191, step time: 24.399518966674805ms\r\n",,terminal_output +22645,14976378,"TERMINAL",0,0,"Step 386, loss: 1.5848087072372437, step time: 26.269197463989258ms\r\n",,terminal_output +22646,14976485,"TERMINAL",0,0,"Step 387, loss: 1.5757603645324707, step time: 24.199724197387695ms\r\n",,terminal_output +22647,14976553,"TERMINAL",0,0,"Step 388, loss: 1.5725334882736206, step time: 27.916669845581055ms\r\n",,terminal_output +22648,14976568,"genie.py",3167,0,"",python,selection_mouse +22649,14976650,"TERMINAL",0,0,"Step 389, loss: 1.5705269575119019, step time: 30.167818069458008ms\r\n",,terminal_output +22650,14976715,"genie.py",3165,5,"shape",python,selection_mouse +22651,14976757,"TERMINAL",0,0,"Step 390, loss: 1.5644710063934326, step time: 29.417037963867188ms\r\n",,terminal_output +22652,14976838,"TERMINAL",0,0,"Step 391, loss: 1.5602211952209473, step time: 30.669450759887695ms\r\n",,terminal_output +22653,14976948,"TERMINAL",0,0,"Step 392, loss: 1.562431812286377, step time: 25.826454162597656ms\r\n",,terminal_output +22654,14977006,"TERMINAL",0,0,"Step 393, loss: 1.5606448650360107, step time: 24.484872817993164ms\r\n",,terminal_output +22655,14977113,"TERMINAL",0,0,"Step 394, loss: 1.5558085441589355, step time: 27.434825897216797ms\r\n",,terminal_output +22656,14977219,"TERMINAL",0,0,"Step 395, loss: 1.5468007326126099, step time: 24.28913116455078ms\r\n",,terminal_output +22657,14977278,"TERMINAL",0,0,"Step 396, loss: 1.5474019050598145, step time: 22.029399871826172ms\r\n",,terminal_output +22658,14977379,"genie.py",3152,0,"",python,selection_mouse +22659,14977393,"TERMINAL",0,0,"Step 397, loss: 1.5586743354797363, step time: 27.994394302368164ms\r\n",,terminal_output +22660,14977457,"TERMINAL",0,0,"Step 398, loss: 1.5342706441879272, step time: 20.899057388305664ms\r\n",,terminal_output +22661,14977536,"genie.py",3149,5,"batch",python,selection_mouse 
+22662,14977587,"TERMINAL",0,0,"Step 399, loss: 1.5321224927902222, step time: 19.69289779663086ms\r\n",,terminal_output +22663,14977653,"TERMINAL",0,0,"Step 400, loss: 1.5307132005691528, step time: 19.989013671875ms\r\n",,terminal_output +22664,14977754,"TERMINAL",0,0,"Step 401, loss: 1.524828553199768, step time: 18.688678741455078ms\r\n",,terminal_output +22665,14977837,"TERMINAL",0,0,"Step 402, loss: 1.5180339813232422, step time: 19.24729347229004ms\r\n",,terminal_output +22666,14977891,"TERMINAL",0,0,"Step 403, loss: 1.5181188583374023, step time: 19.652128219604492ms\r\n",,terminal_output +22667,14977987,"TERMINAL",0,0,"Step 404, loss: 1.5072308778762817, step time: 18.39137077331543ms\r\n",,terminal_output +22668,14978097,"TERMINAL",0,0,"Step 405, loss: 1.5080701112747192, step time: 18.73493194580078ms\r\n",,terminal_output +22669,14978151,"TERMINAL",0,0,"Step 406, loss: 1.5000861883163452, step time: 19.44279670715332ms\r\n",,terminal_output +22670,14978248,"TERMINAL",0,0,"Step 407, loss: 1.5002923011779785, step time: 18.625974655151367ms\r\n",,terminal_output +22671,14978358,"TERMINAL",0,0,"Step 408, loss: 1.4919601678848267, step time: 18.210172653198242ms\r\n",,terminal_output +22672,14978442,"genie.py",3208,0,"",python,selection_mouse +22673,14978457,"genie.py",3207,0,"",python,selection_command +22674,14978515,"TERMINAL",0,0,"Step 409, loss: 1.4880644083023071, step time: 20.021915435791016ms\r\nStep 410, loss: 1.4830247163772583, step time: 18.73946189880371ms\r\n",,terminal_output +22675,14978634,"TERMINAL",0,0,"Step 411, loss: 1.4854751825332642, step time: 18.163204193115234ms\r\n",,terminal_output +22676,14978687,"TERMINAL",0,0,"Step 412, loss: 1.4794962406158447, step time: 20.274877548217773ms\r\n",,terminal_output +22677,14978861,"TERMINAL",0,0,"Step 413, loss: 1.4864975214004517, step time: 20.025014877319336ms\r\nStep 414, loss: 1.48575758934021, step time: 18.599510192871094ms\r\n",,terminal_output +22678,14978970,"TERMINAL",0,0,"Step 415, loss: 1.4745323657989502, step time: 18.777132034301758ms\r\n",,terminal_output +22679,14979040,"TERMINAL",0,0,"Step 416, loss: 1.4651577472686768, step time: 22.77517318725586ms\r\n",,terminal_output +22680,14979066,"genie.py",3146,0,"",python,selection_mouse +22681,14979170,"TERMINAL",0,0,"Step 417, loss: 1.4717004299163818, step time: 24.715423583984375ms\r\n",,terminal_output +22682,14979223,"TERMINAL",0,0,"Step 418, loss: 1.4537779092788696, step time: 20.550966262817383ms\r\n",,terminal_output +22683,14979332,"TERMINAL",0,0,"Step 419, loss: 1.4557291269302368, step time: 18.97263526916504ms\r\n",,terminal_output +22684,14979395,"TERMINAL",0,0,"Step 420, loss: 1.4513742923736572, step time: 18.799781799316406ms\r\n",,terminal_output +22685,14979504,"TERMINAL",0,0,"Step 421, loss: 1.4448370933532715, step time: 19.231081008911133ms\r\n",,terminal_output +22686,14979557,"TERMINAL",0,0,"Step 422, loss: 1.440406084060669, step time: 18.46909523010254ms\r\n",,terminal_output +22687,14979664,"TERMINAL",0,0,"Step 423, loss: 1.4372453689575195, step time: 18.32294464111328ms\r\n",,terminal_output +22688,14979741,"TERMINAL",0,0,"Step 424, loss: 1.43199622631073, step time: 18.44024658203125ms\r\n",,terminal_output +22689,14979808,"genie.py",3208,0,"",python,selection_mouse +22690,14979808,"genie.py",3207,0,"",python,selection_command +22691,14979810,"TERMINAL",0,0,"Step 425, loss: 1.431006669998169, step time: 18.527507781982422ms\r\n",,terminal_output +22692,14979916,"TERMINAL",0,0,"Step 426, loss: 1.4180400371551514, step time: 
18.003225326538086ms\r\n",,terminal_output +22693,14980017,"TERMINAL",0,0,"Step 427, loss: 1.4234896898269653, step time: 18.603801727294922ms\r\n",,terminal_output +22694,14980081,"TERMINAL",0,0,"Step 428, loss: 1.4158741235733032, step time: 19.563913345336914ms\r\n",,terminal_output +22695,14980190,"TERMINAL",0,0,"Step 429, loss: 1.4200292825698853, step time: 28.234481811523438ms\r\n",,terminal_output +22696,14980253,"TERMINAL",0,0,"Step 430, loss: 1.426841139793396, step time: 29.237985610961914ms\r\n",,terminal_output +22697,14980347,"TERMINAL",0,0,"Step 431, loss: 1.4208214282989502, step time: 29.80327606201172ms\r\n",,terminal_output +22698,14980457,"TERMINAL",0,0,"Step 432, loss: 1.3993219137191772, step time: 28.343677520751953ms\r\n",,terminal_output +22699,14980486,"genie.py",3141,0,"",python,selection_mouse +22700,14980543,"TERMINAL",0,0,"Step 433, loss: 1.4059268236160278, step time: 29.5107364654541ms\r\n",,terminal_output +22701,14980652,"TERMINAL",0,0,"Step 434, loss: 1.3997879028320312, step time: 28.485536575317383ms\r\n",,terminal_output +22702,14980718,"TERMINAL",0,0,"Step 435, loss: 1.3929475545883179, step time: 29.43873405456543ms\r\n",,terminal_output +22703,14980825,"TERMINAL",0,0,"Step 436, loss: 1.3864744901657104, step time: 27.42743492126465ms\r\n",,terminal_output +22704,14980930,"TERMINAL",0,0,"Step 437, loss: 1.3867191076278687, step time: 24.132728576660156ms\r\n",,terminal_output +22705,14980990,"TERMINAL",0,0,"Step 438, loss: 1.37984037399292, step time: 31.639814376831055ms\r\n",,terminal_output +22706,14981097,"TERMINAL",0,0,"Step 439, loss: 1.3735120296478271, step time: 28.81455421447754ms\r\n",,terminal_output +22707,14981177,"genie.py",3185,0,"",python,selection_mouse +22708,14981192,"genie.py",3184,0,"",python,selection_command +22709,14981193,"TERMINAL",0,0,"Step 440, loss: 1.369338870048523, step time: 20.9043025970459ms\r\n",,terminal_output +22710,14981263,"TERMINAL",0,0,"Step 441, loss: 1.3704378604888916, step time: 19.469738006591797ms\r\n",,terminal_output +22711,14981374,"TERMINAL",0,0,"Step 442, loss: 1.3620857000350952, step time: 20.615339279174805ms\r\n",,terminal_output +22712,14981458,"TERMINAL",0,0,"Step 443, loss: 1.3647187948226929, step time: 20.06220817565918ms\r\n",,terminal_output +22713,14981511,"TERMINAL",0,0,"Step 444, loss: 1.3733322620391846, step time: 24.158239364624023ms\r\n",,terminal_output +22714,14981621,"TERMINAL",0,0,"Step 445, loss: 1.3751143217086792, step time: 28.37967872619629ms\r\n",,terminal_output +22715,14981731,"TERMINAL",0,0,"Step 446, loss: 1.351228952407837, step time: 22.99189567565918ms\r\n",,terminal_output +22716,14981785,"TERMINAL",0,0,"Step 447, loss: 1.347723364830017, step time: 21.78668975830078ms\r\n",,terminal_output +22717,14981905,"TERMINAL",0,0,"Step 448, loss: 1.3533928394317627, step time: 24.92070198059082ms\r\n",,terminal_output +22718,14981958,"TERMINAL",0,0,"Step 449, loss: 1.339816927909851, step time: 24.005413055419922ms\r\n",,terminal_output +22719,14982066,"TERMINAL",0,0,"Step 450, loss: 1.335673213005066, step time: 22.888898849487305ms\r\n",,terminal_output +22720,14982130,"TERMINAL",0,0,"Step 451, loss: 1.3353307247161865, step time: 21.643638610839844ms\r\n",,terminal_output +22721,14982240,"TERMINAL",0,0,"Step 452, loss: 1.3297843933105469, step time: 22.61519432067871ms\r\n",,terminal_output +22722,14982300,"TERMINAL",0,0,"Step 453, loss: 1.3206138610839844, step time: 24.25527572631836ms\r\n",,terminal_output +22723,14982409,"TERMINAL",0,0,"Step 454, loss: 
1.3234169483184814, step time: 21.410226821899414ms\r\n",,terminal_output +22724,14982474,"TERMINAL",0,0,"Step 455, loss: 1.3164966106414795, step time: 19.775867462158203ms\r\n",,terminal_output +22725,14982583,"TERMINAL",0,0,"Step 456, loss: 1.3112704753875732, step time: 19.968509674072266ms\r\n",,terminal_output +22726,14982647,"TERMINAL",0,0,"Step 457, loss: 1.3126453161239624, step time: 20.284175872802734ms\r\n",,terminal_output +22727,14982711,"genie.py",2587,0,"",python,selection_mouse +22728,14982761,"TERMINAL",0,0,"Step 458, loss: 1.3124066591262817, step time: 22.15433120727539ms\r\n",,terminal_output +22729,14982827,"TERMINAL",0,0,"Step 459, loss: 1.3157877922058105, step time: 25.335073471069336ms\r\n",,terminal_output +22730,14982938,"TERMINAL",0,0,"Step 460, loss: 1.29970121383667, step time: 23.48804473876953ms\r\n",,terminal_output +22731,14983000,"TERMINAL",0,0,"Step 461, loss: 1.3035368919372559, step time: 21.90709114074707ms\r\n",,terminal_output +22732,14983102,"TERMINAL",0,0,"Step 462, loss: 1.2995736598968506, step time: 21.999835968017578ms\r\n",,terminal_output +22733,14983211,"TERMINAL",0,0,"Step 463, loss: 1.2920695543289185, step time: 27.767181396484375ms\r\n",,terminal_output +22734,14983262,"TERMINAL",0,0,"Step 464, loss: 1.289419412612915, step time: 22.339820861816406ms\r\n",,terminal_output +22735,14983369,"TERMINAL",0,0,"Step 465, loss: 1.283176302909851, step time: 19.85931396484375ms\r\n",,terminal_output +22736,14983475,"TERMINAL",0,0,"Step 466, loss: 1.2809444665908813, step time: 21.03567123413086ms\r\n",,terminal_output +22737,14983529,"TERMINAL",0,0,"Step 467, loss: 1.271168828010559, step time: 20.481348037719727ms\r\n",,terminal_output +22738,14983643,"TERMINAL",0,0,"Step 468, loss: 1.2713996171951294, step time: 20.174503326416016ms\r\n",,terminal_output +22739,14983694,"TERMINAL",0,0,"Step 469, loss: 1.269385576248169, step time: 25.622844696044922ms\r\n",,terminal_output +22740,14983801,"TERMINAL",0,0,"Step 470, loss: 1.2630517482757568, step time: 27.07958221435547ms\r\n",,terminal_output +22741,14983906,"TERMINAL",0,0,"Step 471, loss: 1.2626309394836426, step time: 22.374629974365234ms\r\n",,terminal_output +22742,14984226,"genie.py",2491,0,"",python,selection_mouse +22743,14984283,"TERMINAL",0,0,"Step 472, loss: 1.252328634262085, step time: 313.4164810180664ms\r\nStep 473, loss: 1.255348801612854, step time: 25.82263946533203ms\r\n",,terminal_output +22744,14984388,"TERMINAL",0,0,"Step 474, loss: 1.2566207647323608, step time: 21.85845375061035ms\r\n",,terminal_output +22745,14984499,"TERMINAL",0,0,"Step 475, loss: 1.2719959020614624, step time: 29.183387756347656ms\r\n",,terminal_output +22746,14984551,"TERMINAL",0,0,"Step 476, loss: 1.2646039724349976, step time: 25.262832641601562ms\r\n",,terminal_output +22747,14984645,"TERMINAL",0,0,"Step 477, loss: 1.2395377159118652, step time: 21.90709114074707ms\r\n",,terminal_output +22748,14984754,"TERMINAL",0,0,"Step 478, loss: 1.251782774925232, step time: 21.193981170654297ms\r\n",,terminal_output +22749,14984806,"TERMINAL",0,0,"Step 479, loss: 1.2448276281356812, step time: 20.832300186157227ms\r\n",,terminal_output +22750,14984912,"TERMINAL",0,0,"Step 480, loss: 1.2344149351119995, step time: 21.64316177368164ms\r\n",,terminal_output +22751,14985020,"TERMINAL",0,0,"Step 481, loss: 1.231053113937378, step time: 20.88785171508789ms\r\n",,terminal_output +22752,14985078,"TERMINAL",0,0,"Step 482, loss: 1.229439616203308, step time: 20.857810974121094ms\r\n",,terminal_output 
+22753,14985174,"TERMINAL",0,0,"Step 483, loss: 1.2178796529769897, step time: 19.1342830657959ms\r\n",,terminal_output +22754,14985275,"genie.py",2227,0,"",python,selection_mouse +22755,14985293,"TERMINAL",0,0,"Step 484, loss: 1.2216471433639526, step time: 20.395517349243164ms\r\n",,terminal_output +22756,14985344,"TERMINAL",0,0,"Step 485, loss: 1.2126779556274414, step time: 19.93703842163086ms\r\n",,terminal_output +22757,14985452,"TERMINAL",0,0,"Step 486, loss: 1.206534504890442, step time: 23.201704025268555ms\r\n",,terminal_output +22758,14985513,"TERMINAL",0,0,"Step 487, loss: 1.2095564603805542, step time: 21.158695220947266ms\r\n",,terminal_output +22759,14985626,"TERMINAL",0,0,"Step 488, loss: 1.1966551542282104, step time: 19.846677780151367ms\r\n",,terminal_output +22760,14985686,"TERMINAL",0,0,"Step 489, loss: 1.201863408088684, step time: 19.387006759643555ms\r\n",,terminal_output +22761,14985797,"TERMINAL",0,0,"Step 490, loss: 1.2006927728652954, step time: 20.491838455200195ms\r\n",,terminal_output +22762,14985849,"TERMINAL",0,0,"Step 491, loss: 1.2147539854049683, step time: 19.326448440551758ms\r\n",,terminal_output +22763,14985955,"TERMINAL",0,0,"Step 492, loss: 1.206970453262329, step time: 20.545005798339844ms\r\n",,terminal_output +22764,14986062,"TERMINAL",0,0,"Step 493, loss: 1.1923500299453735, step time: 29.193639755249023ms\r\n",,terminal_output +22765,14986116,"TERMINAL",0,0,"Step 494, loss: 1.1945216655731201, step time: 25.588512420654297ms\r\n",,terminal_output +22766,14986212,"TERMINAL",0,0,"Step 495, loss: 1.1904375553131104, step time: 22.408008575439453ms\r\n",,terminal_output +22767,14986324,"TERMINAL",0,0,"Step 496, loss: 1.179870367050171, step time: 21.15035057067871ms\r\n",,terminal_output +22768,14986389,"TERMINAL",0,0,"Step 497, loss: 1.1766337156295776, step time: 20.54572105407715ms\r\n",,terminal_output +22769,14986496,"TERMINAL",0,0,"Step 498, loss: 1.1791664361953735, step time: 19.553661346435547ms\r\n",,terminal_output +22770,14986558,"TERMINAL",0,0,"Step 499, loss: 1.1626346111297607, step time: 20.002126693725586ms\r\n",,terminal_output +22771,14986811,"genie.py",2185,0,"",python,selection_mouse +22772,14986970,"genie.py",2178,17,"tokenizer_outputs",python,selection_mouse +22773,14987714,"genie.py",2218,0,"",python,selection_mouse +22774,14988591,"genie.py",2273,0,"",python,selection_mouse +22775,14988765,"genie.py",2273,5,"mihir",python,selection_mouse +22776,14989456,"genie.py",2190,0,"",python,selection_mouse +22777,14989620,"genie.py",2178,17,"tokenizer_outputs",python,selection_mouse +22778,14989923,"TERMINAL",0,0,"Step 500, loss: 1.17430579662323, step time: 26.15642547607422ms\r\n",,terminal_output +22779,14990035,"TERMINAL",0,0,"Step 501, loss: 1.157493233680725, step time: 28.115510940551758ms\r\n",,terminal_output +22780,14990142,"TERMINAL",0,0,"Step 502, loss: 1.161539912223816, step time: 25.99954605102539ms\r\n",,terminal_output +22781,14990202,"TERMINAL",0,0,"Step 503, loss: 1.1609666347503662, step time: 26.823759078979492ms\r\n",,terminal_output +22782,14990309,"TERMINAL",0,0,"Step 504, loss: 1.152453899383545, step time: 24.981260299682617ms\r\n",,terminal_output +22783,14990383,"TERMINAL",0,0,"Step 505, loss: 1.163409948348999, step time: 23.901939392089844ms\r\n",,terminal_output +22784,14990490,"TERMINAL",0,0,"Step 506, loss: 1.1576741933822632, step time: 23.305177688598633ms\r\n",,terminal_output +22785,14990585,"TERMINAL",0,0,"Step 507, loss: 1.1524845361709595, step time: 25.567054748535156ms\r\n",,terminal_output 
+22786,14990678,"TERMINAL",0,0,"Step 508, loss: 1.1514530181884766, step time: 25.060415267944336ms\r\n",,terminal_output +22787,14990730,"TERMINAL",0,0,"Step 509, loss: 1.154720664024353, step time: 25.931835174560547ms\r\n",,terminal_output +22788,14990837,"TERMINAL",0,0,"Step 510, loss: 1.133008360862732, step time: 24.79720115661621ms\r\n",,terminal_output +22789,14990942,"TERMINAL",0,0,"Step 511, loss: 1.1375198364257812, step time: 23.445844650268555ms\r\n",,terminal_output +22790,14991018,"TERMINAL",0,0,"Step 512, loss: 1.129751205444336, step time: 23.699283599853516ms\r\n",,terminal_output +22791,14991124,"TERMINAL",0,0,"Step 513, loss: 1.132415533065796, step time: 22.174835205078125ms\r\n",,terminal_output +22792,14991185,"TERMINAL",0,0,"Step 514, loss: 1.1178984642028809, step time: 22.887706756591797ms\r\n",,terminal_output +22793,14991292,"TERMINAL",0,0,"Step 515, loss: 1.1220932006835938, step time: 20.976543426513672ms\r\n",,terminal_output +22794,14991369,"TERMINAL",0,0,"Step 516, loss: 1.1150169372558594, step time: 21.08311653137207ms\r\n",,terminal_output +22795,14991475,"TERMINAL",0,0,"Step 517, loss: 1.1112602949142456, step time: 22.01390266418457ms\r\n",,terminal_output +22796,14991526,"TERMINAL",0,0,"Step 518, loss: 1.1092438697814941, step time: 20.674943923950195ms\r\n",,terminal_output +22797,14991635,"TERMINAL",0,0,"Step 519, loss: 1.1025214195251465, step time: 21.543502807617188ms\r\n",,terminal_output +22798,14991697,"TERMINAL",0,0,"Step 520, loss: 1.1051510572433472, step time: 22.546052932739258ms\r\n",,terminal_output +22799,14991810,"TERMINAL",0,0,"Step 521, loss: 1.09348464012146, step time: 21.63553237915039ms\r\n",,terminal_output +22800,14991913,"TERMINAL",0,0,"Step 522, loss: 1.1028261184692383, step time: 20.976543426513672ms\r\n",,terminal_output +22801,14991975,"TERMINAL",0,0,"Step 523, loss: 1.1163997650146484, step time: 21.172285079956055ms\r\n",,terminal_output +22802,14992080,"TERMINAL",0,0,"Step 524, loss: 1.1180698871612549, step time: 28.44858169555664ms\r\n",,terminal_output +22803,14992141,"TERMINAL",0,0,"Step 525, loss: 1.092007040977478, step time: 27.85634994506836ms\r\n",,terminal_output +22804,14992232,"TERMINAL",0,0,"Step 526, loss: 1.0880751609802246, step time: 23.885011672973633ms\r\n",,terminal_output +22805,14992351,"TERMINAL",0,0,"Step 527, loss: 1.0981714725494385, step time: 21.64173126220703ms\r\n",,terminal_output +22806,14992406,"TERMINAL",0,0,"Step 528, loss: 1.0787508487701416, step time: 25.53868293762207ms\r\n",,terminal_output +22807,14992501,"TERMINAL",0,0,"Step 529, loss: 1.0808920860290527, step time: 23.838281631469727ms\r\n",,terminal_output +22808,14992592,"TERMINAL",0,0,"Step 530, loss: 1.0736045837402344, step time: 28.36322784423828ms\r\n",,terminal_output +22809,14992699,"TERMINAL",0,0,"Step 531, loss: 1.0738109350204468, step time: 29.48451042175293ms\r\n",,terminal_output +22810,14992857,"TERMINAL",0,0,"Step 532, loss: 1.061598777770996, step time: 28.03182601928711ms\r\nStep 533, loss: 1.0670210123062134, step time: 25.59041976928711ms\r\n",,terminal_output +22811,14992966,"TERMINAL",0,0,"Step 534, loss: 1.0581022500991821, step time: 27.64153480529785ms\r\n",,terminal_output +22812,14993070,"TERMINAL",0,0,"Step 535, loss: 1.0537769794464111, step time: 25.71558952331543ms\r\n",,terminal_output +22813,14993163,"TERMINAL",0,0,"Step 536, loss: 1.0535145998001099, step time: 26.955604553222656ms\r\n",,terminal_output +22814,14993255,"TERMINAL",0,0,"Step 537, loss: 1.047345757484436, step time: 
25.594711303710938ms\r\n",,terminal_output +22815,14993308,"TERMINAL",0,0,"Step 538, loss: 1.0560411214828491, step time: 24.57427978515625ms\r\n",,terminal_output +22816,14993427,"TERMINAL",0,0,"Step 539, loss: 1.059638500213623, step time: 25.41661262512207ms\r\n",,terminal_output +22817,14993460,"genie.py",2721,0,"",python,selection_mouse +22818,14993476,"genie.py",2720,0,"",python,selection_command +22819,14993523,"TERMINAL",0,0,"Step 540, loss: 1.0689268112182617, step time: 24.174928665161133ms\r\n",,terminal_output +22820,14993578,"TERMINAL",0,0,"Step 541, loss: 1.0409772396087646, step time: 23.65875244140625ms\r\n",,terminal_output +22821,14993669,"TERMINAL",0,0,"Step 542, loss: 1.0409109592437744, step time: 23.793935775756836ms\r\n",,terminal_output +22822,14993787,"TERMINAL",0,0,"Step 543, loss: 1.0464885234832764, step time: 23.01621437072754ms\r\n",,terminal_output +22823,14993840,"TERMINAL",0,0,"Step 544, loss: 1.0266858339309692, step time: 24.331331253051758ms\r\n",,terminal_output +22824,14993948,"TERMINAL",0,0,"Step 545, loss: 1.0317106246948242, step time: 24.745464324951172ms\r\n",,terminal_output +22825,14994056,"TERMINAL",0,0,"Step 546, loss: 1.0239524841308594, step time: 25.045156478881836ms\r\n",,terminal_output +22826,14994119,"TERMINAL",0,0,"Step 547, loss: 1.0209827423095703, step time: 23.719072341918945ms\r\n",,terminal_output +22827,14994470,"TERMINAL",0,0,"Step 548, loss: 1.015069603919983, step time: 342.5173759460449ms\r\n",,terminal_output +22828,14994579,"TERMINAL",0,0,"Step 549, loss: 1.0147931575775146, step time: 25.550365447998047ms\r\n",,terminal_output +22829,14994632,"TERMINAL",0,0,"Step 550, loss: 1.0094388723373413, step time: 22.961139678955078ms\r\n",,terminal_output +22830,14994739,"TERMINAL",0,0,"Step 551, loss: 1.0047441720962524, step time: 21.730899810791016ms\r\n",,terminal_output +22831,14994790,"genie.py",2549,0,"",python,selection_mouse +22832,14994843,"TERMINAL",0,0,"Step 552, loss: 1.007210612297058, step time: 22.690534591674805ms\r\n",,terminal_output +22833,14994908,"TERMINAL",0,0,"Step 553, loss: 1.0106480121612549, step time: 21.38066291809082ms\r\n",,terminal_output +22834,14994945,"genie.py",2545,7,"outputs",python,selection_mouse +22835,14995010,"TERMINAL",0,0,"Step 554, loss: 1.037363886833191, step time: 22.758007049560547ms\r\n",,terminal_output +22836,14995116,"TERMINAL",0,0,"Step 555, loss: 1.0230921506881714, step time: 21.441936492919922ms\r\n",,terminal_output +22837,14995167,"TERMINAL",0,0,"Step 556, loss: 0.9982299208641052, step time: 23.380041122436523ms\r\n",,terminal_output +22838,14995271,"TERMINAL",0,0,"Step 557, loss: 1.0114660263061523, step time: 22.16815948486328ms\r\n",,terminal_output +22839,14995331,"TERMINAL",0,0,"Step 558, loss: 0.9964351058006287, step time: 27.554750442504883ms\r\n",,terminal_output +22840,14995426,"TERMINAL",0,0,"Step 559, loss: 0.9950186014175415, step time: 29.993057250976562ms\r\n",,terminal_output +22841,14995536,"TERMINAL",0,0,"Step 560, loss: 0.989184558391571, step time: 26.351451873779297ms\r\n",,terminal_output +22842,14995554,"genie.py",2582,0,"",python,selection_mouse +22843,14995601,"TERMINAL",0,0,"Step 561, loss: 0.9884769916534424, step time: 21.978139877319336ms\r\n",,terminal_output +22844,14995718,"genie.py",2573,12,"video_tokens",python,selection_mouse +22845,14995725,"TERMINAL",0,0,"Step 562, loss: 0.9754377603530884, step time: 24.74379539489746ms\r\n",,terminal_output +22846,14995776,"TERMINAL",0,0,"Step 563, loss: 0.9823172688484192, step time: 
24.27196502685547ms\r\n",,terminal_output +22847,14995884,"TERMINAL",0,0,"Step 564, loss: 0.9704325795173645, step time: 27.0388126373291ms\r\n",,terminal_output +22848,14995994,"TERMINAL",0,0,"Step 565, loss: 0.9697561264038086, step time: 28.850317001342773ms\r\n",,terminal_output +22849,14996050,"TERMINAL",0,0,"Step 566, loss: 0.9656413197517395, step time: 28.601646423339844ms\r\n",,terminal_output +22850,14996143,"TERMINAL",0,0,"Step 567, loss: 0.9647376537322998, step time: 25.463581085205078ms\r\n",,terminal_output +22851,14996257,"TERMINAL",0,0,"Step 568, loss: 0.9575032591819763, step time: 27.173995971679688ms\r\n",,terminal_output +22852,14996308,"genie.py",2721,0,"",python,selection_mouse +22853,14996322,"genie.py",2720,0,"",python,selection_command +22854,14996336,"TERMINAL",0,0,"Step 569, loss: 0.9583829045295715, step time: 27.296781539916992ms\r\n",,terminal_output +22855,14996419,"TERMINAL",0,0,"Step 570, loss: 0.9588454961776733, step time: 26.757240295410156ms\r\n",,terminal_output +22856,14996464,"genie.py",2721,0,"",python,selection_mouse +22857,14996474,"genie.py",2720,0,"",python,selection_command +22858,14996536,"TERMINAL",0,0,"Step 571, loss: 0.9611420631408691, step time: 27.36973762512207ms\r\n",,terminal_output +22859,14996589,"TERMINAL",0,0,"Step 572, loss: 0.9548993110656738, step time: 25.146484375ms\r\n",,terminal_output +22860,14996697,"TERMINAL",0,0,"Step 573, loss: 0.9583221077919006, step time: 24.63984489440918ms\r\n",,terminal_output +22861,14996856,"TERMINAL",0,0,"Step 574, loss: 0.9551094770431519, step time: 25.207996368408203ms\r\nStep 575, loss: 0.9606450200080872, step time: 24.792909622192383ms\r\n",,terminal_output +22862,14996973,"TERMINAL",0,0,"Step 576, loss: 0.9409807324409485, step time: 24.161815643310547ms\r\n",,terminal_output +22863,14997030,"TERMINAL",0,0,"Step 577, loss: 0.9425835609436035, step time: 24.202346801757812ms\r\n",,terminal_output +22864,14997143,"TERMINAL",0,0,"Step 578, loss: 0.942296028137207, step time: 24.022579193115234ms\r\n",,terminal_output +22865,14997207,"TERMINAL",0,0,"Step 579, loss: 0.9313137531280518, step time: 24.199962615966797ms\r\n",,terminal_output +22866,14997316,"TERMINAL",0,0,"Step 580, loss: 0.9300329089164734, step time: 23.688554763793945ms\r\n",,terminal_output +22867,14997423,"TERMINAL",0,0,"Step 581, loss: 0.9297688007354736, step time: 23.783445358276367ms\r\n",,terminal_output +22868,14997475,"genie.py",2757,0,"",python,selection_mouse +22869,14997496,"TERMINAL",0,0,"Step 582, loss: 0.9211683869361877, step time: 22.15576171875ms\r\n",,terminal_output +22870,14997563,"TERMINAL",0,0,"Step 583, loss: 0.9179749488830566, step time: 22.00150489807129ms\r\n",,terminal_output +22871,14997642,"genie.py",2751,8,"mask_rng",python,selection_mouse +22872,14997694,"TERMINAL",0,0,"Step 584, loss: 0.9220871329307556, step time: 22.556543350219727ms\r\n",,terminal_output +22873,14997760,"TERMINAL",0,0,"Step 585, loss: 0.9103701710700989, step time: 21.92544937133789ms\r\n",,terminal_output +22874,14997823,"TERMINAL",0,0,"Step 586, loss: 0.9102852940559387, step time: 23.114442825317383ms\r\n",,terminal_output +22875,14997916,"TERMINAL",0,0,"Step 587, loss: 0.9218392372131348, step time: 21.947860717773438ms\r\n",,terminal_output +22876,14998022,"TERMINAL",0,0,"Step 588, loss: 0.9140766263008118, step time: 21.845102310180664ms\r\n",,terminal_output +22877,14998121,"TERMINAL",0,0,"Step 589, loss: 0.9220240712165833, step time: 22.723913192749023ms\r\n",,terminal_output +22878,14998182,"TERMINAL",0,0,"Step 
590, loss: 0.9106693267822266, step time: 21.718502044677734ms\r\n",,terminal_output +22879,14998288,"TERMINAL",0,0,"Step 591, loss: 0.8932555317878723, step time: 20.705699920654297ms\r\n",,terminal_output +22880,14998342,"TERMINAL",0,0,"Step 592, loss: 0.9005535840988159, step time: 21.260738372802734ms\r\n",,terminal_output +22881,14998451,"TERMINAL",0,0,"Step 593, loss: 0.8969929218292236, step time: 22.462129592895508ms\r\n",,terminal_output +22882,14998517,"TERMINAL",0,0,"Step 594, loss: 0.8891106843948364, step time: 21.546602249145508ms\r\n",,terminal_output +22883,14998532,"genie.py",2775,0,"",python,selection_mouse +22884,14998639,"TERMINAL",0,0,"Step 595, loss: 0.8904015421867371, step time: 22.308349609375ms\r\n",,terminal_output +22885,14998678,"genie.py",2771,8,"mask_rng",python,selection_mouse +22886,14998702,"TERMINAL",0,0,"Step 596, loss: 0.8831669688224792, step time: 29.537200927734375ms\r\n",,terminal_output +22887,14998821,"TERMINAL",0,0,"Step 597, loss: 0.8768750429153442, step time: 27.943134307861328ms\r\n",,terminal_output +22888,14998875,"TERMINAL",0,0,"Step 598, loss: 0.8780931830406189, step time: 23.97441864013672ms\r\n",,terminal_output +22889,14998970,"TERMINAL",0,0,"Step 599, loss: 0.8754285573959351, step time: 22.12834358215332ms\r\n",,terminal_output +22890,14999075,"TERMINAL",0,0,"Step 600, loss: 0.8719177842140198, step time: 26.647329330444336ms\r\n",,terminal_output +22891,14999185,"TERMINAL",0,0,"Step 601, loss: 0.8828992247581482, step time: 25.353431701660156ms\r\n",,terminal_output +22892,14999239,"TERMINAL",0,0,"Step 602, loss: 0.878451406955719, step time: 28.848648071289062ms\r\n",,terminal_output +22893,14999346,"TERMINAL",0,0,"Step 603, loss: 0.8759955763816833, step time: 29.183626174926758ms\r\n",,terminal_output +22894,14999387,"genie.py",2757,0,"",python,selection_mouse +22895,14999457,"TERMINAL",0,0,"Step 604, loss: 0.866679847240448, step time: 27.994155883789062ms\r\n",,terminal_output +22896,14999470,"genie.py",2751,8,"mask_rng",python,selection_mouse +22897,14999529,"TERMINAL",0,0,"Step 605, loss: 0.866732656955719, step time: 24.88541603088379ms\r\n",,terminal_output +22898,14999597,"TERMINAL",0,0,"Step 606, loss: 0.8760367035865784, step time: 27.593374252319336ms\r\n",,terminal_output +22899,14999713,"TERMINAL",0,0,"Step 607, loss: 0.8583146333694458, step time: 23.586034774780273ms\r\n",,terminal_output +22900,14999768,"TERMINAL",0,0,"Step 608, loss: 0.8550072908401489, step time: 23.908138275146484ms\r\n",,terminal_output +22901,14999858,"TERMINAL",0,0,"Step 609, loss: 0.8618142008781433, step time: 25.15721321105957ms\r\n",,terminal_output +22902,14999966,"TERMINAL",0,0,"Step 610, loss: 0.8494434356689453, step time: 23.314476013183594ms\r\n",,terminal_output +22903,15000164,"genie.py",2813,0,"",python,selection_mouse +22904,15000165,"TERMINAL",0,0,"Step 611, loss: 0.842527449131012, step time: 24.33943748474121ms\r\n",,terminal_output +22905,15000165,"TERMINAL",0,0,"Step 612, loss: 0.8454859256744385, step time: 23.53811264038086ms\r\n",,terminal_output +22906,15000213,"TERMINAL",0,0,"Step 613, loss: 0.8369922041893005, step time: 22.480487823486328ms\r\n",,terminal_output +22907,15000321,"TERMINAL",0,0,"Step 614, loss: 0.8376908898353577, step time: 22.243261337280273ms\r\n",,terminal_output +22908,15000389,"TERMINAL",0,0,"Step 615, loss: 0.825103223323822, step time: 21.489620208740234ms\r\n",,terminal_output +22909,15000495,"TERMINAL",0,0,"Step 616, loss: 0.8318465352058411, step time: 
23.238182067871094ms\r\n",,terminal_output +22910,15000601,"TERMINAL",0,0,"Step 617, loss: 0.8285577893257141, step time: 21.943330764770508ms\r\n",,terminal_output +22911,15000653,"TERMINAL",0,0,"Step 618, loss: 0.8243942856788635, step time: 22.432804107666016ms\r\n",,terminal_output +22912,15000759,"TERMINAL",0,0,"Step 619, loss: 0.8345832228660583, step time: 20.891189575195312ms\r\n",,terminal_output +22913,15000828,"TERMINAL",0,0,"Step 620, loss: 0.8574151992797852, step time: 21.251916885375977ms\r\n",,terminal_output +22914,15000879,"genie.py",2793,0,"",python,selection_mouse +22915,15000931,"TERMINAL",0,0,"Step 621, loss: 0.8562306761741638, step time: 22.883176803588867ms\r\n",,terminal_output +22916,15001038,"TERMINAL",0,0,"Step 622, loss: 0.8212878108024597, step time: 22.657155990600586ms\r\n",,terminal_output +22917,15001101,"TERMINAL",0,0,"Step 623, loss: 0.8364267945289612, step time: 23.03004264831543ms\r\n",,terminal_output +22918,15001213,"TERMINAL",0,0,"Step 624, loss: 0.836966335773468, step time: 21.87371253967285ms\r\n",,terminal_output +22919,15001263,"TERMINAL",0,0,"Step 625, loss: 0.8082644939422607, step time: 22.080659866333008ms\r\n",,terminal_output +22920,15001370,"TERMINAL",0,0,"Step 626, loss: 0.8306080102920532, step time: 22.291898727416992ms\r\n",,terminal_output +22921,15001435,"TERMINAL",0,0,"Step 627, loss: 0.8055179119110107, step time: 22.274017333984375ms\r\n",,terminal_output +22922,15001542,"TERMINAL",0,0,"Step 628, loss: 0.8133110404014587, step time: 21.654129028320312ms\r\n",,terminal_output +22923,15001650,"TERMINAL",0,0,"Step 629, loss: 0.8020552396774292, step time: 21.915435791015625ms\r\n",,terminal_output +22924,15001715,"genie.py",2815,0,"",python,selection_mouse +22925,15001797,"TERMINAL",0,0,"Step 630, loss: 0.8007915616035461, step time: 22.438526153564453ms\r\nStep 631, loss: 0.7971873879432678, step time: 21.659374237060547ms\r\n",,terminal_output +22926,15001902,"TERMINAL",0,0,"Step 632, loss: 0.7930225133895874, step time: 21.665573120117188ms\r\n",,terminal_output +22927,15001958,"TERMINAL",0,0,"Step 633, loss: 0.7912254333496094, step time: 21.851062774658203ms\r\n",,terminal_output +22928,15002079,"TERMINAL",0,0,"Step 634, loss: 0.7821639776229858, step time: 28.485536575317383ms\r\n",,terminal_output +22929,15002143,"TERMINAL",0,0,"Step 635, loss: 0.789650022983551, step time: 28.905391693115234ms\r\n",,terminal_output +22930,15002249,"TERMINAL",0,0,"Step 636, loss: 0.7752587795257568, step time: 23.504972457885742ms\r\n",,terminal_output +22931,15002355,"TERMINAL",0,0,"Step 637, loss: 0.7901594042778015, step time: 21.162033081054688ms\r\n",,terminal_output +22932,15002423,"TERMINAL",0,0,"Step 638, loss: 0.7910169363021851, step time: 24.99222755432129ms\r\n",,terminal_output +22933,15002498,"TERMINAL",0,0,"Step 639, loss: 0.7918881773948669, step time: 24.103403091430664ms\r\n",,terminal_output +22934,15002625,"TERMINAL",0,0,"Step 640, loss: 0.7874628901481628, step time: 28.385639190673828ms\r\n",,terminal_output +22935,15002718,"TERMINAL",0,0,"Step 641, loss: 0.7799544930458069, step time: 29.191970825195312ms\r\n",,terminal_output +22936,15002781,"TERMINAL",0,0,"Step 642, loss: 0.7807058691978455, step time: 28.05948257446289ms\r\n",,terminal_output +22937,15002857,"TERMINAL",0,0,"Step 643, loss: 0.771154522895813, step time: 25.983572006225586ms\r\n",,terminal_output +22938,15002965,"TERMINAL",0,0,"Step 644, loss: 0.7695800065994263, step time: 29.392719268798828ms\r\n",,terminal_output 
+22939,15003071,"TERMINAL",0,0,"Step 645, loss: 0.770960807800293, step time: 26.53670310974121ms\r\n",,terminal_output +22940,15003141,"TERMINAL",0,0,"Step 646, loss: 0.7554818987846375, step time: 25.267362594604492ms\r\n",,terminal_output +22941,15003250,"TERMINAL",0,0,"Step 647, loss: 0.7644054293632507, step time: 27.435779571533203ms\r\n",,terminal_output +22942,15003320,"TERMINAL",0,0,"Step 648, loss: 0.7619789838790894, step time: 25.909423828125ms\r\n",,terminal_output +22943,15003429,"TERMINAL",0,0,"Step 649, loss: 0.7512655854225159, step time: 25.763988494873047ms\r\n",,terminal_output +22944,15003493,"TERMINAL",0,0,"Step 650, loss: 0.7471102476119995, step time: 24.5969295501709ms\r\n",,terminal_output +22945,15003612,"TERMINAL",0,0,"Step 651, loss: 0.7496411800384521, step time: 23.790597915649414ms\r\n",,terminal_output +22946,15003664,"TERMINAL",0,0,"Step 652, loss: 0.7462794780731201, step time: 23.7123966217041ms\r\n",,terminal_output +22947,15003772,"TERMINAL",0,0,"Step 653, loss: 0.7367504835128784, step time: 24.911165237426758ms\r\n",,terminal_output +22948,15003854,"TERMINAL",0,0,"Step 654, loss: 0.7423818111419678, step time: 23.87094497680664ms\r\n",,terminal_output +22949,15003936,"TERMINAL",0,0,"Step 655, loss: 0.7423661351203918, step time: 24.631977081298828ms\r\n",,terminal_output +22950,15004053,"TERMINAL",0,0,"Step 656, loss: 0.7469186782836914, step time: 25.725126266479492ms\r\n",,terminal_output +22951,15004111,"TERMINAL",0,0,"Step 657, loss: 0.7423498630523682, step time: 22.52340316772461ms\r\n",,terminal_output +22952,15004218,"TERMINAL",0,0,"Step 658, loss: 0.7507853507995605, step time: 21.608591079711914ms\r\n",,terminal_output +22953,15004285,"TERMINAL",0,0,"Step 659, loss: 0.7405874133110046, step time: 21.081924438476562ms\r\n",,terminal_output +22954,15004394,"TERMINAL",0,0,"Step 660, loss: 0.7288626432418823, step time: 21.4383602142334ms\r\n",,terminal_output +22955,15004457,"TERMINAL",0,0,"Step 661, loss: 0.7319261431694031, step time: 21.494626998901367ms\r\n",,terminal_output +22956,15004599,"TERMINAL",0,0,"Step 662, loss: 0.7346347570419312, step time: 21.452665328979492ms\r\n",,terminal_output +22957,15004628,"TERMINAL",0,0,"Step 663, loss: 0.7274311184883118, step time: 22.320270538330078ms\r\n",,terminal_output +22958,15004735,"TERMINAL",0,0,"Step 664, loss: 0.7114967107772827, step time: 21.082162857055664ms\r\n",,terminal_output +22959,15004843,"TERMINAL",0,0,"Step 665, loss: 0.7269500494003296, step time: 22.463083267211914ms\r\n",,terminal_output +22960,15004895,"TERMINAL",0,0,"Step 666, loss: 0.7086783647537231, step time: 22.257328033447266ms\r\n",,terminal_output +22961,15005001,"TERMINAL",0,0,"Step 667, loss: 0.7095675468444824, step time: 22.26996421813965ms\r\n",,terminal_output +22962,15005063,"TERMINAL",0,0,"Step 668, loss: 0.7081115245819092, step time: 21.619796752929688ms\r\n",,terminal_output +22963,15005171,"TERMINAL",0,0,"Step 669, loss: 0.7028464078903198, step time: 21.184444427490234ms\r\n",,terminal_output +22964,15005278,"TERMINAL",0,0,"Step 670, loss: 0.7040877342224121, step time: 23.550987243652344ms\r\n",,terminal_output +22965,15005341,"TERMINAL",0,0,"Step 671, loss: 0.7036203742027283, step time: 23.386716842651367ms\r\n",,terminal_output +22966,15005456,"TERMINAL",0,0,"Step 672, loss: 0.7190189361572266, step time: 29.27398681640625ms\r\n",,terminal_output +22967,15005516,"TERMINAL",0,0,"Step 673, loss: 0.7203040719032288, step time: 27.76026725769043ms\r\n",,terminal_output 
+22968,15005632,"TERMINAL",0,0,"Step 674, loss: 0.7003445029258728, step time: 24.30438995361328ms\r\n",,terminal_output +22969,15005695,"TERMINAL",0,0,"Step 675, loss: 0.6894780993461609, step time: 22.499561309814453ms\r\n",,terminal_output +22970,15005813,"TERMINAL",0,0,"Step 676, loss: 0.7060141563415527, step time: 20.57337760925293ms\r\n",,terminal_output +22971,15005865,"TERMINAL",0,0,"Step 677, loss: 0.6847976446151733, step time: 26.058673858642578ms\r\n",,terminal_output +22972,15005975,"TERMINAL",0,0,"Step 678, loss: 0.6867152452468872, step time: 28.879642486572266ms\r\n",,terminal_output +22973,15006083,"TERMINAL",0,0,"Step 679, loss: 0.6852007508277893, step time: 29.892683029174805ms\r\n",,terminal_output +22974,15006137,"TERMINAL",0,0,"Step 680, loss: 0.6797508001327515, step time: 27.730703353881836ms\r\n",,terminal_output +22975,15006233,"TERMINAL",0,0,"Step 681, loss: 0.6746798753738403, step time: 24.435758590698242ms\r\n",,terminal_output +22976,15006338,"TERMINAL",0,0,"Step 682, loss: 0.6817401647567749, step time: 27.584314346313477ms\r\n",,terminal_output +22977,15006446,"TERMINAL",0,0,"Step 683, loss: 0.6834551692008972, step time: 25.091171264648438ms\r\n",,terminal_output +22978,15006497,"TERMINAL",0,0,"Step 684, loss: 0.6946853399276733, step time: 26.908159255981445ms\r\n",,terminal_output +22979,15006604,"TERMINAL",0,0,"Step 685, loss: 0.6970546841621399, step time: 24.548053741455078ms\r\n",,terminal_output +22980,15006713,"TERMINAL",0,0,"Step 686, loss: 0.6735013723373413, step time: 24.443864822387695ms\r\n",,terminal_output +22981,15006765,"TERMINAL",0,0,"Step 687, loss: 0.6645267605781555, step time: 25.385379791259766ms\r\n",,terminal_output +22982,15006875,"TERMINAL",0,0,"Step 688, loss: 0.6799657940864563, step time: 24.164199829101562ms\r\n",,terminal_output +22983,15006972,"TERMINAL",0,0,"Step 689, loss: 0.6594918966293335, step time: 23.691654205322266ms\r\n",,terminal_output +22984,15007036,"TERMINAL",0,0,"Step 690, loss: 0.6585860252380371, step time: 24.358034133911133ms\r\n",,terminal_output +22985,15007132,"TERMINAL",0,0,"Step 691, loss: 0.6577052474021912, step time: 23.48041534423828ms\r\n",,terminal_output +22986,15007418,"genie.py",1811,0,"",python,selection_mouse +22987,15007469,"TERMINAL",0,0,"Step 692, loss: 0.6588120460510254, step time: 335.3393077850342ms\r\n",,terminal_output +22988,15007585,"TERMINAL",0,0,"Step 693, loss: 0.6451415419578552, step time: 25.495290756225586ms\r\n",,terminal_output +22989,15007646,"models/dynamics.py",0,0,"",python,tab +22990,15007665,"TERMINAL",0,0,"Step 694, loss: 0.6465836763381958, step time: 22.417783737182617ms\r\n",,terminal_output +22991,15007728,"TERMINAL",0,0,"Step 695, loss: 0.6529825329780579, step time: 22.469520568847656ms\r\n",,terminal_output +22992,15007832,"TERMINAL",0,0,"Step 696, loss: 0.6367067694664001, step time: 22.864341735839844ms\r\n",,terminal_output +22993,15007913,"TERMINAL",0,0,"Step 697, loss: 0.6442373394966125, step time: 21.805524826049805ms\r\n",,terminal_output +22994,15007978,"TERMINAL",0,0,"Step 698, loss: 0.64857417345047, step time: 21.963119506835938ms\r\n",,terminal_output +22995,15008067,"TERMINAL",0,0,"Step 699, loss: 0.6619282364845276, step time: 21.29340171813965ms\r\n",,terminal_output +22996,15008174,"TERMINAL",0,0,"Step 700, loss: 0.6493209004402161, step time: 23.055553436279297ms\r\n",,terminal_output +22997,15008252,"TERMINAL",0,0,"Step 701, loss: 0.6399906277656555, step time: 21.822690963745117ms\r\n",,terminal_output 
+22998,15008332,"TERMINAL",0,0,"Step 702, loss: 0.6380950212478638, step time: 21.184444427490234ms\r\n",,terminal_output +22999,15008437,"TERMINAL",0,0,"Step 703, loss: 0.6364947557449341, step time: 20.81894874572754ms\r\n",,terminal_output +23000,15008501,"TERMINAL",0,0,"Step 704, loss: 0.6232731938362122, step time: 22.19867706298828ms\r\n",,terminal_output +23001,15008624,"TERMINAL",0,0,"Step 705, loss: 0.6256018280982971, step time: 21.167278289794922ms\r\n",,terminal_output +23002,15008688,"TERMINAL",0,0,"Step 706, loss: 0.6284440159797668, step time: 34.760236740112305ms\r\n",,terminal_output +23003,15008797,"TERMINAL",0,0,"Step 707, loss: 0.6162070035934448, step time: 30.071258544921875ms\r\n",,terminal_output +23004,15008916,"TERMINAL",0,0,"Step 708, loss: 0.6193194389343262, step time: 28.45907211303711ms\r\n",,terminal_output +23005,15008979,"TERMINAL",0,0,"Step 709, loss: 0.6187372207641602, step time: 23.78678321838379ms\r\n",,terminal_output +23006,15009059,"TERMINAL",0,0,"Step 710, loss: 0.620647132396698, step time: 24.320602416992188ms\r\n",,terminal_output +23007,15009168,"TERMINAL",0,0,"Step 711, loss: 0.6149144172668457, step time: 25.32792091369629ms\r\n",,terminal_output +23008,15009229,"TERMINAL",0,0,"Step 712, loss: 0.6280742883682251, step time: 23.81277084350586ms\r\n",,terminal_output +23009,15009335,"TERMINAL",0,0,"Step 713, loss: 0.6167269945144653, step time: 29.233694076538086ms\r\n",,terminal_output +23010,15009444,"TERMINAL",0,0,"Step 714, loss: 0.6116113662719727, step time: 28.98240089416504ms\r\n",,terminal_output +23011,15009498,"TERMINAL",0,0,"Step 715, loss: 0.6012856364250183, step time: 28.116226196289062ms\r\n",,terminal_output +23012,15009577,"models/dynamics.py",612,0,"",python,selection_mouse +23013,15009600,"TERMINAL",0,0,"Step 716, loss: 0.6051496863365173, step time: 26.18241310119629ms\r\n",,terminal_output +23014,15009722,"TERMINAL",0,0,"Step 717, loss: 0.6039435267448425, step time: 28.570175170898438ms\r\n",,terminal_output +23015,15009769,"TERMINAL",0,0,"Step 718, loss: 0.5955711603164673, step time: 27.502059936523438ms\r\n",,terminal_output +23016,15009874,"TERMINAL",0,0,"Step 719, loss: 0.5874272584915161, step time: 25.905132293701172ms\r\n",,terminal_output +23017,15009984,"TERMINAL",0,0,"Step 720, loss: 0.5959934592247009, step time: 27.76646614074707ms\r\n",,terminal_output +23018,15010039,"TERMINAL",0,0,"Step 721, loss: 0.5878584980964661, step time: 24.349451065063477ms\r\n",,terminal_output +23019,15010148,"TERMINAL",0,0,"Step 722, loss: 0.5840296745300293, step time: 26.411771774291992ms\r\n",,terminal_output +23020,15010179,"models/dynamics.py",651,0,"",python,selection_mouse +23021,15010217,"TERMINAL",0,0,"Step 723, loss: 0.5811771750450134, step time: 23.786067962646484ms\r\n",,terminal_output +23022,15010326,"models/dynamics.py",648,10,"mask_token",python,selection_mouse +23023,15010341,"TERMINAL",0,0,"Step 724, loss: 0.5818542242050171, step time: 23.990392684936523ms\r\n",,terminal_output +23024,15010405,"TERMINAL",0,0,"Step 725, loss: 0.5873191356658936, step time: 24.325132369995117ms\r\n",,terminal_output +23025,15010517,"TERMINAL",0,0,"Step 726, loss: 0.6144107580184937, step time: 23.987531661987305ms\r\n",,terminal_output +23026,15010588,"TERMINAL",0,0,"Step 727, loss: 0.6097939014434814, step time: 24.158477783203125ms\r\n",,terminal_output +23027,15010695,"TERMINAL",0,0,"Step 728, loss: 0.6079618334770203, step time: 24.87945556640625ms\r\n",,terminal_output +23028,15010756,"TERMINAL",0,0,"Step 729, loss: 
0.6083589196205139, step time: 23.389816284179688ms\r\n",,terminal_output +23029,15010835,"TERMINAL",0,0,"Step 730, loss: 0.6030698418617249, step time: 23.087501525878906ms\r\n",,terminal_output +23030,15010942,"TERMINAL",0,0,"Step 731, loss: 0.5848042368888855, step time: 22.19843864440918ms\r\n",,terminal_output +23031,15011049,"TERMINAL",0,0,"Step 732, loss: 0.5877878665924072, step time: 21.708965301513672ms\r\n",,terminal_output +23032,15011101,"TERMINAL",0,0,"Step 733, loss: 0.5800719857215881, step time: 21.537303924560547ms\r\n",,terminal_output +23033,15011207,"TERMINAL",0,0,"Step 734, loss: 0.5766139030456543, step time: 22.54343032836914ms\r\n",,terminal_output +23034,15011271,"TERMINAL",0,0,"Step 735, loss: 0.5796915888786316, step time: 20.44081687927246ms\r\n",,terminal_output +23035,15011384,"TERMINAL",0,0,"Step 736, loss: 0.5647517442703247, step time: 22.01247215270996ms\r\n",,terminal_output +23036,15011447,"TERMINAL",0,0,"Step 737, loss: 0.5726761817932129, step time: 20.978927612304688ms\r\n",,terminal_output +23037,15011502,"models/dynamics.py",743,0,"",python,selection_mouse +23038,15011562,"TERMINAL",0,0,"Step 738, loss: 0.5601532459259033, step time: 21.263599395751953ms\r\n",,terminal_output +23039,15011642,"models/dynamics.py",742,2,"),",python,selection_mouse +23040,15011706,"TERMINAL",0,0,"Step 739, loss: 0.5627942085266113, step time: 20.45750617980957ms\r\nStep 740, loss: 0.5594629645347595, step time: 22.304058074951172ms\r\n",,terminal_output +23041,15011807,"models/dynamics.py",742,2,"),",python,selection_mouse +23042,15011814,"models/dynamics.py",733,11,"model_dim),",python,selection_mouse +23043,15011815,"TERMINAL",0,0,"Step 741, loss: 0.5567857027053833, step time: 21.471261978149414ms\r\n",,terminal_output +23044,15011857,"models/dynamics.py",732,12,".model_dim),",python,selection_mouse +23045,15011877,"models/dynamics.py",728,16,"self.model_dim),",python,selection_mouse +23046,15011913,"models/dynamics.py",727,17," self.model_dim),",python,selection_mouse +23047,15011914,"models/dynamics.py",676,68,"initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),",python,selection_mouse +23048,15011915,"TERMINAL",0,0,"Step 742, loss: 0.5550785064697266, step time: 21.089792251586914ms\r\n",,terminal_output +23049,15011942,"models/dynamics.py",675,69,".initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),",python,selection_mouse +23050,15011958,"models/dynamics.py",648,96,"mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),",python,selection_mouse +23051,15011971,"TERMINAL",0,0,"Step 743, loss: 0.5543044209480286, step time: 21.503925323486328ms\r\n",,terminal_output +23052,15011986,"models/dynamics.py",647,97,"""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),",python,selection_mouse +23053,15012004,"models/dynamics.py",646,98," ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),",python,selection_mouse +23054,15012027,"models/dynamics.py",645,99," ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),",python,selection_mouse +23055,15012092,"TERMINAL",0,0,"Step 744, loss: 0.5510455965995789, step time: 21.689653396606445ms\r\n",,terminal_output +23056,15012144,"TERMINAL",0,0,"Step 745, loss: 0.5547314286231995, step time: 28.504610061645508ms\r\n",,terminal_output +23057,15012157,"models/dynamics.py",605,139,"self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),",python,selection_mouse 
+23058,15012249,"TERMINAL",0,0,"Step 746, loss: 0.5464689135551453, step time: 28.576135635375977ms\r\n",,terminal_output +23059,15012364,"TERMINAL",0,0,"Step 747, loss: 0.5529837608337402, step time: 23.309707641601562ms\r\n",,terminal_output +23060,15012415,"TERMINAL",0,0,"Step 748, loss: 0.5413787961006165, step time: 21.02184295654297ms\r\n",,terminal_output +23061,15012522,"TERMINAL",0,0,"Step 749, loss: 0.5377904772758484, step time: 26.746034622192383ms\r\n",,terminal_output +23062,15012630,"TERMINAL",0,0,"Step 750, loss: 0.530968189239502, step time: 24.001121520996094ms\r\n",,terminal_output +23063,15012645,"models/dynamics.py",605,0,"",python,selection_mouse +23064,15012645,"models/dynamics.py",605,4,"self",python,selection_mouse +23065,15012684,"TERMINAL",0,0,"Step 751, loss: 0.5343640446662903, step time: 28.313159942626953ms\r\n",,terminal_output +23066,15012796,"TERMINAL",0,0,"Step 752, loss: 0.5328477621078491, step time: 29.021024703979492ms\r\n",,terminal_output +23067,15012810,"models/dynamics.py",605,38,"self.mask_token = self.param(\n ",python,selection_mouse +23068,15012836,"models/dynamics.py",605,65,"self.mask_token = self.param(\n ""mask_token"",\n ",python,selection_mouse +23069,15012849,"models/dynamics.py",605,66,"self.mask_token = self.param(\n ""mask_token"",\n ",python,selection_mouse +23070,15012860,"models/dynamics.py",605,112,"self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n ",python,selection_mouse +23071,15012872,"TERMINAL",0,0,"Step 753, loss: 0.534213662147522, step time: 27.718067169189453ms\r\n",,terminal_output +23072,15012883,"models/dynamics.py",605,113,"self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n ",python,selection_mouse +23073,15012894,"models/dynamics.py",605,149,"self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23074,15012974,"models/dynamics.py",605,172,"self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up",python,selection_mouse +23075,15012976,"TERMINAL",0,0,"Step 754, loss: 0.5384488701820374, step time: 24.64127540588379ms\r\n",,terminal_output +23076,15013055,"TERMINAL",0,0,"Step 755, loss: 0.5378986597061157, step time: 28.786420822143555ms\r\n",,terminal_output +23077,15013166,"TERMINAL",0,0,"Step 756, loss: 0.5333051085472107, step time: 23.97322654724121ms\r\n",,terminal_output +23078,15013219,"TERMINAL",0,0,"Step 757, loss: 0.5496705174446106, step time: 22.237777709960938ms\r\n",,terminal_output +23079,15013327,"TERMINAL",0,0,"Step 758, loss: 0.5701149106025696, step time: 26.150226593017578ms\r\n",,terminal_output +23080,15013432,"TERMINAL",0,0,"Step 759, loss: 0.5307528376579285, step time: 22.52340316772461ms\r\n",,terminal_output +23081,15013497,"TERMINAL",0,0,"Step 760, loss: 0.5331769585609436, step time: 23.654460906982422ms\r\n",,terminal_output +23082,15013608,"TERMINAL",0,0,"Step 761, loss: 0.5383210182189941, step time: 22.705793380737305ms\r\n",,terminal_output +23083,15013660,"TERMINAL",0,0,"Step 762, loss: 0.5146844983100891, step time: 21.806001663208008ms\r\n",,terminal_output +23084,15013732,"models/dynamics.py",754,0,"",python,selection_mouse +23085,15013747,"models/dynamics.py",753,0,"",python,selection_command +23086,15013834,"TERMINAL",0,0,"Step 763, loss: 0.5335012674331665, step time: 21.205425262451172ms\r\nStep 764, loss: 0.5085920095443726, step time: 
21.62003517150879ms\r\n",,terminal_output +23087,15013947,"TERMINAL",0,0,"Step 765, loss: 0.523179829120636, step time: 23.207902908325195ms\r\n",,terminal_output +23088,15014020,"TERMINAL",0,0,"Step 766, loss: 0.5133762359619141, step time: 25.699853897094727ms\r\n",,terminal_output +23089,15014128,"TERMINAL",0,0,"Step 767, loss: 0.5047106146812439, step time: 23.67687225341797ms\r\n",,terminal_output +23090,15014189,"TERMINAL",0,0,"Step 768, loss: 0.5138747096061707, step time: 23.120641708374023ms\r\n",,terminal_output +23091,15014300,"TERMINAL",0,0,"Step 769, loss: 0.49688196182250977, step time: 21.596908569335938ms\r\n",,terminal_output +23092,15014366,"TERMINAL",0,0,"Step 770, loss: 0.5074995756149292, step time: 22.844552993774414ms\r\n",,terminal_output +23093,15014439,"models/dynamics.py",754,0,"",python,selection_mouse +23094,15014456,"models/dynamics.py",753,0,"",python,selection_command +23095,15014543,"TERMINAL",0,0,"Step 771, loss: 0.4957399368286133, step time: 21.09980583190918ms\r\nStep 772, loss: 0.4966166019439697, step time: 21.817922592163086ms\r\n",,terminal_output +23096,15014609,"models/dynamics.py",753,1,")",python,selection_mouse +23097,15014624,"models/dynamics.py",754,0,"",python,selection_command +23098,15014668,"models/dynamics.py",712,42," (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23099,15014682,"models/dynamics.py",666,88," nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23100,15014697,"models/dynamics.py",663,91," nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23101,15014711,"models/dynamics.py",636,118," ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23102,15014725,"models/dynamics.py",635,119," ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23103,15014726,"TERMINAL",0,0,"Step 773, loss: 0.49467310309410095, step time: 22.23825454711914ms\r\nStep 774, loss: 0.49212950468063354, step time: 21.08621597290039ms\r\n",,terminal_output +23104,15014820,"TERMINAL",0,0,"Step 775, loss: 0.5000503659248352, step time: 22.565364837646484ms\r\n",,terminal_output +23105,15014928,"TERMINAL",0,0,"Step 776, loss: 0.5223215818405151, step time: 22.333145141601562ms\r\n",,terminal_output +23106,15014991,"TERMINAL",0,0,"Step 777, loss: 0.5563997030258179, step time: 21.253347396850586ms\r\n",,terminal_output +23107,15015028,"models/dynamics.py",597,157," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23108,15015092,"TERMINAL",0,0,"Step 778, loss: 0.5117717385292053, step time: 22.38011360168457ms\r\n",,terminal_output +23109,15015156,"TERMINAL",0,0,"Step 779, loss: 0.4998152256011963, step time: 22.513866424560547ms\r\n",,terminal_output +23110,15015261,"TERMINAL",0,0,"Step 780, loss: 0.5188514590263367, step time: 21.78335189819336ms\r\n",,terminal_output +23111,15015323,"TERMINAL",0,0,"Step 781, loss: 0.4856399595737457, step time: 20.95818519592285ms\r\n",,terminal_output +23112,15015428,"TERMINAL",0,0,"Step 782, loss: 0.5060142278671265, step time: 23.44036102294922ms\r\n",,terminal_output +23113,15015535,"TERMINAL",0,0,"Step 783, loss: 0.4890379011631012, step time: 24.790048599243164ms\r\n",,terminal_output +23114,15015596,"TERMINAL",0,0,"Step 784, loss: 0.4869235157966614, step time: 29.401540756225586ms\r\n",,terminal_output 
+23115,15015684,"models/dynamics.py",598,156," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23116,15015719,"TERMINAL",0,0,"Step 785, loss: 0.4854657053947449, step time: 27.497053146362305ms\r\n",,terminal_output +23117,15015770,"TERMINAL",0,0,"Step 786, loss: 0.48068252205848694, step time: 23.50020408630371ms\r\n",,terminal_output +23118,15015892,"TERMINAL",0,0,"Step 787, loss: 0.476696640253067, step time: 24.816274642944336ms\r\n",,terminal_output +23119,15015953,"TERMINAL",0,0,"Step 788, loss: 0.4782849848270416, step time: 24.190664291381836ms\r\n",,terminal_output +23120,15016037,"TERMINAL",0,0,"Step 789, loss: 0.4678478240966797, step time: 27.445077896118164ms\r\n",,terminal_output +23121,15016146,"TERMINAL",0,0,"Step 790, loss: 0.4707943797111511, step time: 29.2661190032959ms\r\n",,terminal_output +23122,15016254,"TERMINAL",0,0,"Step 791, loss: 0.4606219232082367, step time: 28.708934783935547ms\r\n",,terminal_output +23123,15016278,"models/dynamics.py",598,0,"",python,selection_mouse +23124,15016348,"TERMINAL",0,0,"Step 792, loss: 0.4641176462173462, step time: 25.498390197753906ms\r\n",,terminal_output +23125,15016363,"models/dynamics.py",597,8," ",python,selection_mouse +23126,15016438,"TERMINAL",0,0,"Step 793, loss: 0.4567486345767975, step time: 24.24454689025879ms\r\n",,terminal_output +23127,15016490,"TERMINAL",0,0,"Step 794, loss: 0.45540541410446167, step time: 27.340173721313477ms\r\n",,terminal_output +23128,15016507,"models/dynamics.py",597,38," self.mask_token = self.param(\n",python,selection_mouse +23129,15016586,"TERMINAL",0,0,"Step 795, loss: 0.45484551787376404, step time: 23.56886863708496ms\r\n",,terminal_output +23130,15016619,"models/dynamics.py",597,64," self.mask_token = self.param(\n ""mask_token"",\n",python,selection_mouse +23131,15016665,"models/dynamics.py",597,109," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n",python,selection_mouse +23132,15016684,"TERMINAL",0,0,"Step 796, loss: 0.44923532009124756, step time: 27.31013298034668ms\r\n",,terminal_output +23133,15016741,"models/dynamics.py",597,148," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n",python,selection_mouse +23134,15016772,"TERMINAL",0,0,"Step 797, loss: 0.4515014588832855, step time: 22.983551025390625ms\r\n",,terminal_output +23135,15016849,"TERMINAL",0,0,"Step 798, loss: 0.4554116427898407, step time: 22.160053253173828ms\r\n",,terminal_output +23136,15016957,"models/dynamics.py",597,158," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n",python,selection_mouse +23137,15016973,"TERMINAL",0,0,"Step 799, loss: 0.4731813669204712, step time: 22.7358341217041ms\r\n",,terminal_output +23138,15017036,"TERMINAL",0,0,"Step 800, loss: 0.49392110109329224, step time: 23.242473602294922ms\r\n",,terminal_output +23139,15017149,"TERMINAL",0,0,"Step 801, loss: 0.46636292338371277, step time: 22.30978012084961ms\r\n",,terminal_output +23140,15017211,"TERMINAL",0,0,"Step 802, loss: 0.4582217037677765, step time: 24.363279342651367ms\r\n",,terminal_output +23141,15017320,"TERMINAL",0,0,"Step 803, loss: 0.46491000056266785, step time: 24.518489837646484ms\r\n",,terminal_output +23142,15017377,"TERMINAL",0,0,"Step 804, loss: 0.4521845579147339, step time: 23.15664291381836ms\r\n",,terminal_output 
+23143,15017397,"models/dynamics.py",754,0,"",python,selection_mouse +23144,15017411,"models/dynamics.py",753,0,"",python,selection_command +23145,15017490,"TERMINAL",0,0,"Step 805, loss: 0.44744443893432617, step time: 22.05061912536621ms\r\n",,terminal_output +23146,15017555,"TERMINAL",0,0,"Step 806, loss: 0.44984081387519836, step time: 24.333715438842773ms\r\n",,terminal_output +23147,15017664,"TERMINAL",0,0,"Step 807, loss: 0.4369793236255646, step time: 23.21624755859375ms\r\n",,terminal_output +23148,15017728,"TERMINAL",0,0,"Step 808, loss: 0.44623759388923645, step time: 21.598100662231445ms\r\n",,terminal_output +23149,15017749,"models/dynamics.py",754,0,"",python,selection_mouse +23150,15017756,"models/dynamics.py",753,0,"",python,selection_command +23151,15017838,"TERMINAL",0,0,"Step 809, loss: 0.43114978075027466, step time: 22.364139556884766ms\r\n",,terminal_output +23152,15017958,"models/dynamics.py",753,1,")",python,selection_mouse +23153,15017972,"models/dynamics.py",754,0,"",python,selection_command +23154,15017972,"TERMINAL",0,0,"Step 810, loss: 0.4348137676715851, step time: 22.500276565551758ms\r\n",,terminal_output +23155,15018026,"models/dynamics.py",717,37," (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23156,15018039,"models/dynamics.py",669,85," nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23157,15018040,"TERMINAL",0,0,"Step 811, loss: 0.4330011010169983, step time: 21.954774856567383ms\r\n",,terminal_output +23158,15018057,"models/dynamics.py",667,87," nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23159,15018074,"models/dynamics.py",640,114," ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23160,15018089,"models/dynamics.py",639,115," ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23161,15018090,"TERMINAL",0,0,"Step 812, loss: 0.4235150218009949, step time: 22.51577377319336ms\r\n",,terminal_output +23162,15018104,"models/dynamics.py",601,153," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23163,15018116,"models/dynamics.py",600,154," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23164,15018151,"models/dynamics.py",599,155," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23165,15018198,"TERMINAL",0,0,"Step 813, loss: 0.4387536346912384, step time: 20.50471305847168ms\r\n",,terminal_output +23166,15018250,"TERMINAL",0,0,"Step 814, loss: 0.4378261864185333, step time: 20.713090896606445ms\r\n",,terminal_output +23167,15018362,"TERMINAL",0,0,"Step 815, loss: 0.46283742785453796, step time: 25.651216506958008ms\r\n",,terminal_output +23168,15018468,"TERMINAL",0,0,"Step 816, loss: 0.46047908067703247, step time: 22.815465927124023ms\r\n",,terminal_output +23169,15018508,"models/dynamics.py",598,156," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23170,15018527,"TERMINAL",0,0,"Step 817, loss: 0.4334715008735657, step time: 21.279573440551758ms\r\n",,terminal_output +23171,15018632,"TERMINAL",0,0,"Step 818, loss: 0.46592652797698975, step time: 
21.694660186767578ms\r\n",,terminal_output +23172,15018687,"TERMINAL",0,0,"Step 819, loss: 0.4365686774253845, step time: 20.549774169921875ms\r\n",,terminal_output +23173,15018796,"TERMINAL",0,0,"Step 820, loss: 0.42592698335647583, step time: 21.287202835083008ms\r\n",,terminal_output +23174,15018876,"models/dynamics.py",597,157," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23175,15018940,"TERMINAL",0,0,"Step 821, loss: 0.4407346844673157, step time: 21.574020385742188ms\r\n",,terminal_output +23176,15019208,"TERMINAL",0,0,"Step 822, loss: 0.41166046261787415, step time: 341.17794036865234ms\r\n",,terminal_output +23177,15019315,"TERMINAL",0,0,"Step 823, loss: 0.43382638692855835, step time: 24.875402450561523ms\r\n",,terminal_output +23178,15019422,"TERMINAL",0,0,"Step 824, loss: 0.40794795751571655, step time: 22.76015281677246ms\r\n",,terminal_output +23179,15019483,"TERMINAL",0,0,"Step 825, loss: 0.4222277104854584, step time: 28.16319465637207ms\r\n",,terminal_output +23180,15019588,"TERMINAL",0,0,"Step 826, loss: 0.4111477732658386, step time: 28.942346572875977ms\r\n",,terminal_output +23181,15019700,"TERMINAL",0,0,"Step 827, loss: 0.41025030612945557, step time: 28.093338012695312ms\r\n",,terminal_output +23182,15019753,"TERMINAL",0,0,"Step 828, loss: 0.4087286591529846, step time: 25.497913360595703ms\r\n",,terminal_output +23183,15019860,"TERMINAL",0,0,"Step 829, loss: 0.40188509225845337, step time: 28.49555015563965ms\r\n",,terminal_output +23184,15019968,"TERMINAL",0,0,"Step 830, loss: 0.4018351435661316, step time: 25.16460418701172ms\r\n",,terminal_output +23185,15020032,"TERMINAL",0,0,"Step 831, loss: 0.3987157642841339, step time: 24.292707443237305ms\r\n",,terminal_output +23186,15020139,"TERMINAL",0,0,"Step 832, loss: 0.39631661772727966, step time: 27.951717376708984ms\r\n",,terminal_output +23187,15020201,"TERMINAL",0,0,"Step 833, loss: 0.39425021409988403, step time: 23.20408821105957ms\r\n",,terminal_output +23188,15020308,"TERMINAL",0,0,"Step 834, loss: 0.3912818729877472, step time: 24.872779846191406ms\r\n",,terminal_output +23189,15020415,"TERMINAL",0,0,"Step 835, loss: 0.388258159160614, step time: 22.44114875793457ms\r\n",,terminal_output +23190,15020466,"TERMINAL",0,0,"Step 836, loss: 0.39395156502723694, step time: 23.360729217529297ms\r\n",,terminal_output +23191,15020571,"TERMINAL",0,0,"Step 837, loss: 0.4108976125717163, step time: 24.789810180664062ms\r\n",,terminal_output +23192,15020676,"TERMINAL",0,0,"Step 838, loss: 0.4810144603252411, step time: 23.843050003051758ms\r\n",,terminal_output +23193,15020739,"TERMINAL",0,0,"Step 839, loss: 0.48753440380096436, step time: 23.71525764465332ms\r\n",,terminal_output +23194,15020844,"TERMINAL",0,0,"Step 840, loss: 0.40224209427833557, step time: 24.266481399536133ms\r\n",,terminal_output +23195,15020953,"TERMINAL",0,0,"Step 841, loss: 0.46373888850212097, step time: 24.05381202697754ms\r\n",,terminal_output +23196,15021007,"TERMINAL",0,0,"Step 842, loss: 0.3932746946811676, step time: 22.76897430419922ms\r\n",,terminal_output +23197,15021102,"TERMINAL",0,0,"Step 843, loss: 0.44273674488067627, step time: 21.761178970336914ms\r\n",,terminal_output +23198,15021211,"TERMINAL",0,0,"Step 844, loss: 0.38730594515800476, step time: 21.360158920288086ms\r\n",,terminal_output +23199,15021243,"models/dynamics.py",754,0,"",python,selection_mouse +23200,15021257,"models/dynamics.py",753,0,"",python,selection_command 
+23201,15021303,"TERMINAL",0,0,"Step 845, loss: 0.4375985860824585, step time: 24.527788162231445ms\r\n",,terminal_output +23202,15021364,"TERMINAL",0,0,"Step 846, loss: 0.37876471877098083, step time: 22.758960723876953ms\r\n",,terminal_output +23203,15021432,"models/dynamics.py",754,0,"",python,selection_mouse +23204,15021449,"models/dynamics.py",753,0,"",python,selection_command +23205,15021479,"TERMINAL",0,0,"Step 847, loss: 0.43070146441459656, step time: 21.15774154663086ms\r\n",,terminal_output +23206,15021532,"TERMINAL",0,0,"Step 848, loss: 0.37960895895957947, step time: 21.28314971923828ms\r\n",,terminal_output +23207,15021660,"models/dynamics.py",753,1,")",python,selection_mouse +23208,15021660,"models/dynamics.py",754,0,"",python,selection_command +23209,15021662,"models/dynamics.py",717,37," (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23210,15021662,"models/dynamics.py",715,39," (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23211,15021667,"models/dynamics.py",668,86," nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23212,15021683,"models/dynamics.py",641,113," ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23213,15021700,"models/dynamics.py",640,114," ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23214,15021742,"models/dynamics.py",639,115," ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23215,15021742,"TERMINAL",0,0,"Step 849, loss: 0.4082721173763275, step time: 22.085905075073242ms\r\nStep 850, loss: 0.38196998834609985, step time: 22.891521453857422ms\r\n",,terminal_output +23216,15021823,"TERMINAL",0,0,"Step 851, loss: 0.39298030734062195, step time: 21.915435791015625ms\r\n",,terminal_output +23217,15021879,"TERMINAL",0,0,"Step 852, loss: 0.37697720527648926, step time: 22.312641143798828ms\r\n",,terminal_output +23218,15021937,"models/dynamics.py",638,116," ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23219,15022002,"models/dynamics.py",599,155," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23220,15022015,"models/dynamics.py",598,156," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23221,15022016,"models/dynamics.py",597,157," self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )",python,selection_mouse +23222,15022016,"TERMINAL",0,0,"Step 853, loss: 0.3868758976459503, step time: 21.199941635131836ms\r\n",,terminal_output +23223,15022090,"TERMINAL",0,0,"Step 854, loss: 0.36946597695350647, step time: 22.071123123168945ms\r\n",,terminal_output +23224,15022143,"TERMINAL",0,0,"Step 855, loss: 0.3775663375854492, step time: 21.840572357177734ms\r\n",,terminal_output +23225,15022241,"TERMINAL",0,0,"Step 856, loss: 0.3686091899871826, step time: 22.633790969848633ms\r\n",,terminal_output +23226,15022348,"TERMINAL",0,0,"Step 857, loss: 0.36653849482536316, step time: 28.92756462097168ms\r\n",,terminal_output +23227,15022410,"TERMINAL",0,0,"Step 858, loss: 0.368185430765152, step time: 28.413057327270508ms\r\n",,terminal_output +23228,15022520,"TERMINAL",0,0,"Step 859, loss: 0.36366453766822815, step time: 
23.36740493774414ms\r\n",,terminal_output +23229,15022633,"TERMINAL",0,0,"Step 860, loss: 0.3656424582004547, step time: 23.13065528869629ms\r\n",,terminal_output +23230,15022685,"TERMINAL",0,0,"Step 861, loss: 0.3644041121006012, step time: 23.125410079956055ms\r\n",,terminal_output +23231,15022804,"TERMINAL",0,0,"Step 862, loss: 0.3664092421531677, step time: 25.586366653442383ms\r\n",,terminal_output +23232,15022855,"TERMINAL",0,0,"Step 863, loss: 0.3638022541999817, step time: 28.315305709838867ms\r\n",,terminal_output +23233,15022963,"TERMINAL",0,0,"Step 864, loss: 0.3579707443714142, step time: 28.9003849029541ms\r\n",,terminal_output +23234,15023074,"TERMINAL",0,0,"Step 865, loss: 0.3524489402770996, step time: 28.94735336303711ms\r\n",,terminal_output +23235,15023138,"TERMINAL",0,0,"Step 866, loss: 0.35131072998046875, step time: 26.831865310668945ms\r\n",,terminal_output +23236,15023252,"models/dynamics.py",721,0,"",python,selection_mouse +23237,15023258,"TERMINAL",0,0,"Step 867, loss: 0.35265645384788513, step time: 23.865222930908203ms\r\n",,terminal_output +23238,15023315,"TERMINAL",0,0,"Step 868, loss: 0.3509920835494995, step time: 27.52375602722168ms\r\n",,terminal_output +23239,15023432,"TERMINAL",0,0,"Step 869, loss: 0.34709593653678894, step time: 23.162126541137695ms\r\n",,terminal_output +23240,15023486,"TERMINAL",0,0,"Step 870, loss: 0.3453502655029297, step time: 25.777578353881836ms\r\n",,terminal_output +23241,15023617,"TERMINAL",0,0,"Step 871, loss: 0.34763845801353455, step time: 32.62734413146973ms\r\n",,terminal_output +23242,15023670,"TERMINAL",0,0,"Step 872, loss: 0.3418891131877899, step time: 21.628618240356445ms\r\n",,terminal_output +23243,15023792,"TERMINAL",0,0,"Step 873, loss: 0.3369655907154083, step time: 19.58918571472168ms\r\n",,terminal_output +23244,15023859,"TERMINAL",0,0,"Step 874, loss: 0.3430526852607727, step time: 19.208908081054688ms\r\n",,terminal_output +23245,15023934,"TERMINAL",0,0,"Step 875, loss: 0.35393354296684265, step time: 18.92542839050293ms\r\n",,terminal_output +23246,15024031,"TERMINAL",0,0,"Step 876, loss: 0.3587615489959717, step time: 21.546363830566406ms\r\n",,terminal_output +23247,15024121,"TERMINAL",0,0,"Step 877, loss: 0.3491324484348297, step time: 19.26875114440918ms\r\n",,terminal_output +23248,15024228,"TERMINAL",0,0,"Step 878, loss: 0.36603641510009766, step time: 18.79286766052246ms\r\n",,terminal_output +23249,15024290,"TERMINAL",0,0,"Step 879, loss: 0.3869035840034485, step time: 18.953323364257812ms\r\n",,terminal_output +23250,15024384,"TERMINAL",0,0,"Step 880, loss: 0.3517562747001648, step time: 18.557310104370117ms\r\n",,terminal_output +23251,15024449,"TERMINAL",0,0,"Step 881, loss: 0.36183786392211914, step time: 18.25571060180664ms\r\n",,terminal_output +23252,15024561,"TERMINAL",0,0,"Step 882, loss: 0.3487088978290558, step time: 18.812894821166992ms\r\n",,terminal_output +23253,15024620,"TERMINAL",0,0,"Step 883, loss: 0.3520820736885071, step time: 18.5091495513916ms\r\n",,terminal_output +23254,15024731,"TERMINAL",0,0,"Step 884, loss: 0.34415340423583984, step time: 18.326997756958008ms\r\n",,terminal_output +23255,15024796,"TERMINAL",0,0,"Step 885, loss: 0.3384961783885956, step time: 18.42641830444336ms\r\n",,terminal_output +23256,15024904,"TERMINAL",0,0,"Step 886, loss: 0.3375733196735382, step time: 18.344402313232422ms\r\n",,terminal_output +23257,15024966,"TERMINAL",0,0,"Step 887, loss: 0.3339550495147705, step time: 19.382476806640625ms\r\n",,terminal_output 
+23258,15025074,"TERMINAL",0,0,"Step 888, loss: 0.3280318081378937, step time: 18.94664764404297ms\r\n",,terminal_output +23259,15025152,"TERMINAL",0,0,"Step 889, loss: 0.3298007845878601, step time: 18.169641494750977ms\r\n",,terminal_output +23260,15025262,"TERMINAL",0,0,"Step 890, loss: 0.32309919595718384, step time: 18.3413028717041ms\r\n",,terminal_output +23261,15025323,"TERMINAL",0,0,"Step 891, loss: 0.324337363243103, step time: 18.464088439941406ms\r\n",,terminal_output +23262,15025434,"TERMINAL",0,0,"Step 892, loss: 0.3181018829345703, step time: 18.40996742248535ms\r\n",,terminal_output +23263,15025487,"TERMINAL",0,0,"Step 893, loss: 0.3201826214790344, step time: 18.16082000732422ms\r\n",,terminal_output +23264,15025596,"TERMINAL",0,0,"Step 894, loss: 0.31205475330352783, step time: 18.515586853027344ms\r\n",,terminal_output +23265,15025659,"TERMINAL",0,0,"Step 895, loss: 0.31945058703422546, step time: 18.17035675048828ms\r\n",,terminal_output +23266,15025767,"TERMINAL",0,0,"Step 896, loss: 0.308800607919693, step time: 17.982959747314453ms\r\n",,terminal_output +23267,15025824,"TERMINAL",0,0,"Step 897, loss: 0.3217436671257019, step time: 18.379688262939453ms\r\n",,terminal_output +23268,15025939,"TERMINAL",0,0,"Step 898, loss: 0.3236544728279114, step time: 18.347740173339844ms\r\n",,terminal_output +23269,15026002,"TERMINAL",0,0,"Step 899, loss: 0.347838819026947, step time: 18.41259002685547ms\r\n",,terminal_output +23270,15026098,"TERMINAL",0,0,"Step 900, loss: 0.3516770899295807, step time: 18.632173538208008ms\r\n",,terminal_output +23271,15026208,"TERMINAL",0,0,"Step 901, loss: 0.31301116943359375, step time: 19.238710403442383ms\r\n",,terminal_output +23272,15026260,"TERMINAL",0,0,"Step 902, loss: 0.3277120888233185, step time: 26.298046112060547ms\r\n",,terminal_output +23273,15026370,"TERMINAL",0,0,"Step 903, loss: 0.3285072147846222, step time: 28.788089752197266ms\r\n",,terminal_output +23274,15026481,"TERMINAL",0,0,"Step 904, loss: 0.30137526988983154, step time: 28.403520584106445ms\r\n",,terminal_output +23275,15026535,"TERMINAL",0,0,"Step 905, loss: 0.3242488503456116, step time: 27.9998779296875ms\r\n",,terminal_output +23276,15026643,"TERMINAL",0,0,"Step 906, loss: 0.30059996247291565, step time: 29.122352600097656ms\r\n",,terminal_output +23277,15026756,"TERMINAL",0,0,"Step 907, loss: 0.30558234453201294, step time: 28.194904327392578ms\r\n",,terminal_output +23278,15026810,"TERMINAL",0,0,"Step 908, loss: 0.3107186257839203, step time: 28.003215789794922ms\r\n",,terminal_output +23279,15026918,"TERMINAL",0,0,"Step 909, loss: 0.2907089293003082, step time: 27.821779251098633ms\r\n",,terminal_output +23280,15027026,"TERMINAL",0,0,"Step 910, loss: 0.30774810910224915, step time: 26.029348373413086ms\r\n",,terminal_output +23281,15027078,"TERMINAL",0,0,"Step 911, loss: 0.29184433817863464, step time: 30.754566192626953ms\r\n",,terminal_output +23282,15027185,"TERMINAL",0,0,"Step 912, loss: 0.29511508345603943, step time: 25.38299560546875ms\r\n",,terminal_output +23283,15027293,"TERMINAL",0,0,"Step 913, loss: 0.29448461532592773, step time: 20.198583602905273ms\r\n",,terminal_output +23284,15027354,"TERMINAL",0,0,"Step 914, loss: 0.2936200797557831, step time: 19.530773162841797ms\r\n",,terminal_output +23285,15027421,"models/dynamics.py",1091,0,"",python,selection_mouse +23286,15027446,"TERMINAL",0,0,"Step 915, loss: 0.29349997639656067, step time: 20.313501358032227ms\r\n",,terminal_output +23287,15027557,"TERMINAL",0,0,"Step 916, loss: 0.3030538260936737, 
step time: 20.816564559936523ms\r\n",,terminal_output +23288,15027610,"TERMINAL",0,0,"Step 917, loss: 0.29313281178474426, step time: 19.39105987548828ms\r\n",,terminal_output +23289,15027718,"TERMINAL",0,0,"Step 918, loss: 0.2909516394138336, step time: 19.339561462402344ms\r\n",,terminal_output +23290,15027780,"TERMINAL",0,0,"Step 919, loss: 0.2885519862174988, step time: 18.69368553161621ms\r\n",,terminal_output +23291,15027861,"TERMINAL",0,0,"Step 920, loss: 0.2881346642971039, step time: 18.448591232299805ms\r\n",,terminal_output +23292,15027975,"TERMINAL",0,0,"Step 921, loss: 0.29451242089271545, step time: 18.480539321899414ms\r\n",,terminal_output +23293,15028039,"TERMINAL",0,0,"Step 922, loss: 0.28980156779289246, step time: 18.468856811523438ms\r\n",,terminal_output +23294,15028094,"models/dynamics.py",1288,0,"",python,selection_mouse +23295,15028157,"TERMINAL",0,0,"Step 923, loss: 0.28874120116233826, step time: 18.183231353759766ms\r\n",,terminal_output +23296,15028251,"TERMINAL",0,0,"Step 924, loss: 0.27878376841545105, step time: 18.701553344726562ms\r\n",,terminal_output +23297,15028355,"TERMINAL",0,0,"Step 925, loss: 0.2821590006351471, step time: 18.129587173461914ms\r\n",,terminal_output +23298,15028413,"TERMINAL",0,0,"Step 926, loss: 0.2768360674381256, step time: 18.088579177856445ms\r\n",,terminal_output +23299,15028474,"TERMINAL",0,0,"Step 927, loss: 0.2732255458831787, step time: 18.407106399536133ms\r\n",,terminal_output +23300,15028590,"TERMINAL",0,0,"Step 928, loss: 0.2718335688114166, step time: 18.523454666137695ms\r\n",,terminal_output +23301,15028646,"TERMINAL",0,0,"Step 929, loss: 0.2718852758407593, step time: 18.106460571289062ms\r\n",,terminal_output +23302,15028751,"TERMINAL",0,0,"Step 930, loss: 0.2679460644721985, step time: 18.530845642089844ms\r\n",,terminal_output +23303,15028813,"TERMINAL",0,0,"Step 931, loss: 0.26702791452407837, step time: 18.013715744018555ms\r\n",,terminal_output +23304,15028922,"TERMINAL",0,0,"Step 932, loss: 0.2690158486366272, step time: 17.993927001953125ms\r\n",,terminal_output +23305,15028976,"TERMINAL",0,0,"Step 933, loss: 0.2705545425415039, step time: 18.316268920898438ms\r\n",,terminal_output +23306,15029071,"TERMINAL",0,0,"Step 934, loss: 0.2793739438056946, step time: 18.259525299072266ms\r\n",,terminal_output +23307,15029182,"TERMINAL",0,0,"Step 935, loss: 0.3110383152961731, step time: 18.06354522705078ms\r\n",,terminal_output +23308,15029245,"TERMINAL",0,0,"Step 936, loss: 0.3418107032775879, step time: 18.49961280822754ms\r\n",,terminal_output +23309,15029351,"TERMINAL",0,0,"Step 937, loss: 0.26394397020339966, step time: 18.02992820739746ms\r\n",,terminal_output +23310,15029412,"TERMINAL",0,0,"Step 938, loss: 0.32167235016822815, step time: 18.253087997436523ms\r\n",,terminal_output +23311,15029518,"TERMINAL",0,0,"Step 939, loss: 0.2911670207977295, step time: 18.58210563659668ms\r\n",,terminal_output +23312,15029580,"TERMINAL",0,0,"Step 940, loss: 0.2979240417480469, step time: 18.254518508911133ms\r\n",,terminal_output +23313,15029688,"TERMINAL",0,0,"Step 941, loss: 0.2732418477535248, step time: 18.088579177856445ms\r\n",,terminal_output +23314,15029751,"TERMINAL",0,0,"Step 942, loss: 0.30313944816589355, step time: 18.504619598388672ms\r\n",,terminal_output +23315,15029843,"TERMINAL",0,0,"Step 943, loss: 0.2586275637149811, step time: 18.16391944885254ms\r\n",,terminal_output +23316,15029949,"TERMINAL",0,0,"Step 944, loss: 0.3117862343788147, step time: 17.957210540771484ms\r\n",,terminal_output 
+23317,15030010,"TERMINAL",0,0,"Step 945, loss: 0.2592993676662445, step time: 18.51630210876465ms\r\n",,terminal_output +23318,15030118,"TERMINAL",0,0,"Step 946, loss: 0.299162358045578, step time: 18.365859985351562ms\r\n",,terminal_output +23319,15030179,"TERMINAL",0,0,"Step 947, loss: 0.26297634840011597, step time: 18.686771392822266ms\r\n",,terminal_output +23320,15030285,"TERMINAL",0,0,"Step 948, loss: 0.2759903371334076, step time: 18.602371215820312ms\r\n",,terminal_output +23321,15030349,"TERMINAL",0,0,"Step 949, loss: 0.2693082392215729, step time: 18.064022064208984ms\r\n",,terminal_output +23322,15030469,"TERMINAL",0,0,"Step 950, loss: 0.25585851073265076, step time: 18.018722534179688ms\r\n",,terminal_output +23323,15030529,"TERMINAL",0,0,"Step 951, loss: 0.2702764570713043, step time: 18.42188835144043ms\r\n",,terminal_output +23324,15030634,"TERMINAL",0,0,"Step 952, loss: 0.2506958544254303, step time: 18.20683479309082ms\r\n",,terminal_output +23325,15030694,"TERMINAL",0,0,"Step 953, loss: 0.26206105947494507, step time: 18.326997756958008ms\r\n",,terminal_output +23326,15030810,"TERMINAL",0,0,"Step 954, loss: 0.25438252091407776, step time: 18.37754249572754ms\r\n",,terminal_output +23327,15030861,"TERMINAL",0,0,"Step 955, loss: 0.2469746321439743, step time: 18.57757568359375ms\r\n",,terminal_output +23328,15030974,"TERMINAL",0,0,"Step 956, loss: 0.25304096937179565, step time: 18.691301345825195ms\r\n",,terminal_output +23329,15031034,"TERMINAL",0,0,"Step 957, loss: 0.2408786565065384, step time: 18.829345703125ms\r\n",,terminal_output +23330,15031142,"TERMINAL",0,0,"Step 958, loss: 0.24622581899166107, step time: 18.50581169128418ms\r\n",,terminal_output +23331,15031204,"TERMINAL",0,0,"Step 959, loss: 0.24324984848499298, step time: 18.147945404052734ms\r\n",,terminal_output +23332,15031335,"TERMINAL",0,0,"Step 960, loss: 0.23966269195079803, step time: 18.68414878845215ms\r\n",,terminal_output +23333,15031428,"TERMINAL",0,0,"Step 961, loss: 0.2425442487001419, step time: 18.044471740722656ms\r\n",,terminal_output +23334,15031479,"TERMINAL",0,0,"Step 962, loss: 0.2380303591489792, step time: 18.167972564697266ms\r\n",,terminal_output +23335,15031584,"TERMINAL",0,0,"Step 963, loss: 0.23578381538391113, step time: 18.286466598510742ms\r\n",,terminal_output +23336,15031646,"TERMINAL",0,0,"Step 964, loss: 0.23817546665668488, step time: 18.219947814941406ms\r\n",,terminal_output +23337,15031740,"TERMINAL",0,0,"Step 965, loss: 0.23628893494606018, step time: 18.06950569152832ms\r\n",,terminal_output +23338,15031898,"TERMINAL",0,0,"Step 966, loss: 0.25475311279296875, step time: 18.37635040283203ms\r\nStep 967, loss: 0.2955417335033417, step time: 18.176794052124023ms\r\n",,terminal_output +23339,15031988,"TERMINAL",0,0,"Step 968, loss: 0.3217455744743347, step time: 17.960309982299805ms\r\n",,terminal_output +23340,15032100,"TERMINAL",0,0,"Step 969, loss: 0.2522556483745575, step time: 18.527984619140625ms\r\n",,terminal_output +23341,15032164,"TERMINAL",0,0,"Step 970, loss: 0.3146572411060333, step time: 18.140316009521484ms\r\n",,terminal_output +23342,15032273,"TERMINAL",0,0,"Step 971, loss: 0.2447325587272644, step time: 17.953157424926758ms\r\n",,terminal_output +23343,15032333,"TERMINAL",0,0,"Step 972, loss: 0.31292417645454407, step time: 18.499374389648438ms\r\n",,terminal_output +23344,15032431,"TERMINAL",0,0,"Step 973, loss: 0.24035125970840454, step time: 18.23568344116211ms\r\n",,terminal_output +23345,15032539,"TERMINAL",0,0,"Step 974, loss: 
0.33462366461753845, step time: 18.027067184448242ms\r\n",,terminal_output +23346,15032595,"TERMINAL",0,0,"Step 975, loss: 0.23405632376670837, step time: 18.25237274169922ms\r\n",,terminal_output +23347,15032702,"TERMINAL",0,0,"Step 976, loss: 0.34930169582366943, step time: 18.275022506713867ms\r\n",,terminal_output +23348,15032796,"TERMINAL",0,0,"Step 977, loss: 0.23074403405189514, step time: 17.91977882385254ms\r\n",,terminal_output +23349,15032857,"TERMINAL",0,0,"Step 978, loss: 0.3177618980407715, step time: 18.419981002807617ms\r\n",,terminal_output +23350,15032965,"TERMINAL",0,0,"Step 979, loss: 0.24053117632865906, step time: 18.15485954284668ms\r\n",,terminal_output +23351,15033028,"TERMINAL",0,0,"Step 980, loss: 0.2690935730934143, step time: 17.908096313476562ms\r\n",,terminal_output +23352,15033135,"TERMINAL",0,0,"Step 981, loss: 0.25839465856552124, step time: 18.418312072753906ms\r\n",,terminal_output +23353,15033188,"TERMINAL",0,0,"Step 982, loss: 0.2372337430715561, step time: 18.152713775634766ms\r\n",,terminal_output +23354,15033316,"TERMINAL",0,0,"Step 983, loss: 0.265945702791214, step time: 17.990589141845703ms\r\n",,terminal_output +23355,15033373,"TERMINAL",0,0,"Step 984, loss: 0.22540411353111267, step time: 18.84770393371582ms\r\n",,terminal_output +23356,15033468,"TERMINAL",0,0,"Step 985, loss: 0.24608375132083893, step time: 17.979145050048828ms\r\n",,terminal_output +23357,15033532,"TERMINAL",0,0,"Step 986, loss: 0.23825016617774963, step time: 17.875194549560547ms\r\n",,terminal_output +23358,15033637,"TERMINAL",0,0,"Step 987, loss: 0.22094249725341797, step time: 18.671751022338867ms\r\n",,terminal_output +23359,15033748,"TERMINAL",0,0,"Step 988, loss: 0.23901353776454926, step time: 18.203020095825195ms\r\n",,terminal_output +23360,15033810,"TERMINAL",0,0,"Step 989, loss: 0.22289220988750458, step time: 18.10622215270996ms\r\n",,terminal_output +23361,15033919,"TERMINAL",0,0,"Step 990, loss: 0.22249123454093933, step time: 18.485307693481445ms\r\n",,terminal_output +23362,15033982,"TERMINAL",0,0,"Step 991, loss: 0.2212315797805786, step time: 22.432327270507812ms\r\n",,terminal_output +23363,15034089,"TERMINAL",0,0,"Step 992, loss: 0.22242403030395508, step time: 19.160985946655273ms\r\n",,terminal_output +23364,15034141,"TERMINAL",0,0,"Step 993, loss: 0.2122138887643814, step time: 18.6767578125ms\r\n",,terminal_output +23365,15034246,"TERMINAL",0,0,"Step 994, loss: 0.21570329368114471, step time: 18.642187118530273ms\r\n",,terminal_output +23366,15034309,"TERMINAL",0,0,"Step 995, loss: 0.21707390248775482, step time: 18.260478973388672ms\r\n",,terminal_output +23367,15034421,"TERMINAL",0,0,"Step 996, loss: 0.2071509212255478, step time: 18.517494201660156ms\r\n",,terminal_output +23368,15034483,"TERMINAL",0,0,"Step 997, loss: 0.21837463974952698, step time: 18.204212188720703ms\r\n",,terminal_output +23369,15034596,"TERMINAL",0,0,"Step 998, loss: 0.21720361709594727, step time: 17.9903507232666ms\r\n",,terminal_output +23370,15034652,"TERMINAL",0,0,"Step 999, loss: 0.21886658668518066, step time: 18.25714111328125ms\r\n",,terminal_output +23371,15037587,"TERMINAL",0,0,"Step 1000, loss: 0.2198774516582489, step time: 26.162147521972656ms\r\nStep 1001, loss: 0.21473169326782227, step time: 25.479793548583984ms\r\n",,terminal_output +23372,15037682,"TERMINAL",0,0,"Step 1002, loss: 0.21765920519828796, step time: 21.126508712768555ms\r\n",,terminal_output +23373,15037802,"TERMINAL",0,0,"Step 1003, loss: 0.23690497875213623, step time: 
20.796775817871094ms\r\n",,terminal_output +23374,15037854,"TERMINAL",0,0,"Step 1004, loss: 0.23226791620254517, step time: 19.577980041503906ms\r\n",,terminal_output +23375,15037962,"TERMINAL",0,0,"Step 1005, loss: 0.20488016307353973, step time: 19.783973693847656ms\r\n",,terminal_output +23376,15038069,"TERMINAL",0,0,"Step 1006, loss: 0.22505588829517365, step time: 19.877910614013672ms\r\n",,terminal_output +23377,15038121,"TERMINAL",0,0,"Step 1007, loss: 0.21117956936359406, step time: 19.437789916992188ms\r\n",,terminal_output +23378,15038215,"TERMINAL",0,0,"Step 1008, loss: 0.20462924242019653, step time: 19.505023956298828ms\r\n",,terminal_output +23379,15038325,"TERMINAL",0,0,"Step 1009, loss: 0.2165394127368927, step time: 19.980430603027344ms\r\n",,terminal_output +23380,15038431,"TERMINAL",0,0,"Step 1010, loss: 0.19598618149757385, step time: 19.140243530273438ms\r\n",,terminal_output +23381,15038484,"TERMINAL",0,0,"Step 1011, loss: 0.205893412232399, step time: 18.254756927490234ms\r\n",,terminal_output +23382,15038586,"TERMINAL",0,0,"Step 1012, loss: 0.20255443453788757, step time: 19.74630355834961ms\r\n",,terminal_output +23383,15038644,"TERMINAL",0,0,"Step 1013, loss: 0.19428294897079468, step time: 18.98932456970215ms\r\n",,terminal_output +23384,15038752,"TERMINAL",0,0,"Step 1014, loss: 0.19896216690540314, step time: 18.987655639648438ms\r\n",,terminal_output +23385,15038829,"TERMINAL",0,0,"Step 1015, loss: 0.19595684111118317, step time: 19.69432830810547ms\r\n",,terminal_output +23386,15038942,"TERMINAL",0,0,"Step 1016, loss: 0.18987330794334412, step time: 19.09804344177246ms\r\n",,terminal_output +23387,15039004,"TERMINAL",0,0,"Step 1017, loss: 0.19376836717128754, step time: 19.701242446899414ms\r\n",,terminal_output +23388,15039112,"TERMINAL",0,0,"Step 1018, loss: 0.1867610663175583, step time: 26.38101577758789ms\r\n",,terminal_output +23389,15039165,"TERMINAL",0,0,"Step 1019, loss: 0.18666020035743713, step time: 21.620512008666992ms\r\n",,terminal_output +23390,15039259,"TERMINAL",0,0,"Step 1020, loss: 0.18583904206752777, step time: 20.971298217773438ms\r\n",,terminal_output +23391,15039365,"TERMINAL",0,0,"Step 1021, loss: 0.1838667094707489, step time: 21.915435791015625ms\r\n",,terminal_output +23392,15039429,"TERMINAL",0,0,"Step 1022, loss: 0.18042252957820892, step time: 22.21083641052246ms\r\n",,terminal_output +23393,15039535,"TERMINAL",0,0,"Step 1023, loss: 0.18663161993026733, step time: 25.431394577026367ms\r\n",,terminal_output +23394,15039643,"TERMINAL",0,0,"Step 1024, loss: 0.18874864280223846, step time: 22.011280059814453ms\r\n",,terminal_output +23395,15039697,"TERMINAL",0,0,"Step 1025, loss: 0.229585200548172, step time: 20.516395568847656ms\r\n",,terminal_output +23396,15039820,"TERMINAL",0,0,"Step 1026, loss: 0.33748236298561096, step time: 20.201683044433594ms\r\n",,terminal_output +23397,15039871,"TERMINAL",0,0,"Step 1027, loss: 0.2560233473777771, step time: 20.493030548095703ms\r\n",,terminal_output +23398,15039978,"TERMINAL",0,0,"Step 1028, loss: 0.26137086749076843, step time: 19.773483276367188ms\r\n",,terminal_output +23399,15040043,"TERMINAL",0,0,"Step 1029, loss: 0.22780726850032806, step time: 19.93250846862793ms\r\n",,terminal_output +23400,15040148,"TERMINAL",0,0,"Step 1030, loss: 0.2742219567298889, step time: 20.46942710876465ms\r\n",,terminal_output +23401,15040255,"TERMINAL",0,0,"Step 1031, loss: 0.19776864349842072, step time: 19.850492477416992ms\r\n",,terminal_output +23402,15040311,"TERMINAL",0,0,"Step 1032, loss: 
0.3094199597835541, step time: 19.751310348510742ms\r\n",,terminal_output +23403,15040402,"TERMINAL",0,0,"Step 1033, loss: 0.19072070717811584, step time: 20.1723575592041ms\r\n",,terminal_output +23404,15040510,"TERMINAL",0,0,"Step 1034, loss: 0.24769620597362518, step time: 19.929170608520508ms\r\n",,terminal_output +23405,15040571,"TERMINAL",0,0,"Step 1035, loss: 0.2297726720571518, step time: 19.744157791137695ms\r\n",,terminal_output +23406,15040677,"TERMINAL",0,0,"Step 1036, loss: 0.1890914887189865, step time: 20.30205726623535ms\r\n",,terminal_output +23407,15040742,"TERMINAL",0,0,"Step 1037, loss: 0.23411662876605988, step time: 19.649744033813477ms\r\n",,terminal_output +23408,15040849,"TERMINAL",0,0,"Step 1038, loss: 0.20307880640029907, step time: 19.672393798828125ms\r\n",,terminal_output +23409,15040955,"TERMINAL",0,0,"Step 1039, loss: 0.18441510200500488, step time: 20.388364791870117ms\r\n",,terminal_output +23410,15041021,"TERMINAL",0,0,"Step 1040, loss: 0.22527672350406647, step time: 19.660234451293945ms\r\n",,terminal_output +23411,15041129,"TERMINAL",0,0,"Step 1041, loss: 0.18279807269573212, step time: 19.63496208190918ms\r\n",,terminal_output +23412,15041180,"TERMINAL",0,0,"Step 1042, loss: 0.18497821688652039, step time: 20.059823989868164ms\r\n",,terminal_output +23413,15041290,"TERMINAL",0,0,"Step 1043, loss: 0.20619426667690277, step time: 19.15287971496582ms\r\n",,terminal_output +23414,15041398,"TERMINAL",0,0,"Step 1044, loss: 0.17578911781311035, step time: 19.88816261291504ms\r\n",,terminal_output +23415,15041453,"TERMINAL",0,0,"Step 1045, loss: 0.18402829766273499, step time: 19.578933715820312ms\r\n",,terminal_output +23416,15041561,"TERMINAL",0,0,"Step 1046, loss: 0.19038638472557068, step time: 19.408464431762695ms\r\n",,terminal_output +23417,15041626,"TERMINAL",0,0,"Step 1047, loss: 0.17199619114398956, step time: 19.489049911499023ms\r\n",,terminal_output +23418,15041733,"TERMINAL",0,0,"Step 1048, loss: 0.17910103499889374, step time: 19.626855850219727ms\r\n",,terminal_output +23419,15041800,"TERMINAL",0,0,"Step 1049, loss: 0.1802491545677185, step time: 19.810199737548828ms\r\n",,terminal_output +23420,15041884,"TERMINAL",0,0,"Step 1050, loss: 0.16770252585411072, step time: 19.02008056640625ms\r\n",,terminal_output +23421,15041998,"TERMINAL",0,0,"Step 1051, loss: 0.17555275559425354, step time: 19.33145523071289ms\r\n",,terminal_output +23422,15042057,"TERMINAL",0,0,"Step 1052, loss: 0.17119938135147095, step time: 18.970966339111328ms\r\n",,terminal_output +23423,15042165,"TERMINAL",0,0,"Step 1053, loss: 0.16544680297374725, step time: 18.496036529541016ms\r\n",,terminal_output +23424,15042226,"TERMINAL",0,0,"Step 1054, loss: 0.17019613087177277, step time: 19.562482833862305ms\r\n",,terminal_output +23425,15042339,"TERMINAL",0,0,"Step 1055, loss: 0.1642007827758789, step time: 18.383264541625977ms\r\n",,terminal_output +23426,15042402,"TERMINAL",0,0,"Step 1056, loss: 0.16473649442195892, step time: 18.74828338623047ms\r\n",,terminal_output +23427,15042509,"TERMINAL",0,0,"Step 1057, loss: 0.16618013381958008, step time: 18.74232292175293ms\r\n",,terminal_output +23428,15042616,"TERMINAL",0,0,"Step 1058, loss: 0.177974671125412, step time: 18.95737648010254ms\r\n",,terminal_output +23429,15042676,"TERMINAL",0,0,"Step 1059, loss: 0.22937753796577454, step time: 19.20008659362793ms\r\n",,terminal_output +23430,15042775,"TERMINAL",0,0,"Step 1060, loss: 0.2823147475719452, step time: 18.898725509643555ms\r\n",,terminal_output 
+23431,15042854,"TERMINAL",0,0,"Step 1061, loss: 0.19920574128627777, step time: 18.980026245117188ms\r\n",,terminal_output +23432,15042962,"TERMINAL",0,0,"Step 1062, loss: 0.25510233640670776, step time: 18.628597259521484ms\r\n",,terminal_output +23433,15043024,"TERMINAL",0,0,"Step 1063, loss: 0.18574021756649017, step time: 19.3634033203125ms\r\n",,terminal_output +23434,15043134,"TERMINAL",0,0,"Step 1064, loss: 0.2887926399707794, step time: 19.30522918701172ms\r\n",,terminal_output +23435,15043187,"TERMINAL",0,0,"Step 1065, loss: 0.17215535044670105, step time: 18.447160720825195ms\r\n",,terminal_output +23436,15043296,"TERMINAL",0,0,"Step 1066, loss: 0.33874741196632385, step time: 19.51122283935547ms\r\n",,terminal_output +23437,15043360,"TERMINAL",0,0,"Step 1067, loss: 0.1639583706855774, step time: 18.5701847076416ms\r\n",,terminal_output +23438,15043469,"TERMINAL",0,0,"Step 1068, loss: 0.28219011425971985, step time: 18.94855499267578ms\r\n",,terminal_output +23439,15043529,"TERMINAL",0,0,"Step 1069, loss: 0.19179154932498932, step time: 19.464492797851562ms\r\n",,terminal_output +23440,15043843,"TERMINAL",0,0,"Step 1070, loss: 0.18162010610103607, step time: 306.6525459289551ms\r\n",,terminal_output +23441,15043955,"TERMINAL",0,0,"Step 1071, loss: 0.24122843146324158, step time: 32.1962833404541ms\r\n",,terminal_output +23442,15044024,"TERMINAL",0,0,"Step 1072, loss: 0.17269366979599, step time: 24.11198616027832ms\r\n",,terminal_output +23443,15044135,"TERMINAL",0,0,"Step 1073, loss: 0.18603913486003876, step time: 21.432161331176758ms\r\n",,terminal_output +23444,15044198,"TERMINAL",0,0,"Step 1074, loss: 0.20477880537509918, step time: 19.306421279907227ms\r\n",,terminal_output +23445,15044307,"TERMINAL",0,0,"Step 1075, loss: 0.1671402007341385, step time: 19.591808319091797ms\r\n",,terminal_output +23446,15044374,"TERMINAL",0,0,"Step 1076, loss: 0.18697036802768707, step time: 19.92630958557129ms\r\n",,terminal_output +23447,15044487,"TERMINAL",0,0,"Step 1077, loss: 0.1806238740682602, step time: 19.13619041442871ms\r\n",,terminal_output +23448,15044546,"TERMINAL",0,0,"Step 1078, loss: 0.16308410465717316, step time: 19.611597061157227ms\r\n",,terminal_output +23449,15044661,"TERMINAL",0,0,"Step 1079, loss: 0.1790979951620102, step time: 19.845247268676758ms\r\n",,terminal_output +23450,15044723,"TERMINAL",0,0,"Step 1080, loss: 0.1678675264120102, step time: 18.625974655151367ms\r\n",,terminal_output +23451,15044833,"TERMINAL",0,0,"Step 1081, loss: 0.15777085721492767, step time: 19.710779190063477ms\r\n",,terminal_output +23452,15044898,"TERMINAL",0,0,"Step 1082, loss: 0.17044979333877563, step time: 18.787860870361328ms\r\n",,terminal_output +23453,15045016,"TERMINAL",0,0,"Step 1083, loss: 0.15927664935588837, step time: 19.44589614868164ms\r\n",,terminal_output +23454,15045069,"TERMINAL",0,0,"Step 1084, loss: 0.1536024659872055, step time: 19.958972930908203ms\r\n",,terminal_output +23455,15045182,"TERMINAL",0,0,"Step 1085, loss: 0.16141730546951294, step time: 19.802570343017578ms\r\n",,terminal_output +23456,15045246,"TERMINAL",0,0,"Step 1086, loss: 0.15137869119644165, step time: 19.423484802246094ms\r\n",,terminal_output +23457,15045354,"TERMINAL",0,0,"Step 1087, loss: 0.15200775861740112, step time: 19.123077392578125ms\r\n",,terminal_output +23458,15045419,"TERMINAL",0,0,"Step 1088, loss: 0.15329022705554962, step time: 19.697189331054688ms\r\n",,terminal_output +23459,15045530,"TERMINAL",0,0,"Step 1089, loss: 0.14595435559749603, step time: 
19.374608993530273ms\r\n",,terminal_output +23460,15045597,"TERMINAL",0,0,"Step 1090, loss: 0.1487073004245758, step time: 18.510818481445312ms\r\n",,terminal_output +23461,15045705,"TERMINAL",0,0,"Step 1091, loss: 0.14685069024562836, step time: 19.72794532775879ms\r\n",,terminal_output +23462,15045775,"TERMINAL",0,0,"Step 1092, loss: 0.14306937158107758, step time: 18.653154373168945ms\r\n",,terminal_output +23463,15045855,"TERMINAL",0,0,"Step 1093, loss: 0.1450778692960739, step time: 19.314289093017578ms\r\n",,terminal_output +23464,15045964,"TERMINAL",0,0,"Step 1094, loss: 0.145161435008049, step time: 19.654512405395508ms\r\n",,terminal_output +23465,15046027,"TERMINAL",0,0,"Step 1095, loss: 0.14827625453472137, step time: 18.864870071411133ms\r\n",,terminal_output +23466,15046138,"TERMINAL",0,0,"Step 1096, loss: 0.16833773255348206, step time: 19.35100555419922ms\r\n",,terminal_output +23467,15046206,"TERMINAL",0,0,"Step 1097, loss: 0.218843013048172, step time: 19.661664962768555ms\r\n",,terminal_output +23468,15046318,"TERMINAL",0,0,"Step 1098, loss: 0.2202063947916031, step time: 19.49024200439453ms\r\n",,terminal_output +23469,15046376,"TERMINAL",0,0,"Step 1099, loss: 0.1942296177148819, step time: 18.87345314025879ms\r\n",,terminal_output +23470,15046484,"TERMINAL",0,0,"Step 1100, loss: 0.20906031131744385, step time: 19.87290382385254ms\r\n",,terminal_output +23471,15046593,"TERMINAL",0,0,"Step 1101, loss: 0.1627381294965744, step time: 19.483089447021484ms\r\n",,terminal_output +23472,15046645,"TERMINAL",0,0,"Step 1102, loss: 0.18674805760383606, step time: 19.3479061126709ms\r\n",,terminal_output +23473,15046752,"TERMINAL",0,0,"Step 1103, loss: 0.15887314081192017, step time: 19.743919372558594ms\r\n",,terminal_output +23474,15046841,"TERMINAL",0,0,"Step 1104, loss: 0.1670888215303421, step time: 19.613981246948242ms\r\n",,terminal_output +23475,15046902,"TERMINAL",0,0,"Step 1105, loss: 0.16071245074272156, step time: 19.31142807006836ms\r\n",,terminal_output +23476,15047011,"TERMINAL",0,0,"Step 1106, loss: 0.1506151556968689, step time: 19.451379776000977ms\r\n",,terminal_output +23477,15047074,"TERMINAL",0,0,"Step 1107, loss: 0.15973512828350067, step time: 19.34504508972168ms\r\n",,terminal_output +23478,15047186,"TERMINAL",0,0,"Step 1108, loss: 0.14504429697990417, step time: 19.327163696289062ms\r\n",,terminal_output +23479,15047297,"TERMINAL",0,0,"Step 1109, loss: 0.15286090970039368, step time: 19.02604103088379ms\r\n",,terminal_output +23480,15047359,"TERMINAL",0,0,"Step 1110, loss: 0.1437441110610962, step time: 19.44255828857422ms\r\n",,terminal_output +23481,15047423,"TERMINAL",0,0,"Step 1111, loss: 0.14333854615688324, step time: 19.437551498413086ms\r\n",,terminal_output +23482,15047533,"TERMINAL",0,0,"Step 1112, loss: 0.14260518550872803, step time: 19.683837890625ms\r\n",,terminal_output +23483,15047632,"TERMINAL",0,0,"Step 1113, loss: 0.1369611620903015, step time: 19.394397735595703ms\r\n",,terminal_output +23484,15047682,"TERMINAL",0,0,"Step 1114, loss: 0.14224325120449066, step time: 18.525123596191406ms\r\n",,terminal_output +23485,15047775,"TERMINAL",0,0,"Step 1115, loss: 0.131520614027977, step time: 19.601821899414062ms\r\n",,terminal_output +23486,15047863,"TERMINAL",0,0,"Step 1116, loss: 0.1396941989660263, step time: 19.836902618408203ms\r\n",,terminal_output +23487,15047968,"TERMINAL",0,0,"Step 1117, loss: 0.12936589121818542, step time: 19.365310668945312ms\r\n",,terminal_output +23488,15048034,"TERMINAL",0,0,"Step 1118, loss: 0.1320708841085434, 
step time: 19.597291946411133ms\r\n",,terminal_output +23489,15048142,"TERMINAL",0,0,"Step 1119, loss: 0.13085417449474335, step time: 19.401073455810547ms\r\n",,terminal_output +23490,15048205,"TERMINAL",0,0,"Step 1120, loss: 0.1271640509366989, step time: 19.685983657836914ms\r\n",,terminal_output +23491,15048312,"TERMINAL",0,0,"Step 1121, loss: 0.12917017936706543, step time: 19.852161407470703ms\r\n",,terminal_output +23492,15048432,"TERMINAL",0,0,"Step 1122, loss: 0.12340952455997467, step time: 19.428253173828125ms\r\n",,terminal_output +23493,15048480,"TERMINAL",0,0,"Step 1123, loss: 0.12689726054668427, step time: 19.480228424072266ms\r\n",,terminal_output +23494,15048574,"TERMINAL",0,0,"Step 1124, loss: 0.12216703593730927, step time: 19.95706558227539ms\r\n",,terminal_output +23495,15048651,"TERMINAL",0,0,"Step 1125, loss: 0.12131670862436295, step time: 19.342899322509766ms\r\n",,terminal_output +23496,15048757,"TERMINAL",0,0,"Step 1126, loss: 0.12313178181648254, step time: 19.411563873291016ms\r\n",,terminal_output +23497,15048842,"TERMINAL",0,0,"Step 1127, loss: 0.119317427277565, step time: 19.229650497436523ms\r\n",,terminal_output +23498,15048932,"TERMINAL",0,0,"Step 1128, loss: 0.1240573525428772, step time: 19.34504508972168ms\r\n",,terminal_output +23499,15049028,"TERMINAL",0,0,"Step 1129, loss: 0.12461007386445999, step time: 19.38629150390625ms\r\n",,terminal_output +23500,15049079,"TERMINAL",0,0,"Step 1130, loss: 0.1390470564365387, step time: 19.654512405395508ms\r\n",,terminal_output +23501,15049186,"TERMINAL",0,0,"Step 1131, loss: 0.17421330511569977, step time: 19.33598518371582ms\r\n",,terminal_output +23502,15049252,"TERMINAL",0,0,"Step 1132, loss: 0.1714552491903305, step time: 19.142627716064453ms\r\n",,terminal_output +23503,15049362,"TERMINAL",0,0,"Step 1133, loss: 0.15140070021152496, step time: 19.159317016601562ms\r\n",,terminal_output +23504,15049424,"TERMINAL",0,0,"Step 1134, loss: 0.18182992935180664, step time: 19.365310668945312ms\r\n",,terminal_output +23505,15049532,"TERMINAL",0,0,"Step 1135, loss: 0.12253770977258682, step time: 19.79994773864746ms\r\n",,terminal_output +23506,15049639,"TERMINAL",0,0,"Step 1136, loss: 0.18211939930915833, step time: 19.741296768188477ms\r\n",,terminal_output +23507,15049703,"TERMINAL",0,0,"Step 1137, loss: 0.12864676117897034, step time: 19.607067108154297ms\r\n",,terminal_output +23508,15049816,"TERMINAL",0,0,"Step 1138, loss: 0.19154198467731476, step time: 19.402265548706055ms\r\n",,terminal_output +23509,15049874,"TERMINAL",0,0,"Step 1139, loss: 0.1255328357219696, step time: 19.60015296936035ms\r\n",,terminal_output +23510,15049984,"TERMINAL",0,0,"Step 1140, loss: 0.20913200080394745, step time: 19.440889358520508ms\r\n",,terminal_output +23511,15050036,"TERMINAL",0,0,"Step 1141, loss: 0.1251741200685501, step time: 19.329071044921875ms\r\n",,terminal_output +23512,15050144,"TERMINAL",0,0,"Step 1142, loss: 0.22697818279266357, step time: 19.91128921508789ms\r\n",,terminal_output +23513,15050206,"TERMINAL",0,0,"Step 1143, loss: 0.1268354058265686, step time: 18.758296966552734ms\r\n",,terminal_output +23514,15050318,"TERMINAL",0,0,"Step 1144, loss: 0.1857704222202301, step time: 19.315004348754883ms\r\n",,terminal_output +23515,15050387,"TERMINAL",0,0,"Step 1145, loss: 0.15746907889842987, step time: 19.814729690551758ms\r\n",,terminal_output +23516,15050494,"TERMINAL",0,0,"Step 1146, loss: 0.13120149075984955, step time: 18.700599670410156ms\r\n",,terminal_output +23517,15050556,"TERMINAL",0,0,"Step 1147, 
loss: 0.17370527982711792, step time: 19.321680068969727ms\r\n",,terminal_output +23518,15050670,"TERMINAL",0,0,"Step 1148, loss: 0.13233903050422668, step time: 19.87624168395996ms\r\n",,terminal_output +23519,15050738,"TERMINAL",0,0,"Step 1149, loss: 0.13318417966365814, step time: 19.255399703979492ms\r\n",,terminal_output +23520,15050846,"TERMINAL",0,0,"Step 1150, loss: 0.14880980551242828, step time: 18.4783935546875ms\r\n",,terminal_output +23521,15050901,"TERMINAL",0,0,"Step 1151, loss: 0.12732213735580444, step time: 19.853591918945312ms\r\n",,terminal_output +23522,15051009,"TERMINAL",0,0,"Step 1152, loss: 0.12912477552890778, step time: 19.41514015197754ms\r\n",,terminal_output +23523,15051115,"TERMINAL",0,0,"Step 1153, loss: 0.1330934762954712, step time: 19.285202026367188ms\r\n",,terminal_output +23524,15051166,"TERMINAL",0,0,"Step 1154, loss: 0.12217765301465988, step time: 19.62757110595703ms\r\n",,terminal_output +23525,15051271,"TERMINAL",0,0,"Step 1155, loss: 0.12470745295286179, step time: 19.347190856933594ms\r\n",,terminal_output +23526,15051378,"TERMINAL",0,0,"Step 1156, loss: 0.12287801504135132, step time: 19.415855407714844ms\r\n",,terminal_output +23527,15051430,"TERMINAL",0,0,"Step 1157, loss: 0.11682882905006409, step time: 19.254446029663086ms\r\n",,terminal_output +23528,15051536,"TERMINAL",0,0,"Step 1158, loss: 0.11971593648195267, step time: 18.854856491088867ms\r\n",,terminal_output +23529,15051599,"TERMINAL",0,0,"Step 1159, loss: 0.11617028713226318, step time: 18.991470336914062ms\r\n",,terminal_output +23530,15051709,"TERMINAL",0,0,"Step 1160, loss: 0.111526258289814, step time: 19.32501792907715ms\r\n",,terminal_output +23531,15051859,"TERMINAL",0,0,"Step 1161, loss: 0.11509737372398376, step time: 19.283056259155273ms\r\nStep 1162, loss: 0.11190485209226608, step time: 18.596649169921875ms\r\n",,terminal_output +23532,15051966,"TERMINAL",0,0,"Step 1163, loss: 0.10919848084449768, step time: 19.588470458984375ms\r\n",,terminal_output +23533,15052032,"TERMINAL",0,0,"Step 1164, loss: 0.11573384702205658, step time: 18.778324127197266ms\r\n",,terminal_output +23534,15052144,"TERMINAL",0,0,"Step 1165, loss: 0.116026371717453, step time: 19.402503967285156ms\r\n",,terminal_output +23535,15052210,"TERMINAL",0,0,"Step 1166, loss: 0.1153104305267334, step time: 19.74773406982422ms\r\n",,terminal_output +23536,15052321,"TERMINAL",0,0,"Step 1167, loss: 0.11134221404790878, step time: 18.552064895629883ms\r\n",,terminal_output +23537,15052383,"TERMINAL",0,0,"Step 1168, loss: 0.10772634297609329, step time: 19.506454467773438ms\r\n",,terminal_output +23538,15052488,"TERMINAL",0,0,"Step 1169, loss: 0.11541316658258438, step time: 18.8291072845459ms\r\n",,terminal_output +23539,15052599,"TERMINAL",0,0,"Step 1170, loss: 0.11268924176692963, step time: 18.98050308227539ms\r\n",,terminal_output +23540,15052650,"TERMINAL",0,0,"Step 1171, loss: 0.10335014760494232, step time: 18.508195877075195ms\r\n",,terminal_output +23541,15052757,"TERMINAL",0,0,"Step 1172, loss: 0.10794195532798767, step time: 19.542217254638672ms\r\n",,terminal_output +23542,15052818,"TERMINAL",0,0,"Step 1173, loss: 0.10600642114877701, step time: 19.417762756347656ms\r\n",,terminal_output +23543,15052922,"TERMINAL",0,0,"Step 1174, loss: 0.09968548268079758, step time: 19.391298294067383ms\r\n",,terminal_output +23544,15053032,"TERMINAL",0,0,"Step 1175, loss: 0.1041317954659462, step time: 20.01810073852539ms\r\n",,terminal_output +23545,15053083,"TERMINAL",0,0,"Step 1176, loss: 0.10115624219179153, 
step time: 19.481420516967773ms\r\n",,terminal_output +23546,15053188,"TERMINAL",0,0,"Step 1177, loss: 0.09708578884601593, step time: 19.077062606811523ms\r\n",,terminal_output +23547,15053252,"TERMINAL",0,0,"Step 1178, loss: 0.1008056178689003, step time: 19.16670799255371ms\r\n",,terminal_output +23548,15053358,"TERMINAL",0,0,"Step 1179, loss: 0.09746239334344864, step time: 19.060134887695312ms\r\n",,terminal_output +23549,15053465,"TERMINAL",0,0,"Step 1180, loss: 0.09373794496059418, step time: 18.675804138183594ms\r\n",,terminal_output +23550,15053517,"TERMINAL",0,0,"Step 1181, loss: 0.09871777147054672, step time: 19.71292495727539ms\r\n",,terminal_output +23551,15053633,"TERMINAL",0,0,"Step 1182, loss: 0.09831570088863373, step time: 18.907785415649414ms\r\n",,terminal_output +23552,15053684,"TERMINAL",0,0,"Step 1183, loss: 0.10045845806598663, step time: 19.086122512817383ms\r\n",,terminal_output +23553,15053780,"TERMINAL",0,0,"Step 1184, loss: 0.12740355730056763, step time: 19.55723762512207ms\r\n",,terminal_output +23554,15053862,"TERMINAL",0,0,"Step 1185, loss: 0.2944555878639221, step time: 19.38009262084961ms\r\n",,terminal_output +23555,15053969,"TERMINAL",0,0,"Step 1186, loss: 0.25000137090682983, step time: 22.28689193725586ms\r\n",,terminal_output +23556,15054078,"TERMINAL",0,0,"Step 1187, loss: 0.14890970289707184, step time: 20.657777786254883ms\r\n",,terminal_output +23557,15054130,"TERMINAL",0,0,"Step 1188, loss: 0.12386367470026016, step time: 19.23084259033203ms\r\n",,terminal_output +23558,15054236,"TERMINAL",0,0,"Step 1189, loss: 0.25493666529655457, step time: 19.433259963989258ms\r\n",,terminal_output +23559,15054298,"TERMINAL",0,0,"Step 1190, loss: 0.10256779938936234, step time: 20.04837989807129ms\r\n",,terminal_output +23560,15054405,"TERMINAL",0,0,"Step 1191, loss: 0.25667139887809753, step time: 18.961429595947266ms\r\n",,terminal_output +23561,15054469,"TERMINAL",0,0,"Step 1192, loss: 0.1267896145582199, step time: 18.932819366455078ms\r\n",,terminal_output +23562,15054575,"TERMINAL",0,0,"Step 1193, loss: 0.13245749473571777, step time: 19.2568302154541ms\r\n",,terminal_output +23563,15054638,"TERMINAL",0,0,"Step 1194, loss: 0.2137557566165924, step time: 19.40608024597168ms\r\n",,terminal_output +23564,15054745,"TERMINAL",0,0,"Step 1195, loss: 0.10631333291530609, step time: 19.75703239440918ms\r\n",,terminal_output +23565,15054823,"TERMINAL",0,0,"Step 1196, loss: 0.14789070188999176, step time: 19.31309700012207ms\r\n",,terminal_output +23566,15054929,"TERMINAL",0,0,"Step 1197, loss: 0.16124144196510315, step time: 19.589662551879883ms\r\n",,terminal_output +23567,15054993,"TERMINAL",0,0,"Step 1198, loss: 0.10872375965118408, step time: 19.283294677734375ms\r\n",,terminal_output +23568,15055102,"TERMINAL",0,0,"Step 1199, loss: 0.12447389960289001, step time: 19.405603408813477ms\r\n",,terminal_output +23569,15055161,"TERMINAL",0,0,"Step 1200, loss: 0.14808374643325806, step time: 19.500017166137695ms\r\n",,terminal_output +23570,15055268,"TERMINAL",0,0,"Step 1201, loss: 0.10580216348171234, step time: 18.734455108642578ms\r\n",,terminal_output +23571,15055378,"TERMINAL",0,0,"Step 1202, loss: 0.11527737230062485, step time: 19.893646240234375ms\r\n",,terminal_output +23572,15055430,"TERMINAL",0,0,"Step 1203, loss: 0.12970927357673645, step time: 18.621206283569336ms\r\n",,terminal_output +23573,15055538,"TERMINAL",0,0,"Step 1204, loss: 0.10639218986034393, step time: 19.573688507080078ms\r\n",,terminal_output +23574,15055600,"TERMINAL",0,0,"Step 1205, 
loss: 0.10226007550954819, step time: 18.96500587463379ms\r\n",,terminal_output +23575,15055705,"TERMINAL",0,0,"Step 1206, loss: 0.12063749879598618, step time: 19.66691017150879ms\r\n",,terminal_output +23576,15055771,"TERMINAL",0,0,"Step 1207, loss: 0.10049223154783249, step time: 19.381284713745117ms\r\n",,terminal_output +23577,15055873,"TERMINAL",0,0,"Step 1208, loss: 0.09741168469190598, step time: 20.01166343688965ms\r\n",,terminal_output +23578,15056001,"TERMINAL",0,0,"Step 1209, loss: 0.10726244747638702, step time: 19.452333450317383ms\r\n",,terminal_output +23579,15056042,"TERMINAL",0,0,"Step 1210, loss: 0.10104909539222717, step time: 19.58322525024414ms\r\n",,terminal_output +23580,15056149,"TERMINAL",0,0,"Step 1211, loss: 0.09148429334163666, step time: 20.30205726623535ms\r\n",,terminal_output +23581,15056209,"TERMINAL",0,0,"Step 1212, loss: 0.10011247545480728, step time: 19.787073135375977ms\r\n",,terminal_output +23582,15056315,"TERMINAL",0,0,"Step 1213, loss: 0.09656817466020584, step time: 19.373416900634766ms\r\n",,terminal_output +23583,15056379,"TERMINAL",0,0,"Step 1214, loss: 0.08887309581041336, step time: 19.209861755371094ms\r\n",,terminal_output +23584,15056486,"TERMINAL",0,0,"Step 1215, loss: 0.09328216314315796, step time: 19.251346588134766ms\r\n",,terminal_output +23585,15056593,"TERMINAL",0,0,"Step 1216, loss: 0.09305273741483688, step time: 19.20628547668457ms\r\n",,terminal_output +23586,15056649,"TERMINAL",0,0,"Step 1217, loss: 0.08622556924819946, step time: 19.814491271972656ms\r\n",,terminal_output +23587,15056758,"TERMINAL",0,0,"Step 1218, loss: 0.08897090703248978, step time: 19.466161727905273ms\r\n",,terminal_output +23588,15056821,"TERMINAL",0,0,"Step 1219, loss: 0.08802172541618347, step time: 21.297693252563477ms\r\n",,terminal_output +23589,15056926,"TERMINAL",0,0,"Step 1220, loss: 0.08506932109594345, step time: 19.370317459106445ms\r\n",,terminal_output +23590,15056992,"TERMINAL",0,0,"Step 1221, loss: 0.08491072803735733, step time: 19.255876541137695ms\r\n",,terminal_output +23591,15057126,"TERMINAL",0,0,"Step 1222, loss: 0.0841168612241745, step time: 18.959760665893555ms\r\n",,terminal_output +23592,15057182,"TERMINAL",0,0,"Step 1223, loss: 0.08332079648971558, step time: 19.723892211914062ms\r\n",,terminal_output +23593,15057280,"TERMINAL",0,0,"Step 1224, loss: 0.08098577708005905, step time: 19.769906997680664ms\r\n",,terminal_output +23594,15057337,"TERMINAL",0,0,"Step 1225, loss: 0.08159217238426208, step time: 18.98479461669922ms\r\n",,terminal_output +23595,15057442,"TERMINAL",0,0,"Step 1226, loss: 0.08039990067481995, step time: 20.00570297241211ms\r\n",,terminal_output +23596,15057549,"TERMINAL",0,0,"Step 1227, loss: 0.07825206220149994, step time: 19.590139389038086ms\r\n",,terminal_output +23597,15057600,"TERMINAL",0,0,"Step 1228, loss: 0.07915733009576797, step time: 18.8143253326416ms\r\n",,terminal_output +23598,15057706,"TERMINAL",0,0,"Step 1229, loss: 0.07715773582458496, step time: 19.657611846923828ms\r\n",,terminal_output +23599,15057776,"TERMINAL",0,0,"Step 1230, loss: 0.0772344246506691, step time: 19.00768280029297ms\r\n",,terminal_output +23600,15057892,"TERMINAL",0,0,"Step 1231, loss: 0.07823054492473602, step time: 18.990755081176758ms\r\n",,terminal_output +23601,15057943,"TERMINAL",0,0,"Step 1232, loss: 0.08267199993133545, step time: 19.667863845825195ms\r\n",,terminal_output +23602,15058049,"TERMINAL",0,0,"Step 1233, loss: 0.12741802632808685, step time: 18.639564514160156ms\r\n",,terminal_output 
+23603,15058157,"TERMINAL",0,0,"Step 1234, loss: 0.31346553564071655, step time: 19.519329071044922ms\r\n",,terminal_output +23604,15058212,"TERMINAL",0,0,"Step 1235, loss: 0.13090167939662933, step time: 19.084930419921875ms\r\n",,terminal_output +23605,15058319,"TERMINAL",0,0,"Step 1236, loss: 0.41662663221359253, step time: 18.873929977416992ms\r\n",,terminal_output +23606,15058383,"TERMINAL",0,0,"Step 1237, loss: 0.10142870247364044, step time: 19.5009708404541ms\r\n",,terminal_output +23607,15058492,"TERMINAL",0,0,"Step 1238, loss: 0.4843846559524536, step time: 19.045114517211914ms\r\n",,terminal_output +23608,15058546,"TERMINAL",0,0,"Step 1239, loss: 0.1981804370880127, step time: 18.59736442565918ms\r\n",,terminal_output +23609,15058646,"TERMINAL",0,0,"Step 1240, loss: 0.11229832470417023, step time: 19.47498321533203ms\r\n",,terminal_output +23610,15058755,"TERMINAL",0,0,"Step 1241, loss: 0.38614729046821594, step time: 20.386457443237305ms\r\n",,terminal_output +23611,15058818,"TERMINAL",0,0,"Step 1242, loss: 0.16138827800750732, step time: 19.56486701965332ms\r\n",,terminal_output +23612,15058925,"TERMINAL",0,0,"Step 1243, loss: 0.10914040356874466, step time: 19.40178871154785ms\r\n",,terminal_output +23613,15058988,"TERMINAL",0,0,"Step 1244, loss: 0.2226421982049942, step time: 20.00594139099121ms\r\n",,terminal_output +23614,15059097,"TERMINAL",0,0,"Step 1245, loss: 0.2131902426481247, step time: 19.385814666748047ms\r\n",,terminal_output +23615,15059159,"TERMINAL",0,0,"Step 1246, loss: 0.1120549812912941, step time: 19.672393798828125ms\r\n",,terminal_output +23616,15059267,"TERMINAL",0,0,"Step 1247, loss: 0.12018251419067383, step time: 19.74797248840332ms\r\n",,terminal_output +23617,15059331,"TERMINAL",0,0,"Step 1248, loss: 0.17650143802165985, step time: 19.513607025146484ms\r\n",,terminal_output +23618,15059438,"TERMINAL",0,0,"Step 1249, loss: 0.14941251277923584, step time: 19.46425437927246ms\r\n",,terminal_output +23619,15059545,"TERMINAL",0,0,"Step 1250, loss: 0.10038567334413528, step time: 20.042896270751953ms\r\n",,terminal_output +23620,15059602,"TERMINAL",0,0,"Step 1251, loss: 0.10972943156957626, step time: 19.576549530029297ms\r\n",,terminal_output +23621,15059688,"TERMINAL",0,0,"Step 1252, loss: 0.14457283914089203, step time: 19.422054290771484ms\r\n",,terminal_output +23622,15059801,"TERMINAL",0,0,"Step 1253, loss: 0.1251094788312912, step time: 19.115447998046875ms\r\n",,terminal_output +23623,15059852,"TERMINAL",0,0,"Step 1254, loss: 0.09617555141448975, step time: 19.304513931274414ms\r\n",,terminal_output +23624,15059961,"TERMINAL",0,0,"Step 1255, loss: 0.09578056633472443, step time: 18.877267837524414ms\r\n",,terminal_output +23625,15060024,"TERMINAL",0,0,"Step 1256, loss: 0.11705945432186127, step time: 19.719600677490234ms\r\n",,terminal_output +23626,15060132,"TERMINAL",0,0,"Step 1257, loss: 0.10993844270706177, step time: 18.650054931640625ms\r\n",,terminal_output +23627,15060198,"TERMINAL",0,0,"Step 1258, loss: 0.09044893831014633, step time: 19.538402557373047ms\r\n",,terminal_output +23628,15060306,"TERMINAL",0,0,"Step 1259, loss: 0.08952885121107101, step time: 20.33209800720215ms\r\n",,terminal_output +23629,15060421,"TERMINAL",0,0,"Step 1260, loss: 0.0998968631029129, step time: 19.65022087097168ms\r\n",,terminal_output +23630,15060473,"TERMINAL",0,0,"Step 1261, loss: 0.0977206602692604, step time: 18.915653228759766ms\r\n",,terminal_output +23631,15060579,"TERMINAL",0,0,"Step 1262, loss: 0.08723314851522446, step time: 
20.075559616088867ms\r\n",,terminal_output +23632,15060643,"TERMINAL",0,0,"Step 1263, loss: 0.08419422805309296, step time: 19.36173439025879ms\r\n",,terminal_output +23633,15060753,"TERMINAL",0,0,"Step 1264, loss: 0.08876640349626541, step time: 19.481420516967773ms\r\n",,terminal_output +23634,15060827,"TERMINAL",0,0,"Step 1265, loss: 0.08763210475444794, step time: 19.66571807861328ms\r\n",,terminal_output +23635,15060937,"TERMINAL",0,0,"Step 1266, loss: 0.0818740501999855, step time: 19.646644592285156ms\r\n",,terminal_output +23636,15061002,"TERMINAL",0,0,"Step 1267, loss: 0.0811280906200409, step time: 20.823240280151367ms\r\n",,terminal_output +23637,15061302,"TERMINAL",0,0,"Step 1268, loss: 0.08191803842782974, step time: 316.59770011901855ms\r\n",,terminal_output +23638,15061410,"TERMINAL",0,0,"Step 1269, loss: 0.08083277195692062, step time: 27.097702026367188ms\r\n",,terminal_output +23639,15061520,"TERMINAL",0,0,"Step 1270, loss: 0.07694432139396667, step time: 20.895957946777344ms\r\n",,terminal_output +23640,15061572,"TERMINAL",0,0,"Step 1271, loss: 0.07670193910598755, step time: 20.66349983215332ms\r\n",,terminal_output +23641,15061696,"TERMINAL",0,0,"Step 1272, loss: 0.0768570676445961, step time: 19.867897033691406ms\r\n",,terminal_output +23642,15061761,"TERMINAL",0,0,"Step 1273, loss: 0.07598015666007996, step time: 19.627094268798828ms\r\n",,terminal_output +23643,15061835,"TERMINAL",0,0,"Step 1274, loss: 0.07276848703622818, step time: 20.03765106201172ms\r\n",,terminal_output +23644,15061944,"TERMINAL",0,0,"Step 1275, loss: 0.07274609804153442, step time: 19.547462463378906ms\r\n",,terminal_output +23645,15062015,"TERMINAL",0,0,"Step 1276, loss: 0.07310067862272263, step time: 18.86272430419922ms\r\n",,terminal_output +23646,15062128,"TERMINAL",0,0,"Step 1277, loss: 0.07115033268928528, step time: 18.92995834350586ms\r\n",,terminal_output +23647,15062193,"TERMINAL",0,0,"Step 1278, loss: 0.07092614471912384, step time: 19.651174545288086ms\r\n",,terminal_output +23648,15062284,"TERMINAL",0,0,"Step 1279, loss: 0.0710197389125824, step time: 19.466638565063477ms\r\n",,terminal_output +23649,15062352,"TERMINAL",0,0,"Step 1280, loss: 0.06965455412864685, step time: 19.034862518310547ms\r\n",,terminal_output +23650,15062463,"TERMINAL",0,0,"Step 1281, loss: 0.06852492690086365, step time: 19.705772399902344ms\r\n",,terminal_output +23651,15062538,"TERMINAL",0,0,"Step 1282, loss: 0.06887464225292206, step time: 19.435405731201172ms\r\n",,terminal_output +23652,15062647,"TERMINAL",0,0,"Step 1283, loss: 0.0682678297162056, step time: 19.121170043945312ms\r\n",,terminal_output +23653,15062707,"TERMINAL",0,0,"Step 1284, loss: 0.06738171726465225, step time: 19.524335861206055ms\r\n",,terminal_output +23654,15062829,"TERMINAL",0,0,"Step 1285, loss: 0.06866992264986038, step time: 18.47243309020996ms\r\n",,terminal_output +23655,15062882,"TERMINAL",0,0,"Step 1286, loss: 0.06934182345867157, step time: 18.980026245117188ms\r\n",,terminal_output +23656,15062991,"TERMINAL",0,0,"Step 1287, loss: 0.06917713582515717, step time: 18.72849464416504ms\r\n",,terminal_output +23657,15063057,"TERMINAL",0,0,"Step 1288, loss: 0.06950946897268295, step time: 18.60833168029785ms\r\n",,terminal_output +23658,15063169,"TERMINAL",0,0,"Step 1289, loss: 0.07392270117998123, step time: 18.983840942382812ms\r\n",,terminal_output +23659,15063226,"TERMINAL",0,0,"Step 1290, loss: 0.10411575436592102, step time: 19.54174041748047ms\r\n",,terminal_output +23660,15063336,"TERMINAL",0,0,"Step 1291, loss: 
0.26771220564842224, step time: 19.21844482421875ms\r\n",,terminal_output +23661,15063433,"TERMINAL",0,0,"Step 1292, loss: 0.17628049850463867, step time: 19.89436149597168ms\r\n",,terminal_output +23662,15063489,"TERMINAL",0,0,"Step 1293, loss: 0.20032525062561035, step time: 18.9669132232666ms\r\n",,terminal_output +23663,15063610,"TERMINAL",0,0,"Step 1294, loss: 0.08555353432893753, step time: 19.29497718811035ms\r\n",,terminal_output +23664,15063662,"TERMINAL",0,0,"Step 1295, loss: 0.3865891993045807, step time: 19.813060760498047ms\r\n",,terminal_output +23665,15063777,"TERMINAL",0,0,"Step 1296, loss: 0.07595454901456833, step time: 18.683195114135742ms\r\n",,terminal_output +23666,15063835,"TERMINAL",0,0,"Step 1297, loss: 0.28341415524482727, step time: 19.33431625366211ms\r\n",,terminal_output +23667,15063947,"TERMINAL",0,0,"Step 1298, loss: 0.16067059338092804, step time: 19.81377601623535ms\r\n",,terminal_output +23668,15064012,"TERMINAL",0,0,"Step 1299, loss: 0.08769427239894867, step time: 24.067163467407227ms\r\n",,terminal_output +23669,15064118,"TERMINAL",0,0,"Step 1300, loss: 0.2130330204963684, step time: 20.60389518737793ms\r\n",,terminal_output +23670,15064229,"TERMINAL",0,0,"Step 1301, loss: 0.1459195464849472, step time: 19.61970329284668ms\r\n",,terminal_output +23671,15064281,"TERMINAL",0,0,"Step 1302, loss: 0.08539589494466782, step time: 19.626617431640625ms\r\n",,terminal_output +23672,15064389,"TERMINAL",0,0,"Step 1303, loss: 0.13052603602409363, step time: 18.776416778564453ms\r\n",,terminal_output +23673,15064470,"TERMINAL",0,0,"Step 1304, loss: 0.1470067799091339, step time: 19.779682159423828ms\r\n",,terminal_output +23674,15064575,"TERMINAL",0,0,"Step 1305, loss: 0.09166571497917175, step time: 18.57590675354004ms\r\n",,terminal_output +23675,15064627,"TERMINAL",0,0,"Step 1306, loss: 0.08964989334344864, step time: 19.374608993530273ms\r\n",,terminal_output +23676,15064733,"TERMINAL",0,0,"Step 1307, loss: 0.12153639644384384, step time: 19.088029861450195ms\r\n",,terminal_output +23677,15064810,"TERMINAL",0,0,"Step 1308, loss: 0.10648660361766815, step time: 19.388675689697266ms\r\n",,terminal_output +23678,15064918,"TERMINAL",0,0,"Step 1309, loss: 0.07732855528593063, step time: 19.509315490722656ms\r\n",,terminal_output +23679,15064980,"TERMINAL",0,0,"Step 1310, loss: 0.08918848633766174, step time: 18.951416015625ms\r\n",,terminal_output +23680,15065073,"TERMINAL",0,0,"Step 1311, loss: 0.10462592542171478, step time: 19.46234703063965ms\r\n",,terminal_output +23681,15065179,"TERMINAL",0,0,"Step 1312, loss: 0.08510556071996689, step time: 18.537044525146484ms\r\n",,terminal_output +23682,15065232,"TERMINAL",0,0,"Step 1313, loss: 0.07145500183105469, step time: 19.700050354003906ms\r\n",,terminal_output +23683,15065329,"TERMINAL",0,0,"Step 1314, loss: 0.08316969126462936, step time: 18.53179931640625ms\r\n",,terminal_output +23684,15065435,"TERMINAL",0,0,"Step 1315, loss: 0.09071194380521774, step time: 19.405603408813477ms\r\n",,terminal_output +23685,15065493,"TERMINAL",0,0,"Step 1316, loss: 0.072279192507267, step time: 19.798994064331055ms\r\n",,terminal_output +23686,15065605,"TERMINAL",0,0,"Step 1317, loss: 0.06781569123268127, step time: 19.38486099243164ms\r\n",,terminal_output +23687,15065698,"TERMINAL",0,0,"Step 1318, loss: 0.07933083176612854, step time: 19.066810607910156ms\r\n",,terminal_output +23688,15065759,"TERMINAL",0,0,"Step 1319, loss: 0.07759314775466919, step time: 19.74797248840332ms\r\n",,terminal_output 
+23689,15065842,"TERMINAL",0,0,"Step 1320, loss: 0.06610605865716934, step time: 19.060373306274414ms\r\n",,terminal_output +23690,15065949,"TERMINAL",0,0,"Step 1321, loss: 0.06616725772619247, step time: 19.379138946533203ms\r\n",,terminal_output +23691,15066058,"TERMINAL",0,0,"Step 1322, loss: 0.07304640114307404, step time: 20.040273666381836ms\r\n",,terminal_output +23692,15066110,"TERMINAL",0,0,"Step 1323, loss: 0.06852271407842636, step time: 18.83864402770996ms\r\n",,terminal_output +23693,15066217,"TERMINAL",0,0,"Step 1324, loss: 0.06340433657169342, step time: 19.586563110351562ms\r\n",,terminal_output +23694,15066280,"TERMINAL",0,0,"Step 1325, loss: 0.06400544196367264, step time: 19.756317138671875ms\r\n",,terminal_output +23695,15066375,"TERMINAL",0,0,"Step 1326, loss: 0.06652182340621948, step time: 19.65618133544922ms\r\n",,terminal_output +23696,15066483,"TERMINAL",0,0,"Step 1327, loss: 0.06271756440401077, step time: 18.95618438720703ms\r\n",,terminal_output +23697,15066537,"TERMINAL",0,0,"Step 1328, loss: 0.06064559146761894, step time: 19.895076751708984ms\r\n",,terminal_output +23698,15066629,"TERMINAL",0,0,"Step 1329, loss: 0.06153002381324768, step time: 18.81575584411621ms\r\n",,terminal_output +23699,15066745,"TERMINAL",0,0,"Step 1330, loss: 0.06077580526471138, step time: 19.29950714111328ms\r\n",,terminal_output +23700,15066796,"TERMINAL",0,0,"Step 1331, loss: 0.059028059244155884, step time: 19.5772647857666ms\r\n",,terminal_output +23701,15066912,"TERMINAL",0,0,"Step 1332, loss: 0.058329515159130096, step time: 18.805503845214844ms\r\n",,terminal_output +23702,15066970,"TERMINAL",0,0,"Step 1333, loss: 0.05778636410832405, step time: 18.555164337158203ms\r\n",,terminal_output +23703,15067079,"TERMINAL",0,0,"Step 1334, loss: 0.05703061446547508, step time: 19.66261863708496ms\r\n",,terminal_output +23704,15067188,"TERMINAL",0,0,"Step 1335, loss: 0.05589127540588379, step time: 18.912076950073242ms\r\n",,terminal_output +23705,15067241,"TERMINAL",0,0,"Step 1336, loss: 0.0552256740629673, step time: 19.550800323486328ms\r\n",,terminal_output +23706,15067348,"TERMINAL",0,0,"Step 1337, loss: 0.05494597926735878, step time: 19.681692123413086ms\r\n",,terminal_output +23707,15067411,"TERMINAL",0,0,"Step 1338, loss: 0.05373893678188324, step time: 19.29950714111328ms\r\n",,terminal_output +23708,15067521,"TERMINAL",0,0,"Step 1339, loss: 0.05356321483850479, step time: 18.49508285522461ms\r\n",,terminal_output +23709,15067584,"TERMINAL",0,0,"Step 1340, loss: 0.05301513895392418, step time: 19.47164535522461ms\r\n",,terminal_output +23710,15067695,"TERMINAL",0,0,"Step 1341, loss: 0.05145013704895973, step time: 18.556833267211914ms\r\n",,terminal_output +23711,15067755,"TERMINAL",0,0,"Step 1342, loss: 0.05126015841960907, step time: 19.115686416625977ms\r\n",,terminal_output +23712,15067842,"TERMINAL",0,0,"Step 1343, loss: 0.05073101073503494, step time: 18.75925064086914ms\r\n",,terminal_output +23713,15067953,"TERMINAL",0,0,"Step 1344, loss: 0.05085717886686325, step time: 19.180774688720703ms\r\n",,terminal_output +23714,15068013,"TERMINAL",0,0,"Step 1345, loss: 0.05245387181639671, step time: 18.725872039794922ms\r\n",,terminal_output +23715,15068125,"TERMINAL",0,0,"Step 1346, loss: 0.06467815488576889, step time: 19.689559936523438ms\r\n",,terminal_output +23716,15068190,"TERMINAL",0,0,"Step 1347, loss: 0.21164046227931976, step time: 18.4783935546875ms\r\n",,terminal_output +23717,15068288,"TERMINAL",0,0,"Step 1348, loss: 0.28231486678123474, step time: 
19.369840621948242ms\r\n",,terminal_output +23718,15068397,"TERMINAL",0,0,"Step 1349, loss: 0.11774428933858871, step time: 18.897294998168945ms\r\n",,terminal_output +23719,15068459,"TERMINAL",0,0,"Step 1350, loss: 0.13146109879016876, step time: 18.627405166625977ms\r\n",,terminal_output +23720,15068566,"TERMINAL",0,0,"Step 1351, loss: 0.16153484582901, step time: 18.662452697753906ms\r\n",,terminal_output +23721,15068634,"TERMINAL",0,0,"Step 1352, loss: 0.07246782630681992, step time: 19.603967666625977ms\r\n",,terminal_output +23722,15068745,"TERMINAL",0,0,"Step 1353, loss: 0.16200879216194153, step time: 18.7530517578125ms\r\n",,terminal_output +23723,15068819,"TERMINAL",0,0,"Step 1354, loss: 0.10520026832818985, step time: 18.900156021118164ms\r\n",,terminal_output +23724,15068881,"TERMINAL",0,0,"Step 1355, loss: 0.08297416567802429, step time: 18.941402435302734ms\r\n",,terminal_output +23725,15068991,"TERMINAL",0,0,"Step 1356, loss: 0.10605909675359726, step time: 19.28234100341797ms\r\n",,terminal_output +23726,15069057,"TERMINAL",0,0,"Step 1357, loss: 0.10145103186368942, step time: 25.07495880126953ms\r\n",,terminal_output +23727,15069169,"TERMINAL",0,0,"Step 1358, loss: 0.07547987252473831, step time: 25.10523796081543ms\r\n",,terminal_output +23728,15069232,"TERMINAL",0,0,"Step 1359, loss: 0.08355465531349182, step time: 27.26125717163086ms\r\n",,terminal_output +23729,15069348,"TERMINAL",0,0,"Step 1360, loss: 0.08830444514751434, step time: 28.250932693481445ms\r\n",,terminal_output +23730,15069457,"TERMINAL",0,0,"Step 1361, loss: 0.07339774817228317, step time: 28.233051300048828ms\r\n",,terminal_output +23731,15069510,"TERMINAL",0,0,"Step 1362, loss: 0.06812550127506256, step time: 21.461009979248047ms\r\n",,terminal_output +23732,15069615,"TERMINAL",0,0,"Step 1363, loss: 0.07713551819324493, step time: 20.541667938232422ms\r\n",,terminal_output +23733,15069676,"TERMINAL",0,0,"Step 1364, loss: 0.07427239418029785, step time: 20.10178565979004ms\r\n",,terminal_output +23734,15069785,"TERMINAL",0,0,"Step 1365, loss: 0.061416640877723694, step time: 19.870519638061523ms\r\n",,terminal_output +23735,15069850,"TERMINAL",0,0,"Step 1366, loss: 0.06431568413972855, step time: 18.734216690063477ms\r\n",,terminal_output +23736,15069957,"TERMINAL",0,0,"Step 1367, loss: 0.0687381699681282, step time: 20.09749412536621ms\r\n",,terminal_output +23737,15070075,"TERMINAL",0,0,"Step 1368, loss: 0.06369426101446152, step time: 19.307851791381836ms\r\n",,terminal_output +23738,15070123,"TERMINAL",0,0,"Step 1369, loss: 0.05681563913822174, step time: 19.58918571472168ms\r\n",,terminal_output +23739,15070228,"TERMINAL",0,0,"Step 1370, loss: 0.05934618040919304, step time: 19.652605056762695ms\r\n",,terminal_output +23740,15070290,"TERMINAL",0,0,"Step 1371, loss: 0.06179547309875488, step time: 19.765377044677734ms\r\n",,terminal_output +23741,15070395,"TERMINAL",0,0,"Step 1372, loss: 0.056464195251464844, step time: 18.82338523864746ms\r\n",,terminal_output +23742,15070461,"TERMINAL",0,0,"Step 1373, loss: 0.054010406136512756, step time: 20.066261291503906ms\r\n",,terminal_output +23743,15070569,"TERMINAL",0,0,"Step 1374, loss: 0.055072687566280365, step time: 19.111156463623047ms\r\n",,terminal_output +23744,15070668,"TERMINAL",0,0,"Step 1375, loss: 0.05446478724479675, step time: 19.722938537597656ms\r\n",,terminal_output +23745,15070720,"TERMINAL",0,0,"Step 1376, loss: 0.052581824362277985, step time: 19.359111785888672ms\r\n",,terminal_output +23746,15070823,"TERMINAL",0,0,"Step 1377, loss: 
0.052083730697631836, step time: 19.69003677368164ms\r\n",,terminal_output +23747,15070928,"TERMINAL",0,0,"Step 1378, loss: 0.05075928568840027, step time: 20.017147064208984ms\r\n",,terminal_output +23748,15070993,"TERMINAL",0,0,"Step 1379, loss: 0.050203513354063034, step time: 20.188331604003906ms\r\n",,terminal_output +23749,15071091,"TERMINAL",0,0,"Step 1380, loss: 0.04981658235192299, step time: 18.863916397094727ms\r\n",,terminal_output +23750,15071158,"TERMINAL",0,0,"Step 1381, loss: 0.04857140779495239, step time: 19.763708114624023ms\r\n",,terminal_output +23751,15071263,"TERMINAL",0,0,"Step 1382, loss: 0.047369975596666336, step time: 19.31142807006836ms\r\n",,terminal_output +23752,15071323,"TERMINAL",0,0,"Step 1383, loss: 0.04773706570267677, step time: 19.05226707458496ms\r\n",,terminal_output +23753,15071435,"TERMINAL",0,0,"Step 1384, loss: 0.04672318324446678, step time: 18.80478858947754ms\r\n",,terminal_output +23754,15071499,"TERMINAL",0,0,"Step 1385, loss: 0.04551061987876892, step time: 19.85931396484375ms\r\n",,terminal_output +23755,15071592,"TERMINAL",0,0,"Step 1386, loss: 0.045431219041347504, step time: 18.651962280273438ms\r\n",,terminal_output +23756,15071699,"TERMINAL",0,0,"Step 1387, loss: 0.044912371784448624, step time: 19.101381301879883ms\r\n",,terminal_output +23757,15071758,"TERMINAL",0,0,"Step 1388, loss: 0.04397706314921379, step time: 19.93107795715332ms\r\n",,terminal_output +23758,15071872,"TERMINAL",0,0,"Step 1389, loss: 0.043385859578847885, step time: 18.845558166503906ms\r\n",,terminal_output +23759,15071934,"TERMINAL",0,0,"Step 1390, loss: 0.043015915900468826, step time: 20.870447158813477ms\r\n",,terminal_output +23760,15072027,"TERMINAL",0,0,"Step 1391, loss: 0.04245232790708542, step time: 19.99664306640625ms\r\n",,terminal_output +23761,15072139,"TERMINAL",0,0,"Step 1392, loss: 0.041655875742435455, step time: 19.559621810913086ms\r\n",,terminal_output +23762,15072193,"TERMINAL",0,0,"Step 1393, loss: 0.041268520057201385, step time: 18.996000289916992ms\r\n",,terminal_output +23763,15072285,"TERMINAL",0,0,"Step 1394, loss: 0.04078574851155281, step time: 19.875764846801758ms\r\n",,terminal_output +23764,15072392,"TERMINAL",0,0,"Step 1395, loss: 0.04051332548260689, step time: 19.09160614013672ms\r\n",,terminal_output +23765,15072454,"TERMINAL",0,0,"Step 1396, loss: 0.04092441871762276, step time: 19.666194915771484ms\r\n",,terminal_output +23766,15072563,"TERMINAL",0,0,"Step 1397, loss: 0.045049771666526794, step time: 19.470930099487305ms\r\n",,terminal_output +23767,15072628,"TERMINAL",0,0,"Step 1398, loss: 0.07943470031023026, step time: 19.726037979125977ms\r\n",,terminal_output +23768,15072741,"TERMINAL",0,0,"Step 1399, loss: 0.32299357652664185, step time: 19.689321517944336ms\r\n",,terminal_output +23769,15072811,"TERMINAL",0,0,"Step 1400, loss: 0.13925623893737793, step time: 19.48261260986328ms\r\n",,terminal_output +23770,15072922,"TERMINAL",0,0,"Step 1401, loss: 0.5426580309867859, step time: 19.86837387084961ms\r\n",,terminal_output +23771,15072983,"TERMINAL",0,0,"Step 1402, loss: 0.049454186111688614, step time: 19.211292266845703ms\r\n",,terminal_output +23772,15073094,"TERMINAL",0,0,"Step 1403, loss: 0.537922739982605, step time: 19.74177360534668ms\r\n",,terminal_output +23773,15073153,"TERMINAL",0,0,"Step 1404, loss: 0.24218741059303284, step time: 19.631624221801758ms\r\n",,terminal_output +23774,15073265,"TERMINAL",0,0,"Step 1405, loss: 0.06201714277267456, step time: 19.23990249633789ms\r\n",,terminal_output 
+23775,15073323,"TERMINAL",0,0,"Step 1406, loss: 0.26000088453292847, step time: 19.99044418334961ms\r\n",,terminal_output +23776,15073432,"TERMINAL",0,0,"Step 1407, loss: 0.27995404601097107, step time: 18.74995231628418ms\r\n",,terminal_output +23777,15073539,"TERMINAL",0,0,"Step 1408, loss: 0.07271653413772583, step time: 19.295692443847656ms\r\n",,terminal_output +23778,15073621,"TERMINAL",0,0,"Step 1409, loss: 0.08108867704868317, step time: 19.989728927612305ms\r\n",,terminal_output +23779,15073675,"TERMINAL",0,0,"Step 1410, loss: 0.22661933302879333, step time: 19.518613815307617ms\r\n",,terminal_output +23780,15073783,"TERMINAL",0,0,"Step 1411, loss: 0.1634153574705124, step time: 18.686771392822266ms\r\n",,terminal_output +23781,15073847,"TERMINAL",0,0,"Step 1412, loss: 0.06860247999429703, step time: 19.878387451171875ms\r\n",,terminal_output +23782,15073957,"TERMINAL",0,0,"Step 1413, loss: 0.07077401131391525, step time: 19.581079483032227ms\r\n",,terminal_output +23783,15074023,"TERMINAL",0,0,"Step 1414, loss: 0.14173389971256256, step time: 22.304296493530273ms\r\n",,terminal_output +23784,15074128,"TERMINAL",0,0,"Step 1415, loss: 0.13127785921096802, step time: 20.227909088134766ms\r\n",,terminal_output +23785,15074235,"TERMINAL",0,0,"Step 1416, loss: 0.06880078464746475, step time: 19.573450088500977ms\r\n",,terminal_output +23786,15074289,"TERMINAL",0,0,"Step 1417, loss: 0.060165002942085266, step time: 19.840717315673828ms\r\n",,terminal_output +23787,15074398,"TERMINAL",0,0,"Step 1418, loss: 0.0875486359000206, step time: 20.122766494750977ms\r\n",,terminal_output +23788,15074462,"TERMINAL",0,0,"Step 1419, loss: 0.1103452518582344, step time: 19.20628547668457ms\r\n",,terminal_output +23789,15074571,"TERMINAL",0,0,"Step 1420, loss: 0.07593447715044022, step time: 18.785715103149414ms\r\n",,terminal_output +23790,15074637,"TERMINAL",0,0,"Step 1421, loss: 0.05478524789214134, step time: 19.16980743408203ms\r\n",,terminal_output +23791,15074746,"TERMINAL",0,0,"Step 1422, loss: 0.06034684181213379, step time: 19.66118812561035ms\r\n",,terminal_output +23792,15074830,"TERMINAL",0,0,"Step 1423, loss: 0.07835375517606735, step time: 19.46091651916504ms\r\n",,terminal_output +23793,15074939,"TERMINAL",0,0,"Step 1424, loss: 0.07974466681480408, step time: 20.130157470703125ms\r\n",,terminal_output +23794,15075001,"TERMINAL",0,0,"Step 1425, loss: 0.05716791749000549, step time: 19.962787628173828ms\r\n",,terminal_output +23795,15075111,"TERMINAL",0,0,"Step 1426, loss: 0.049466073513031006, step time: 19.811153411865234ms\r\n",,terminal_output +23796,15075164,"TERMINAL",0,0,"Step 1427, loss: 0.0566883385181427, step time: 20.000219345092773ms\r\n",,terminal_output +23797,15075273,"TERMINAL",0,0,"Step 1428, loss: 0.06684780865907669, step time: 19.634008407592773ms\r\n",,terminal_output +23798,15075336,"TERMINAL",0,0,"Step 1429, loss: 0.060655247420072556, step time: 19.487619400024414ms\r\n",,terminal_output +23799,15075444,"TERMINAL",0,0,"Step 1430, loss: 0.049088604748249054, step time: 20.166397094726562ms\r\n",,terminal_output +23800,15075506,"TERMINAL",0,0,"Step 1431, loss: 0.04677435755729675, step time: 19.84381675720215ms\r\n",,terminal_output +23801,15075708,"TERMINAL",0,0,"Step 1432, loss: 0.05179796740412712, step time: 19.472599029541016ms\r\nStep 1433, loss: 0.05594916269183159, step time: 19.19412612915039ms\r\n",,terminal_output +23802,15075767,"TERMINAL",0,0,"Step 1434, loss: 0.05027669668197632, step time: 19.655466079711914ms\r\n",,terminal_output 
+23803,15075854,"TERMINAL",0,0,"Step 1435, loss: 0.04405367001891136, step time: 19.640207290649414ms\r\n",,terminal_output +23804,15076172,"TERMINAL",0,0,"Step 1436, loss: 0.04459216073155403, step time: 314.26024436950684ms\r\n",,terminal_output +23805,15076284,"TERMINAL",0,0,"Step 1437, loss: 0.048091597855091095, step time: 26.663780212402344ms\r\n",,terminal_output +23806,15076393,"TERMINAL",0,0,"Step 1438, loss: 0.04756281524896622, step time: 22.004365921020508ms\r\n",,terminal_output +23807,15076450,"TERMINAL",0,0,"Step 1439, loss: 0.04276585951447487, step time: 20.186662673950195ms\r\n",,terminal_output +23808,15076555,"TERMINAL",0,0,"Step 1440, loss: 0.041241031140089035, step time: 19.06895637512207ms\r\n",,terminal_output +23809,15076619,"TERMINAL",0,0,"Step 1441, loss: 0.042904749512672424, step time: 19.405364990234375ms\r\n",,terminal_output +23810,15076725,"TERMINAL",0,0,"Step 1442, loss: 0.04373686760663986, step time: 20.26653289794922ms\r\n",,terminal_output +23811,15076821,"TERMINAL",0,0,"Step 1443, loss: 0.04058205708861351, step time: 19.38009262084961ms\r\n",,terminal_output +23812,15076881,"TERMINAL",0,0,"Step 1444, loss: 0.038994282484054565, step time: 19.74177360534668ms\r\n",,terminal_output +23813,15076970,"TERMINAL",0,0,"Step 1445, loss: 0.039714884012937546, step time: 19.744396209716797ms\r\n",,terminal_output +23814,15077076,"TERMINAL",0,0,"Step 1446, loss: 0.040100328624248505, step time: 20.628690719604492ms\r\n",,terminal_output +23815,15077135,"TERMINAL",0,0,"Step 1447, loss: 0.038331616669893265, step time: 20.056962966918945ms\r\n",,terminal_output +23816,15077244,"TERMINAL",0,0,"Step 1448, loss: 0.03736840561032295, step time: 19.298315048217773ms\r\n",,terminal_output +23817,15077314,"TERMINAL",0,0,"Step 1449, loss: 0.03725730627775192, step time: 19.787073135375977ms\r\n",,terminal_output +23818,15077424,"TERMINAL",0,0,"Step 1450, loss: 0.03696924075484276, step time: 18.845319747924805ms\r\n",,terminal_output +23819,15077482,"TERMINAL",0,0,"Step 1451, loss: 0.03599729388952255, step time: 20.2178955078125ms\r\n",,terminal_output +23820,15077590,"TERMINAL",0,0,"Step 1452, loss: 0.03564661741256714, step time: 19.654035568237305ms\r\n",,terminal_output +23821,15077656,"TERMINAL",0,0,"Step 1453, loss: 0.03503404185175896, step time: 19.683837890625ms\r\n",,terminal_output +23822,15077774,"TERMINAL",0,0,"Step 1454, loss: 0.03435848653316498, step time: 19.405126571655273ms\r\n",,terminal_output +23823,15077830,"TERMINAL",0,0,"Step 1455, loss: 0.034251365810632706, step time: 19.67763900756836ms\r\n",,terminal_output +23824,15077940,"TERMINAL",0,0,"Step 1456, loss: 0.03354339674115181, step time: 19.123554229736328ms\r\n",,terminal_output +23825,15078004,"TERMINAL",0,0,"Step 1457, loss: 0.033053919672966, step time: 20.149946212768555ms\r\n",,terminal_output +23826,15078123,"TERMINAL",0,0,"Step 1458, loss: 0.03326420485973358, step time: 19.590139389038086ms\r\n",,terminal_output +23827,15078174,"TERMINAL",0,0,"Step 1459, loss: 0.03361796587705612, step time: 18.746137619018555ms\r\n",,terminal_output +23828,15078284,"TERMINAL",0,0,"Step 1460, loss: 0.03667415678501129, step time: 20.449399948120117ms\r\n",,terminal_output +23829,15078392,"TERMINAL",0,0,"Step 1461, loss: 0.05554695799946785, step time: 19.620656967163086ms\r\n",,terminal_output +23830,15078447,"TERMINAL",0,0,"Step 1462, loss: 0.19288115203380585, step time: 19.669532775878906ms\r\n",,terminal_output +23831,15078542,"TERMINAL",0,0,"Step 1463, loss: 0.15017585456371307, step time: 
19.614696502685547ms\r\n",,terminal_output +23832,15078636,"TERMINAL",0,0,"Step 1464, loss: 0.13603384792804718, step time: 19.078731536865234ms\r\n",,terminal_output +23833,15078734,"TERMINAL",0,0,"Step 1465, loss: 0.0463249534368515, step time: 19.40155029296875ms\r\n",,terminal_output +23834,15078786,"TERMINAL",0,0,"Step 1466, loss: 0.32395076751708984, step time: 19.735336303710938ms\r\n",,terminal_output +23835,15078909,"TERMINAL",0,0,"Step 1467, loss: 0.04926440119743347, step time: 19.774675369262695ms\r\n",,terminal_output +23836,15078961,"TERMINAL",0,0,"Step 1468, loss: 0.14361201226711273, step time: 19.471406936645508ms\r\n",,terminal_output +23837,15079068,"TERMINAL",0,0,"Step 1469, loss: 0.21493059396743774, step time: 19.063234329223633ms\r\n",,terminal_output +23838,15079176,"TERMINAL",0,0,"Step 1470, loss: 0.048752084374427795, step time: 19.639253616333008ms\r\n",,terminal_output +23839,15079228,"TERMINAL",0,0,"Step 1471, loss: 0.12405388057231903, step time: 19.347190856933594ms\r\n",,terminal_output +23840,15079335,"TERMINAL",0,0,"Step 1472, loss: 0.1632622927427292, step time: 19.231081008911133ms\r\n",,terminal_output +23841,15079403,"TERMINAL",0,0,"Step 1473, loss: 0.06407909840345383, step time: 19.0277099609375ms\r\n",,terminal_output +23842,15079514,"TERMINAL",0,0,"Step 1474, loss: 0.06183769181370735, step time: 19.292116165161133ms\r\n",,terminal_output +23843,15079568,"TERMINAL",0,0,"Step 1475, loss: 0.11296825855970383, step time: 20.427227020263672ms\r\n",,terminal_output +23844,15079680,"TERMINAL",0,0,"Step 1476, loss: 0.09687206894159317, step time: 19.740581512451172ms\r\n",,terminal_output +23845,15079744,"TERMINAL",0,0,"Step 1477, loss: 0.055251166224479675, step time: 19.59991455078125ms\r\n",,terminal_output +23846,15079852,"TERMINAL",0,0,"Step 1478, loss: 0.054331839084625244, step time: 20.009517669677734ms\r\n",,terminal_output +23847,15079954,"TERMINAL",0,0,"Step 1479, loss: 0.08312059938907623, step time: 19.11473274230957ms\r\n",,terminal_output +23848,15080016,"TERMINAL",0,0,"Step 1480, loss: 0.0728742852807045, step time: 19.53721046447754ms\r\n",,terminal_output +23849,15080127,"TERMINAL",0,0,"Step 1481, loss: 0.05453610047698021, step time: 19.526958465576172ms\r\n",,terminal_output +23850,15080193,"TERMINAL",0,0,"Step 1482, loss: 0.04964284971356392, step time: 19.54793930053711ms\r\n",,terminal_output +23851,15080350,"TERMINAL",0,0,"Step 1483, loss: 0.05611148104071617, step time: 18.850088119506836ms\r\nStep 1484, loss: 0.0611787773668766, step time: 19.893646240234375ms\r\n",,terminal_output +23852,15080448,"TERMINAL",0,0,"Step 1485, loss: 0.05161042883992195, step time: 19.733190536499023ms\r\n",,terminal_output +23853,15080553,"TERMINAL",0,0,"Step 1486, loss: 0.04501735046505928, step time: 19.510746002197266ms\r\n",,terminal_output +23854,15080615,"TERMINAL",0,0,"Step 1487, loss: 0.04569476842880249, step time: 19.84882354736328ms\r\n",,terminal_output +23855,15080726,"TERMINAL",0,0,"Step 1488, loss: 0.05046606436371803, step time: 19.414186477661133ms\r\n",,terminal_output +23856,15080791,"TERMINAL",0,0,"Step 1489, loss: 0.04731499403715134, step time: 19.46711540222168ms\r\n",,terminal_output +23857,15080876,"TERMINAL",0,0,"Step 1490, loss: 0.04203900694847107, step time: 21.703481674194336ms\r\n",,terminal_output +23858,15080985,"TERMINAL",0,0,"Step 1491, loss: 0.04069337993860245, step time: 19.307374954223633ms\r\n",,terminal_output +23859,15081057,"TERMINAL",0,0,"Step 1492, loss: 0.04111139848828316, step time: 
19.17243003845215ms\r\n",,terminal_output +23860,15081165,"TERMINAL",0,0,"Step 1493, loss: 0.04205356910824776, step time: 19.336462020874023ms\r\n",,terminal_output +23861,15081226,"TERMINAL",0,0,"Step 1494, loss: 0.039581574499607086, step time: 19.484519958496094ms\r\n",,terminal_output +23862,15081334,"TERMINAL",0,0,"Step 1495, loss: 0.036518361419439316, step time: 19.722700119018555ms\r\n",,terminal_output +23863,15081402,"TERMINAL",0,0,"Step 1496, loss: 0.037591058760881424, step time: 19.455671310424805ms\r\n",,terminal_output +23864,15081518,"TERMINAL",0,0,"Step 1497, loss: 0.037326812744140625, step time: 19.220590591430664ms\r\n",,terminal_output +23865,15081575,"TERMINAL",0,0,"Step 1498, loss: 0.036335550248622894, step time: 19.62447166442871ms\r\n",,terminal_output +23866,15081683,"TERMINAL",0,0,"Step 1499, loss: 0.0344192311167717, step time: 19.057750701904297ms\r\n",,terminal_output +23867,15084583,"TERMINAL",0,0,"Step 1500, loss: 0.034122589975595474, step time: 25.89273452758789ms\r\nStep 1501, loss: 0.034269727766513824, step time: 25.473594665527344ms\r\n",,terminal_output +23868,15084695,"TERMINAL",0,0,"Step 1502, loss: 0.03342524915933609, step time: 21.132707595825195ms\r\n",,terminal_output +23869,15084756,"TERMINAL",0,0,"Step 1503, loss: 0.03222730755805969, step time: 20.866870880126953ms\r\n",,terminal_output +23870,15084862,"TERMINAL",0,0,"Step 1504, loss: 0.03205859288573265, step time: 19.931316375732422ms\r\n",,terminal_output +23871,15084968,"TERMINAL",0,0,"Step 1505, loss: 0.031547412276268005, step time: 20.192861557006836ms\r\n",,terminal_output +23872,15085030,"TERMINAL",0,0,"Step 1506, loss: 0.03105311095714569, step time: 19.49763298034668ms\r\n",,terminal_output +23873,15085139,"TERMINAL",0,0,"Step 1507, loss: 0.030290517956018448, step time: 19.31309700012207ms\r\n",,terminal_output +23874,15085193,"TERMINAL",0,0,"Step 1508, loss: 0.030260227620601654, step time: 19.895076751708984ms\r\n",,terminal_output +23875,15085302,"TERMINAL",0,0,"Step 1509, loss: 0.029550641775131226, step time: 19.642353057861328ms\r\n",,terminal_output +23876,15085368,"TERMINAL",0,0,"Step 1510, loss: 0.028929494321346283, step time: 19.362211227416992ms\r\n",,terminal_output +23877,15085476,"TERMINAL",0,0,"Step 1511, loss: 0.028710758313536644, step time: 19.763469696044922ms\r\n",,terminal_output +23878,15085538,"TERMINAL",0,0,"Step 1512, loss: 0.028633486479520798, step time: 17.529010772705078ms\r\n",,terminal_output +23879,15085645,"TERMINAL",0,0,"Step 1513, loss: 0.027615994215011597, step time: 18.123149871826172ms\r\n",,terminal_output +23880,15085755,"TERMINAL",0,0,"Step 1514, loss: 0.027376269921660423, step time: 18.11838150024414ms\r\n",,terminal_output +23881,15085807,"TERMINAL",0,0,"Step 1515, loss: 0.027190513908863068, step time: 17.735958099365234ms\r\n",,terminal_output +23882,15085912,"TERMINAL",0,0,"Step 1516, loss: 0.026813648641109467, step time: 19.84095573425293ms\r\n",,terminal_output +23883,15085970,"TERMINAL",0,0,"Step 1517, loss: 0.026074238121509552, step time: 19.194364547729492ms\r\n",,terminal_output +23884,15086080,"TERMINAL",0,0,"Step 1518, loss: 0.026002930477261543, step time: 17.73524284362793ms\r\n",,terminal_output +23885,15086138,"TERMINAL",0,0,"Step 1519, loss: 0.025817040354013443, step time: 17.925024032592773ms\r\n",,terminal_output +23886,15086245,"TERMINAL",0,0,"Step 1520, loss: 0.025098081678152084, step time: 18.922090530395508ms\r\n",,terminal_output +23887,15086355,"TERMINAL",0,0,"Step 1521, loss: 0.024932553991675377, step 
time: 18.05424690246582ms\r\n",,terminal_output +23888,15086408,"TERMINAL",0,0,"Step 1522, loss: 0.024696925655007362, step time: 17.84658432006836ms\r\n",,terminal_output +23889,15086516,"TERMINAL",0,0,"Step 1523, loss: 0.024355771020054817, step time: 18.22805404663086ms\r\n",,terminal_output +23890,15086580,"TERMINAL",0,0,"Step 1524, loss: 0.02446988970041275, step time: 17.395973205566406ms\r\n",,terminal_output +23891,15086691,"TERMINAL",0,0,"Step 1525, loss: 0.026038112118840218, step time: 18.30124855041504ms\r\n",,terminal_output +23892,15086745,"TERMINAL",0,0,"Step 1526, loss: 0.043797414749860764, step time: 18.555641174316406ms\r\n",,terminal_output +23893,15086856,"TERMINAL",0,0,"Step 1527, loss: 0.35253533720970154, step time: 18.041133880615234ms\r\n",,terminal_output +23894,15086954,"TERMINAL",0,0,"Step 1528, loss: 0.13538570702075958, step time: 18.033504486083984ms\r\n",,terminal_output +23895,15087018,"TERMINAL",0,0,"Step 1529, loss: 0.565110445022583, step time: 19.118309020996094ms\r\n",,terminal_output +23896,15087128,"TERMINAL",0,0,"Step 1530, loss: 0.032798804342746735, step time: 18.032550811767578ms\r\n",,terminal_output +23897,15087181,"TERMINAL",0,0,"Step 1531, loss: 0.4385414123535156, step time: 18.10002326965332ms\r\n",,terminal_output +23898,15087289,"TERMINAL",0,0,"Step 1532, loss: 0.3252089321613312, step time: 18.492460250854492ms\r\n",,terminal_output +23899,15087354,"TERMINAL",0,0,"Step 1533, loss: 0.06632387638092041, step time: 18.003225326538086ms\r\n",,terminal_output +23900,15087466,"TERMINAL",0,0,"Step 1534, loss: 0.11384356021881104, step time: 18.00680160522461ms\r\n",,terminal_output +23901,15087523,"TERMINAL",0,0,"Step 1535, loss: 0.3299507796764374, step time: 18.615007400512695ms\r\n",,terminal_output +23902,15087628,"TERMINAL",0,0,"Step 1536, loss: 0.1126427948474884, step time: 18.724441528320312ms\r\n",,terminal_output +23903,15087739,"TERMINAL",0,0,"Step 1537, loss: 0.055542878806591034, step time: 18.488168716430664ms\r\n",,terminal_output +23904,15087800,"TERMINAL",0,0,"Step 1538, loss: 0.118747778236866, step time: 18.781661987304688ms\r\n",,terminal_output +23905,15087863,"TERMINAL",0,0,"Step 1539, loss: 0.1972481608390808, step time: 18.265724182128906ms\r\n",,terminal_output +23906,15087975,"TERMINAL",0,0,"Step 1540, loss: 0.11139331758022308, step time: 18.24641227722168ms\r\n",,terminal_output +23907,15088035,"TERMINAL",0,0,"Step 1541, loss: 0.04900963231921196, step time: 18.522024154663086ms\r\n",,terminal_output +23908,15088139,"TERMINAL",0,0,"Step 1542, loss: 0.059853579849004745, step time: 17.913103103637695ms\r\n",,terminal_output +23909,15088205,"TERMINAL",0,0,"Step 1543, loss: 0.11316397786140442, step time: 18.148183822631836ms\r\n",,terminal_output +23910,15088313,"TERMINAL",0,0,"Step 1544, loss: 0.10843260586261749, step time: 18.556833267211914ms\r\n",,terminal_output +23911,15088408,"TERMINAL",0,0,"Step 1545, loss: 0.0541277751326561, step time: 18.024444580078125ms\r\n",,terminal_output +23912,15088469,"TERMINAL",0,0,"Step 1546, loss: 0.04375380650162697, step time: 18.06807518005371ms\r\n",,terminal_output +23913,15088588,"TERMINAL",0,0,"Step 1547, loss: 0.06177075579762459, step time: 18.533706665039062ms\r\n",,terminal_output +23914,15088653,"TERMINAL",0,0,"Step 1548, loss: 0.08182308077812195, step time: 18.091201782226562ms\r\n",,terminal_output +23915,15088761,"TERMINAL",0,0,"Step 1549, loss: 0.06527310609817505, step time: 17.986536026000977ms\r\n",,terminal_output +23916,15088820,"TERMINAL",0,0,"Step 1550, 
loss: 0.045874714851379395, step time: 19.056081771850586ms\r\n",,terminal_output +23917,15088932,"TERMINAL",0,0,"Step 1551, loss: 0.04118095710873604, step time: 18.3718204498291ms\r\n",,terminal_output +23918,15088983,"TERMINAL",0,0,"Step 1552, loss: 0.04760662093758583, step time: 18.51654052734375ms\r\n",,terminal_output +23919,15089088,"TERMINAL",0,0,"Step 1553, loss: 0.05611424148082733, step time: 18.880605697631836ms\r\n",,terminal_output +23920,15089150,"TERMINAL",0,0,"Step 1554, loss: 0.04912335053086281, step time: 17.87257194519043ms\r\n",,terminal_output +23921,15089258,"TERMINAL",0,0,"Step 1555, loss: 0.04050741344690323, step time: 18.100500106811523ms\r\n",,terminal_output +23922,15089375,"TERMINAL",0,0,"Step 1556, loss: 0.03801761567592621, step time: 18.5549259185791ms\r\n",,terminal_output +23923,15089426,"TERMINAL",0,0,"Step 1557, loss: 0.039526838809251785, step time: 18.20683479309082ms\r\n",,terminal_output +23924,15089531,"TERMINAL",0,0,"Step 1558, loss: 0.04169146344065666, step time: 18.136262893676758ms\r\n",,terminal_output +23925,15089594,"TERMINAL",0,0,"Step 1559, loss: 0.04018407687544823, step time: 18.59116554260254ms\r\n",,terminal_output +23926,15089702,"TERMINAL",0,0,"Step 1560, loss: 0.03617161139845848, step time: 17.864704132080078ms\r\n",,terminal_output +23927,15089753,"TERMINAL",0,0,"Step 1561, loss: 0.03363899886608124, step time: 18.13983917236328ms\r\n",,terminal_output +23928,15089859,"TERMINAL",0,0,"Step 1562, loss: 0.033624958246946335, step time: 18.51677894592285ms\r\n",,terminal_output +23929,15089964,"TERMINAL",0,0,"Step 1563, loss: 0.03441353514790535, step time: 18.085956573486328ms\r\n",,terminal_output +23930,15090024,"TERMINAL",0,0,"Step 1564, loss: 0.0341588594019413, step time: 18.074512481689453ms\r\n",,terminal_output +23931,15090132,"TERMINAL",0,0,"Step 1565, loss: 0.03195347264409065, step time: 18.819093704223633ms\r\n",,terminal_output +23932,15090183,"TERMINAL",0,0,"Step 1566, loss: 0.030257636681199074, step time: 18.132448196411133ms\r\n",,terminal_output +23933,15090288,"TERMINAL",0,0,"Step 1567, loss: 0.02992338500916958, step time: 18.106698989868164ms\r\n",,terminal_output +23934,15090395,"TERMINAL",0,0,"Step 1568, loss: 0.030035091564059258, step time: 18.605709075927734ms\r\n",,terminal_output +23935,15090446,"TERMINAL",0,0,"Step 1569, loss: 0.029489845037460327, step time: 17.99917221069336ms\r\n",,terminal_output +23936,15090553,"TERMINAL",0,0,"Step 1570, loss: 0.028045915067195892, step time: 18.01133155822754ms\r\n",,terminal_output +23937,15090614,"TERMINAL",0,0,"Step 1571, loss: 0.027180882170796394, step time: 18.691301345825195ms\r\n",,terminal_output +23938,15090720,"TERMINAL",0,0,"Step 1572, loss: 0.027299940586090088, step time: 17.754077911376953ms\r\n",,terminal_output +23939,15090780,"TERMINAL",0,0,"Step 1573, loss: 0.02726517990231514, step time: 18.36991310119629ms\r\n",,terminal_output +23940,15090866,"TERMINAL",0,0,"Step 1574, loss: 0.026212651282548904, step time: 19.696474075317383ms\r\n",,terminal_output +23941,15090978,"TERMINAL",0,0,"Step 1575, loss: 0.025185946375131607, step time: 18.6004638671875ms\r\n",,terminal_output +23942,15091040,"TERMINAL",0,0,"Step 1576, loss: 0.02500639297068119, step time: 18.418073654174805ms\r\n",,terminal_output +23943,15091146,"TERMINAL",0,0,"Step 1577, loss: 0.02504677325487137, step time: 18.72873306274414ms\r\n",,terminal_output +23944,15091256,"TERMINAL",0,0,"Step 1578, loss: 0.024537155404686928, step time: 17.934083938598633ms\r\n",,terminal_output 
+23945,15091307,"TERMINAL",0,0,"Step 1579, loss: 0.02366030029952526, step time: 18.072843551635742ms\r\n",,terminal_output +23946,15091413,"TERMINAL",0,0,"Step 1580, loss: 0.023501897230744362, step time: 18.69511604309082ms\r\n",,terminal_output +23947,15091473,"TERMINAL",0,0,"Step 1581, loss: 0.023182332515716553, step time: 18.00537109375ms\r\n",,terminal_output +23948,15091582,"TERMINAL",0,0,"Step 1582, loss: 0.02266724407672882, step time: 18.279552459716797ms\r\n",,terminal_output +23949,15091643,"TERMINAL",0,0,"Step 1583, loss: 0.02240922674536705, step time: 18.766164779663086ms\r\n",,terminal_output +23950,15091751,"TERMINAL",0,0,"Step 1584, loss: 0.02219485118985176, step time: 17.93360710144043ms\r\n",,terminal_output +23951,15091827,"TERMINAL",0,0,"Step 1585, loss: 0.02164992317557335, step time: 17.956018447875977ms\r\n",,terminal_output +23952,15091932,"TERMINAL",0,0,"Step 1586, loss: 0.021289056167006493, step time: 18.484115600585938ms\r\n",,terminal_output +23953,15091994,"TERMINAL",0,0,"Step 1587, loss: 0.021258670836687088, step time: 18.024206161499023ms\r\n",,terminal_output +23954,15092103,"TERMINAL",0,0,"Step 1588, loss: 0.02073274366557598, step time: 18.20230484008789ms\r\n",,terminal_output +23955,15092161,"TERMINAL",0,0,"Step 1589, loss: 0.020237745717167854, step time: 18.543004989624023ms\r\n",,terminal_output +23956,15092268,"TERMINAL",0,0,"Step 1590, loss: 0.020226264372467995, step time: 17.838001251220703ms\r\n",,terminal_output +23957,15092331,"TERMINAL",0,0,"Step 1591, loss: 0.019943783059716225, step time: 18.358945846557617ms\r\n",,terminal_output +23958,15092439,"TERMINAL",0,0,"Step 1592, loss: 0.019388047978281975, step time: 19.124984741210938ms\r\n",,terminal_output +23959,15092546,"TERMINAL",0,0,"Step 1593, loss: 0.01930316723883152, step time: 18.28289031982422ms\r\n",,terminal_output +23960,15092597,"TERMINAL",0,0,"Step 1594, loss: 0.01906093955039978, step time: 18.062829971313477ms\r\n",,terminal_output +23961,15092717,"TERMINAL",0,0,"Step 1595, loss: 0.01885133795440197, step time: 18.52726936340332ms\r\n",,terminal_output +23962,15092783,"TERMINAL",0,0,"Step 1596, loss: 0.019207535311579704, step time: 17.833709716796875ms\r\n",,terminal_output +23963,15092859,"TERMINAL",0,0,"Step 1597, loss: 0.020687434822320938, step time: 18.073320388793945ms\r\n",,terminal_output +23964,15092965,"TERMINAL",0,0,"Step 1598, loss: 0.033164218068122864, step time: 18.666744232177734ms\r\n",,terminal_output +23965,15093019,"TERMINAL",0,0,"Step 1599, loss: 0.26206374168395996, step time: 18.147945404052734ms\r\n",,terminal_output +23966,15093112,"TERMINAL",0,0,"Step 1600, loss: 0.16820649802684784, step time: 18.07093620300293ms\r\n",,terminal_output +23967,15093222,"TERMINAL",0,0,"Step 1601, loss: 0.403946191072464, step time: 18.833160400390625ms\r\n",,terminal_output +23968,15093277,"TERMINAL",0,0,"Step 1602, loss: 0.027725163847208023, step time: 17.720460891723633ms\r\n",,terminal_output +23969,15093382,"TERMINAL",0,0,"Step 1603, loss: 0.4756892919540405, step time: 17.98701286315918ms\r\n",,terminal_output +23970,15093488,"TERMINAL",0,0,"Step 1604, loss: 0.18999719619750977, step time: 18.48769187927246ms\r\n",,terminal_output +23971,15093541,"TERMINAL",0,0,"Step 1605, loss: 0.04723510891199112, step time: 18.355607986450195ms\r\n",,terminal_output +23972,15093647,"TERMINAL",0,0,"Step 1606, loss: 0.18318496644496918, step time: 18.14103126525879ms\r\n",,terminal_output +23973,15093738,"TERMINAL",0,0,"Step 1607, loss: 0.2538606524467468, step time: 
18.564462661743164ms\r\n",,terminal_output +23974,15093879,"TERMINAL",0,0,"Step 1608, loss: 0.0654568001627922, step time: 17.832279205322266ms\r\nStep 1609, loss: 0.04266839846968651, step time: 17.994403839111328ms\r\n",,terminal_output +23975,15093985,"TERMINAL",0,0,"Step 1610, loss: 0.13598239421844482, step time: 23.44202995300293ms\r\n",,terminal_output +23976,15094092,"TERMINAL",0,0,"Step 1611, loss: 0.1495572179555893, step time: 19.49930191040039ms\r\n",,terminal_output +23977,15094145,"TERMINAL",0,0,"Step 1612, loss: 0.06452208012342453, step time: 18.602848052978516ms\r\n",,terminal_output +23978,15094252,"TERMINAL",0,0,"Step 1613, loss: 0.04173624888062477, step time: 18.825292587280273ms\r\n",,terminal_output +23979,15094316,"TERMINAL",0,0,"Step 1614, loss: 0.06062713637948036, step time: 17.9901123046875ms\r\n",,terminal_output +23980,15094426,"TERMINAL",0,0,"Step 1615, loss: 0.09796153753995895, step time: 18.18990707397461ms\r\n",,terminal_output +23981,15094486,"TERMINAL",0,0,"Step 1616, loss: 0.07821110635995865, step time: 18.71657371520996ms\r\n",,terminal_output +23982,15094579,"TERMINAL",0,0,"Step 1617, loss: 0.043387386947870255, step time: 18.201828002929688ms\r\n",,terminal_output +23983,15094686,"TERMINAL",0,0,"Step 1618, loss: 0.0366511307656765, step time: 18.2647705078125ms\r\n",,terminal_output +23984,15094744,"TERMINAL",0,0,"Step 1619, loss: 0.04594956710934639, step time: 18.702030181884766ms\r\n",,terminal_output +23985,15094855,"TERMINAL",0,0,"Step 1620, loss: 0.058935899287462234, step time: 18.078327178955078ms\r\n",,terminal_output +23986,15094954,"TERMINAL",0,0,"Step 1621, loss: 0.05712592974305153, step time: 18.306255340576172ms\r\n",,terminal_output +23987,15095016,"TERMINAL",0,0,"Step 1622, loss: 0.040159426629543304, step time: 18.738746643066406ms\r\n",,terminal_output +23988,15095128,"TERMINAL",0,0,"Step 1623, loss: 0.030915653333067894, step time: 18.26000213623047ms\r\n",,terminal_output +23989,15095182,"TERMINAL",0,0,"Step 1624, loss: 0.03246450424194336, step time: 18.192052841186523ms\r\n",,terminal_output +23990,15095291,"TERMINAL",0,0,"Step 1625, loss: 0.04000123217701912, step time: 18.65243911743164ms\r\n",,terminal_output +23991,15095354,"TERMINAL",0,0,"Step 1626, loss: 0.04167221486568451, step time: 18.00060272216797ms\r\n",,terminal_output +23992,15095464,"TERMINAL",0,0,"Step 1627, loss: 0.03634927421808243, step time: 18.302202224731445ms\r\n",,terminal_output +23993,15095521,"TERMINAL",0,0,"Step 1628, loss: 0.030467845499515533, step time: 18.6612606048584ms\r\n",,terminal_output +23994,15095630,"TERMINAL",0,0,"Step 1629, loss: 0.027615681290626526, step time: 18.19896697998047ms\r\n",,terminal_output +23995,15095695,"TERMINAL",0,0,"Step 1630, loss: 0.02975264936685562, step time: 17.90761947631836ms\r\n",,terminal_output +23996,15095817,"TERMINAL",0,0,"Step 1631, loss: 0.03239695727825165, step time: 18.156051635742188ms\r\n",,terminal_output +23997,15096180,"TERMINAL",0,0,"Step 1632, loss: 0.03018742986023426, step time: 307.02972412109375ms\r\nStep 1633, loss: 0.027284516021609306, step time: 25.272369384765625ms\r\n",,terminal_output +23998,15096275,"TERMINAL",0,0,"Step 1634, loss: 0.025551320984959602, step time: 20.251989364624023ms\r\n",,terminal_output +23999,15096385,"TERMINAL",0,0,"Step 1635, loss: 0.02485487423837185, step time: 18.938302993774414ms\r\n",,terminal_output +24000,15096439,"TERMINAL",0,0,"Step 1636, loss: 0.025725850835442543, step time: 18.349409103393555ms\r\n",,terminal_output 
+24001,15096534,"TERMINAL",0,0,"Step 1637, loss: 0.025911517441272736, step time: 18.25261116027832ms\r\n",,terminal_output +24002,15096643,"TERMINAL",0,0,"Step 1638, loss: 0.024589255452156067, step time: 17.8375244140625ms\r\n",,terminal_output +24003,15096696,"TERMINAL",0,0,"Step 1639, loss: 0.023258358240127563, step time: 18.013715744018555ms\r\n",,terminal_output +24004,15096817,"TERMINAL",0,0,"Step 1640, loss: 0.022195199504494667, step time: 17.70806312561035ms\r\n",,terminal_output +24005,15096869,"TERMINAL",0,0,"Step 1641, loss: 0.022077513858675957, step time: 17.98725128173828ms\r\n",,terminal_output +24006,15096977,"TERMINAL",0,0,"Step 1642, loss: 0.022414810955524445, step time: 17.90595054626465ms\r\n",,terminal_output +24007,15097042,"TERMINAL",0,0,"Step 1643, loss: 0.02197994664311409, step time: 17.97199249267578ms\r\n",,terminal_output +24008,15097152,"TERMINAL",0,0,"Step 1644, loss: 0.021146269515156746, step time: 18.005847930908203ms\r\n",,terminal_output +24009,15097214,"TERMINAL",0,0,"Step 1645, loss: 0.020481742918491364, step time: 17.932653427124023ms\r\n",,terminal_output +24010,15097332,"TERMINAL",0,0,"Step 1646, loss: 0.0200617928057909, step time: 18.007278442382812ms\r\n",,terminal_output +24011,15097386,"TERMINAL",0,0,"Step 1647, loss: 0.019972357898950577, step time: 18.219947814941406ms\r\n",,terminal_output +24012,15097493,"TERMINAL",0,0,"Step 1648, loss: 0.01971748284995556, step time: 17.700910568237305ms\r\n",,terminal_output +24013,15097557,"TERMINAL",0,0,"Step 1649, loss: 0.019398842006921768, step time: 17.674684524536133ms\r\n",,terminal_output +24014,15097667,"TERMINAL",0,0,"Step 1650, loss: 0.018947415053844452, step time: 18.35036277770996ms\r\n",,terminal_output +24015,15097726,"TERMINAL",0,0,"Step 1651, loss: 0.01840115897357464, step time: 18.38970184326172ms\r\n",,terminal_output +24016,15097838,"TERMINAL",0,0,"Step 1652, loss: 0.01820400357246399, step time: 18.646240234375ms\r\n",,terminal_output +24017,15097896,"TERMINAL",0,0,"Step 1653, loss: 0.018107274547219276, step time: 18.812179565429688ms\r\n",,terminal_output +24018,15098006,"TERMINAL",0,0,"Step 1654, loss: 0.017893077805638313, step time: 18.247604370117188ms\r\n",,terminal_output +24019,15098090,"TERMINAL",0,0,"Step 1655, loss: 0.017341889441013336, step time: 18.232345581054688ms\r\n",,terminal_output +24020,15098156,"TERMINAL",0,0,"Step 1656, loss: 0.017041325569152832, step time: 18.280506134033203ms\r\n",,terminal_output +24021,15098268,"TERMINAL",0,0,"Step 1657, loss: 0.01690196618437767, step time: 18.269062042236328ms\r\n",,terminal_output +24022,15098328,"TERMINAL",0,0,"Step 1658, loss: 0.016694070771336555, step time: 18.303632736206055ms\r\n",,terminal_output +24023,15098437,"TERMINAL",0,0,"Step 1659, loss: 0.016352707520127296, step time: 18.548965454101562ms\r\n",,terminal_output +24024,15098502,"TERMINAL",0,0,"Step 1660, loss: 0.016076501458883286, step time: 18.275022506713867ms\r\n",,terminal_output +24025,15098619,"TERMINAL",0,0,"Step 1661, loss: 0.015894420444965363, step time: 18.30124855041504ms\r\n",,terminal_output +24026,15098671,"TERMINAL",0,0,"Step 1662, loss: 0.015714038163423538, step time: 18.290281295776367ms\r\n",,terminal_output +24027,15098780,"TERMINAL",0,0,"Step 1663, loss: 0.01539532095193863, step time: 18.218278884887695ms\r\n",,terminal_output +24028,15098846,"TERMINAL",0,0,"Step 1664, loss: 0.015161196701228619, step time: 18.198251724243164ms\r\n",,terminal_output +24029,15098953,"TERMINAL",0,0,"Step 1665, loss: 0.015014431439340115, step 
time: 18.284082412719727ms\r\n",,terminal_output +24030,15099018,"TERMINAL",0,0,"Step 1666, loss: 0.014765716157853603, step time: 18.29218864440918ms\r\n",,terminal_output +24031,15099128,"TERMINAL",0,0,"Step 1667, loss: 0.014514312148094177, step time: 27.062416076660156ms\r\n",,terminal_output +24032,15099237,"TERMINAL",0,0,"Step 1668, loss: 0.014309102669358253, step time: 26.294708251953125ms\r\n",,terminal_output +24033,15099291,"TERMINAL",0,0,"Step 1669, loss: 0.014102133922278881, step time: 27.02951431274414ms\r\n",,terminal_output +24034,15099399,"TERMINAL",0,0,"Step 1670, loss: 0.013910508714616299, step time: 25.521516799926758ms\r\n",,terminal_output +24035,15099466,"TERMINAL",0,0,"Step 1671, loss: 0.013732127845287323, step time: 22.298812866210938ms\r\n",,terminal_output +24036,15099574,"TERMINAL",0,0,"Step 1672, loss: 0.013622577302157879, step time: 20.23148536682129ms\r\n",,terminal_output +24037,15099639,"TERMINAL",0,0,"Step 1673, loss: 0.01369792316108942, step time: 19.033193588256836ms\r\n",,terminal_output +24038,15099753,"TERMINAL",0,0,"Step 1674, loss: 0.015038380399346352, step time: 18.861770629882812ms\r\n",,terminal_output +24039,15099820,"TERMINAL",0,0,"Step 1675, loss: 0.033209703862667084, step time: 18.71347427368164ms\r\n",,terminal_output +24040,15099931,"TERMINAL",0,0,"Step 1676, loss: 0.3920627534389496, step time: 18.511533737182617ms\r\n",,terminal_output +24041,15099994,"TERMINAL",0,0,"Step 1677, loss: 0.0796971544623375, step time: 18.9816951751709ms\r\n",,terminal_output +24042,15100104,"TERMINAL",0,0,"Step 1678, loss: 0.8504263162612915, step time: 18.536090850830078ms\r\n",,terminal_output +24043,15100157,"TERMINAL",0,0,"Step 1679, loss: 0.08429714292287827, step time: 18.613576889038086ms\r\n",,terminal_output +24044,15100272,"TERMINAL",0,0,"Step 1680, loss: 0.14289164543151855, step time: 18.39470863342285ms\r\n",,terminal_output +24045,15100335,"TERMINAL",0,0,"Step 1681, loss: 0.5826229453086853, step time: 18.395185470581055ms\r\n",,terminal_output +24046,15100444,"TERMINAL",0,0,"Step 1682, loss: 0.1537940800189972, step time: 18.4023380279541ms\r\n",,terminal_output +24047,15100501,"TERMINAL",0,0,"Step 1683, loss: 0.03380584716796875, step time: 18.896102905273438ms\r\n",,terminal_output +24048,15100612,"TERMINAL",0,0,"Step 1684, loss: 0.17237474024295807, step time: 18.70250701904297ms\r\n",,terminal_output +24049,15100708,"TERMINAL",0,0,"Step 1685, loss: 0.308352530002594, step time: 18.57900619506836ms\r\n",,terminal_output +24050,15100761,"TERMINAL",0,0,"Step 1686, loss: 0.12618158757686615, step time: 18.291950225830078ms\r\n",,terminal_output +24051,15100867,"TERMINAL",0,0,"Step 1687, loss: 0.04157061502337456, step time: 18.6617374420166ms\r\n",,terminal_output +24052,15100968,"TERMINAL",0,0,"Step 1688, loss: 0.056620121002197266, step time: 18.3866024017334ms\r\n",,terminal_output +24053,15101030,"TERMINAL",0,0,"Step 1689, loss: 0.1419677585363388, step time: 18.68891716003418ms\r\n",,terminal_output +24054,15101141,"TERMINAL",0,0,"Step 1690, loss: 0.15637214481830597, step time: 17.691373825073242ms\r\n",,terminal_output +24055,15101195,"TERMINAL",0,0,"Step 1691, loss: 0.06563469022512436, step time: 17.73357391357422ms\r\n",,terminal_output +24056,15101306,"TERMINAL",0,0,"Step 1692, loss: 0.03292582929134369, step time: 17.856597900390625ms\r\n",,terminal_output +24057,15101369,"TERMINAL",0,0,"Step 1693, loss: 0.04304099455475807, step time: 17.822265625ms\r\n",,terminal_output +24058,15101478,"TERMINAL",0,0,"Step 1694, loss: 
0.07798536121845245, step time: 17.72928237915039ms\r\n",,terminal_output +24059,15101533,"TERMINAL",0,0,"Step 1695, loss: 0.09199345111846924, step time: 17.966270446777344ms\r\n",,terminal_output +24060,15101628,"TERMINAL",0,0,"Step 1696, loss: 0.06024273857474327, step time: 17.748355865478516ms\r\n",,terminal_output +24061,15101738,"TERMINAL",0,0,"Step 1697, loss: 0.03723934292793274, step time: 17.691373825073242ms\r\n",,terminal_output +24062,15101791,"TERMINAL",0,0,"Step 1698, loss: 0.03195783868432045, step time: 17.714977264404297ms\r\n",,terminal_output +24063,15101897,"TERMINAL",0,0,"Step 1699, loss: 0.037950124591588974, step time: 17.814159393310547ms\r\n",,terminal_output +24064,15101998,"TERMINAL",0,0,"Step 1700, loss: 0.04952678829431534, step time: 17.73834228515625ms\r\n",,terminal_output +24065,15102060,"TERMINAL",0,0,"Step 1701, loss: 0.04946368932723999, step time: 18.033742904663086ms\r\n",,terminal_output +24066,15102171,"TERMINAL",0,0,"Step 1702, loss: 0.04049759730696678, step time: 17.70615577697754ms\r\n",,terminal_output +24067,15102237,"TERMINAL",0,0,"Step 1703, loss: 0.03239456191658974, step time: 17.757892608642578ms\r\n",,terminal_output +24068,15102358,"TERMINAL",0,0,"Step 1704, loss: 0.027480872347950935, step time: 17.983198165893555ms\r\n",,terminal_output +24069,15102408,"TERMINAL",0,0,"Step 1705, loss: 0.027508873492479324, step time: 17.798423767089844ms\r\n",,terminal_output +24070,15102516,"TERMINAL",0,0,"Step 1706, loss: 0.030244437977671623, step time: 17.796754837036133ms\r\n",,terminal_output +24071,15102576,"TERMINAL",0,0,"Step 1707, loss: 0.03304271772503853, step time: 17.990589141845703ms\r\n",,terminal_output +24072,15102693,"TERMINAL",0,0,"Step 1708, loss: 0.03129640221595764, step time: 17.68183708190918ms\r\n",,terminal_output +24073,15102745,"TERMINAL",0,0,"Step 1709, loss: 0.026870612055063248, step time: 17.720460891723633ms\r\n",,terminal_output +24074,15102853,"TERMINAL",0,0,"Step 1710, loss: 0.023511795327067375, step time: 17.800569534301758ms\r\n",,terminal_output +24075,15102906,"TERMINAL",0,0,"Step 1711, loss: 0.021595057100057602, step time: 17.742633819580078ms\r\n",,terminal_output +24076,15103028,"TERMINAL",0,0,"Step 1712, loss: 0.02184961922466755, step time: 17.793655395507812ms\r\n",,terminal_output +24077,15103079,"TERMINAL",0,0,"Step 1713, loss: 0.023748116567730904, step time: 18.001794815063477ms\r\n",,terminal_output +24078,15103187,"TERMINAL",0,0,"Step 1714, loss: 0.024911461398005486, step time: 19.58608627319336ms\r\n",,terminal_output +24079,15103295,"TERMINAL",0,0,"Step 1715, loss: 0.0234526414424181, step time: 17.868757247924805ms\r\n",,terminal_output +24080,15103358,"TERMINAL",0,0,"Step 1716, loss: 0.020393438637256622, step time: 17.747163772583008ms\r\n",,terminal_output +24081,15103433,"TERMINAL",0,0,"Step 1717, loss: 0.01857549138367176, step time: 17.859220504760742ms\r\n",,terminal_output +24082,15103539,"TERMINAL",0,0,"Step 1718, loss: 0.018309202045202255, step time: 17.85731315612793ms\r\n",,terminal_output +24083,15103636,"TERMINAL",0,0,"Step 1719, loss: 0.01911989040672779, step time: 19.255399703979492ms\r\n",,terminal_output +24084,15103734,"TERMINAL",0,0,"Step 1720, loss: 0.019976485520601273, step time: 18.195629119873047ms\r\n",,terminal_output +24085,15103798,"TERMINAL",0,0,"Step 1721, loss: 0.019393635913729668, step time: 17.97652244567871ms\r\n",,terminal_output +24086,15103907,"TERMINAL",0,0,"Step 1722, loss: 0.017830723896622658, step time: 18.590450286865234ms\r\n",,terminal_output 
+24087,15103959,"TERMINAL",0,0,"Step 1723, loss: 0.01655397191643715, step time: 21.954059600830078ms\r\n",,terminal_output +24088,15104069,"TERMINAL",0,0,"Step 1724, loss: 0.01620413362979889, step time: 19.83356475830078ms\r\n",,terminal_output +24089,15104133,"TERMINAL",0,0,"Step 1725, loss: 0.01654386706650257, step time: 19.321441650390625ms\r\n",,terminal_output +24090,15104239,"TERMINAL",0,0,"Step 1726, loss: 0.016855040565133095, step time: 18.694639205932617ms\r\n",,terminal_output +24091,15104297,"TERMINAL",0,0,"Step 1727, loss: 0.016457311809062958, step time: 18.49985122680664ms\r\n",,terminal_output +24092,15104406,"TERMINAL",0,0,"Step 1728, loss: 0.015583145432174206, step time: 18.430471420288086ms\r\n",,terminal_output +24093,15104487,"TERMINAL",0,0,"Step 1729, loss: 0.01492459699511528, step time: 18.584489822387695ms\r\n",,terminal_output +24094,15104600,"TERMINAL",0,0,"Step 1730, loss: 0.014742996543645859, step time: 18.706083297729492ms\r\n",,terminal_output +24095,15104651,"TERMINAL",0,0,"Step 1731, loss: 0.014793464913964272, step time: 18.727779388427734ms\r\n",,terminal_output +24096,15104779,"TERMINAL",0,0,"Step 1732, loss: 0.014760098420083523, step time: 18.35465431213379ms\r\n",,terminal_output +24097,15104837,"TERMINAL",0,0,"Step 1733, loss: 0.014258047565817833, step time: 18.372774124145508ms\r\n",,terminal_output +24098,15104898,"TERMINAL",0,0,"Step 1734, loss: 0.013840684667229652, step time: 18.333911895751953ms\r\n",,terminal_output +24099,15105005,"TERMINAL",0,0,"Step 1735, loss: 0.013591110706329346, step time: 18.216371536254883ms\r\n",,terminal_output +24100,15105070,"TERMINAL",0,0,"Step 1736, loss: 0.013500957749783993, step time: 18.259286880493164ms\r\n",,terminal_output +24101,15105178,"TERMINAL",0,0,"Step 1737, loss: 0.013335468247532845, step time: 18.659114837646484ms\r\n",,terminal_output +24102,15105245,"TERMINAL",0,0,"Step 1738, loss: 0.0129788713529706, step time: 18.55325698852539ms\r\n",,terminal_output +24103,15105352,"TERMINAL",0,0,"Step 1739, loss: 0.012691217474639416, step time: 18.592119216918945ms\r\n",,terminal_output +24104,15105417,"TERMINAL",0,0,"Step 1740, loss: 0.012536217458546162, step time: 18.61882209777832ms\r\n",,terminal_output +24105,15105527,"TERMINAL",0,0,"Step 1741, loss: 0.012443620711565018, step time: 18.512725830078125ms\r\n",,terminal_output +24106,15105590,"TERMINAL",0,0,"Step 1742, loss: 0.012212052009999752, step time: 18.705368041992188ms\r\n",,terminal_output +24107,15105697,"TERMINAL",0,0,"Step 1743, loss: 0.01189944613724947, step time: 18.915414810180664ms\r\n",,terminal_output +24108,15105762,"TERMINAL",0,0,"Step 1744, loss: 0.011749664321541786, step time: 18.556594848632812ms\r\n",,terminal_output +24109,15105848,"TERMINAL",0,0,"Step 1745, loss: 0.011629822663962841, step time: 18.451213836669922ms\r\n",,terminal_output +24110,15105954,"TERMINAL",0,0,"Step 1746, loss: 0.011400031857192516, step time: 18.653154373168945ms\r\n",,terminal_output +24111,15106019,"TERMINAL",0,0,"Step 1747, loss: 0.011177708394825459, step time: 18.744468688964844ms\r\n",,terminal_output +24112,15106128,"TERMINAL",0,0,"Step 1748, loss: 0.011067031882703304, step time: 18.55611801147461ms\r\n",,terminal_output +24113,15106200,"TERMINAL",0,0,"Step 1749, loss: 0.010942934080958366, step time: 18.62812042236328ms\r\n",,terminal_output +24114,15106276,"TERMINAL",0,0,"Step 1750, loss: 0.010787326842546463, step time: 18.238544464111328ms\r\n",,terminal_output +24115,15106382,"TERMINAL",0,0,"Step 1751, loss: 
0.01084111351519823, step time: 18.549680709838867ms\r\n",,terminal_output +24116,15106537,"TERMINAL",0,0,"Step 1752, loss: 0.011842391453683376, step time: 18.29838752746582ms\r\nStep 1753, loss: 0.021692944690585136, step time: 18.307209014892578ms\r\n",,terminal_output +24117,15106650,"TERMINAL",0,0,"Step 1754, loss: 0.3350283205509186, step time: 18.299341201782227ms\r\n",,terminal_output +24118,15106713,"TERMINAL",0,0,"Step 1755, loss: 0.12995317578315735, step time: 18.926143646240234ms\r\n",,terminal_output +24119,15106835,"TERMINAL",0,0,"Step 1756, loss: 0.6434417963027954, step time: 18.277406692504883ms\r\n",,terminal_output +24120,15106886,"TERMINAL",0,0,"Step 1757, loss: 0.023537611588835716, step time: 18.210887908935547ms\r\n",,terminal_output +24121,15106992,"TERMINAL",0,0,"Step 1758, loss: 0.23069678246974945, step time: 18.599987030029297ms\r\n",,terminal_output +24122,15107054,"TERMINAL",0,0,"Step 1759, loss: 0.3577631711959839, step time: 18.706798553466797ms\r\n",,terminal_output +24123,15107164,"TERMINAL",0,0,"Step 1760, loss: 0.04228637367486954, step time: 18.28169822692871ms\r\n",,terminal_output +24124,15107226,"TERMINAL",0,0,"Step 1761, loss: 0.058158405125141144, step time: 18.8446044921875ms\r\n",,terminal_output +24125,15107333,"TERMINAL",0,0,"Step 1762, loss: 0.2662898302078247, step time: 18.673419952392578ms\r\n",,terminal_output +24126,15107400,"TERMINAL",0,0,"Step 1763, loss: 0.13609649240970612, step time: 18.270015716552734ms\r\n",,terminal_output +24127,15107505,"TERMINAL",0,0,"Step 1764, loss: 0.036723118275403976, step time: 18.477678298950195ms\r\n",,terminal_output +24128,15107612,"TERMINAL",0,0,"Step 1765, loss: 0.05431380495429039, step time: 18.19586753845215ms\r\n",,terminal_output +24129,15107663,"TERMINAL",0,0,"Step 1766, loss: 0.13585352897644043, step time: 18.23139190673828ms\r\n",,terminal_output +24130,15107770,"TERMINAL",0,0,"Step 1767, loss: 0.10395143926143646, step time: 18.471717834472656ms\r\n",,terminal_output +24131,15107831,"TERMINAL",0,0,"Step 1768, loss: 0.0427238792181015, step time: 18.291234970092773ms\r\n",,terminal_output +24132,15107936,"TERMINAL",0,0,"Step 1769, loss: 0.03722327575087547, step time: 18.648624420166016ms\r\n",,terminal_output +24133,15108045,"TERMINAL",0,0,"Step 1770, loss: 0.05711862072348595, step time: 18.325090408325195ms\r\n",,terminal_output +24134,15108106,"TERMINAL",0,0,"Step 1771, loss: 0.07787076383829117, step time: 18.28455924987793ms\r\n",,terminal_output +24135,15108215,"TERMINAL",0,0,"Step 1772, loss: 0.05839100107550621, step time: 18.280029296875ms\r\n",,terminal_output +24136,15108266,"TERMINAL",0,0,"Step 1773, loss: 0.03278336673974991, step time: 18.69940757751465ms\r\n",,terminal_output +24137,15108373,"TERMINAL",0,0,"Step 1774, loss: 0.02849428728222847, step time: 18.16415786743164ms\r\n",,terminal_output +24138,15108432,"TERMINAL",0,0,"Step 1775, loss: 0.03629608079791069, step time: 18.25737953186035ms\r\n",,terminal_output +24139,15108538,"TERMINAL",0,0,"Step 1776, loss: 0.04959265515208244, step time: 18.4328556060791ms\r\n",,terminal_output +24140,15108634,"TERMINAL",0,0,"Step 1777, loss: 0.040088433772325516, step time: 18.335819244384766ms\r\n",,terminal_output +24141,15108693,"TERMINAL",0,0,"Step 1778, loss: 0.026586350053548813, step time: 18.310070037841797ms\r\n",,terminal_output +24142,15108798,"TERMINAL",0,0,"Step 1779, loss: 0.022962383925914764, step time: 19.388675689697266ms\r\n",,terminal_output +24143,15108870,"TERMINAL",0,0,"Step 1780, loss: 0.02563321962952614, 
step time: 18.60809326171875ms\r\n",,terminal_output +24144,15108982,"TERMINAL",0,0,"Step 1781, loss: 0.030131155624985695, step time: 18.565893173217773ms\r\n",,terminal_output +24145,15109035,"TERMINAL",0,0,"Step 1782, loss: 0.029621437191963196, step time: 18.343448638916016ms\r\n",,terminal_output +24146,15109131,"TERMINAL",0,0,"Step 1783, loss: 0.0255591943860054, step time: 18.59307289123535ms\r\n",,terminal_output +24147,15109241,"TERMINAL",0,0,"Step 1784, loss: 0.02143687568604946, step time: 18.27216148376465ms\r\n",,terminal_output +24148,15109296,"TERMINAL",0,0,"Step 1785, loss: 0.0194232314825058, step time: 18.56374740600586ms\r\n",,terminal_output +24149,15109389,"TERMINAL",0,0,"Step 1786, loss: 0.020454375073313713, step time: 18.377065658569336ms\r\n",,terminal_output +24150,15109498,"TERMINAL",0,0,"Step 1787, loss: 0.022223452106118202, step time: 18.320798873901367ms\r\n",,terminal_output +24151,15109552,"TERMINAL",0,0,"Step 1788, loss: 0.021863535046577454, step time: 18.337011337280273ms\r\n",,terminal_output +24152,15109645,"TERMINAL",0,0,"Step 1789, loss: 0.019291533157229424, step time: 18.355369567871094ms\r\n",,terminal_output +24153,15109755,"TERMINAL",0,0,"Step 1790, loss: 0.016971107572317123, step time: 18.382549285888672ms\r\n",,terminal_output +24154,15109847,"TERMINAL",0,0,"Step 1791, loss: 0.01606997475028038, step time: 18.619298934936523ms\r\n",,terminal_output +24155,15109914,"TERMINAL",0,0,"Step 1792, loss: 0.016589928418397903, step time: 18.59903335571289ms\r\n",,terminal_output +24156,15110021,"TERMINAL",0,0,"Step 1793, loss: 0.017434073612093925, step time: 18.633365631103516ms\r\n",,terminal_output +24157,15110073,"TERMINAL",0,0,"Step 1794, loss: 0.017161797732114792, step time: 18.26190948486328ms\r\n",,terminal_output +24158,15110179,"TERMINAL",0,0,"Step 1795, loss: 0.015790347009897232, step time: 18.124103546142578ms\r\n",,terminal_output +24159,15110285,"TERMINAL",0,0,"Step 1796, loss: 0.014480030164122581, step time: 18.693208694458008ms\r\n",,terminal_output +24160,15110336,"TERMINAL",0,0,"Step 1797, loss: 0.014061691239476204, step time: 18.67055892944336ms\r\n",,terminal_output +24161,15110464,"TERMINAL",0,0,"Step 1798, loss: 0.014147404581308365, step time: 18.262147903442383ms\r\n",,terminal_output +24162,15110526,"TERMINAL",0,0,"Step 1799, loss: 0.01419290341436863, step time: 18.354415893554688ms\r\n",,terminal_output +24163,15110637,"TERMINAL",0,0,"Step 1800, loss: 0.013963252305984497, step time: 18.357038497924805ms\r\n",,terminal_output +24164,15110695,"TERMINAL",0,0,"Step 1801, loss: 0.013593141920864582, step time: 18.443584442138672ms\r\n",,terminal_output +24165,15110803,"TERMINAL",0,0,"Step 1802, loss: 0.012952221557497978, step time: 18.317222595214844ms\r\n",,terminal_output +24166,15110867,"TERMINAL",0,0,"Step 1803, loss: 0.012415807694196701, step time: 18.74566078186035ms\r\n",,terminal_output +24167,15110974,"TERMINAL",0,0,"Step 1804, loss: 0.012247205711901188, step time: 18.424034118652344ms\r\n",,terminal_output +24168,15111049,"TERMINAL",0,0,"Step 1805, loss: 0.01225979533046484, step time: 18.294572830200195ms\r\n",,terminal_output +24169,15111108,"TERMINAL",0,0,"Step 1806, loss: 0.012102022767066956, step time: 18.394947052001953ms\r\n",,terminal_output +24170,15111213,"TERMINAL",0,0,"Step 1807, loss: 0.011782960034906864, step time: 18.26643943786621ms\r\n",,terminal_output +24171,15111318,"TERMINAL",0,0,"Step 1808, loss: 0.011498822830617428, step time: 18.31364631652832ms\r\n",,terminal_output 
+24172,15111383,"TERMINAL",0,0,"Step 1809, loss: 0.011211722157895565, step time: 18.895387649536133ms\r\n",,terminal_output +24173,15111491,"TERMINAL",0,0,"Step 1810, loss: 0.010993000119924545, step time: 18.411636352539062ms\r\n",,terminal_output +24174,15111550,"TERMINAL",0,0,"Step 1811, loss: 0.010887309908866882, step time: 18.6617374420166ms\r\n",,terminal_output +24175,15111642,"TERMINAL",0,0,"Step 1812, loss: 0.010768611915409565, step time: 18.622398376464844ms\r\n",,terminal_output +24176,15111748,"TERMINAL",0,0,"Step 1813, loss: 0.010523045435547829, step time: 18.740415573120117ms\r\n",,terminal_output +24177,15111800,"TERMINAL",0,0,"Step 1814, loss: 0.010231810621917248, step time: 18.70584487915039ms\r\n",,terminal_output +24178,15111916,"TERMINAL",0,0,"Step 1815, loss: 0.010054354555904865, step time: 18.527746200561523ms\r\n",,terminal_output +24179,15111972,"TERMINAL",0,0,"Step 1816, loss: 0.009956986643373966, step time: 18.644332885742188ms\r\n",,terminal_output +24180,15112079,"TERMINAL",0,0,"Step 1817, loss: 0.009822700172662735, step time: 18.30601692199707ms\r\n",,terminal_output +24181,15112144,"TERMINAL",0,0,"Step 1818, loss: 0.00960437674075365, step time: 18.45574378967285ms\r\n",,terminal_output +24182,15112255,"TERMINAL",0,0,"Step 1819, loss: 0.009407207369804382, step time: 18.60356330871582ms\r\n",,terminal_output +24183,15112320,"TERMINAL",0,0,"Step 1820, loss: 0.009287337772548199, step time: 18.596410751342773ms\r\n",,terminal_output +24184,15112432,"TERMINAL",0,0,"Step 1821, loss: 0.00916688609868288, step time: 19.002437591552734ms\r\n",,terminal_output +24185,15112490,"TERMINAL",0,0,"Step 1822, loss: 0.009025206789374352, step time: 18.362045288085938ms\r\n",,terminal_output +24186,15112598,"TERMINAL",0,0,"Step 1823, loss: 0.008848341181874275, step time: 18.32294464111328ms\r\n",,terminal_output +24187,15112661,"TERMINAL",0,0,"Step 1824, loss: 0.008696814998984337, step time: 18.32747459411621ms\r\n",,terminal_output +24188,15112827,"TERMINAL",0,0,"Step 1825, loss: 0.00859142653644085, step time: 18.286466598510742ms\r\n",,terminal_output +24189,15112921,"TERMINAL",0,0,"Step 1826, loss: 0.008447452448308468, step time: 18.349409103393555ms\r\nStep 1827, loss: 0.008304351940751076, step time: 18.652915954589844ms\r\n",,terminal_output +24190,15113034,"TERMINAL",0,0,"Step 1828, loss: 0.008167033083736897, step time: 18.377304077148438ms\r\n",,terminal_output +24191,15113097,"TERMINAL",0,0,"Step 1829, loss: 0.008043985813856125, step time: 18.32437515258789ms\r\n",,terminal_output +24192,15113433,"TERMINAL",0,0,"Step 1830, loss: 0.00790830422192812, step time: 316.8058395385742ms\r\n",,terminal_output +24193,15113544,"TERMINAL",0,0,"Step 1831, loss: 0.007765936199575663, step time: 26.186466217041016ms\r\n",,terminal_output +24194,15113626,"TERMINAL",0,0,"Step 1832, loss: 0.007644034456461668, step time: 21.370410919189453ms\r\n",,terminal_output +24195,15113679,"TERMINAL",0,0,"Step 1833, loss: 0.0075177475810050964, step time: 19.856929779052734ms\r\n",,terminal_output +24196,15113786,"TERMINAL",0,0,"Step 1834, loss: 0.007395369466394186, step time: 18.78809928894043ms\r\n",,terminal_output +24197,15113863,"TERMINAL",0,0,"Step 1835, loss: 0.007306172978132963, step time: 18.496274948120117ms\r\n",,terminal_output +24198,15113974,"TERMINAL",0,0,"Step 1836, loss: 0.007273209281265736, step time: 19.46282386779785ms\r\n",,terminal_output +24199,15114026,"TERMINAL",0,0,"Step 1837, loss: 0.007414217106997967, step time: 
20.316362380981445ms\r\n",,terminal_output +24200,15114132,"TERMINAL",0,0,"Step 1838, loss: 0.008640579879283905, step time: 19.272327423095703ms\r\n",,terminal_output +24201,15114240,"TERMINAL",0,0,"Step 1839, loss: 0.04329339787364006, step time: 19.047260284423828ms\r\n",,terminal_output +24202,15114293,"TERMINAL",0,0,"Step 1840, loss: 0.4106314182281494, step time: 18.607378005981445ms\r\n",,terminal_output +24203,15114401,"TERMINAL",0,0,"Step 1841, loss: 0.01944120042026043, step time: 18.561601638793945ms\r\n",,terminal_output +24204,15114469,"TERMINAL",0,0,"Step 1842, loss: 1.105421543121338, step time: 18.59283447265625ms\r\n",,terminal_output +24205,15114584,"TERMINAL",0,0,"Step 1843, loss: 0.10620404034852982, step time: 18.717527389526367ms\r\n",,terminal_output +24206,15114650,"TERMINAL",0,0,"Step 1844, loss: 0.04618602991104126, step time: 18.44477653503418ms\r\n",,terminal_output +24207,15114758,"TERMINAL",0,0,"Step 1845, loss: 0.69759202003479, step time: 18.74542236328125ms\r\n",,terminal_output +24208,15114821,"TERMINAL",0,0,"Step 1846, loss: 0.30083194375038147, step time: 18.463134765625ms\r\n",,terminal_output +24209,15114934,"TERMINAL",0,0,"Step 1847, loss: 0.023101696744561195, step time: 18.520832061767578ms\r\n",,terminal_output +24210,15114997,"TERMINAL",0,0,"Step 1848, loss: 0.07838849723339081, step time: 18.455982208251953ms\r\n",,terminal_output +24211,15115065,"TERMINAL",0,0,"Step 1849, loss: 0.4008443355560303, step time: 18.6007022857666ms\r\n",,terminal_output +24212,15115174,"TERMINAL",0,0,"Step 1850, loss: 0.22129632532596588, step time: 18.36705207824707ms\r\n",,terminal_output +24213,15115241,"TERMINAL",0,0,"Step 1851, loss: 0.038318831473588943, step time: 19.06895637512207ms\r\n",,terminal_output +24214,15115352,"TERMINAL",0,0,"Step 1852, loss: 0.03502720966935158, step time: 18.309593200683594ms\r\n",,terminal_output +24215,15115412,"TERMINAL",0,0,"Step 1853, loss: 0.12420377135276794, step time: 18.576622009277344ms\r\n",,terminal_output +24216,15115521,"TERMINAL",0,0,"Step 1854, loss: 0.20828191936016083, step time: 18.414735794067383ms\r\n",,terminal_output +24217,15115586,"TERMINAL",0,0,"Step 1855, loss: 0.10685212165117264, step time: 18.46909523010254ms\r\n",,terminal_output +24218,15115695,"TERMINAL",0,0,"Step 1856, loss: 0.03388335555791855, step time: 18.375873565673828ms\r\n",,terminal_output +24219,15115759,"TERMINAL",0,0,"Step 1857, loss: 0.02885878086090088, step time: 18.692493438720703ms\r\n",,terminal_output +24220,15115845,"TERMINAL",0,0,"Step 1858, loss: 0.061013877391815186, step time: 18.32747459411621ms\r\n",,terminal_output +24221,15115954,"TERMINAL",0,0,"Step 1859, loss: 0.094783253967762, step time: 18.404722213745117ms\r\n",,terminal_output +24222,15116016,"TERMINAL",0,0,"Step 1860, loss: 0.0761500746011734, step time: 18.125295639038086ms\r\n",,terminal_output +24223,15116125,"TERMINAL",0,0,"Step 1861, loss: 0.04339010640978813, step time: 18.04494857788086ms\r\n",,terminal_output +24224,15116187,"TERMINAL",0,0,"Step 1862, loss: 0.02964917942881584, step time: 17.951250076293945ms\r\n",,terminal_output +24225,15116284,"TERMINAL",0,0,"Step 1863, loss: 0.029638810083270073, step time: 18.459558486938477ms\r\n",,terminal_output +24226,15116394,"TERMINAL",0,0,"Step 1864, loss: 0.03763479366898537, step time: 17.963409423828125ms\r\n",,terminal_output +24227,15116447,"TERMINAL",0,0,"Step 1865, loss: 0.04452211782336235, step time: 17.714262008666992ms\r\n",,terminal_output +24228,15116555,"TERMINAL",0,0,"Step 1866, loss: 
0.04193045571446419, step time: 17.755508422851562ms\r\n",,terminal_output +24229,15116619,"TERMINAL",0,0,"Step 1867, loss: 0.03313561901450157, step time: 17.64702796936035ms\r\n",,terminal_output +24230,15116728,"TERMINAL",0,0,"Step 1868, loss: 0.025394191965460777, step time: 17.922401428222656ms\r\n",,terminal_output +24231,15116785,"TERMINAL",0,0,"Step 1869, loss: 0.021920932456851006, step time: 18.34702491760254ms\r\n",,terminal_output +24232,15116872,"TERMINAL",0,0,"Step 1870, loss: 0.0213277917355299, step time: 18.16701889038086ms\r\n",,terminal_output +24233,15116980,"TERMINAL",0,0,"Step 1871, loss: 0.023295903578400612, step time: 17.784595489501953ms\r\n",,terminal_output +24234,15117046,"TERMINAL",0,0,"Step 1872, loss: 0.025888249278068542, step time: 17.902851104736328ms\r\n",,terminal_output +24235,15117162,"TERMINAL",0,0,"Step 1873, loss: 0.025510422885417938, step time: 18.087148666381836ms\r\n",,terminal_output +24236,15117215,"TERMINAL",0,0,"Step 1874, loss: 0.021674253046512604, step time: 17.788171768188477ms\r\n",,terminal_output +24237,15117324,"TERMINAL",0,0,"Step 1875, loss: 0.017813019454479218, step time: 18.051624298095703ms\r\n",,terminal_output +24238,15117389,"TERMINAL",0,0,"Step 1876, loss: 0.01572628878057003, step time: 17.725229263305664ms\r\n",,terminal_output +24239,15117503,"TERMINAL",0,0,"Step 1877, loss: 0.015447952784597874, step time: 17.847299575805664ms\r\n",,terminal_output +24240,15117559,"TERMINAL",0,0,"Step 1878, loss: 0.016200287267565727, step time: 17.88926124572754ms\r\n",,terminal_output +24241,15117665,"TERMINAL",0,0,"Step 1879, loss: 0.017028650268912315, step time: 17.766714096069336ms\r\n",,terminal_output +24242,15117729,"TERMINAL",0,0,"Step 1880, loss: 0.016958313062787056, step time: 18.004894256591797ms\r\n",,terminal_output +24243,15117835,"TERMINAL",0,0,"Step 1881, loss: 0.015608313493430614, step time: 18.40996742248535ms\r\n",,terminal_output +24244,15117935,"TERMINAL",0,0,"Step 1882, loss: 0.013856279663741589, step time: 17.989158630371094ms\r\n",,terminal_output +24245,15117995,"TERMINAL",0,0,"Step 1883, loss: 0.012757370248436928, step time: 18.07856559753418ms\r\n",,terminal_output +24246,15118103,"TERMINAL",0,0,"Step 1884, loss: 0.012504519894719124, step time: 17.99297332763672ms\r\n",,terminal_output +24247,15118157,"TERMINAL",0,0,"Step 1885, loss: 0.012666196562349796, step time: 17.72761344909668ms\r\n",,terminal_output +24248,15118256,"TERMINAL",0,0,"Step 1886, loss: 0.012647842988371849, step time: 17.831087112426758ms\r\n",,terminal_output +24249,15118362,"TERMINAL",0,0,"Step 1887, loss: 0.012483569793403149, step time: 18.141508102416992ms\r\n",,terminal_output +24250,15118433,"TERMINAL",0,0,"Step 1888, loss: 0.011956922709941864, step time: 17.74907112121582ms\r\n",,terminal_output +24251,15118515,"TERMINAL",0,0,"Step 1889, loss: 0.011288697831332684, step time: 17.808914184570312ms\r\n",,terminal_output +24252,15118671,"TERMINAL",0,0,"Step 1890, loss: 0.01079353503882885, step time: 17.8682804107666ms\r\nStep 1891, loss: 0.010504361242055893, step time: 17.67897605895996ms\r\n",,terminal_output +24253,15118779,"TERMINAL",0,0,"Step 1892, loss: 0.010404221713542938, step time: 17.763614654541016ms\r\n",,terminal_output +24254,15118847,"TERMINAL",0,0,"Step 1893, loss: 0.010350988246500492, step time: 18.094301223754883ms\r\n",,terminal_output +24255,15118955,"TERMINAL",0,0,"Step 1894, loss: 0.010044081136584282, step time: 20.160436630249023ms\r\n",,terminal_output +24256,15119018,"TERMINAL",0,0,"Step 1895, 
loss: 0.009620286524295807, step time: 17.921924591064453ms\r\n",,terminal_output +24257,15119112,"TERMINAL",0,0,"Step 1896, loss: 0.00932735949754715, step time: 18.0203914642334ms\r\n",,terminal_output +24258,15119222,"TERMINAL",0,0,"Step 1897, loss: 0.009101340547204018, step time: 17.637968063354492ms\r\n",,terminal_output +24259,15119275,"TERMINAL",0,0,"Step 1898, loss: 0.008950984105467796, step time: 17.713069915771484ms\r\n",,terminal_output +24260,15119382,"TERMINAL",0,0,"Step 1899, loss: 0.00890706293284893, step time: 18.018245697021484ms\r\n",,terminal_output +24261,15119448,"TERMINAL",0,0,"Step 1900, loss: 0.008740619756281376, step time: 17.78554916381836ms\r\n",,terminal_output +24262,15119557,"TERMINAL",0,0,"Step 1901, loss: 0.008455428294837475, step time: 17.821073532104492ms\r\n",,terminal_output +24263,15119617,"TERMINAL",0,0,"Step 1902, loss: 0.008260661736130714, step time: 17.787694931030273ms\r\n",,terminal_output +24264,15119727,"TERMINAL",0,0,"Step 1903, loss: 0.008074036799371243, step time: 17.97962188720703ms\r\n",,terminal_output +24265,15119789,"TERMINAL",0,0,"Step 1904, loss: 0.007938457652926445, step time: 17.711877822875977ms\r\n",,terminal_output +24266,15119897,"TERMINAL",0,0,"Step 1905, loss: 0.007842677645385265, step time: 18.033981323242188ms\r\n",,terminal_output +24267,15119993,"TERMINAL",0,0,"Step 1906, loss: 0.007675559259951115, step time: 17.725467681884766ms\r\n",,terminal_output +24268,15120056,"TERMINAL",0,0,"Step 1907, loss: 0.007534403819590807, step time: 17.758846282958984ms\r\n",,terminal_output +24269,15120165,"TERMINAL",0,0,"Step 1908, loss: 0.007381404284387827, step time: 18.22042465209961ms\r\n",,terminal_output +24270,15120217,"TERMINAL",0,0,"Step 1909, loss: 0.00724972877651453, step time: 17.883777618408203ms\r\n",,terminal_output +24271,15120326,"TERMINAL",0,0,"Step 1910, loss: 0.007139245979487896, step time: 17.82965660095215ms\r\n",,terminal_output +24272,15120390,"TERMINAL",0,0,"Step 1911, loss: 0.0070028905756771564, step time: 18.086910247802734ms\r\n",,terminal_output +24273,15120502,"TERMINAL",0,0,"Step 1912, loss: 0.006905336398631334, step time: 17.950773239135742ms\r\n",,terminal_output +24274,15120559,"TERMINAL",0,0,"Step 1913, loss: 0.006785420700907707, step time: 17.73238182067871ms\r\n",,terminal_output +24275,15120658,"TERMINAL",0,0,"Step 1914, loss: 0.006665120366960764, step time: 17.766952514648438ms\r\n",,terminal_output +24276,15120767,"TERMINAL",0,0,"Step 1915, loss: 0.00653057498857379, step time: 17.741918563842773ms\r\n",,terminal_output +24277,15120820,"TERMINAL",0,0,"Step 1916, loss: 0.006439621560275555, step time: 17.685890197753906ms\r\n",,terminal_output +24278,15120928,"TERMINAL",0,0,"Step 1917, loss: 0.00632910942658782, step time: 19.971609115600586ms\r\n",,terminal_output +24279,15120990,"TERMINAL",0,0,"Step 1918, loss: 0.0062292953953146935, step time: 18.476247787475586ms\r\n",,terminal_output +24280,15121101,"TERMINAL",0,0,"Step 1919, loss: 0.006114982068538666, step time: 18.000125885009766ms\r\n",,terminal_output +24281,15121156,"TERMINAL",0,0,"Step 1920, loss: 0.00602759001776576, step time: 18.07570457458496ms\r\n",,terminal_output +24282,15121266,"TERMINAL",0,0,"Step 1921, loss: 0.005929550155997276, step time: 17.79007911682129ms\r\n",,terminal_output +24283,15121331,"TERMINAL",0,0,"Step 1922, loss: 0.005819675046950579, step time: 17.808198928833008ms\r\n",,terminal_output +24284,15121441,"TERMINAL",0,0,"Step 1923, loss: 0.005724981892853975, step time: 
18.053054809570312ms\r\n",,terminal_output +24285,15121515,"TERMINAL",0,0,"Step 1924, loss: 0.005630102939903736, step time: 18.024682998657227ms\r\n",,terminal_output +24286,15121624,"TERMINAL",0,0,"Step 1925, loss: 0.00554592115804553, step time: 17.776012420654297ms\r\n",,terminal_output +24287,15121677,"TERMINAL",0,0,"Step 1926, loss: 0.005478377919644117, step time: 17.8067684173584ms\r\n",,terminal_output +24288,15121789,"TERMINAL",0,0,"Step 1927, loss: 0.005472842138260603, step time: 17.786741256713867ms\r\n",,terminal_output +24289,15121854,"TERMINAL",0,0,"Step 1928, loss: 0.005841126665472984, step time: 17.77052879333496ms\r\n",,terminal_output +24290,15121962,"TERMINAL",0,0,"Step 1929, loss: 0.008293058723211288, step time: 18.285751342773438ms\r\n",,terminal_output +24291,15122025,"TERMINAL",0,0,"Step 1930, loss: 0.056554559618234634, step time: 17.664670944213867ms\r\n",,terminal_output +24292,15122134,"TERMINAL",0,0,"Step 1931, loss: 0.20734438300132751, step time: 17.64225959777832ms\r\n",,terminal_output +24293,15122187,"TERMINAL",0,0,"Step 1932, loss: 0.03388851508498192, step time: 17.908334732055664ms\r\n",,terminal_output +24294,15122296,"TERMINAL",0,0,"Step 1933, loss: 0.2356482595205307, step time: 17.618417739868164ms\r\n",,terminal_output +24295,15122361,"TERMINAL",0,0,"Step 1934, loss: 0.034180402755737305, step time: 17.793655395507812ms\r\n",,terminal_output +24296,15122473,"TERMINAL",0,0,"Step 1935, loss: 0.1294628083705902, step time: 17.965316772460938ms\r\n",,terminal_output +24297,15122531,"TERMINAL",0,0,"Step 1936, loss: 0.10869531333446503, step time: 17.62104034423828ms\r\n",,terminal_output +24298,15122628,"TERMINAL",0,0,"Step 1937, loss: 0.06266611069440842, step time: 17.5936222076416ms\r\n",,terminal_output +24299,15122737,"TERMINAL",0,0,"Step 1938, loss: 0.037444617599248886, step time: 17.968177795410156ms\r\n",,terminal_output +24300,15122788,"TERMINAL",0,0,"Step 1939, loss: 0.06699330359697342, step time: 17.66824722290039ms\r\n",,terminal_output +24301,15122893,"TERMINAL",0,0,"Step 1940, loss: 0.07834260910749435, step time: 18.12458038330078ms\r\n",,terminal_output +24302,15123000,"TERMINAL",0,0,"Step 1941, loss: 0.02252383530139923, step time: 18.517255783081055ms\r\n",,terminal_output +24303,15123061,"TERMINAL",0,0,"Step 1942, loss: 0.024389268830418587, step time: 18.08643341064453ms\r\n",,terminal_output +24304,15123171,"TERMINAL",0,0,"Step 1943, loss: 0.049951232969760895, step time: 18.036603927612305ms\r\n",,terminal_output +24305,15123224,"TERMINAL",0,0,"Step 1944, loss: 0.04576143994927406, step time: 18.18680763244629ms\r\n",,terminal_output +24306,15123331,"TERMINAL",0,0,"Step 1945, loss: 0.02542915754020214, step time: 18.012523651123047ms\r\n",,terminal_output +24307,15123432,"TERMINAL",0,0,"Step 1946, loss: 0.017297519370913506, step time: 18.11051368713379ms\r\n",,terminal_output +24308,15123480,"TERMINAL",0,0,"Step 1947, loss: 0.021615615114569664, step time: 19.138097763061523ms\r\n",,terminal_output +24309,15123595,"TERMINAL",0,0,"Step 1948, loss: 0.02979350835084915, step time: 18.132448196411133ms\r\n",,terminal_output +24310,15123648,"TERMINAL",0,0,"Step 1949, loss: 0.025804419070482254, step time: 18.080711364746094ms\r\n",,terminal_output +24311,15123761,"TERMINAL",0,0,"Step 1950, loss: 0.020297013223171234, step time: 18.377065658569336ms\r\n",,terminal_output +24312,15123825,"TERMINAL",0,0,"Step 1951, loss: 0.016194578260183334, step time: 18.32437515258789ms\r\n",,terminal_output +24313,15123933,"TERMINAL",0,0,"Step 
1952, loss: 0.014973209239542484, step time: 18.282175064086914ms\r\n",,terminal_output +24314,15123998,"TERMINAL",0,0,"Step 1953, loss: 0.01587781496345997, step time: 23.675918579101562ms\r\n",,terminal_output +24315,15124111,"TERMINAL",0,0,"Step 1954, loss: 0.017581364139914513, step time: 19.643545150756836ms\r\n",,terminal_output +24316,15124172,"TERMINAL",0,0,"Step 1955, loss: 0.01696292869746685, step time: 18.904685974121094ms\r\n",,terminal_output +24317,15124281,"TERMINAL",0,0,"Step 1956, loss: 0.014208956621587276, step time: 18.612384796142578ms\r\n",,terminal_output +24318,15124343,"TERMINAL",0,0,"Step 1957, loss: 0.012174751609563828, step time: 19.222259521484375ms\r\n",,terminal_output +24319,15124454,"TERMINAL",0,0,"Step 1958, loss: 0.011645039543509483, step time: 18.536090850830078ms\r\n",,terminal_output +24320,15124516,"TERMINAL",0,0,"Step 1959, loss: 0.01198833342641592, step time: 19.18172836303711ms\r\n",,terminal_output +24321,15124628,"TERMINAL",0,0,"Step 1960, loss: 0.012388631701469421, step time: 18.669843673706055ms\r\n",,terminal_output +24322,15124689,"TERMINAL",0,0,"Step 1961, loss: 0.012332463636994362, step time: 18.306493759155273ms\r\n",,terminal_output +24323,15124797,"TERMINAL",0,0,"Step 1962, loss: 0.011159100569784641, step time: 18.350601196289062ms\r\n",,terminal_output +24324,15124870,"TERMINAL",0,0,"Step 1963, loss: 0.009942109696567059, step time: 18.1729793548584ms\r\n",,terminal_output +24325,15124976,"TERMINAL",0,0,"Step 1964, loss: 0.009224949404597282, step time: 18.270254135131836ms\r\n",,terminal_output +24326,15125038,"TERMINAL",0,0,"Step 1965, loss: 0.009168540127575397, step time: 18.91326904296875ms\r\n",,terminal_output +24327,15125134,"TERMINAL",0,0,"Step 1966, loss: 0.009295633062720299, step time: 18.487215042114258ms\r\n",,terminal_output +24328,15125244,"TERMINAL",0,0,"Step 1967, loss: 0.009379757568240166, step time: 18.692731857299805ms\r\n",,terminal_output +24329,15125297,"TERMINAL",0,0,"Step 1968, loss: 0.008851832710206509, step time: 18.340349197387695ms\r\n",,terminal_output +24330,15125408,"TERMINAL",0,0,"Step 1969, loss: 0.008227367885410786, step time: 18.31221580505371ms\r\n",,terminal_output +24331,15125472,"TERMINAL",0,0,"Step 1970, loss: 0.007831116206943989, step time: 18.311500549316406ms\r\n",,terminal_output +24332,15125581,"TERMINAL",0,0,"Step 1971, loss: 0.007831722497940063, step time: 18.48745346069336ms\r\n",,terminal_output +24333,15125678,"TERMINAL",0,0,"Step 1972, loss: 0.007833363488316536, step time: 18.19014549255371ms\r\n",,terminal_output +24334,15125731,"TERMINAL",0,0,"Step 1973, loss: 0.007616316433995962, step time: 18.292665481567383ms\r\n",,terminal_output +24335,15125839,"TERMINAL",0,0,"Step 1974, loss: 0.007297067437320948, step time: 18.608570098876953ms\r\n",,terminal_output +24336,15125893,"TERMINAL",0,0,"Step 1975, loss: 0.006977672688663006, step time: 18.59450340270996ms\r\n",,terminal_output +24337,15126004,"TERMINAL",0,0,"Step 1976, loss: 0.0068000080063939095, step time: 18.524646759033203ms\r\n",,terminal_output +24338,15126066,"TERMINAL",0,0,"Step 1977, loss: 0.0067944517359137535, step time: 18.552303314208984ms\r\n",,terminal_output +24339,15126173,"TERMINAL",0,0,"Step 1978, loss: 0.006667823530733585, step time: 18.429279327392578ms\r\n",,terminal_output +24340,15126239,"TERMINAL",0,0,"Step 1979, loss: 0.006463111378252506, step time: 18.518447875976562ms\r\n",,terminal_output +24341,15126347,"TERMINAL",0,0,"Step 1980, loss: 0.006276045460253954, step time: 
18.73302459716797ms\r\n",,terminal_output +24342,15126411,"TERMINAL",0,0,"Step 1981, loss: 0.006123180501163006, step time: 18.358707427978516ms\r\n",,terminal_output +24343,15126520,"TERMINAL",0,0,"Step 1982, loss: 0.006017886567860842, step time: 18.28312873840332ms\r\n",,terminal_output +24344,15126582,"TERMINAL",0,0,"Step 1983, loss: 0.005924256518483162, step time: 18.614530563354492ms\r\n",,terminal_output +24345,15126676,"TERMINAL",0,0,"Step 1984, loss: 0.005807751324027777, step time: 18.3718204498291ms\r\n",,terminal_output +24346,15126784,"TERMINAL",0,0,"Step 1985, loss: 0.0056739007122814655, step time: 18.11361312866211ms\r\n",,terminal_output +24347,15126841,"TERMINAL",0,0,"Step 1986, loss: 0.005583315622061491, step time: 18.42784881591797ms\r\n",,terminal_output +24348,15126949,"TERMINAL",0,0,"Step 1987, loss: 0.005501762498170137, step time: 18.220901489257812ms\r\n",,terminal_output +24349,15127014,"TERMINAL",0,0,"Step 1988, loss: 0.005392689257860184, step time: 18.25547218322754ms\r\n",,terminal_output +24350,15127123,"TERMINAL",0,0,"Step 1989, loss: 0.005272491369396448, step time: 18.542766571044922ms\r\n",,terminal_output +24351,15127186,"TERMINAL",0,0,"Step 1990, loss: 0.005177106708288193, step time: 18.295764923095703ms\r\n",,terminal_output +24352,15127297,"TERMINAL",0,0,"Step 1991, loss: 0.005094597116112709, step time: 18.248319625854492ms\r\n",,terminal_output +24353,15127356,"TERMINAL",0,0,"Step 1992, loss: 0.005008616019040346, step time: 18.410682678222656ms\r\n",,terminal_output +24354,15127467,"TERMINAL",0,0,"Step 1993, loss: 0.004919107537716627, step time: 18.425464630126953ms\r\n",,terminal_output +24355,15127530,"TERMINAL",0,0,"Step 1994, loss: 0.004826480057090521, step time: 18.380403518676758ms\r\n",,terminal_output +24356,15127641,"TERMINAL",0,0,"Step 1995, loss: 0.0047575198113918304, step time: 18.559932708740234ms\r\n",,terminal_output +24357,15127779,"TERMINAL",0,0,"Step 1996, loss: 0.004697316791862249, step time: 18.228530883789062ms\r\n",,terminal_output +24358,15127831,"TERMINAL",0,0,"Step 1997, loss: 0.004598756320774555, step time: 18.32747459411621ms\r\n",,terminal_output +24359,15128129,"TERMINAL",0,0,"Step 1998, loss: 0.00450768880546093, step time: 336.17186546325684ms\r\n",,terminal_output +24360,15128306,"TERMINAL",0,0,"Step 1999, loss: 0.004453530069440603, step time: 25.901079177856445ms\r\n",,terminal_output +24361,15130785,"TERMINAL",0,0,"Step 2000, loss: 0.0044024353846907616, step time: 26.555776596069336ms\r\nStep 2001, loss: 0.004317385144531727, step time: 25.9091854095459ms\r\n",,terminal_output +24362,15130928,"TERMINAL",0,0,"Step 2002, loss: 0.004232408944517374, step time: 21.1334228515625ms\r\n",,terminal_output +24363,15130982,"TERMINAL",0,0,"Step 2003, loss: 0.0041733370162546635, step time: 20.751476287841797ms\r\n",,terminal_output +24364,15131134,"TERMINAL",0,0,"Step 2004, loss: 0.004121991340070963, step time: 19.520282745361328ms\r\n",,terminal_output +24365,15131199,"TERMINAL",0,0,"Step 2005, loss: 0.004043898545205593, step time: 20.055055618286133ms\r\nStep 2006, loss: 0.00396457826718688, step time: 20.225048065185547ms\r\n",,terminal_output +24366,15131393,"TERMINAL",0,0,"Step 2007, loss: 0.003895817557349801, step time: 20.082473754882812ms\r\nStep 2008, loss: 0.0038477247580885887, step time: 20.051240921020508ms\r\n",,terminal_output +24367,15131475,"TERMINAL",0,0,"Step 2009, loss: 0.003777381731197238, step time: 20.53046226501465ms\r\n",,terminal_output +24368,15131587,"TERMINAL",0,0,"Step 2010, loss: 
0.003705577226355672, step time: 19.956350326538086ms\r\n",,terminal_output +24369,15131650,"TERMINAL",0,0,"Step 2011, loss: 0.00365059869363904, step time: 19.86980438232422ms\r\n",,terminal_output +24370,15131761,"TERMINAL",0,0,"Step 2012, loss: 0.0035827672109007835, step time: 20.358562469482422ms\r\n",,terminal_output +24371,15131824,"TERMINAL",0,0,"Step 2013, loss: 0.0035136891528964043, step time: 19.88673210144043ms\r\n",,terminal_output +24372,15131944,"TERMINAL",0,0,"Step 2014, loss: 0.0034699179232120514, step time: 19.57392692565918ms\r\n",,terminal_output +24373,15132004,"TERMINAL",0,0,"Step 2015, loss: 0.0034406334161758423, step time: 20.328760147094727ms\r\n",,terminal_output +24374,15132079,"TERMINAL",0,0,"Step 2016, loss: 0.003470754949375987, step time: 19.51432228088379ms\r\n",,terminal_output +24375,15132187,"TERMINAL",0,0,"Step 2017, loss: 0.0036929240450263023, step time: 19.39105987548828ms\r\n",,terminal_output +24376,15132334,"TERMINAL",0,0,"Step 2018, loss: 0.006417162716388702, step time: 20.307064056396484ms\r\nStep 2019, loss: 0.2198282927274704, step time: 19.984006881713867ms\r\n",,terminal_output +24377,15132440,"TERMINAL",0,0,"Step 2020, loss: 0.10229979455471039, step time: 18.790006637573242ms\r\n",,terminal_output +24378,15132597,"TERMINAL",0,0,"Step 2021, loss: 0.46910759806632996, step time: 19.8061466217041ms\r\nStep 2022, loss: 0.010035254992544651, step time: 18.934965133666992ms\r\n",,terminal_output +24379,15132706,"TERMINAL",0,0,"Step 2023, loss: 0.1774108111858368, step time: 19.102811813354492ms\r\n",,terminal_output +24380,15132771,"TERMINAL",0,0,"Step 2024, loss: 0.312852144241333, step time: 18.771886825561523ms\r\n",,terminal_output +24381,15132859,"TERMINAL",0,0,"Step 2025, loss: 0.02607247792184353, step time: 19.252300262451172ms\r\n",,terminal_output +24382,15132967,"TERMINAL",0,0,"Step 2026, loss: 0.02888214774429798, step time: 19.021034240722656ms\r\n",,terminal_output +24383,15133032,"TERMINAL",0,0,"Step 2027, loss: 0.1677665114402771, step time: 19.18649673461914ms\r\n",,terminal_output +24384,15133159,"TERMINAL",0,0,"Step 2028, loss: 0.13544219732284546, step time: 18.982410430908203ms\r\n^C",,terminal_output +24385,15133218,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 241, in <module>\r\n print(f""Step {step}, loss: {loss}, step time: {elapsed_time}ms"")\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 341, in __format__\r\n return format(self._value[()], format_spec)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/array.py"", line 641, in _value\r\n npy_value, did_copy = self._single_device_array_to_np_array_did_copy()\r\nKeyboardInterrupt\r\n",,terminal_output +24386,15133357,"TERMINAL",0,0,"^C",,terminal_output +24387,15133559,"TERMINAL",0,0,"^C",,terminal_output +24388,15133699,"TERMINAL",0,0,"Exception ignored in atexit callback: .teardown_atexit at 0x1552800ce7a0>\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 94, in 
teardown_atexit\r\n conn.teardown(hooks.exit_code)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 236, in teardown\r\n return self._proc.join()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/service/service.py"", line 251, in join\r\n ret = self._internal_proc.wait()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/subprocess.py"", line 1222, in wait\r\n self._wait(timeout=sigint_timeout)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/subprocess.py"", line 1953, in _wait\r\n time.sleep(delay)\r\nKeyboardInterrupt: \r\n^CException ignored in: <function WeakKeyDictionary.__init__.<locals>.remove at 0x1552dd922710>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +24389,15134459,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-tiny-overfit-big-lr-0000 at: https://wandb.ai/instant-uv/jafar/runs/k6pfalr3\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250630_183109-k6pfalr3/logs\r\n",,terminal_output +24390,15134998,"TERMINAL",0,0,"^C",,terminal_output +24391,15135366,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +24392,15140555,"TERMINAL",0,0,"[?25ll[?25h[?25ls[?25h",,terminal_output +24393,15140724,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +24394,15142855,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +24395,15143979,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +24396,15149412,"scripts_horeka/train_dynamics.sh",167,0,"",shellscript,selection_mouse +24397,15149542,"scripts_horeka/train_dynamics.sh",167,1,"/",shellscript,selection_mouse +24398,15149554,"scripts_horeka/train_dynamics.sh",167,2,"/h",shellscript,selection_mouse +24399,15149571,"scripts_horeka/train_dynamics.sh",167,5,"/hkfs",shellscript,selection_mouse +24400,15149630,"scripts_horeka/train_dynamics.sh",167,7,"/hkfs/w",shellscript,selection_mouse +24401,15149631,"scripts_horeka/train_dynamics.sh",167,9,"/hkfs/wor",shellscript,selection_mouse +24402,15149631,"scripts_horeka/train_dynamics.sh",167,11,"/hkfs/work/",shellscript,selection_mouse +24403,15149636,"scripts_horeka/train_dynamics.sh",167,12,"/hkfs/work/w",shellscript,selection_mouse +24404,15149653,"scripts_horeka/train_dynamics.sh",167,14,"/hkfs/work/wor",shellscript,selection_mouse +24405,15149717,"scripts_horeka/train_dynamics.sh",167,17,"/hkfs/work/worksp",shellscript,selection_mouse +24406,15149718,"scripts_horeka/train_dynamics.sh",167,18,"/hkfs/work/workspa",shellscript,selection_mouse +24407,15149718,"scripts_horeka/train_dynamics.sh",167,20,"/hkfs/work/workspace",shellscript,selection_mouse +24408,15149731,"scripts_horeka/train_dynamics.sh",167,21,"/hkfs/work/workspace/",shellscript,selection_mouse +24409,15149746,"scripts_horeka/train_dynamics.sh",167,23,"/hkfs/work/workspace/sc",shellscript,selection_mouse +24410,15149776,"scripts_horeka/train_dynamics.sh",167,25,"/hkfs/work/workspace/scra",shellscript,selection_mouse
+24411,15149835,"scripts_horeka/train_dynamics.sh",167,27,"/hkfs/work/workspace/scratc",shellscript,selection_mouse +24412,15149836,"scripts_horeka/train_dynamics.sh",167,28,"/hkfs/work/workspace/scratch",shellscript,selection_mouse +24413,15149849,"scripts_horeka/train_dynamics.sh",167,29,"/hkfs/work/workspace/scratch/",shellscript,selection_mouse +24414,15149849,"scripts_horeka/train_dynamics.sh",167,30,"/hkfs/work/workspace/scratch/t",shellscript,selection_mouse +24415,15149855,"scripts_horeka/train_dynamics.sh",167,31,"/hkfs/work/workspace/scratch/tu",shellscript,selection_mouse +24416,15149869,"scripts_horeka/train_dynamics.sh",167,32,"/hkfs/work/workspace/scratch/tum",shellscript,selection_mouse +24417,15149882,"scripts_horeka/train_dynamics.sh",167,33,"/hkfs/work/workspace/scratch/tum_",shellscript,selection_mouse +24418,15149940,"scripts_horeka/train_dynamics.sh",167,34,"/hkfs/work/workspace/scratch/tum_i",shellscript,selection_mouse +24419,15149941,"scripts_horeka/train_dynamics.sh",167,35,"/hkfs/work/workspace/scratch/tum_in",shellscript,selection_mouse +24420,15149941,"scripts_horeka/train_dynamics.sh",167,36,"/hkfs/work/workspace/scratch/tum_ind",shellscript,selection_mouse +24421,15149949,"scripts_horeka/train_dynamics.sh",167,37,"/hkfs/work/workspace/scratch/tum_ind3",shellscript,selection_mouse +24422,15149970,"scripts_horeka/train_dynamics.sh",155,12,"rds\nws_dir='",shellscript,selection_mouse +24423,15149984,"scripts_horeka/train_dynamics.sh",156,11,"ds\nws_dir='",shellscript,selection_mouse +24424,15149999,"scripts_horeka/train_dynamics.sh",157,10,"s\nws_dir='",shellscript,selection_mouse +24425,15150057,"scripts_horeka/train_dynamics.sh",158,9,"\nws_dir='",shellscript,selection_mouse +24426,15150066,"scripts_horeka/train_dynamics.sh",167,45,"/hkfs/work/workspace/scratch/tum_ind3695-jafa",shellscript,selection_mouse +24427,15150083,"scripts_horeka/train_dynamics.sh",167,46,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_",shellscript,selection_mouse +24428,15150137,"scripts_horeka/train_dynamics.sh",167,47,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_w",shellscript,selection_mouse +24429,15150138,"scripts_horeka/train_dynamics.sh",167,48,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws",shellscript,selection_mouse +24430,15150138,"scripts_horeka/train_dynamics.sh",167,49,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_",shellscript,selection_mouse +24431,15150149,"scripts_horeka/train_dynamics.sh",167,50,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_s",shellscript,selection_mouse +24432,15150167,"scripts_horeka/train_dynamics.sh",167,51,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_sh",shellscript,selection_mouse +24433,15150224,"scripts_horeka/train_dynamics.sh",167,52,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_sha",shellscript,selection_mouse +24434,15150233,"scripts_horeka/train_dynamics.sh",167,53,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shar",shellscript,selection_mouse +24435,15150290,"scripts_horeka/train_dynamics.sh",167,54,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_share",shellscript,selection_mouse +24436,15150349,"scripts_horeka/train_dynamics.sh",167,55,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",shellscript,selection_mouse +24437,15150659,"scripts_horeka/train_dynamics.sh",167,56,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse +24438,15154073,"TERMINAL",0,0,"[?25lk[?25h",,terminal_output +24439,15154742,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output 
+24440,15155280,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +24441,15155354,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +24442,15155418,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +24443,15155934,"TERMINAL",0,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",,terminal_output +24444,15157328,"TERMINAL",0,0,"\r/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",,terminal_output +24445,15157558,"TERMINAL",0,0,"\r\ncheckpoints/ data/ logs/ scripts/ .venv/ \r\n(jafar) [tum_cte0515@hkn0531 jafar]$ ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",,terminal_output +24446,15158904,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +24447,15159062,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +24448,15159127,"TERMINAL",0,0,"eckpoints/",,terminal_output +24449,15159335,"TERMINAL",0,0,"",,terminal_output +24450,15159585,"TERMINAL",0,0,"\r\n0000/ 3292258/ 3294601/ 3297582/ 3299258/ 3301031/\r\n3290283/ 3292328/ 3294602/ 3297586/ 3299259/ 3306801/\r\n3290284/ 3292329/ 3294603/ 3297606/ 3299272/ dyn/\r\n3290295/ 3292330/ 3296502/ 3297671/ 3299579/ dynamics_ckpt_dir/\r\n",,terminal_output +24451,15159772,"TERMINAL",0,0,"3290296/ 3292331/ 3296540/ 3297693/ 3300233/ lam/\r\n3290366/ 3292332/ 3296571/ 3297706/ 3300290/ lam-1-action/\r\n3290367/ 3292333/ 3296573/ 3297727/ 3300658/ lam_ckpt_dir/\r\n3290391/ 3292334/ 3296574/ 3299016/ 3300663/ tokenizer/\r\n3290392/ 3292335/ 3296575/ 3299062/ 3300672/ tokenizer_ckpt_dir/\r\n3290439/ 3292336/ 3297569/ 3299063/ 3301025/ \r\n3290440/ 3292337/ 3297575/ 3299065/ 3301026/ \r\n3291405/ 3292338/ 3297576/ 3299066/ 3301027/ \r\n3292213/ 3292339/ 3297577/ 3299068/ 3301029/ \r\n3292221/ 3294600/ 3297578/ 3299069/ 3301030/ \r\n(jafar) [tum_cte0515@hkn0531 jafar]$ ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/",,terminal_output +24452,15167872,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +24453,15173912,"scripts_horeka/train_dynamics.sh",309,0,"",shellscript,selection_mouse +24454,15174764,"scripts_horeka/train_dynamics.sh",308,1,"",shellscript,content +24455,15175710,"scripts_horeka/train_dynamics.sh",308,0,"/",shellscript,content +24456,15175710,"scripts_horeka/train_dynamics.sh",309,0,"",shellscript,selection_keyboard +24457,15180262,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +24458,15180384,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +24459,15180507,"TERMINAL",0,0,"00/",,terminal_output +24460,15180866,"TERMINAL",0,0,"",,terminal_output +24461,15181103,"TERMINAL",0,0,"\r\ngenie_1750859892_500/ genie_1751299181_1500/ genie_1751301068_1000/\r\ngenie_1751066700_1000/ genie_1751299181_2000/ genie_1751301068_1500/\r\n",,terminal_output +24462,15181156,"TERMINAL",0,0,"genie_1751066700_500/ genie_1751299181_500/ genie_1751301068_2000/\r\ngenie_1751298360_1000/ genie_1751299499_1000/ genie_1751301068_500/\r\ngenie_1751298360_1500/ genie_1751299499_1500/ lam_1751030160_1000/\r\ngenie_1751298360_2000/ genie_1751299499_2000/ lam_1751030160_500/\r\ngenie_1751298360_2500/ genie_1751299499_2500/ lam_1751297992_10/\r\ngenie_1751298360_3000/ genie_1751299499_3000/ lam_1751297992_15/\r\ngenie_1751298360_3500/ genie_1751299499_3500/ lam_1751297992_20/\r\ngenie_1751298360_4000/ genie_1751299499_4000/ lam_1751297992_25/\r\ngenie_1751298360_500/ genie_1751299499_4500/ lam_1751297992_30/\r\ngenie_1751298805_1000/ genie_1751299499_500/ lam_1751297992_35/\r\ngenie_1751298805_1500/ genie_1751299499_5000/ lam_1751297992_40/\r\ngenie_1751298805_2000/ genie_1751299499_5500/ lam_1751297992_45/\r\ngenie_1751298805_2500/ 
genie_1751299499_6000/ lam_1751297992_50.orbax-checkpoint-tmp-9/\r\ngenie_1751298805_500/ genie_1751300634_1000/ \r\ngenie_1751299181_1000/ genie_1751300634_500/ \r\n(jafar) [tum_cte0515@hkn0531 jafar]$ ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/",,terminal_output +24463,15188496,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +24464,15188562,"TERMINAL",0,0,"enie_175",,terminal_output +24465,15197945,"TERMINAL",0,0,"\r",,terminal_output +24466,15198114,"TERMINAL",0,0,"",,terminal_output +24467,15198254,"TERMINAL",0,0,"",,terminal_output +24468,15203794,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +24469,15205851,"scripts_horeka/train_dynamics.sh",347,0,"",shellscript,selection_mouse +24470,15206350,"scripts_horeka/train_dynamics.sh",310,0,"",shellscript,selection_mouse +24471,15206481,"scripts_horeka/train_dynamics.sh",310,12,"slurm_job_id",shellscript,selection_mouse +24472,15207167,"scripts_horeka/train_dynamics.sh",307,0,"",shellscript,selection_mouse +24473,15207749,"scripts_horeka/train_dynamics.sh",320,0,"",shellscript,selection_mouse +24474,15208571,"scripts_horeka/train_dynamics.sh",260,0,"",shellscript,selection_mouse +24475,15208696,"scripts_horeka/train_dynamics.sh",257,4,"0000",shellscript,selection_mouse +24476,15213639,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +24477,15214930,"TERMINAL",0,0,"",,terminal_output +24478,15215797,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +24479,15217234,"TERMINAL",0,0,"",,terminal_output +24480,15217667,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +24481,15217998,"TERMINAL",0,0,"",,terminal_output +24482,15218315,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +24483,15218567,"TERMINAL",0,0,"",,terminal_output +24484,15218769,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +24485,15218982,"TERMINAL",0,0,"",,terminal_output +24486,15219226,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +24487,15219479,"TERMINAL",0,0,"",,terminal_output +24488,15228894,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +24489,15230003,"scripts_horeka/train_lam.sh",0,0,"",shellscript,tab +24490,15232531,"genie.py",0,0,"",python,tab +24491,15237353,"train_dynamics.py",0,0,"",python,tab +24492,15237765,"train_dynamics.py",2158,0,"",python,selection_mouse +24493,15239061,"train_dynamics.py",2157,0,"",python,selection_command +24494,15242803,"train_dynamics.py",6192,0,"",python,selection_command +24495,15243088,"train_dynamics.py",9656,0,"",python,selection_command +24496,15244495,"train_dynamics.py",9755,0,"",python,selection_mouse +24497,15245005,"train_dynamics.py",9770,0,"",python,selection_mouse +24498,15245575,"train_dynamics.py",9852,0,"",python,selection_mouse +24499,15245727,"train_dynamics.py",9843,21,"save_args_from_target",python,selection_mouse +24500,15246520,"train_dynamics.py",9897,0,"",python,selection_mouse +24501,15247053,"train_dynamics.py",9947,0,"",python,selection_mouse +24502,15247210,"train_dynamics.py",9945,2,"os",python,selection_mouse +24503,15247716,"train_dynamics.py",9964,0,"",python,selection_mouse +24504,15247870,"train_dynamics.py",9963,8,"ckpt_dir",python,selection_mouse +24505,15248492,"train_dynamics.py",9981,0,"",python,selection_mouse +24506,15248609,"train_dynamics.py",9975,6,"genie_",python,selection_mouse +24507,15249107,"train_dynamics.py",9985,0,"",python,selection_mouse 
+24508,15249735,"train_dynamics.py",9983,0,"",python,selection_mouse +24509,15249850,"train_dynamics.py",9982,2,"ts",python,selection_mouse +24510,15276528,"TERMINAL",0,0,"sh scripts_horeka/train_dynamics.sh ",,terminal_output +24511,15278131,"TERMINAL",0,0,"",,terminal_output +24512,15282117,"TERMINAL",0,0,"ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/",,terminal_output +24513,15284162,"TERMINAL",0,0,"\rls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/",,terminal_output +24514,15285151,"TERMINAL",0,0,"genie_1751301068_2000/\r\n[?2004l\rarray_metadatas _CHECKPOINT_METADATA d manifest.ocdbt _METADATA ocdbt.process_0 _sharding\r\n]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +24515,15288722,"TERMINAL",0,0,"ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/",,terminal_output +24516,15292131,"TERMINAL",0,0,"[?25l_ \r[?25h",,terminal_output +24517,15292199,"TERMINAL",0,0,"",,terminal_output +24518,15293457,"TERMINAL",0,0,"[?25lC[?25h",,terminal_output +24519,15293704,"TERMINAL",0,0,"HECKPOINT_METADATA ",,terminal_output +24520,15294627,"TERMINAL",0,0,"\r\n[?2004l\r/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/_CHECKPOINT_METADATA\r\n]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +24521,15296474,"TERMINAL",0,0,"ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/_CHECKPOINT_METADATA ",,terminal_output +24522,15297630,"TERMINAL",0,0,"[?25l\r[?25h",,terminal_output +24523,15297992,"TERMINAL",0,0,"s /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/_CH",,terminal_output +24524,15298206,"TERMINAL",0,0," /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/_CHE",,terminal_output +24525,15298371,"TERMINAL",0,0,"c /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/_CH[1@E",,terminal_output +24526,15298690,"TERMINAL",0,0,"[?25la/ /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/_C[1@H[?25ht /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/_[1@C",,terminal_output +24527,15298957,"TERMINAL",0,0,"[?25l\r\r\n[?2004l\r{""item_handlers"": ""orbax.checkpoint._src.handlers.pytree_checkpoint_handler.PyTreeCheckpointHandler"", ""metrics"": {}, ""performance_metrics"": {}, ""init_timestamp_nsecs"": 1751301372983203886, ""commit_timestamp_nsecs"": 1751301374993446173, ""custom_metadata"": {}}]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ [?25h",,terminal_output +24528,15339710,"train_dynamics.py",0,0,"",python,tab +24529,15341839,"models/dynamics.py",0,0,"",python,tab +24530,15344333,"train_dynamics.py",0,0,"",python,tab +24531,15367806,"TERMINAL",0,0,"bash",,terminal_focus +24532,15399413,"TERMINAL",0,0,"python -c ""import time; print(time.strftime('%Y-%m-%d %H:%M:%S', time.localtime(int(time.time()))))""^C",,terminal_command +24533,15399431,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;ead59344-49db-4336-9336-47fae706e637]633;C]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D",,terminal_output +24534,15403016,"TERMINAL",0,0,"python",,terminal_command 
+24535,15403047,"TERMINAL",0,0,"]633;E;2025-06-30 18:40:47 python;ead59344-49db-4336-9336-47fae706e637]633;C",,terminal_output +24536,15403468,"TERMINAL",0,0,"Python 3.10.18 (main, Jun 4 2025, 17:36:27) [Clang 20.1.4 ] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n",,terminal_output +24537,15403872,"TERMINAL",0,0,">>> ",,terminal_output +24538,15405308,"TERMINAL",0,0,"i",,terminal_output +24539,15405420,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +24540,15405617,"TERMINAL",0,0,"[?25lpo[?25h",,terminal_output +24541,15405821,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +24542,15406007,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +24543,15406104,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +24544,15406215,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +24545,15406282,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +24546,15406358,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +24547,15406424,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +24548,15407095,"TERMINAL",0,0,"\r\n>>> ",,terminal_output +24549,15410111,"TERMINAL",0,0,"t",,terminal_output +24550,15410176,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +24551,15410243,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +24552,15410386,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +24553,15410542,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +24554,15410800,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +24555,15410972,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +24556,15411054,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +24557,15411371,"TERMINAL",0,0,"[?25la\r>>> time.lo[?25h",,terminal_output +24558,15411482,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +24559,15411654,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +24560,15411707,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +24561,15412091,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +24562,15412156,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +24563,15412232,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +24564,15412284,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +24565,15412931,"TERMINAL",0,0,"[?25l([?25h",,terminal_output +24566,15414821,"TERMINAL",0,0,"[?25l1751301372983203886[?25h",,terminal_output +24567,15415698,"TERMINAL",0,0,"[?25l)[?25h",,terminal_output +24568,15417371,"TERMINAL",0,0,"\r\nTraceback (most recent call last):\r\n File """", line 1, in \r\nOSError: [Errno 75] Value too large for defined data type\r\n>>> ",,terminal_output +24569,15420506,"TERMINAL",0,0,"srun",,terminal_focus +24570,15426515,"TERMINAL",0,0,"python",,terminal_focus +24571,15427170,"TERMINAL",0,0,"\r>>> time.localtime(1751301372983203886)",,terminal_output +24572,15427938,"TERMINAL",0,0,"\r)",,terminal_output +24573,15428755,"TERMINAL",0,0,"\r1)\r7)\r5)\r1)\r3)\r0)\r1)\r0)\r6)\r8)",,terminal_output +24574,15429058,"TERMINAL",0,0,"\r\ntime.struct_time(tm_year=2025, tm_mon=6, tm_mday=30, tm_hour=18, tm_min=31, tm_sec=8, tm_wday=0, tm_yday=181, tm_isdst=1)\r\n>>> ",,terminal_output +24575,15445704,"TERMINAL",0,0,"srun",,terminal_focus +24576,15451774,"TERMINAL",0,0,"python",,terminal_focus +24577,15452424,"TERMINAL",0,0,"\r>>> time.localtime(1751301068)",,terminal_output +24578,15453149,"TERMINAL",0,0,"[?25l8\r)[?25h",,terminal_output +24579,15453740,"TERMINAL",0,0,"[?25l06\r)[?25h[?25l1\r)[?25h[?25l1\r)[?25h[?25l0\r)[?25h\r)",,terminal_output +24580,15453849,"TERMINAL",0,0,"[?25l1\r)[?25h",,terminal_output +24581,15454057,"TERMINAL",0,0,"[?25l5\r)[?25h",,terminal_output +24582,15454728,"TERMINAL",0,0,"[?25l7\r)[?25h",,terminal_output 
+24583,15455196,"TERMINAL",0,0,"[?25l1\r)[?25h",,terminal_output +24584,15458140,"TERMINAL",0,0,"\r1)\r7)\r5)\r1)\r3)\r0)\r0)\r6)\r3)\r4)",,terminal_output +24585,15458457,"TERMINAL",0,0,"\r\ntime.struct_time(tm_year=2025, tm_mon=6, tm_mday=30, tm_hour=18, tm_min=23, tm_sec=54, tm_wday=0, tm_yday=181, tm_isdst=1)\r\n>>> ",,terminal_output +24586,15464827,"TERMINAL",0,0,"srun",,terminal_focus +24587,15466806,"TERMINAL",0,0,"python",,terminal_focus +24588,15467332,"TERMINAL",0,0,"\r>>> time.localtime(1751300634)",,terminal_output +24589,15468624,"TERMINAL",0,0,"[?25l4\r)[?25h",,terminal_output +24590,15469154,"TERMINAL",0,0,"[?25l63\r)[?25h[?25l6\r)[?25h[?25l0\r)[?25h[?25l0\r)[?25h\r)",,terminal_output +24591,15469263,"TERMINAL",0,0,"[?25l1\r)[?25h",,terminal_output +24592,15469397,"TERMINAL",0,0,"[?25l5\r)[?25h",,terminal_output +24593,15469525,"TERMINAL",0,0,"[?25l7\r)[?25h",,terminal_output +24594,15469870,"TERMINAL",0,0,"[?25l1\r)[?25h",,terminal_output +24595,15470270,"TERMINAL",0,0,"\r1)\r7)\r5)\r1)\r2)\r9)\r9)\r4)\r9)\r9)",,terminal_output +24596,15470541,"TERMINAL",0,0,"\r\ntime.struct_time(tm_year=2025, tm_mon=6, tm_mday=30, tm_hour=18, tm_min=4, tm_sec=59, tm_wday=0, tm_yday=181, tm_isdst=1)\r\n>>> ",,terminal_output +24597,15475719,"TERMINAL",0,0,"srun",,terminal_focus +24598,15477724,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +24599,15479245,"scripts_horeka/overfit_sample_tiny/sample.sh",837,0,"",shellscript,selection_mouse +24600,15482926,"scripts_horeka/overfit_sample_tiny/sample.sh",416,0,"",shellscript,selection_mouse +24601,15482927,"scripts_horeka/overfit_sample_tiny/sample.sh",415,0,"",shellscript,selection_command +24602,15486223,"scripts_horeka/overfit_sample_tiny/sample.sh",839,0,"",shellscript,selection_mouse +24603,15486225,"scripts_horeka/overfit_sample_tiny/sample.sh",838,0,"",shellscript,selection_command +24604,15486842,"scripts_horeka/overfit_sample_tiny/sample.sh",840,0,"",shellscript,selection_mouse +24605,15490836,"scripts_horeka/overfit_sample_tiny/sample.sh",840,0,"C",shellscript,content +24606,15490837,"scripts_horeka/overfit_sample_tiny/sample.sh",841,0,"",shellscript,selection_keyboard +24607,15491211,"scripts_horeka/overfit_sample_tiny/sample.sh",841,0,"H",shellscript,content +24608,15491212,"scripts_horeka/overfit_sample_tiny/sample.sh",842,0,"",shellscript,selection_keyboard +24609,15492003,"scripts_horeka/overfit_sample_tiny/sample.sh",840,2,"CHECKPOINT_PATH",shellscript,content +24610,15492434,"scripts_horeka/overfit_sample_tiny/sample.sh",855,0,"=",shellscript,content +24611,15492435,"scripts_horeka/overfit_sample_tiny/sample.sh",856,0,"",shellscript,selection_keyboard +24612,15493717,"scripts_horeka/overfit_sample_tiny/sample.sh",856,0,"genie_1751301068_2000/",shellscript,content +24613,15494692,"scripts_horeka/overfit_sample_tiny/sample.sh",856,0,"",shellscript,selection_mouse +24614,15496894,"TERMINAL",0,0,"cat /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/_CHECKPOINT_METADATA ",,terminal_output +24615,15497067,"TERMINAL",0,0,"ls /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/_C",,terminal_output +24616,15504258,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +24617,15505530,"scripts_horeka/overfit_sample_tiny/sample.sh",856,1,"g",shellscript,selection_mouse +24618,15505591,"scripts_horeka/overfit_sample_tiny/sample.sh",856,3,"gen",shellscript,selection_mouse 
+24619,15505592,"scripts_horeka/overfit_sample_tiny/sample.sh",856,5,"genie",shellscript,selection_mouse +24620,15505600,"scripts_horeka/overfit_sample_tiny/sample.sh",856,9,"genie_175",shellscript,selection_mouse +24621,15505612,"scripts_horeka/overfit_sample_tiny/sample.sh",856,10,"genie_1751",shellscript,selection_mouse +24622,15505637,"scripts_horeka/overfit_sample_tiny/sample.sh",856,12,"genie_175130",shellscript,selection_mouse +24623,15505697,"scripts_horeka/overfit_sample_tiny/sample.sh",856,14,"genie_17513010",shellscript,selection_mouse +24624,15505698,"scripts_horeka/overfit_sample_tiny/sample.sh",856,15,"genie_175130106",shellscript,selection_mouse +24625,15505698,"scripts_horeka/overfit_sample_tiny/sample.sh",856,16,"genie_1751301068",shellscript,selection_mouse +24626,15505711,"scripts_horeka/overfit_sample_tiny/sample.sh",856,17,"genie_1751301068_",shellscript,selection_mouse +24627,15505728,"scripts_horeka/overfit_sample_tiny/sample.sh",856,18,"genie_1751301068_2",shellscript,selection_mouse +24628,15505784,"scripts_horeka/overfit_sample_tiny/sample.sh",856,19,"genie_1751301068_20",shellscript,selection_mouse +24629,15505785,"scripts_horeka/overfit_sample_tiny/sample.sh",856,20,"genie_1751301068_200",shellscript,selection_mouse +24630,15505794,"scripts_horeka/overfit_sample_tiny/sample.sh",856,21,"genie_1751301068_2000",shellscript,selection_mouse +24631,15505857,"scripts_horeka/overfit_sample_tiny/sample.sh",856,22,"genie_1751301068_2000/",shellscript,selection_mouse +24632,15506424,"scripts_horeka/overfit_sample_tiny/sample.sh",856,22,"",shellscript,content +24633,15506745,"scripts_horeka/overfit_sample_tiny/sample.sh",856,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/",shellscript,content +24634,15513698,"scripts_horeka/overfit_sample_tiny/sample.sh",1495,0,"",shellscript,selection_mouse +24635,15514127,"scripts_horeka/overfit_sample_tiny/sample.sh",1494,0,"",shellscript,selection_command +24636,15515570,"scripts_horeka/overfit_sample_tiny/sample.sh",1476,20,"",shellscript,content +24637,15515694,"scripts_horeka/overfit_sample_tiny/sample.sh",1452,0,"",shellscript,selection_command +24638,15520235,"scripts_horeka/overfit_sample_tiny/sample.sh",1452,24,"",shellscript,content +24639,15523639,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +24640,15523910,"TERMINAL",0,0,"s",,terminal_output +24641,15523985,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +24642,15524104,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +24643,15524257,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +24644,15524323,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +24645,15524527,"TERMINAL",0,0,"ripts_",,terminal_output +24646,15525607,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +24647,15525716,"TERMINAL",0,0,"oreka/",,terminal_output +24648,15528409,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +24649,15528533,"TERMINAL",0,0,"ync_runner.sh ",,terminal_output +24650,15531077,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +24651,15531188,"TERMINAL",0,0,"verfit_",,terminal_output +24652,15531763,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +24653,15531888,"TERMINAL",0,0,"ample",,terminal_output +24654,15533462,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +24655,15533824,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +24656,15533977,"TERMINAL",0,0,"iny/",,terminal_output +24657,15534503,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output 
+24658,15535354,"TERMINAL",0,0,"ample.sh ",,terminal_output +24659,15536114,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +24660,15536239,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\r\n",,terminal_output +24661,15540203,"TERMINAL",0,0,"2025-06-30 18:43:04.629683: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24662,15544160,"TERMINAL",0,0,"2025-06-30 18:43:08.584485: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24663,15545455,"TERMINAL",0,0,"python",,terminal_focus +24664,15546539,"TERMINAL",0,0,"^D\r\n]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar]633;D;0",,terminal_output +24665,15551378,"TERMINAL",0,0,"srun",,terminal_focus +24666,15552661,"TERMINAL",0,0,"2025-06-30 18:43:17.041457: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24667,15559786,"TERMINAL",0,0,"2025-06-30 18:43:24.207921: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24668,15566076,"TERMINAL",0,0,"2025-06-30 18:43:30.394914: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24669,15571272,"TERMINAL",0,0,"2025-06-30 18:43:35.699100: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24670,15575088,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +24671,15584917,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 116, in <module>\r\n action_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 148, in vq_encode\r\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/lam.py"", line 81, in vq_encode\r\n z_q, z, emb, indices = self.vq(z, training)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 107, in setup\r\n self.param(\r\nflax.errors.ScopeParamShapeError: Initializer expected to generate shape (1, 32) but got shape (6, 32) instead for parameter ""codebook"" in ""/lam/vq"". (https://flax.readthedocs.io/en/latest/api_reference/flax.errors.html#flax.errors.ScopeParamShapeError)\r\n",,terminal_output +24672,15586870,"TERMINAL",0,0,"]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +24673,15625409,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +24674,15626612,"scripts_horeka/overfit_sample_tiny/sample.sh",1426,0,"",shellscript,selection_mouse +24675,15626614,"scripts_horeka/overfit_sample_tiny/sample.sh",1425,0,"",shellscript,selection_command +24676,15627378,"scripts_horeka/overfit_sample_tiny/sample.sh",1451,0,"",shellscript,selection_mouse +24677,15627387,"scripts_horeka/overfit_sample_tiny/sample.sh",1450,0,"",shellscript,selection_command +24678,15628266,"scripts_horeka/overfit_sample_tiny/sample.sh",1451,0,"\n ",shellscript,content +24679,15628632,"scripts_horeka/overfit_sample_tiny/sample.sh",1456,0,"-",shellscript,content +24680,15628633,"scripts_horeka/overfit_sample_tiny/sample.sh",1457,0,"",shellscript,selection_keyboard +24681,15628790,"scripts_horeka/overfit_sample_tiny/sample.sh",1457,0,"-",shellscript,content +24682,15628791,"scripts_horeka/overfit_sample_tiny/sample.sh",1458,0,"",shellscript,selection_keyboard +24683,15632716,"sample.py",0,0,"",python,tab +24684,15634766,"sample.py",962,0,"",python,selection_mouse +24685,15634907,"sample.py",953,18,"num_latent_actions",python,selection_mouse +24686,15638005,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +24687,15638995,"scripts_horeka/overfit_sample_tiny/sample.sh",1458,0,"num_latent_actions",shellscript,content +24688,15639933,"scripts_horeka/overfit_sample_tiny/sample.sh",1476,0,"=",shellscript,content +24689,15639934,"scripts_horeka/overfit_sample_tiny/sample.sh",1477,0,"",shellscript,selection_keyboard +24690,15640274,"scripts_horeka/overfit_sample_tiny/sample.sh",1477,0,"6",shellscript,content +24691,15640274,"scripts_horeka/overfit_sample_tiny/sample.sh",1478,0,"",shellscript,selection_keyboard +24692,15644012,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +24693,15644251,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +24694,15644363,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\r\n",,terminal_output +24695,15647161,"TERMINAL",0,0,"2025-06-30 18:44:51.586321: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24696,15651138,"TERMINAL",0,0,"2025-06-30 18:44:55.563521: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24697,15659601,"TERMINAL",0,0,"2025-06-30 18:45:04.028333: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24698,15667152,"TERMINAL",0,0,"2025-06-30 18:45:11.520213: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24699,15673767,"TERMINAL",0,0,"2025-06-30 18:45:18.193905: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24700,15679385,"TERMINAL",0,0,"2025-06-30 18:45:23.812186: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24701,15683259,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +24702,15693761,"TERMINAL",0,0,"jax.errors.SimplifiedTraceback: For simplicity, JAX has removed its internal frames from the traceback of the following exception. 
Set JAX_TRACEBACK_FILTERING=off to include these.\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 116, in <module>\r\n action_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 148, in vq_encode\r\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/lam.py"", line 81, in vq_encode\r\n z_q, z, emb, indices = self.vq(z, training)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 107, in setup\r\n self.param(\r\nflax.errors.ScopeParamShapeError: Initializer expected to generate shape (1, 32) but got shape (6, 32) instead for parameter ""codebook"" in ""/lam/vq"". (https://flax.readthedocs.io/en/latest/api_reference/flax.errors.html#flax.errors.ScopeParamShapeError)\r\n",,terminal_output +24703,15695008,"TERMINAL",0,0,"]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +24704,15734145,"train_dynamics.py",0,0,"",python,tab +24705,15737159,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +24706,15747640,"scripts_horeka/overfit_sample_tiny/sample.sh",1046,0,"",shellscript,selection_mouse +24707,15747772,"scripts_horeka/overfit_sample_tiny/sample.sh",1041,15,"CHECKPOINT_PATH",shellscript,selection_mouse +24708,15748387,"scripts_horeka/overfit_sample_tiny/sample.sh",988,0,"",shellscript,selection_mouse +24709,15748514,"scripts_horeka/overfit_sample_tiny/sample.sh",985,15,"CHECKPOINT_PATH",shellscript,selection_mouse +24710,15756396,"scripts_horeka/overfit_sample_tiny/sample.sh",951,0,"",shellscript,selection_mouse +24711,15756589,"scripts_horeka/overfit_sample_tiny/sample.sh",950,1,"/",shellscript,selection_mouse +24712,15756603,"scripts_horeka/overfit_sample_tiny/sample.sh",948,3,"00/",shellscript,selection_mouse +24713,15756617,"scripts_horeka/overfit_sample_tiny/sample.sh",944,7,"8_2000/",shellscript,selection_mouse +24714,15756677,"scripts_horeka/overfit_sample_tiny/sample.sh",938,13,"1301068_2000/",shellscript,selection_mouse +24715,15756678,"scripts_horeka/overfit_sample_tiny/sample.sh",839,112,"\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/",shellscript,selection_mouse +24716,15756684,"scripts_horeka/overfit_sample_tiny/sample.sh",782,169,"\nCHECKPOINT_PATH=$CHECKPOINT_DIR/genie_1751067601_200000/\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/",shellscript,selection_mouse +24717,15757758,"scripts_horeka/overfit_sample_tiny/sample.sh",839,0,"",shellscript,selection_mouse +24718,15758374,"scripts_horeka/overfit_sample_tiny/sample.sh",1001,0,"",shellscript,selection_mouse +24719,15759069,"scripts_horeka/overfit_sample_tiny/sample.sh",909,0,"",shellscript,selection_mouse +24720,15759226,"scripts_horeka/overfit_sample_tiny/sample.sh",897,14,"jafa_ws_shared",shellscript,selection_mouse +24721,15759341,"scripts_horeka/overfit_sample_tiny/sample.sh",840,112,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\n",shellscript,selection_mouse
+24722,15759916,"scripts_horeka/overfit_sample_tiny/sample.sh",909,0,"",shellscript,selection_mouse +24723,15759917,"scripts_horeka/overfit_sample_tiny/sample.sh",897,14,"jafa_ws_shared",shellscript,selection_mouse +24724,15760066,"scripts_horeka/overfit_sample_tiny/sample.sh",840,112,"CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\n",shellscript,selection_mouse +24725,15761377,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +24726,15769125,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +24727,15769875,"scripts_horeka/train_lam.sh",0,0,"",shellscript,tab +24728,15785653,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py",0,0,"import math\nfrom typing import Dict, Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass STBlock(nn.Module):\n dim: int\n num_heads: int\n dropout: float\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm()(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm()(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm()(x)\n z = nn.Dense(self.dim)(z)\n z = nn.gelu(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(),\n nn.Dense(self.model_dim),\n nn.LayerNorm(),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n )(x)\n x = nn.Dense(self.out_dim)(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = 
normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n",python,tab +24729,15796209,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"from dataclasses import dataclass\nimport time\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\n#from utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_resolution, args.image_resolution, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid\n\n\n# --- 
Get video + latent actions ---\n# dataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\n# video_batch = next(iter(dataloader))\nvideo_batch = np.load(""overfit_dir/single_sample_corner.npy"")\n# Get latent actions from first video only\nfirst_video = video_batch[:1, :args.seq_len]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +24730,15798659,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3492,0,"",python,selection_mouse +24731,15798834,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3490,9,"vq_encode",python,selection_mouse +24732,15799384,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3484,0,"",python,selection_mouse +24733,15799488,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3484,5,"Genie",python,selection_mouse +24734,15800190,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3461,0,"",python,selection_mouse +24735,15800326,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3455,6,"params",python,selection_mouse +24736,15800949,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3458,0,"",python,selection_mouse +24737,15800949,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3455,6,"params",python,selection_mouse +24738,15802970,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",2860,0,"",python,selection_mouse +24739,15803111,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",2856,6,"params",python,selection_mouse +24740,15806018,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",2397,0,"",python,selection_mouse +24741,15806019,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",2396,0,"",python,selection_command +24742,15806173,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",2397,0,"",python,selection_mouse 
+24743,15806188,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",2396,0,"",python,selection_command +24744,15806722,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",2397,0,"",python,selection_mouse +24745,15806723,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",2396,0,"",python,selection_command +24746,15812830,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3273,0,"",python,selection_mouse +24747,15812982,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3268,11,"overfit_dir",python,selection_mouse +24748,15813213,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3268,12,"overfit_dir/",python,selection_mouse +24749,15813227,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3268,32,"overfit_dir/single_sample_corner",python,selection_mouse +24750,15813802,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3293,0,"",python,selection_mouse +24751,15813803,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3280,20,"single_sample_corner",python,selection_mouse +24752,15814082,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3279,21,"/single_sample_corner",python,selection_mouse +24753,15814102,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3268,32,"overfit_dir/single_sample_corner",python,selection_mouse +24754,15814476,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3277,0,"",python,selection_mouse +24755,15814477,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3268,11,"overfit_dir",python,selection_mouse +24756,15814685,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3268,32,"overfit_dir/single_sample_corner",python,selection_mouse +24757,15815136,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3292,0,"",python,selection_mouse +24758,15815567,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3305,0,"",python,selection_mouse +24759,15816091,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3349,0,"",python,selection_mouse +24760,15816108,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3348,0,"",python,selection_command +24761,15816252,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3345,4,"only",python,selection_mouse +24762,15816252,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3346,3,"nly",python,selection_command +24763,15816432,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3300,46,".npy"")\n# Get latent actions from first video o",python,selection_mouse +24764,15816452,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3280,66,"single_sample_corner.npy"")\n# Get latent actions from first video o",python,selection_mouse +24765,15817105,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3303,0,"",python,selection_mouse +24766,15817105,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3301,3,"npy",python,selection_mouse +24767,15817262,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3301,48,"npy"")\n# Get latent actions from first video only",python,selection_mouse 
+24768,15817287,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3280,24,"single_sample_corner.npy",python,selection_mouse +24769,15817462,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3301,43,"npy"")\n# Get latent actions from first video",python,selection_mouse +24770,15817530,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3301,38,"npy"")\n# Get latent actions from first ",python,selection_mouse +24771,15817551,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3268,36,"overfit_dir/single_sample_corner.npy",python,selection_mouse +24772,15818037,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3271,0,"",python,selection_mouse +24773,15823088,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3460,0,"",python,selection_mouse +24774,15823592,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3465,0,"",python,selection_mouse +24775,15824231,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3531,0,"",python,selection_mouse +24776,15825042,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3466,0,"",python,selection_mouse +24777,15825975,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3460,0,"",python,selection_mouse +24778,15828801,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3427,0,"",python,selection_mouse +24779,15828804,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3426,0,"",python,selection_command +24780,15829974,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3459,0,"",python,selection_mouse +24781,15830146,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3455,6,"params",python,selection_mouse +24782,15830716,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3427,0,"",python,selection_mouse +24783,15830720,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3426,0,"",python,selection_command +24784,15830862,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3427,0,"",python,selection_mouse +24785,15830876,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3426,0,"",python,selection_command +24786,15832272,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3314,0,"",python,selection_mouse +24787,15832437,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3313,6,"latent",python,selection_mouse +24788,15832946,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3357,0,"",python,selection_mouse +24789,15833101,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3350,11,"first_video",python,selection_mouse +24790,15833662,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3370,0,"",python,selection_mouse +24791,15833832,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3364,11,"video_batch",python,selection_mouse +24792,15834086,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3364,13,"video_batch[:",python,selection_mouse +24793,15834103,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3364,16,"video_batch[:1, ",python,selection_mouse 
+24794,15834118,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3364,21,"video_batch[:1, :args",python,selection_mouse +24795,15834181,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3364,22,"video_batch[:1, :args.",python,selection_mouse +24796,15834486,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3386,0,"",python,selection_mouse +24797,15834486,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3386,7,"seq_len",python,selection_mouse +24798,15835154,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3368,0,"",python,selection_mouse +24799,15835300,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3364,11,"video_batch",python,selection_mouse +24800,15840423,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3589,0,"",python,selection_mouse +24801,15841031,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3369,0,"",python,selection_mouse +24802,15841195,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3364,11,"video_batch",python,selection_mouse +24803,15841924,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3251,0,"",python,selection_mouse +24804,15842561,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3360,0,"",python,selection_mouse +24805,15842714,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3350,11,"first_video",python,selection_mouse +24806,15843617,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3394,0,"",python,selection_mouse +24807,15843619,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3393,0,"",python,selection_command +24808,15845648,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3394,0,"\n",python,content +24809,15848664,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3395,0,"j",python,content +24810,15848665,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3396,0,"",python,selection_keyboard +24811,15849145,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3395,1,"",python,content +24812,15849564,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3395,0,"b",python,content +24813,15849565,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3396,0,"",python,selection_keyboard +24814,15849814,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3396,0,"r",python,content +24815,15849816,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3397,0,"",python,selection_keyboard +24816,15850938,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3395,2,"breakpoint",python,content +24817,15851840,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3405,0,"()",python,content +24818,15851841,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3406,0,"",python,selection_keyboard +24819,15851914,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3406,1,")",python,content +24820,15851914,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3407,0,"",python,selection_keyboard 
+24821,15853305,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3395,0,"j",python,content +24822,15853306,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3396,0,"",python,selection_keyboard +24823,15853383,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3396,0,"a",python,content +24824,15853384,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3397,0,"",python,selection_keyboard +24825,15853585,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3397,0,"x",python,content +24826,15853587,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3398,0,"",python,selection_keyboard +24827,15853683,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3398,0,".",python,content +24828,15853685,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3399,0,"",python,selection_keyboard +24829,15853908,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3399,0,"d",python,content +24830,15853910,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3400,0,"",python,selection_keyboard +24831,15854088,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3400,0,"e",python,content +24832,15854090,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3401,0,"",python,selection_keyboard +24833,15854142,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3401,0,"b",python,content +24834,15854143,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3402,0,"",python,selection_keyboard +24835,15854370,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3402,0,"u",python,content +24836,15854371,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3403,0,"",python,selection_keyboard +24837,15854549,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3403,0,"g",python,content +24838,15854550,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3404,0,"",python,selection_keyboard +24839,15854801,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3404,0,".",python,content +24840,15854802,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3405,0,"",python,selection_keyboard +24841,15857351,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +24842,15857574,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +24843,15857685,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\r\n",,terminal_output +24844,15858941,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"",python,tab +24845,15860575,"TERMINAL",0,0,"2025-06-30 18:48:24.999059: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24846,15864483,"TERMINAL",0,0,"2025-06-30 18:48:28.897881: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24847,15868909,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",963,0,"",python,selection_mouse +24848,15869050,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",953,18,"num_latent_actions",python,selection_mouse +24849,15872970,"TERMINAL",0,0,"2025-06-30 18:48:37.326772: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24850,15873105,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +24851,15876712,"scripts_horeka/overfit_sample_tiny/sample.sh",1478,0,"",shellscript,selection_mouse +24852,15877090,"scripts_horeka/overfit_sample_tiny/sample.sh",1477,1,"",shellscript,content +24853,15877417,"scripts_horeka/overfit_sample_tiny/sample.sh",1477,0,"1",shellscript,content +24854,15877418,"scripts_horeka/overfit_sample_tiny/sample.sh",1478,0,"",shellscript,selection_keyboard +24855,15878994,"TERMINAL",0,0,"^C",,terminal_output +24856,15879265,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 84, in <module>\r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 77, in __call__\r\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/lam.py"", line 75, in vq_encode\r\n z = self.encoder(padded_patches) # (B, T, N, E)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 87, in __call__\r\n x = STBlock(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 53, in __call__\r\n z = nn.MultiHeadAttention(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/attention.py"", line 676, in __call__\r\n out = DenseGeneral(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/linear.py"", line 199, in __call__\r\n out = dot_general(\r\njax._src.source_info_util.JaxStackTraceBeforeTransformation: KeyboardInterrupt\r\n\r\nThe preceding stack trace is the source of the JAX operation that, once transformed by JAX, triggered the following exception.\r\n\r\n--------------------\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 84, in <module>\r\n params = genie.init(_rng, dummy_inputs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2452, in init\r\n _, v_out = self.init_with_output(\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2304, in init_with_output\r\n return init_with_output(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1115, in wrapper\r\n return apply(fn, mutable=mutable, flags=init_flags)(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n y = fn(root, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3093, in scope_fn\r\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 77, in __call__\r\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/lam.py"", line 75, in vq_encode\r\n z = self.encoder(padded_patches) # (B, T, N, E)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 87, in __call__\r\n x = STBlock(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 433, in wrapped_fn\r\n return trafo_fn(module_scopes, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 319, in wrapper\r\n^C File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1474, in inner\r\n return rematted(variable_groups, rng_groups, *args, **kwargs)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ad_checkpoint.py"", line 333, in fun_remat\r\n out_flat = remat_p.bind(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ad_checkpoint.py"", line 514, in remat_impl\r\n return core.eval_jaxpr(jaxpr, (), *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 624, in eval_jaxpr\r\n ans = eqn.primitive.bind(*subfuns, *map(read, eqn.invars), **bind_params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n outs = fun(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n executable, pgle_profiler) = _python_pjit_helper(fun, jit_info, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n out_flat, compiled, profiler = _pjit_call_impl_python(*args_flat, **p.params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1862, in 
_pjit_call_impl_python\r\n ).compile()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2467, in compile\r\n executable = UnloadedMeshExecutable.from_hlo(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 3009, in from_hlo\r\n xla_executable = _cached_compilation(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2800, in _cached_compilation\r\n xla_executable = compiler.compile_or_get_cached(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 447, in compile_or_get_cached\r\n return _compile_and_write_cache(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 719, in _compile_and_write_cache\r\n executable = backend_compile(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/compiler.py"", line 335, in backend_compile\r\n return backend.compile(\r\nKeyboardInterrupt\r\n",,terminal_output +24857,15879403,"TERMINAL",0,0,"^C",,terminal_output +24858,15879489,"TERMINAL",0,0,"Exception ignored in: <function WeakKeyDictionary.__init__.<locals>.remove at 0x1520d6547370>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +24859,15880032,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +24860,15880276,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +24861,15881136,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"",python,tab +24862,15890214,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",0,0,"",python,tab +24863,15891635,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",971,0,"",python,selection_command +24864,15892780,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3405,0,"",python,selection_command +24865,15893213,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py",3395,23,"",python,content +24866,15895623,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +24867,15896125,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +24868,15896239,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\r\n",,terminal_output +24869,15899186,"TERMINAL",0,0,"2025-06-30 18:49:03.562460: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24870,15903388,"TERMINAL",0,0,"2025-06-30 18:49:07.736092: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24871,15911485,"TERMINAL",0,0,"2025-06-30 18:49:15.910244: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24872,15918485,"TERMINAL",0,0,"2025-06-30 18:49:22.877987: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24873,15924892,"TERMINAL",0,0,"2025-06-30 18:49:29.215364: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24874,15930512,"TERMINAL",0,0,"2025-06-30 18:49:34.938436: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24875,15934319,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +24876,15946090,"TERMINAL",0,0,"Frame 1\r\n",,terminal_output +24877,15946572,"TERMINAL",0,0,"2025-06-30 18:49:50.995470: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24878,15949570,"TERMINAL",0,0,"2025-06-30 18:49:53.937503: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24879,15957249,"TERMINAL",0,0,"2025-06-30 18:50:01.657204: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24880,15960755,"TERMINAL",0,0,"2025-06-30 18:50:05.157182: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24881,15965594,"TERMINAL",0,0,"2025-06-30 18:50:10.018560: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24882,15968114,"TERMINAL",0,0,"Frame 2\r\n",,terminal_output +24883,15968638,"TERMINAL",0,0,"2025-06-30 18:50:13.063504: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24884,15971585,"TERMINAL",0,0,"2025-06-30 18:50:15.979149: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24885,15979583,"TERMINAL",0,0,"2025-06-30 18:50:23.990455: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24886,15982542,"TERMINAL",0,0,"2025-06-30 18:50:26.870337: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24887,15988278,"TERMINAL",0,0,"Frame 3\r\n",,terminal_output +24888,15988897,"TERMINAL",0,0,"2025-06-30 18:50:33.227879: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24889,15991860,"TERMINAL",0,0,"2025-06-30 18:50:36.193295: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24890,16000359,"TERMINAL",0,0,"2025-06-30 18:50:44.723550: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24891,16003021,"TERMINAL",0,0,"2025-06-30 18:50:47.403730: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24892,16008961,"TERMINAL",0,0,"Frame 4\r\n",,terminal_output +24893,16009575,"TERMINAL",0,0,"2025-06-30 18:50:53.955513: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24894,16012443,"TERMINAL",0,0,"2025-06-30 18:50:56.857304: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24895,16021066,"TERMINAL",0,0,"2025-06-30 18:51:05.490501: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24896,16024130,"TERMINAL",0,0,"2025-06-30 18:51:08.552556: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24897,16029956,"TERMINAL",0,0,"Frame 5\r\n",,terminal_output +24898,16030679,"TERMINAL",0,0,"2025-06-30 18:51:14.996139: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24899,16033695,"TERMINAL",0,0,"2025-06-30 18:51:18.121090: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24900,16042139,"TERMINAL",0,0,"2025-06-30 18:51:26.555956: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24901,16045211,"TERMINAL",0,0,"2025-06-30 18:51:29.531922: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24902,16051167,"TERMINAL",0,0,"Frame 6\r\n",,terminal_output +24903,16051823,"TERMINAL",0,0,"2025-06-30 18:51:36.249062: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24904,16054939,"TERMINAL",0,0,"2025-06-30 18:51:39.283136: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24905,16063661,"TERMINAL",0,0,"2025-06-30 18:51:48.086571: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24906,16066699,"TERMINAL",0,0,"2025-06-30 18:51:51.122789: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24907,16072545,"TERMINAL",0,0,"Frame 7\r\n",,terminal_output +24908,16073270,"TERMINAL",0,0,"2025-06-30 18:51:57.622557: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24909,16076240,"TERMINAL",0,0,"2025-06-30 18:52:00.618825: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24910,16085031,"TERMINAL",0,0,"2025-06-30 18:52:09.455288: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24911,16088022,"TERMINAL",0,0,"2025-06-30 18:52:12.447573: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24912,16094261,"TERMINAL",0,0,"Frame 8\r\n",,terminal_output +24913,16094871,"TERMINAL",0,0,"2025-06-30 18:52:19.294947: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24914,16097881,"TERMINAL",0,0,"2025-06-30 18:52:22.305533: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24915,16107368,"TERMINAL",0,0,"2025-06-30 18:52:31.710324: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24916,16110236,"TERMINAL",0,0,"2025-06-30 18:52:34.563949: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24917,16116484,"TERMINAL",0,0,"Frame 9\r\n",,terminal_output +24918,16117204,"TERMINAL",0,0,"2025-06-30 18:52:41.545102: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24919,16118282,"TERMINAL",0,0,"bash",,terminal_focus +24920,16128988,"TERMINAL",0,0,"2025-06-30 18:52:53.417326: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24921,16132045,"TERMINAL",0,0,"2025-06-30 18:52:56.393840: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24922,16138641,"TERMINAL",0,0,"Frame 10\r\n",,terminal_output +24923,16139418,"TERMINAL",0,0,"2025-06-30 18:53:03.773418: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24924,16149036,"TERMINAL",0,0,"srun",,terminal_focus +24925,16151203,"TERMINAL",0,0,"2025-06-30 18:53:15.555965: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24926,16154267,"TERMINAL",0,0,"2025-06-30 18:53:18.662037: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24927,16161202,"TERMINAL",0,0,"Frame 11\r\n",,terminal_output +24928,16162053,"TERMINAL",0,0,"2025-06-30 18:53:26.373997: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24929,16174235,"TERMINAL",0,0,"2025-06-30 18:53:38.658649: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24930,16177312,"TERMINAL",0,0,"2025-06-30 18:53:41.713735: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24931,16184271,"TERMINAL",0,0,"Frame 12\r\n",,terminal_output +24932,16184851,"TERMINAL",0,0,"2025-06-30 18:53:49.149128: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24933,16196869,"TERMINAL",0,0,"2025-06-30 18:54:01.267905: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24934,16200045,"TERMINAL",0,0,"2025-06-30 18:54:04.386248: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24935,16207107,"TERMINAL",0,0,"Frame 13\r\n",,terminal_output +24936,16207751,"TERMINAL",0,0,"2025-06-30 18:54:12.178563: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24937,16220113,"TERMINAL",0,0,"2025-06-30 18:54:24.429019: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24938,16223253,"TERMINAL",0,0,"2025-06-30 18:54:27.680804: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24939,16229917,"TERMINAL",0,0,"Frame 14\r\n",,terminal_output +24940,16230687,"TERMINAL",0,0,"2025-06-30 18:54:35.110356: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24941,16242538,"TERMINAL",0,0,"2025-06-30 18:54:46.954449: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24942,16245711,"TERMINAL",0,0,"2025-06-30 18:54:50.066177: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24943,16252761,"TERMINAL",0,0,"Frame 15\r\n",,terminal_output +24944,16253435,"TERMINAL",0,0,"2025-06-30 18:54:57.860654: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24945,16266091,"TERMINAL",0,0,"2025-06-30 18:55:10.415258: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24946,16269066,"TERMINAL",0,0,"2025-06-30 18:55:13.451198: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +24947,16280429,"TERMINAL",0,0,"SSIM: 0.17403215169906616\r\n",,terminal_output +24948,16284729,"TERMINAL",0,0,"]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +24949,16328329,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +24950,16329494,"scripts_horeka/overfit_sample_tiny/sample.sh",1451,0,"",shellscript,selection_mouse +24951,16330302,"scripts_horeka/overfit_sample_tiny/sample.sh",1451,0,"\n ",shellscript,content +24952,16330629,"scripts_horeka/overfit_sample_tiny/sample.sh",1456,0,"-",shellscript,content +24953,16330630,"scripts_horeka/overfit_sample_tiny/sample.sh",1457,0,"",shellscript,selection_keyboard +24954,16330816,"scripts_horeka/overfit_sample_tiny/sample.sh",1457,0,"-",shellscript,content +24955,16330817,"scripts_horeka/overfit_sample_tiny/sample.sh",1458,0,"",shellscript,selection_keyboard +24956,16331266,"scripts_horeka/overfit_sample_tiny/sample.sh",1458,0,"m",shellscript,content +24957,16331267,"scripts_horeka/overfit_sample_tiny/sample.sh",1459,0,"",shellscript,selection_keyboard +24958,16331431,"scripts_horeka/overfit_sample_tiny/sample.sh",1459,0,"a",shellscript,content +24959,16331432,"scripts_horeka/overfit_sample_tiny/sample.sh",1460,0,"",shellscript,selection_keyboard +24960,16331577,"scripts_horeka/overfit_sample_tiny/sample.sh",1460,0,"s",shellscript,content +24961,16331578,"scripts_horeka/overfit_sample_tiny/sample.sh",1461,0,"",shellscript,selection_keyboard +24962,16331663,"scripts_horeka/overfit_sample_tiny/sample.sh",1461,0,"k",shellscript,content +24963,16331664,"scripts_horeka/overfit_sample_tiny/sample.sh",1462,0,"",shellscript,selection_keyboard +24964,16332326,"scripts_horeka/overfit_sample_tiny/sample.sh",1462,0,"g",shellscript,content +24965,16332327,"scripts_horeka/overfit_sample_tiny/sample.sh",1463,0,"",shellscript,selection_keyboard +24966,16332414,"scripts_horeka/overfit_sample_tiny/sample.sh",1463,0,"i",shellscript,content +24967,16332415,"scripts_horeka/overfit_sample_tiny/sample.sh",1464,0,"",shellscript,selection_keyboard +24968,16332510,"scripts_horeka/overfit_sample_tiny/sample.sh",1464,0,"t",shellscript,content +24969,16332511,"scripts_horeka/overfit_sample_tiny/sample.sh",1465,0,"",shellscript,selection_keyboard +24970,16332765,"scripts_horeka/overfit_sample_tiny/sample.sh",1465,0,"_",shellscript,content +24971,16332766,"scripts_horeka/overfit_sample_tiny/sample.sh",1466,0,"",shellscript,selection_keyboard +24972,16333105,"scripts_horeka/overfit_sample_tiny/sample.sh",1466,0,"s",shellscript,content +24973,16333106,"scripts_horeka/overfit_sample_tiny/sample.sh",1467,0,"",shellscript,selection_keyboard +24974,16333270,"scripts_horeka/overfit_sample_tiny/sample.sh",1467,0,"t",shellscript,content +24975,16333271,"scripts_horeka/overfit_sample_tiny/sample.sh",1468,0,"",shellscript,selection_keyboard +24976,16333504,"scripts_horeka/overfit_sample_tiny/sample.sh",1468,0,"e",shellscript,content +24977,16333505,"scripts_horeka/overfit_sample_tiny/sample.sh",1469,0,"",shellscript,selection_keyboard +24978,16333580,"scripts_horeka/overfit_sample_tiny/sample.sh",1469,0,"p",shellscript,content +24979,16333581,"scripts_horeka/overfit_sample_tiny/sample.sh",1470,0,"",shellscript,selection_keyboard +24980,16333798,"scripts_horeka/overfit_sample_tiny/sample.sh",1470,0,"s",shellscript,content 
+24981,16333798,"scripts_horeka/overfit_sample_tiny/sample.sh",1471,0,"",shellscript,selection_keyboard +24982,16334071,"scripts_horeka/overfit_sample_tiny/sample.sh",1471,0,"=",shellscript,content +24983,16334072,"scripts_horeka/overfit_sample_tiny/sample.sh",1472,0,"",shellscript,selection_keyboard +24984,16334320,"scripts_horeka/overfit_sample_tiny/sample.sh",1472,0,"1",shellscript,content +24985,16334320,"scripts_horeka/overfit_sample_tiny/sample.sh",1473,0,"",shellscript,selection_keyboard +24986,16334696,"scripts_horeka/overfit_sample_tiny/sample.sh",1473,0," ",shellscript,content +24987,16334697,"scripts_horeka/overfit_sample_tiny/sample.sh",1474,0,"",shellscript,selection_keyboard +24988,16335026,"scripts_horeka/overfit_sample_tiny/sample.sh",1474,0,"\",shellscript,content +24989,16335027,"scripts_horeka/overfit_sample_tiny/sample.sh",1475,0,"",shellscript,selection_keyboard +24990,16339677,"sample.py",0,0,"",python,tab +24991,16344430,"sample.py",559,0,"",python,selection_mouse +24992,16344600,"sample.py",552,13,"maskgit_steps",python,selection_mouse +24993,16348147,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +24994,16349125,"scripts_horeka/overfit_sample_tiny/sample.sh",1474,1,"",shellscript,content +24995,16349577,"scripts_horeka/overfit_sample_tiny/sample.sh",1472,2,"",shellscript,content +24996,16349755,"scripts_horeka/overfit_sample_tiny/sample.sh",1471,1,"",shellscript,content +24997,16350074,"scripts_horeka/overfit_sample_tiny/sample.sh",1458,13,"",shellscript,content +24998,16350443,"scripts_horeka/overfit_sample_tiny/sample.sh",1458,0,"maskgit_steps",shellscript,content +24999,16351280,"scripts_horeka/overfit_sample_tiny/sample.sh",1471,0,"=",shellscript,content +25000,16351281,"scripts_horeka/overfit_sample_tiny/sample.sh",1472,0,"",shellscript,selection_keyboard +25001,16351396,"scripts_horeka/overfit_sample_tiny/sample.sh",1472,0,"!",shellscript,content +25002,16351397,"scripts_horeka/overfit_sample_tiny/sample.sh",1473,0,"",shellscript,selection_keyboard +25003,16352169,"scripts_horeka/overfit_sample_tiny/sample.sh",1472,1,"",shellscript,content +25004,16352360,"scripts_horeka/overfit_sample_tiny/sample.sh",1472,0,"1",shellscript,content +25005,16352361,"scripts_horeka/overfit_sample_tiny/sample.sh",1473,0,"",shellscript,selection_keyboard +25006,16352877,"scripts_horeka/overfit_sample_tiny/sample.sh",1473,0," ",shellscript,content +25007,16352878,"scripts_horeka/overfit_sample_tiny/sample.sh",1474,0,"",shellscript,selection_keyboard +25008,16353028,"scripts_horeka/overfit_sample_tiny/sample.sh",1474,0,"\",shellscript,content +25009,16353029,"scripts_horeka/overfit_sample_tiny/sample.sh",1475,0,"",shellscript,selection_keyboard +25010,16354035,"scripts_horeka/overfit_sample_tiny/sample.sh",1503,0,"",shellscript,selection_mouse +25011,16355198,"scripts_horeka/overfit_sample_tiny/sample.sh",1426,0,"",shellscript,selection_mouse +25012,16355689,"scripts_horeka/overfit_sample_tiny/sample.sh",1502,0,"",shellscript,selection_mouse +25013,16356335,"scripts_horeka/overfit_sample_tiny/sample.sh",1497,0,"",shellscript,selection_mouse +25014,16360827,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25015,16361827,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +25016,16361920,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\r\n",,terminal_output +25017,16365724,"TERMINAL",0,0,"2025-06-30 18:56:50.064385: W 
external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25018,16369504,"TERMINAL",0,0,"2025-06-30 18:56:53.923780: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25019,16377901,"TERMINAL",0,0,"2025-06-30 18:57:02.247131: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25020,16385225,"TERMINAL",0,0,"2025-06-30 18:57:09.581299: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25021,16391634,"TERMINAL",0,0,"2025-06-30 18:57:16.048376: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25022,16397163,"TERMINAL",0,0,"2025-06-30 18:57:21.536966: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25023,16401053,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +25024,16411740,"TERMINAL",0,0,"Frame 1\r\n",,terminal_output +25025,16412279,"TERMINAL",0,0,"2025-06-30 18:57:36.704958: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25026,16415154,"TERMINAL",0,0,"2025-06-30 18:57:39.582340: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25027,16422967,"TERMINAL",0,0,"2025-06-30 18:57:47.374073: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25028,16426270,"TERMINAL",0,0,"2025-06-30 18:57:50.698759: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25029,16431387,"TERMINAL",0,0,"2025-06-30 18:57:55.814628: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25030,16433686,"TERMINAL",0,0,"Frame 2\r\n",,terminal_output +25031,16434255,"TERMINAL",0,0,"2025-06-30 18:57:58.674054: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25032,16437213,"TERMINAL",0,0,"2025-06-30 18:58:01.594362: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25033,16445192,"TERMINAL",0,0,"2025-06-30 18:58:09.531455: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25034,16447927,"TERMINAL",0,0,"2025-06-30 18:58:12.353573: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25035,16453470,"TERMINAL",0,0,"Frame 3\r\n",,terminal_output +25036,16454100,"TERMINAL",0,0,"2025-06-30 18:58:18.476692: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25037,16456973,"TERMINAL",0,0,"2025-06-30 18:58:21.365428: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25038,16465453,"TERMINAL",0,0,"2025-06-30 18:58:29.834362: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25039,16468128,"TERMINAL",0,0,"2025-06-30 18:58:32.496799: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25040,16473995,"TERMINAL",0,0,"Frame 4\r\n",,terminal_output +25041,16474682,"TERMINAL",0,0,"2025-06-30 18:58:39.032922: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25042,16477651,"TERMINAL",0,0,"2025-06-30 18:58:42.064046: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25043,16486357,"TERMINAL",0,0,"2025-06-30 18:58:50.711199: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25044,16489207,"TERMINAL",0,0,"2025-06-30 18:58:53.601524: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25045,16495469,"TERMINAL",0,0,"Frame 5\r\n",,terminal_output +25046,16496005,"TERMINAL",0,0,"2025-06-30 18:59:00.428111: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25047,16497886,"TERMINAL",0,0,"^CTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 122, in <module>\r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 97, in _autoreg_sample\r\n new_frame = genie.apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 107, in sample\r\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 57, in vq_encode\r\n x = self.encoder(x) # (B, T, N, E)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 87, in __call__\r\n x = STBlock(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 40, in __call__\r\n z = nn.LayerNorm()(z)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/normalization.py"", line 507, in __call__\r\n mean, var = _compute_stats(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/normalization.py"", line 134, in _compute_stats\r\n mu, mu2 = maybe_distributed_mean(x, _abs_sq(x), mask=mask)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/normalization.py"", line 117, in maybe_distributed_mean\r\n mus = tuple(x.mean(axes, where=mask) for x in xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/normalization.py"", line 117, in <genexpr>\r\n mus = tuple(x.mean(axes, where=mask) for x in xs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 1087, in meth\r\n",,terminal_output +25048,16498036,"TERMINAL",0,0," return getattr(self.aval, name).fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/array_methods.py"", line 247, in _mean\r\n return reductions.mean(self, axis=axis, dtype=dtype, out=out,\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/numpy/reductions.py"", line 864, in mean\r\n return _mean(a, _ensure_optional_axes(axis), dtype, out, keepdims,\r\njax._src.source_info_util.JaxStackTraceBeforeTransformation: KeyboardInterrupt\r\n\r\nThe preceding stack trace is the source of the JAX operation that, once transformed by JAX, triggered the following exception.\r\n\r\n--------------------\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 122, in <module>\r\n vid = _autoreg_sample(rng, video_batch, action_batch)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/sample.py"", line 97, in _autoreg_sample\r\n new_frame = genie.apply(\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 2240, in apply\r\n return apply(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/scope.py"", line 1079, in wrapper\r\n y = fn(root, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 3022, in scope_fn\r\n return fn(module.clone(parent=scope, _deep_clone=True), *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/genie.py"", line 107, in sample\r\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/models/tokenizer.py"", line 57, in vq_encode\r\n x = self.encoder(x) # (B, T, N, E)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 699, in wrapped_module_method\r\n return self._call_wrapped_method(fun, args, kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/module.py"", line 1216, in _call_wrapped_method\r\n y = run_fun(self, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/nn.py"", line 87, in __call__\r\n x = STBlock(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/transforms.py"", line 433, in wrapped_fn\r\n return trafo_fn(module_scopes, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 319, in wrapper\r\n y, out_variable_groups_xs_t = fn(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/core/lift.py"", line 1474, in inner\r\n return rematted(variable_groups, rng_groups, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ad_checkpoint.py"", line 333, in fun_remat\r\n out_flat = remat_p.bind(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/ad_checkpoint.py"", line 514, in remat_impl\r\n return core.eval_jaxpr(jaxpr, (), *args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 624, in eval_jaxpr\r\n ans = eqn.primitive.bind(*subfuns, *map(read, eqn.invars), **bind_params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 531, in bind\r\n return self._true_bind(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 551, in _true_bind\r\n return self.bind_with_trace(prev_trace, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 556, in bind_with_trace\r\n return trace.process_primitive(self, args, params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/core.py"", line 1060, in process_primitive\r\n return primitive.impl(*args, **params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/dispatch.py"", line 88, in apply_primitive\r\n outs = fun(*args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/traceback_util.py"", line 182, in reraise_with_filtered_traceback\r\n return fun(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 334, in cache_miss\r\n executable, pgle_profiler) = _python_pjit_helper(fun, jit_info, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 195, in _python_pjit_helper\r\n out_flat, compiled, profiler = _pjit_call_impl_python(*args_flat, **p.params)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1853, in _pjit_call_impl_python\r\n compiled = _resolve_and_lower(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1820, in 
_resolve_and_lower\r\n return _pjit_lower(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/pjit.py"", line 1953, in _pjit_lower\r\n return pxla.lower_sharding_computation(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/profiler.py"", line 354, in wrapper\r\n return func(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 2378, in lower_sharding_computation\r\n nreps, tuple_args, shape_poly_state) = _cached_lowering_to_hlo(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/pxla.py"", line 1968, in _cached_lowering_to_hlo\r\n lowering_result = mlir.lower_jaxpr_to_module(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/mlir.py"", line 1271, in lower_jaxpr_to_module\r\n lower_jaxpr_to_fun(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/interpreters/mlir.py"", line 1544, in lower_jaxpr_to_fun\r\n ftype = ir.FunctionType.get(flat_input_types, flat_output_types)\r\nKeyboardInterrupt\r\n^CException ignored in: <function WeakKeyDictionary.__init__.<locals>.remove at 0x152b7512f370>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +25049,16498258,"TERMINAL",0,0,"^CException ignored in: <function WeakKeyDictionary.__init__.<locals>.remove at 0x152b7512f370>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +25050,16498596,"TERMINAL",0,0,"^CException ignored in: <function WeakKeyDictionary.__init__.<locals>.remove at 0x152b7512f370>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +25051,16499012,"TERMINAL",0,0,"^C",,terminal_output +25052,16499204,"TERMINAL",0,0,"Exception ignored in atexit callback: \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3168, in clean_up\r\n",,terminal_output +25053,16499312,"TERMINAL",0,0," clear_backends()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3158, in clear_backends\r\n pjit._infer_params_cached.cache_clear()\r\nKeyboardInterrupt: \r\n",,terminal_output +25054,16499808,"TERMINAL",0,0,"^C",,terminal_output +25055,16499964,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +25056,16504161,"sample.py",0,0,"",python,tab +25057,16515797,"sample.py",535,0,"",python,selection_mouse +25058,16517020,"sample.py",367,0,"",python,selection_mouse +25059,16517594,"sample.py",365,0,"",python,selection_mouse
+25060,16517747,"sample.py",363,7,"seq_len",python,selection_mouse +25061,16522338,"scripts_horeka/train_lam.sh",0,0,"",shellscript,tab +25062,16523378,"scripts_horeka/train_lam.sh",917,0,"",shellscript,selection_mouse +25063,16524164,"scripts_horeka/train_lam.sh",917,0,"\n ",shellscript,content +25064,16524651,"scripts_horeka/train_lam.sh",918,4,"",shellscript,content +25065,16524924,"scripts_horeka/train_lam.sh",917,1,"",shellscript,content +25066,16525477,"scripts_horeka/train_lam.sh",917,0," ",shellscript,content +25067,16525478,"scripts_horeka/train_lam.sh",918,0,"",shellscript,selection_keyboard +25068,16525930,"scripts_horeka/train_lam.sh",918,0,"\",shellscript,content +25069,16525931,"scripts_horeka/train_lam.sh",919,0,"",shellscript,selection_keyboard +25070,16526240,"scripts_horeka/train_lam.sh",919,0,"\n ",shellscript,content +25071,16527036,"scripts_horeka/train_lam.sh",924,0,"-",shellscript,content +25072,16527037,"scripts_horeka/train_lam.sh",925,0,"",shellscript,selection_keyboard +25073,16527147,"scripts_horeka/train_lam.sh",925,0,"-",shellscript,content +25074,16527148,"scripts_horeka/train_lam.sh",926,0,"",shellscript,selection_keyboard +25075,16527677,"scripts_horeka/train_lam.sh",926,0,"seq_len",shellscript,content +25076,16528784,"scripts_horeka/train_lam.sh",933,0,"=",shellscript,content +25077,16528785,"scripts_horeka/train_lam.sh",934,0,"",shellscript,selection_keyboard +25078,16529244,"scripts_horeka/train_lam.sh",934,0,"3",shellscript,content +25079,16529245,"scripts_horeka/train_lam.sh",935,0,"",shellscript,selection_keyboard +25080,16531158,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25081,16531356,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +25082,16531464,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\r\n",,terminal_output +25083,16534485,"TERMINAL",0,0,"2025-06-30 18:59:38.881703: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25084,16538625,"TERMINAL",0,0,"2025-06-30 18:59:43.024514: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25085,16546465,"TERMINAL",0,0,"2025-06-30 18:59:50.818384: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25086,16553633,"TERMINAL",0,0,"2025-06-30 18:59:58.020154: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25087,16560017,"TERMINAL",0,0,"2025-06-30 19:00:04.443151: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25088,16565430,"TERMINAL",0,0,"2025-06-30 19:00:09.854954: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25089,16569302,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +25090,16581172,"TERMINAL",0,0,"Frame 1\r\n",,terminal_output +25091,16581793,"TERMINAL",0,0,"2025-06-30 19:00:26.144767: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25092,16584968,"TERMINAL",0,0,"2025-06-30 19:00:29.360696: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25093,16592854,"TERMINAL",0,0,"2025-06-30 19:00:37.179506: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25094,16596333,"TERMINAL",0,0,"2025-06-30 19:00:40.673581: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25095,16601044,"TERMINAL",0,0,"2025-06-30 19:00:45.469219: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25096,16603505,"TERMINAL",0,0,"Frame 2\r\n",,terminal_output +25097,16604021,"TERMINAL",0,0,"2025-06-30 19:00:48.420464: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25098,16606884,"TERMINAL",0,0,"2025-06-30 19:00:51.265864: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25099,16614870,"TERMINAL",0,0,"2025-06-30 19:00:59.297122: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25100,16617613,"TERMINAL",0,0,"2025-06-30 19:01:02.040159: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25101,16623263,"TERMINAL",0,0,"Frame 3\r\n",,terminal_output +25102,16623734,"TERMINAL",0,0,"2025-06-30 19:01:08.160987: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25103,16626746,"TERMINAL",0,0,"2025-06-30 19:01:11.155913: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25104,16635086,"TERMINAL",0,0,"2025-06-30 19:01:19.511695: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25105,16637705,"TERMINAL",0,0,"2025-06-30 19:01:22.113296: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25106,16644164,"TERMINAL",0,0,"Frame 4\r\n",,terminal_output +25107,16644667,"TERMINAL",0,0,"2025-06-30 19:01:28.984097: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25108,16647636,"TERMINAL",0,0,"2025-06-30 19:01:32.017020: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25109,16656300,"TERMINAL",0,0,"2025-06-30 19:01:40.725592: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25110,16659106,"TERMINAL",0,0,"2025-06-30 19:01:43.480301: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25111,16665251,"TERMINAL",0,0,"Frame 5\r\n",,terminal_output +25112,16665754,"TERMINAL",0,0,"2025-06-30 19:01:50.182108: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25113,16668648,"TERMINAL",0,0,"2025-06-30 19:01:53.075426: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25114,16673644,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +25115,16675566,"scripts_horeka/overfit_sample_tiny/sample.sh",1475,0,"",shellscript,selection_mouse +25116,16676920,"scripts_horeka/overfit_sample_tiny/sample.sh",1472,0,"",shellscript,selection_mouse +25117,16677108,"TERMINAL",0,0,"2025-06-30 19:02:01.486590: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25118,16678004,"scripts_horeka/train_dynamics.sh",0,0,"",shellscript,tab +25119,16679792,"TERMINAL",0,0,"2025-06-30 19:02:04.172758: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25120,16681038,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +25121,16681687,"scripts_horeka/train_lam.sh",0,0,"",shellscript,tab +25122,16683077,"scripts_horeka/train_lam.sh",925,10,"-seq_len=3",shellscript,selection_mouse +25123,16683094,"scripts_horeka/train_lam.sh",921,14," --seq_len=3",shellscript,selection_mouse +25124,16683151,"scripts_horeka/train_lam.sh",920,15," --seq_len=3",shellscript,selection_mouse +25125,16683152,"scripts_horeka/train_lam.sh",935,0,"",shellscript,selection_mouse +25126,16683414,"scripts_horeka/train_lam.sh",920,15," --seq_len=3",shellscript,selection_mouse +25127,16685938,"TERMINAL",0,0,"Frame 6\r\n",,terminal_output +25128,16686092,"scripts_horeka/train_lam.sh",920,0,"",shellscript,selection_command +25129,16686754,"TERMINAL",0,0,"2025-06-30 19:02:11.105496: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25130,16687302,"scripts_horeka/train_lam.sh",919,16,"",shellscript,content +25131,16687307,"scripts_horeka/train_lam.sh",902,0,"",shellscript,selection_command +25132,16689210,"TERMINAL",0,0,"^C2025-06-30 19:02:13.601806: F external/xla/xla/service/gpu/autotuning/gemm_fusion_autotuner.cc:1136] Non-OK-status: executable.status()\r\nStatus: INTERNAL: ptxas exited with non-zero error code 2, output: - Failure occured when compiling fusion gemm_fusion_dot with config '{block_m:64,block_n:128,block_k:16,split_k:1,num_stages:4,num_warps:2,num_ctas:1}'\r\nFused HLO computation:\r\n%gemm_fusion_dot_computation (parameter_0: f32[1,6,920,384], parameter_1: f32[384,8,48]) -> f32[1,6,920,8,48] {\r\n %parameter_0 = f32[1,6,920,384]{3,2,1,0} parameter(0)\r\n %bitcast.3 = f32[5520,384]{1,0} bitcast(%parameter_0), metadata={op_name=""args[0]""}\r\n %parameter_1 = f32[384,8,48]{2,1,0} parameter(1)\r\n %bitcast.4 = f32[384,384]{1,0} bitcast(%parameter_1), metadata={op_name=""args[1]""}\r\n %dot.1 = f32[5520,384]{1,0} dot(%bitcast.3, %bitcast.4), lhs_contracting_dims={1}, rhs_contracting_dims={0}, metadata={op_name=""jit(dot_general)/jit(main)/dot_general"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/linear.py"" source_line=199}\r\n ROOT %bitcast.5 = f32[1,6,920,8,48]{4,3,2,1,0} bitcast(%dot.1), metadata={op_name=""jit(dot_general)/jit(main)/dot_general"" source_file=""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/flax/linen/linear.py"" source_line=199}\r\n}\r\n",,terminal_output +25133,16689404,"TERMINAL",0,0,"^C",,terminal_output +25134,16689566,"TERMINAL",0,0,"^C",,terminal_output +25135,16689649,"TERMINAL",0,0,"scripts_horeka/overfit_sample_tiny/sample.sh: line 44: 3965852 Aborted (core dumped) python sample.py --checkpoint ""$CHECKPOINT_PATH"" --tokenizer_dim=384 --latent_patch_dim=32 --num_patch_latents=1024 --patch_size=4 --tokenizer_num_blocks=8 --tokenizer_num_heads=8 --lam_dim=384 --latent_action_dim=32 --num_latent_actions=6 --lam_patch_size=16 --lam_num_blocks=8 --lam_num_heads=8 --dyna_dim=128 --dyna_num_blocks=2 --dyna_num_heads=4 --maskgit_steps=1 
--num_latent_actions=1\r\n]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +25136,16693000,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +25137,16694051,"scripts_horeka/overfit_sample_tiny/sample.sh",1496,0,"",shellscript,selection_command +25138,16694652,"scripts_horeka/overfit_sample_tiny/sample.sh",1495,0,"",shellscript,selection_command +25139,16694908,"scripts_horeka/overfit_sample_tiny/sample.sh",1502,0,"\n ",shellscript,content +25140,16695273,"scripts_horeka/overfit_sample_tiny/sample.sh",1507,0,"seq_len",shellscript,content +25141,16696798,"scripts_horeka/overfit_sample_tiny/sample.sh",1513,0,"",shellscript,selection_command +25142,16697409,"scripts_horeka/overfit_sample_tiny/sample.sh",1507,0,"",shellscript,selection_command +25143,16698614,"scripts_horeka/overfit_sample_tiny/sample.sh",1507,0,"-",shellscript,content +25144,16698615,"scripts_horeka/overfit_sample_tiny/sample.sh",1508,0,"",shellscript,selection_keyboard +25145,16698720,"scripts_horeka/overfit_sample_tiny/sample.sh",1508,0,"-",shellscript,content +25146,16698721,"scripts_horeka/overfit_sample_tiny/sample.sh",1509,0,"",shellscript,selection_keyboard +25147,16698813,"scripts_horeka/overfit_sample_tiny/sample.sh",1509,0," ",shellscript,content +25148,16698814,"scripts_horeka/overfit_sample_tiny/sample.sh",1510,0,"",shellscript,selection_keyboard +25149,16700494,"scripts_horeka/overfit_sample_tiny/sample.sh",1509,1,"",shellscript,content +25150,16700674,"scripts_horeka/overfit_sample_tiny/sample.sh",1508,0,"",shellscript,selection_command +25151,16701262,"scripts_horeka/overfit_sample_tiny/sample.sh",1516,0,"",shellscript,selection_command +25152,16702528,"scripts_horeka/overfit_sample_tiny/sample.sh",1516,0," ",shellscript,content +25153,16702530,"scripts_horeka/overfit_sample_tiny/sample.sh",1517,0,"",shellscript,selection_keyboard +25154,16702696,"scripts_horeka/overfit_sample_tiny/sample.sh",1517,0,"\",shellscript,content +25155,16702697,"scripts_horeka/overfit_sample_tiny/sample.sh",1518,0,"",shellscript,selection_keyboard +25156,16703453,"scripts_horeka/overfit_sample_tiny/sample.sh",1517,1,"",shellscript,content +25157,16703817,"scripts_horeka/overfit_sample_tiny/sample.sh",1490,0,"",shellscript,selection_command +25158,16704167,"scripts_horeka/overfit_sample_tiny/sample.sh",1489,0,"",shellscript,selection_command +25159,16704626,"scripts_horeka/overfit_sample_tiny/sample.sh",1502,0,"",shellscript,selection_command +25160,16705235,"scripts_horeka/overfit_sample_tiny/sample.sh",1502,0," ",shellscript,content +25161,16705236,"scripts_horeka/overfit_sample_tiny/sample.sh",1503,0,"",shellscript,selection_keyboard +25162,16705419,"scripts_horeka/overfit_sample_tiny/sample.sh",1503,0,"\",shellscript,content +25163,16705420,"scripts_horeka/overfit_sample_tiny/sample.sh",1504,0,"",shellscript,selection_keyboard +25164,16705808,"scripts_horeka/overfit_sample_tiny/sample.sh",1503,0,"",shellscript,selection_command +25165,16707827,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25166,16708131,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +25167,16708255,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\r\n",,terminal_output +25168,16709109,"TERMINAL",0,0,"╭─ Parsing error ─────────────────────────╮\r\n│ Argument --seq-len: expected 1 argument │\r\n│ ─────────────────────────────────────── │\r\n│ For full helptext, run 
sample.py --help │\r\n╰─────────────────────────────────────────╯\r\n]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +25169,16710908,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25170,16712910,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +25171,16715308,"scripts_horeka/overfit_sample_tiny/sample.sh",1519,0,"",shellscript,selection_mouse +25172,16715312,"scripts_horeka/overfit_sample_tiny/sample.sh",1518,0,"",shellscript,selection_command +25173,16716475,"scripts_horeka/overfit_sample_tiny/sample.sh",1519,0,"",shellscript,selection_command +25174,16716835,"scripts_horeka/overfit_sample_tiny/sample.sh",1519,0,"=",shellscript,content +25175,16716837,"scripts_horeka/overfit_sample_tiny/sample.sh",1520,0,"",shellscript,selection_keyboard +25176,16717038,"scripts_horeka/overfit_sample_tiny/sample.sh",1520,0,"1",shellscript,content +25177,16717039,"scripts_horeka/overfit_sample_tiny/sample.sh",1521,0,"",shellscript,selection_keyboard +25178,16717764,"scripts_horeka/overfit_sample_tiny/sample.sh",1520,1,"",shellscript,content +25179,16718430,"scripts_horeka/overfit_sample_tiny/sample.sh",1520,0,"3",shellscript,content +25180,16718432,"scripts_horeka/overfit_sample_tiny/sample.sh",1521,0,"",shellscript,selection_keyboard +25181,16718761,"scripts_horeka/overfit_sample_tiny/sample.sh",1520,0,"",shellscript,selection_command +25182,16718956,"scripts_horeka/overfit_sample_tiny/sample.sh",1519,0,"",shellscript,selection_command +25183,16719088,"scripts_horeka/overfit_sample_tiny/sample.sh",1518,1,"",shellscript,content +25184,16721294,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +25185,16721436,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\r\n",,terminal_output +25186,16724132,"TERMINAL",0,0,"2025-06-30 19:02:48.544983: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25187,16727983,"TERMINAL",0,0,"2025-06-30 19:02:52.396711: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25188,16734679,"TERMINAL",0,0,"2025-06-30 19:02:59.076658: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25189,16737051,"TERMINAL",0,0,"2025-06-30 19:03:01.381356: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25190,16740214,"TERMINAL",0,0,"2025-06-30 19:03:04.638921: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25191,16742226,"TERMINAL",0,0,"2025-06-30 19:03:06.650867: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25192,16744406,"TERMINAL",0,0,"2025-06-30 19:03:08.811427: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25193,16746864,"TERMINAL",0,0,"2025-06-30 19:03:11.197165: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25194,16749500,"TERMINAL",0,0,"2025-06-30 19:03:13.922131: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25195,16751554,"TERMINAL",0,0,"2025-06-30 19:03:15.979271: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25196,16754246,"TERMINAL",0,0,"2025-06-30 19:03:18.669307: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25197,16758949,"TERMINAL",0,0,"2025-06-30 19:03:23.288285: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25198,16761213,"TERMINAL",0,0,"2025-06-30 19:03:25.637304: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25199,16762405,"TERMINAL",0,0,"2025-06-30 19:03:26.830628: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25200,16763969,"TERMINAL",0,0,"2025-06-30 19:03:28.362003: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25201,16765586,"TERMINAL",0,0,"2025-06-30 19:03:29.933741: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25202,16767551,"TERMINAL",0,0,"2025-06-30 19:03:31.913107: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25203,16770415,"TERMINAL",0,0,"2025-06-30 19:03:34.780749: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25204,16773596,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +25205,16777184,"TERMINAL",0,0,"2025-06-30 19:03:41.607218: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25206,16779132,"TERMINAL",0,0,"2025-06-30 19:03:43.557414: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25207,16784343,"TERMINAL",0,0,"2025-06-30 19:03:48.665703: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25208,16786805,"TERMINAL",0,0,"Frame 1\r\n",,terminal_output +25209,16787417,"TERMINAL",0,0,"2025-06-30 19:03:51.738788: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25210,16790347,"TERMINAL",0,0,"2025-06-30 19:03:54.773401: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25211,16798189,"TERMINAL",0,0,"2025-06-30 19:04:02.615007: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25212,16801649,"TERMINAL",0,0,"2025-06-30 19:04:06.015181: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25213,16806666,"TERMINAL",0,0,"2025-06-30 19:04:11.038557: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25214,16809022,"TERMINAL",0,0,"Frame 2\r\n",,terminal_output +25215,16809534,"TERMINAL",0,0,"2025-06-30 19:04:13.880508: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25216,16812503,"TERMINAL",0,0,"2025-06-30 19:04:16.851534: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25217,16820434,"TERMINAL",0,0,"2025-06-30 19:04:24.820675: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25218,16823153,"TERMINAL",0,0,"2025-06-30 19:04:27.547916: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +25219,16830873,"TERMINAL",0,0,"SSIM: 0.5413179993629456\r\n",,terminal_output +25220,16832850,"TERMINAL",0,0,"]0;tum_cte0515@hkn0531:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +25221,17309781,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25222,17310485,"TERMINAL",0,0,"\r",,terminal_output +25223,17311224,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25224,17311605,"TERMINAL",0,0,"\r",,terminal_output +25225,17311896,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25226,17313298,"TERMINAL",0,0,"\r",,terminal_output +25227,17313698,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25228,17314032,"TERMINAL",0,0,"\r",,terminal_output +25229,17314352,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25230,17314566,"TERMINAL",0,0,"\r",,terminal_output +25231,17316324,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25232,17316497,"TERMINAL",0,0,"\r",,terminal_output +25233,17316715,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25234,17316782,"TERMINAL",0,0,"\r",,terminal_output +25235,17317037,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25236,17317104,"TERMINAL",0,0,"\r",,terminal_output +25237,17317317,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25238,17317383,"TERMINAL",0,0,"\r",,terminal_output +25239,17317510,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25240,17317638,"TERMINAL",0,0,"\r",,terminal_output +25241,17317764,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/sample.sh ",,terminal_output +25242,17317891,"TERMINAL",0,0,"\r",,terminal_output +25243,17382282,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output +25244,17382365,"TERMINAL",0,0,"\r(jafar) [tum_cte0515@hkn0531 jafar]$ \r(jafar) [tum_cte0515@hkn0531 jafar]$ ",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-3fb0e2a5-88e1-4992-bce0-2a2c4a35a7161758449976442-2025_09_21-12.20.21.273/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-3fb0e2a5-88e1-4992-bce0-2a2c4a35a7161758449976442-2025_09_21-12.20.21.273/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..b6cc0d9ff22f1fa574db6ff26d70b292914eb7d0 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-3fb0e2a5-88e1-4992-bce0-2a2c4a35a7161758449976442-2025_09_21-12.20.21.273/source.csv @@ -0,0 +1,1299 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,5,"train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, 
restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = False\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n assert not (\n args.lam_checkpoint and args.use_gt_actions\n ), ""Cannot use LAM when using GT actions. 
Please choose one.""\n if not args.use_gt_actions:\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> tuple[nnx.Optimizer, optax.Schedule]:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.Optimizer(genie, tx)\n return optimizer, lr_schedule\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.Optimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> ocp.CheckpointManager:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n 
keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: ocp.CheckpointManager,\n optimizer: nnx.Optimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int, nnx.Optimizer, grain.DataLoaderIterator, grain.DataLoaderIterator, jax.Array\n]:\n step = 0\n if restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n # NOTE: We have to remove the (unused) tokenizer vq dropout due to flax.nnx lazily initializing modules.\n # Specifically, the first dynamics model checkpoint will contain the vq dropout module,\n # but the first full restore will fail due to nnx not initializing the module when\n # dropout is set to 0.0.\n del optimizer.model.tokenizer.vq.drop\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_latent_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = 
dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_latent_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer, lr_schedule = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n training: bool = False,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_latent_actions, args.num_patch_latents\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.Optimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n 
metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs, training=False)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n lam_indices = genie.vq_encode(inputs, training=False)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n inputs[""latent_actions""] = lam_indices\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt[:, :-1].astype(\n args.dtype\n ) # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n step_outputs = {\n ""recon"": recon_full_frame,\n ""token_logits"": logits_full_frame,\n ""video_tokens"": tokens_full_frame,\n ""mask"": jnp.zeros_like(tokens_full_frame).at[:, -1].set(True),\n ""lam_indices"": lam_indices,\n }\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt, args.num_latent_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_loss_full_frame""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], 
val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +2,257,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"12:20:21 PM [info] Activating crowd-code\n12:20:21 PM [info] Recording started\n12:20:21 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,430,"extension-output-pdoom-org.crowd-code-#1-crowd-code",153,0,"12:20:21 PM [info] Git repository found\n12:20:21 PM [info] Git provider initialized successfully\n12:20:21 PM [info] Initial git state: [object Object]\n",Log,content +4,1791,"train_dynamics.py",0,0,"",python,tab +5,5630,"TERMINAL",0,0,"queue",,terminal_command +6,5700,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Sun Sep 21 12:20:26 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)",,terminal_output 
+7,6449,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +8,7852,"TERMINAL",0,0,"idling",,terminal_command +9,7902,"TERMINAL",0,0,"]633;C",,terminal_output +10,8001,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1990.localdomain: Sun Sep 21 12:20:29 2025Partition dev_cpuonly: 12 nodes idle\rPartition cpuonly: 118 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 11 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 1 nodes idle\rPartition large:\t 4 nodes idle\rPartition accelerated-h200:\t 4 nodes idle",,terminal_output +11,9042,"TERMINAL",0,0,"30",,terminal_output +12,10063,"TERMINAL",0,0,"1",,terminal_output +13,11097,"TERMINAL",0,0,"2",,terminal_output +14,11984,"TERMINAL",0,0,"",,terminal_command +15,12150,"TERMINAL",0,0,"3",,terminal_output +16,13176,"TERMINAL",0,0,"4",,terminal_output +17,14255,"TERMINAL",0,0,"5",,terminal_output +18,14874,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +19,19030,"TERMINAL",0,0,"",,terminal_command +20,36936,"train_dynamics.py",2229,0,"",python,selection_mouse +21,45969,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +22,860208,"train_dynamics.py",0,0,"",python,tab +23,860891,"train_lam.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.lam import LatentActionModel\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n vq_beta: float = 0.25\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n vq_reset_thresh: int = 50\n # LAM\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 6\n patch_size: int = 16\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_lam""\n tags: list[str] = field(default_factory=lambda: [""lam""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[LatentActionModel, jax.Array]:\n rng, _rng = 
jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n return (\n LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(\n model: LatentActionModel, args: Args\n) -> tuple[nnx.Optimizer, optax.Schedule]:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.Optimizer(model, tx)\n return optimizer, lr_schedule\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.Optimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> ocp.CheckpointManager:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, 
grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: ocp.CheckpointManager,\n optimizer: nnx.Optimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[int, nnx.Optimizer, grain.DataLoaderIterator, grain.DataLoaderIterator]:\n step = 0\n if restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef enable_sowing(lam: LatentActionModel) -> None:\n for model in [lam.encoder, lam.decoder]:\n setattr(model, ""sow_logits"", True)\n for blk in getattr(model, ""blocks"", []):\n setattr(blk, ""sow_weights"", True)\n setattr(blk, ""sow_activations"", True)\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n lam, rng = build_model(args, rng)\n\n # Count parameters\n _, params, _ = nnx.split(lam, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n 
# --- Initialize optimizer ---\n optimizer, lr_schedule = build_optimizer(lam, args)\n del lam\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def lam_loss_fn(\n model: LatentActionModel, inputs: dict, training: bool = True\n ) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n gt_future_frames = gt[:, 1:]\n mse = jnp.square(gt_future_frames - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_val = gt_future_frames.clip(0, 1).reshape(-1, *gt_future_frames.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n count_fn = jax.vmap(lambda i: (outputs[""indices""] == i).sum())\n index_counts = count_fn(jnp.arange(args.num_latents))\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=(index_counts != 0).mean(),\n )\n return loss, (outputs[""recon""], index_counts, metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.Optimizer,\n inputs: dict,\n action_last_active: jax.Array,\n rng: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, dict]:\n def loss_fn(\n model: LatentActionModel,\n ) -> tuple[jax.Array, tuple[jax.Array, jax.Array, dict]]:\n model.train()\n return lam_loss_fn(model, inputs, training=True)\n\n # --- Update model ---\n (loss, (recon, idx_counts, metrics)), grads = nnx.value_and_grad(\n loss_fn, has_aux=True\n )(optimizer.model)\n optimizer.update(grads)\n\n # --- Reset inactive latent actions ---\n codebook = optimizer.model.vq.codebook\n num_codes = len(codebook)\n active_codes = idx_counts != 0.0\n action_last_active = jnp.where(active_codes, 0, action_last_active + 1)\n p_code = active_codes / active_codes.sum()\n reset_idxs = jax.random.choice(rng, num_codes, shape=(num_codes,), p=p_code)\n do_reset = action_last_active >= args.vq_reset_thresh\n new_codebook = jnp.where(\n jnp.expand_dims(do_reset, -1), codebook[reset_idxs], codebook.value\n )\n optimizer.model.vq.codebook.value = new_codebook\n action_last_active = jnp.where(do_reset, 0, action_last_active)\n return loss, recon, action_last_active, metrics\n\n @nnx.jit\n def val_step(\n lam: LatentActionModel, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n lam.eval()\n (loss, (recon, _, metrics)) = 
lam_loss_fn(lam, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, lam):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(lam, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to run val_steps validation steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n action_last_active = jnp.zeros(args.num_latents, dtype=jnp.int32)\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(\n optimizer, first_batch, action_last_active, rng\n ).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng = jax.random.split(rng, 2)\n loss, recon, action_last_active, metrics = train_step(\n optimizer, batch, action_last_active, _rng\n )\n if step == first_step:\n print_mem_stats(""After params initialized"")\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0, 1:].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0, 1:].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 
255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n {\n ""val_image"": wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n ""val_recon"": wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n ""val_true_vs_recon"": wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n }\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +24,861299,"train_lam.py",0,0,"",python,tab +25,861923,"train_tokenizer.py",0,0,"import os\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nfrom typing import cast, Optional\n\nimport einops\nimport itertools\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n init_lr: float = 0.0\n max_lr: float = 3e-4\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n ffn_dim: int = 2048\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 4\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n # Logging\n log: bool = False\n entity: str = """"\n 
project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n wandb_id: str = """"\n use_flash_attention: bool = True\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[TokenizerVQVAE, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n return (\n TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n ffn_dim=args.ffn_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs,\n ),\n rng,\n )\n\n\ndef build_optimizer(\n model: TokenizerVQVAE, args: Args\n) -> tuple[nnx.Optimizer, optax.Schedule]:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.Optimizer(model, tx)\n return optimizer, lr_schedule\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n return mesh, replicated_sharding, videos_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.Optimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> ocp.CheckpointManager:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, 
ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n\n\ndef restore_checkpoint_if_needed(\n args: Args,\n checkpoint_manager: ocp.CheckpointManager,\n optimizer: nnx.Optimizer,\n train_iterator: grain.DataLoaderIterator,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[int, nnx.Optimizer, grain.DataLoaderIterator, grain.DataLoaderIterator]:\n step = 0\n if restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if args.val_data_dir:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(restore_step, args=restore_args)\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if args.val_data_dir:\n val_iterator = restored[""val_dataloader_state""]\n step = restore_step or 0\n print(f""Restored dataloader and model state from step {step}"")\n return step, optimizer, train_iterator, val_iterator\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n tokenizer, rng = build_model(args, rng)\n\n _, params, _ = nnx.split(tokenizer, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if 
args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer, lr_schedule = build_optimizer(tokenizer, args)\n del tokenizer\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding = build_mesh_and_sharding(num_devices)\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator = restore_checkpoint_if_needed(\n args, checkpoint_manager, optimizer, train_iterator, val_iterator\n )\n\n # --- Define loss and train step (close over args) ---\n def tokenizer_loss_fn(\n model: TokenizerVQVAE, inputs: dict, training: bool = False\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs, training=training)\n outputs[""recon""] = outputs[""recon""].astype(jnp.float32)\n mse = jnp.square(gt - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n gt_clipped = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_clipped, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_clipped, recon)).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.Optimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: TokenizerVQVAE) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return tokenizer_loss_fn(model, inputs, training=True)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(\n tokenizer: TokenizerVQVAE, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n tokenizer.eval()\n (loss, (recon, metrics)) = tokenizer_loss_fn(tokenizer, inputs, training=False)\n return loss, recon, metrics\n\n def calculate_validation_metrics(val_dataloader, tokenizer):\n step = 0\n loss_per_step = []\n 
metrics_per_step = []\n batch = None\n recon = None\n for batch in val_dataloader:\n loss, recon, metrics = val_step(tokenizer, batch)\n loss_per_step.append(loss)\n metrics_per_step.append(metrics)\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to run val_steps validation steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_loss = np.mean(loss_per_step)\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = val_loss\n return val_metrics, batch, recon\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon = calculate_validation_metrics(\n dataloader_val, optimizer.model\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.val_data_dir and step % args.val_interval == 0:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_results[""val_comparison_seq""] = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_results[""val_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n 
recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if args.val_data_dir and step % args.val_interval == 0:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(val_results[""gt_seq_val""][0])\n ),\n val_recon=wandb.Image(\n np.asarray(val_results[""recon_seq_val""][0])\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n optimizer_state = nnx.state(optimizer)\n if args.val_data_dir:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +26,862285,"train_tokenizer.py",0,0,"",python,tab +27,864363,"TERMINAL",0,0,"",,terminal_focus +28,866187,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate",,terminal_command +29,867678,"TERMINAL",0,0,"git status",,terminal_command +30,867728,"TERMINAL",0,0,"]633;C",,terminal_output +31,867868,"TERMINAL",0,0,"On branch gt-actions\r\nYour branch is up to date with 'origin/gt-actions'.\r\n\r\nLast commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tinput_pipeline/generate_breakout_dataset_agent.py\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/visualizer.py\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +32,923976,"TERMINAL",0,0,"git diff",,terminal_command +33,924023,"TERMINAL",0,0,"]633;C[?1h=\r\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +34,928849,"slurm/jobs/mihir/horeka/preprocessing/breakout_chunked.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=10:00:00\n#SBATCH --partition=large\n#SBATCH --cpus-per-task=16\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/breakout/%x_%j.log\n#SBATCH 
--error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/preprocess/breakout/%x_%j.log\n#SBATCH --job-name=preprocess_breakout_chunked\n\nsource .venv/bin/activate\n\npython input_pipeline/generate_breakout_dataset.py \\n --output_dir /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split_2",shellscript,tab +35,935335,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --job-name=train_dyn_default_breakout_longer\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3502552\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3502552\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=3e-6 \\n --log_image_interval=250 \\n --log_checkpoint_interval=250 \\n --dyna_type=maskgit \\n --log \\n --name=breakout-dyn-default-$slurm_job_id \\n --tags dyn breakout default \\n --entity instant-uv \\n --project jafar \\n --patch_size 4 \\n --lam_patch_size 4 \\n --warmup_steps 500 \\n --wsd_decay_steps 2000 \\n --num_steps 10000 \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --lam_checkpoint $lam_checkpoint \\n --val_interval 250 \\n --eval_full_frame \\n",shellscript,tab +36,939401,"TERMINAL",0,0,"bash",,terminal_focus +37,939403,"train_tokenizer.py",0,0,"",python,tab +38,944178,"train_lam.py",0,0,"",python,tab +39,944619,"slurm/jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/lam/%x_%j.log\n#SBATCH --job-name=train_lam_default_breakout_long\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python train_lam.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --patch_size=4 \\n --log \\n --name=breakout-lam-default-$slurm_job_id \\n --tags lam breakout default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --num_steps 3500 \\n --max_lr 3e-5 \\n --warmup_steps 100 \\n --wsd_decay_steps 1000 \\n --log_image_interval 250 \\n --log_checkpoint_interval 250 \\n --log_checkpoint_keep_period 250 \\n --val_interval 250 \",shellscript,tab +40,945527,"slurm/jobs/mihir/horeka/breakout/default_runs/train_tokenizer_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_default_breakout_big_TS_long\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python train_tokenizer.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --patch_size=4 \\n --log \\n --name=breakout-tokenizer-default-$slurm_job_id \\n --tags tokenizer breakout default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --num_steps 3500 \\n --max_lr 3e-5 \\n --warmup_steps 100 \\n --wsd_decay_steps 1000 \\n --log_image_interval 250 \\n --log_checkpoint_interval 250 \\n --log_checkpoint_keep_period 250 \\n --val_interval 250 \\n #--num_latents 16 \\n",shellscript,tab +41,947861,"TERMINAL",0,0,"",,terminal_focus +42,949692,"TERMINAL",0,0,"source /home/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/activate",,terminal_command +43,1049396,"input_pipeline/generate_coinrun_dataset.py",0,0,"""""""\nGenerates a dataset of random-action CoinRun episodes.\nEpisodes are saved individually as memory-mapped files for efficient loading.\n""""""\n\nfrom dataclasses import dataclass\n\nfrom 
gym3 import types_np\nimport numpy as np\nfrom procgen import ProcgenGym3Env\nimport tyro\nimport json\nimport os\nfrom utils import save_chunks\n\n\n@dataclass\nclass Args:\n num_episodes_train: int = 10000\n num_episodes_val: int = 500\n num_episodes_test: int = 500\n output_dir: str = ""data/coinrun_episodes""\n min_episode_length: int = 1000\n max_episode_length: int = 1000\n chunk_size: int = 160\n chunks_per_file: int = 100\n seed: int = 0\n\n\nargs = tyro.cli(Args)\nassert (\n args.max_episode_length >= args.min_episode_length\n), ""Maximum episode length must be greater than or equal to minimum episode length.""\n\nif args.min_episode_length < args.chunk_size:\n print(\n ""Warning: Minimum episode length is smaller than chunk size. Note that episodes shorter than the chunk size will be discarded.""\n )\n\n\n# --- Generate episodes ---\ndef generate_episodes(num_episodes, split):\n episode_idx = 0\n episode_metadata = []\n obs_chunks = []\n act_chunks = []\n file_idx = 0\n output_dir_split = os.path.join(args.output_dir, split)\n while episode_idx < num_episodes:\n seed = np.random.randint(0, 10000)\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=seed)\n\n observations_seq = []\n actions_seq = []\n episode_obs_chunks = []\n episode_act_chunks = []\n\n # --- Run episode ---\n step_t = 0\n for step_t in range(args.max_episode_length):\n action = types_np.sample(env.ac_space, bshape=(env.num,))\n env.act(action)\n _, obs, first = env.observe()\n observations_seq.append(obs[""rgb""])\n actions_seq.append(action)\n if len(observations_seq) == args.chunk_size:\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n observations_seq = []\n actions_seq = []\n if first:\n break\n\n # --- Save episode ---\n if step_t + 1 >= args.min_episode_length:\n if observations_seq:\n if len(observations_seq) < args.chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {len(observations_seq)} frames, ""\n f""which is smaller than the requested chunk_size: {args.chunk_size}. 
""\n ""This might lead to performance degradation during training.""\n )\n episode_obs_chunks.append(observations_seq)\n episode_act_chunks.append(actions_seq)\n\n obs_chunks_data = [\n np.concatenate(seq, axis=0).astype(np.uint8)\n for seq in episode_obs_chunks\n ]\n act_chunks_data = [\n np.concatenate(act, axis=0) for act in episode_act_chunks\n ]\n obs_chunks.extend(obs_chunks_data)\n act_chunks.extend(act_chunks_data)\n\n ep_metadata, obs_chunks, file_idx, act_chunks = save_chunks(\n obs_chunks, file_idx, args.chunks_per_file, output_dir_split, act_chunks\n )\n episode_metadata.extend(ep_metadata)\n\n print(f""Episode {episode_idx} completed, length: {step_t + 1}."")\n episode_idx += 1\n else:\n print(f""Episode too short ({step_t + 1}), resampling..."")\n\n if len(obs_chunks) > 0:\n print(\n f""Warning: Dropping {len(obs_chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n\n print(f""Done generating {split} split"")\n return episode_metadata\n\n\ndef get_action_space():\n env = ProcgenGym3Env(num=1, env_name=""coinrun"", start_level=0)\n return env.ac_space.eltype.n\n\n\ndef main():\n # Set random seed and create dataset directories\n np.random.seed(args.seed)\n # --- Generate episodes ---\n train_episode_metadata = generate_episodes(args.num_episodes_train, ""train"")\n val_episode_metadata = generate_episodes(args.num_episodes_val, ""val"")\n test_episode_metadata = generate_episodes(args.num_episodes_test, ""test"")\n\n # --- Save metadata ---\n metadata = {\n ""env"": ""coinrun"",\n ""num_actions"": get_action_space(),\n ""num_episodes_train"": args.num_episodes_train,\n ""num_episodes_val"": args.num_episodes_val,\n ""num_episodes_test"": args.num_episodes_test,\n ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]\n ),\n ""avg_episode_len_val"": np.mean(\n [ep[""avg_seq_len""] for ep in val_episode_metadata]\n ),\n ""avg_episode_len_test"": np.mean(\n [ep[""avg_seq_len""] for ep in test_episode_metadata]\n ),\n ""episode_metadata_train"": train_episode_metadata,\n ""episode_metadata_val"": val_episode_metadata,\n ""episode_metadata_test"": test_episode_metadata,\n }\n with open(os.path.join(args.output_dir, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(f""Done generating dataset."")\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab +44,1050022,"input_pipeline/generate_coinrun_dataset.py",5074,0,"",python,selection_mouse +45,1052414,"input_pipeline/generate_coinrun_dataset.py",5195,1,"r",python,selection_command +46,1052503,"input_pipeline/generate_coinrun_dataset.py",143,2,"ro",python,selection_command +47,1054298,"input_pipeline/generate_coinrun_dataset.py",5089,1,"i",python,selection_command +48,1054405,"input_pipeline/generate_coinrun_dataset.py",5191,2,"in",python,selection_command +49,1054747,"input_pipeline/generate_coinrun_dataset.py",5278,3,"int",python,selection_command +50,1064498,"input_pipeline/pngs_to_array_records.py",0,0,"import os\nimport numpy as np\nfrom PIL import Image\nimport tyro\nfrom dataclasses import dataclass\nimport json\nimport multiprocessing as mp\nfrom utils import save_chunks\n\n\n@dataclass\nclass Args:\n input_path: str\n output_path: str\n env_name: str\n train_ratio: float = 0.8\n val_ratio: float = 0.1\n test_ratio: float = 0.1\n multigame: bool = False\n original_fps: int = 60\n target_fps: int = 10\n target_width: int = 64\n chunk_size: int = 160\n chunks_per_file: 
int = 100\n\n\ndef preprocess_pngs(input_dir, original_fps, target_fps, chunk_size, target_width):\n print(f""Processing PNGs in {input_dir}"")\n try:\n png_files = sorted(\n [f for f in os.listdir(input_dir) if f.lower().endswith("".png"")],\n key=lambda x: int(os.path.splitext(x)[0]),\n )\n\n if not png_files:\n print(f""No PNG files found in {input_dir}"")\n return []\n\n # Downsample indices\n n_total = len(png_files)\n if original_fps == target_fps:\n selected_indices = np.arange(n_total)\n else:\n n_target = int(np.floor(n_total * target_fps / original_fps))\n selected_indices = np.linspace(0, n_total - 1, n_target, dtype=int)\n\n selected_files = [png_files[i] for i in selected_indices]\n\n # Load images\n chunks = []\n frames = []\n for fname in selected_files:\n img = Image.open(os.path.join(input_dir, fname)).convert(""RGB"")\n w, h = img.size # PIL gives (width, height)\n if w != target_width:\n target_height = int(round(h * (target_width / float(w))))\n resample_filter = Image.LANCZOS\n img = img.resize(\n (target_width, target_height), resample=resample_filter\n )\n frames.append(np.array(img))\n if len(frames) == chunk_size:\n chunks.append(frames)\n frames = []\n\n if len(frames) < chunk_size:\n print(\n f""Warning: Inconsistent chunk_sizes. Episode has {len(frames)} frames, ""\n f""which is smaller than the requested chunk_size: {chunk_size}. ""\n ""This might lead to performance degradation during training.""\n )\n chunks.append(frames)\n chunks = [np.stack(chunk, axis=0) for chunk in chunks]\n\n return chunks\n except Exception as e:\n print(f""Error processing {input_dir}: {e}"")\n return []\n\n\ndef save_split(pool_args, chunks_per_file, output_path):\n num_processes = mp.cpu_count()\n print(f""Number of processes: {num_processes}"")\n chunks = []\n file_idx = 0\n results = []\n for bucket_idx in range(0, len(pool_args), num_processes):\n args_batch = pool_args[bucket_idx : bucket_idx + num_processes]\n with mp.Pool(processes=num_processes) as pool:\n for episode_chunks in pool.starmap(preprocess_pngs, args_batch):\n chunks.extend(episode_chunks)\n results_batch, chunks, file_idx, _ = save_chunks(\n chunks, file_idx, chunks_per_file, output_path\n )\n results.extend(results_batch)\n\n if len(chunks) > 0:\n print(\n f""Warning: Dropping {len(chunks)} chunks for consistent number of chunks per file."",\n ""Consider changing the chunk_size and chunks_per_file parameters to prevent data-loss."",\n )\n\n print(f""Done processing files. 
Saved to {output_path}"")\n return results\n\n\ndef main():\n args = tyro.cli(Args)\n print(f""Output path: {args.output_path}"")\n total_ratio = args.train_ratio + args.val_ratio + args.test_ratio\n assert np.isclose(total_ratio, 1.0), ""Ratios must sum to 1.0""\n\n directories = [\n os.path.join(args.input_path, d)\n for d in os.listdir(args.input_path)\n if os.path.isdir(os.path.join(args.input_path, d))\n ]\n if args.multigame:\n episodes = [\n os.path.join(game, d) for game in directories for d in os.listdir(game)\n ]\n else:\n episodes = directories\n\n n_total = sum([len(os.listdir(episode)) for episode in episodes])\n n_train = int(n_total * args.train_ratio)\n n_val = int(n_total * args.val_ratio)\n\n pool_args_train = []\n pool_args_val = []\n pool_args_test = []\n\n train_counter = 0\n val_counter = 0\n np.random.shuffle(episodes)\n for episode in episodes:\n pool_arg = (\n episode,\n args.original_fps,\n args.target_fps,\n args.chunk_size,\n args.target_width,\n )\n n_frames = len(os.listdir(episode))\n if train_counter < n_train:\n pool_args_train.append(pool_arg)\n train_counter += n_frames\n elif val_counter < n_val:\n pool_args_val.append(pool_arg)\n val_counter += n_frames\n else:\n pool_args_test.append(pool_arg)\n\n train_episode_metadata = save_split(\n pool_args_train, args.chunks_per_file, os.path.join(args.output_path, ""train"")\n )\n val_episode_metadata = save_split(\n pool_args_val, args.chunks_per_file, os.path.join(args.output_path, ""val"")\n )\n test_episode_metadata = save_split(\n pool_args_test, args.chunks_per_file, os.path.join(args.output_path, ""test"")\n )\n\n # Calculate total number of chunks\n total_chunks = sum(\n ep[""num_chunks""]\n for ep in train_episode_metadata + val_episode_metadata + test_episode_metadata\n )\n\n print(""Done converting png to array_record files"")\n\n print(f""Total number of chunks: {total_chunks}"")\n\n metadata = {\n ""env"": args.env_name,\n ""total_chunks"": total_chunks,\n ""avg_episode_len_train"": np.mean(\n [ep[""avg_seq_len""] for ep in train_episode_metadata]\n ),\n ""avg_episode_len_val"": np.mean(\n [ep[""avg_seq_len""] for ep in val_episode_metadata]\n ),\n ""avg_episode_len_test"": np.mean(\n [ep[""avg_seq_len""] for ep in test_episode_metadata]\n ),\n ""episode_metadata_train"": train_episode_metadata,\n ""episode_metadata_val"": val_episode_metadata,\n ""episode_metadata_test"": test_episode_metadata,\n }\n\n with open(os.path.join(args.output_path, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n print(""Done."")\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab +51,1068038,"input_pipeline/pngs_to_array_records.py",3210,3,"int",python,selection_command +52,1068736,"input_pipeline/pngs_to_array_records.py",3430,3,"int",python,selection_command +53,1069470,"input_pipeline/pngs_to_array_records.py",3549,3,"int",python,selection_command +54,1069626,"input_pipeline/pngs_to_array_records.py",4161,3,"int",python,selection_command +55,1072269,"input_pipeline/pngs_to_array_records.py",4207,0,"",python,selection_mouse +56,1073082,"input_pipeline/pngs_to_array_records.py",4161,0,"",python,selection_command +57,1073738,"input_pipeline/pngs_to_array_records.py",4161,3,"",python,content +58,1074333,"input_pipeline/pngs_to_array_records.py",4161,0,"r",python,content +59,1074334,"input_pipeline/pngs_to_array_records.py",4162,0,"",python,selection_keyboard +60,1074408,"input_pipeline/pngs_to_array_records.py",4162,0,"o",python,content 
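For reference, the preprocess_pngs routine recorded in the rows above downsamples a PNG sequence from original_fps to target_fps by evenly spaced index selection, not by frame blending. A minimal standalone sketch of that selection step, with names mirroring the recorded script:

    import numpy as np

    def select_frame_indices(n_total: int, original_fps: int, target_fps: int) -> np.ndarray:
        # No downsampling needed when the rates already match.
        if original_fps == target_fps:
            return np.arange(n_total)
        # Otherwise keep floor(n_total * target/original) frames, evenly spaced over the clip.
        n_target = int(np.floor(n_total * target_fps / original_fps))
        return np.linspace(0, n_total - 1, n_target, dtype=int)

    # e.g. 600 frames recorded at 60 fps yield 100 indices at a 10 fps target
    assert len(select_frame_indices(600, 60, 10)) == 100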
+61,1074411,"input_pipeline/pngs_to_array_records.py",4163,0,"",python,selection_keyboard +62,1074583,"input_pipeline/pngs_to_array_records.py",4163,0,"u",python,content +63,1074585,"input_pipeline/pngs_to_array_records.py",4164,0,"",python,selection_keyboard +64,1074694,"input_pipeline/pngs_to_array_records.py",4164,0,"n",python,content +65,1074696,"input_pipeline/pngs_to_array_records.py",4165,0,"",python,selection_keyboard +66,1074702,"input_pipeline/pngs_to_array_records.py",4165,0,"t",python,content +67,1074704,"input_pipeline/pngs_to_array_records.py",4166,0,"",python,selection_keyboard +68,1075954,"input_pipeline/pngs_to_array_records.py",4096,0,"",python,selection_command +69,1076354,"input_pipeline/pngs_to_array_records.py",4166,0,"",python,selection_command +70,1076514,"input_pipeline/pngs_to_array_records.py",4165,1,"",python,content +71,1076588,"input_pipeline/pngs_to_array_records.py",4165,0,"d",python,content +72,1076590,"input_pipeline/pngs_to_array_records.py",4166,0,"",python,selection_keyboard +73,1077719,"input_pipeline/pngs_to_array_records.py",4214,0,"",python,selection_command +74,1078058,"input_pipeline/pngs_to_array_records.py",4213,0,"",python,selection_command +75,1078759,"input_pipeline/pngs_to_array_records.py",4210,0,"",python,selection_command +76,1079135,"input_pipeline/pngs_to_array_records.py",4209,1,"",python,content +77,1079372,"input_pipeline/pngs_to_array_records.py",4208,1,"",python,content +78,1079533,"input_pipeline/pngs_to_array_records.py",4207,1,"",python,content +79,1079870,"input_pipeline/pngs_to_array_records.py",4207,0,"r",python,content +80,1079871,"input_pipeline/pngs_to_array_records.py",4208,0,"",python,selection_keyboard +81,1080005,"input_pipeline/pngs_to_array_records.py",4208,0,"o",python,content +82,1080006,"input_pipeline/pngs_to_array_records.py",4209,0,"",python,selection_keyboard +83,1080219,"input_pipeline/pngs_to_array_records.py",4209,0,"u",python,content +84,1080220,"input_pipeline/pngs_to_array_records.py",4210,0,"",python,selection_keyboard +85,1080293,"input_pipeline/pngs_to_array_records.py",4210,0,"n",python,content +86,1080294,"input_pipeline/pngs_to_array_records.py",4211,0,"",python,selection_keyboard +87,1080362,"input_pipeline/pngs_to_array_records.py",4211,0,"d",python,content +88,1080364,"input_pipeline/pngs_to_array_records.py",4212,0,"",python,selection_keyboard +89,1082701,"input_pipeline/pngs_to_array_records.py",381,4," int",python,selection_command +90,1084902,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +91,1087096,"input_pipeline/generate_coinrun_dataset.py",369,4," int",python,selection_command +92,1087912,"input_pipeline/generate_coinrun_dataset.py",403,4," int",python,selection_command +93,1088078,"input_pipeline/generate_coinrun_dataset.py",436,4," int",python,selection_command +94,1088230,"input_pipeline/generate_coinrun_dataset.py",516,4," int",python,selection_command +95,1088396,"input_pipeline/generate_coinrun_dataset.py",551,4," int",python,selection_command +96,1088508,"input_pipeline/generate_coinrun_dataset.py",578,4," int",python,selection_command +97,1088734,"input_pipeline/generate_coinrun_dataset.py",609,4," int",python,selection_command +98,1089026,"input_pipeline/generate_coinrun_dataset.py",629,4," int",python,selection_command +99,1089322,"input_pipeline/generate_coinrun_dataset.py",369,4," int",python,selection_command +100,1091375,"input_pipeline/pngs_to_array_records.py",0,0,"",python,tab +101,1092983,"input_pipeline/pngs_to_array_records.py",406,4," 
int",python,selection_command +102,1093319,"input_pipeline/pngs_to_array_records.py",433,4," int",python,selection_command +103,1093472,"input_pipeline/pngs_to_array_records.py",458,4," int",python,selection_command +104,1093628,"input_pipeline/pngs_to_array_records.py",489,4," int",python,selection_command +105,1093976,"input_pipeline/pngs_to_array_records.py",771,4," int",python,selection_command +106,1094335,"input_pipeline/pngs_to_array_records.py",1104,4," int",python,selection_command +107,1096969,"input_pipeline/pngs_to_array_records.py",1601,4," int",python,selection_command +108,1098533,"input_pipeline/pngs_to_array_records.py",381,4," int",python,selection_command +109,1099490,"input_pipeline/pngs_to_array_records.py",406,4," int",python,selection_command +110,1099663,"input_pipeline/pngs_to_array_records.py",433,4," int",python,selection_command +111,1099827,"input_pipeline/pngs_to_array_records.py",458,4," int",python,selection_command +112,1099947,"input_pipeline/pngs_to_array_records.py",489,4," int",python,selection_command +113,1100265,"input_pipeline/pngs_to_array_records.py",771,4," int",python,selection_command +114,1106444,"input_pipeline/video_to_array_records.py",0,0,"import ffmpeg\nimport numpy as np\nimport os\nimport tyro\nimport multiprocessing as mp\nfrom dataclasses import dataclass\nimport json\nimport pickle\nfrom array_record.python.array_record_module import ArrayRecordWriter\n\n""""""\nThis file processes video files by converting them into array records.\nIt splits videos into chunks of a specified size and saves them in a specified output folder.\nThe script uses multiprocessing to handle multiple videos concurrently and generates metadata for the processed videos.\n""""""\n\n\n@dataclass\nclass Args:\n input_path: str\n output_path: str\n env_name: str\n train_ratio: float = 0.8\n val_ratio: float = 0.1\n test_ratio: float = 0.1\n target_width: int = 160\n target_height: int = 90\n target_fps: int = 10\n chunk_size: int = 160\n chunks_per_file: int = 100\n\n\ndef _chunk_and_save_video(\n video_tensor,\n video_file_name: str,\n output_folder: str,\n chunk_size: int,\n chunks_per_file: int,\n file_index: int,\n) -> list[str]:\n """"""\n Reprocess a single ArrayRecord file by splitting videos into chunks.\n\n Args:\n video_file_name: Name of the video file\n output_folder: Output folder for the chunked files\n chunk_size: Number of frames per video chunk\n chunks_per_file: Number of video chunks per output file\n file_index: Index for naming output files\n\n Returns:\n List of paths to created ArrayRecord files\n """"""\n file_chunks = []\n\n current_episode_len = video_tensor.shape[0]\n if current_episode_len < chunk_size:\n print(\n f""Warning: Video has {current_episode_len} frames, skipping (need {chunk_size})""\n )\n return [{""path"": """", ""length"": 0, ""video_file_name"": video_file_name}]\n\n for start_idx in range(0, current_episode_len - chunk_size + 1, chunk_size):\n chunk = video_tensor[start_idx : start_idx + chunk_size]\n\n chunk_record = {\n ""raw_video"": chunk.tobytes(),\n ""sequence_length"": chunk_size,\n ""video_file_name"": video_file_name,\n }\n\n file_chunks.append(chunk_record)\n\n # Write chunks to output files\n output_files = []\n for i in range(0, len(file_chunks), chunks_per_file):\n batch_chunks = file_chunks[i : i + chunks_per_file]\n output_filename = (\n f""chunked_videos_{file_index:04d}_{i//chunks_per_file:04d}.array_record""\n )\n output_file = os.path.join(output_folder, output_filename)\n\n writer = 
ArrayRecordWriter(output_file, ""group_size:1"")\n for chunk in batch_chunks:\n writer.write(pickle.dumps(chunk))\n writer.close()\n\n output_files.append(\n {\n ""path"": output_file,\n ""length"": chunk_size,\n ""video_file_name"": video_file_name,\n }\n )\n print(f""Created {output_filename} with {len(batch_chunks)} video chunks"")\n\n print(\n f""Processed {video_file_name}: {len(file_chunks)} chunks -> {len(output_files)} files""\n )\n return output_files\n\n\ndef preprocess_video(\n idx,\n in_filename,\n output_path,\n target_width,\n target_height,\n target_fps,\n chunk_size,\n chunks_per_file,\n):\n """"""\n Preprocess a video file by reading it, resizing, changing its frame rate,\n and then chunking it into smaller segments to be saved as ArrayRecord files.\n\n Args:\n idx (int): Index of the video being processed.\n in_filename (str): Path to the input video file.\n output_path (str): Directory where the output ArrayRecord files will be saved.\n target_width (int): The target width for resizing the video frames.\n target_height (int): The target height for resizing the video frames.\n target_fps (int): The target frames per second for the output video.\n chunk_size (int): Number of frames per chunk.\n chunks_per_file (int): Number of chunks to be saved in each ArrayRecord file.\n\n Returns:\n list: A list of dictionaries containing metadata about the created ArrayRecord files.\n """"""\n\n print(f""Processing video {idx}, Filename: {in_filename}"")\n try:\n out, _ = (\n ffmpeg.input(in_filename)\n .filter(""fps"", fps=target_fps, round=""up"")\n .filter(""scale"", target_width, target_height)\n .output(""pipe:"", format=""rawvideo"", pix_fmt=""rgb24"")\n .run(capture_stdout=True, quiet=True)\n )\n\n frame_size = target_height * target_width * 3\n n_frames = len(out) // frame_size\n frames = np.frombuffer(out, np.uint8).reshape(\n n_frames, target_height, target_width, 3\n )\n\n result = _chunk_and_save_video(\n video_tensor=frames,\n video_file_name=in_filename,\n output_folder=output_path,\n chunk_size=chunk_size,\n chunks_per_file=chunks_per_file,\n file_index=idx,\n )\n return result\n except Exception as e:\n print(f""Error processing video {idx} ({in_filename}): {e}"")\n return [{""path"": """", ""length"": 0, ""video_file_name"": in_filename}]\n\n\ndef save_split(pool_args):\n num_processes = mp.cpu_count()\n print(f""Number of processes: {num_processes}"")\n results = []\n with mp.Pool(processes=num_processes) as pool:\n for result in pool.starmap(preprocess_video, pool_args):\n results.extend(result)\n return results\n\n\ndef main():\n args = tyro.cli(Args)\n\n print(f""Output path: {args.output_path}"")\n\n total_ratio = args.train_ratio + args.val_ratio + args.test_ratio\n assert np.isclose(total_ratio, 1.0), ""Ratios must sum to 1.0""\n\n print(""Converting video to array_record files..."")\n input_files = [\n os.path.join(args.input_path, in_filename)\n for in_filename in os.listdir(args.input_path)\n if in_filename.endswith("".mp4"") or in_filename.endswith("".webm"")\n ]\n n_total = len(input_files)\n n_train = int(n_total * args.train_ratio)\n n_val = int(n_total * args.val_ratio)\n\n np.random.shuffle(input_files)\n file_splits = {\n ""train"": input_files[:n_train],\n ""val"": input_files[n_train : n_train + n_val],\n ""test"": input_files[n_train + n_val :],\n }\n\n pool_args = dict()\n for split in file_splits.keys():\n pool_args[split] = []\n os.makedirs(os.path.join(args.output_path, split), exist_ok=True)\n for idx, in_filename in 
enumerate(file_splits[split]):\n pool_args[split].append(\n (\n idx,\n in_filename,\n os.path.join(args.output_path, split),\n args.target_width,\n args.target_height,\n args.target_fps,\n args.chunk_size,\n args.chunks_per_file,\n )\n )\n\n train_episode_metadata = save_split(pool_args[""train""])\n val_episode_metadata = save_split(pool_args[""val""])\n test_episode_metadata = save_split(pool_args[""test""])\n\n print(""Done converting video to array_record files"")\n\n results = train_episode_metadata + val_episode_metadata + test_episode_metadata\n # count the number of short and failed videos\n failed_videos = [result for result in results if result[""length""] == 0]\n num_successful_videos = len(results) - len(failed_videos)\n print(f""Number of failed videos: {len(failed_videos)}"")\n print(f""Number of successful videos: {num_successful_videos}"")\n print(f""Number of total files: {len(input_files)}"")\n print(f""Number of total chunks: {len(results)}"")\n\n metadata = {\n ""env"": args.env_name,\n ""total_chunks"": len(results),\n ""total_videos"": len(input_files),\n ""num_successful_videos"": len(input_files) - len(failed_videos),\n ""num_failed_videos"": len(failed_videos),\n ""avg_episode_len_train"": np.mean(\n [ep[""length""] for ep in train_episode_metadata]\n ),\n ""avg_episode_len_val"": np.mean([ep[""length""] for ep in val_episode_metadata]),\n ""avg_episode_len_test"": np.mean([ep[""length""] for ep in test_episode_metadata]),\n ""episode_metadata_train"": train_episode_metadata,\n ""episode_metadata_val"": val_episode_metadata,\n ""episode_metadata_test"": test_episode_metadata,\n }\n\n with open(os.path.join(args.output_path, ""metadata.json""), ""w"") as f:\n json.dump(metadata, f)\n\n\nif __name__ == ""__main__"":\n main()\n",python,tab +115,1107744,"input_pipeline/video_to_array_records.py",269,4," int",python,selection_command +116,1108276,"input_pipeline/video_to_array_records.py",306,4," int",python,selection_command +117,1108434,"input_pipeline/video_to_array_records.py",693,4," int",python,selection_command +118,1108614,"input_pipeline/video_to_array_records.py",722,4," int",python,selection_command +119,1108811,"input_pipeline/video_to_array_records.py",747,4," int",python,selection_command +120,1108995,"input_pipeline/video_to_array_records.py",772,4," int",python,selection_command +121,1109185,"input_pipeline/video_to_array_records.py",803,4," int",python,selection_command +122,1109368,"input_pipeline/video_to_array_records.py",926,4," int",python,selection_command +123,1109591,"input_pipeline/video_to_array_records.py",952,4," int",python,selection_command +124,1109855,"input_pipeline/video_to_array_records.py",973,4," int",python,selection_command +125,1110171,"input_pipeline/video_to_array_records.py",1062,4," int",python,selection_command +126,1110646,"input_pipeline/video_to_array_records.py",3319,4," int",python,selection_command +127,1111376,"input_pipeline/video_to_array_records.py",5950,4," int",python,selection_command +128,1113980,"input_pipeline/video_to_array_records.py",5953,0,"",python,selection_mouse +129,1114798,"input_pipeline/video_to_array_records.py",5954,0,"",python,selection_command +130,1115304,"input_pipeline/video_to_array_records.py",5951,3,"",python,content +131,1115965,"input_pipeline/video_to_array_records.py",5951,0,"r",python,content +132,1115966,"input_pipeline/video_to_array_records.py",5952,0,"",python,selection_keyboard +133,1116083,"input_pipeline/video_to_array_records.py",5952,0,"o",python,content 
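For reference, preprocess_video in the rows above decodes each clip to raw rgb24 bytes with ffmpeg-python and recovers the frame tensor by reshaping the byte buffer. A minimal sketch of that decode step, using the same calls as the recorded script (the sample filename is hypothetical):

    import ffmpeg
    import numpy as np

    def decode_video(path: str, width: int, height: int, fps: int) -> np.ndarray:
        # Resample to the target fps, rescale, and stream raw rgb24 bytes to stdout.
        out, _ = (
            ffmpeg.input(path)
            .filter("fps", fps=fps, round="up")
            .filter("scale", width, height)
            .output("pipe:", format="rawvideo", pix_fmt="rgb24")
            .run(capture_stdout=True, quiet=True)
        )
        # One frame is height * width * 3 bytes; integer division drops any partial tail.
        n_frames = len(out) // (height * width * 3)
        return np.frombuffer(out, np.uint8).reshape(n_frames, height, width, 3)

    frames = decode_video("episode.mp4", 160, 90, 10)  # hypothetical input clip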
+134,1116085,"input_pipeline/video_to_array_records.py",5953,0,"",python,selection_keyboard +135,1116289,"input_pipeline/video_to_array_records.py",5953,0,"u",python,content +136,1116291,"input_pipeline/video_to_array_records.py",5954,0,"",python,selection_keyboard +137,1116362,"input_pipeline/video_to_array_records.py",5954,0,"n",python,content +138,1116363,"input_pipeline/video_to_array_records.py",5955,0,"",python,selection_keyboard +139,1116461,"input_pipeline/video_to_array_records.py",5955,0,"d",python,content +140,1116463,"input_pipeline/video_to_array_records.py",5956,0,"",python,selection_keyboard +141,1116932,"input_pipeline/video_to_array_records.py",6004,0,"",python,selection_command +142,1118229,"input_pipeline/video_to_array_records.py",6000,0,"",python,selection_command +143,1118553,"input_pipeline/video_to_array_records.py",5997,3,"",python,content +144,1119113,"input_pipeline/video_to_array_records.py",5997,0,"r",python,content +145,1119114,"input_pipeline/video_to_array_records.py",5998,0,"",python,selection_keyboard +146,1119214,"input_pipeline/video_to_array_records.py",5998,0,"o",python,content +147,1119216,"input_pipeline/video_to_array_records.py",5999,0,"",python,selection_keyboard +148,1119418,"input_pipeline/video_to_array_records.py",5999,0,"u",python,content +149,1119420,"input_pipeline/video_to_array_records.py",6000,0,"",python,selection_keyboard +150,1119485,"input_pipeline/video_to_array_records.py",6000,0,"n",python,content +151,1119487,"input_pipeline/video_to_array_records.py",6001,0,"",python,selection_keyboard +152,1119552,"input_pipeline/video_to_array_records.py",6001,0,"d",python,content +153,1119553,"input_pipeline/video_to_array_records.py",6002,0,"",python,selection_keyboard +154,1120061,"input_pipeline/video_to_array_records.py",6001,0,"",python,selection_command +155,1122061,"input_pipeline/video_to_array_records.py",269,4," int",python,selection_command +156,1122618,"input_pipeline/video_to_array_records.py",306,4," int",python,selection_command +157,1122773,"input_pipeline/video_to_array_records.py",693,4," int",python,selection_command +158,1122927,"input_pipeline/video_to_array_records.py",722,4," int",python,selection_command +159,1123062,"input_pipeline/video_to_array_records.py",747,4," int",python,selection_command +160,1123241,"input_pipeline/video_to_array_records.py",772,4," int",python,selection_command +161,1123347,"input_pipeline/video_to_array_records.py",803,4," int",python,selection_command +162,1123678,"input_pipeline/video_to_array_records.py",926,4," int",python,selection_command +163,1123792,"input_pipeline/video_to_array_records.py",952,4," int",python,selection_command +164,1123937,"input_pipeline/video_to_array_records.py",973,4," int",python,selection_command +165,1124124,"input_pipeline/video_to_array_records.py",1062,4," int",python,selection_command +166,1124292,"input_pipeline/video_to_array_records.py",3319,4," int",python,selection_command +167,1124401,"input_pipeline/video_to_array_records.py",269,4," int",python,selection_command +168,1124558,"input_pipeline/video_to_array_records.py",306,4," int",python,selection_command +169,1124710,"input_pipeline/video_to_array_records.py",693,4," int",python,selection_command +170,1124950,"input_pipeline/video_to_array_records.py",722,4," int",python,selection_command +171,1125131,"input_pipeline/video_to_array_records.py",747,4," int",python,selection_command +172,1125266,"input_pipeline/video_to_array_records.py",772,4," int",python,selection_command 
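For reference, the keystroke edits in the rows above swap int(...) back to round(...) in the n_train/n_val split-size computations of pngs_to_array_records.py and video_to_array_records.py, committed below as "revert round->int regression". The two can differ by one episode because int() truncates toward zero while round() takes the nearest integer; a minimal illustration with assumed sample counts:

    # n_train = round(n_total * train_ratio), as in the recorded edit
    n_total, train_ratio = 1007, 0.8
    exact = n_total * train_ratio       # 805.6 (up to float error)
    assert int(exact) == 805            # int() truncates toward zero
    assert round(exact) == 806          # round() takes the nearest integer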
+173,1125404,"input_pipeline/video_to_array_records.py",803,4," int",python,selection_command +174,1125554,"input_pipeline/video_to_array_records.py",926,4," int",python,selection_command +175,1125701,"input_pipeline/video_to_array_records.py",952,4," int",python,selection_command +176,1125994,"input_pipeline/video_to_array_records.py",973,4," int",python,selection_command +177,1126333,"input_pipeline/video_to_array_records.py",1062,4," int",python,selection_command +178,1126478,"input_pipeline/video_to_array_records.py",3319,4," int",python,selection_command +179,1126678,"input_pipeline/video_to_array_records.py",269,4," int",python,selection_command +180,1140099,"TERMINAL",0,0,"git commit -am ""revert round->int regression""",,terminal_command +181,1140139,"TERMINAL",0,0,"]633;C",,terminal_output +182,1142763,"TERMINAL",0,0,"black....................................................................",,terminal_output +183,1145033,"TERMINAL",0,0,"Passed\r\n",,terminal_output +184,1145421,"TERMINAL",0,0,"[gt-actions 8a0085b] revert round->int regression\r\n 2 files changed, 4 insertions(+), 4 deletions(-)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +185,1146664,"TERMINAL",0,0,"gi tpush",,terminal_command +186,1146700,"TERMINAL",0,0,"]633;Cbash: gi: command not found...\r\nSimilar command is: 'go'\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +187,1148285,"TERMINAL",0,0,"git push",,terminal_command +188,1148364,"TERMINAL",0,0,"]633;C",,terminal_output +189,1149795,"TERMINAL",0,0,"Enumerating objects: 8, done.\r\nCounting objects: 12% (1/8)\rCounting objects: 25% (2/8)\rCounting objects: 37% (3/8)\rCounting objects: 50% (4/8)\rCounting objects: 62% (5/8)\rCounting objects: 75% (6/8)\rCounting objects: 87% (7/8)\rCounting objects: 100% (8/8)\rCounting objects: 100% (8/8), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 20% (1/5)\rCompressing objects: 40% (2/5)\rCompressing objects: 60% (3/5)\rCompressing objects: 80% (4/5)\rCompressing objects: 100% (5/5)\rCompressing objects: 100% (5/5), done.\r\nWriting objects: 20% (1/5)\rWriting objects: 40% (2/5)\rWriting objects: 60% (3/5)\rWriting objects: 80% (4/5)\rWriting objects: 100% (5/5)\rWriting objects: 100% (5/5), 2.90 KiB | 988.00 KiB/s, done.\r\nTotal 5 (delta 3), reused 0 (delta 0), pack-reused 0\r\nremote: Resolving deltas: 0% (0/3)\rremote: Resolving deltas: 33% (1/3)\rremote: Resolving deltas: 66% (2/3)\rremote: Resolving deltas: 100% (3/3)\rremote: Resolving deltas: 100% (3/3), completed with 3 local objects.\r\n",,terminal_output +190,1149961,"TERMINAL",0,0,"To github.com:p-doom/jasmine.git\r\n 7c97398..8a0085b gt-actions -> gt-actions\r\n",,terminal_output +191,1149989,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +192,1190315,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +193,1192410,"input_pipeline/pngs_to_array_records.py",0,0,"",python,tab +194,1213411,"genie.py",0,0,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: 
int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.use_gt_actions:\n self.lam = None\n else:\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.dyna_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n training: bool = True,\n ) -> Dict[str, 
jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n if self.use_gt_actions:\n action_indices_E = None\n latent_actions_BT1L = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n ).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n else:\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=latent_actions_BTm11L,\n )\n outputs[""mask_rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, maskgit_steps, temperature, sample_argmax\n )\n elif self.dyna_type == ""causal"":\n return self.sample_causal(batch, seq_len, temperature, sample_argmax)\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, self.num_patch_latents)\n )\n if self.use_gt_actions:\n latent_actions_BT1L = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n 
).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n 
logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n\n if self.use_gt_actions:\n latent_actions_BT1L = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n ).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, 
self.latent_action_dim)\n else:\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_causal = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n final_logits_BTNp1V = (\n dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp1V[:, step_t, step_n, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, step_n].set(final_logits_BV)\n\n new_carry = (rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def 
vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n 
nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # Reinitialize the optimizer states\n optimizer = nnx.Optimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +195,1213414,"genie.py",5571,7,"one_hot",python,selection_command +196,1223437,"genie.py",3162,0,"",python,selection_mouse +197,1223983,"genie.py",3172,0,"",python,selection_mouse +198,1238204,"genie.py",5552,0,"",python,selection_mouse +199,1238319,"genie.py",5542,19,"latent_actions_BT1L",python,selection_mouse +200,1244409,"genie.py",5755,0,"",python,selection_mouse +201,1244550,"genie.py",5739,21,"latent_actions_BTm11L",python,selection_mouse +202,1250646,"genie.py",6090,0,"",python,selection_mouse +203,1250792,"genie.py",6086,10,"z_q_BTm11L",python,selection_mouse +204,1252722,"genie.py",5954,0,"",python,selection_mouse +205,1252869,"genie.py",5954,11,"lam_outputs",python,selection_mouse +206,1253466,"genie.py",5827,0,"",python,selection_mouse +207,1253831,"genie.py",5827,0,": Dict[str, Array]",python,content +208,1253832,"genie.py",0,0,"from jax._src.basearray import Array\n",python,content +209,1254971,"genie.py",5965,0,"",python,selection_mouse +210,1256689,"genie.py",5864,18,"",python,content +211,1256689,"genie.py",0,37,"",python,content +212,1264588,"genie.py",5842,0,"",python,selection_mouse +213,1264950,"models/lam.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nnx.Module):\n """"""Latent Action ST-ViVit VQ-VAE\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n M: model dimension\n L: latent dimension\n E: B * (T - 1)\n H: height\n W: width\n C: number of channels (n_dim)\n P: patch token dimension (patch_size^2 * C)\n\n Tm1: T - 1\n Np1: N + 1\n """"""\n\n def __init__(\n self,\n in_dim: int,\n model_dim: int,\n ffn_dim: int,\n latent_dim: int,\n num_latents: int,\n patch_size: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n codebook_dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.in_dim = in_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.patch_size = patch_size\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.codebook_dropout = codebook_dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.patch_token_dim,\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n 
self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_in = nnx.Param(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (1, 1, 1, self.patch_token_dim)\n )\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n self.dtype,\n rngs=rngs,\n )\n self.patch_up = nnx.Linear(\n self.patch_token_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.action_up = nnx.Linear(\n self.latent_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n videos_BTHWC = batch[""videos""]\n outputs = self.vq_encode(videos_BTHWC, training)\n patch_BTNP = outputs[""patches""]\n z_q_BTm11L = outputs[""z_q""]\n action_BTm11M = self.action_up(z_q_BTm11L)\n patch_BTm1NM = self.patch_up(patch_BTNP[:, :-1])\n action_BTm1NM = jnp.broadcast_to(action_BTm11M, patch_BTm1NM.shape)\n video_action_patches_BTm1NM = action_BTm1NM + patch_BTm1NM\n del outputs[""patches""], patch_BTNP, patch_BTm1NM\n\n # --- Decode ---\n video_recon_BTm1P = self.decoder(video_action_patches_BTm1NM)\n video_recon_BTm1P = video_recon_BTm1P.astype(jnp.float32)\n video_recon_BTm1P = nnx.sigmoid(video_recon_BTm1P)\n video_recon_BTm1P = video_recon_BTm1P.astype(self.dtype)\n video_recon_BTm1HWC = unpatchify(video_recon_BTm1P, self.patch_size, H, W)\n outputs[""recon""] = video_recon_BTm1HWC\n return outputs\n\n def vq_encode(\n self, videos_BTHWC: jax.Array, training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Preprocess videos ---\n B, T = videos_BTHWC.shape[:2]\n patch_BTNP = patchify(videos_BTHWC, self.patch_size)\n action_pad_BT1P = jnp.broadcast_to(\n self.action_in.value, (B, T, 1, self.patch_token_dim)\n )\n padded_patch_BTNp1P = jnp.concatenate((action_pad_BT1P, patch_BTNP), axis=2)\n\n # --- Encode ---\n z_BTNp1L = self.encoder(padded_patch_BTNp1P)\n # Get latent action for all future frames\n z_BTm1L = z_BTNp1L[:, 1:, 0]\n\n # --- Vector quantize ---\n z_EL = z_BTm1L.reshape(B * (T - 1), self.latent_dim)\n z_q_EL, z_EL, emb_EL, indices_E = self.vq(z_EL, training)\n z_q_BTm11L = z_q_EL.reshape(B, T - 1, 1, self.latent_dim)\n return dict(\n patches=patch_BTNP, z_q=z_q_BTm11L, z=z_EL, emb=emb_EL, indices=indices_E\n )\n",python,tab +214,1264951,"models/lam.py",4210,0,"",python,selection_command +215,1274533,"models/lam.py",5163,0,"",python,selection_mouse +216,1274568,"models/lam.py",5162,0,"",python,selection_command +217,1274988,"models/lam.py",5111,0,"",python,selection_mouse +218,1302320,"models/lam.py",5125,0,"",python,selection_mouse +219,1303798,"models/lam.py",5129,0,"",python,selection_mouse +220,1303975,"models/lam.py",5128,6,"emb_EL",python,selection_mouse +221,1307155,"models/lam.py",4963,0,"",python,selection_mouse +222,1310125,"models/lam.py",2138,0,"",python,selection_command +223,1311848,"models/lam.py",2148,0,"",python,selection_mouse +224,1312200,"utils/nn.py",0,0,"import math\nfrom typing import Tuple, Callable, List\n\nfrom flax import nnx\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\ndef 
_get_spatiotemporal_positional_encoding(d_model: int, max_len: int = 5000):\n """"""\n Creates a function that applies separate sinusoidal positional encodings to the temporal and spatial dimensions.\n """"""\n pe = jnp.zeros((max_len, d_model))\n position = jnp.arange(0, max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(jnp.arange(0, d_model, 2) * (-math.log(10000.0) / d_model))\n pe = pe.at[:, 0::2].set(jnp.sin(position * div_term))\n pe = pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def _encode(x: jax.Array) -> jax.Array:\n """"""\n Args:\n x: The input tensor of shape (Batch, Time, Space, Dimension).\n\n Returns:\n The input tensor with positional encodings added.\n """"""\n assert x.ndim == 4, f""Input must be 4-dimensional, but got shape {x.shape}""\n\n num_timesteps = x.shape[1]\n num_spatial_patches = x.shape[2]\n\n # Temporal positional encoding: (1, T, 1, D)\n temporal_pe = pe[None, :num_timesteps, None, :]\n x = x + temporal_pe\n\n # Spatial positional encoding: (1, 1, S, D)\n spatial_pe = pe[None, None, :num_spatial_patches, :]\n x = x + spatial_pe\n\n return x\n\n return _encode\n\n\nclass STBlock(nnx.Module):\n def __init__(\n self,\n dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n sow_weights: bool,\n sow_activations: bool,\n ):\n self.dim = dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n rngs=rngs,\n decode=False,\n )\n\n self.temporal_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.dim,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=False,\n )\n\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(self, x_BTNM: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z_BTNM = self.spatial_norm(x_BTNM)\n z_BTNM = self.spatial_attention(z_BTNM, sow_weights=self.sow_weights)\n x_BTNM = x_BTNM + z_BTNM\n\n # --- Temporal attention ---\n x_BNTM = x_BTNM.swapaxes(1, 2)\n z_BNTM = self.temporal_norm(x_BNTM)\n z_BNTM = self.temporal_attention(z_BNTM, 
sow_weights=self.sow_weights)\n x_BNTM = x_BNTM + z_BNTM\n x_BTNM = x_BNTM.swapaxes(1, 2)\n\n # --- Feedforward ---\n z_BTNM = self.ffn_norm(x_BTNM)\n z_BTND = self.ffn_dense1(z_BTNM)\n z_BTND = jax.nn.gelu(z_BTND)\n z_BTNM = self.ffn_dense2(z_BTND)\n x_BTNM = x_BTNM + z_BTNM\n if self.sow_activations:\n self.sow(nnx.Intermediate, ""activations"", x_BTNM)\n return x_BTNM\n\n\nclass STTransformer(nnx.Module):\n """"""\n Dimension keys:\n B: batch size\n T: number of frames\n N: number of patches per frame\n I: number of input features\n M: model dimension\n D: FFN dimension\n V: vocabulary size\n """"""\n\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n sow_weights: bool = False,\n sow_activations: bool = False,\n sow_logits: bool = False,\n max_len: int = 5000,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_logits = sow_logits\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n\n self.pos_enc = _get_spatiotemporal_positional_encoding(\n self.model_dim, max_len=max_len\n )\n\n self.blocks = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n sow_weights=self.sow_weights,\n sow_activations=self.sow_activations,\n )\n )\n\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(self, x_BTNI: jax.Array) -> jax.Array:\n x_BTNI = self.input_norm1(x_BTNI)\n x_BTNM = self.input_dense(x_BTNI)\n x_BTNM = self.input_norm2(x_BTNM)\n x_BTNM = self.pos_enc(x_BTNM)\n for block in self.blocks:\n x_BTNM = block(x_BTNM)\n\n x_BTNV = self.output_dense(x_BTNM)\n if self.sow_logits:\n self.sow(nnx.Intermediate, ""logits"", x_BTNV)\n return x_BTNV\n\n\nclass TransformerBlock(nnx.Module):\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n sow_weights: bool,\n sow_activations: bool,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n self.sow_weights = sow_weights\n self.sow_activations = sow_activations\n\n self.temporal_norm = nnx.LayerNorm(\n 
num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.spatial_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.ffn_norm = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.temporal_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.model_dim,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n self.spatial_attention = nnx.MultiHeadAttention(\n num_heads=self.num_heads,\n in_features=self.model_dim,\n qkv_features=self.model_dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n rngs=rngs,\n decode=self.decode,\n )\n self.ffn_dense1 = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.ffn_dense2 = nnx.Linear(\n in_features=self.ffn_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n @nnx.remat\n def __call__(\n self, x_BTNM: jax.Array, pos_index: Tuple[jax.Array, jax.Array] | None = None\n ) -> jax.Array:\n # --- Spatial attention ---\n B, T, N, M = x_BTNM.shape\n z_FNM = einops.rearrange(x_BTNM, ""b t n m -> (b t) n m"")\n z_FNM = self.spatial_norm(z_FNM)\n z_FNM = self.spatial_attention(z_FNM, sow_weights=self.sow_weights)\n z_BTNM = einops.rearrange(z_FNM, ""(b t) n m -> b t n m"", t=T)\n x_BTNM = x_BTNM + z_BTNM\n # --- Temporal attention ---\n z_PTM = einops.rearrange(x_BTNM, ""b t n m -> (b n) t m"")\n z_PTM = self.temporal_norm(z_PTM)\n z_PTM = self.temporal_attention(z_PTM, sow_weights=self.sow_weights)\n z_BTNM = einops.rearrange(z_PTM, ""(b n) t m -> b t n m"", n=N)\n x_BTNM = x_BTNM + z_BTNM\n # --- Feedforward ---\n z_BTNM = self.ffn_norm(x_BTNM)\n z_BTND = self.ffn_dense1(z_BTNM)\n z_BTND = jax.nn.gelu(z_BTND)\n z_BTNM = self.ffn_dense2(z_BTND)\n x_BTNM = x_BTNM + z_BTNM\n if self.sow_activations:\n self.sow(nnx.Intermediate, ""activations"", x_BTNM)\n\n return x_BTNM\n\n\nclass Transformer(nnx.Module):\n """"""\n Dimension keys:\n B: batch size\n T: number of frames\n N: number of patches per frame\n I: number of input features\n M: model dimension\n D: FFN dimension\n V: vocabulary size\n F: number of frames in batch\n P: number of patch positions in batch\n """"""\n\n def __init__(\n self,\n input_dim: int,\n model_dim: int,\n ffn_dim: int,\n out_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n sow_logits: bool = False,\n sow_weights: bool = False,\n sow_activations: bool = False,\n max_len: int = 5000,\n ):\n self.input_dim = input_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.out_dim = out_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.sow_logits = sow_logits\n self.sow_weights = 
sow_weights\n self.sow_activations = sow_activations\n\n self.input_norm1 = nnx.LayerNorm(\n num_features=self.input_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n self.input_dense = nnx.Linear(\n in_features=self.input_dim,\n out_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.input_norm2 = nnx.LayerNorm(\n num_features=self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.param_dtype, # layer norm in full precision\n rngs=rngs,\n )\n\n self.pos_enc = _get_spatiotemporal_positional_encoding(\n self.model_dim, max_len=max_len\n )\n\n self.blocks: List[TransformerBlock] = []\n for _ in range(self.num_blocks):\n self.blocks.append(\n TransformerBlock(\n model_dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n sow_weights=self.sow_weights,\n sow_activations=self.sow_activations,\n rngs=rngs,\n )\n )\n self.output_dense = nnx.Linear(\n in_features=self.model_dim,\n out_features=self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self, x_BTNI: jax.Array, pos_index: Tuple[jax.Array, jax.Array] | None = None\n ) -> jax.Array:\n x_BTNI = self.input_norm1(x_BTNI)\n x_BTNM = self.input_dense(x_BTNI)\n x_BTNM = self.input_norm2(x_BTNM)\n x_BTNM = self.pos_enc(x_BTNM)\n for block in self.blocks:\n x_BTNM = block(x_BTNM, pos_index)\n\n x_BTNV = self.output_dense(x_BTNM)\n if self.sow_logits:\n self.sow(nnx.Intermediate, ""logits"", x_BTNV)\n return x_BTNV\n\n\ndef normalize(x: jax.Array) -> jax.Array:\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nnx.Module):\n """"""\n Dimension keys:\n D: B * T * N\n K: number of latents\n L: latent dimension\n """"""\n\n def __init__(\n self,\n latent_dim: int,\n num_latents: int,\n dropout: float,\n dtype: jnp.dtype,\n rngs: nnx.Rngs,\n ):\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.dropout = dropout\n self.dtype = dtype\n\n self.codebook = nnx.Param(\n normalize(\n nnx.initializers.normal(stddev=1)(\n rngs.params(), (self.num_latents, self.latent_dim)\n )\n )\n )\n self.drop = nnx.Dropout(self.dropout, rngs=rngs)\n\n def __call__(\n self, x_DL: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x_DL = x_DL.astype(self.dtype)\n codebook = self.codebook.value.astype(self.dtype)\n\n x_DL = normalize(x_DL)\n normalized_codebook_KL = normalize(codebook)\n distance_DK = -jnp.matmul(x_DL, normalized_codebook_KL.T)\n if training:\n distance_DK = self.drop(distance_DK)\n\n # --- Get indices and embeddings ---\n indices_D = jnp.argmin(distance_DK, axis=-1)\n z_DL = codebook[indices_D]\n\n # --- Straight through estimator ---\n z_q_DL = x_DL + jax.lax.stop_gradient(z_DL - x_DL)\n return z_q_DL, z_DL, x_DL, indices_D\n\n def get_codes(self, indices_E: jax.Array) -> jax.Array:\n return self.codebook[indices_E]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool) -> Callable:\n """"""\n Create an attention function that uses flash attention if enabled.\n\n flax.nnx.MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim),\n but jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim). We reshape to\n ensure compatibility. 
cuDNN's flash attention additionally requires a sequence length that\n is a multiple of 4. We pad the sequence length to the nearest multiple of 4 and mask\n accordingly. Note that cuDNN requires the mask to be broadcast before calling the attention\n function due to strict shape checking.\n """"""\n\n def attention_fn(\n query_BTHD, key_BSHD, value_BSHD, bias=None, mask_B111=None, **kwargs\n ):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _merge_batch_dims(x):\n return einops.rearrange(x, ""... l h k -> (...) l h k"")\n\n def _pad(x, pad_size):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n original_shape = query_BTHD.shape\n T = query_BTHD.shape[-3]\n S = key_BSHD.shape[-3]\n\n # Pad to nearest multiple of 4\n Q = ((T + 3) // 4) * 4\n pad_size_Q = Q - T\n K = ((S + 3) // 4) * 4\n pad_size_K = K - S\n\n query_BQHD = _pad(_merge_batch_dims(query_BTHD), pad_size_Q)\n key_BKHD = _pad(_merge_batch_dims(key_BSHD), pad_size_K)\n value_BKHD = _pad(_merge_batch_dims(value_BSHD), pad_size_K)\n\n attention_mask = jnp.ones((Q, K), dtype=jnp.bool_)\n attention_mask = attention_mask.at[T:, :].set(False)\n attention_mask = attention_mask.at[:, S:].set(False)\n\n mask_11TS = attention_mask[jnp.newaxis, jnp.newaxis, :, :]\n\n bias_4d = (\n jnp.pad(\n _merge_batch_dims(bias),\n ((0, 0), (0, 0), (0, pad_size_Q), (0, pad_size_K)),\n )\n if bias is not None\n else None\n )\n\n # NOTE: jax.nn.dot_product_attention does not support dropout\n output_4d = jax.nn.dot_product_attention(\n query=query_BQHD,\n key=key_BKHD,\n value=value_BKHD,\n bias=bias_4d,\n mask=mask_11TS,\n implementation=implementation,\n is_causal=is_causal,\n )\n return output_4d[..., :T, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +225,1312202,"utils/nn.py",15847,0,"",python,selection_command +226,1318978,"genie.py",0,0,"",python,tab +227,1322094,"genie.py",5554,0,"",python,selection_mouse +228,1322797,"genie.py",5557,0,"",python,selection_mouse +229,1323067,"genie.py",5557,1,"B",python,selection_mouse +230,1323149,"genie.py",5557,2,"BT",python,selection_mouse +231,1323441,"genie.py",5557,3,"BT1",python,selection_mouse +232,1323664,"genie.py",5557,4,"BT1L",python,selection_mouse +233,1362565,"utils/nn.py",0,0,"",python,tab +234,1410351,"models/dynamics.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer, Transformer\n\n\nclass DynamicsMaskGIT(nnx.Module):\n """"""\n MaskGIT dynamics model\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n L: latent dimension\n V: vocabulary size (number of latents)\n """"""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n mask_limit: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n 
use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n training: bool = True,\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n if training:\n batch_size = vid_embed_BTNM.shape[0]\n _rng_prob, *_rngs_mask = jax.random.split(batch[""mask_rng""], batch_size + 1)\n mask_prob = jax.random.uniform(\n _rng_prob, shape=(batch_size,), minval=self.mask_limit\n )\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n else:\n mask = jnp.ones_like(video_tokens_BTN)\n\n # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n training: bool = True,\n ) -> tuple[jax.Array, jax.Array]:\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp1M = jnp.concatenate(\n [padded_act_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp1V = self.transformer(vid_embed_BTNp1M)\n logits_BTNV = logits_BTNp1V[:, :, :-1]\n return logits_BTNV, 
jnp.ones_like(video_tokens_BTN)\n",python,tab +235,1410355,"models/dynamics.py",4845,5,"Embed",python,selection_command +236,1425997,"genie.py",0,0,"",python,tab +237,1432003,"genie.py",3126,0,"",python,selection_mouse +238,1432939,"genie.py",3113,0,"",python,selection_mouse +239,1433091,"genie.py",3112,4,"self",python,selection_mouse +240,1433325,"genie.py",3112,8,"self.lam",python,selection_mouse +241,1433373,"genie.py",3120,0,": None",python,content +242,1434164,"genie.py",3099,0,"",python,selection_mouse +243,1435228,"genie.py",3133,0,"",python,selection_mouse +244,1436020,"genie.py",3120,6,"",python,content +245,1436585,"genie.py",3127,0,"",python,selection_mouse +246,1437036,"genie.py",3127,0,"\n ",python,content +247,1437795,"genie.py",3140,0,"s",python,content +248,1437796,"genie.py",3141,0,"",python,selection_keyboard +249,1438030,"genie.py",3141,0,"e",python,content +250,1438032,"genie.py",3142,0,"",python,selection_keyboard +251,1438106,"genie.py",3142,0,"l",python,content +252,1438107,"genie.py",3143,0,"",python,selection_keyboard +253,1438217,"genie.py",3143,0,"d",python,content +254,1438219,"genie.py",3144,0,"",python,selection_keyboard +255,1438321,"genie.py",3144,0,".",python,content +256,1438323,"genie.py",3145,0,"",python,selection_keyboard +257,1438826,"genie.py",3145,0,"a",python,content +258,1438828,"genie.py",3146,0,"",python,selection_keyboard +259,1439056,"genie.py",3146,0,"c",python,content +260,1439058,"genie.py",3147,0,"",python,selection_keyboard +261,1439385,"genie.py",3146,1,"",python,content +262,1439482,"genie.py",3145,1,"",python,content +263,1439616,"genie.py",3144,1,"",python,content +264,1439742,"genie.py",3143,1,"",python,content +265,1439854,"genie.py",3143,0,"f",python,content +266,1439855,"genie.py",3144,0,"",python,selection_keyboard +267,1439957,"genie.py",3144,0,".",python,content +268,1439959,"genie.py",3145,0,"",python,selection_keyboard +269,1440192,"genie.py",3145,0,"a",python,content +270,1440194,"genie.py",3146,0,"",python,selection_keyboard +271,1440308,"genie.py",3146,0,"c",python,content +272,1440310,"genie.py",3147,0,"",python,selection_keyboard +273,1440504,"genie.py",3147,0,"t",python,content +274,1440506,"genie.py",3148,0,"",python,selection_keyboard +275,1440618,"genie.py",3148,0,"i",python,content +276,1440620,"genie.py",3149,0,"",python,selection_keyboard +277,1440695,"genie.py",3149,0,"o",python,content +278,1440697,"genie.py",3150,0,"",python,selection_keyboard +279,1440843,"genie.py",3150,0,"n",python,content +280,1440845,"genie.py",3151,0,"",python,selection_keyboard +281,1441167,"genie.py",3151,0,"_",python,content +282,1441169,"genie.py",3152,0,"",python,selection_keyboard +283,1441413,"genie.py",3152,0,"e",python,content +284,1441415,"genie.py",3153,0,"",python,selection_keyboard +285,1441674,"genie.py",3153,0,"m",python,content +286,1441676,"genie.py",3154,0,"",python,selection_keyboard +287,1441884,"genie.py",3154,0,"b",python,content +288,1441886,"genie.py",3155,0,"",python,selection_keyboard +289,1441893,"genie.py",3155,0,"e",python,content +290,1441894,"genie.py",3156,0,"",python,selection_keyboard +291,1441973,"genie.py",3156,0,"d",python,content +292,1441975,"genie.py",3157,0,"",python,selection_keyboard +293,1443987,"genie.py",3157,0," ",python,content +294,1443989,"genie.py",3158,0,"",python,selection_keyboard +295,1444169,"genie.py",3158,0,"=",python,content +296,1444170,"genie.py",3159,0,"",python,selection_keyboard +297,1444293,"genie.py",3159,0," ",python,content 
+298,1444294,"genie.py",3160,0,"",python,selection_keyboard +299,1444779,"genie.py",3160,0,"n",python,content +300,1444781,"genie.py",3161,0,"",python,selection_keyboard +301,1444901,"genie.py",3161,0,"n",python,content +302,1444903,"genie.py",3162,0,"",python,selection_keyboard +303,1445038,"genie.py",3162,0,"x",python,content +304,1445039,"genie.py",3163,0,"",python,selection_keyboard +305,1445169,"genie.py",3163,0,".",python,content +306,1445171,"genie.py",3164,0,"",python,selection_keyboard +307,1452221,"genie.py",3164,0,"E",python,content +308,1452224,"genie.py",3165,0,"",python,selection_keyboard +309,1452455,"genie.py",3165,0,"m",python,content +310,1452456,"genie.py",3166,0,"",python,selection_keyboard +311,1453483,"genie.py",3164,2,"Embed",python,content +312,1454313,"genie.py",3169,0,"()",python,content +313,1454314,"genie.py",3170,0,"",python,selection_keyboard +314,1464149,"genie.py",3170,0,"s",python,content +315,1464151,"genie.py",3171,0,"",python,selection_keyboard +316,1464306,"genie.py",3171,0,"e",python,content +317,1464308,"genie.py",3172,0,"",python,selection_keyboard +318,1464374,"genie.py",3172,0,"l",python,content +319,1464375,"genie.py",3173,0,"",python,selection_keyboard +320,1464480,"genie.py",3173,0,"f",python,content +321,1464482,"genie.py",3174,0,"",python,selection_keyboard +322,1464595,"genie.py",3174,0,".",python,content +323,1464597,"genie.py",3175,0,"",python,selection_keyboard +324,1469038,"genie.py",3174,0,"",python,selection_command +325,1477621,"genie.py",5620,6,"one_ho",python,selection_command +326,1478892,"genie.py",5610,0,"",python,selection_mouse +327,1484735,"genie.py",3140,0,"",python,selection_command +328,1486028,"genie.py",3176,0,"",python,selection_command +329,1486476,"genie.py",3175,0,"",python,selection_command +330,1486812,"genie.py",3175,0,"l",python,content +331,1486813,"genie.py",3176,0,"",python,selection_keyboard +332,1486917,"genie.py",3176,0,"a",python,content +333,1486919,"genie.py",3177,0,"",python,selection_keyboard +334,1488112,"genie.py",3177,0,"t",python,content +335,1488113,"genie.py",3178,0,"",python,selection_keyboard +336,1489155,"genie.py",3175,3,"latent_action_dim",python,content +337,1490319,"genie.py",3192,0,",",python,content +338,1490321,"genie.py",3193,0,"",python,selection_keyboard +339,1490468,"genie.py",3193,0," ",python,content +340,1490468,"genie.py",3194,0,"",python,selection_keyboard +341,1492536,"genie.py",3193,0,"",python,selection_command +342,1494054,"genie.py",3140,0,"",python,selection_command +343,1495446,"genie.py",5610,0,"",python,selection_command +344,1512713,"genie.py",5637,0,"",python,selection_mouse +345,1513429,"genie.py",5642,0,"",python,selection_mouse +346,1515858,"genie.py",5597,0,"",python,selection_mouse +347,1515859,"genie.py",5596,0,"",python,selection_command +348,1516875,"genie.py",5632,0,"",python,selection_mouse +349,1517120,"genie.py",5632,4,"jax.",python,selection_mouse +350,1517154,"genie.py",5632,5,"jax.n",python,selection_mouse +351,1517155,"genie.py",5632,6,"jax.nn",python,selection_mouse +352,1517188,"genie.py",5632,7,"jax.nn.",python,selection_mouse +353,1517222,"genie.py",5632,8,"jax.nn.o",python,selection_mouse +354,1517223,"genie.py",5632,9,"jax.nn.on",python,selection_mouse +355,1517352,"genie.py",5632,10,"jax.nn.one",python,selection_mouse +356,1517434,"genie.py",5632,11,"jax.nn.one_",python,selection_mouse +357,1517508,"genie.py",5632,12,"jax.nn.one_h",python,selection_mouse +358,1517513,"genie.py",5632,13,"jax.nn.one_ho",python,selection_mouse 
+359,1517587,"genie.py",5632,14,"jax.nn.one_hot",python,selection_mouse +360,1518509,"genie.py",5646,0,"",python,selection_mouse +361,1519607,"genie.py",5634,0,"",python,selection_mouse +362,1520601,"genie.py",5633,0,"",python,selection_mouse +363,1521845,"genie.py",5632,0,"",python,selection_mouse +364,1551432,"genie.py",5962,0,"",python,selection_mouse +365,1551600,"genie.py",5959,10,"z_q_BTm11L",python,selection_mouse +366,1552264,"genie.py",5890,0,"",python,selection_mouse +367,1552462,"genie.py",5884,11,"lam_outputs",python,selection_mouse +368,1553990,"genie.py",6009,0,"",python,selection_mouse +369,1554178,"genie.py",6003,16,"action_indices_E",python,selection_mouse +370,1554938,"genie.py",5964,0,"",python,selection_mouse +371,1555115,"genie.py",5959,10,"z_q_BTm11L",python,selection_mouse +372,1558722,"genie.py",5977,0,"",python,selection_mouse +373,1564680,"genie.py",6259,0,"",python,selection_mouse +374,1564847,"genie.py",6257,4,"dict",python,selection_mouse +375,1570027,"genie.py",6479,0,"",python,selection_mouse +376,1574353,"genie.py",0,0,"",python,tab +377,1574354,"genie.py",3939,0,"",python,selection_mouse +378,1574662,"models/dynamics.py",0,0,"",python,tab +379,1574663,"models/dynamics.py",140,0,"",python,selection_command +380,1579079,"models/dynamics.py",2158,0,"",python,selection_mouse +381,1579778,"models/dynamics.py",2211,0,"",python,selection_mouse +382,1579931,"models/dynamics.py",2205,21,"latent_actions_BTm11L",python,selection_mouse +383,1583221,"models/dynamics.py",3175,0,"",python,selection_mouse +384,1583389,"models/dynamics.py",3169,9,"action_up",python,selection_mouse +385,1590542,"models/dynamics.py",2832,0,"",python,selection_mouse +386,1591506,"models/dynamics.py",2167,0,"",python,selection_mouse +387,1591680,"models/dynamics.py",2156,16,"video_tokens_BTN",python,selection_mouse +388,1592235,"models/dynamics.py",2270,0,"",python,selection_mouse +389,1592404,"models/dynamics.py",2261,14,"vid_embed_BTNM",python,selection_mouse +390,1592810,"models/dynamics.py",2216,0,"",python,selection_mouse +391,1592962,"models/dynamics.py",2205,21,"latent_actions_BTm11L",python,selection_mouse +392,1596130,"genie.py",0,0,"",python,tab +393,1600200,"genie.py",5350,0,"",python,selection_mouse +394,1600206,"genie.py",5349,0,"",python,selection_command +395,1602024,"genie.py",3140,0,"",python,selection_command +396,1602995,"genie.py",3195,0,"",python,selection_command +397,1603347,"genie.py",3194,0,"",python,selection_command +398,1605822,"genie.py",3194,0,"latent_action_dim",python,content +399,1606409,"genie.py",3210,0,"",python,selection_command +400,1607172,"genie.py",3194,0,"s",python,content +401,1607173,"genie.py",3195,0,"",python,selection_keyboard +402,1607340,"genie.py",3195,0,"e",python,content +403,1607342,"genie.py",3196,0,"",python,selection_keyboard +404,1607377,"genie.py",3196,0,"l",python,content +405,1607378,"genie.py",3197,0,"",python,selection_keyboard +406,1607537,"genie.py",3197,0,"f",python,content +407,1607539,"genie.py",3198,0,"",python,selection_keyboard +408,1607593,"genie.py",3198,0,".",python,content +409,1607594,"genie.py",3199,0,"",python,selection_keyboard +410,1608804,"genie.py",3216,0,",",python,content +411,1608806,"genie.py",3217,0,"",python,selection_keyboard +412,1610301,"genie.py",3216,1,"",python,content +413,1611421,"genie.py",3192,0,"",python,selection_mouse +414,1614689,"genie.py",3309,0,"",python,selection_mouse +415,1615627,"genie.py",3231,0,"",python,selection_mouse +416,1616303,"genie.py",3193,0,"",python,selection_mouse 
+417,1618325,"genie.py",3216,0,"",python,selection_mouse +418,1619037,"genie.py",3216,0,",",python,content +419,1619038,"genie.py",3217,0,"",python,selection_keyboard +420,1619098,"genie.py",3217,0," ",python,content +421,1619099,"genie.py",3218,0,"",python,selection_keyboard +422,1619343,"genie.py",3218,0,"r",python,content +423,1619344,"genie.py",3219,0,"",python,selection_keyboard +424,1619480,"genie.py",3219,0,"n",python,content +425,1619482,"genie.py",3220,0,"",python,selection_keyboard +426,1619561,"genie.py",3220,0,"g",python,content +427,1619563,"genie.py",3221,0,"",python,selection_keyboard +428,1623282,"genie.py",3221,0,"s",python,content +429,1623284,"genie.py",3222,0,"",python,selection_keyboard +430,1626551,"genie.py",3218,4,"rngs=",python,content +431,1627500,"genie.py",3223,0,"r",python,content +432,1627502,"genie.py",3224,0,"",python,selection_keyboard +433,1629128,"genie.py",3224,0,"n",python,content +434,1629130,"genie.py",3225,0,"",python,selection_keyboard +435,1629327,"genie.py",3225,0,"g",python,content +436,1629328,"genie.py",3226,0,"",python,selection_keyboard +437,1629481,"genie.py",3226,0,"s",python,content +438,1629483,"genie.py",3227,0,"",python,selection_keyboard +439,1631930,"genie.py",3242,0,"",python,selection_mouse +440,1633012,"genie.py",3194,0,"",python,selection_mouse +441,1633469,"genie.py",3194,0,"\n ",python,content +442,1636680,"genie.py",3231,0,"",python,selection_mouse +443,1637011,"genie.py",3231,0,"\n ",python,content +444,1638524,"genie.py",3207,0,"",python,selection_mouse +445,1638683,"genie.py",3207,39,"self.latent_action_dim, \n rn",python,selection_mouse +446,1638699,"genie.py",3207,41,"self.latent_action_dim, \n rngs",python,selection_mouse +447,1638719,"genie.py",3207,43,"self.latent_action_dim, \n rngs=r",python,selection_mouse +448,1638729,"genie.py",3207,45,"self.latent_action_dim, \n rngs=rng",python,selection_mouse +449,1638745,"genie.py",3207,46,"self.latent_action_dim, \n rngs=rngs",python,selection_mouse +450,1638786,"genie.py",3207,47,"self.latent_action_dim, \n rngs=rngs)",python,selection_mouse +451,1641605,"genie.py",3170,0,"",python,selection_mouse +452,1641916,"genie.py",3170,0,"\n ",python,content +453,1643818,"genie.py",3224,0,"",python,selection_mouse +454,1644758,"genie.py",3220,4,"",python,content +455,1644890,"genie.py",3216,4,"",python,content +456,1645228,"genie.py",3212,4,"",python,content +457,1645560,"genie.py",3211,1,"",python,content +458,1645824,"genie.py",3211,0,"\n ",python,content +459,1647107,"genie.py",3264,0,"",python,selection_mouse +460,1647867,"genie.py",3265,0,"",python,selection_command +461,1648111,"genie.py",3261,4,"",python,content +462,1648245,"genie.py",3257,4,"",python,content +463,1648539,"genie.py",3253,4,"",python,content +464,1648863,"genie.py",3252,1,"",python,content +465,1649185,"genie.py",3252,0,"\n ",python,content +466,1650055,"genie.py",3279,0,"",python,selection_mouse +467,1653856,"genie.py",3146,0,"",python,selection_mouse +468,1653972,"genie.py",3145,12,"action_embed",python,selection_mouse +469,1659927,"genie.py",3120,0,"",python,selection_mouse +470,1660569,"genie.py",3119,0,"",python,selection_command +471,1661385,"genie.py",3100,28,"",python,content +472,1661402,"genie.py",3112,0,"",python,selection_command +473,1661898,"genie.py",3155,0,"",python,selection_command +474,1662198,"genie.py",3196,0,"",python,selection_command +475,1662387,"genie.py",3237,0,"",python,selection_command +476,1662657,"genie.py",3251,0,"\n self.lam = None",python,content 
+477,1662657,"genie.py",3264,0,"",python,selection_command +478,1662995,"genie.py",3292,0,"",python,selection_command +479,1663191,"genie.py",3306,0,"",python,selection_command +480,1663671,"genie.py",3348,0,"",python,selection_command +481,1663673,"genie.py",3384,0,"",python,selection_command +482,1663711,"genie.py",3424,0,"",python,selection_command +483,1663744,"genie.py",3466,0,"",python,selection_command +484,1663777,"genie.py",3516,0,"",python,selection_command +485,1663812,"genie.py",3569,0,"",python,selection_command +486,1663845,"genie.py",3617,0,"",python,selection_command +487,1663876,"genie.py",3665,0,"",python,selection_command +488,1664030,"genie.py",3711,0,"",python,selection_command +489,1664304,"genie.py",3740,0,"",python,selection_command +490,1664425,"genie.py",3778,0,"",python,selection_command +491,1664591,"genie.py",3824,0,"",python,selection_command +492,1664740,"genie.py",3858,0,"",python,selection_command +493,1664888,"genie.py",3920,0,"",python,selection_command +494,1665052,"genie.py",3947,0,"",python,selection_command +495,1665295,"genie.py",3948,0,"\n self.lam = None",python,content +496,1665296,"genie.py",3961,0,"",python,selection_command +497,1665791,"genie.py",3961,1,"",python,content +498,1666029,"genie.py",3961,0,"e",python,content +499,1666031,"genie.py",3962,0,"",python,selection_keyboard +500,1666121,"genie.py",3962,0,"l",python,content +501,1666123,"genie.py",3963,0,"",python,selection_keyboard +502,1666233,"genie.py",3963,0,"f",python,content +503,1666234,"genie.py",3964,0,"",python,selection_keyboard +504,1666334,"genie.py",3964,0,".",python,content +505,1666336,"genie.py",3965,0,"",python,selection_keyboard +506,1666829,"genie.py",3964,1,"",python,content +507,1667341,"genie.py",3964,0,".",python,content +508,1667703,"genie.py",3961,4,"",python,content +509,1669637,"genie.py",3961,0,"s",python,content +510,1669639,"genie.py",3962,0,"",python,selection_keyboard +511,1671275,"genie.py",3966,3,"",python,content +512,1671553,"genie.py",3966,0,"a",python,content +513,1671554,"genie.py",3967,0,"",python,selection_keyboard +514,1671689,"genie.py",3967,0,"c",python,content +515,1671690,"genie.py",3968,0,"",python,selection_keyboard +516,1671898,"genie.py",3968,0,"t",python,content +517,1671900,"genie.py",3969,0,"",python,selection_keyboard +518,1672025,"genie.py",3969,0,"i",python,content +519,1672026,"genie.py",3970,0,"",python,selection_keyboard +520,1672091,"genie.py",3970,0,"o",python,content +521,1672092,"genie.py",3971,0,"",python,selection_keyboard +522,1672969,"genie.py",3971,0,"n",python,content +523,1672971,"genie.py",3972,0,"",python,selection_keyboard +524,1673230,"genie.py",3972,0,"_",python,content +525,1673231,"genie.py",3973,0,"",python,selection_keyboard +526,1673822,"genie.py",3966,7,"action_embed",python,content +527,1675658,"genie.py",3934,0,"",python,selection_mouse +528,1676227,"genie.py",3985,0,"",python,selection_mouse +529,1682957,"genie.py",3984,0,"",python,selection_command +530,1687910,"genie.py",3241,0,"",python,selection_command +531,1690742,"genie.py",5731,0,"",python,selection_command +532,1691828,"genie.py",5732,0,"",python,selection_command +533,1692326,"genie.py",5733,0,"",python,selection_command +534,1692344,"genie.py",5734,0,"",python,selection_command +535,1692384,"genie.py",5735,0,"",python,selection_command +536,1692417,"genie.py",5736,0,"",python,selection_command +537,1692456,"genie.py",5737,0,"",python,selection_command +538,1692496,"genie.py",5738,0,"",python,selection_command 
+539,1692530,"genie.py",5739,0,"",python,selection_command +540,1692531,"genie.py",5740,0,"",python,selection_command +541,1692564,"genie.py",5741,0,"",python,selection_command +542,1692598,"genie.py",5742,0,"",python,selection_command +543,1692632,"genie.py",5743,0,"",python,selection_command +544,1692665,"genie.py",5744,0,"",python,selection_command +545,1692701,"genie.py",5745,0,"",python,selection_command +546,1692705,"genie.py",5746,0,"",python,selection_command +547,1692741,"genie.py",5747,0,"",python,selection_command +548,1692775,"genie.py",5748,0,"",python,selection_command +549,1692809,"genie.py",5749,0,"",python,selection_command +550,1692839,"genie.py",5750,0,"",python,selection_command +551,1692874,"genie.py",5751,0,"",python,selection_command +552,1692908,"genie.py",5752,0,"",python,selection_command +553,1692939,"genie.py",5753,0,"",python,selection_command +554,1692976,"genie.py",5754,0,"",python,selection_command +555,1693005,"genie.py",5755,0,"",python,selection_command +556,1693038,"genie.py",5756,0,"",python,selection_command +557,1693086,"genie.py",5757,0,"",python,selection_command +558,1693088,"genie.py",5758,0,"",python,selection_command +559,1693098,"genie.py",5759,0,"",python,selection_command +560,1693406,"genie.py",5758,0,"",python,selection_command +561,1693561,"genie.py",5757,0,"",python,selection_command +562,1693716,"genie.py",5756,0,"",python,selection_command +563,1693870,"genie.py",5755,0,"",python,selection_command +564,1693994,"genie.py",5754,0,"",python,selection_command +565,1694180,"genie.py",5753,0,"",python,selection_command +566,1696266,"genie.py",5753,0,"\n ",python,content +567,1697513,"genie.py",5765,0,"",python,selection_command +568,1699419,"genie.py",5730,0,"",python,selection_command +569,1699745,"genie.py",5753,0,"",python,selection_command +570,1702465,"genie.py",5753,0,"s",python,content +571,1702466,"genie.py",5754,0,"",python,selection_keyboard +572,1702662,"genie.py",5754,0,"e",python,content +573,1702664,"genie.py",5755,0,"",python,selection_keyboard +574,1703348,"genie.py",5755,0,"f",python,content +575,1703350,"genie.py",5756,0,"",python,selection_keyboard +576,1703784,"genie.py",5755,1,"",python,content +577,1703964,"genie.py",5755,0,"l",python,content +578,1703966,"genie.py",5756,0,"",python,selection_keyboard +579,1704058,"genie.py",5756,0,"f",python,content +580,1704059,"genie.py",5757,0,"",python,selection_keyboard +581,1704196,"genie.py",5757,0,".",python,content +582,1704196,"genie.py",5758,0,"",python,selection_keyboard +583,1705108,"genie.py",5758,0,"a",python,content +584,1705109,"genie.py",5759,0,"",python,selection_keyboard +585,1705806,"genie.py",5758,1,"action_embed",python,content +586,1706531,"genie.py",5770,0,"()",python,content +587,1706532,"genie.py",5771,0,"",python,selection_keyboard +588,1709050,"genie.py",5771,0,"b",python,content +589,1709052,"genie.py",5772,0,"",python,selection_keyboard +590,1709153,"genie.py",5772,0,"a",python,content +591,1709155,"genie.py",5773,0,"",python,selection_keyboard +592,1709312,"genie.py",5773,0,"t",python,content +593,1709314,"genie.py",5774,0,"",python,selection_keyboard +594,1709473,"genie.py",5774,0,"c",python,content +595,1709475,"genie.py",5775,0,"",python,selection_keyboard +596,1709593,"genie.py",5775,0,"h",python,content +597,1709595,"genie.py",5776,0,"",python,selection_keyboard +598,1709931,"genie.py",5776,0,"_",python,content +599,1709933,"genie.py",5777,0,"",python,selection_keyboard +600,1710636,"genie.py",5777,0,"a",python,content 
+601,1710638,"genie.py",5778,0,"",python,selection_keyboard +602,1711022,"genie.py",5777,1,"",python,content +603,1711345,"genie.py",5777,0,"§",python,content +604,1711347,"genie.py",5778,0,"",python,selection_keyboard +605,1711653,"genie.py",5778,0,"a",python,content +606,1711654,"genie.py",5779,0,"",python,selection_keyboard +607,1711947,"genie.py",5778,1,"",python,content +608,1712084,"genie.py",5777,1,"",python,content +609,1712700,"genie.py",5777,0,"""",python,content +610,1712701,"genie.py",5778,0,"",python,selection_keyboard +611,1712989,"genie.py",5778,0,"a",python,content +612,1712991,"genie.py",5779,0,"",python,selection_keyboard +613,1713169,"genie.py",5779,0,"c",python,content +614,1713170,"genie.py",5780,0,"",python,selection_keyboard +615,1713359,"genie.py",5780,0,"t",python,content +616,1713361,"genie.py",5781,0,"",python,selection_keyboard +617,1713478,"genie.py",5781,0,"i",python,content +618,1713480,"genie.py",5782,0,"",python,selection_keyboard +619,1713535,"genie.py",5782,0,"o",python,content +620,1713536,"genie.py",5783,0,"",python,selection_keyboard +621,1713712,"genie.py",5783,0,"n",python,content +622,1713713,"genie.py",5784,0,"",python,selection_keyboard +623,1713828,"genie.py",5784,0,"s",python,content +624,1713830,"genie.py",5785,0,"",python,selection_keyboard +625,1714496,"genie.py",5785,0,"""",python,content +626,1714497,"genie.py",5786,0,"",python,selection_keyboard +627,1715382,"genie.py",5786,0,"]",python,content +628,1715384,"genie.py",5787,0,"",python,selection_keyboard +629,1716381,"genie.py",5777,0,"",python,selection_command +630,1716618,"genie.py",5776,1,"",python,content +631,1717362,"genie.py",5776,0,"[]",python,content +632,1717363,"genie.py",5777,0,"",python,selection_keyboard +633,1718416,"genie.py",5777,1,"",python,content +634,1719688,"genie.py",5816,0,"",python,selection_mouse +635,1720097,"genie.py",5815,0,"",python,selection_command +636,1723141,"genie.py",5999,0,"",python,selection_mouse +637,1723824,"genie.py",5816,0,"",python,selection_mouse +638,1723830,"genie.py",5815,0,"",python,selection_command +639,1724774,"genie.py",5816,0,"",python,selection_mouse +640,1724810,"genie.py",5815,0,"",python,selection_command +641,1725254,"genie.py",5912,0,"",python,selection_mouse +642,1725411,"genie.py",5909,5,"batch",python,selection_mouse +643,1725899,"genie.py",5901,0,"",python,selection_mouse +644,1726079,"genie.py",5900,7,"reshape",python,selection_mouse +645,1726225,"genie.py",5900,8,"reshape(",python,selection_mouse +646,1726259,"genie.py",5900,9,"reshape(*",python,selection_mouse +647,1726260,"genie.py",5900,14,"reshape(*batch",python,selection_mouse +648,1726335,"genie.py",5900,15,"reshape(*batch[",python,selection_mouse +649,1727207,"genie.py",5899,0,"",python,selection_mouse +650,1729076,"genie.py",5898,1,"",python,content +651,1729321,"genie.py",5886,12,"",python,content +652,1729596,"genie.py",5885,1,"",python,content +653,1729996,"genie.py",5868,17,"",python,content +654,1730329,"genie.py",5867,1,"",python,content +655,1730493,"genie.py",5863,4,"",python,content +656,1730611,"genie.py",5862,1,"",python,content +657,1730765,"genie.py",5851,11,"",python,content +658,1730973,"genie.py",5847,4,"",python,content +659,1731166,"genie.py",5840,7,"",python,content +660,1731354,"genie.py",5838,2,"",python,content +661,1731522,"genie.py",5833,5,"",python,content +662,1731655,"genie.py",5817,16,"",python,content +663,1731883,"genie.py",5816,1,"",python,content +664,1732242,"genie.py",5815,1,"",python,content 
+665,1732466,"genie.py",5808,7,"",python,content +666,1732812,"genie.py",5807,1,"",python,content +667,1733193,"genie.py",5805,2,"",python,content +668,1733571,"genie.py",5804,1,"",python,content +669,1733894,"genie.py",5801,3,"",python,content +670,1735688,"genie.py",5797,4,"",python,content +671,1736085,"genie.py",5793,4,"",python,content +672,1736593,"genie.py",5789,4,"",python,content +673,1737529,"genie.py",5788,1,"",python,content +674,1742467,"genie.py",5958,0,"",python,selection_mouse +675,1744345,"genie.py",5765,0,"",python,selection_mouse +676,1745067,"genie.py",5671,0,"",python,selection_mouse +677,1745191,"genie.py",5667,14,"use_gt_actions",python,selection_mouse +678,1758997,"genie.py",5895,0,"",python,selection_mouse +679,1760055,"genie.py",5898,0,"",python,selection_mouse +680,1766597,"genie.py",9490,8,"one_hot(",python,selection_command +681,1768762,"genie.py",9581,0,"",python,selection_mouse +682,1768903,"genie.py",9580,1,")",python,selection_mouse +683,1768986,"genie.py",9512,69," batch[""actions""], num_classes=self.latent_action_dim\n )",python,selection_mouse +684,1769012,"genie.py",9514,67," batch[""actions""], num_classes=self.latent_action_dim\n )",python,selection_mouse +685,1769018,"genie.py",9515,66,"batch[""actions""], num_classes=self.latent_action_dim\n )",python,selection_mouse +686,1769043,"genie.py",9469,112,"ctions_BT1L = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,selection_mouse +687,1769053,"genie.py",9473,108,"ns_BT1L = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,selection_mouse +688,1769075,"genie.py",9477,104,"T1L = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,selection_mouse +689,1769087,"genie.py",9478,103,"1L = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,selection_mouse +690,1769110,"genie.py",9480,101," = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,selection_mouse +691,1769312,"genie.py",9481,100,"= jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,selection_mouse +692,1769338,"genie.py",9448,133,"\n latent_actions_BT1L = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,selection_mouse +693,1769451,"genie.py",9483,98,"jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,selection_mouse +694,1781694,"genie.py",7488,0,"",python,selection_mouse +695,1782122,"genie.py",7487,0,"",python,selection_command +696,1783222,"genie.py",3241,0,"",python,selection_command +697,1784244,"genie.py",5659,0,"",python,selection_command +698,1785964,"genie.py",5753,0,"",python,selection_mouse +699,1786149,"genie.py",5753,1,"s",python,selection_mouse +700,1786150,"genie.py",5753,3,"sel",python,selection_mouse +701,1786189,"genie.py",5753,5,"self.",python,selection_mouse +702,1786225,"genie.py",5753,9,"self.acti",python,selection_mouse +703,1786226,"genie.py",5753,12,"self.action_",python,selection_mouse +704,1786226,"genie.py",5753,14,"self.action_em",python,selection_mouse +705,1786261,"genie.py",5753,15,"self.action_emb",python,selection_mouse +706,1786296,"genie.py",5753,18,"self.action_embed(",python,selection_mouse +707,1786329,"genie.py",5753,19,"self.action_embed(b",python,selection_mouse +708,1786361,"genie.py",5753,20,"self.action_embed(ba",python,selection_mouse +709,1786393,"genie.py",5753,21,"self.action_embed(bat",python,selection_mouse 
+710,1786394,"genie.py",5753,22,"self.action_embed(batc",python,selection_mouse +711,1786707,"genie.py",5753,23,"self.action_embed(batch",python,selection_mouse +712,1786741,"genie.py",5718,35,"\n latent_actions_BT1L = ",python,selection_mouse +713,1787104,"genie.py",5753,39,"self.action_embed(batch[""actions""]).res",python,selection_mouse +714,1787185,"genie.py",5753,38,"self.action_embed(batch[""actions""]).re",python,selection_mouse +715,1787214,"genie.py",5753,37,"self.action_embed(batch[""actions""]).r",python,selection_mouse +716,1787293,"genie.py",5753,162,"self.action_embed(batch[""actions""]).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]",python,selection_mouse +717,1787745,"genie.py",5753,36,"self.action_embed(batch[""actions""]).",python,selection_mouse +718,1787873,"genie.py",5753,35,"self.action_embed(batch[""actions""])",python,selection_mouse +719,1795968,"genie.py",0,0,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if 
self.use_gt_actions:\n self.action_embed = nnx.Embed(\n self.latent_action_dim, \n self.latent_action_dim, \n rngs=rngs)\n self.lam = None\n else:\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_embed = None\n if self.dyna_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n training: bool = True,\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n if self.use_gt_actions:\n action_indices_E = None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n else:\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=latent_actions_BTm11L,\n )\n outputs[""mask_rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, maskgit_steps, temperature, sample_argmax\n )\n elif self.dyna_type == ""causal"":\n return self.sample_causal(batch, seq_len, temperature, sample_argmax)\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n 
temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, self.num_patch_latents)\n )\n if self.use_gt_actions:\n latent_actions_BT1L = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n ).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n 
vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized 
once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n\n if self.use_gt_actions:\n latent_actions_BT1L = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n ).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_causal = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n final_logits_BTNp1V = (\n dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp1V[:, step_t, step_n, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, 
final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, step_n].set(final_logits_BV)\n\n new_carry = (rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = 
tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # Reinitialize the optimizer states\n optimizer = nnx.Optimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +720,1795970,"genie.py",17959,98,"self.action_embed(batch[""actions""])",python,content +721,1795970,"genie.py",9483,98,"self.action_embed(batch[""actions""])",python,content +722,1796135,"genie.py",9449,0,"",python,selection_command +723,1800068,"genie.py",0,0,"",python,tab +724,1804192,"genie.py",0,0,"",python,tab +725,1804193,"genie.py",17959,98,"self.action_embed(batch[""actions""])",python,content +726,1804193,"genie.py",9483,98,"self.action_embed(batch[""actions""])",python,content +727,1804295,"genie.py",17896,35,"jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,content +728,1804295,"genie.py",9483,35,"jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,content +729,1804296,"genie.py",17959,98,"self.action_embed(batch[""actions""])",python,content +730,1804296,"genie.py",9483,98,"self.action_embed(batch[""actions""])",python,content +731,1811527,"genie.py",17896,35,"jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,content +732,1811528,"genie.py",9483,35,"jax.nn.one_hot(\n 
batch[""actions""], num_classes=self.latent_action_dim\n )",python,content +733,1811528,"genie.py",17959,98,"self.action_embed(batch[""actions""])",python,content +734,1811529,"genie.py",17959,35,"jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n )",python,content +735,1811529,"genie.py",17959,98,"self.action_embed(batch[""actions""])",python,content +736,1811603,"genie.py",0,27654,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.use_gt_actions:\n self.action_embed = nnx.Embed(\n self.latent_action_dim, \n self.latent_action_dim, \n rngs=rngs)\n self.lam = None\n else:\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n 
rngs=rngs,\n )\n self.action_embed = None\n if self.dyna_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n training: bool = True,\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n if self.use_gt_actions:\n action_indices_E = None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n else:\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=latent_actions_BTm11L,\n )\n outputs[""mask_rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, maskgit_steps, temperature, sample_argmax\n )\n elif self.dyna_type == ""causal"":\n return self.sample_causal(batch, seq_len, temperature, sample_argmax)\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal 
causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, self.num_patch_latents)\n )\n if self.use_gt_actions:\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n 
jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) 
frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n\n if self.use_gt_actions:\n latent_actions_BT1L = jax.nn.one_hot(\n batch[""actions""], num_classes=self.latent_action_dim\n ).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_causal = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n final_logits_BTNp1V = (\n dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp1V[:, step_t, step_n, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, step_n].set(final_logits_BV)\n\n new_carry = (rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = 
jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n 
)\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # Reinitialize the optimizer states\n optimizer = nnx.Optimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,content +737,1811604,"genie.py",17896,98,"self.action_embed(batch[""actions""])",python,content +738,1813207,"genie.py",0,27591,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_latent_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_latent_actions = num_latent_actions\n self.lam_patch_size = lam_patch_size\n 
self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.use_gt_actions:\n self.action_embed = nnx.Embed(\n self.latent_action_dim, \n self.latent_action_dim, \n rngs=rngs)\n self.lam = None\n else:\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_embed = None\n if self.dyna_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n training: bool = True,\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n if self.use_gt_actions:\n action_indices_E = None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n else:\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=latent_actions_BTm11L,\n )\n outputs[""mask_rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = 
self.dynamics(outputs, training)\n outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, maskgit_steps, temperature, sample_argmax\n )\n elif self.dyna_type == ""causal"":\n return self.sample_causal(batch, seq_len, temperature, sample_argmax)\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, self.num_patch_latents)\n )\n if self.use_gt_actions:\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n 
num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = 
final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n\n if self.use_gt_actions:\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(*batch[""actions""].shape[:2], 1, self.latent_action_dim)\n latent_actions_BTm11L = latent_actions_BT1L[:, 1:]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_causal = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n 
rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n final_logits_BTNp1V = (\n dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp1V[:, step_t, step_n, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, step_n].set(final_logits_BV)\n\n new_carry = (rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.Optimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.Optimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n 
tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.Optimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.Optimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # Reinitialize the optimizer states\n optimizer = nnx.Optimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,content +739,1815863,"genie.py",0,0,"",python,tab +740,1819452,"TERMINAL",0,0,"salloc --time=05:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 
--cpus-per-task=8",,terminal_command +741,1819499,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3507624\r\nsalloc: job 3507624 queued and waiting for resources\r\n",,terminal_output +742,1820927,"TERMINAL",0,0,"^Csalloc: Job allocation 3507624 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +743,1822157,"TERMINAL",0,0,"idling",,terminal_command +744,1822222,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1990.localdomain: Sun Sep 21 12:50:43 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 118 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated: 11 nodes idle\rPartition dev_accelerated-h100 :\t 1 nodes idle\rPartition accelerated-h100:\t 1 nodes idle\rPartition large:\t 4 nodes idle\rPartition accelerated-h200:\t 4 nodes idle",,terminal_output +745,1823259,"TERMINAL",0,0,"4",,terminal_output +746,1823511,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +747,1827085,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +748,1827151,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3507625\r\n",,terminal_output +749,1827267,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +750,1854371,"TERMINAL",0,0,"salloc: Nodes hkn0402 are ready for job\r\n",,terminal_output +751,1855699,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h[tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +752,1858109,"TERMINAL",0,0,"s",,terminal_output +753,1858245,"TERMINAL",0,0,"o",,terminal_output +754,1858302,"TERMINAL",0,0,"u",,terminal_output +755,1858349,"TERMINAL",0,0,"r",,terminal_output +756,1858574,"TERMINAL",0,0,"c",,terminal_output +757,1858674,"TERMINAL",0,0,"e",,terminal_output +758,1858738,"TERMINAL",0,0," ",,terminal_output +759,1858841,"TERMINAL",0,0,".",,terminal_output +760,1858967,"TERMINAL",0,0,"v",,terminal_output +761,1859098,"TERMINAL",0,0,"env/",,terminal_output +762,1859277,"TERMINAL",0,0,"b",,terminal_output +763,1859362,"TERMINAL",0,0,"in/",,terminal_output +764,1859567,"TERMINAL",0,0,"a",,terminal_output +765,1859664,"TERMINAL",0,0,"c",,terminal_output +766,1859762,"TERMINAL",0,0,"\r\n[?2004l\rbash: .venv/bin/ac: No such file or directory\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h[tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +767,1860739,"TERMINAL",0,0,"source .venv/bin/ac",,terminal_output +768,1860938,"TERMINAL",0,0,"tivate",,terminal_output +769,1861431,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +770,1880594,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu_gt_actions.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\n#SBATCH --job-name=train_lam_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\n# 
array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test/train\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3498707\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3498707\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=50 \\n --log_checkpoint_interval=2 \\n --dyna_type=maskgit \\n --log \\n --name=coinrun-dyn-dev-gt-actions-$slurm_job_id \\n --tags dyn coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 10 \\n --use_gt_actions \\n --data_dir $array_records_dir_train \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --val_data_dir $array_records_dir_val \\n --val_interval 2 \\n --val_steps 5\n",shellscript,tab +771,1881899,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu_gt_actions.sh",2118,0,"",shellscript,selection_mouse +772,1882453,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu_gt_actions.sh",2095,0,"",shellscript,selection_mouse +773,1882602,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu_gt_actions.sh",2087,14,"use_gt_actions",shellscript,selection_mouse +774,1883437,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu_gt_actions.sh",1941,0,"",shellscript,selection_mouse +775,1889959,"genie.py",0,0,"",python,tab +776,1891640,"genie.py",6187,0,"",python,selection_mouse +777,1891675,"genie.py",6186,0,"",python,selection_command +778,1892327,"genie.py",6296,0,"",python,selection_mouse +779,1892331,"genie.py",6295,0,"",python,selection_command +780,1892981,"genie.py",6027,0,"",python,selection_mouse +781,1893721,"genie.py",5872,0,"",python,selection_mouse +782,1893869,"genie.py",5865,21,"latent_actions_BTm11L",python,selection_mouse +783,1894797,"genie.py",6187,0,"",python,selection_mouse +784,1894803,"genie.py",6186,0,"",python,selection_command +785,1915849,"TERMINAL",0,0,"s",,terminal_output +786,1915946,"TERMINAL",0,0,"h",,terminal_output +787,1916019,"TERMINAL",0,0," ",,terminal_output +788,1916344,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu_gt_actions.sh",,terminal_output +789,1916824,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu_gt_actions.sh\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dyn/%x_%j.log\r\n#SBATCH 
--job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test/train\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3498707\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3498707\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=50 \\r\n --log_checkpoint_interval=2 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=coinrun-dyn-dev-gt-actions-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 10 \\r\n --use_gt_actions \\r\n --data_dir $array_records_dir_train \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 2 \\r\n --val_steps 5\r\n",,terminal_output 
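The script echoed above passes --use_gt_actions, which (per the Genie.__init__ captured earlier in this recording) swaps the latent action model for an nnx.Embed over ground-truth action ids. A minimal sketch of that embedding path, assuming the recorded shapes and keeping latent_action_dim as the embedding vocabulary size, exactly as in the recorded constructor call:

import jax.numpy as jnp
from flax import nnx

latent_action_dim = 32  # matches Args.latent_action_dim in the recording
action_embed = nnx.Embed(latent_action_dim, latent_action_dim, rngs=nnx.Rngs(0))

actions_BT = jnp.zeros((2, 16), dtype=jnp.int32)  # hypothetical batch of discrete action ids
latent_actions_BT1L = action_embed(actions_BT).reshape(*actions_BT.shape[:2], 1, latent_action_dim)
latent_actions_BTm11L = latent_actions_BT1L[:, 1:]  # drop frame 0, as in Genie.__call__
assert latent_actions_BTm11L.shape == (2, 15, 1, latent_action_dim)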
+790,1921969,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3967626\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1758451848\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1758455448\r\nSLURM_PMI2_SRUN_PORT=44231\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3507625\r\nSLURM_PTY_PORT=41565\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=47\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=179\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=41415\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3507625\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=41415\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\nGpuFreq=control_disabled\r\n",,terminal_output +791,1937193,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +792,1943013,"TERMINAL",0,0,"Counting all components: ['action_embed', 'dynamics', 'tokenizer']\r\n",,terminal_output +793,1943443,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +794,1944533,"TERMINAL",0,0,"wandb: creating run\r\n",,terminal_output +795,1944894,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.21.3\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250921_125244-n8or1qrr\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-dyn-dev-gt-actions-3507625\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/n8or1qrr\r\n",,terminal_output +796,1945741,"TERMINAL",0,0,"Parameter counts:\r\n{'action_embed': 1024, 'dynamics': 26555904, 'tokenizer': 33750256, 'total': 60307184}\r\n",,terminal_output +797,1947595,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +798,1959088,"TERMINAL",0,0,"2025-09-21 12:53:00.265271: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +799,1996193,"TERMINAL",0,0,"Total memory size: 24.9 GB, Output size: 0.7 GB, Temp size: 24.1 GB, Argument size: 0.7 GB, Host temp size: 0.0 GB.\r\nFLOPs: 5.185e+12, Bytes: 7.115e+11 (662.6 GB), Intensity: 7.3 FLOPs/byte\r\nStarting training from step 0...\r\n",,terminal_output +800,1996571,"TERMINAL",0,0,"\r\nMemstats: After params initialized:\r\n\tUsing (GB) 0.94 / 38.7 (2.428941%) on cuda:0\r\n",,terminal_output +801,1998172,"TERMINAL",0,0,"Step 0, loss: 11.79269027709961\r\n",,terminal_output +802,2021686,"TERMINAL",0,0,"Step 1, loss: 6.164367198944092\r\nCalculating validation metrics...\r\n",,terminal_output +803,2042685,"TERMINAL",0,0,"Step 2, validation loss: 7.849388122558594\r\n",,terminal_output +804,2064968,"TERMINAL",0,0,"Saved checkpoint at step 2\r\n",,terminal_output +805,2088044,"TERMINAL",0,0,"Step 2, loss: 11.39675521850586\r\n",,terminal_output +806,2089518,"TERMINAL",0,0,"Step 3, loss: 15.968230247497559\r\nCalculating validation metrics...\r\n",,terminal_output +807,2093778,"TERMINAL",0,0,"Step 4, validation loss: 10.428061485290527\r\n",,terminal_output +808,2094088,"TERMINAL",0,0,"Saved checkpoint at step 4\r\n",,terminal_output +809,2095823,"TERMINAL",0,0,"Step 4, loss: 13.61802864074707\r\n",,terminal_output +810,2099102,"TERMINAL",0,0,"Step 5, loss: 12.104687690734863\r\nCalculating validation metrics...\r\n",,terminal_output +811,2103309,"TERMINAL",0,0,"Step 6, validation loss: 6.1460442543029785\r\n",,terminal_output +812,2103815,"TERMINAL",0,0,"Saved checkpoint at step 6\r\n",,terminal_output +813,2105553,"TERMINAL",0,0,"Step 6, loss: 7.249760150909424\r\n",,terminal_output +814,2107188,"TERMINAL",0,0,"Step 7, loss: 7.12235164642334\r\nCalculating validation metrics...\r\n",,terminal_output +815,2111391,"TERMINAL",0,0,"Step 8, validation loss: 5.276025772094727\r\n",,terminal_output +816,2111765,"TERMINAL",0,0,"Saved checkpoint at step 8\r\n",,terminal_output +817,2114051,"TERMINAL",0,0,"Step 8, loss: 6.503427982330322\r\n",,terminal_output +818,2115589,"TERMINAL",0,0,"Step 9, loss: 7.890003681182861\r\nCalculating validation metrics...\r\n",,terminal_output +819,2119793,"TERMINAL",0,0,"Step 10, validation loss: 4.987190246582031\r\n",,terminal_output +820,2120439,"TERMINAL",0,0,"Saved checkpoint at step 10\r\n",,terminal_output +821,2124500,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-dyn-dev-gt-actions-3507625 at: https://wandb.ai/instant-uv/jafar/runs/n8or1qrr\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250921_125244-n8or1qrr/logs\r\n",,terminal_output +822,2126501,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +823,2209494,"TERMINAL",0,0,"g",,terminal_output +824,2209588,"TERMINAL",0,0,"i",,terminal_output +825,2209654,"TERMINAL",0,0,"t",,terminal_output +826,2209717,"TERMINAL",0,0," ",,terminal_output +827,2209872,"TERMINAL",0,0,"c",,terminal_output +828,2209929,"TERMINAL",0,0,"o",,terminal_output +829,2210075,"TERMINAL",0,0,"m",,terminal_output +830,2210231,"TERMINAL",0,0,"m",,terminal_output +831,2210428,"TERMINAL",0,0,"it",,terminal_output +832,2210553,"TERMINAL",0,0," ",,terminal_output +833,2210605,"TERMINAL",0,0,"-",,terminal_output +834,2210765,"TERMINAL",0,0,"m",,terminal_output +835,2211145,"TERMINAL",0,0,"",,terminal_output 
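The run above trains the maskgit dynamics variant, whose sampler (captured earlier in maskgit_step_fn) reveals tokens on a cosine schedule. A worked sketch of that schedule, assuming the default 25 refinement steps and a hypothetical 16 patches per frame:

import jax.numpy as jnp

steps, N = 25, 16  # refinement steps; patches per frame (assumed for illustration)
for step in range(steps):
    unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))
    num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)
    # step 0 reveals ~0 of the 16 tokens; step 24 hits cos(pi/2) = 0 and reveals all 16

Since step_temp = temperature * (1.0 - unmasked_ratio), early steps also sample near-greedily and only the late steps use the full temperature.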
+836,2211384,"TERMINAL",0,0,"a",,terminal_output +837,2211469,"TERMINAL",0,0,"m ",,terminal_output +838,2211712,"TERMINAL",0,0,"""",,terminal_output +839,2214412,"TERMINAL",0,0,"e",,terminal_output +840,2214507,"TERMINAL",0,0,"m",,terminal_output +841,2214673,"TERMINAL",0,0,"b",,terminal_output +842,2214791,"TERMINAL",0,0,"e",,terminal_output +843,2214893,"TERMINAL",0,0,"d",,terminal_output +844,2214995,"TERMINAL",0,0," ",,terminal_output +845,2215223,"TERMINAL",0,0,"i",,terminal_output +846,2215317,"TERMINAL",0,0,"n",,terminal_output +847,2215379,"TERMINAL",0,0,"s",,terminal_output +848,2215566,"TERMINAL",0,0,"e",,terminal_output +849,2215890,"TERMINAL",0,0,"",,terminal_output +850,2216105,"TERMINAL",0,0,"t",,terminal_output +851,2216159,"TERMINAL",0,0,"e",,terminal_output +852,2216383,"TERMINAL",0,0,"a",,terminal_output +853,2216485,"TERMINAL",0,0,"d",,terminal_output +854,2216591,"TERMINAL",0,0," ",,terminal_output +855,2216685,"TERMINAL",0,0,"o",,terminal_output +856,2216750,"TERMINAL",0,0,"f",,terminal_output +857,2216835,"TERMINAL",0,0," ",,terminal_output +858,2217017,"TERMINAL",0,0,"o",,terminal_output +859,2217161,"TERMINAL",0,0,"n",,terminal_output +860,2217223,"TERMINAL",0,0,"e",,terminal_output +861,2217351,"TERMINAL",0,0,"-",,terminal_output +862,2217642,"TERMINAL",0,0,"h",,terminal_output +863,2217813,"TERMINAL",0,0,"o",,terminal_output +864,2217877,"TERMINAL",0,0,"t",,terminal_output +865,2218013,"TERMINAL",0,0," ",,terminal_output +866,2218125,"TERMINAL",0,0,"e",,terminal_output +867,2218227,"TERMINAL",0,0,"n",,terminal_output +868,2218326,"TERMINAL",0,0,"c",,terminal_output +869,2218480,"TERMINAL",0,0,"o",,terminal_output +870,2218576,"TERMINAL",0,0,"d",,terminal_output +871,2218642,"TERMINAL",0,0,"e",,terminal_output +872,2218895,"TERMINAL",0,0,"""",,terminal_output +873,2219140,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +874,2219938,"TERMINAL",0,0,"black....................................................................",,terminal_output +875,2221403,"TERMINAL",0,0,"Failed\r\n- hook id: black\r\n- files were modified by this hook\r\n\r\nreformatted genie.py\r\n\r\nAll done! 
✨ 🍰 ✨\r\n1 file reformatted.\r\n\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +876,2222501,"TERMINAL",0,0,"git commit -am ""embed instead of one-hot encode""",,terminal_output +877,2223839,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +878,2224084,"TERMINAL",0,0,"black....................................................................",,terminal_output +879,2224221,"TERMINAL",0,0,"Passed\r\n",,terminal_output +880,2224423,"TERMINAL",0,0,"[gt-actions 5af365e] embed instead of one-hot encode\r\n 1 file changed, 13 insertions(+), 9 deletions(-)\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +881,2225288,"TERMINAL",0,0,"g",,terminal_output +882,2225449,"TERMINAL",0,0,"i",,terminal_output +883,2225544,"TERMINAL",0,0,"t",,terminal_output +884,2225693,"TERMINAL",0,0," ",,terminal_output +885,2225786,"TERMINAL",0,0,"p",,terminal_output +886,2225967,"TERMINAL",0,0,"u",,terminal_output +887,2226069,"TERMINAL",0,0,"s",,terminal_output +888,2226132,"TERMINAL",0,0,"h",,terminal_output +889,2227224,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +890,2229528,"TERMINAL",0,0,"Enumerating objects: 5, done.\r\nCounting objects: 20% (1/5)\rCounting objects: 40% (2/5)\rCounting objects: 60% (3/5)\rCounting objects: 80% (4/5)\rCounting objects: 100% (5/5)\rCounting objects: 100% (5/5), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 33% (1/3)\rCompressing objects: 66% (2/3)\rCompressing objects: 100% (3/3)\rCompressing objects: 100% (3/3), done.\r\nWriting objects: 33% (1/3)\rWriting objects: 66% (2/3)\rWriting objects: 100% (3/3)\rWriting objects: 100% (3/3), 476 bytes | 476.00 KiB/s, done.\r\nTotal 3 (delta 2), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +891,2229593,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/2)\rremote: Resolving deltas: 50% (1/2)\rremote: Resolving deltas: 100% (2/2)\rremote: Resolving deltas: 100% (2/2), completed with 2 local objects.\r\n",,terminal_output +892,2229896,"TERMINAL",0,0,"To github.com:p-doom/jasmine.git\r\n 8a0085b..5af365e gt-actions -> gt-actions\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +893,2231134,"train_dynamics.py",0,0,"",python,tab +894,2231137,"train_dynamics.py",5048,23,"build_mesh_and_sharding",python,selection_command +895,2235250,"train_dynamics.py",6119,0,"",python,selection_command +896,2236709,"train_dynamics.py",6137,0,"",python,selection_mouse +897,2284655,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +898,2284656,"input_pipeline/generate_coinrun_dataset.py",1475,10,"obs_chunks",python,selection_command +899,2289869,"input_pipeline/generate_coinrun_dataset.py",3226,10,"obs_chunks",python,selection_command +900,2292380,"input_pipeline/generate_coinrun_dataset.py",3162,10,"obs_chunks",python,selection_command +901,2297123,"input_pipeline/generate_coinrun_dataset.py",2738,0,"",python,selection_mouse +902,2298688,"input_pipeline/generate_coinrun_dataset.py",3166,0,"",python,selection_mouse +903,2306856,"input_pipeline/generate_coinrun_dataset.py",3181,0,"",python,selection_mouse +904,2307729,"input_pipeline/generate_coinrun_dataset.py",3162,0,"",python,selection_mouse +905,2331900,"input_pipeline/generate_coinrun_dataset.py",3238,0,"",python,selection_mouse +906,2332472,"input_pipeline/generate_coinrun_dataset.py",3161,0,"",python,selection_mouse 
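The recording selects the name build_mesh_and_sharding in train_dynamics.py without showing its body. Given the Mesh/PartitionSpec/NamedSharding imports at the top of that file, a plausible sketch follows; the one-dimensional data-parallel layout and all names here are assumptions, not the recorded implementation:

import jax
from jax.sharding import Mesh, PartitionSpec, NamedSharding
from jax.experimental.mesh_utils import create_device_mesh

def build_mesh_and_sharding(axis_name: str = "data"):
    # 1-D mesh over all local devices: shard the batch axis, replicate everything else.
    device_mesh = create_device_mesh((jax.device_count(),))
    mesh = Mesh(device_mesh, (axis_name,))
    data_sharding = NamedSharding(mesh, PartitionSpec(axis_name))
    replicated_sharding = NamedSharding(mesh, PartitionSpec())
    return mesh, data_sharding, replicated_sharding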
+907,2333154,"input_pipeline/generate_coinrun_dataset.py",3162,0,"",python,selection_mouse +908,2333960,"input_pipeline/generate_coinrun_dataset.py",3152,0,"",python,selection_mouse +909,2344585,"input_pipeline/generate_coinrun_dataset.py",3239,0,"",python,selection_mouse +910,2345251,"input_pipeline/generate_coinrun_dataset.py",3162,0,"",python,selection_mouse +911,2348399,"input_pipeline/generate_coinrun_dataset.py",3292,0,"",python,selection_mouse +912,2349855,"input_pipeline/generate_coinrun_dataset.py",3164,0,"",python,selection_mouse +913,2350507,"input_pipeline/generate_coinrun_dataset.py",3162,0,"",python,selection_mouse +914,2350825,"input_pipeline/generate_coinrun_dataset.py",3162,10,"",python,content +915,2351981,"input_pipeline/generate_coinrun_dataset.py",3162,1,"",python,content +916,2352180,"input_pipeline/generate_coinrun_dataset.py",3162,1,"",python,content +917,2352584,"input_pipeline/generate_coinrun_dataset.py",3172,0,"",python,selection_command +918,2353752,"input_pipeline/generate_coinrun_dataset.py",3171,0,"",python,selection_command +919,2353952,"input_pipeline/generate_coinrun_dataset.py",3172,0," ",python,content +920,2353953,"input_pipeline/generate_coinrun_dataset.py",3172,0,"",python,selection_command +921,2355261,"input_pipeline/generate_coinrun_dataset.py",3172,1,"",python,content +922,2355263,"input_pipeline/generate_coinrun_dataset.py",3171,0,"",python,selection_command +923,2356119,"input_pipeline/generate_coinrun_dataset.py",3171,0," ",python,content +924,2356121,"input_pipeline/generate_coinrun_dataset.py",3172,0,"",python,selection_keyboard +925,2356269,"input_pipeline/generate_coinrun_dataset.py",3172,0,"o",python,content +926,2356270,"input_pipeline/generate_coinrun_dataset.py",3173,0,"",python,selection_keyboard +927,2356462,"input_pipeline/generate_coinrun_dataset.py",3173,0,"b",python,content +928,2356463,"input_pipeline/generate_coinrun_dataset.py",3174,0,"",python,selection_keyboard +929,2356579,"input_pipeline/generate_coinrun_dataset.py",3174,0,"s",python,content +930,2356581,"input_pipeline/generate_coinrun_dataset.py",3175,0,"",python,selection_keyboard +931,2356892,"input_pipeline/generate_coinrun_dataset.py",3175,0,"_",python,content +932,2356894,"input_pipeline/generate_coinrun_dataset.py",3176,0,"",python,selection_keyboard +933,2357097,"input_pipeline/generate_coinrun_dataset.py",3176,0,"c",python,content +934,2357098,"input_pipeline/generate_coinrun_dataset.py",3177,0,"",python,selection_keyboard +935,2357258,"input_pipeline/generate_coinrun_dataset.py",3177,0,"h",python,content +936,2357260,"input_pipeline/generate_coinrun_dataset.py",3178,0,"",python,selection_keyboard +937,2357381,"input_pipeline/generate_coinrun_dataset.py",3178,0,"u",python,content +938,2357383,"input_pipeline/generate_coinrun_dataset.py",3179,0,"",python,selection_keyboard +939,2357521,"input_pipeline/generate_coinrun_dataset.py",3179,0,"n",python,content +940,2357522,"input_pipeline/generate_coinrun_dataset.py",3180,0,"",python,selection_keyboard +941,2357617,"input_pipeline/generate_coinrun_dataset.py",3180,0,"k",python,content +942,2357619,"input_pipeline/generate_coinrun_dataset.py",3181,0,"",python,selection_keyboard +943,2357749,"input_pipeline/generate_coinrun_dataset.py",3181,0,"s",python,content +944,2357750,"input_pipeline/generate_coinrun_dataset.py",3182,0,"",python,selection_keyboard +945,2357929,"input_pipeline/generate_coinrun_dataset.py",3182,0,",",python,content 
+946,2357929,"input_pipeline/generate_coinrun_dataset.py",3183,0,"",python,selection_keyboard +947,2358429,"input_pipeline/generate_coinrun_dataset.py",3182,0,"",python,selection_command +948,2358794,"input_pipeline/generate_coinrun_dataset.py",3255,0,"",python,selection_command +949,2359589,"input_pipeline/generate_coinrun_dataset.py",3254,0,"",python,selection_command +950,2360053,"input_pipeline/generate_coinrun_dataset.py",3253,0,"",python,selection_command +951,2360069,"input_pipeline/generate_coinrun_dataset.py",3252,0,"",python,selection_command +952,2360101,"input_pipeline/generate_coinrun_dataset.py",3251,0,"",python,selection_command +953,2360132,"input_pipeline/generate_coinrun_dataset.py",3250,0,"",python,selection_command +954,2360166,"input_pipeline/generate_coinrun_dataset.py",3249,0,"",python,selection_command +955,2360199,"input_pipeline/generate_coinrun_dataset.py",3248,0,"",python,selection_command +956,2360233,"input_pipeline/generate_coinrun_dataset.py",3247,0,"",python,selection_command +957,2360265,"input_pipeline/generate_coinrun_dataset.py",3246,0,"",python,selection_command +958,2360332,"input_pipeline/generate_coinrun_dataset.py",3245,0,"",python,selection_command +959,2360333,"input_pipeline/generate_coinrun_dataset.py",3244,0,"",python,selection_command +960,2360364,"input_pipeline/generate_coinrun_dataset.py",3243,0,"",python,selection_command +961,2360396,"input_pipeline/generate_coinrun_dataset.py",3242,0,"",python,selection_command +962,2360429,"input_pipeline/generate_coinrun_dataset.py",3241,0,"",python,selection_command +963,2360461,"input_pipeline/generate_coinrun_dataset.py",3240,0,"",python,selection_command +964,2360493,"input_pipeline/generate_coinrun_dataset.py",3239,0,"",python,selection_command +965,2360525,"input_pipeline/generate_coinrun_dataset.py",3238,0,"",python,selection_command +966,2360556,"input_pipeline/generate_coinrun_dataset.py",3237,0,"",python,selection_command +967,2360588,"input_pipeline/generate_coinrun_dataset.py",3236,0,"",python,selection_command +968,2360589,"input_pipeline/generate_coinrun_dataset.py",3235,0,"",python,selection_command +969,2360625,"input_pipeline/generate_coinrun_dataset.py",3234,0,"",python,selection_command +970,2360654,"input_pipeline/generate_coinrun_dataset.py",3233,0,"",python,selection_command +971,2360684,"input_pipeline/generate_coinrun_dataset.py",3232,0,"",python,selection_command +972,2360827,"input_pipeline/generate_coinrun_dataset.py",3231,0,"",python,selection_command +973,2360951,"input_pipeline/generate_coinrun_dataset.py",3230,0,"",python,selection_command +974,2361105,"input_pipeline/generate_coinrun_dataset.py",3229,0,"",python,selection_command +975,2361249,"input_pipeline/generate_coinrun_dataset.py",3228,0,"",python,selection_command +976,2361401,"input_pipeline/generate_coinrun_dataset.py",3227,0,"",python,selection_command +977,2361552,"input_pipeline/generate_coinrun_dataset.py",3226,0,"",python,selection_command +978,2361830,"input_pipeline/generate_coinrun_dataset.py",3226,10,"",python,content +979,2362246,"input_pipeline/generate_coinrun_dataset.py",3226,1,"",python,content +980,2362413,"input_pipeline/generate_coinrun_dataset.py",3226,1,"",python,content +981,2363722,"input_pipeline/generate_coinrun_dataset.py",3276,0,"",python,selection_mouse +982,2364820,"input_pipeline/generate_coinrun_dataset.py",3276,0,"o",python,content +983,2364821,"input_pipeline/generate_coinrun_dataset.py",3277,0,"",python,selection_keyboard 
+984,2365382,"input_pipeline/generate_coinrun_dataset.py",3277,0,"b",python,content +985,2365384,"input_pipeline/generate_coinrun_dataset.py",3278,0,"",python,selection_keyboard +986,2365486,"input_pipeline/generate_coinrun_dataset.py",3278,0,"s",python,content +987,2365487,"input_pipeline/generate_coinrun_dataset.py",3279,0,"",python,selection_keyboard +988,2366118,"input_pipeline/generate_coinrun_dataset.py",3279,0,"_",python,content +989,2366119,"input_pipeline/generate_coinrun_dataset.py",3280,0,"",python,selection_keyboard +990,2366316,"input_pipeline/generate_coinrun_dataset.py",3280,0,"c",python,content +991,2366317,"input_pipeline/generate_coinrun_dataset.py",3281,0,"",python,selection_keyboard +992,2366484,"input_pipeline/generate_coinrun_dataset.py",3281,0,"h",python,content +993,2366485,"input_pipeline/generate_coinrun_dataset.py",3282,0,"",python,selection_keyboard +994,2366536,"input_pipeline/generate_coinrun_dataset.py",3282,0,"u",python,content +995,2366538,"input_pipeline/generate_coinrun_dataset.py",3283,0,"",python,selection_keyboard +996,2366680,"input_pipeline/generate_coinrun_dataset.py",3283,0,"n",python,content +997,2366681,"input_pipeline/generate_coinrun_dataset.py",3284,0,"",python,selection_keyboard +998,2366931,"input_pipeline/generate_coinrun_dataset.py",3284,0,"k",python,content +999,2366932,"input_pipeline/generate_coinrun_dataset.py",3285,0,"",python,selection_keyboard +1000,2366932,"input_pipeline/generate_coinrun_dataset.py",3285,0,"s",python,content +1001,2366933,"input_pipeline/generate_coinrun_dataset.py",3286,0,"",python,selection_keyboard +1002,2366988,"input_pipeline/generate_coinrun_dataset.py",3286,0,",",python,content +1003,2366989,"input_pipeline/generate_coinrun_dataset.py",3287,0,"",python,selection_keyboard +1004,2367145,"input_pipeline/generate_coinrun_dataset.py",3287,0," ",python,content +1005,2367145,"input_pipeline/generate_coinrun_dataset.py",3288,0,"",python,selection_keyboard +1006,2367554,"input_pipeline/generate_coinrun_dataset.py",3287,0,"",python,selection_command +1007,2381288,"input_pipeline/utils.py",0,0,"import os\nimport pickle\nimport numpy as np\nfrom array_record.python.array_record_module import ArrayRecordWriter\n\n\ndef save_chunks(obs_chunks, file_idx, chunks_per_file, output_dir, act_chunks=None):\n os.makedirs(output_dir, exist_ok=True)\n\n metadata = []\n while len(obs_chunks) >= chunks_per_file:\n chunk_batch = obs_chunks[:chunks_per_file]\n obs_chunks = obs_chunks[chunks_per_file:]\n act_chunk_batch = None\n if act_chunks:\n act_chunk_batch = act_chunks[:chunks_per_file]\n act_chunks = act_chunks[chunks_per_file:]\n episode_path = os.path.join(output_dir, f""data_{file_idx:04d}.array_record"")\n writer = ArrayRecordWriter(str(episode_path), ""group_size:1"")\n seq_lens = []\n for idx, chunk in enumerate(chunk_batch):\n seq_len = chunk.shape[0]\n seq_lens.append(seq_len)\n chunk_record = {\n ""raw_video"": chunk.tobytes(),\n ""sequence_length"": seq_len,\n }\n if act_chunk_batch:\n assert len(chunk) == len(\n act_chunk_batch[idx]\n ), f""Observation data length and action sequence length do not match: {len(chunk)} != {len(act_chunk_batch[idx])}""\n chunk_record[""actions""] = act_chunk_batch[idx]\n writer.write(pickle.dumps(chunk_record))\n writer.close()\n file_idx += 1\n metadata.append(\n {\n ""path"": episode_path,\n ""num_chunks"": len(chunk_batch),\n ""avg_seq_len"": np.mean(seq_lens),\n }\n )\n print(f""Created {episode_path} with {len(chunk_batch)} video chunks"")\n\n return metadata, obs_chunks, file_idx, 
act_chunks\n",python,tab +1008,2383019,"input_pipeline/utils.py",0,0,"",python,tab +1009,2387197,"input_pipeline/utils.py",1717,0,"",python,selection_mouse +1010,2389800,"input_pipeline/utils.py",1716,0,"",python,selection_command +1011,2390032,"input_pipeline/utils.py",1716,10,"",python,content +1012,2390475,"input_pipeline/utils.py",1716,1,"",python,content +1013,2390623,"input_pipeline/utils.py",1716,1,"",python,content +1014,2391071,"input_pipeline/utils.py",1736,0,"",python,selection_command +1015,2392330,"input_pipeline/utils.py",1726,0,"o",python,content +1016,2392331,"input_pipeline/utils.py",1727,0,"",python,selection_keyboard +1017,2392488,"input_pipeline/utils.py",1727,0,"b",python,content +1018,2392489,"input_pipeline/utils.py",1728,0,"",python,selection_keyboard +1019,2392598,"input_pipeline/utils.py",1728,0,"s",python,content +1020,2392598,"input_pipeline/utils.py",1729,0,"",python,selection_keyboard +1021,2392912,"input_pipeline/utils.py",1729,0,"c",python,content +1022,2392913,"input_pipeline/utils.py",1730,0,"",python,selection_keyboard +1023,2393420,"input_pipeline/utils.py",1729,1,"",python,content +1024,2393722,"input_pipeline/utils.py",1729,0,"_",python,content +1025,2393723,"input_pipeline/utils.py",1730,0,"",python,selection_keyboard +1026,2393936,"input_pipeline/utils.py",1730,0,"c",python,content +1027,2393937,"input_pipeline/utils.py",1731,0,"",python,selection_keyboard +1028,2394034,"input_pipeline/utils.py",1731,0,"h",python,content +1029,2394034,"input_pipeline/utils.py",1732,0,"",python,selection_keyboard +1030,2394132,"input_pipeline/utils.py",1732,0,"u",python,content +1031,2394133,"input_pipeline/utils.py",1733,0,"",python,selection_keyboard +1032,2394366,"input_pipeline/utils.py",1733,0,"k",python,content +1033,2394367,"input_pipeline/utils.py",1734,0,"",python,selection_keyboard +1034,2394689,"input_pipeline/utils.py",1733,1,"",python,content +1035,2394924,"input_pipeline/utils.py",1733,0,"n",python,content +1036,2394924,"input_pipeline/utils.py",1734,0,"",python,selection_keyboard +1037,2395026,"input_pipeline/utils.py",1734,0,"k",python,content +1038,2395027,"input_pipeline/utils.py",1735,0,"",python,selection_keyboard +1039,2395096,"input_pipeline/utils.py",1735,0,"s",python,content +1040,2395097,"input_pipeline/utils.py",1736,0,"",python,selection_keyboard +1041,2395206,"input_pipeline/utils.py",1736,0,",",python,content +1042,2395207,"input_pipeline/utils.py",1737,0,"",python,selection_keyboard +1043,2395272,"input_pipeline/utils.py",1737,0," ",python,content +1044,2395272,"input_pipeline/utils.py",1738,0,"",python,selection_keyboard +1045,2395693,"input_pipeline/utils.py",1737,0,"",python,selection_command +1046,2398348,"input_pipeline/utils.py",131,0,"",python,selection_mouse +1047,2398750,"input_pipeline/utils.py",131,10,"",python,content +1048,2399030,"input_pipeline/utils.py",131,1,"",python,content +1049,2399191,"input_pipeline/utils.py",131,1,"",python,content +1050,2401011,"input_pipeline/utils.py",169,0,"",python,selection_mouse +1051,2402737,"input_pipeline/utils.py",169,0,"o",python,content +1052,2402738,"input_pipeline/utils.py",170,0,"",python,selection_keyboard +1053,2402964,"input_pipeline/utils.py",170,0,"b",python,content +1054,2402965,"input_pipeline/utils.py",171,0,"",python,selection_keyboard +1055,2403296,"input_pipeline/utils.py",171,0,"s",python,content +1056,2403297,"input_pipeline/utils.py",172,0,"",python,selection_keyboard +1057,2403532,"input_pipeline/utils.py",172,0,"_",python,content 
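For reference, a hedged usage sketch of the save_chunks helper shown above, with hypothetical arrays and an assumed input_pipeline.utils import path (the real callers pass their own chunk lists and output directories):

import numpy as np
from input_pipeline.utils import save_chunks

# Two hypothetical observation chunks of shape (seq_len, H, W, C) plus matching action ids.
obs_chunks = [np.zeros((16, 64, 64, 3), dtype=np.uint8) for _ in range(2)]
act_chunks = [np.zeros((16,), dtype=np.int64) for _ in range(2)]

metadata, obs_left, file_idx, act_left = save_chunks(
    obs_chunks, file_idx=0, chunks_per_file=2, output_dir="/tmp/arr", act_chunks=act_chunks
)
# Writes data_0000.array_record; each record is a pickled dict with raw_video bytes,
# sequence_length, and (when actions are given) an actions entry.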
+1058,2403533,"input_pipeline/utils.py",173,0,"",python,selection_keyboard +1059,2403698,"input_pipeline/utils.py",173,0,"c",python,content +1060,2403698,"input_pipeline/utils.py",174,0,"",python,selection_keyboard +1061,2403836,"input_pipeline/utils.py",174,0,"h",python,content +1062,2403837,"input_pipeline/utils.py",175,0,"",python,selection_keyboard +1063,2403965,"input_pipeline/utils.py",175,0,"u",python,content +1064,2403966,"input_pipeline/utils.py",176,0,"",python,selection_keyboard +1065,2404093,"input_pipeline/utils.py",176,0,"n",python,content +1066,2404094,"input_pipeline/utils.py",177,0,"",python,selection_keyboard +1067,2404227,"input_pipeline/utils.py",177,0,"k",python,content +1068,2404228,"input_pipeline/utils.py",178,0,"",python,selection_keyboard +1069,2404329,"input_pipeline/utils.py",178,0,"s",python,content +1070,2404330,"input_pipeline/utils.py",179,0,"",python,selection_keyboard +1071,2404457,"input_pipeline/utils.py",179,0,",",python,content +1072,2404457,"input_pipeline/utils.py",180,0,"",python,selection_keyboard +1073,2405657,"input_pipeline/utils.py",169,0," ",python,content +1074,2405658,"input_pipeline/utils.py",170,0,"",python,selection_keyboard +1075,2405967,"input_pipeline/utils.py",169,0,"",python,selection_command +1076,2425504,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +1077,2425505,"input_pipeline/generate_coinrun_dataset.py",3266,0,"",python,selection_mouse +1078,2425605,"input_pipeline/generate_coinrun_dataset.py",3258,16,"output_dir_split",python,selection_mouse +1079,2430152,"input_pipeline/utils.py",0,0,"",python,tab +1080,2430153,"input_pipeline/utils.py",124,0,"",python,selection_mouse +1081,2430226,"input_pipeline/utils.py",119,11,"save_chunks",python,selection_mouse +1082,2440167,"input_pipeline/generate_coinrun_dataset.py",0,0,"",python,tab +1083,2446530,"input_pipeline/pngs_to_array_records.py",0,0,"",python,tab +1084,2446531,"input_pipeline/pngs_to_array_records.py",3055,11,"save_chunks",python,selection_command +1085,2448937,"input_pipeline/pngs_to_array_records.py",3033,0,"",python,selection_mouse +1086,2449971,"input_pipeline/pngs_to_array_records.py",3033,6,"",python,content +1087,2450301,"input_pipeline/pngs_to_array_records.py",3033,1,"",python,content +1088,2450439,"input_pipeline/pngs_to_array_records.py",3033,1,"",python,content +1089,2450713,"input_pipeline/pngs_to_array_records.py",3034,0,"",python,selection_command +1090,2450900,"input_pipeline/pngs_to_array_records.py",3035,0,"",python,selection_command +1091,2451039,"input_pipeline/pngs_to_array_records.py",3036,0,"",python,selection_command +1092,2451544,"input_pipeline/pngs_to_array_records.py",3037,0,"",python,selection_command +1093,2451577,"input_pipeline/pngs_to_array_records.py",3038,0,"",python,selection_command +1094,2451616,"input_pipeline/pngs_to_array_records.py",3039,0,"",python,selection_command +1095,2451648,"input_pipeline/pngs_to_array_records.py",3040,0,"",python,selection_command +1096,2451679,"input_pipeline/pngs_to_array_records.py",3041,0,"",python,selection_command +1097,2451813,"input_pipeline/pngs_to_array_records.py",3042,0,"",python,selection_command +1098,2453136,"input_pipeline/pngs_to_array_records.py",3033,0," ",python,content +1099,2453233,"input_pipeline/pngs_to_array_records.py",3033,0,",",python,content +1100,2453658,"input_pipeline/pngs_to_array_records.py",3033,0,"chunks",python,content +1101,2454298,"input_pipeline/pngs_to_array_records.py",3032,0,"",python,selection_command 
+1102,2454701,"input_pipeline/pngs_to_array_records.py",3033,0,"",python,selection_command +1103,2455658,"input_pipeline/pngs_to_array_records.py",3033,1,"c",python,selection_command +1104,2455738,"input_pipeline/pngs_to_array_records.py",3033,2,"ch",python,selection_command +1105,2456208,"input_pipeline/pngs_to_array_records.py",3033,3,"chu",python,selection_command +1106,2456213,"input_pipeline/pngs_to_array_records.py",3033,4,"chun",python,selection_command +1107,2456251,"input_pipeline/pngs_to_array_records.py",3033,5,"chunk",python,selection_command +1108,2456284,"input_pipeline/pngs_to_array_records.py",3033,6,"chunks",python,selection_command +1109,2456400,"input_pipeline/pngs_to_array_records.py",3033,7,"chunks,",python,selection_command +1110,2456758,"input_pipeline/pngs_to_array_records.py",3033,8,"chunks, ",python,selection_command +1111,2458813,"input_pipeline/pngs_to_array_records.py",3033,8,"",python,content +1112,2458996,"input_pipeline/pngs_to_array_records.py",3034,0,"",python,selection_command +1113,2459495,"input_pipeline/pngs_to_array_records.py",3035,0,"",python,selection_command +1114,2459511,"input_pipeline/pngs_to_array_records.py",3036,0,"",python,selection_command +1115,2459542,"input_pipeline/pngs_to_array_records.py",3037,0,"",python,selection_command +1116,2459574,"input_pipeline/pngs_to_array_records.py",3038,0,"",python,selection_command +1117,2459675,"input_pipeline/pngs_to_array_records.py",3039,0,"",python,selection_command +1118,2459884,"input_pipeline/pngs_to_array_records.py",3040,0,"",python,selection_command +1119,2459989,"input_pipeline/pngs_to_array_records.py",3041,0,"",python,selection_command +1120,2460190,"input_pipeline/pngs_to_array_records.py",3042,0,"",python,selection_command +1121,2460431,"input_pipeline/pngs_to_array_records.py",3043,0,"chunks, ",python,content +1122,2460433,"input_pipeline/pngs_to_array_records.py",3050,0,"",python,selection_command +1123,2462805,"input_pipeline/pngs_to_array_records.py",3049,0,"",python,selection_command +1124,2462999,"input_pipeline/pngs_to_array_records.py",3048,0,"",python,selection_command +1125,2463169,"input_pipeline/pngs_to_array_records.py",3047,0,"",python,selection_command +1126,2463311,"input_pipeline/pngs_to_array_records.py",3046,0,"",python,selection_command +1127,2463458,"input_pipeline/pngs_to_array_records.py",3045,0,"",python,selection_command +1128,2463597,"input_pipeline/pngs_to_array_records.py",3044,0,"",python,selection_command +1129,2468246,"input_pipeline/pngs_to_array_records.py",3102,0,"",python,selection_command +1130,2468977,"input_pipeline/pngs_to_array_records.py",3101,0,"",python,selection_command +1131,2469127,"input_pipeline/pngs_to_array_records.py",3100,0,"",python,selection_command +1132,2469291,"input_pipeline/pngs_to_array_records.py",3099,0,"",python,selection_command +1133,2469424,"input_pipeline/pngs_to_array_records.py",3098,0,"",python,selection_command +1134,2469750,"input_pipeline/pngs_to_array_records.py",3097,0,"",python,selection_command +1135,2469935,"input_pipeline/pngs_to_array_records.py",3096,0,"",python,selection_command +1136,2470424,"input_pipeline/pngs_to_array_records.py",3095,0,"",python,selection_command +1137,2470455,"input_pipeline/pngs_to_array_records.py",3094,0,"",python,selection_command +1138,2470498,"input_pipeline/pngs_to_array_records.py",3093,0,"",python,selection_command +1139,2470533,"input_pipeline/pngs_to_array_records.py",3092,0,"",python,selection_command 
+1140,2470540,"input_pipeline/pngs_to_array_records.py",3091,0,"",python,selection_command +1141,2470571,"input_pipeline/pngs_to_array_records.py",3090,0,"",python,selection_command +1142,2470605,"input_pipeline/pngs_to_array_records.py",3089,0,"",python,selection_command +1143,2470640,"input_pipeline/pngs_to_array_records.py",3088,0,"",python,selection_command +1144,2470675,"input_pipeline/pngs_to_array_records.py",3087,0,"",python,selection_command +1145,2470709,"input_pipeline/pngs_to_array_records.py",3086,0,"",python,selection_command +1146,2470854,"input_pipeline/pngs_to_array_records.py",3085,0,"",python,selection_command +1147,2471029,"input_pipeline/pngs_to_array_records.py",3084,0,"",python,selection_command +1148,2471181,"input_pipeline/pngs_to_array_records.py",3083,0,"",python,selection_command +1149,2471352,"input_pipeline/pngs_to_array_records.py",3082,0,"",python,selection_command +1150,2471488,"input_pipeline/pngs_to_array_records.py",3081,0,"",python,selection_command +1151,2471639,"input_pipeline/pngs_to_array_records.py",3080,0,"",python,selection_command +1152,2473112,"input_pipeline/pngs_to_array_records.py",3080,6,"chunks",python,selection_mouse +1153,2473978,"input_pipeline/pngs_to_array_records.py",3060,0,"",python,selection_mouse +1154,2474133,"input_pipeline/pngs_to_array_records.py",3055,11,"save_chunks",python,selection_mouse +1155,2481762,"input_pipeline/pngs_to_array_records.py",3080,0,"",python,selection_mouse +1156,2481909,"input_pipeline/pngs_to_array_records.py",3080,2,"ch",python,selection_mouse +1157,2481947,"input_pipeline/pngs_to_array_records.py",3080,3,"chu",python,selection_mouse +1158,2481947,"input_pipeline/pngs_to_array_records.py",3080,5,"chunk",python,selection_mouse +1159,2481982,"input_pipeline/pngs_to_array_records.py",3080,6,"chunks",python,selection_mouse +1160,2482021,"input_pipeline/pngs_to_array_records.py",3080,7,"chunks,",python,selection_mouse +1161,2482876,"input_pipeline/pngs_to_array_records.py",3080,7,"",python,content +1162,2484200,"input_pipeline/pngs_to_array_records.py",3107,0,"",python,selection_mouse +1163,2484563,"input_pipeline/pngs_to_array_records.py",3108,0,"chunks,",python,content +1164,2484564,"input_pipeline/pngs_to_array_records.py",3114,0,"",python,selection_command +1165,2485497,"input_pipeline/pngs_to_array_records.py",3115,0,"",python,selection_command +1166,2485819,"input_pipeline/pngs_to_array_records.py",3115,0," ",python,content +1167,2485820,"input_pipeline/pngs_to_array_records.py",3116,0,"",python,selection_keyboard +1168,2486088,"input_pipeline/pngs_to_array_records.py",3115,0,"",python,selection_command +1169,2486306,"input_pipeline/pngs_to_array_records.py",3114,0,"",python,selection_command +1170,2486816,"input_pipeline/pngs_to_array_records.py",3113,0,"",python,selection_command +1171,2487019,"input_pipeline/pngs_to_array_records.py",3081,0,"",python,selection_command +1172,2487630,"input_pipeline/pngs_to_array_records.py",3081,1,"",python,content +1173,2487797,"input_pipeline/pngs_to_array_records.py",3080,0,"",python,selection_command +1174,2488845,"input_pipeline/pngs_to_array_records.py",3081,0,"f",python,content +1175,2488846,"input_pipeline/pngs_to_array_records.py",3081,0,"",python,selection_command +1176,2490409,"input_pipeline/pngs_to_array_records.py",3080,1,"",python,content +1177,2490659,"input_pipeline/pngs_to_array_records.py",3079,0,"",python,selection_command +1178,2491690,"input_pipeline/pngs_to_array_records.py",3110,0,"",python,selection_mouse 
+1179,2491866,"input_pipeline/pngs_to_array_records.py",3107,6,"chunks",python,selection_mouse +1180,2501284,"input_pipeline/pngs_to_array_records.py",3116,0,"",python,selection_mouse +1181,2501428,"input_pipeline/pngs_to_array_records.py",3115,11,"output_path",python,selection_mouse +1182,2514257,"input_pipeline/pngs_to_array_records.py",3116,0,"",python,selection_mouse +1183,2514258,"input_pipeline/pngs_to_array_records.py",3115,11,"output_path",python,selection_mouse +1184,2516709,"input_pipeline/pngs_to_array_records.py",3174,0,"",python,selection_mouse +1185,2516715,"input_pipeline/pngs_to_array_records.py",3173,0,"",python,selection_command +1186,2517229,"input_pipeline/pngs_to_array_records.py",3136,0,"",python,selection_mouse +1187,2517230,"input_pipeline/pngs_to_array_records.py",3135,0,"",python,selection_command +1188,2518451,"input_pipeline/pngs_to_array_records.py",3084,0,"",python,selection_mouse +1189,2518584,"input_pipeline/pngs_to_array_records.py",3080,8,"file_idx",python,selection_mouse +1190,2519460,"input_pipeline/pngs_to_array_records.py",3096,0,"",python,selection_mouse +1191,2519655,"input_pipeline/pngs_to_array_records.py",3090,15,"chunks_per_file",python,selection_mouse +1192,2521223,"input_pipeline/pngs_to_array_records.py",3110,0,"",python,selection_mouse +1193,2521420,"input_pipeline/pngs_to_array_records.py",3107,6,"chunks",python,selection_mouse +1194,2523092,"input_pipeline/pngs_to_array_records.py",3102,0,"",python,selection_mouse +1195,2523285,"input_pipeline/pngs_to_array_records.py",3090,15,"chunks_per_file",python,selection_mouse +1196,2523763,"input_pipeline/pngs_to_array_records.py",3112,0,"",python,selection_mouse +1197,2523941,"input_pipeline/pngs_to_array_records.py",3107,6,"chunks",python,selection_mouse +1198,2525542,"input_pipeline/pngs_to_array_records.py",3109,0,"",python,selection_mouse +1199,2526263,"input_pipeline/pngs_to_array_records.py",3107,0,"",python,selection_mouse +1200,2526396,"input_pipeline/pngs_to_array_records.py",3107,2,"ch",python,selection_mouse +1201,2526425,"input_pipeline/pngs_to_array_records.py",3107,4,"chun",python,selection_mouse +1202,2526448,"input_pipeline/pngs_to_array_records.py",3107,5,"chunk",python,selection_mouse +1203,2526484,"input_pipeline/pngs_to_array_records.py",3107,6,"chunks",python,selection_mouse +1204,2526628,"input_pipeline/pngs_to_array_records.py",3107,7,"chunks,",python,selection_mouse +1205,2527420,"input_pipeline/pngs_to_array_records.py",3107,7,"",python,content +1206,2527990,"input_pipeline/pngs_to_array_records.py",3119,0,"",python,selection_mouse +1207,2528025,"input_pipeline/pngs_to_array_records.py",3118,0,"",python,selection_command +1208,2528792,"input_pipeline/pngs_to_array_records.py",3119,0,"chunks,",python,content +1209,2528813,"input_pipeline/pngs_to_array_records.py",3125,0,"",python,selection_command +1210,2529894,"input_pipeline/pngs_to_array_records.py",3119,0,"",python,selection_mouse +1211,2530763,"input_pipeline/pngs_to_array_records.py",3119,0,",",python,content +1212,2530764,"input_pipeline/pngs_to_array_records.py",3120,0,"",python,selection_keyboard +1213,2530831,"input_pipeline/pngs_to_array_records.py",3120,0," ",python,content +1214,2530832,"input_pipeline/pngs_to_array_records.py",3121,0,"",python,selection_keyboard +1215,2532640,"input_pipeline/pngs_to_array_records.py",3127,1,"",python,content +1216,2533957,"input_pipeline/pngs_to_array_records.py",3107,1,"",python,content +1217,2534355,"input_pipeline/pngs_to_array_records.py",3106,0,"",python,selection_command 
+1218,2546202,"input_pipeline/utils.py",0,0,"",python,tab +1219,2546203,"input_pipeline/utils.py",1404,0,"",python,selection_mouse +1220,2563612,"input_pipeline/pngs_to_array_records.py",0,0,"",python,tab +1221,2563613,"input_pipeline/pngs_to_array_records.py",3110,0,"",python,selection_mouse +1222,2565112,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +1223,2566153,"TERMINAL",0,0,"g",,terminal_output +1224,2566277,"TERMINAL",0,0,"it",,terminal_output +1225,2566379,"TERMINAL",0,0," ",,terminal_output +1226,2566729,"TERMINAL",0,0,"stat",,terminal_output +1227,2566789,"TERMINAL",0,0,"u",,terminal_output +1228,2566953,"TERMINAL",0,0,"s",,terminal_output +1229,2567083,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1230,2567310,"TERMINAL",0,0,"On branch gt-actions\r\nYour branch is up to date with 'origin/gt-actions'.\r\n\r\nLast commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: input_pipeline/generate_coinrun_dataset.py\r\n\tmodified: input_pipeline/pngs_to_array_records.py\r\n\tmodified: input_pipeline/utils.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tinput_pipeline/generate_breakout_dataset_agent.py\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/visualizer.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +1231,2567993,"TERMINAL",0,0,"g",,terminal_output +1232,2568121,"TERMINAL",0,0,"ot",,terminal_output +1233,2568276,"TERMINAL",0,0," ",,terminal_output +1234,2568383,"TERMINAL",0,0,"c",,terminal_output +1235,2568501,"TERMINAL",0,0,"p",,terminal_output +1236,2569558,"TERMINAL",0,0,"",,terminal_output +1237,2570354,"TERMINAL",0,0,"",,terminal_output +1238,2570803,"TERMINAL",0,0,"",,terminal_output +1239,2571437,"TERMINAL",0,0,"",,terminal_output +1240,2571531,"TERMINAL",0,0,"",,terminal_output +1241,2571815,"TERMINAL",0,0,"i",,terminal_output +1242,2571929,"TERMINAL",0,0,"t",,terminal_output +1243,2572026,"TERMINAL",0,0," ",,terminal_output +1244,2573564,"TERMINAL",0,0,"c",,terminal_output +1245,2573630,"TERMINAL",0,0,"o",,terminal_output +1246,2573829,"TERMINAL",0,0,"m",,terminal_output +1247,2573987,"TERMINAL",0,0,"m",,terminal_output +1248,2574083,"TERMINAL",0,0,"i",,terminal_output +1249,2574145,"TERMINAL",0,0,"t",,terminal_output +1250,2575206,"TERMINAL",0,0," ",,terminal_output +1251,2575296,"TERMINAL",0,0,"-",,terminal_output +1252,2575397,"TERMINAL",0,0,"a",,terminal_output +1253,2575608,"TERMINAL",0,0," ",,terminal_output +1254,2575928,"TERMINAL",0,0,"",,terminal_output +1255,2576197,"TERMINAL",0,0,"m 
",,terminal_output +1256,2576473,"TERMINAL",0,0,"""",,terminal_output +1257,2582319,"TERMINAL",0,0,"ap",,terminal_output +1258,2582781,"TERMINAL",0,0,"",,terminal_output +1259,2582911,"TERMINAL",0,0,"",,terminal_output +1260,2588278,"TERMINAL",0,0,"move obs_chunks in arguments and returns such that obs_chunks and act_chunks are next to each other",,terminal_output +1261,2588677,"TERMINAL",0,0,"move obs_chunks in arguments and returns such that obs_chunks and act_chunks are next to each other""",,terminal_output +1262,2589347,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1263,2589807,"TERMINAL",0,0,"black....................................................................",,terminal_output +1264,2590239,"TERMINAL",0,0,"Passed\r\n",,terminal_output +1265,2590530,"TERMINAL",0,0,"[gt-actions 6b6d2b9] move obs_chunks in arguments and returns such that obs_chunks and act_chunks are next to each other\r\n 3 files changed, 6 insertions(+), 6 deletions(-)\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +1266,2591641,"TERMINAL",0,0,"g",,terminal_output +1267,2591735,"TERMINAL",0,0,"i",,terminal_output +1268,2591829,"TERMINAL",0,0,"t ",,terminal_output +1269,2591957,"TERMINAL",0,0,"p",,terminal_output +1270,2592135,"TERMINAL",0,0,"u",,terminal_output +1271,2592231,"TERMINAL",0,0,"s",,terminal_output +1272,2592299,"TERMINAL",0,0,"h",,terminal_output +1273,2592713,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1274,2594009,"TERMINAL",0,0,"Enumerating objects: 11, done.\r\nCounting objects: 9% (1/11)\rCounting objects: 18% (2/11)\rCounting objects: 27% (3/11)\rCounting objects: 36% (4/11)\rCounting objects: 45% (5/11)\rCounting objects: 54% (6/11)\rCounting objects: 63% (7/11)\rCounting objects: 72% (8/11)\rCounting objects: 81% (9/11)\rCounting objects: 90% (10/11)\rCounting objects: 100% (11/11)\rCounting objects: 100% (11/11), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 16% (1/6)\rCompressing objects: 33% (2/6)\rCompressing objects: 50% (3/6)\rCompressing objects: 66% (4/6)\rCompressing objects: 83% (5/6)\rCompressing objects: 100% (6/6)\rCompressing objects: 100% (6/6), done.\r\nWriting objects: 16% (1/6)\rWriting objects: 33% (2/6)\rWriting objects: 50% (3/6)\rWriting objects: 66% (4/6)\rWriting objects: 83% (5/6)\rWriting objects: 100% (6/6)\rWriting objects: 100% (6/6), 734 bytes | 734.00 KiB/s, done.\r\nTotal 6 (delta 4), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +1275,2594104,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/4)\rremote: Resolving deltas: 25% (1/4)\rremote: Resolving deltas: 50% (2/4)\rremote: Resolving deltas: 75% (3/4)\rremote: Resolving deltas: 100% (4/4)\rremote: Resolving deltas: 100% (4/4), completed with 4 local objects.\r\n",,terminal_output +1276,2594268,"TERMINAL",0,0,"To github.com:p-doom/jasmine.git\r\n 5af365e..6b6d2b9 gt-actions -> gt-actions\r\n",,terminal_output +1277,2594374,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +1278,2599278,"input_pipeline/utils.py",0,0,"",python,tab +1279,2600184,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +1280,2610522,"TERMINAL",0,0,"gi",,terminal_output +1281,2610598,"TERMINAL",0,0,"t",,terminal_output +1282,2610686,"TERMINAL",0,0," ",,terminal_output +1283,2610931,"TERMINAL",0,0,"s",,terminal_output +1284,2611271,"TERMINAL",0,0,"t",,terminal_output +1285,2611355,"TERMINAL",0,0,"a",,terminal_output 
+1286,2611642,"TERMINAL",0,0,"t",,terminal_output +1287,2612400,"TERMINAL",0,0,"us",,terminal_output +1288,2612538,"TERMINAL",0,0,"\r\n[?2004l\rOn branch gt-actions\r\nYour branch is up to date with 'origin/gt-actions'.\r\n\r\nLast commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tinput_pipeline/generate_breakout_dataset_agent.py\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/visualizer.py\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +1289,2620427,"TERMINAL",0,0,"q",,terminal_output +1290,2620546,"TERMINAL",0,0,"u",,terminal_output +1291,2620671,"TERMINAL",0,0,"e",,terminal_output +1292,2620735,"TERMINAL",0,0,"u",,terminal_output +1293,2620796,"TERMINAL",0,0,"e",,terminal_output +1294,2620919,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0402.localdomain: Sun Sep 21 13:04:02 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3507625 dev_accel interact tum_cte0 R13:14\t 1 hkn0402",,terminal_output +1295,2621961,"TERMINAL",0,0,"35",,terminal_output +1296,2622427,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +1297,2623354,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\n",,terminal_output +1298,2623709,"TERMINAL",0,0,"salloc: Relinquishing job allocation 3507625\r\nsalloc: Job allocation 3507625 has been revoked.\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5c146b3b-a208-4bdf-96e7-7e0722fd3fa01751383718572-2025_07_01-18.25.45.514/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5c146b3b-a208-4bdf-96e7-7e0722fd3fa01751383718572-2025_07_01-18.25.45.514/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..2c42d45387b584eeb2ff816a76d74b0f0aa7cf30 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5c146b3b-a208-4bdf-96e7-7e0722fd3fa01751383718572-2025_07_01-18.25.45.514/source.csv @@ -0,0 +1,4488 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,322,"extension-output-pdoom-org.crowd-code-#2-crowd-code",0,0,"6:25:45 PM [info] Activating crowd-code\n6:25:45 PM [info] Recording started\n6:25:45 PM [info] Initializing git provider using file system watchers...\n6:25:45 PM [info] Git repository found\n6:25:45 PM [info] Git provider initialized successfully\n",Log,tab +3,394,"extension-output-pdoom-org.crowd-code-#2-crowd-code",245,0,"6:25:45 PM [info] Initial git state: [object Object]\n",Log,content +4,3358,"TERMINAL",0,0,"/bin/python3 
/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +5,3407,"TERMINAL",0,0,"]633;E;2025-07-01 18:25:48 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;e2b1d2b0-37d9-4d54-881f-1b8925ccda66]633;C",,terminal_output +6,3472,"TERMINAL",0,0,"]0;tum_cte0515@hkn1993:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +7,897538,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G",,terminal_command +8,897576,"TERMINAL",0,0,"]633;E;2025-07-01 18:40:42 salloc --time=01:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 --mem=50G;b1a412dd-3827-45fd-af4c-b6c15dd865d1]633;C",,terminal_output +9,897686,"TERMINAL",0,0,"salloc: Granted job allocation 3309821\r\n",,terminal_output +10,897796,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +11,930370,"TERMINAL",0,0,"salloc: Nodes hkn0706 are ready for job\r\n",,terminal_output +12,931134,"TERMINAL",0,0,"]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h[tum_cte0515@hkn0706 jafar]$ ",,terminal_output +13,984837,"TERMINAL",0,0,"q",,terminal_output +14,984945,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +15,985007,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +16,985117,"TERMINAL",0,0,"[?25lu[?25h[?25le[?25h",,terminal_output +17,985273,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +18,985342,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn0706.localdomain: Tue Jul 1 18:42:10 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:01:43\t 1 hkn07123309821 accelerat interact tum_cte0 R\t1:27\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:51:57\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:51:57\t 1 hkn0632",,terminal_output +19,986321,"TERMINAL",0,0,"14888",,terminal_output +20,987338,"TERMINAL",0,0,"25999",,terminal_output +21,987898,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h[tum_cte0515@hkn0706 jafar]$ ",,terminal_output +22,1004488,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\n# from utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n 
tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_resolution, args.image_resolution, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\n# --- Utility function to save an image as PNG ---\n\ndef save_frame_as_png(img_array, frame_idx, out_dir=""frames"", prefix=""frame""):\n """"""\n Save a single image (numpy or jax array) as a PNG file.\n img_array: shape (H, W, C), values in [0, 1] or [0, 255]\n frame_idx: int, frame number\n out_dir: directory to save images\n prefix: filename prefix\n """"""\n if not os.path.exists(out_dir):\n os.makedirs(out_dir)\n # Convert to numpy if needed\n if hasattr(img_array, ""device_buffer"") or hasattr(img_array, ""block_until_ready""):\n img_array = np.array(img_array)\n # Clip and convert to uint8\n img_uint8 = (img_array.clip(0, 1) * 255).astype(np.uint8) if img_array.max() <= 1.0 else img_array.astype(np.uint8)\n img = Image.fromarray(img_uint8)\n img.save(os.path.join(out_dir, f""{prefix}_{frame_idx:03d}.png""))\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n # Save the initial video frames before sampling\n for idx in range(vid.shape[1]):\n # Save the first sample in the batch for each initial frame\n save_frame_as_png(vid[0, idx], idx)\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""=""*100)\n print(""Frame"", frame_idx)\n print(""=""*100)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n # Save the first sample in the batch for this frame\n save_frame_as_png(new_frame[0, 0], frame_idx)\n return vid\n\ndef _oneshot_sample(rng, video_batch, action_batch):\n # Pass the full video batch, as in training\n batch = dict(\n videos=video_batch, # full batch, not just first frame\n latent_actions=action_batch, # shape should match what was used in training\n 
mask_rng=rng,\n )\n outputs = genie.apply(params, batch, False) # training=False for eval\n return outputs[""recon""]\n\n# --- Get video + latent actions ---\n# dataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\n# video_batch = next(iter(dataloader))\n# video_batch = np.load(""overfit_dir/single_sample_corner.npy"")\nvideo_batch = np.load(""overfit_dir/single_batch_12_elems.npy"")\n# Get latent actions from first video only\nfirst_video = video_batch[:1, :args.seq_len]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\naction_batch = jnp.zeros_like(action_batch)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)\n# vid = jnp.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +23,1008274,"sample.py",5640,3,"ch)",python,selection_mouse +24,1008275,"sample.py",5632,11,"tion_batch)",python,selection_mouse +25,1008276,"sample.py",5614,29,"g, video_batch, action_batch)",python,selection_mouse +26,1008277,"sample.py",5609,34,"le(rng, video_batch, action_batch)",python,selection_mouse +27,1008277,"sample.py",5604,39,"_sample(rng, video_batch, action_batch)",python,selection_mouse +28,1008278,"sample.py",5601,42,"hot_sample(rng, video_batch, action_batch)",python,selection_mouse +29,1008295,"sample.py",5563,80,".zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)",python,selection_mouse +30,1008331,"sample.py",5562,81,"p.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)",python,selection_mouse +31,1008333,"sample.py",5561,82,"np.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)",python,selection_mouse +32,1008364,"sample.py",5560,83,"jnp.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)",python,selection_mouse +33,1008411,"sample.py",5503,140," _oneshot_sample(rng, video_batch, action_batch)\n# vid = jnp.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)",python,selection_mouse +34,1008412,"sample.py",5502,141,"= 
_oneshot_sample(rng, video_batch, action_batch)\n# vid = jnp.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)",python,selection_mouse +35,1008469,"sample.py",5501,142," = _oneshot_sample(rng, video_batch, action_batch)\n# vid = jnp.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)",python,selection_mouse +36,1008470,"sample.py",5445,198," = _autoreg_sample(rng, video_batch, action_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)\n# vid = jnp.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)",python,selection_mouse +37,1008487,"sample.py",5444,199,"d = _autoreg_sample(rng, video_batch, action_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)\n# vid = jnp.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)",python,selection_mouse +38,1008504,"sample.py",5443,200,"id = _autoreg_sample(rng, video_batch, action_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)\n# vid = jnp.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)",python,selection_mouse +39,1008523,"sample.py",5442,201,"vid = _autoreg_sample(rng, video_batch, action_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)\n# vid = jnp.zeros_like(video_batch)\n# vid = _oneshot_sample(rng, video_batch, action_batch)",python,selection_mouse +40,1009334,"sample.py",5672,0,"",python,selection_mouse +41,1144019,"TERMINAL",0,0,"[?25lso[?25h[?25lo[?25h",,terminal_output +42,1144152,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +43,1144219,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +44,1144441,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +45,1144521,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +46,1144640,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +47,1144706,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +48,1144930,"TERMINAL",0,0,"[?25lv[?25h",,terminal_output +49,1145134,"TERMINAL",0,0,"env/",,terminal_output +50,1145397,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +51,1145513,"TERMINAL",0,0,"in/",,terminal_output +52,1145745,"TERMINAL",0,0,"[?25la[?25h[?25lc[?25h",,terminal_output +53,1145932,"TERMINAL",0,0,"tivate",,terminal_output +54,1146310,"TERMINAL",0,0,"[?25l[?2004l\r[?25h]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +55,1146529,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +56,1146803,"TERMINAL",0,0,"queue",,terminal_output +57,1147032,"TERMINAL",0,0,"ls $ws_dir/../logs/logs_alfred/logs_training_tokenizer/train_tokenizer_batch_size_scaling_8_node_3307474.log",,terminal_output +58,1147238,"TERMINAL",0,0,"",,terminal_output +59,1147633,"TERMINAL",0,0,"",,terminal_output +60,1147835,"TERMINAL",0,0,"",,terminal_output +61,1148012,"TERMINAL",0,0,"",,terminal_output +62,1148169,"TERMINAL",0,0,"",,terminal_output +63,1148287,"TERMINAL",0,0,"# save_checkpoint_multiprocess",,terminal_output +64,1148506,"TERMINAL",0,0,"idling",,terminal_output +65,1148636,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_output +66,1149170,"TERMINAL",0,0,"salloc --time=02:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=4 --gres=gpu:4 --cpus-per-task=5 
--mem=50G\r\n\r\r\n\r",,terminal_output +67,1149286,"TERMINAL",0,0,"\rqueue",,terminal_output +68,1149819,"TERMINAL",0,0,"ls",,terminal_output +69,1150035,"TERMINAL",0,0,"cd ../jafar",,terminal_output +70,1150217,"TERMINAL",0,0,"sbatch scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",,terminal_output +71,1150447,"TERMINAL",0,0,"lam_overfit_sample.sbatch",,terminal_output +72,1151243,"TERMINAL",0,0,"tokenizer_overfit_sample.sbatch",,terminal_output +73,1155992,"TERMINAL",0,0,"s",,terminal_output +74,1156376,"TERMINAL",0,0,"ample.sh",,terminal_output +75,1158725,"TERMINAL",0,0,"[?25lh scripts_horeka/overfit_sample_tiny/sample.sh\r[?25h",,terminal_output +76,1158876,"TERMINAL",0,0,"[?25lc scripts_horeka/overfit_sample_tiny/sample.sh\r[?25h",,terminal_output +77,1159061,"TERMINAL",0,0,"[?25lt scripts_horeka/overfit_sample_tiny/sample.sh\r[?25h",,terminal_output +78,1159220,"TERMINAL",0,0,"[?25la scripts_horeka/overfit_sample_tiny/sample.sh\r[?25h",,terminal_output +79,1160378,"TERMINAL",0,0," scripts_horeka/overfit_sample_tiny/sample.sh\r",,terminal_output +80,1160553,"TERMINAL",0,0,"[?25lh scripts_horeka/overfit_sample_tiny/sample.sh\r[?25h",,terminal_output +81,1160977,"TERMINAL",0,0,"[?25l[?2004l\r[?25h",,terminal_output +82,1161121,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\r\n",,terminal_output +83,1162299,"TERMINAL",0,0,"bash",,terminal_focus +84,1180989,"TERMINAL",0,0,"2025-07-01 18:45:26.351890: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +85,1185058,"TERMINAL",0,0,"2025-07-01 18:45:30.497043: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +86,1189115,"TERMINAL",0,0,"srun",,terminal_focus +87,1193283,"TERMINAL",0,0,"2025-07-01 18:45:38.702025: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +88,1200604,"TERMINAL",0,0,"2025-07-01 18:45:46.028250: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +89,1207516,"TERMINAL",0,0,"2025-07-01 18:45:52.898571: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +90,1213108,"TERMINAL",0,0,"2025-07-01 18:45:58.548925: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +91,1216414,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +92,1227761,"TERMINAL",0,0,"====================================================================================================\r\nFrame 1\r\n====================================================================================================\r\n",,terminal_output +93,1228344,"TERMINAL",0,0,"2025-07-01 18:46:13.776223: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +94,1239864,"TERMINAL",0,0,"2025-07-01 18:46:25.287141: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +95,1252356,"TERMINAL",0,0,"2025-07-01 18:46:37.738120: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +96,1255439,"TERMINAL",0,0,"====================================================================================================\r\nFrame 2\r\n====================================================================================================\r\n",,terminal_output +97,1256045,"TERMINAL",0,0,"2025-07-01 18:46:41.455149: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +98,1267274,"TERMINAL",0,0,"2025-07-01 18:46:52.675053: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +99,1270380,"TERMINAL",0,0,"2025-07-01 18:46:55.717739: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +100,1284203,"TERMINAL",0,0,"====================================================================================================\r\nFrame 3\r\n====================================================================================================\r\n",,terminal_output +101,1284727,"TERMINAL",0,0,"2025-07-01 18:47:10.166457: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +102,1296427,"TERMINAL",0,0,"2025-07-01 18:47:21.864907: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +103,1299469,"TERMINAL",0,0,"2025-07-01 18:47:24.907288: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +104,1314025,"TERMINAL",0,0,"====================================================================================================\r\nFrame 4\r\n====================================================================================================\r\n",,terminal_output +105,1314632,"TERMINAL",0,0,"2025-07-01 18:47:40.073620: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +106,1326589,"TERMINAL",0,0,"2025-07-01 18:47:52.026328: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +107,1329718,"TERMINAL",0,0,"2025-07-01 18:47:55.156499: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +108,1346409,"TERMINAL",0,0,"====================================================================================================\r\nFrame 5\r\n====================================================================================================\r\n",,terminal_output +109,1347035,"TERMINAL",0,0,"2025-07-01 18:48:12.475222: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +110,1359043,"TERMINAL",0,0,"2025-07-01 18:48:24.466301: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +111,1362438,"TERMINAL",0,0,"2025-07-01 18:48:27.768500: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +112,1381258,"TERMINAL",0,0,"====================================================================================================\r\nFrame 6\r\n====================================================================================================\r\n",,terminal_output +113,1381995,"TERMINAL",0,0,"2025-07-01 18:48:47.335884: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +114,1392909,"train_tokenizer.py",0,0,"from dataclasses import dataclass, field\nimport os\nimport time\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\n\nts = int(time.time())\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data_tfrecords/coinrun""\n checkpoint: str = """"\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n min_lr: float = 3e-4\n max_lr: float = 3e-4\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 8\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_gradients: bool = False\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == 
""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n if args.log and jax.process_index() == 0:\n wandb.init(\n entity=args.entity,\n project=args.project,\n name=args.name,\n tags=args.tags,\n group=""debug"",\n config=args\n )\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n restore_target = {""model"": train_state}\n restore_args = orbax_utils.restore_args_from_target(restore_target)\n train_state.params[""params""].update(\n PyTreeCheckpointer()\n .restore(args.checkpoint, item=restore_target, restore_args=restore_args)[\n ""model""\n ]\n .params[""params""]\n )\n # Assume checkpoint is of the form tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- TRAIN LOOP ---\n tfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n ]\n dataloader = get_dataloader(\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n tfrecord_files,\n args.seq_len,\n args.batch_size,\n *image_shape,\n )\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n # for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)\n while(True):\n # --- Train step ---\n rng, _rng, _rng_dropout = jax.random.split(rng, 3)\n\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % 
args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +115,1393671,"TERMINAL",0,0,"2025-07-01 18:48:59.112086: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +116,1397139,"TERMINAL",0,0,"2025-07-01 18:49:02.387734: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +117,1401429,"train_tokenizer.py",6755,0,"",python,selection_mouse +118,1401546,"train_tokenizer.py",6754,3,"for",python,selection_mouse +119,1401713,"train_tokenizer.py",6744,36," # for videos in dataloader:\n",python,selection_mouse +120,1403010,"train_tokenizer.py",6755,0,"",python,selection_mouse +121,1403716,"train_tokenizer.py",6751,0,"",python,selection_mouse +122,1403847,"train_tokenizer.py",6744,8," ",python,selection_mouse +123,1404100,"train_tokenizer.py",6744,9," #",python,selection_mouse +124,1404101,"train_tokenizer.py",6744,13," # for",python,selection_mouse +125,1404175,"train_tokenizer.py",6744,20," # for videos",python,selection_mouse +126,1404270,"train_tokenizer.py",6744,21," # for videos ",python,selection_mouse +127,1404270,"train_tokenizer.py",6744,23," # for videos in",python,selection_mouse +128,1404331,"train_tokenizer.py",6744,24," # for videos in ",python,selection_mouse +129,1404332,"train_tokenizer.py",6744,34," # for videos in dataloader",python,selection_mouse +130,1404983,"train_tokenizer.py",6777,0,"",python,selection_mouse +131,1409755,"train_tokenizer.py",6874,0,"",python,selection_mouse +132,1410536,"train_tokenizer.py",6813,0,"",python,selection_mouse +133,1412760,"TERMINAL",0,0,"bash",,terminal_focus +134,1413804,"TERMINAL",0,0,"queue",,terminal_command +135,1413877,"TERMINAL",0,0,"]633;E;2025-07-01 18:49:19 queue;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:19 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:08:52\t 1 hkn07123309821 
accelerat interact tum_cte0 R\t8:36\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:06\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:06\t 1 hkn0632",,terminal_output +136,1414901,"TERMINAL",0,0,"203777",,terminal_output +137,1415944,"TERMINAL",0,0,"14888",,terminal_output +138,1417025,"TERMINAL",0,0,"25999",,terminal_output +139,1417432,"TERMINAL",0,0,"====================================================================================================\r\nFrame 7\r\n====================================================================================================\r\n",,terminal_output +140,1418064,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:23 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:08:56\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:40\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:10\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:10\t 1 hkn0632",,terminal_output +141,1418064,"TERMINAL",0,0,"2025-07-01 18:49:23.466175: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +142,1419463,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:24 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:08:57\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:41\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:11\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:11\t 1 hkn0632",,terminal_output +143,1419581,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:24 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:08:58\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:42\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:12\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:12\t 1 hkn0632",,terminal_output +144,1419658,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:25 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:08:58\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:42\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:12\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:12\t 1 hkn0632",,terminal_output +145,1419918,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:25 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:08:58\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:42\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:12\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:12\t 1 hkn0632",,terminal_output +146,1420413,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:25 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:08:58\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:42\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:12\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:12\t 1 hkn0632",,terminal_output +147,1420505,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:25 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 
1:08:58\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:42\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:12\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:12\t 1 hkn0632",,terminal_output +148,1421283,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:26 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:08:59\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:43\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:13\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:13\t 1 hkn0632",,terminal_output +149,1422341,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:27 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:09:00\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:44\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:14\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:14\t 1 hkn0632",,terminal_output +150,1422379,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:27 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:09:00\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:44\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:14\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:14\t 1 hkn0632",,terminal_output +151,1422453,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:27 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:09:00\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:44\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:14\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:14\t 1 hkn0632",,terminal_output +152,1422564,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:27 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:09:00\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:44\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:14\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:14\t 1 hkn0632",,terminal_output +153,1423214,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:28 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:09:01\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:45\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:15\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:15\t 1 hkn0632",,terminal_output +154,1423351,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:28 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:09:01\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:45\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:15\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:15\t 1 hkn0632",,terminal_output +155,1424371,"TERMINAL",0,0,"92666",,terminal_output +156,1425510,"TERMINAL",0,0,"303777",,terminal_output +157,1425931,"TERMINAL",0,0,"Every 1.0s: squeue --mehkn1993.localdomain: Tue Jul 1 18:49:31 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:09:04\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:48\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:18\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:18\t 1 hkn0632",,terminal_output +158,1427063,"TERMINAL",0,0,"Every 1.0s: squeue 
--mehkn1993.localdomain: Tue Jul 1 18:49:32 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3309699 accelerat train_dy tum_cte0 R 1:09:05\t 1 hkn07123309821 accelerat interact tum_cte0 R\t8:49\t 1 hkn07063309662 accelerat train_to tum_cte0 R 1:59:19\t 1 hkn06323309663 accelerat train_la tum_cte0 R 1:59:19\t 1 hkn0632",,terminal_output +159,1428016,"TERMINAL",0,0,"36502020",,terminal_output +160,1429074,"TERMINAL",0,0,"47111",,terminal_output +161,1430123,"TERMINAL",0,0,"58222",,terminal_output +162,1431168,"TERMINAL",0,0,"69333",,terminal_output +163,1432213,"TERMINAL",0,0,"710444",,terminal_output +164,1433196,"TERMINAL",0,0,"2025-07-01 18:49:38.613364: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +165,1433249,"TERMINAL",0,0,"81555",,terminal_output +166,1433491,"train_tokenizer.py",6898,0,"",python,selection_mouse +167,1433520,"train_tokenizer.py",6897,0,"",python,selection_command +168,1433875,"train_tokenizer.py",6893,4,".npy",python,selection_mouse +169,1433876,"train_tokenizer.py",6897,36,"""\n videos = np.load(npy_path)",python,selection_mouse +170,1433876,"train_tokenizer.py",6897,34,"""\n videos = np.load(npy_pat",python,selection_mouse +171,1433876,"train_tokenizer.py",6858,39," ""overfit_dir/single_batch_12_elems.npy",python,selection_mouse +172,1433877,"train_tokenizer.py",6788,109,"# npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy",python,selection_mouse +173,1433877,"train_tokenizer.py",6747,150," # for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy",python,selection_mouse +174,1433877,"train_tokenizer.py",6744,153," # for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy",python,selection_mouse +175,1433900,"train_tokenizer.py",6893,5,".npy""",python,selection_command +176,1433901,"train_tokenizer.py",6711,187," while step < args.num_steps:\n # for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""",python,selection_mouse +177,1434120,"train_tokenizer.py",6711,33," while step < args.num_steps:\n",python,selection_mouse +178,1434308,"TERMINAL",0,0,"92666",,terminal_output +179,1434987,"train_tokenizer.py",6750,0,"",python,selection_mouse +180,1435143,"train_tokenizer.py",6744,8," ",python,selection_mouse +181,1435347,"train_tokenizer.py",6744,54," # for videos in dataloader:\n # npy_path",python,selection_mouse +182,1435438,"train_tokenizer.py",6744,127," # for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir",python,selection_mouse +183,1435438,"train_tokenizer.py",6744,188," # for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path",python,selection_mouse +184,1435439,"train_tokenizer.py",6744,189," # for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +185,1435439,"train_tokenizer.py",6744,233," # for videos 
in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape",python,selection_mouse +186,1435455,"TERMINAL",0,0,"403777",,terminal_output +187,1435479,"train_tokenizer.py",6744,234," # for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)",python,selection_mouse +188,1435863,"train_tokenizer.py",6744,189," # for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +189,1436081,"train_tokenizer.py",6933,0,"",python,selection_mouse +190,1436099,"train_tokenizer.py",6932,0,"",python,selection_command +191,1436418,"TERMINAL",0,0,"14888",,terminal_output +192,1436535,"train_tokenizer.py",6933,0,"",python,selection_mouse +193,1436542,"train_tokenizer.py",6932,0,"",python,selection_command +194,1436691,"train_tokenizer.py",6932,1,")",python,selection_mouse +195,1436885,"train_tokenizer.py",6933,0,"",python,selection_command +196,1436886,"train_tokenizer.py",6931,2,"h)",python,selection_mouse +197,1436886,"train_tokenizer.py",6920,13,"oad(npy_path)",python,selection_mouse +198,1436886,"train_tokenizer.py",6856,77," = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +199,1436887,"train_tokenizer.py",6851,82,"_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +200,1436887,"train_tokenizer.py",6848,85,"npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +201,1436887,"train_tokenizer.py",6846,87," npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +202,1436887,"train_tokenizer.py",6784,149," # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +203,1436887,"train_tokenizer.py",6783,150," # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +204,1436937,"train_tokenizer.py",6782,151," # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +205,1436966,"train_tokenizer.py",6781,152," # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +206,1436998,"train_tokenizer.py",6780,153," # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +207,1437026,"train_tokenizer.py",6744,189," # for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_mouse +208,1437497,"TERMINAL",0,0,"25999",,terminal_output +209,1438229,"train_tokenizer.py",6854,0,"",python,selection_mouse +210,1438521,"TERMINAL",0,0,"369:003030",,terminal_output +211,1439555,"TERMINAL",0,0,"47111",,terminal_output +212,1440680,"TERMINAL",0,0,"59333",,terminal_output 
+213,1441625,"TERMINAL",0,0,"720444",,terminal_output +214,1442699,"train_tokenizer.py",6864,0,"",python,selection_mouse +215,1442710,"TERMINAL",0,0,"81555",,terminal_output +216,1443718,"TERMINAL",0,0,"92666",,terminal_output +217,1443956,"train_tokenizer.py",6840,58," npy_path = ""overfit_dir/single_batch_12_elems.npy""",python,selection_command +218,1444141,"train_tokenizer.py",6840,93," npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)",python,selection_command +219,1444289,"train_tokenizer.py",6840,138," npy_path = ""overfit_dir/single_batch_12_elems.npy""\n videos = np.load(npy_path)\n print(""batch shape: "", videos.shape)",python,selection_command +220,1444730,"train_tokenizer.py",6848,0,"",python,selection_command +221,1444772,"TERMINAL",0,0,"503777",,terminal_output +222,1445596,"train_tokenizer.py",6942,0,"#",python,content +223,1445597,"train_tokenizer.py",6907,0,"#",python,content +224,1445597,"train_tokenizer.py",6848,0,"#",python,content +225,1445601,"train_tokenizer.py",6849,0,"",python,selection_keyboard +226,1445684,"train_tokenizer.py",6945,0," ",python,content +227,1445684,"train_tokenizer.py",6909,0," ",python,content +228,1445684,"train_tokenizer.py",6849,0," ",python,content +229,1445686,"train_tokenizer.py",6850,0,"",python,selection_keyboard +230,1445823,"TERMINAL",0,0,"14888",,terminal_output +231,1446076,"train_tokenizer.py",6849,0,"",python,selection_command +232,1446223,"train_tokenizer.py",6789,0,"",python,selection_command +233,1446307,"train_tokenizer.py",6753,0,"",python,selection_command +234,1446684,"train_tokenizer.py",6752,0,"",python,selection_command +235,1446960,"train_tokenizer.py",6752,1,"",python,content +236,1446962,"TERMINAL",0,0,"25999",,terminal_output +237,1447304,"train_tokenizer.py",6752,1,"",python,content +238,1447801,"train_tokenizer.py",6786,0,"",python,selection_command +239,1447905,"TERMINAL",0,0,"36104040",,terminal_output +240,1447946,"train_tokenizer.py",6846,0,"",python,selection_command +241,1448094,"train_tokenizer.py",6907,0,"",python,selection_command +242,1448252,"train_tokenizer.py",6944,0,"",python,selection_command +243,1448409,"train_tokenizer.py",6991,0,"",python,selection_command +244,1448975,"TERMINAL",0,0,"47111",,terminal_output +245,1449809,"train_tokenizer.py",6991,0,"#",python,content +246,1449811,"train_tokenizer.py",6992,0,"",python,selection_keyboard +247,1449837,"train_tokenizer.py",6992,0," ",python,content +248,1449838,"train_tokenizer.py",6993,0,"",python,selection_keyboard +249,1449976,"TERMINAL",0,0,"58222",,terminal_output +250,1450220,"train_tokenizer.py",6992,0,"",python,selection_command +251,1450879,"train_tokenizer.py",6945,0,"",python,selection_command +252,1451024,"TERMINAL",0,0,"69333",,terminal_output +253,1451048,"train_tokenizer.py",6908,0,"",python,selection_command +254,1451364,"train_tokenizer.py",6847,0,"",python,selection_command +255,1451651,"train_tokenizer.py",6908,0,"",python,selection_command +256,1451881,"train_tokenizer.py",6945,0,"",python,selection_command +257,1452075,"train_tokenizer.py",6908,0,"",python,selection_command +258,1452091,"TERMINAL",0,0,"730444",,terminal_output +259,1452195,"train_tokenizer.py",6847,0,"",python,selection_command +260,1452325,"train_tokenizer.py",6787,0,"",python,selection_command +261,1452832,"train_tokenizer.py",6753,0,"",python,selection_command +262,1453109,"TERMINAL",0,0,"81555",,terminal_output +263,1453946,"train_tokenizer.py",6759,0,"",python,selection_mouse 
+264,1454092,"train_tokenizer.py",6756,6,"videos",python,selection_mouse +265,1454158,"TERMINAL",0,0,"92666",,terminal_output +266,1454933,"train_tokenizer.py",7004,0,"",python,selection_mouse +267,1455212,"TERMINAL",0,0,"50:003777",,terminal_output +268,1455431,"train_tokenizer.py",7035,0,"",python,selection_mouse +269,1455746,"TERMINAL",0,0,"====================================================================================================\r\nFrame 8\r\n====================================================================================================\r\n",,terminal_output +270,1455967,"train_tokenizer.py",7038,0,"",python,selection_mouse +271,1455983,"train_tokenizer.py",7037,0,"",python,selection_command +272,1456268,"TERMINAL",0,0,"14888",,terminal_output +273,1456403,"TERMINAL",0,0,"2025-07-01 18:50:01.840178: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +274,1456753,"train_tokenizer.py",7038,0,"\n ",python,content +275,1457334,"TERMINAL",0,0,"25999",,terminal_output +276,1458362,"TERMINAL",0,0,"36205050",,terminal_output +277,1459399,"TERMINAL",0,0,"47111",,terminal_output +278,1460444,"TERMINAL",0,0,"58222",,terminal_output +279,1461577,"TERMINAL",0,0,"69333",,terminal_output +280,1462534,"TERMINAL",0,0,"740444",,terminal_output +281,1463574,"TERMINAL",0,0,"82666",,terminal_output +282,1464634,"TERMINAL",0,0,"103777",,terminal_output +283,1465673,"TERMINAL",0,0,"14888",,terminal_output +284,1466725,"TERMINAL",0,0,"25999",,terminal_output +285,1467761,"TERMINAL",0,0,"36302:00:002:00:00",,terminal_output +286,1468889,"TERMINAL",0,0,"47111",,terminal_output +287,1469905,"TERMINAL",0,0,"58222",,terminal_output +288,1470843,"train_tokenizer.py",7039,0,"",python,selection_command +289,1470903,"TERMINAL",0,0,"69333",,terminal_output +290,1471135,"TERMINAL",0,0,"2025-07-01 18:50:16.575173: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +291,1471951,"TERMINAL",0,0,"750444",,terminal_output +292,1472720,"train_tokenizer.py",7039,0," np.save(f""train_tokenizer_batch_{step}.npy"", np.array(videos))\n",python,content +293,1472725,"train_tokenizer.py",7114,13,"",python,content +294,1473093,"TERMINAL",0,0,"81555",,terminal_output +295,1474053,"TERMINAL",0,0,"92666",,terminal_output +296,1475091,"TERMINAL",0,0,"203777",,terminal_output +297,1475458,"train_tokenizer.py",7087,0,"",python,selection_mouse +298,1475621,"train_tokenizer.py",7084,4,"step",python,selection_mouse +299,1476210,"TERMINAL",0,0,"14888",,terminal_output +300,1477224,"TERMINAL",0,0,"25999",,terminal_output +301,1478293,"TERMINAL",0,0,"36401010",,terminal_output +302,1479270,"TERMINAL",0,0,"47111",,terminal_output +303,1479443,"train_tokenizer.py",7073,0,"",python,selection_mouse +304,1479570,"train_tokenizer.py",7061,22,"train_tokenizer_batch_",python,selection_mouse +305,1480275,"train_tokenizer.py",7082,0,"",python,selection_mouse +306,1480375,"TERMINAL",0,0,"58222",,terminal_output +307,1480707,"train_tokenizer.py",7067,0,"",python,selection_mouse +308,1481427,"TERMINAL",0,0,"69333",,terminal_output +309,1481652,"train_tokenizer.py",7152,0,"",python,selection_mouse +310,1482451,"TERMINAL",0,0,"710:00444",,terminal_output +311,1482678,"train_tokenizer.py",7077,0,"",python,selection_mouse +312,1483578,"TERMINAL",0,0,"81555",,terminal_output +313,1484018,"train_tokenizer.py",7077,1,"",python,content +314,1484154,"train_tokenizer.py",7077,1,"",python,content +315,1484274,"train_tokenizer.py",7077,1,"",python,content +316,1484403,"train_tokenizer.py",7077,1,"",python,content +317,1484523,"TERMINAL",0,0,"92666",,terminal_output +318,1484572,"train_tokenizer.py",7077,1,"",python,content +319,1484806,"train_tokenizer.py",7077,0,"s",python,content +320,1484807,"train_tokenizer.py",7078,0,"",python,selection_keyboard +321,1484956,"train_tokenizer.py",7078,0,"a",python,content +322,1484957,"train_tokenizer.py",7079,0,"",python,selection_keyboard +323,1485309,"train_tokenizer.py",7079,0,"m",python,content +324,1485311,"train_tokenizer.py",7080,0,"",python,selection_keyboard +325,1485458,"train_tokenizer.py",7080,0,"p",python,content +326,1485460,"train_tokenizer.py",7081,0,"",python,selection_keyboard +327,1485514,"train_tokenizer.py",7081,0,"l",python,content +328,1485515,"train_tokenizer.py",7082,0,"",python,selection_keyboard +329,1485628,"train_tokenizer.py",7082,0,"e",python,content +330,1485630,"train_tokenizer.py",7083,0,"",python,selection_keyboard +331,1485630,"TERMINAL",0,0,"304888",,terminal_output +332,1486598,"train_tokenizer.py",7092,0,"",python,selection_mouse +333,1486630,"TERMINAL",0,0,"25999",,terminal_output +334,1487673,"TERMINAL",0,0,"36502020",,terminal_output +335,1488532,"train_tokenizer.py",7075,0,"",python,selection_mouse +336,1488686,"TERMINAL",0,0,"47111",,terminal_output +337,1489727,"TERMINAL",0,0,"58222",,terminal_output +338,1490771,"TERMINAL",0,0,"69333",,terminal_output +339,1491873,"TERMINAL",0,0,"710444",,terminal_output +340,1492898,"TERMINAL",0,0,"81555",,terminal_output +341,1493101,"train_tokenizer.py",7143,0,"",python,selection_mouse +342,1493881,"train_tokenizer.py",7493,0,"",python,selection_mouse +343,1493935,"TERMINAL",0,0,"92666",,terminal_output +344,1495052,"TERMINAL",0,0,"403777",,terminal_output 
+345,1495360,"TERMINAL",0,0,"====================================================================================================\r\nFrame 9\r\n====================================================================================================\r\n",,terminal_output +346,1496003,"TERMINAL",0,0,"14888",,terminal_output +347,1496017,"TERMINAL",0,0,"2025-07-01 18:50:41.455679: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +348,1496401,"train_tokenizer.py",7492,0,"",python,selection_command +349,1497096,"TERMINAL",0,0,"25999",,terminal_output +350,1497742,"train_tokenizer.py",7493,0,"",python,selection_command +351,1498119,"TERMINAL",0,0,"3610:003030",,terminal_output +352,1498227,"train_tokenizer.py",7494,0,"",python,selection_command +353,1498260,"train_tokenizer.py",7495,0,"",python,selection_command +354,1498294,"train_tokenizer.py",7496,0,"",python,selection_command +355,1498319,"train_tokenizer.py",7497,0,"",python,selection_command +356,1498360,"train_tokenizer.py",7498,0,"",python,selection_command +357,1498425,"train_tokenizer.py",7499,0,"",python,selection_command +358,1498567,"train_tokenizer.py",7500,0,"",python,selection_command +359,1498568,"train_tokenizer.py",7501,0,"",python,selection_command +360,1498626,"train_tokenizer.py",7502,0,"",python,selection_command +361,1498627,"train_tokenizer.py",7503,0,"",python,selection_command +362,1498628,"train_tokenizer.py",7504,0,"",python,selection_command +363,1498659,"train_tokenizer.py",7505,0,"",python,selection_command +364,1498660,"train_tokenizer.py",7506,0,"",python,selection_command +365,1498661,"train_tokenizer.py",7507,0,"",python,selection_command +366,1498689,"train_tokenizer.py",7508,0,"",python,selection_command +367,1498728,"train_tokenizer.py",7509,0,"",python,selection_command +368,1498743,"train_tokenizer.py",7510,0,"",python,selection_command +369,1498754,"train_tokenizer.py",7511,0,"",python,selection_command +370,1498791,"train_tokenizer.py",7512,0,"",python,selection_command +371,1498840,"train_tokenizer.py",7513,0,"",python,selection_command +372,1498872,"train_tokenizer.py",7514,0,"",python,selection_command +373,1498890,"train_tokenizer.py",7515,0,"",python,selection_command +374,1498906,"train_tokenizer.py",7516,0,"",python,selection_command +375,1498963,"train_tokenizer.py",7517,0,"",python,selection_command +376,1499137,"TERMINAL",0,0,"47111",,terminal_output +377,1500272,"TERMINAL",0,0,"58222",,terminal_output +378,1501234,"TERMINAL",0,0,"69333",,terminal_output +379,1502324,"TERMINAL",0,0,"720444",,terminal_output +380,1503318,"TERMINAL",0,0,"81555",,terminal_output +381,1504365,"TERMINAL",0,0,"92666",,terminal_output +382,1505399,"TERMINAL",0,0,"503777",,terminal_output +383,1506450,"TERMINAL",0,0,"14888",,terminal_output +384,1506582,"train_tokenizer.py",7481,0,"",python,selection_command +385,1507540,"TERMINAL",0,0,"25999",,terminal_output +386,1507760,"train_tokenizer.py",7481,0,"#",python,content +387,1507762,"train_tokenizer.py",7482,0,"",python,selection_keyboard +388,1507799,"train_tokenizer.py",7482,0," ",python,content +389,1507800,"train_tokenizer.py",7483,0,"",python,selection_keyboard +390,1508045,"train_tokenizer.py",7482,0,"",python,selection_command +391,1508449,"train_tokenizer.py",7550,0,"\n ",python,content 
+392,1508543,"TERMINAL",0,0,"36104040",,terminal_output +393,1509005,"train_tokenizer.py",7563,0,"l",python,content +394,1509006,"train_tokenizer.py",7564,0,"",python,selection_keyboard +395,1509623,"TERMINAL",0,0,"48222",,terminal_output +396,1510714,"TERMINAL",0,0,"69333",,terminal_output +397,1511377,"TERMINAL",0,0,"2025-07-01 18:50:56.789385: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +398,1511690,"TERMINAL",0,0,"730444",,terminal_output +399,1512715,"TERMINAL",0,0,"81555",,terminal_output +400,1513757,"TERMINAL",0,0,"92666",,terminal_output +401,1513890,"train_tokenizer.py",7564,0,"o",python,content +402,1513892,"train_tokenizer.py",7565,0,"",python,selection_keyboard +403,1514023,"train_tokenizer.py",7565,0,"s",python,content +404,1514025,"train_tokenizer.py",7566,0,"",python,selection_keyboard +405,1514191,"train_tokenizer.py",7566,0,"s",python,content +406,1514194,"train_tokenizer.py",7567,0,"",python,selection_keyboard +407,1514461,"train_tokenizer.py",7567,0," ",python,content +408,1514463,"train_tokenizer.py",7568,0,"",python,selection_keyboard +409,1514645,"train_tokenizer.py",7568,0,"=",python,content +410,1514647,"train_tokenizer.py",7569,0,"",python,selection_keyboard +411,1514746,"train_tokenizer.py",7569,0," ",python,content +412,1514747,"train_tokenizer.py",7570,0,"",python,selection_keyboard +413,1514830,"TERMINAL",0,0,"1:003777",,terminal_output +414,1515747,"train_tokenizer.py",7569,1,"",python,content +415,1515860,"train_tokenizer.py",7568,1,"",python,content +416,1515877,"TERMINAL",0,0,"14888",,terminal_output +417,1515985,"train_tokenizer.py",7567,1,"",python,content +418,1516625,"train_tokenizer.py",7567,0,",",python,content +419,1516626,"train_tokenizer.py",7568,0,"",python,selection_keyboard +420,1516739,"train_tokenizer.py",7568,0," ",python,content +421,1516740,"train_tokenizer.py",7569,0,"",python,selection_keyboard +422,1516891,"TERMINAL",0,0,"25999",,terminal_output +423,1517944,"TERMINAL",0,0,"36205050",,terminal_output +424,1518970,"TERMINAL",0,0,"47111",,terminal_output +425,1520033,"TERMINAL",0,0,"58222",,terminal_output +426,1521160,"TERMINAL",0,0,"69333",,terminal_output +427,1522182,"TERMINAL",0,0,"740444",,terminal_output +428,1523211,"TERMINAL",0,0,"81555",,terminal_output +429,1523245,"train_tokenizer.py",7567,2,"",python,content +430,1523577,"train_tokenizer.py",7563,4,"",python,content +431,1524230,"TERMINAL",0,0,"92666",,terminal_output +432,1524380,"train_tokenizer.py",7559,4,"",python,content +433,1524610,"train_tokenizer.py",7551,8,"",python,content +434,1525270,"TERMINAL",0,0,"103777",,terminal_output +435,1525332,"train_tokenizer.py",7550,1,"",python,content +436,1526168,"train_tokenizer.py",7549,0,"",python,selection_command +437,1526307,"TERMINAL",0,0,"14888",,terminal_output +438,1526439,"train_tokenizer.py",7597,0,"",python,selection_command +439,1526747,"train_tokenizer.py",7549,0,"",python,selection_command +440,1527076,"train_tokenizer.py",7481,0,"",python,selection_command +441,1527407,"TERMINAL",0,0,"25999",,terminal_output +442,1527557,"train_tokenizer.py",7481,1,"",python,content +443,1527693,"train_tokenizer.py",7481,1,"",python,content +444,1527809,"train_tokenizer.py",7480,0,"",python,selection_command +445,1528395,"TERMINAL",0,0,"36301:001:00",,terminal_output 
+446,1528795,"train_tokenizer.py",7488,0,"",python,selection_mouse +447,1528941,"train_tokenizer.py",7481,11,"train_state",python,selection_mouse +448,1529535,"TERMINAL",0,0,"47111",,terminal_output +449,1529818,"train_tokenizer.py",7523,0,"",python,selection_mouse +450,1529915,"train_tokenizer.py",7517,10,"train_step",python,selection_mouse +451,1530551,"TERMINAL",0,0,"58222",,terminal_output +452,1531605,"TERMINAL",0,0,"69333",,terminal_output +453,1532558,"train_tokenizer.py",9530,0,"",python,selection_mouse +454,1532581,"train_tokenizer.py",9529,0,"",python,selection_command +455,1532671,"TERMINAL",0,0,"751555",,terminal_output +456,1533655,"TERMINAL",0,0,"92666",,terminal_output +457,1534000,"train_tokenizer.py",9513,17," )",python,selection_command +458,1534220,"train_tokenizer.py",9472,58," save_args=save_args,\n )",python,selection_command +459,1534664,"TERMINAL",0,0,"203777",,terminal_output +460,1534720,"train_tokenizer.py",9446,84," ckpt,\n save_args=save_args,\n )",python,selection_command +461,1534768,"train_tokenizer.py",9358,172," os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +462,1534856,"train_tokenizer.py",9317,213," orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +463,1534857,"train_tokenizer.py",9249,281," save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +464,1534906,"train_tokenizer.py",9174,356," orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +465,1534907,"train_tokenizer.py",9128,402," ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +466,1534953,"train_tokenizer.py",9071,459," if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +467,1535017,"train_tokenizer.py",9025,505," wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +468,1535018,"train_tokenizer.py",8999,531," )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n 
)",python,selection_command +469,1535124,"train_tokenizer.py",8968,562," ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +470,1535125,"train_tokenizer.py",8892,638," np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +471,1535125,"train_tokenizer.py",8837,693," true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +472,1535292,"train_tokenizer.py",8764,766," recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +473,1535293,"train_tokenizer.py",8694,836," image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +474,1535294,"train_tokenizer.py",8651,879," log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +475,1535294,"train_tokenizer.py",8602,928," if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 
0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +476,1535407,"train_tokenizer.py",8525,1005," # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +477,1535408,"train_tokenizer.py",8447,1083," # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +478,1535408,"train_tokenizer.py",8367,1163," # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +479,1535410,"train_tokenizer.py",8345,1185," )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +480,1535756,"train_tokenizer.py",8276,1254," comparison_seq * 255, ""t h w c -> h (t w) c""\n 
)\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +481,1535757,"train_tokenizer.py",8221,1309," comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +482,1535758,"train_tokenizer.py",8139,1391," comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +483,1535758,"train_tokenizer.py",8087,1443," recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = 
orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +484,1536051,"train_tokenizer.py",8038,1492," gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +485,1536051,"train_tokenizer.py",7982,1548," if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +486,1536051,"train_tokenizer.py",7960,1570," )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +487,1536065,"train_tokenizer.py",7934,1596," }\n )\n if step % args.log_image_interval == 0:\n gt_seq = 
inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +488,1536066,"train_tokenizer.py",7895,1635," **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +489,1536066,"train_tokenizer.py",7853,1677," ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +490,1536067,"train_tokenizer.py",7811,1719," ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = 
einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +491,1536067,"train_tokenizer.py",7785,1745," {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +492,1536068,"train_tokenizer.py",7754,1776," wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +493,1536068,"train_tokenizer.py",7675,1855," if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n 
comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +494,1536184,"train_tokenizer.py",7650,1880," if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +495,1536185,"train_tokenizer.py",7620,1910," # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +496,1536185,"train_tokenizer.py",7619,1911,"\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n 
{\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +497,1536185,"train_tokenizer.py",7597,1933," step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +498,1536186,"train_tokenizer.py",7549,1981," print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = 
orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +499,1536186,"train_tokenizer.py",7469,2061," train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +500,1536186,"train_tokenizer.py",7392,2138," inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +501,1536187,"train_tokenizer.py",7391,2139,"\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = 
recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +502,1536187,"train_tokenizer.py",7306,2224," videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +503,1536188,"train_tokenizer.py",7292,2238," )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n 
recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +504,1536188,"train_tokenizer.py",7224,2306," mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +505,1536268,"train_tokenizer.py",7179,2351," videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = 
orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +506,1536346,"TERMINAL",0,0,"14888",,terminal_output +507,1536355,"train_tokenizer.py",7178,2352,"\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +508,1536535,"train_tokenizer.py",7115,2415," rng, _rng, _rng_dropout = jax.random.split(rng, 3)\n\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n 
)",python,selection_command +509,1536751,"TERMINAL",0,0,"25999",,terminal_output +510,1536985,"train_tokenizer.py",7178,2352,"\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +511,1537153,"train_tokenizer.py",7179,2351," videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +512,1537404,"train_tokenizer.py",7224,2306," mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = 
dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +513,1537529,"train_tokenizer.py",7179,2351," videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +514,1537700,"train_tokenizer.py",7178,2352,"\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n 
wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +515,1537833,"TERMINAL",0,0,"====================================================================================================\r\nFrame 10\r\n====================================================================================================\r\n",,terminal_output +516,1537833,"TERMINAL",0,0,"36401010",,terminal_output +517,1537857,"train_tokenizer.py",7115,2415," rng, _rng, _rng_dropout = jax.random.split(rng, 3)\n\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[0])),\n recon=wandb.Image(np.asarray(recon_seq[0])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\n ckpt,\n save_args=save_args,\n )",python,selection_command +518,1538465,"TERMINAL",0,0,"2025-07-01 18:51:23.894691: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +519,1538863,"TERMINAL",0,0,"47111",,terminal_output +520,1539547,"train_tokenizer.py",7127,0,"",python,selection_command +521,1539916,"TERMINAL",0,0,"58222",,terminal_output +522,1540628,"train_tokenizer.py",9529,0,"#",python,content +523,1540628,"train_tokenizer.py",9492,0,"#",python,content +524,1540629,"train_tokenizer.py",9466,0,"#",python,content +525,1540629,"train_tokenizer.py",9378,0,"#",python,content +526,1540629,"train_tokenizer.py",9333,0,"#",python,content +527,1540629,"train_tokenizer.py",9265,0,"#",python,content +528,1540629,"train_tokenizer.py",9190,0,"#",python,content +529,1540629,"train_tokenizer.py",9144,0,"#",python,content +530,1540629,"train_tokenizer.py",9083,0,"#",python,content +531,1540629,"train_tokenizer.py",9049,0,"#",python,content +532,1540629,"train_tokenizer.py",9023,0,"#",python,content +533,1540630,"train_tokenizer.py",8996,0,"#",python,content +534,1540630,"train_tokenizer.py",8924,0,"#",python,content +535,1540630,"train_tokenizer.py",8865,0,"#",python,content +536,1540630,"train_tokenizer.py",8792,0,"#",python,content +537,1540630,"train_tokenizer.py",8722,0,"#",python,content +538,1540630,"train_tokenizer.py",8675,0,"#",python,content +539,1540630,"train_tokenizer.py",8622,0,"#",python,content +540,1540630,"train_tokenizer.py",8545,0,"#",python,content +541,1540630,"train_tokenizer.py",8467,0,"#",python,content +542,1540630,"train_tokenizer.py",8387,0,"#",python,content +543,1540630,"train_tokenizer.py",8365,0,"#",python,content +544,1540630,"train_tokenizer.py",8300,0,"#",python,content +545,1540631,"train_tokenizer.py",8241,0,"#",python,content +546,1540631,"train_tokenizer.py",8159,0,"#",python,content +547,1540631,"train_tokenizer.py",8107,0,"#",python,content +548,1540631,"train_tokenizer.py",8058,0,"#",python,content +549,1540631,"train_tokenizer.py",7998,0,"#",python,content +550,1540631,"train_tokenizer.py",7980,0,"#",python,content +551,1540631,"train_tokenizer.py",7958,0,"#",python,content +552,1540631,"train_tokenizer.py",7923,0,"#",python,content +553,1540631,"train_tokenizer.py",7881,0,"#",python,content +554,1540631,"train_tokenizer.py",7839,0,"#",python,content +555,1540631,"train_tokenizer.py",7809,0,"#",python,content +556,1540631,"train_tokenizer.py",7774,0,"#",python,content +557,1540632,"train_tokenizer.py",7691,0,"#",python,content +558,1540632,"train_tokenizer.py",7662,0,"#",python,content +559,1540632,"train_tokenizer.py",7632,0,"#",python,content +560,1540632,"train_tokenizer.py",7609,0,"#",python,content +561,1540632,"train_tokenizer.py",7561,0,"#",python,content +562,1540632,"train_tokenizer.py",7481,0,"#",python,content +563,1540632,"train_tokenizer.py",7404,0,"#",python,content +564,1540632,"train_tokenizer.py",7318,0,"#",python,content +565,1540633,"train_tokenizer.py",7304,0,"#",python,content +566,1540633,"train_tokenizer.py",7240,0,"#",python,content +567,1540633,"train_tokenizer.py",7191,0,"#",python,content +568,1540633,"train_tokenizer.py",7127,0,"#",python,content +569,1540637,"train_tokenizer.py",7128,0,"",python,selection_keyboard +570,1540755,"train_tokenizer.py",9576,0," ",python,content +571,1540756,"train_tokenizer.py",9538,0," ",python,content +572,1540756,"train_tokenizer.py",9511,0," ",python,content +573,1540756,"train_tokenizer.py",9422,0," ",python,content +574,1540756,"train_tokenizer.py",9376,0," ",python,content 
+575,1540756,"train_tokenizer.py",9307,0," ",python,content +576,1540756,"train_tokenizer.py",9231,0," ",python,content +577,1540756,"train_tokenizer.py",9184,0," ",python,content +578,1540756,"train_tokenizer.py",9122,0," ",python,content +579,1540756,"train_tokenizer.py",9087,0," ",python,content +580,1540756,"train_tokenizer.py",9060,0," ",python,content +581,1540756,"train_tokenizer.py",9032,0," ",python,content +582,1540756,"train_tokenizer.py",8959,0," ",python,content +583,1540757,"train_tokenizer.py",8899,0," ",python,content +584,1540757,"train_tokenizer.py",8825,0," ",python,content +585,1540757,"train_tokenizer.py",8754,0," ",python,content +586,1540757,"train_tokenizer.py",8706,0," ",python,content +587,1540757,"train_tokenizer.py",8652,0," ",python,content +588,1540757,"train_tokenizer.py",8574,0," ",python,content +589,1540757,"train_tokenizer.py",8495,0," ",python,content +590,1540757,"train_tokenizer.py",8414,0," ",python,content +591,1540757,"train_tokenizer.py",8391,0," ",python,content +592,1540757,"train_tokenizer.py",8325,0," ",python,content +593,1540757,"train_tokenizer.py",8265,0," ",python,content +594,1540757,"train_tokenizer.py",8182,0," ",python,content +595,1540757,"train_tokenizer.py",8129,0," ",python,content +596,1540757,"train_tokenizer.py",8079,0," ",python,content +597,1540757,"train_tokenizer.py",8018,0," ",python,content +598,1540757,"train_tokenizer.py",7999,0," ",python,content +599,1540758,"train_tokenizer.py",7976,0," ",python,content +600,1540758,"train_tokenizer.py",7940,0," ",python,content +601,1540758,"train_tokenizer.py",7897,0," ",python,content +602,1540758,"train_tokenizer.py",7854,0," ",python,content +603,1540758,"train_tokenizer.py",7823,0," ",python,content +604,1540758,"train_tokenizer.py",7787,0," ",python,content +605,1540758,"train_tokenizer.py",7703,0," ",python,content +606,1540758,"train_tokenizer.py",7673,0," ",python,content +607,1540758,"train_tokenizer.py",7642,0," ",python,content +608,1540758,"train_tokenizer.py",7618,0," ",python,content +609,1540758,"train_tokenizer.py",7569,0," ",python,content +610,1540758,"train_tokenizer.py",7488,0," ",python,content +611,1540758,"train_tokenizer.py",7410,0," ",python,content +612,1540758,"train_tokenizer.py",7323,0," ",python,content +613,1540758,"train_tokenizer.py",7308,0," ",python,content +614,1540758,"train_tokenizer.py",7243,0," ",python,content +615,1540758,"train_tokenizer.py",7193,0," ",python,content +616,1540758,"train_tokenizer.py",7128,0," ",python,content +617,1540759,"train_tokenizer.py",7129,0,"",python,selection_keyboard +618,1540961,"TERMINAL",0,0,"69333",,terminal_output +619,1541320,"train_tokenizer.py",7128,0,"",python,selection_command +620,1542001,"TERMINAL",0,0,"71:00444",,terminal_output +621,1543027,"TERMINAL",0,0,"81555",,terminal_output +622,1544098,"TERMINAL",0,0,"92666",,terminal_output +623,1545157,"TERMINAL",0,0,"303777",,terminal_output +624,1546234,"TERMINAL",0,0,"14888",,terminal_output +625,1547183,"TERMINAL",0,0,"25999",,terminal_output +626,1548224,"TERMINAL",0,0,"36502020",,terminal_output +627,1548691,"train_tokenizer.py",9620,2,"",python,content +628,1548709,"train_tokenizer.py",9581,2,"",python,content +629,1548723,"train_tokenizer.py",9554,2,"",python,content +630,1548724,"train_tokenizer.py",9464,2,"",python,content +631,1548739,"train_tokenizer.py",9416,2,"",python,content +632,1548764,"train_tokenizer.py",9347,2,"",python,content +633,1548790,"train_tokenizer.py",9270,2,"",python,content 
+634,1548813,"train_tokenizer.py",9221,2,"",python,content +635,1548814,"train_tokenizer.py",9159,2,"",python,content +636,1548832,"train_tokenizer.py",9123,2,"",python,content +637,1548833,"train_tokenizer.py",9094,2,"",python,content +638,1548853,"train_tokenizer.py",9066,2,"",python,content +639,1548854,"train_tokenizer.py",8992,2,"",python,content +640,1548868,"train_tokenizer.py",8930,2,"",python,content +641,1548905,"train_tokenizer.py",8856,2,"",python,content +642,1548934,"train_tokenizer.py",8784,2,"",python,content +643,1548935,"train_tokenizer.py",8734,2,"",python,content +644,1548936,"train_tokenizer.py",8680,2,"",python,content +645,1548949,"train_tokenizer.py",8603,2,"",python,content +646,1548963,"train_tokenizer.py",8520,2,"",python,content +647,1548964,"train_tokenizer.py",8441,2,"",python,content +648,1548964,"train_tokenizer.py",8415,2,"",python,content +649,1548976,"train_tokenizer.py",8347,2,"",python,content +650,1548977,"train_tokenizer.py",8287,2,"",python,content +651,1548979,"train_tokenizer.py",8202,2,"",python,content +652,1548980,"train_tokenizer.py",8149,2,"",python,content +653,1548998,"train_tokenizer.py",8097,2,"",python,content +654,1549011,"train_tokenizer.py",8035,2,"",python,content +655,1549012,"train_tokenizer.py",8016,2,"",python,content +656,1549012,"train_tokenizer.py",7991,2,"",python,content +657,1549013,"train_tokenizer.py",7955,2,"",python,content +658,1549031,"train_tokenizer.py",7911,2,"",python,content +659,1549044,"train_tokenizer.py",7866,2,"",python,content +660,1549044,"train_tokenizer.py",7835,2,"",python,content +661,1549045,"train_tokenizer.py",7798,2,"",python,content +662,1549056,"train_tokenizer.py",7712,2,"",python,content +663,1549076,"train_tokenizer.py",7682,2,"",python,content +664,1549089,"train_tokenizer.py",7652,2,"",python,content +665,1549090,"train_tokenizer.py",7624,2,"",python,content +666,1549091,"train_tokenizer.py",7575,2,"",python,content +667,1549105,"train_tokenizer.py",7493,2,"",python,content +668,1549118,"train_tokenizer.py",7413,2,"",python,content +669,1549144,"train_tokenizer.py",7325,2,"",python,content +670,1549160,"train_tokenizer.py",7310,2,"",python,content +671,1549174,"train_tokenizer.py",7243,2,"",python,content +672,1549187,"train_tokenizer.py",7193,2,"",python,content +673,1549188,"train_tokenizer.py",7127,2,"",python,content +674,1549291,"TERMINAL",0,0,"47111",,terminal_output +675,1550040,"train_tokenizer.py",7051,0,"",python,selection_command +676,1550323,"train_tokenizer.py",7114,0,"\n ",python,content +677,1550367,"TERMINAL",0,0,"58222",,terminal_output +678,1550830,"train_tokenizer.py",7127,0,"c",python,content +679,1550832,"train_tokenizer.py",7128,0,"",python,selection_keyboard +680,1550913,"train_tokenizer.py",7128,0,"o",python,content +681,1550914,"train_tokenizer.py",7129,0,"",python,selection_keyboard +682,1551082,"train_tokenizer.py",7129,0,"n",python,content +683,1551083,"train_tokenizer.py",7130,0,"",python,selection_keyboard +684,1551226,"train_tokenizer.py",7130,0,"t",python,content +685,1551228,"train_tokenizer.py",7131,0,"",python,selection_keyboard +686,1551352,"train_tokenizer.py",7131,0,"i",python,content +687,1551355,"train_tokenizer.py",7132,0,"",python,selection_keyboard +688,1551423,"TERMINAL",0,0,"69333",,terminal_output +689,1551526,"train_tokenizer.py",7132,0,"n",python,content +690,1551528,"train_tokenizer.py",7133,0,"",python,selection_keyboard +691,1551626,"train_tokenizer.py",7133,0,"u",python,content 
+692,1551627,"train_tokenizer.py",7134,0,"",python,selection_keyboard +693,1551727,"train_tokenizer.py",7134,0,"e",python,content +694,1551729,"train_tokenizer.py",7135,0,"",python,selection_keyboard +695,1552450,"TERMINAL",0,0,"710444",,terminal_output +696,1553518,"TERMINAL",0,0,"81555",,terminal_output +697,1553913,"train_tokenizer.py",7005,0,"",python,selection_mouse +698,1553998,"TERMINAL",0,0,"2025-07-01 18:51:39.427346: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +699,1554543,"TERMINAL",0,0,"92666",,terminal_output +700,1554580,"train_tokenizer.py",7374,0,"",python,selection_mouse +701,1555546,"TERMINAL",0,0,"403777",,terminal_output +702,1556705,"TERMINAL",0,0,"15999",,terminal_output +703,1557718,"TERMINAL",0,0,"361:003030",,terminal_output +704,1558287,"train_tokenizer.py",7127,0,"",python,selection_mouse +705,1558684,"TERMINAL",0,0,"47111",,terminal_output +706,1558806,"train_tokenizer.py",7135,0,"",python,selection_mouse +707,1558951,"train_tokenizer.py",7127,8,"continue",python,selection_mouse +708,1559714,"TERMINAL",0,0,"58222",,terminal_output +709,1560758,"TERMINAL",0,0,"69333",,terminal_output +710,1561022,"train_tokenizer.py",7135,0,"",python,selection_mouse +711,1561799,"TERMINAL",0,0,"720444",,terminal_output +712,1561962,"train_tokenizer.py",7114,0,"",python,selection_mouse +713,1562852,"TERMINAL",0,0,"81555",,terminal_output +714,1562986,"train_tokenizer.py",7114,0,"\n ",python,content +715,1563903,"TERMINAL",0,0,"92666",,terminal_output +716,1564001,"train_tokenizer.py",7127,0,"i",python,content +717,1564002,"train_tokenizer.py",7128,0,"",python,selection_keyboard +718,1564106,"train_tokenizer.py",7128,0,"f",python,content +719,1564107,"train_tokenizer.py",7129,0,"",python,selection_keyboard +720,1564950,"TERMINAL",0,0,"503777",,terminal_output +721,1566009,"TERMINAL",0,0,"14888",,terminal_output +722,1567146,"TERMINAL",0,0,"25999",,terminal_output +723,1568161,"TERMINAL",0,0,"36104040",,terminal_output +724,1569187,"TERMINAL",0,0,"47111",,terminal_output +725,1570165,"TERMINAL",0,0,"58222",,terminal_output +726,1571233,"TERMINAL",0,0,"69333",,terminal_output +727,1572323,"TERMINAL",0,0,"730444",,terminal_output +728,1573535,"TERMINAL",0,0,"81555",,terminal_output +729,1574969,"TERMINAL",0,0,"92666",,terminal_output +730,1575455,"TERMINAL",0,0,"2:003777",,terminal_output +731,1576878,"train_tokenizer.py",7129,0," ",python,content +732,1576880,"train_tokenizer.py",7130,0,"",python,selection_keyboard +733,1576882,"TERMINAL",0,0,"14888",,terminal_output +734,1577449,"TERMINAL",0,0,"25999",,terminal_output +735,1578480,"TERMINAL",0,0,"36205050",,terminal_output +736,1578654,"train_tokenizer.py",7130,0,"s",python,content +737,1578655,"train_tokenizer.py",7131,0,"",python,selection_keyboard +738,1578891,"train_tokenizer.py",7131,0,"t",python,content +739,1578893,"train_tokenizer.py",7132,0,"",python,selection_keyboard +740,1579540,"TERMINAL",0,0,"47111",,terminal_output +741,1579579,"train_tokenizer.py",7132,0,"e",python,content +742,1579581,"train_tokenizer.py",7133,0,"",python,selection_keyboard +743,1579654,"train_tokenizer.py",7133,0,"p",python,content +744,1579655,"train_tokenizer.py",7134,0,"",python,selection_keyboard +745,1579783,"train_tokenizer.py",7134,0," ",python,content 
+746,1579784,"train_tokenizer.py",7135,0,"",python,selection_keyboard +747,1580008,"train_tokenizer.py",7135,0,"<",python,content +748,1580010,"train_tokenizer.py",7136,0,"",python,selection_keyboard +749,1580566,"TERMINAL",0,0,"59333",,terminal_output +750,1580633,"train_tokenizer.py",7136,0," ",python,content +751,1580635,"train_tokenizer.py",7137,0,"",python,selection_keyboard +752,1581111,"train_tokenizer.py",7137,0,"5",python,content +753,1581112,"train_tokenizer.py",7138,0,"",python,selection_keyboard +754,1581198,"train_tokenizer.py",7138,0,"0",python,content +755,1581199,"train_tokenizer.py",7139,0,"",python,selection_keyboard +756,1581342,"train_tokenizer.py",7139,0,"0",python,content +757,1581343,"train_tokenizer.py",7140,0,"",python,selection_keyboard +758,1581482,"train_tokenizer.py",7140,0,"0",python,content +759,1581482,"train_tokenizer.py",7141,0,"",python,selection_keyboard +760,1581623,"TERMINAL",0,0,"740444",,terminal_output +761,1582314,"TERMINAL",0,0,"====================================================================================================\r\nFrame 11\r\n====================================================================================================\r\n",,terminal_output +762,1582456,"train_tokenizer.py",7141,0,":",python,content +763,1582456,"train_tokenizer.py",7142,0,"",python,selection_keyboard +764,1582667,"TERMINAL",0,0,"81555",,terminal_output +765,1582714,"train_tokenizer.py",7142,0,"\n ",python,content +766,1583082,"TERMINAL",0,0,"2025-07-01 18:52:08.486191: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +767,1583706,"TERMINAL",0,0,"92666",,terminal_output +768,1584690,"train_tokenizer.py",7159,0,"b",python,content +769,1584692,"train_tokenizer.py",7160,0,"",python,selection_keyboard +770,1584729,"train_tokenizer.py",7160,0,"r",python,content +771,1584731,"train_tokenizer.py",7161,0,"",python,selection_keyboard +772,1584808,"TERMINAL",0,0,"103777",,terminal_output +773,1585255,"train_tokenizer.py",7161,0,"e",python,content +774,1585257,"train_tokenizer.py",7162,0,"",python,selection_keyboard +775,1585430,"train_tokenizer.py",7162,0,"a",python,content +776,1585432,"train_tokenizer.py",7163,0,"",python,selection_keyboard +777,1585797,"TERMINAL",0,0,"14888",,terminal_output +778,1585853,"train_tokenizer.py",7163,0,"k",python,content +779,1585855,"train_tokenizer.py",7164,0,"",python,selection_keyboard +780,1586894,"TERMINAL",0,0,"25999",,terminal_output +781,1587893,"TERMINAL",0,0,"36302:002:00",,terminal_output +782,1588930,"TERMINAL",0,0,"47111",,terminal_output +783,1589481,"train_tokenizer.py",7142,0,"",python,selection_mouse +784,1589978,"TERMINAL",0,0,"58222",,terminal_output +785,1590369,"train_tokenizer.py",7137,0,"",python,selection_mouse +786,1590516,"train_tokenizer.py",7137,4,"5000",python,selection_mouse +787,1591286,"TERMINAL",0,0,"69333",,terminal_output +788,1592091,"TERMINAL",0,0,"750444",,terminal_output +789,1592272,"train_tokenizer.py",7164,0,"",python,selection_mouse +790,1593113,"TERMINAL",0,0,"81555",,terminal_output +791,1593891,"train_tokenizer.py",7038,0,"",python,selection_mouse +792,1594158,"TERMINAL",0,0,"92666",,terminal_output +793,1595060,"train_tokenizer.py",7164,0,"",python,selection_mouse +794,1595203,"TERMINAL",0,0,"203777",,terminal_output 
+795,1596254,"TERMINAL",0,0,"14888",,terminal_output +796,1597304,"TERMINAL",0,0,"25999",,terminal_output +797,1598339,"TERMINAL",0,0,"36401010",,terminal_output +798,1598591,"TERMINAL",0,0,"watch",,terminal_focus +799,1599023,"TERMINAL",0,0,"2025-07-01 18:52:24.447211: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +800,1599371,"TERMINAL",0,0,"47111",,terminal_output +801,1600420,"TERMINAL",0,0,"58222",,terminal_output +802,1600761,"TERMINAL",0,0,"srun",,terminal_focus +803,1601451,"TERMINAL",0,0,"69333",,terminal_output +804,1602573,"TERMINAL",0,0,"72:00444",,terminal_output +805,1603591,"TERMINAL",0,0,"81555",,terminal_output +806,1604616,"TERMINAL",0,0,"93777",,terminal_output +807,1605744,"TERMINAL",0,0,"314888",,terminal_output +808,1606686,"TERMINAL",0,0,"25999",,terminal_output +809,1607792,"TERMINAL",0,0,"36502020",,terminal_output +810,1608463,"TERMINAL",0,0,"watch",,terminal_focus +811,1608782,"TERMINAL",0,0,"47111",,terminal_output +812,1609832,"TERMINAL",0,0,"58222",,terminal_output +813,1610094,"train_tokenizer.py",0,0,"",python,tab +814,1610870,"TERMINAL",0,0,"69333",,terminal_output +815,1611824,"train_tokenizer.py",7572,0,"",python,selection_mouse +816,1611937,"TERMINAL",0,0,"710444",,terminal_output +817,1612730,"train_tokenizer.py",7030,0,"",python,selection_mouse +818,1612862,"train_tokenizer.py",7030,4,"step",python,selection_mouse +819,1612997,"TERMINAL",0,0,"81555",,terminal_output +820,1613347,"train_tokenizer.py",7067,0,"",python,selection_mouse +821,1613474,"train_tokenizer.py",7061,23,"train_tokenizer_sample_",python,selection_mouse +822,1614035,"TERMINAL",0,0,"watch",,terminal_focus +823,1614109,"TERMINAL",0,0,"92666",,terminal_output +824,1615164,"TERMINAL",0,0,"403777",,terminal_output +825,1615911,"train_tokenizer.py",7142,0,"",python,selection_mouse +826,1616195,"TERMINAL",0,0,"14888",,terminal_output +827,1616472,"train_tokenizer.py",7066,0,"",python,selection_mouse +828,1617230,"train_tokenizer.py",7069,0,"",python,selection_mouse +829,1617254,"TERMINAL",0,0,"25999",,terminal_output +830,1617387,"train_tokenizer.py",7061,23,"train_tokenizer_sample_",python,selection_mouse +831,1617559,"train_tokenizer.py",7039,76," np.save(f""train_tokenizer_sample_{step}.npy"", np.array(videos))\n",python,selection_mouse +832,1618313,"train_tokenizer.py",7066,0,"",python,selection_mouse +833,1618314,"train_tokenizer.py",7061,23,"train_tokenizer_sample_",python,selection_mouse +834,1618330,"TERMINAL",0,0,"362:003030",,terminal_output +835,1618525,"train_tokenizer.py",7061,28,"train_tokenizer_sample_{step",python,selection_mouse +836,1618549,"train_tokenizer.py",7038,46,"\n np.save(f""train_tokenizer_sample_",python,selection_mouse +837,1618836,"train_tokenizer.py",7061,33,"train_tokenizer_sample_{step}.npy",python,selection_mouse +838,1619362,"TERMINAL",0,0,"47111",,terminal_output +839,1620426,"TERMINAL",0,0,"58222",,terminal_output +840,1621424,"TERMINAL",0,0,"69333",,terminal_output +841,1622178,"train_tokenizer.py",7075,0,"",python,selection_mouse +842,1622179,"train_tokenizer.py",7061,23,"train_tokenizer_sample_",python,selection_mouse +843,1622318,"train_tokenizer.py",7039,76," np.save(f""train_tokenizer_sample_{step}.npy"", np.array(videos))\n",python,selection_mouse 
+844,1622482,"TERMINAL",0,0,"720444",,terminal_output +845,1623146,"train_tokenizer.py",7051,0,"",python,selection_command +846,1623519,"TERMINAL",0,0,"81555",,terminal_output +847,1624586,"TERMINAL",0,0,"93777",,terminal_output +848,1625712,"TERMINAL",0,0,"514888",,terminal_output +849,1626658,"TERMINAL",0,0,"25999",,terminal_output +850,1627762,"TERMINAL",0,0,"36104040",,terminal_output +851,1627762,"TERMINAL",0,0,"====================================================================================================\r\nFrame 12\r\n====================================================================================================\r\n",,terminal_output +852,1628465,"TERMINAL",0,0,"2025-07-01 18:52:53.882314: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +853,1628787,"TERMINAL",0,0,"47111",,terminal_output +854,1629860,"TERMINAL",0,0,"58222",,terminal_output +855,1630821,"TERMINAL",0,0,"69333",,terminal_output +856,1631868,"TERMINAL",0,0,"730444",,terminal_output +857,1632916,"TERMINAL",0,0,"81555",,terminal_output +858,1633961,"TERMINAL",0,0,"92666",,terminal_output +859,1635050,"TERMINAL",0,0,"3:003777",,terminal_output +860,1636055,"TERMINAL",0,0,"14888",,terminal_output +861,1637093,"TERMINAL",0,0,"25999",,terminal_output +862,1638153,"TERMINAL",0,0,"36205050",,terminal_output +863,1639232,"TERMINAL",0,0,"47111",,terminal_output +864,1640251,"TERMINAL",0,0,"58222",,terminal_output +865,1641378,"TERMINAL",0,0,"69333",,terminal_output +866,1642403,"TERMINAL",0,0,"740444",,terminal_output +867,1643377,"TERMINAL",0,0,"81555",,terminal_output +868,1644423,"TERMINAL",0,0,"92666",,terminal_output +869,1644661,"TERMINAL",0,0,"2025-07-01 18:53:10.086566: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +870,1645459,"TERMINAL",0,0,"103777",,terminal_output +871,1646601,"TERMINAL",0,0,"14888",,terminal_output +872,1647624,"TERMINAL",0,0,"25999",,terminal_output +873,1648648,"TERMINAL",0,0,"37313:013:01",,terminal_output +874,1649689,"TERMINAL",0,0,"58222",,terminal_output +875,1650693,"TERMINAL",0,0,"69333",,terminal_output +876,1651721,"TERMINAL",0,0,"750444",,terminal_output +877,1652744,"TERMINAL",0,0,"81555",,terminal_output +878,1653788,"TERMINAL",0,0,"92666",,terminal_output +879,1654827,"TERMINAL",0,0,"203777",,terminal_output +880,1655919,"TERMINAL",0,0,"14888",,terminal_output +881,1656945,"TERMINAL",0,0,"25999",,terminal_output +882,1658069,"TERMINAL",0,0,"36401010",,terminal_output +883,1659020,"TERMINAL",0,0,"47111",,terminal_output +884,1660119,"TERMINAL",0,0,"58222",,terminal_output +885,1661139,"TERMINAL",0,0,"69333",,terminal_output +886,1662161,"TERMINAL",0,0,"73:00444",,terminal_output +887,1663211,"TERMINAL",0,0,"81555",,terminal_output +888,1664260,"TERMINAL",0,0,"92666",,terminal_output +889,1665369,"TERMINAL",0,0,"303777",,terminal_output +890,1666468,"TERMINAL",0,0,"14888",,terminal_output +891,1667501,"TERMINAL",0,0,"25999",,terminal_output +892,1668424,"TERMINAL",0,0,"36502020",,terminal_output +893,1669474,"TERMINAL",0,0,"47111",,terminal_output +894,1669928,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +895,1671169,"TERMINAL",0,0,"ls",,terminal_command +896,1671213,"TERMINAL",0,0,"]633;E;2025-07-01 18:53:36 ls;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C",,terminal_output +897,1671374,"TERMINAL",0,0,"data frames genie.py overfit_dir requirements.txt slurm-3309772.out wandb\r\ndata_tfrecord_duplicated generate_dataset.py gifs __pycache__ sample.py train_dynamics.py\r\ndata_tfrecords generation_1751373553.4811275.gif LICENSE README.md scripts_cremers train_lam.py\r\nframe-knoms.png generation_1751384259.6501038.gif logs read_tf_record.py scripts_horeka train_tokenizer.py\r\nframe.png generation_1751385872.8038208.gif models requirements-franz.txt slurm utils\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;0",,terminal_output +898,1675071,"TERMINAL",0,0,"====================================================================================================\r\nFrame 13\r\n====================================================================================================\r\n",,terminal_output +899,1690293,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +900,1691282,"TERMINAL",0,0,"cd ..",,terminal_command +901,1691604,"TERMINAL",0,0,"ls",,terminal_command +902,1691643,"TERMINAL",0,0,"]633;E;2025-07-01 18:53:57 ls;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;Ccheckpoints count_items.sh data logs scripts\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared]633;D;0",,terminal_output +903,1694956,"TERMINAL",0,0,"cd data/",,terminal_command +904,1694965,"TERMINAL",0,0,"]633;E;2025-07-01 18:54:00 cd data/;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data]633;D;0",,terminal_output +905,1695476,"TERMINAL",0,0,"ls",,terminal_command +906,1695515,"TERMINAL",0,0,"]633;E;2025-07-01 18:54:00 ls;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C",,terminal_output +907,1695591,"TERMINAL",0,0,"checkpoints knoms_mp4 knoms_tfrecords_2_shards_overfit open_ai_minecraft_first_try 
overfit_dir\r\ncoinrun knoms_mp4_clips knoms_tfrecords_500_shards open_ai_minecraft_first_try_npy procgen_env_16_episodes_20000\r\ndata_knoms knoms_npy knoms_tfrecords_500_shards_overfit_1 open_ai_minecraft_first_try_tfrecord\r\ndummy knoms_tfrecords knoms_tfrecords_500_shards_overfit_10 open_ai_minecraft_npy\r\nknoms_arrayrecords_500_shards knoms_tfrecords_200_shards open_ai_minecraft open_ai_minecraft_tfrecord\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data]633;D;0",,terminal_output +908,1695907,"TERMINAL",0,0,"2025-07-01 18:54:01.327395: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +909,1700262,"TERMINAL",0,0,"ls -la",,terminal_command +910,1711005,"TERMINAL",0,0,"chmod 770 overfit_dir/",,terminal_command +911,1727968,"TERMINAL",0,0,"====================================================================================================\r\nFrame 14\r\n====================================================================================================\r\n",,terminal_output +912,1731000,"TERMINAL",0,0,"cd overfit_dir/single_sample/",,terminal_command +913,1731365,"TERMINAL",0,0,"ls",,terminal_command +914,1732491,"TERMINAL",0,0,"ls",,terminal_command +915,1734264,"TERMINAL",0,0,"pwd",,terminal_command +916,1741207,"train_tokenizer.py",7039,0,"",python,selection_command +917,1742786,"train_tokenizer.py",7039,0," np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n",python,content +918,1742791,"train_tokenizer.py",7202,76,"",python,content +919,1745885,"train_tokenizer.py",7102,0,"",python,selection_mouse +920,1747919,"TERMINAL",0,0,"srun",,terminal_focus +921,1748439,"TERMINAL",0,0,"2025-07-01 18:54:53.870883: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +922,1782016,"TERMINAL",0,0,"====================================================================================================\r\nFrame 15\r\n====================================================================================================\r\n",,terminal_output +923,1803692,"TERMINAL",0,0,"2025-07-01 18:55:49.071536: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +924,1842289,"TERMINAL",0,0,"SSIM: 0.13965484499931335\r\n",,terminal_output +925,1849557,"TERMINAL",0,0,"]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +926,1853467,"TERMINAL",0,0,"bash",,terminal_focus +927,1854013,"TERMINAL",0,0,"srun",,terminal_focus +928,1856071,"TERMINAL",0,0,"bash",,terminal_focus +929,1856726,"TERMINAL",0,0,"ls",,terminal_command +930,1860015,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"#!/usr/bin/env bash\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\n# Set workspace and checkpoint directory (update slurm_job_id as needed)\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared'\n# Replace the following with the actual job id/checkpoint you want to sample from\nslurm_job_id=3301029\n\n# job_name=train_dynamics_minecraft_overfit_sample_tiny\nCHECKPOINT_DIR=$ws_dir/checkpoints/${slurm_job_id}\n\n# Example: If you want to use a specific checkpoint, set it here\n# CHECKPOINT_PATH=$ws_dir/checkpoints/3299272/dynamics-tiny-overfit-big-lr-3299272_50000/\n# Or use the latest in the directory\n# CHECKPOINT_PATH=$(ls -d $CHECKPOINT_DIR/*/ | sort | tail -n 1)\nCHECKPOINT_PATH=$CHECKPOINT_DIR/genie_1751067601_200000/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751301068_2000/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/../checkpoints/3307618/genie_1751322003_15500/\n# CHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3307619/genie_1751322003_200000/\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\npython sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --maskgit_steps=1000 \\n --num_latent_actions=6 \\n --seq_len=16 \\n --start_frame=0\n\n# python sample.py \\n # --checkpoint ""$CHECKPOINT_PATH"" \\n # --data_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/coinrun_episodes\n",shellscript,tab +931,1861191,"scripts_horeka/overfit_sample_tiny/sample.sh",1265,0,"",shellscript,selection_mouse +932,1861346,"scripts_horeka/overfit_sample_tiny/sample.sh",1256,14,"jafa_ws_shared",shellscript,selection_mouse +933,1862171,"scripts_horeka/overfit_sample_tiny/sample.sh",1265,0,"",shellscript,selection_mouse +934,1872183,"scripts_horeka/overfit_sample_tiny/sample.sh",1446,0,"",shellscript,selection_mouse +935,1872276,"scripts_horeka/overfit_sample_tiny/sample.sh",1445,0,"",shellscript,selection_command +936,1873218,"scripts_horeka/overfit_sample_tiny/sample.sh",1732,0,"",shellscript,selection_mouse +937,1873235,"scripts_horeka/overfit_sample_tiny/sample.sh",1731,0,"",shellscript,selection_command +938,1907450,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +939,1913436,"TERMINAL",0,0,"srun",,terminal_focus 
+940,1914763,"TERMINAL",0,0,"[?25lsh[?25h[?25lh[?25h",,terminal_output +941,1914830,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +942,1914990,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +943,1915057,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +944,1915284,"TERMINAL",0,0,"ripts_",,terminal_output +945,1916823,"TERMINAL",0,0,"[?25lh[?25horeka/",,terminal_output +946,1917090,"TERMINAL",0,0,"",,terminal_output +947,1917245,"TERMINAL",0,0,"\r\nbatchsize_scaling/ overfit_batch/ overfit_sample/ sync_runner.sh train_lam.sh \r\nmodelsize_scaling/ overfit_batch_tiny/ overfit_sample_tiny/ train_dynamics.sh train_tokenizer.sh \r\n(jafar) [tum_cte0515@hkn0706 jafar]$ sh scripts_horeka/",,terminal_output +948,1918914,"TERMINAL",0,0,"o",,terminal_output +949,1919070,"TERMINAL",0,0,"verfit_",,terminal_output +950,1919810,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +951,1919936,"TERMINAL",0,0,"ample",,terminal_output +952,1920769,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +953,1920932,"TERMINAL",0,0,"tiny/",,terminal_output +954,1921209,"TERMINAL",0,0,"",,terminal_output +955,1921639,"TERMINAL",0,0,"\r\nsample.sh tester.sh \r\n(jafar) [tum_cte0515@hkn0706 jafar]$ sh scripts_horeka/overfit_sample_tiny/",,terminal_output +956,1923533,"TERMINAL",0,0,"s",,terminal_output +957,1924453,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +958,1924517,"TERMINAL",0,0,"ester.sh ",,terminal_output +959,1926593,"scripts_horeka/overfit_sample_tiny/tester.sh",0,0,"#!/usr/bin/env bash\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=""debug""\nslurm_job_id=""0000""\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299272/tokenizer_1751037678_153500/\nlam_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299259/lam_1751036759_200000/\n\n\npython train_dynamics.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=masklim-0-dynamics-tiny-overfit-big-lr-gaussian-noise-$slurm_job_id \\n --tags dynamics overfit tiny masklimit-0 gaussian-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,tab +960,1929039,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=15:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/%x_%j.log\n#SBATCH --mail-user=mihir.mahajan2002@gmail.com\n#SBATCH 
--job-name=train_tokenizer_minecraft_overfit_sample\n#SBATCH --mem=50G\n#SBATCH --mail-type=ALL\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,tab +961,1929979,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1444,0,"",shellscript,selection_mouse +962,1930182,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1443,1,"\n",shellscript,selection_mouse +963,1930182,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1372,72,"\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +964,1930182,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1242,202,"jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +965,1930183,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1040,404," --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +966,1930183,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",943,501," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +967,1930193,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1443,1,"\n",shellscript,selection_command +968,1930194,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",943,501," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n 
--codebook_dropout 0.0 \ \n",shellscript,selection_mouse +969,1930249,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",910,534,"srun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +970,1930341,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",943,501," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +971,1934183,"scripts_horeka/overfit_sample_tiny/tester.sh",0,0,"",shellscript,tab +972,1934809,"scripts_horeka/overfit_sample_tiny/tester.sh",1527,0,"",shellscript,selection_mouse +973,1935024,"scripts_horeka/overfit_sample_tiny/tester.sh",1526,1,"\n",shellscript,selection_mouse +974,1935024,"scripts_horeka/overfit_sample_tiny/tester.sh",1480,47,"\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +975,1935025,"scripts_horeka/overfit_sample_tiny/tester.sh",1325,202,"32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +976,1935025,"scripts_horeka/overfit_sample_tiny/tester.sh",1088,439,"ata_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +977,1935025,"scripts_horeka/overfit_sample_tiny/tester.sh",948,579," --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +978,1935048,"scripts_horeka/overfit_sample_tiny/tester.sh",1526,1,"\n",shellscript,selection_command +979,1935048,"scripts_horeka/overfit_sample_tiny/tester.sh",947,580," --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n 
--tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +980,1935086,"scripts_horeka/overfit_sample_tiny/tester.sh",885,642," --tags dynamics overfit tiny masklimit-0 gaussian-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +981,1935135,"scripts_horeka/overfit_sample_tiny/tester.sh",804,723," --name=masklim-0-dynamics-tiny-overfit-big-lr-gaussian-noise-$slurm_job_id \\n --tags dynamics overfit tiny masklimit-0 gaussian-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +982,1935198,"scripts_horeka/overfit_sample_tiny/tester.sh",768,759," --log_checkpoint_interval=500 \\n --name=masklim-0-dynamics-tiny-overfit-big-lr-gaussian-noise-$slurm_job_id \\n --tags dynamics overfit tiny masklimit-0 gaussian-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +983,1935225,"scripts_horeka/overfit_sample_tiny/tester.sh",756,771," --log \\n --log_checkpoint_interval=500 \\n --name=masklim-0-dynamics-tiny-overfit-big-lr-gaussian-noise-$slurm_job_id \\n --tags dynamics overfit tiny masklimit-0 gaussian-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +984,1935268,"scripts_horeka/overfit_sample_tiny/tester.sh",705,822," --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n 
--log_checkpoint_interval=500 \\n --name=masklim-0-dynamics-tiny-overfit-big-lr-gaussian-noise-$slurm_job_id \\n --tags dynamics overfit tiny masklimit-0 gaussian-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +985,1935335,"scripts_horeka/overfit_sample_tiny/tester.sh",685,842," --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=masklim-0-dynamics-tiny-overfit-big-lr-gaussian-noise-$slurm_job_id \\n --tags dynamics overfit tiny masklimit-0 gaussian-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +986,1935456,"scripts_horeka/overfit_sample_tiny/tester.sh",664,863," --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=masklim-0-dynamics-tiny-overfit-big-lr-gaussian-noise-$slurm_job_id \\n --tags dynamics overfit tiny masklimit-0 gaussian-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +987,1936117,"scripts_horeka/overfit_sample_tiny/tester.sh",631,896," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=500 \\n --log \\n --log_checkpoint_interval=500 \\n --name=masklim-0-dynamics-tiny-overfit-big-lr-gaussian-noise-$slurm_job_id \\n --tags dynamics overfit tiny masklimit-0 gaussian-noise \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --lam_checkpoint=$lam_ckpt_dir \\n --data_dir $tf_records_dir \\n --tokenizer_dim=384 \\n --latent_patch_dim=32 \\n --num_patch_latents=1024 \\n --patch_size=4 \\n --tokenizer_num_blocks=8 \\n --tokenizer_num_heads=8 \\n --lam_dim=384 \\n --latent_action_dim=32 \\n --num_latent_actions=6 \\n --lam_patch_size=16 \\n --lam_num_blocks=8 \\n --lam_num_heads=8 \\n --dyna_dim=128 \\n --dyna_num_blocks=2 \\n --dyna_num_heads=4 \\n --mask_limit=0.0\n",shellscript,selection_mouse +988,1937098,"scripts_horeka/overfit_sample_tiny/tester.sh",631,896,"",shellscript,content 
+989,1938119,"scripts_horeka/overfit_sample_tiny/tester.sh",631,0," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,content +990,1941110,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +991,1941231,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3337997\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0706\r\nSLURM_JOB_START_TIME=1751388043\r\nSLURM_STEP_NODELIST=hkn0706\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751391643\r\nSLURM_PMI2_SRUN_PORT=43539\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3309821\r\nSLURM_PTY_PORT=45555\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0706\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=175\r\nSLURM_NODELIST=hkn0706\r\nSLURM_SRUN_COMM_PORT=36793\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3309821\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0706\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=36793\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0706\r\n",,terminal_output +992,1945477,"TERMINAL",0,0,"bash",,terminal_focus +993,1947232,"TERMINAL",0,0,"ls",,terminal_command +994,1949719,"TERMINAL",0,0,"ls",,terminal_command +995,1954234,"TERMINAL",0,0,"2025-07-01 18:58:19.672573: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\n",,terminal_output +996,1954535,"TERMINAL",0,0,"WARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751389099.943974 3348431 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751389099.975953 3348431 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\n",,terminal_output +997,1954841,"TERMINAL",0,0,"W0000 00:00:1751389100.280343 3348431 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389100.280380 3348431 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389100.280383 3348431 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389100.280386 3348431 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +998,1960382,"TERMINAL",0,0,"watch -n1 ""ls -la | wc -l""",,terminal_command +999,1960455,"TERMINAL",0,0,"]633;E;2025-07-01 18:58:25 watch -n1 ""ls -la | wc -l"";ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C[?1049h(B[?7hEvery 1.0s: ls -la | wc -lhkn1993.localdomain: Tue Jul 1 18:58:25 20253",,terminal_output +1000,1961485,"TERMINAL",0,0,"6",,terminal_output +1001,1962511,"TERMINAL",0,0,"7",,terminal_output +1002,1963531,"TERMINAL",0,0,"8",,terminal_output +1003,1964520,"TERMINAL",0,0,"9",,terminal_output +1004,1965580,"TERMINAL",0,0,"30",,terminal_output +1005,1966606,"TERMINAL",0,0,"1",,terminal_output +1006,1967529,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample]633;D;0",,terminal_output +1007,1970190,"TERMINAL",0,0,"watch -n1 ""ls -l | wc -l""",,terminal_command +1008,1970241,"TERMINAL",0,0,"]633;E;2025-07-01 18:58:35 watch -n1 ""ls -l | wc -l"";ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C[?1049h(B[?7hEvery 1.0s: ls -l | wc -lhkn1993.localdomain: Tue Jul 1 18:58:35 20251",,terminal_output +1009,1971244,"TERMINAL",0,0,"6",,terminal_output +1010,1972340,"TERMINAL",0,0,"7",,terminal_output +1011,1973363,"TERMINAL",0,0,"8",,terminal_output +1012,1974291,"TERMINAL",0,0,"9",,terminal_output +1013,1975392,"TERMINAL",0,0,"40",,terminal_output +1014,1976319,"TERMINAL",0,0,"1",,terminal_output +1015,1977357,"TERMINAL",0,0,"2",,terminal_output +1016,1978386,"TERMINAL",0,0,"3",,terminal_output +1017,1979408,"TERMINAL",0,0,"4",,terminal_output +1018,1980396,"TERMINAL",0,0,"5",,terminal_output +1019,1981452,"TERMINAL",0,0,"6",,terminal_output +1020,1982443,"TERMINAL",0,0,"7",,terminal_output +1021,1982655,"TERMINAL",0,0,"srun",,terminal_focus +1022,1983107,"TERMINAL",0,0,"^CTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 22, in \r\n from utils.dataloader import get_dataloader\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/dataloader.py"", line 4, in \r\n import tensorflow as tf\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/__init__.py"", line 468, in \r\n importlib.import_module(""keras.src.optimizers"")\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/importlib/__init__.py"", line 126, in import_module\r\n return _bootstrap._gcd_import(name[level:], package, level)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/keras/__init__.py"", line 7, in \r\n from keras import _tf_keras as _tf_keras\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/keras/_tf_keras/__init__.py"", line 1, in \r\n from keras._tf_keras 
import keras\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/keras/_tf_keras/keras/__init__.py"", line 7, in \r\n from keras import activations as activations\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/keras/activations/__init__.py"", line 7, in \r\n from keras.src.activations import deserialize as deserialize\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/keras/src/__init__.py"", line 13, in \r\n from keras.src import visualization\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/keras/src/visualization/__init__.py"", line 2, in \r\n from keras.src.visualization import plot_image_gallery\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/keras/src/visualization/plot_image_gallery.py"", line 13, in \r\n import matplotlib.pyplot as plt\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/matplotlib/__init__.py"", line 161, in \r\n from . import _api, _version, cbook, _docstring, rcsetup\r\n File """", line 1027, in _find_and_load\r\n File """", line 1002, in _find_and_load_unlocked\r\n File """", line 945, in _find_spec\r\n File """", line 1439, in find_spec\r\n File """", line 1411, in _get_spec\r\n File """", line 1577, in find_spec\r\n File """", line 161, in _path_isfile\r\n File """", line 153, in _path_is_mode_type\r\n File """", line 147, in _path_stat\r\nKeyboardInterrupt\r\n",,terminal_output +1023,1983487,"TERMINAL",0,0,"8",,terminal_output +1024,1983550,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +1025,1983783,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +1026,1984470,"TERMINAL",0,0,"9",,terminal_output +1027,1985486,"TERMINAL",0,0,"50",,terminal_output +1028,1986513,"TERMINAL",0,0,"1",,terminal_output +1029,1986823,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(self.model_dim)\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = self.patch_embed(batch[""video_tokens""])\n if training:\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(rng1, minval=self.mask_limit)\n mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])\n mask = mask.at[:, 0].set(False)\n # FIXME mihir\n # vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n\n ############################## \n rng2, _rng = jax.random.split(rng2)\n noise = jax.random.normal(_rng, 
self.mask_token.shape) # Gaussian noise\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)\n ##############################\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n",python,tab +1030,1987528,"TERMINAL",0,0,"2",,terminal_output +1031,1988574,"TERMINAL",0,0,"3",,terminal_output +1032,1989523,"utils/dataloader.py",0,0,"import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntf.config.experimental.set_visible_devices([], ""GPU"")\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef get_dataloader(\n tfrecord_paths: list[str], # List of TFRecord file paths\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = -1,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), ""Global batch size {global_batch_size} \\n must be divisible 
by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n dataset = tf.data.TFRecordDataset(\n tfrecord_paths, num_parallel_reads=tf.data.AUTOTUNE\n )\n\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n\n # (f.srambical) NOTE: For TFRecords, it's often good to have a large shuffle buffer.\n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n",python,tab +1033,1989724,"TERMINAL",0,0,"4",,terminal_output +1034,1990597,"TERMINAL",0,0,"6",,terminal_output +1035,1991701,"TERMINAL",0,0,"7",,terminal_output +1036,1992624,"TERMINAL",0,0,"8",,terminal_output +1037,1993663,"TERMINAL",0,0,"9",,terminal_output +1038,1994734,"TERMINAL",0,0,"9:00",,terminal_output +1039,1995689,"TERMINAL",0,0,"1",,terminal_output +1040,1996712,"TERMINAL",0,0,"2",,terminal_output +1041,1997735,"TERMINAL",0,0,"3",,terminal_output +1042,1998542,"utils/dataloader.py",2713,0,"",python,selection_mouse +1043,1998768,"TERMINAL",0,0,"4",,terminal_output +1044,1999760,"TERMINAL",0,0,"5",,terminal_output +1045,2000806,"TERMINAL",0,0,"6",,terminal_output +1046,2001829,"TERMINAL",0,0,"7",,terminal_output +1047,2002855,"TERMINAL",0,0,"8",,terminal_output +1048,2003861,"TERMINAL",0,0,"9",,terminal_output +1049,2004698,"utils/dataloader.py",2712,1,"",python,content +1050,2004790,"utils/dataloader.py",2711,1,"",python,content +1051,2004874,"TERMINAL",0,0,"10",,terminal_output +1052,2004902,"utils/dataloader.py",2711,0,"1",python,content +1053,2004903,"utils/dataloader.py",2712,0,"",python,selection_keyboard +1054,2005207,"utils/dataloader.py",2712,0,"0",python,content +1055,2005208,"utils/dataloader.py",2713,0,"",python,selection_keyboard +1056,2005370,"utils/dataloader.py",2712,0,"",python,selection_command +1057,2005885,"TERMINAL",0,0,"1",,terminal_output +1058,2006907,"TERMINAL",0,0,"2",,terminal_output +1059,2007109,"utils/dataloader.py",2695,0,"",python,selection_mouse +1060,2007236,"utils/dataloader.py",2684,19,"shuffle_buffer_size",python,selection_mouse +1061,2007918,"TERMINAL",0,0,"3",,terminal_output +1062,2009050,"TERMINAL",0,0,"4",,terminal_output +1063,2009953,"TERMINAL",0,0,"5",,terminal_output +1064,2011059,"TERMINAL",0,0,"6",,terminal_output +1065,2011990,"TERMINAL",0,0,"7",,terminal_output +1066,2013094,"TERMINAL",0,0,"8",,terminal_output +1067,2014030,"TERMINAL",0,0,"9",,terminal_output +1068,2014318,"TERMINAL",0,0,"watch",,terminal_focus +1069,2015141,"TERMINAL",0,0,"20",,terminal_output +1070,2016166,"TERMINAL",0,0,"1",,terminal_output +1071,2016589,"TERMINAL",0,0,"srun",,terminal_focus +1072,2017114,"TERMINAL",0,0,"2",,terminal_output +1073,2018268,"train_tokenizer.py",0,0,"",python,tab +1074,2018378,"TERMINAL",0,0,"3",,terminal_output +1075,2019138,"TERMINAL",0,0,"4",,terminal_output +1076,2020142,"train_tokenizer.py",7463,0,"",python,selection_mouse 
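The `get_dataloader` snapshot above shards the TFRecord stream across JAX processes and batches `global_batch_size // num_processes` items per process. A stripped-down sketch of that arithmetic on a dummy dataset (the real pipeline adds parsing, random slicing, and normalization). Note that the recorded assert message is a plain string, so its `{...}` placeholders never interpolate; the sketch uses an f-string:

```python
import jax
import tensorflow as tf

global_batch_size = 8
num_processes = jax.process_count()  # 1 on a single host
process_id = jax.process_index()

assert global_batch_size % num_processes == 0, (
    f"Global batch size {global_batch_size} must be divisible by "
    f"the number of JAX processes {num_processes} for proper sharding."
)
per_process_batch_size = global_batch_size // num_processes

dataset = tf.data.Dataset.range(64)
dataset = dataset.shard(num_shards=num_processes, index=process_id)
dataset = dataset.repeat(None)
dataset = dataset.batch(per_process_batch_size, drop_remainder=True)
batch = next(dataset.as_numpy_iterator())  # shape: (per_process_batch_size,)
```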
+1077,2020172,"TERMINAL",0,0,"5",,terminal_output +1078,2021020,"train_tokenizer.py",7462,0,"",python,selection_command +1079,2021158,"TERMINAL",0,0,"6",,terminal_output +1080,2021653,"train_tokenizer.py",7776,0,"",python,selection_mouse +1081,2021669,"train_tokenizer.py",7775,0,"",python,selection_command +1082,2022153,"train_tokenizer.py",7739,0,"",python,selection_mouse +1083,2022179,"TERMINAL",0,0,"7",,terminal_output +1084,2023082,"train_tokenizer.py",7659,0,"",python,selection_command +1085,2023200,"train_tokenizer.py",7582,0,"",python,selection_command +1086,2023201,"TERMINAL",0,0,"8",,terminal_output +1087,2023748,"train_tokenizer.py",7549,0,"",python,selection_command +1088,2023770,"train_tokenizer.py",7496,0,"",python,selection_command +1089,2023888,"train_tokenizer.py",7462,0,"",python,selection_command +1090,2023888,"train_tokenizer.py",7414,0,"",python,selection_command +1091,2023889,"train_tokenizer.py",7369,0,"",python,selection_command +1092,2023890,"train_tokenizer.py",7336,0,"",python,selection_command +1093,2023915,"train_tokenizer.py",7305,0,"",python,selection_command +1094,2023932,"train_tokenizer.py",7271,0,"",python,selection_command +1095,2024076,"train_tokenizer.py",7250,0,"",python,selection_command +1096,2024216,"train_tokenizer.py",7228,0,"",python,selection_command +1097,2024231,"TERMINAL",0,0,"9",,terminal_output +1098,2024368,"train_tokenizer.py",7071,0,"",python,selection_command +1099,2024741,"train_tokenizer.py",7201,0,"\n print(f""Step {step}, loss: {loss}"")",python,content +1100,2024803,"train_tokenizer.py",7214,0,"",python,selection_command +1101,2025236,"TERMINAL",0,0,"30",,terminal_output +1102,2025269,"train_tokenizer.py",7215,0,"",python,selection_command +1103,2025758,"train_tokenizer.py",7216,0,"",python,selection_command +1104,2025809,"train_tokenizer.py",7217,0,"",python,selection_command +1105,2025823,"train_tokenizer.py",7218,0,"",python,selection_command +1106,2025853,"train_tokenizer.py",7219,0,"",python,selection_command +1107,2025913,"train_tokenizer.py",7220,0,"",python,selection_command +1108,2025914,"train_tokenizer.py",7221,0,"",python,selection_command +1109,2025946,"train_tokenizer.py",7222,0,"",python,selection_command +1110,2025985,"train_tokenizer.py",7223,0,"",python,selection_command +1111,2025999,"train_tokenizer.py",7224,0,"",python,selection_command +1112,2026047,"train_tokenizer.py",7225,0,"",python,selection_command +1113,2026089,"train_tokenizer.py",7226,0,"",python,selection_command +1114,2026114,"train_tokenizer.py",7227,0,"",python,selection_command +1115,2026224,"train_tokenizer.py",7228,0,"",python,selection_command +1116,2026269,"TERMINAL",0,0,"1",,terminal_output +1117,2026427,"train_tokenizer.py",7229,0,"",python,selection_command +1118,2026574,"train_tokenizer.py",7230,0,"",python,selection_command +1119,2026711,"train_tokenizer.py",7231,0,"",python,selection_command +1120,2026846,"train_tokenizer.py",7232,0,"",python,selection_command +1121,2027004,"train_tokenizer.py",7233,0,"",python,selection_command +1122,2027124,"train_tokenizer.py",7234,0,"",python,selection_command +1123,2027300,"TERMINAL",0,0,"2",,terminal_output +1124,2027507,"train_tokenizer.py",7234,1,"",python,content +1125,2027893,"train_tokenizer.py",7234,4,"",python,content +1126,2028157,"train_tokenizer.py",7234,2,"",python,content +1127,2028344,"TERMINAL",0,0,"3",,terminal_output +1128,2028428,"train_tokenizer.py",7235,0,"",python,selection_command +1129,2029055,"train_tokenizer.py",7235,4,"",python,content 
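The `models/dynamics.py` snapshot a few rows earlier records the experiment flagged `# FIXME mihir`: instead of broadcasting the learned mask token into masked positions, the masked embeddings are overwritten with fresh Gaussian noise. A condensed sketch of just that substitution, with made-up shapes and without the surrounding `STTransformer` model:

```python
import jax
import jax.numpy as jnp

rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))

vid_embed = jnp.zeros((2, 16, 64, 512))  # (batch, time, patches, dim)
mask_limit = 0.5
mask_prob = jax.random.uniform(rng1, minval=mask_limit)
mask = jax.random.bernoulli(rng2, mask_prob, vid_embed.shape[:-1])
mask = mask.at[:, 0].set(False)  # the first frame is never masked

# Recorded change: Gaussian noise in place of the learned (1, 1, 1, D) mask_token.
rng2, _rng = jax.random.split(rng2)
noise = jax.random.normal(_rng, (1, 1, 1, vid_embed.shape[-1]))
vid_embed = jnp.where(jnp.expand_dims(mask, -1), noise, vid_embed)
```

One noise vector is broadcast to every masked position, mirroring how the single learned mask token was broadcast before the change.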
+1130,2029309,"TERMINAL",0,0,"4",,terminal_output +1131,2029435,"train_tokenizer.py",7235,1,"",python,content +1132,2029684,"train_tokenizer.py",7234,0,"",python,selection_command +1133,2029884,"train_tokenizer.py",7234,1,"",python,content +1134,2030232,"train_tokenizer.py",7233,0,"",python,selection_command +1135,2030376,"TERMINAL",0,0,"5",,terminal_output +1136,2030381,"train_tokenizer.py",7233,1,"",python,content +1137,2031346,"TERMINAL",0,0,"6",,terminal_output +1138,2032380,"TERMINAL",0,0,"7",,terminal_output +1139,2033416,"TERMINAL",0,0,"8",,terminal_output +1140,2034399,"TERMINAL",0,0,"9",,terminal_output +1141,2034886,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/tester.sh ",,terminal_output +1142,2035421,"TERMINAL",0,0,"40",,terminal_output +1143,2035680,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1144,2035812,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3337997\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0706\r\nSLURM_JOB_START_TIME=1751388043\r\nSLURM_STEP_NODELIST=hkn0706\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751391643\r\nSLURM_PMI2_SRUN_PORT=43539\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3309821\r\nSLURM_PTY_PORT=45555\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0706\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=175\r\nSLURM_NODELIST=hkn0706\r\nSLURM_SRUN_COMM_PORT=36793\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3309821\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0706\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=36793\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0706\r\n",,terminal_output +1145,2036442,"TERMINAL",0,0,"1",,terminal_output +1146,2037464,"TERMINAL",0,0,"2",,terminal_output +1147,2038310,"TERMINAL",0,0,"2025-07-01 18:59:43.704838: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751389183.717400 3348884 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751389183.721746 3348884 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751389183.734519 3348884 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389183.734536 3348884 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389183.734538 3348884 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389183.734540 3348884 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +1148,2038480,"TERMINAL",0,0,"3",,terminal_output +1149,2039514,"TERMINAL",0,0,"4",,terminal_output +1150,2040519,"TERMINAL",0,0,"5",,terminal_output +1151,2041563,"TERMINAL",0,0,"6",,terminal_output +1152,2042556,"TERMINAL",0,0,"7",,terminal_output +1153,2043609,"TERMINAL",0,0,"8",,terminal_output +1154,2044332,"TERMINAL",0,0,"W0000 00:00:1751389189.706856 3348884 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n╭─ Unrecognized options ───────────────────────────────────────────────────────────────────────────────────╮\r\n│ Unrecognized options: --model-dim --latent-dim --num-latents --num-blocks --num-heads --codebook-dropout │\r\n│ ──────────────────────────────────────────────────────────────────────────────────────────────────────── │\r\n│ Arguments similar to --latent-dim: │\r\n│ --latent-patch-dim INT │\r\n│ Tokenizer (default: 32) │\r\n│ ──────────────────────────────────────────────────────────────────────────────────────────────────────── │\r\n│ Arguments similar to --num-blocks: │\r\n│ --dyna-num-blocks INT │\r\n│ Dynamics (default: 12) │\r\n│ --lam-num-blocks INT │\r\n│ LAM (default: 8) │\r\n│ --tokenizer-num-blocks INT │\r\n│ Tokenizer (default: 8) │\r\n│ ──────────────────────────────────────────────────────────────────────────────────────────────────────── │\r\n│ Arguments similar to --num-heads: │\r\n│ --dyna-num-heads INT │\r\n│ Dynamics (default: 8) │\r\n│ --lam-num-heads INT │\r\n│ LAM (default: 8) │\r\n│ --tokenizer-num-heads INT │\r\n│ Tokenizer (default: 8) │\r\n│ ──────────────────────────────────────────────────────────────────────────────────────────────────────── │\r\n│ Arguments similar to --num-latents: │\r\n│ --num-patch-latents INT │\r\n│ Tokenizer (default: 1024) │\r\n│ ──────────────────────────────────────────────────────────────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰──────────────────────────────────────────────────────────────────────────────────────────────────────────╯\r\n",,terminal_output +1155,2044635,"TERMINAL",0,0,"50",,terminal_output +1156,2045157,"TERMINAL",0,0,"]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +1157,2045660,"TERMINAL",0,0,"1",,terminal_output +1158,2046684,"TERMINAL",0,0,"2",,terminal_output +1159,2047708,"TERMINAL",0,0,"3",,terminal_output +1160,2048701,"TERMINAL",0,0,"4",,terminal_output +1161,2049754,"TERMINAL",0,0,"5",,terminal_output +1162,2050796,"TERMINAL",0,0,"6",,terminal_output +1163,2051729,"TERMINAL",0,0,"7",,terminal_output +1164,2052747,"TERMINAL",0,0,"8",,terminal_output 
+1165,2053762,"TERMINAL",0,0,"9",,terminal_output +1166,2054597,"train_tokenizer.py",0,0,"",python,tab +1167,2054801,"TERMINAL",0,0,"9:00:00",,terminal_output +1168,2055800,"TERMINAL",0,0,"1",,terminal_output +1169,2056119,"scripts_horeka/overfit_sample_tiny/tester.sh",0,0,"",shellscript,tab +1170,2056846,"TERMINAL",0,0,"2",,terminal_output +1171,2057618,"scripts_horeka/overfit_sample_tiny/tester.sh",625,0,"",shellscript,selection_mouse +1172,2057780,"scripts_horeka/overfit_sample_tiny/tester.sh",624,1,"s",shellscript,selection_mouse +1173,2057781,"scripts_horeka/overfit_sample_tiny/tester.sh",623,2,"cs",shellscript,selection_mouse +1174,2057782,"scripts_horeka/overfit_sample_tiny/tester.sh",621,4,"mics",shellscript,selection_mouse +1175,2057833,"scripts_horeka/overfit_sample_tiny/tester.sh",619,6,"namics",shellscript,selection_mouse +1176,2057834,"scripts_horeka/overfit_sample_tiny/tester.sh",617,8,"dynamics",shellscript,selection_mouse +1177,2057835,"scripts_horeka/overfit_sample_tiny/tester.sh",616,9,"_dynamics",shellscript,selection_mouse +1178,2057866,"scripts_horeka/overfit_sample_tiny/tester.sh",615,10,"n_dynamics",shellscript,selection_mouse +1179,2057893,"TERMINAL",0,0,"3",,terminal_output +1180,2058147,"scripts_horeka/overfit_sample_tiny/tester.sh",616,9,"_dynamics",shellscript,selection_mouse +1181,2058213,"scripts_horeka/overfit_sample_tiny/tester.sh",617,8,"dynamics",shellscript,selection_mouse +1182,2058879,"TERMINAL",0,0,"4",,terminal_output +1183,2058897,"scripts_horeka/overfit_sample_tiny/tester.sh",617,8,"t",shellscript,content +1184,2058899,"scripts_horeka/overfit_sample_tiny/tester.sh",618,0,"",shellscript,selection_keyboard +1185,2058963,"scripts_horeka/overfit_sample_tiny/tester.sh",618,0,"o",shellscript,content +1186,2058964,"scripts_horeka/overfit_sample_tiny/tester.sh",619,0,"",shellscript,selection_keyboard +1187,2059086,"scripts_horeka/overfit_sample_tiny/tester.sh",619,0,"k",shellscript,content +1188,2059087,"scripts_horeka/overfit_sample_tiny/tester.sh",620,0,"",shellscript,selection_keyboard +1189,2059444,"scripts_horeka/overfit_sample_tiny/tester.sh",620,0,"e",shellscript,content +1190,2059445,"scripts_horeka/overfit_sample_tiny/tester.sh",621,0,"",shellscript,selection_keyboard +1191,2059510,"scripts_horeka/overfit_sample_tiny/tester.sh",621,0,"n",shellscript,content +1192,2059511,"scripts_horeka/overfit_sample_tiny/tester.sh",622,0,"",shellscript,selection_keyboard +1193,2059729,"scripts_horeka/overfit_sample_tiny/tester.sh",622,0,"z",shellscript,content +1194,2059729,"scripts_horeka/overfit_sample_tiny/tester.sh",623,0,"",shellscript,selection_keyboard +1195,2059865,"TERMINAL",0,0,"5",,terminal_output +1196,2060074,"scripts_horeka/overfit_sample_tiny/tester.sh",622,1,"",shellscript,content +1197,2060269,"scripts_horeka/overfit_sample_tiny/tester.sh",622,0,"i",shellscript,content +1198,2060270,"scripts_horeka/overfit_sample_tiny/tester.sh",623,0,"",shellscript,selection_keyboard +1199,2060453,"scripts_horeka/overfit_sample_tiny/tester.sh",623,0,"z",shellscript,content +1200,2060454,"scripts_horeka/overfit_sample_tiny/tester.sh",624,0,"",shellscript,selection_keyboard +1201,2060549,"scripts_horeka/overfit_sample_tiny/tester.sh",624,0,"e",shellscript,content +1202,2060550,"scripts_horeka/overfit_sample_tiny/tester.sh",625,0,"",shellscript,selection_keyboard +1203,2060613,"scripts_horeka/overfit_sample_tiny/tester.sh",625,0,"r",shellscript,content +1204,2060614,"scripts_horeka/overfit_sample_tiny/tester.sh",626,0,"",shellscript,selection_keyboard 
+1205,2060893,"TERMINAL",0,0,"6",,terminal_output +1206,2061917,"TERMINAL",0,0,"7",,terminal_output +1207,2062938,"TERMINAL",0,0,"8",,terminal_output +1208,2063004,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/tester.sh ",,terminal_output +1209,2063947,"TERMINAL",0,0,"9",,terminal_output +1210,2064599,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1211,2064729,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3337997\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0706\r\nSLURM_JOB_START_TIME=1751388043\r\nSLURM_STEP_NODELIST=hkn0706\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751391643\r\nSLURM_PMI2_SRUN_PORT=43539\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3309821\r\nSLURM_PTY_PORT=45555\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0706\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=175\r\nSLURM_NODELIST=hkn0706\r\nSLURM_SRUN_COMM_PORT=36793\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3309821\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0706\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=36793\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0706\r\n",,terminal_output +1212,2065005,"TERMINAL",0,0,"10",,terminal_output +1213,2066034,"TERMINAL",0,0,"1",,terminal_output +1214,2066260,"scripts_horeka/overfit_sample_tiny/tester.sh",0,0,"",shellscript,tab +1215,2066694,"TERMINAL",0,0,"2025-07-01 19:00:12.100206: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751389212.113380 3349085 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751389212.117560 3349085 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751389212.129937 3349085 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389212.129955 3349085 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389212.129958 3349085 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389212.129959 3349085 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +1216,2067003,"TERMINAL",0,0,"2",,terminal_output +1217,2068084,"TERMINAL",0,0,"3",,terminal_output +1218,2068956,"TERMINAL",0,0,"W0000 00:00:1751389214.372791 3349085 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n╭─ Parsing error ──────────────────────────────────╮\r\n│ Unrecognized arguments: │\r\n│ ──────────────────────────────────────────────── │\r\n│ For full helptext, run train_tokenizer.py --help │\r\n╰──────────────────────────────────────────────────╯\r\n",,terminal_output +1219,2069070,"TERMINAL",0,0,"4",,terminal_output +1220,2069838,"TERMINAL",0,0,"]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +1221,2070133,"TERMINAL",0,0,"5",,terminal_output +1222,2071158,"TERMINAL",0,0,"6",,terminal_output +1223,2072102,"TERMINAL",0,0,"7",,terminal_output +1224,2073115,"TERMINAL",0,0,"8",,terminal_output +1225,2073259,"scripts_horeka/overfit_sample_tiny/tester.sh",0,0,"",shellscript,tab +1226,2074131,"TERMINAL",0,0,"9",,terminal_output +1227,2075251,"TERMINAL",0,0,"20",,terminal_output +1228,2076189,"TERMINAL",0,0,"1",,terminal_output +1229,2077195,"TERMINAL",0,0,"2",,terminal_output +1230,2077803,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",0,0,"",shellscript,tab +1231,2078213,"TERMINAL",0,0,"3",,terminal_output +1232,2078718,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1443,0,"",shellscript,selection_mouse +1233,2078733,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1442,0,"",shellscript,selection_command +1234,2079070,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1442,1," ",shellscript,selection_mouse +1235,2079084,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1443,0,"",shellscript,selection_command +1236,2079331,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1413,30,"\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1237,2079332,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1393,50,"\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1238,2079332,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1351,92,"\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1239,2079332,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1266,177,"tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1240,2079333,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1086,357,"log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n 
--num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1241,2079333,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1079,364,"\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1242,2079333,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1043,400,"log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1243,2079348,"TERMINAL",0,0,"4",,terminal_output +1244,2079370,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1023,420,"max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1245,2079426,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",1003,440,"min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1246,2079438,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",982,461,"batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1247,2079509,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",949,494,"ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1248,2079527,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",915,528,"python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n 
--max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1249,2079562,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",914,529," python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1250,2079586,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",910,533,"srun python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1251,2079756,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",914,529," python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1252,2079783,"scripts_horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch",915,528,"python train_tokenizer.py \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=1 \\n --min_lr=1e-4 \\n --max_lr=1e-4 \\n --log_image_interval=100 \\n --log \\n --log_checkpoint_interval=500 \\n --name=tokenizer-tiny-overfit-$slurm_job_id \\n --tags tokenizer overfit tiny \\n --entity instant-uv \\n --project jafar \\n --data_dir $tf_records_dir \\n --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ ",shellscript,selection_mouse +1253,2080269,"TERMINAL",0,0,"5",,terminal_output +1254,2081350,"TERMINAL",0,0,"6",,terminal_output +1255,2082374,"TERMINAL",0,0,"7",,terminal_output +1256,2083342,"TERMINAL",0,0,"8",,terminal_output +1257,2084100,"scripts_horeka/overfit_sample_tiny/tester.sh",0,0,"",shellscript,tab +1258,2084360,"TERMINAL",0,0,"9",,terminal_output +1259,2084823,"scripts_horeka/overfit_sample_tiny/tester.sh",1133,0,"",shellscript,selection_mouse +1260,2084957,"scripts_horeka/overfit_sample_tiny/tester.sh",1132,1,"\n",shellscript,selection_mouse +1261,2085004,"scripts_horeka/overfit_sample_tiny/tester.sh",1102,31,"\n --codebook_dropout 0.0 \ 
\n",shellscript,selection_mouse +1262,2085072,"scripts_horeka/overfit_sample_tiny/tester.sh",1080,53," \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1263,2085073,"scripts_horeka/overfit_sample_tiny/tester.sh",1077,56,"s 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1264,2085073,"scripts_horeka/overfit_sample_tiny/tester.sh",1055,78,"ze 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1265,2085073,"scripts_horeka/overfit_sample_tiny/tester.sh",1054,79,"ize 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1266,2085180,"scripts_horeka/overfit_sample_tiny/tester.sh",1029,104,"ents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1267,2085220,"scripts_horeka/overfit_sample_tiny/tester.sh",1007,126,"dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1268,2085276,"scripts_horeka/overfit_sample_tiny/tester.sh",1006,127,"_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1269,2085292,"scripts_horeka/overfit_sample_tiny/tester.sh",1005,128,"t_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1270,2085293,"scripts_horeka/overfit_sample_tiny/tester.sh",1003,130,"ent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1271,2085293,"scripts_horeka/overfit_sample_tiny/tester.sh",1002,131,"tent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1272,2085350,"scripts_horeka/overfit_sample_tiny/tester.sh",1000,133,"latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1273,2085350,"scripts_horeka/overfit_sample_tiny/tester.sh",999,134,"-latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1274,2085350,"scripts_horeka/overfit_sample_tiny/tester.sh",998,135,"--latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1275,2085411,"scripts_horeka/overfit_sample_tiny/tester.sh",997,136," --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1276,2085421,"TERMINAL",0,0,"30",,terminal_output +1277,2085573,"scripts_horeka/overfit_sample_tiny/tester.sh",996,137," --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1278,2085574,"scripts_horeka/overfit_sample_tiny/tester.sh",973,160," --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n --patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1279,2085574,"scripts_horeka/overfit_sample_tiny/tester.sh",972,161," --model_dim 384 \\n --latent_dim 32 \\n --num_latents 1024 \\n 
--patch_size 4 \\n --num_blocks 8 \\n --num_heads 8 \\n --codebook_dropout 0.0 \ \n",shellscript,selection_mouse +1280,2086414,"TERMINAL",0,0,"1",,terminal_output +1281,2087437,"TERMINAL",0,0,"2",,terminal_output +1282,2088396,"TERMINAL",0,0,"3",,terminal_output +1283,2088975,"scripts_horeka/overfit_sample_tiny/tester.sh",972,161,"",shellscript,content +1284,2089415,"TERMINAL",0,0,"4",,terminal_output +1285,2089744,"scripts_horeka/overfit_sample_tiny/tester.sh",971,1,"",shellscript,content +1286,2090435,"TERMINAL",0,0,"5",,terminal_output +1287,2091570,"TERMINAL",0,0,"6",,terminal_output +1288,2092470,"TERMINAL",0,0,"7",,terminal_output +1289,2092766,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/tester.sh ",,terminal_output +1290,2093489,"TERMINAL",0,0,"8",,terminal_output +1291,2094513,"TERMINAL",0,0,"9",,terminal_output +1292,2095560,"TERMINAL",0,0,"40",,terminal_output +1293,2096552,"TERMINAL",0,0,"1",,terminal_output +1294,2097575,"TERMINAL",0,0,"2",,terminal_output +1295,2098598,"TERMINAL",0,0,"3",,terminal_output +1296,2098853,"scripts_horeka/overfit_sample_tiny/tester.sh",0,0,"",shellscript,tab +1297,2099597,"TERMINAL",0,0,"5",,terminal_output +1298,2100621,"TERMINAL",0,0,"6",,terminal_output +1299,2101632,"TERMINAL",0,0,"7",,terminal_output +1300,2102695,"TERMINAL",0,0,"8",,terminal_output +1301,2103673,"TERMINAL",0,0,"9",,terminal_output +1302,2104689,"TERMINAL",0,0,"50",,terminal_output +1303,2105375,"scripts_horeka/overfit_sample_tiny/tester.sh",960,0,"",shellscript,selection_mouse +1304,2105503,"scripts_horeka/overfit_sample_tiny/tester.sh",955,14,"tf_records_dir",shellscript,selection_mouse +1305,2105711,"TERMINAL",0,0,"1",,terminal_output +1306,2106809,"TERMINAL",0,0,"2",,terminal_output +1307,2107188,"scripts_horeka/overfit_sample_tiny/tester.sh",157,0,"",shellscript,selection_mouse +1308,2107364,"scripts_horeka/overfit_sample_tiny/tester.sh",156,1,"s",shellscript,selection_mouse +1309,2107365,"scripts_horeka/overfit_sample_tiny/tester.sh",107,50,"\ntf_records_dir=$ws_dir/knoms_tfrecords_500_shards",shellscript,selection_mouse +1310,2107519,"scripts_horeka/overfit_sample_tiny/tester.sh",137,20,"tfrecords_500_shards",shellscript,selection_mouse +1311,2107520,"scripts_horeka/overfit_sample_tiny/tester.sh",135,22,"s_tfrecords_500_shards",shellscript,selection_mouse +1312,2107520,"scripts_horeka/overfit_sample_tiny/tester.sh",134,23,"ms_tfrecords_500_shards",shellscript,selection_mouse +1313,2107584,"scripts_horeka/overfit_sample_tiny/tester.sh",133,24,"oms_tfrecords_500_shards",shellscript,selection_mouse +1314,2107677,"scripts_horeka/overfit_sample_tiny/tester.sh",132,25,"noms_tfrecords_500_shards",shellscript,selection_mouse +1315,2107760,"TERMINAL",0,0,"3",,terminal_output +1316,2107962,"scripts_horeka/overfit_sample_tiny/tester.sh",131,26,"knoms_tfrecords_500_shards",shellscript,selection_mouse +1317,2108060,"scripts_horeka/overfit_sample_tiny/tester.sh",130,27,"/knoms_tfrecords_500_shards",shellscript,selection_mouse +1318,2108139,"scripts_horeka/overfit_sample_tiny/tester.sh",131,26,"knoms_tfrecords_500_shards",shellscript,selection_mouse +1319,2108819,"TERMINAL",0,0,"4",,terminal_output +1320,2108929,"TERMINAL",0,0,"watch",,terminal_focus +1321,2109436,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample]633;D;0",,terminal_output +1322,2112028,"TERMINAL",0,0,"cd ..",,terminal_command +1323,2112341,"TERMINAL",0,0,"ls",,terminal_command 
+1324,2112356,"TERMINAL",0,0,"]633;E;2025-07-01 19:00:57 ls;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;Csingle_batch_12_elems.npy single_batch_3_elems.npy single_batch_6_elems.npy single_sample single_sample_axe.npy single_sample_corner.npy\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir]633;D;0",,terminal_output +1325,2113658,"TERMINAL",0,0,"cd ..",,terminal_command +1326,2114167,"TERMINAL",0,0,"ls",,terminal_command +1327,2114212,"TERMINAL",0,0,"]633;E;2025-07-01 19:00:59 ls;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;Ccheckpoints knoms_mp4 knoms_tfrecords_2_shards_overfit open_ai_minecraft_first_try overfit_dir\r\ncoinrun knoms_mp4_clips knoms_tfrecords_500_shards open_ai_minecraft_first_try_npy procgen_env_16_episodes_20000\r\ndata_knoms knoms_npy knoms_tfrecords_500_shards_overfit_1 open_ai_minecraft_first_try_tfrecord\r\ndummy knoms_tfrecords knoms_tfrecords_500_shards_overfit_10 open_ai_minecraft_npy\r\nknoms_arrayrecords_500_shards knoms_tfrecords_200_shards open_ai_minecraft open_ai_minecraft_tfrecord\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data]633;D;0",,terminal_output +1328,2124981,"TERMINAL",0,0,"cd open_ai_minecraft_tfrecord/",,terminal_command +1329,2125764,"TERMINAL",0,0,"pwd",,terminal_command +1330,2125769,"TERMINAL",0,0,"]633;E;2025-07-01 19:01:11 pwd;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord]633;D;0",,terminal_output +1331,2129002,"scripts_horeka/overfit_sample_tiny/tester.sh",157,0,"",shellscript,selection_mouse +1332,2129147,"scripts_horeka/overfit_sample_tiny/tester.sh",157,48,"\nws_dir='/hkfs/work/workspace/scratch/tum_ind369",shellscript,selection_mouse +1333,2129197,"scripts_horeka/overfit_sample_tiny/tester.sh",157,43,"\nws_dir='/hkfs/work/workspace/scratch/tum_i",shellscript,selection_mouse +1334,2129197,"scripts_horeka/overfit_sample_tiny/tester.sh",157,38,"\nws_dir='/hkfs/work/workspace/scratch/",shellscript,selection_mouse +1335,2129198,"scripts_horeka/overfit_sample_tiny/tester.sh",157,32,"\nws_dir='/hkfs/work/workspace/sc",shellscript,selection_mouse +1336,2129246,"scripts_horeka/overfit_sample_tiny/tester.sh",157,29,"\nws_dir='/hkfs/work/workspace",shellscript,selection_mouse +1337,2129247,"scripts_horeka/overfit_sample_tiny/tester.sh",157,27,"\nws_dir='/hkfs/work/workspa",shellscript,selection_mouse +1338,2129248,"scripts_horeka/overfit_sample_tiny/tester.sh",157,26,"\nws_dir='/hkfs/work/worksp",shellscript,selection_mouse +1339,2129265,"scripts_horeka/overfit_sample_tiny/tester.sh",157,25,"\nws_dir='/hkfs/work/works",shellscript,selection_mouse +1340,2129351,"scripts_horeka/overfit_sample_tiny/tester.sh",157,24,"\nws_dir='/hkfs/work/work",shellscript,selection_mouse +1341,2129446,"scripts_horeka/overfit_sample_tiny/tester.sh",131,26,"knoms_tfrecords_500_shards",shellscript,selection_mouse +1342,2129508,"scripts_horeka/overfit_sample_tiny/tester.sh",129,28,"r/knoms_tfrecords_500_shards",shellscript,selection_mouse +1343,2129530,"scripts_horeka/overfit_sample_tiny/tester.sh",128,29,"ir/knoms_tfrecords_500_shards",shellscript,selection_mouse +1344,2129530,"scripts_horeka/overfit_sample_tiny/tester.sh",127,30,"dir/knoms_tfrecords_500_shards",shellscript,selection_mouse 
+1345,2129550,"scripts_horeka/overfit_sample_tiny/tester.sh",126,31,"_dir/knoms_tfrecords_500_shards",shellscript,selection_mouse +1346,2129610,"scripts_horeka/overfit_sample_tiny/tester.sh",125,32,"s_dir/knoms_tfrecords_500_shards",shellscript,selection_mouse +1347,2129683,"scripts_horeka/overfit_sample_tiny/tester.sh",124,33,"ws_dir/knoms_tfrecords_500_shards",shellscript,selection_mouse +1348,2129922,"scripts_horeka/overfit_sample_tiny/tester.sh",123,34,"$ws_dir/knoms_tfrecords_500_shards",shellscript,selection_mouse +1349,2130399,"scripts_horeka/overfit_sample_tiny/tester.sh",123,34,"",shellscript,content +1350,2130650,"scripts_horeka/overfit_sample_tiny/tester.sh",123,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord",shellscript,content +1351,2132851,"scripts_horeka/overfit_sample_tiny/tester.sh",214,0,"",shellscript,selection_mouse +1352,2133028,"scripts_horeka/overfit_sample_tiny/tester.sh",211,6,"ws_dir",shellscript,selection_mouse +1353,2135719,"scripts_horeka/overfit_sample_tiny/tester.sh",210,0,"",shellscript,selection_mouse +1354,2136890,"scripts_horeka/overfit_sample_tiny/tester.sh",123,87,"",shellscript,content +1355,2137638,"scripts_horeka/overfit_sample_tiny/tester.sh",123,0,"$ws_dir/knoms_tfrecords_500_shards",shellscript,content +1356,2139141,"scripts_horeka/overfit_sample_tiny/tester.sh",123,34,"",shellscript,content +1357,2139357,"scripts_horeka/overfit_sample_tiny/tester.sh",123,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord",shellscript,content +1358,2140849,"TERMINAL",0,0,"srun",,terminal_focus +1359,2141884,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1360,2142057,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3337997\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0706\r\nSLURM_JOB_START_TIME=1751388043\r\nSLURM_STEP_NODELIST=hkn0706\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751391643\r\nSLURM_PMI2_SRUN_PORT=43539\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3309821\r\nSLURM_PTY_PORT=45555\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0706\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=175\r\nSLURM_NODELIST=hkn0706\r\nSLURM_SRUN_COMM_PORT=36793\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3309821\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0706\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=36793\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0706\r\n",,terminal_output +1361,2144038,"TERMINAL",0,0,"2025-07-01 19:01:29.417813: E 
external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751389289.430856 3349433 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751389289.434897 3349433 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751389289.446772 3349433 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389289.446788 3349433 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389289.446790 3349433 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389289.446792 3349433 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +1362,2146278,"TERMINAL",0,0,"W0000 00:00:1751389291.716505 3349433 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +1363,2146833,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +1364,2147755,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +1365,2148417,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250701_190133-9khqhr8o\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run tokenizer-tiny-overfit-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/9khqhr8o\r\n",,terminal_output +1366,2150992,"TERMINAL",0,0,"2025-07-01 19:01:36.426544: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1367,2155439,"scripts_horeka/overfit_sample_tiny/tester.sh",0,0,"",shellscript,tab +1368,2156925,"scripts_horeka/overfit_sample_tiny/tester.sh",736,0,"",shellscript,selection_mouse +1369,2164653,"TERMINAL",0,0,"2025-07-01 19:01:50.003625: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1370,2167024,"TERMINAL",0,0,"2025-07-01 19:01:52.450326: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1371,2170978,"TERMINAL",0,0,"Starting training from step 0...\r\n",,terminal_output +1372,2171415,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1373,2171480,"TERMINAL",0,0,"Step 0\r\n",,terminal_output +1374,2171543,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1375,2171661,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1376,2171727,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1377,2171832,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1378,2171928,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1379,2172055,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1380,2172152,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1381,2172252,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1382,2172361,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1383,2172550,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1384,2172674,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1385,2172806,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1386,2172873,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1387,2172927,"TERMINAL",0,0,"Step 0\r\n",,terminal_output +1388,2173004,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1389,2173138,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1390,2173236,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1391,2173379,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1392,2173513,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1393,2173574,"TERMINAL",0,0,"Step 0\r\n",,terminal_output +1394,2173693,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1395,2173758,"TERMINAL",0,0,"Step 0\r\n",,terminal_output +1396,2173873,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1397,2173993,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1398,2174089,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1399,2174252,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1400,2174339,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n^C",,terminal_output +1401,2174400,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_tokenizer.py"", line 215, in \r\n for videos in dataloader:\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/dataset_ops.py"", line 4788, in __next__\r\n return nest.map_structure(to_numpy, next(self._iterator))\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/iterator_ops.py"", line 826, in __next__\r\n return self._next_internal()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/iterator_ops.py"", line 776, in _next_internal\r\n ret = gen_dataset_ops.iterator_get_next(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/ops/gen_dataset_ops.py"", line 3081, in iterator_get_next\r\n _result = pywrap_tfe.TFE_Py_FastPathExecute(\r\nKeyboardInterrupt\r\n",,terminal_output +1402,2174580,"TERMINAL",0,0,"^CException ignored in atexit callback: .teardown_atexit at 0x15439a5c7e20>\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 94, in teardown_atexit\r\n conn.teardown(hooks.exit_code)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 226, in teardown\r\n self._router.join()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/interface/router.py"", line 75, in join\r\n self._thread.join()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1096, in join\r\n self._wait_for_tstate_lock()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1116, in _wait_for_tstate_lock\r\n if lock.acquire(block, timeout):\r\nKeyboardInterrupt: \r\n",,terminal_output +1403,2174744,"TERMINAL",0,0,"^CException ignored in: .remove at 0x1543f1936680>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n",,terminal_output +1404,2174861,"TERMINAL",0,0," def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +1405,2174915,"TERMINAL",0,0,"^CException ignored in: .remove at 0x1543f1936680>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +1406,2175141,"TERMINAL",0,0,"^C",,terminal_output +1407,2175514,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +1408,2175616,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +1409,2176120,"TERMINAL",0,0,"watch",,terminal_focus +1410,2177931,"train_tokenizer.py",0,0,"",python,tab +1411,2179330,"train_tokenizer.py",7263,0,"",python,selection_mouse +1412,2179346,"train_tokenizer.py",7262,0,"",python,selection_command +1413,2179807,"train_tokenizer.py",7306,0,"",python,selection_mouse +1414,2179822,"train_tokenizer.py",7305,0,"",python,selection_command +1415,2180464,"train_tokenizer.py",7285,0,"",python,selection_mouse +1416,2180478,"train_tokenizer.py",7284,0,"",python,selection_command 
+1417,2182023,"train_tokenizer.py",7285,0,"\n ",python,content +1418,2182826,"train_tokenizer.py",7298,0,"s",python,content +1419,2182827,"train_tokenizer.py",7299,0,"",python,selection_keyboard +1420,2182906,"train_tokenizer.py",7299,0,"t",python,content +1421,2182908,"train_tokenizer.py",7300,0,"",python,selection_keyboard +1422,2183028,"train_tokenizer.py",7300,0,"e",python,content +1423,2183030,"train_tokenizer.py",7301,0,"",python,selection_keyboard +1424,2185536,"train_tokenizer.py",7300,1,"",python,content +1425,2185847,"train_tokenizer.py",7298,2,"",python,content +1426,2185953,"train_tokenizer.py",7286,12,"",python,content +1427,2186933,"train_tokenizer.py",7285,1,"",python,content +1428,2189105,"train_tokenizer.py",7285,0,"\n ",python,content +1429,2189764,"train_tokenizer.py",7298,0,"s",python,content +1430,2189765,"train_tokenizer.py",7299,0,"",python,selection_keyboard +1431,2189905,"train_tokenizer.py",7299,0,"t",python,content +1432,2189907,"train_tokenizer.py",7300,0,"",python,selection_keyboard +1433,2189992,"train_tokenizer.py",7300,0,"e",python,content +1434,2189994,"train_tokenizer.py",7301,0,"",python,selection_keyboard +1435,2190890,"train_tokenizer.py",7301,0,"p",python,content +1436,2190892,"train_tokenizer.py",7302,0,"",python,selection_keyboard +1437,2191068,"train_tokenizer.py",7302,0," ",python,content +1438,2191070,"train_tokenizer.py",7303,0,"",python,selection_keyboard +1439,2191431,"train_tokenizer.py",7303,0,"+",python,content +1440,2191432,"train_tokenizer.py",7304,0,"",python,selection_keyboard +1441,2192153,"train_tokenizer.py",7304,0,"=",python,content +1442,2192154,"train_tokenizer.py",7305,0,"",python,selection_keyboard +1443,2192672,"train_tokenizer.py",7305,0,"1",python,content +1444,2192673,"train_tokenizer.py",7306,0,"",python,selection_keyboard +1445,2193270,"train_tokenizer.py",7305,0,"",python,selection_command +1446,2193431,"train_tokenizer.py",7305,0," ",python,content +1447,2193432,"train_tokenizer.py",7306,0,"",python,selection_keyboard +1448,2195892,"TERMINAL",0,0,"srun",,terminal_focus +1449,2196848,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/tester.sh ",,terminal_output +1450,2197088,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+1451,2197227,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3337997\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0706\r\nSLURM_JOB_START_TIME=1751388043\r\nSLURM_STEP_NODELIST=hkn0706\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751391643\r\nSLURM_PMI2_SRUN_PORT=43539\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3309821\r\nSLURM_PTY_PORT=45555\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0706\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=175\r\nSLURM_NODELIST=hkn0706\r\nSLURM_SRUN_COMM_PORT=36793\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3309821\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0706\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=36793\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0706\r\n",,terminal_output +1452,2199328,"TERMINAL",0,0,"2025-07-01 19:02:24.718039: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751389344.730972 3350207 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751389344.735106 3350207 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751389344.747342 3350207 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389344.747369 3350207 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389344.747371 3350207 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389344.747373 3350207 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +1453,2201848,"TERMINAL",0,0,"W0000 00:00:1751389347.192909 3350207 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +1454,2202141,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +1455,2202439,"sample.py",0,0,"",python,tab +1456,2202850,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +1457,2203459,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250701_190228-l27ici2v\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run tokenizer-tiny-overfit-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/l27ici2v\r\n",,terminal_output +1458,2204097,"train_tokenizer.py",0,0,"",python,tab +1459,2204928,"TERMINAL",0,0,"2025-07-01 19:02:30.271609: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1460,2218523,"TERMINAL",0,0,"2025-07-01 19:02:43.936782: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1461,2220924,"TERMINAL",0,0,"2025-07-01 19:02:46.352978: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1462,2224757,"TERMINAL",0,0,"Starting training from step 0...\r\n",,terminal_output +1463,2224989,"TERMINAL",0,0,"Step 0\r\n",,terminal_output +1464,2225103,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1465,2225167,"TERMINAL",0,0,"Step 0\r\n",,terminal_output +1466,2225282,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1467,2225424,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1468,2225512,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1469,2225624,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1470,2225766,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1471,2225972,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1472,2226065,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1473,2226175,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1474,2226239,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1475,2226370,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1476,2226434,"TERMINAL",0,0,"Step 0\r\n",,terminal_output +1477,2226526,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1478,2226590,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1479,2226655,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1480,2226764,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1481,2226872,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1482,2227037,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1483,2227090,"TERMINAL",0,0,"Step 0\r\n",,terminal_output +1484,2227155,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1485,2227288,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1486,2227353,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1487,2227416,"TERMINAL",0,0,"Step 0\r\n",,terminal_output +1488,2227487,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1489,2227568,"TERMINAL",0,0,"^CTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_tokenizer.py"", line 222, in \r\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/numpy/lib/npyio.py"", line 546, in save\r\n format.write_array(fid, arr, allow_pickle=allow_pickle,\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/numpy/lib/format.py"", line 730, in write_array\r\n array.tofile(fp)\r\nKeyboardInterrupt\r\n",,terminal_output +1490,2227832,"TERMINAL",0,0,"^C",,terminal_output +1491,2227956,"TERMINAL",0,0,"^C",,terminal_output +1492,2228087,"TERMINAL",0,0,"Exception ignored in atexit callback: .teardown_atexit at 0x14b5c69cbe20>\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 94, in 
teardown_atexit\r\n conn.teardown(hooks.exit_code)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 236, in teardown\r\n return self._proc.join()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/service/service.py"", line 251, in join\r\n ret = self._internal_proc.wait()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/subprocess.py"", line 1222, in wait\r\n self._wait(timeout=sigint_timeout)\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/subprocess.py"", line 1953, in _wait\r\n time.sleep(delay)\r\nKeyboardInterrupt: \r\n",,terminal_output +1493,2228432,"TERMINAL",0,0,"^C",,terminal_output +1494,2228846,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +1495,2229696,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/tester.sh ",,terminal_output +1496,2231754,"train_tokenizer.py",0,0,"",python,tab +1497,2235399,"train_tokenizer.py",7307,0,"",python,selection_mouse +1498,2236321,"train_tokenizer.py",7297,0,"",python,selection_mouse +1499,2236544,"train_tokenizer.py",7286,12," ",python,selection_mouse +1500,2236838,"train_tokenizer.py",7301,0,"",python,selection_mouse +1501,2236995,"train_tokenizer.py",7298,4,"step",python,selection_mouse +1502,2244989,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1503,2245115,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3337997\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0706\r\nSLURM_JOB_START_TIME=1751388043\r\nSLURM_STEP_NODELIST=hkn0706\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751391643\r\nSLURM_PMI2_SRUN_PORT=43539\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3309821\r\nSLURM_PTY_PORT=45555\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0706\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=175\r\nSLURM_NODELIST=hkn0706\r\nSLURM_SRUN_COMM_PORT=36793\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3309821\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0706\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=36793\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0706\r\n",,terminal_output +1504,2247308,"TERMINAL",0,0,"2025-07-01 19:03:12.688321: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: 
Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751389392.701445 3350970 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751389392.705507 3350970 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751389392.717417 3350970 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389392.717434 3350970 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389392.717437 3350970 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389392.717438 3350970 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +1505,2249805,"TERMINAL",0,0,"W0000 00:00:1751389395.240943 3350970 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +1506,2250155,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +1507,2250980,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +1508,2251590,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250701_190316-gi2p6n6x\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run tokenizer-tiny-overfit-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/gi2p6n6x\r\n",,terminal_output +1509,2252880,"TERMINAL",0,0,"2025-07-01 19:03:18.300210: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1510,2266336,"TERMINAL",0,0,"2025-07-01 19:03:31.774963: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1511,2268699,"TERMINAL",0,0,"2025-07-01 19:03:34.108966: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1512,2272580,"TERMINAL",0,0,"Starting training from step 0...\r\n",,terminal_output +1513,2272810,"TERMINAL",0,0,"Step 0\r\n",,terminal_output +1514,2272895,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1515,2273039,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1516,2273147,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1517,2273218,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1518,2273280,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1519,2273332,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1520,2273440,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1521,2273515,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1522,2273612,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1523,2273679,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1524,2273761,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1525,2273876,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1526,2273987,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1527,2274050,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1528,2274182,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1529,2274292,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1530,2274357,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1531,2274421,"TERMINAL",0,0,"Step 0\r\n",,terminal_output +1532,2274486,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1533,2274595,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1534,2274706,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1535,2274760,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1536,2274919,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1537,2275032,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1538,2275157,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1539,2275244,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1540,2275314,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1541,2275408,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1542,2275517,"TERMINAL",0,0,"Step 0\r\nStep 0\r\n",,terminal_output +1543,2275613,"TERMINAL",0,0,"Step 0\r\nStep 0\r\nStep 0\r\nStep 0\r\nStep 0\r\n",,terminal_output +1544,2275712,"TERMINAL",0,0,"^CTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_tokenizer.py"", line 215, in \r\n for videos in dataloader:\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/dataset_ops.py"", line 4788, in __next__\r\n return nest.map_structure(to_numpy, next(self._iterator))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/iterator_ops.py"", line 826, in __next__\r\n return self._next_internal()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/iterator_ops.py"", line 
776, in _next_internal\r\n ret = gen_dataset_ops.iterator_get_next(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/ops/gen_dataset_ops.py"", line 3081, in iterator_get_next\r\n _result = pywrap_tfe.TFE_Py_FastPathExecute(\r\nKeyboardInterrupt\r\n",,terminal_output +1545,2276006,"TERMINAL",0,0,"^CException ignored in atexit callback: .teardown_atexit at 0x153de5a03e20>\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 94, in teardown_atexit\r\n conn.teardown(hooks.exit_code)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 226, in teardown\r\n self._router.join()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/interface/router.py"", line 75, in join\r\n self._thread.join()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1096, in join\r\n self._wait_for_tstate_lock()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1116, in _wait_for_tstate_lock\r\n if lock.acquire(block, timeout):\r\nKeyboardInterrupt: \r\n",,terminal_output +1546,2276196,"TERMINAL",0,0,"^C",,terminal_output +1547,2276315,"TERMINAL",0,0,"Exception ignored in: .remove at 0x153e3cd56680>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n^C",,terminal_output +1548,2276382,"TERMINAL",0,0,"Exception ignored in atexit callback: \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3168, in clean_up\r\n clear_backends()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3158, in clear_backends\r\n pjit._infer_params_cached.cache_clear()\r\nKeyboardInterrupt: \r\n",,terminal_output +1549,2276979,"TERMINAL",0,0,"^C",,terminal_output +1550,2277165,"TERMINAL",0,0,"^C",,terminal_output +1551,2277399,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +1552,2278032,"train_tokenizer.py",0,0,"",python,tab +1553,2281671,"train_tokenizer.py",7832,0,"",python,selection_mouse +1554,2281930,"train_tokenizer.py",7831,1,"1",python,selection_mouse +1555,2281930,"train_tokenizer.py",7828,4,"+= 1",python,selection_mouse +1556,2281931,"train_tokenizer.py",7826,6,"p += 1",python,selection_mouse +1557,2281931,"train_tokenizer.py",7825,7,"ep += 1",python,selection_mouse +1558,2281931,"train_tokenizer.py",7824,8,"tep += 1",python,selection_mouse +1559,2281931,"train_tokenizer.py",7823,9,"step += 1",python,selection_mouse +1560,2281962,"train_tokenizer.py",7822,10," step += 1",python,selection_mouse +1561,2282049,"train_tokenizer.py",7821,11," step += 1",python,selection_mouse +1562,2282410,"train_tokenizer.py",7822,10," step += 1",python,selection_mouse 
+1563,2282473,"train_tokenizer.py",7823,9,"step += 1",python,selection_mouse +1564,2282532,"train_tokenizer.py",7824,8,"tep += 1",python,selection_mouse +1565,2283007,"train_tokenizer.py",7824,0,"",python,selection_mouse +1566,2283008,"train_tokenizer.py",7823,4,"step",python,selection_mouse +1567,2283257,"train_tokenizer.py",7823,6,"step +",python,selection_mouse +1568,2283258,"train_tokenizer.py",7823,8,"step += ",python,selection_mouse +1569,2283258,"train_tokenizer.py",7823,9,"step += 1",python,selection_mouse +1570,2283258,"train_tokenizer.py",7823,10,"step += 1\n",python,selection_mouse +1571,2286053,"train_tokenizer.py",7300,0,"",python,selection_mouse +1572,2286194,"train_tokenizer.py",7298,4,"step",python,selection_mouse +1573,2290761,"train_tokenizer.py",6752,0,"",python,selection_mouse +1574,2290924,"train_tokenizer.py",6752,3,"for",python,selection_mouse +1575,2291058,"train_tokenizer.py",6752,34,"for videos in dataloader:\n ",python,selection_mouse +1576,2291059,"train_tokenizer.py",6752,96,"for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n # ",python,selection_mouse +1577,2291059,"train_tokenizer.py",6752,199,"for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print",python,selection_mouse +1578,2291112,"train_tokenizer.py",6752,269,"for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # -",python,selection_mouse +1579,2291113,"train_tokenizer.py",6752,306,"for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save",python,selection_mouse +1580,2291138,"train_tokenizer.py",6752,504,"for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step ",python,selection_mouse +1581,2291161,"train_tokenizer.py",6752,506,"for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < ",python,selection_mouse +1582,2291180,"train_tokenizer.py",6752,510,"for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 
5000",python,selection_mouse +1583,2291205,"train_tokenizer.py",6752,533,"for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break",python,selection_mouse +1584,2291305,"train_tokenizer.py",6752,555,"for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1585,2292958,"train_tokenizer.py",7307,0,"",python,selection_mouse +1586,2293106,"train_tokenizer.py",7306,1,"1",python,selection_mouse +1587,2293473,"train_tokenizer.py",7285,22,"\n step += 1",python,selection_mouse +1588,2293473,"train_tokenizer.py",7280,27,"break\n step += 1",python,selection_mouse +1589,2293474,"train_tokenizer.py",7251,56,"step < 5000:\n break\n step += 1",python,selection_mouse +1590,2293474,"train_tokenizer.py",7214,93,"print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1591,2293474,"train_tokenizer.py",7213,94," print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1592,2293474,"train_tokenizer.py",7212,95," print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1593,2293475,"train_tokenizer.py",7048,259," np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1594,2293475,"train_tokenizer.py",7047,260," np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1595,2293476,"train_tokenizer.py",7046,261," np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1596,2293511,"train_tokenizer.py",7012,295," # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1597,2293533,"train_tokenizer.py",7011,296," # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1598,2293560,"train_tokenizer.py",6987,320," # while(True):\n # --- Train step ---\n 
np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1599,2293597,"train_tokenizer.py",6940,367," # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1600,2293637,"train_tokenizer.py",6903,404," # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1601,2293668,"train_tokenizer.py",6902,405," # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1602,2293779,"train_tokenizer.py",6840,467," # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1603,2293822,"train_tokenizer.py",6839,468," # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1604,2293888,"train_tokenizer.py",6838,469," # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1605,2293952,"train_tokenizer.py",6778,529," # npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1606,2294757,"train_tokenizer.py",6744,563," for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner.npy""\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\n # videos = np.load(npy_path)\n # print(""batch shape: "", 
videos.shape)\n # while(True):\n # --- Train step ---\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\n print(f""Step {step}"")\n if step < 5000:\n break\n step += 1",python,selection_mouse +1607,2312748,"TERMINAL",0,0,"c",,terminal_output +1608,2312811,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1609,2312891,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1610,2313006,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1611,2314884,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/tester.sh ",,terminal_output +1612,2316874,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +1613,2317053,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1614,2317119,"TERMINAL",0,0,"[?25l [1@c[?25h",,terminal_output +1615,2317220,"TERMINAL",0,0,"[?25l [1@a[?25h",,terminal_output +1616,2317853,"TERMINAL",0,0,"[?25l [1@t[?25h",,terminal_output +1617,2318078,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\ntf_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord\r\nws_dir='/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/'\r\n\r\njob_name=""debug""\r\nslurm_job_id=""0000""\r\n\r\nCHECKPOINT_DIR=$ws_dir/checkpoints/$job_name_$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nenv | grep SLURM\r\n\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299272/tokenizer_1751037678_153500/\r\nlam_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3299259/lam_1751036759_200000/\r\n\r\n\r\npython train_tokenizer.py \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=1 \\r\n --min_lr=1e-4 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=100 \\r\n --log \\r\n --log_checkpoint_interval=500 \\r\n --name=tokenizer-tiny-overfit-$slurm_job_id \\r\n --tags tokenizer overfit tiny \\r\n --entity instant-uv \\r\n --project jafar \\r\n --data_dir $tf_records_dir \]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +1618,2325198,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +1619,2325341,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1620,2325460,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +1621,2325749,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1622,2326372,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +1623,2326985,"TERMINAL",0,0,"ain_",,terminal_output +1624,2327851,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1625,2327904,"TERMINAL",0,0,"okenizer.py ",,terminal_output +1626,2328387,"TERMINAL",0,0,"\r\n[?2004l\rfrom dataclasses import dataclass, field\r\nimport os\r\nimport time\r\n\r\nimport einops\r\nfrom flax.training import orbax_utils\r\nfrom flax.training.train_state import TrainState\r\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\r\nfrom jax.experimental.mesh_utils import create_device_mesh\r\nimport optax\r\nimport orbax\r\nfrom orbax.checkpoint import PyTreeCheckpointer\r\nimport numpy as np\r\nimport dm_pix as pix\r\nimport jax\r\nimport jax.numpy as jnp\r\nimport tyro\r\nimport wandb\r\n\r\nfrom models.tokenizer import TokenizerVQVAE\r\nfrom utils.dataloader import get_dataloader\r\n\r\nts = int(time.time())\r\n\r\n\r\n@dataclass\r\nclass Args:\r\n # Experiment\r\n num_steps: int = 300_000\r\n seed: int = 0\r\n seq_len: int = 16\r\n image_channels: int = 3\r\n image_height: int = 90\r\n image_width: int = 160\r\n data_dir: 
str = ""data_tfrecords/coinrun""\r\n checkpoint: str = """"\r\n # Optimization\r\n vq_beta: float = 0.25\r\n batch_size: int = 48\r\n min_lr: float = 3e-4\r\n max_lr: float = 3e-4\r\n warmup_steps: int = 10000\r\n # Tokenizer\r\n model_dim: int = 512\r\n latent_dim: int = 32\r\n num_latents: int = 1024\r\n patch_size: int = 4\r\n num_blocks: int = 8\r\n num_heads: int = 8\r\n dropout: float = 0.0\r\n codebook_dropout: float = 0.01\r\n # Logging\r\n log: bool = False\r\n entity: str = """"\r\n project: str = """"\r\n name: str = ""train_tokenizer""\r\n tags: list = field(default_factory=lambda: [""tokenizer""])\r\n log_interval: int = 5\r\n log_image_interval: int = 250\r\n ckpt_dir: str = """"\r\n log_checkpoint_interval: int = 10000\r\n log_gradients: bool = False\r\n\r\n\r\nargs = tyro.cli(Args)\r\n\r\n\r\ndef tokenizer_loss_fn(params, state, inputs):\r\n # --- Compute loss ---\r\n outputs = state.apply_fn(\r\n params,\r\n inputs,\r\n training=True,\r\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\r\n )\r\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\r\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\r\n commitment_loss = jnp.square(\r\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\r\n ).mean()\r\n loss = mse + q_loss + args.vq_beta * commitment_loss\r\n\r\n # --- Compute validation metrics ---\r\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\r\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\r\n psnr = pix.psnr(gt, recon).mean()\r\n ssim = pix.ssim(gt, recon).mean()\r\n _, index_counts = jnp.unique_counts(\r\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\r\n )\r\n codebook_usage = (index_counts != 0).mean()\r\n metrics = dict(\r\n loss=loss,\r\n mse=mse,\r\n q_loss=q_loss,\r\n commitment_loss=commitment_loss,\r\n psnr=psnr,\r\n ssim=ssim,\r\n codebook_usage=codebook_usage,\r\n )\r\n return loss, (outputs[""recon""], metrics)\r\n\r\n\r\n@jax.jit\r\ndef train_step(state, inputs):\r\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\r\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\r\n state = state.apply_gradients(grads=grads)\r\n if args.log_gradients:\r\n metrics[""encoder_gradients_std/""] = jax.tree.map(\r\n lambda x: x.std(), grads[""params""][""encoder""]\r\n )\r\n metrics[""vq_gradients_std/""] = jax.tree.map(\r\n lambda x: x.std(), grads[""params""][""vq""]\r\n )\r\n metrics[""decoder_gradients_std/""] = jax.tree.map(\r\n lambda x: x.std(), grads[""params""][""decoder""]\r\n )\r\n return state, loss, recon, metrics\r\n\r\n\r\nif __name__ == ""__main__"":\r\n jax.distributed.initialize()\r\n num_devices = jax.device_count()\r\n if num_devices == 0:\r\n raise ValueError(""No JAX devices found."")\r\n print(f""Running on {num_devices} devices."")\r\n\r\n if args.batch_size % num_devices != 0:\r\n raise ValueError(\r\n f""Global batch size {args.batch_size} must be divisible by ""\r\n f""number of devices {num_devices}.""\r\n )\r\n\r\n per_device_batch_size_for_init = args.batch_size // num_devices\r\n\r\n rng = jax.random.PRNGKey(args.seed)\r\n if args.log and jax.process_index() == 0:\r\n wandb.init(\r\n entity=args.entity,\r\n project=args.project,\r\n name=args.name,\r\n tags=args.tags,\r\n group=""debug"",\r\n config=args\r\n )\r\n\r\n # --- Initialize model ---\r\n tokenizer = TokenizerVQVAE(\r\n in_dim=args.image_channels,\r\n 
model_dim=args.model_dim,\r\n latent_dim=args.latent_dim,\r\n num_latents=args.num_latents,\r\n patch_size=args.patch_size,\r\n num_blocks=args.num_blocks,\r\n num_heads=args.num_heads,\r\n dropout=args.dropout,\r\n codebook_dropout=args.codebook_dropout,\r\n )\r\n rng, _rng = jax.random.split(rng)\r\n image_shape = (args.image_height, args.image_width, args.image_channels)\r\n inputs = dict(\r\n videos=jnp.zeros(\r\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\r\n dtype=jnp.float32,\r\n ),\r\n )\r\n init_params = tokenizer.init(_rng, inputs)\r\n\r\n # --- Initialize optimizer ---\r\n lr_schedule = optax.warmup_cosine_decay_schedule(\r\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\r\n )\r\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\r\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\r\n\r\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\r\n device_mesh_arr = create_device_mesh((num_devices,))\r\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\r\n\r\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\r\n train_state = jax.device_put(train_state, replicated_sharding)\r\n\r\n # --- Load checkpoint ---\r\n step = 0\r\n if args.checkpoint:\r\n restore_target = {""model"": train_state}\r\n restore_args = orbax_utils.restore_args_from_target(restore_target)\r\n train_state.params[""params""].update(\r\n PyTreeCheckpointer()\r\n .restore(args.checkpoint, item=restore_target, restore_args=restore_args)[\r\n ""model""\r\n ]\r\n .params[""params""]\r\n )\r\n # Assume checkpoint is of the form tokenizer__\r\n step += int(args.checkpoint.split(""_"")[-1])\r\n\r\n # --- TRAIN LOOP ---\r\n tfrecord_files = [\r\n os.path.join(args.data_dir, x)\r\n for x in os.listdir(args.data_dir)\r\n if x.endswith("".tfrecord"")\r\n ]\r\n dataloader = get_dataloader(\r\n # NOTE: We deliberately pass the global batch size\r\n # The dataloader shards the dataset across all processes\r\n tfrecord_files,\r\n args.seq_len,\r\n args.batch_size,\r\n *image_shape,\r\n )\r\n print(f""Starting training from step {step}..."")\r\n while step < args.num_steps:\r\n for videos in dataloader:\r\n # npy_path = ""overfit_dir/single_sample_corner.npy""\r\n # npy_path = ""overfit_dir/single_batch_12_elems.npy""\r\n # videos = np.load(npy_path)\r\n # print(""batch shape: "", videos.shape)\r\n # while(True):\r\n # --- Train step ---\r\n np.save(f""/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample/train_tokenizer_sample_{step}.npy"", np.array(videos))\r\n print(f""Step {step}"")\r\n if step < 5000:\r\n break\r\n step += 1\r\n continue\r\n rng, _rng, _rng_dropout = jax.random.split(rng, 3)\r\n\r\n videos_sharding = NamedSharding(\r\n mesh, PartitionSpec(""data"", None, None, None, None)\r\n )\r\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\r\n\r\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\r\n train_state, loss, recon, metrics = train_step(train_state, inputs)\r\n print(f""Step {step}, loss: {loss}"")\r\n step += 1\r\n\r\n # --- Logging ---\r\n if args.log:\r\n if step % args.log_interval == 0 and jax.process_index() == 0:\r\n wandb.log(\r\n {\r\n ""loss"": loss,\r\n ""step"": step,\r\n **metrics,\r\n }\r\n )\r\n if step % args.log_image_interval == 0:\r\n gt_seq = inputs[""videos""][0]\r\n recon_seq = recon[0].clip(0, 1)\r\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\r\n 
comparison_seq = einops.rearrange(\r\n comparison_seq * 255, ""t h w c -> h (t w) c""\r\n )\r\n # NOTE: Process-dependent control flow deliberately happens\r\n # after indexing operation since it must not contain code\r\n # sections that lead to cross-accelerator communication.\r\n if jax.process_index() == 0:\r\n log_images = dict(\r\n image=wandb.Image(np.asarray(gt_seq[0])),\r\n recon=wandb.Image(np.asarray(recon_seq[0])),\r\n true_vs_recon=wandb.Image(\r\n np.asarray(comparison_seq.astype(np.uint8))\r\n ),\r\n )\r\n wandb.log(log_images)\r\n if step % args.log_checkpoint_interval == 0:\r\n ckpt = {""model"": train_state}\r\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\r\n save_args = orbax_utils.save_args_from_target(ckpt)\r\n orbax_checkpointer.save(\r\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{ts}_{step}""),\r\n ckpt,\r\n save_args=save_args,\r\n )\r\n if step >= args.num_steps:\r\n break\r\n]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +1627,2341129,"train_tokenizer.py",0,0,"",python,tab +1628,2341866,"TERMINAL",0,0,"watch",,terminal_focus +1629,2342809,"train_tokenizer.py",0,0,"",python,tab +1630,2343861,"train_tokenizer.py",7263,0,"",python,selection_mouse +1631,2344468,"train_tokenizer.py",7285,0,"",python,selection_mouse +1632,2345199,"train_tokenizer.py",7307,0,"",python,selection_mouse +1633,2345987,"train_tokenizer.py",7306,0,"",python,selection_command +1634,2346803,"train_tokenizer.py",7286,22,"",python,content +1635,2346909,"train_tokenizer.py",7298,0,"",python,selection_command +1636,2346931,"train_tokenizer.py",7276,0,"",python,selection_command +1637,2347062,"train_tokenizer.py",7248,0,"",python,selection_command +1638,2347195,"train_tokenizer.py",7214,0,"",python,selection_command +1639,2348212,"train_tokenizer.py",7051,0,"",python,selection_command +1640,2349279,"train_tokenizer.py",7201,0,"\n step += 1",python,content +1641,2349314,"train_tokenizer.py",7214,0,"",python,selection_command +1642,2352893,"train_tokenizer.py",7223,0,"",python,selection_mouse +1643,2352895,"train_tokenizer.py",7222,0,"",python,selection_command +1644,2353477,"train_tokenizer.py",7219,0,"",python,selection_mouse +1645,2357056,"TERMINAL",0,0,"srun",,terminal_focus +1646,2358126,"TERMINAL",0,0,"cat train_tokenizer.py ",,terminal_output +1647,2359075,"TERMINAL",0,0,"[26@scripts_horeka/overfit_sample_tiny/tester.sh",,terminal_output +1648,2359538,"TERMINAL",0,0,"\rsh",,terminal_output +1649,2359969,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+1650,2360084,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3337997\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0706\r\nSLURM_JOB_START_TIME=1751388043\r\nSLURM_STEP_NODELIST=hkn0706\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751391643\r\nSLURM_PMI2_SRUN_PORT=43539\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3309821\r\nSLURM_PTY_PORT=45555\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0706\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=175\r\nSLURM_NODELIST=hkn0706\r\nSLURM_SRUN_COMM_PORT=36793\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3309821\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0706\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=36793\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0706\r\n",,terminal_output +1651,2362080,"TERMINAL",0,0,"bash",,terminal_focus +1652,2362212,"TERMINAL",0,0,"2025-07-01 19:05:07.607529: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751389507.620433 3352054 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751389507.624895 3352054 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751389507.637656 3352054 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389507.637673 3352054 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389507.637676 3352054 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389507.637678 3352054 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +1653,2364692,"TERMINAL",0,0,"W0000 00:00:1751389510.130985 3352054 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +1654,2365019,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +1655,2365554,"TERMINAL",0,0,"cd ..",,terminal_command +1656,2365590,"TERMINAL",0,0,"]633;E;2025-07-01 19:05:11 cd ..;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data]633;D;0",,terminal_output +1657,2365875,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +1658,2366464,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250701_190511-eqy0m22f\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run tokenizer-tiny-overfit-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/eqy0m22f\r\n",,terminal_output +1659,2367912,"TERMINAL",0,0,"2025-07-01 19:05:13.270129: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1660,2370034,"TERMINAL",0,0,"cd overfit_dir/single_sample/",,terminal_command +1661,2372757,"TERMINAL",0,0,"watch -n1 ""ls -l | wc -l""",,terminal_command +1662,2372811,"TERMINAL",0,0,"]633;E;2025-07-01 19:05:18 watch -n1 ""ls -l | wc -l"";ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C[?1049h(B[?7hEvery 1.0s: ls -l | wc -lhkn1993.localdomain: Tue Jul 1 19:05:18 20252",,terminal_output +1663,2373831,"TERMINAL",0,0,"9",,terminal_output +1664,2374892,"TERMINAL",0,0,"20",,terminal_output +1665,2375903,"TERMINAL",0,0,"1",,terminal_output +1666,2376927,"TERMINAL",0,0,"2",,terminal_output +1667,2377950,"TERMINAL",0,0,"3",,terminal_output +1668,2378932,"TERMINAL",0,0,"4",,terminal_output +1669,2379951,"TERMINAL",0,0,"5",,terminal_output +1670,2380973,"TERMINAL",0,0,"6",,terminal_output +1671,2381329,"TERMINAL",0,0,"2025-07-01 19:05:26.722413: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1672,2381995,"TERMINAL",0,0,"7",,terminal_output +1673,2383009,"TERMINAL",0,0,"8",,terminal_output +1674,2383728,"TERMINAL",0,0,"2025-07-01 19:05:29.032805: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1675,2384089,"TERMINAL",0,0,"9",,terminal_output +1676,2385080,"TERMINAL",0,0,"30",,terminal_output +1677,2386070,"TERMINAL",0,0,"1",,terminal_output +1678,2387166,"TERMINAL",0,0,"2",,terminal_output +1679,2387417,"TERMINAL",0,0,"Starting training from step 0...\r\n",,terminal_output +1680,2387781,"TERMINAL",0,0,"Step 1\r\n",,terminal_output +1681,2387834,"TERMINAL",0,0,"Step 2\r\nStep 3\r\n",,terminal_output +1682,2387950,"TERMINAL",0,0,"Step 4\r\nStep 5\r\n",,terminal_output +1683,2388069,"TERMINAL",0,0,"Step 6\r\nStep 7\r\nStep 8\r\n",,terminal_output +1684,2388134,"TERMINAL",0,0,"Step 9\r\n",,terminal_output +1685,2388203,"TERMINAL",0,0,"Step 10\r\nStep 11\r\nStep 12\r\n",,terminal_output +1686,2388264,"TERMINAL",0,0,"Step 13\r\nStep 14\r\n",,terminal_output +1687,2388371,"TERMINAL",0,0,"Step 15\r\nStep 16\r\nStep 17\r\nStep 18\r\n",,terminal_output +1688,2388435,"TERMINAL",0,0,"Step 19\r\nStep 20\r\n",,terminal_output +1689,2388447,"TERMINAL",0,0,"3\r10",,terminal_output +1690,2388501,"TERMINAL",0,0,"Step 21\r\n",,terminal_output +1691,2388566,"TERMINAL",0,0,"Step 22\r\nStep 23\r\n",,terminal_output +1692,2388618,"TERMINAL",0,0,"Step 24\r\n",,terminal_output +1693,2388818,"TERMINAL",0,0,"Step 25\r\nStep 26\r\nStep 27\r\nStep 28\r\nStep 29\r\nStep 30\r\nStep 31\r\nStep 32\r\nStep 33\r\nStep 34\r\n",,terminal_output +1694,2388869,"TERMINAL",0,0,"Step 35\r\n",,terminal_output +1695,2388979,"TERMINAL",0,0,"Step 36\r\nStep 37\r\nStep 38\r\nStep 39\r\n",,terminal_output +1696,2389041,"TERMINAL",0,0,"Step 40\r\nStep 41\r\n",,terminal_output +1697,2389150,"TERMINAL",0,0,"Step 42\r\nStep 43\r\n",,terminal_output +1698,2389270,"TERMINAL",0,0,"Step 44\r\nStep 45\r\n",,terminal_output +1699,2389333,"TERMINAL",0,0,"Step 46\r\nStep 47\r\nStep 48\r\nStep 49\r\nStep 50\r\nStep 51\r\n",,terminal_output +1700,2389392,"TERMINAL",0,0,"Step 52\r\n",,terminal_output +1701,2389456,"TERMINAL",0,0,"Step 53\r\n",,terminal_output +1702,2389523,"TERMINAL",0,0,"Step 54\r\nStep 55\r\n",,terminal_output +1703,2389589,"TERMINAL",0,0,"Step 56\r\n",,terminal_output +1704,2389654,"TERMINAL",0,0,"Step 57\r\nStep 58\r\n",,terminal_output +1705,2389766,"TERMINAL",0,0,"Step 59\r\nStep 60\r\n",,terminal_output +1706,2389767,"TERMINAL",0,0,"4\r54",,terminal_output +1707,2389823,"TERMINAL",0,0,"Step 61\r\nStep 62\r\n",,terminal_output +1708,2390023,"TERMINAL",0,0,"Step 63\r\nStep 64\r\nStep 65\r\nStep 66\r\nStep 67\r\nStep 68\r\nStep 69\r\nStep 70\r\nStep 71\r\n",,terminal_output +1709,2390131,"TERMINAL",0,0,"Step 72\r\nStep 73\r\n",,terminal_output +1710,2390194,"TERMINAL",0,0,"Step 74\r\n",,terminal_output +1711,2390279,"TERMINAL",0,0,"Step 75\r\nStep 76\r\nStep 77\r\n",,terminal_output +1712,2390430,"TERMINAL",0,0,"Step 78\r\nStep 79\r\nStep 80\r\n",,terminal_output +1713,2390483,"TERMINAL",0,0,"Step 81\r\n",,terminal_output +1714,2390591,"TERMINAL",0,0,"Step 82\r\nStep 83\r\nStep 84\r\nStep 85\r\n",,terminal_output +1715,2390731,"TERMINAL",0,0,"Step 86\r\nStep 87\r\nStep 88\r\nStep 89\r\n",,terminal_output +1716,2390852,"TERMINAL",0,0,"6\r90",,terminal_output +1717,2390866,"TERMINAL",0,0,"Step 90\r\nStep 91\r\n",,terminal_output +1718,2391009,"TERMINAL",0,0,"Step 92\r\nStep 93\r\nStep 94\r\nStep 95\r\nStep 96\r\n",,terminal_output +1719,2391071,"TERMINAL",0,0,"Step 97\r\n",,terminal_output +1720,2391136,"TERMINAL",0,0,"Step 98\r\nStep 99\r\n",,terminal_output 
+1721,2391201,"TERMINAL",0,0,"Step 100\r\nStep 101\r\n",,terminal_output +1722,2391313,"TERMINAL",0,0,"Step 102\r\nStep 103\r\nStep 104\r\n",,terminal_output +1723,2391422,"TERMINAL",0,0,"Step 105\r\nStep 106\r\n",,terminal_output +1724,2391556,"TERMINAL",0,0,"Step 107\r\nStep 108\r\nStep 109\r\n",,terminal_output +1725,2391610,"TERMINAL",0,0,"Step 110\r\n",,terminal_output +1726,2391758,"TERMINAL",0,0,"Step 111\r\nStep 112\r\nStep 113\r\nStep 114\r\nStep 115\r\nStep 116\r\nStep 117\r\n",,terminal_output +1727,2391899,"TERMINAL",0,0,"Step 118\r\nStep 119\r\nStep 120\r\nStep 121\r\n",,terminal_output +1728,2391996,"TERMINAL",0,0,"Step 122\r\nStep 123\r\n",,terminal_output +1729,2392064,"TERMINAL",0,0,"7\r119",,terminal_output +1730,2392130,"TERMINAL",0,0,"Step 124\r\nStep 125\r\nStep 126\r\nStep 127\r\n",,terminal_output +1731,2392238,"TERMINAL",0,0,"Step 128\r\nStep 129\r\n",,terminal_output +1732,2392365,"TERMINAL",0,0,"Step 130\r\nStep 131\r\nStep 132\r\nStep 133\r\n",,terminal_output +1733,2392429,"TERMINAL",0,0,"Step 134\r\nStep 135\r\nStep 136\r\nStep 137\r\n",,terminal_output +1734,2392566,"TERMINAL",0,0,"Step 138\r\nStep 139\r\nStep 140\r\nStep 141\r\nStep 142\r\nStep 143\r\nStep 144\r\n",,terminal_output +1735,2392713,"TERMINAL",0,0,"Step 145\r\nStep 146\r\nStep 147\r\n",,terminal_output +1736,2392783,"TERMINAL",0,0,"Step 148\r\nStep 149\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_tokenizer.py"", line 215, in \r\n for videos in dataloader:\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/dataset_ops.py"", line 4788, in __next__\r\n return nest.map_structure(to_numpy, next(self._iterator))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/iterator_ops.py"", line 826, in __next__\r\n return self._next_internal()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/iterator_ops.py"", line 776, in _next_internal\r\n ret = gen_dataset_ops.iterator_get_next(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/ops/gen_dataset_ops.py"", line 3086, in iterator_get_next\r\n _ops.raise_from_not_ok_status(e, name)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/framework/ops.py"", line 6006, in raise_from_not_ok_status\r\n raise core._status_to_exception(e) from None # pylint: disable=protected-access\r\ntensorflow.python.framework.errors_impl.InvalidArgumentError: {{function_node __wrapped__IteratorGetNext_output_types_1_device_/job:localhost/replica:0/task:0/device:CPU:0}} Error in user-defined function passed to ParallelMapDatasetV2:5 transformation with iterator: Iterator::Root::Prefetch::BatchV2::ForeverRepeat[0]::ParallelMapV2: Need minval < maxval, got 0 >= -14\r\n\t [[{{node random_uniform}}]] [Op:IteratorGetNext] name: \r\n",,terminal_output +1737,2393545,"TERMINAL",0,0,"8\r150",,terminal_output +1738,2394375,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run tokenizer-tiny-overfit-0000 at: https://wandb.ai/instant-uv/jafar/runs/eqy0m22f\r\nwandb: Find logs at: 
../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250701_190511-eqy0m22f/logs\r\n",,terminal_output +1739,2394572,"TERMINAL",0,0,"9",,terminal_output +1740,2395667,"TERMINAL",0,0,"41",,terminal_output +1741,2395730,"TERMINAL",0,0,"]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +1742,2396616,"TERMINAL",0,0,"2",,terminal_output +1743,2397713,"TERMINAL",0,0,"3",,terminal_output +1744,2398738,"TERMINAL",0,0,"4",,terminal_output +1745,2399762,"TERMINAL",0,0,"5",,terminal_output +1746,2400746,"TERMINAL",0,0,"6",,terminal_output +1747,2401776,"TERMINAL",0,0,"7",,terminal_output +1748,2402834,"TERMINAL",0,0,"8",,terminal_output +1749,2403882,"TERMINAL",0,0,"9",,terminal_output +1750,2404881,"TERMINAL",0,0,"50",,terminal_output +1751,2405888,"TERMINAL",0,0,"1",,terminal_output +1752,2406925,"TERMINAL",0,0,"2",,terminal_output +1753,2407957,"TERMINAL",0,0,"3",,terminal_output +1754,2408989,"TERMINAL",0,0,"4",,terminal_output +1755,2410019,"TERMINAL",0,0,"5",,terminal_output +1756,2411049,"TERMINAL",0,0,"6",,terminal_output +1757,2412089,"TERMINAL",0,0,"7",,terminal_output +1758,2413117,"TERMINAL",0,0,"8",,terminal_output +1759,2414162,"TERMINAL",0,0,"9",,terminal_output +1760,2415224,"TERMINAL",0,0,"6:00",,terminal_output +1761,2416208,"TERMINAL",0,0,"1",,terminal_output +1762,2417272,"TERMINAL",0,0,"2",,terminal_output +1763,2418296,"TERMINAL",0,0,"3",,terminal_output +1764,2419331,"TERMINAL",0,0,"4",,terminal_output +1765,2420366,"TERMINAL",0,0,"5",,terminal_output +1766,2421473,"TERMINAL",0,0,"6",,terminal_output +1767,2422412,"TERMINAL",0,0,"7",,terminal_output +1768,2423459,"TERMINAL",0,0,"8",,terminal_output +1769,2424474,"TERMINAL",0,0,"9",,terminal_output +1770,2425565,"TERMINAL",0,0,"10",,terminal_output +1771,2426541,"TERMINAL",0,0,"1",,terminal_output +1772,2427615,"TERMINAL",0,0,"2",,terminal_output +1773,2428638,"TERMINAL",0,0,"4",,terminal_output +1774,2429663,"TERMINAL",0,0,"5",,terminal_output +1775,2430687,"TERMINAL",0,0,"6",,terminal_output +1776,2431712,"TERMINAL",0,0,"7",,terminal_output +1777,2432727,"TERMINAL",0,0,"8",,terminal_output +1778,2433758,"TERMINAL",0,0,"9",,terminal_output +1779,2434885,"TERMINAL",0,0,"20",,terminal_output +1780,2435909,"TERMINAL",0,0,"1",,terminal_output +1781,2436931,"TERMINAL",0,0,"2",,terminal_output +1782,2437957,"TERMINAL",0,0,"3",,terminal_output +1783,2438903,"TERMINAL",0,0,"4",,terminal_output +1784,2439939,"TERMINAL",0,0,"5",,terminal_output +1785,2440975,"TERMINAL",0,0,"6",,terminal_output +1786,2442006,"TERMINAL",0,0,"7",,terminal_output +1787,2443043,"TERMINAL",0,0,"8",,terminal_output +1788,2444101,"TERMINAL",0,0,"9",,terminal_output +1789,2445093,"TERMINAL",0,0,"30",,terminal_output +1790,2446151,"TERMINAL",0,0,"1",,terminal_output +1791,2447172,"TERMINAL",0,0,"2",,terminal_output +1792,2448301,"TERMINAL",0,0,"3",,terminal_output +1793,2449227,"TERMINAL",0,0,"4",,terminal_output +1794,2450258,"TERMINAL",0,0,"5",,terminal_output +1795,2451296,"TERMINAL",0,0,"6",,terminal_output +1796,2452397,"TERMINAL",0,0,"7",,terminal_output +1797,2453420,"TERMINAL",0,0,"8",,terminal_output +1798,2454382,"TERMINAL",0,0,"9",,terminal_output +1799,2455415,"TERMINAL",0,0,"40",,terminal_output +1800,2456432,"TERMINAL",0,0,"1",,terminal_output +1801,2457463,"TERMINAL",0,0,"2",,terminal_output +1802,2458539,"TERMINAL",0,0,"3",,terminal_output +1803,2459528,"TERMINAL",0,0,"4",,terminal_output +1804,2460589,"TERMINAL",0,0,"5",,terminal_output 
+1805,2461595,"TERMINAL",0,0,"6",,terminal_output +1806,2462621,"TERMINAL",0,0,"8",,terminal_output +1807,2463763,"TERMINAL",0,0,"9",,terminal_output +1808,2464692,"TERMINAL",0,0,"50",,terminal_output +1809,2465810,"TERMINAL",0,0,"1",,terminal_output +1810,2466836,"TERMINAL",0,0,"2",,terminal_output +1811,2467879,"TERMINAL",0,0,"3",,terminal_output +1812,2468873,"TERMINAL",0,0,"4",,terminal_output +1813,2469912,"TERMINAL",0,0,"5",,terminal_output +1814,2470931,"TERMINAL",0,0,"6",,terminal_output +1815,2471932,"TERMINAL",0,0,"7",,terminal_output +1816,2472965,"TERMINAL",0,0,"8",,terminal_output +1817,2474002,"TERMINAL",0,0,"9",,terminal_output +1818,2475034,"TERMINAL",0,0,"7:00",,terminal_output +1819,2476082,"TERMINAL",0,0,"1",,terminal_output +1820,2477108,"TERMINAL",0,0,"2",,terminal_output +1821,2478145,"TERMINAL",0,0,"3",,terminal_output +1822,2479178,"TERMINAL",0,0,"4",,terminal_output +1823,2480208,"TERMINAL",0,0,"5",,terminal_output +1824,2481242,"TERMINAL",0,0,"6",,terminal_output +1825,2482281,"TERMINAL",0,0,"7",,terminal_output +1826,2483326,"TERMINAL",0,0,"8",,terminal_output +1827,2484345,"TERMINAL",0,0,"9",,terminal_output +1828,2485397,"TERMINAL",0,0,"10",,terminal_output +1829,2486410,"TERMINAL",0,0,"1",,terminal_output +1830,2487520,"TERMINAL",0,0,"2",,terminal_output +1831,2488474,"TERMINAL",0,0,"3",,terminal_output +1832,2489569,"TERMINAL",0,0,"4",,terminal_output +1833,2490553,"TERMINAL",0,0,"5",,terminal_output +1834,2491564,"TERMINAL",0,0,"6",,terminal_output +1835,2492640,"TERMINAL",0,0,"8",,terminal_output +1836,2493662,"TERMINAL",0,0,"9",,terminal_output +1837,2494688,"TERMINAL",0,0,"20",,terminal_output +1838,2495711,"TERMINAL",0,0,"1",,terminal_output +1839,2496740,"TERMINAL",0,0,"2",,terminal_output +1840,2497764,"TERMINAL",0,0,"3",,terminal_output +1841,2498876,"TERMINAL",0,0,"4",,terminal_output +1842,2499915,"TERMINAL",0,0,"5",,terminal_output +1843,2500849,"TERMINAL",0,0,"6",,terminal_output +1844,2501967,"TERMINAL",0,0,"7",,terminal_output +1845,2502913,"TERMINAL",0,0,"8",,terminal_output +1846,2503951,"TERMINAL",0,0,"9",,terminal_output +1847,2504969,"TERMINAL",0,0,"30",,terminal_output +1848,2506001,"TERMINAL",0,0,"1",,terminal_output +1849,2507031,"TERMINAL",0,0,"2",,terminal_output +1850,2508069,"TERMINAL",0,0,"3",,terminal_output +1851,2509100,"TERMINAL",0,0,"4",,terminal_output +1852,2510153,"TERMINAL",0,0,"5",,terminal_output +1853,2511173,"TERMINAL",0,0,"6",,terminal_output +1854,2512199,"TERMINAL",0,0,"7",,terminal_output +1855,2513222,"TERMINAL",0,0,"8",,terminal_output +1856,2514242,"TERMINAL",0,0,"9",,terminal_output +1857,2515371,"TERMINAL",0,0,"40",,terminal_output +1858,2516317,"TERMINAL",0,0,"1",,terminal_output +1859,2517356,"TERMINAL",0,0,"2",,terminal_output +1860,2518382,"TERMINAL",0,0,"3",,terminal_output +1861,2519468,"TERMINAL",0,0,"4",,terminal_output +1862,2520456,"TERMINAL",0,0,"5",,terminal_output +1863,2521479,"TERMINAL",0,0,"6",,terminal_output +1864,2522510,"TERMINAL",0,0,"7",,terminal_output +1865,2523553,"TERMINAL",0,0,"8",,terminal_output +1866,2524578,"TERMINAL",0,0,"9",,terminal_output +1867,2525719,"TERMINAL",0,0,"51",,terminal_output +1868,2526737,"TERMINAL",0,0,"2",,terminal_output +1869,2527676,"TERMINAL",0,0,"3",,terminal_output +1870,2528785,"TERMINAL",0,0,"4",,terminal_output +1871,2529816,"TERMINAL",0,0,"5",,terminal_output +1872,2530787,"TERMINAL",0,0,"6",,terminal_output +1873,2531859,"TERMINAL",0,0,"7",,terminal_output +1874,2532809,"TERMINAL",0,0,"8",,terminal_output 
+1875,2533846,"TERMINAL",0,0,"9",,terminal_output +1876,2534874,"TERMINAL",0,0,"8:00",,terminal_output +1877,2535906,"TERMINAL",0,0,"1",,terminal_output +1878,2536473,"utils/dataloader.py",0,0,"",python,tab +1879,2536913,"utils/dataloader.py",707,0,"",python,selection_mouse +1880,2536945,"utils/dataloader.py",706,0,"",python,selection_command +1881,2536960,"TERMINAL",0,0,"2",,terminal_output +1882,2538005,"TERMINAL",0,0,"3",,terminal_output +1883,2538073,"utils/dataloader.py",0,0,"",python,selection_command +1884,2538593,"utils/dataloader.py",0,16,"import functools",python,selection_command +1885,2539000,"TERMINAL",0,0,"4",,terminal_output +1886,2539169,"utils/dataloader.py",0,4378,"import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntf.config.experimental.set_visible_devices([], ""GPU"")\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef get_dataloader(\n tfrecord_paths: list[str], # List of TFRecord file paths\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 10,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n num_processes = 
jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), ""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n dataset = tf.data.TFRecordDataset(\n tfrecord_paths, num_parallel_reads=tf.data.AUTOTUNE\n )\n\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n\n # (f.srambical) NOTE: For TFRecords, it's often good to have a large shuffle buffer.\n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n",python,selection_command +1887,2540033,"TERMINAL",0,0,"5",,terminal_output +1888,2540625,"utils/dataloader.py",0,4378,"",python,content +1889,2541083,"utils/dataloader.py",0,0,"import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntf.config.experimental.set_visible_devices([], ""GPU"")\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c, seed):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n seed: The seed for the random number generator.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = tf.shape(episode_tensor)[0]\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32, seed=seed\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n 
video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef _create_processed_dataset_from_file(file_path, image_h, image_w, image_c, seq_len, num_parallel_calls, seed):\n """"""Creates a fully processed dataset from a single TFRecord file.""""""\n dataset = tf.data.TFRecordDataset([file_path])\n \n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n # Filter out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n seed=seed,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n \n return dataset\n\n\ndef get_dataloader(\n tfrecord_paths: list[str],\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 10,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n cycle_length: int = 4,\n block_length: int = 1,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), f""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n def dataset_fn(file_path):\n return _create_processed_dataset_from_file(\n file_path, image_h, image_w, image_c, seq_len, num_parallel_calls, seed\n )\n \n dataset = tf.data.Dataset.from_tensor_slices(tfrecord_paths)\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n \n dataset = dataset.interleave(\n dataset_fn,\n cycle_length=cycle_length,\n block_length=block_length,\n num_parallel_calls=num_parallel_calls,\n deterministic=False\n )\n \n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()",python,content +1890,2541127,"TERMINAL",0,0,"6",,terminal_output +1891,2542159,"TERMINAL",0,0,"7",,terminal_output +1892,2543122,"TERMINAL",0,0,"8",,terminal_output +1893,2544159,"TERMINAL",0,0,"9",,terminal_output +1894,2545187,"TERMINAL",0,0,"10",,terminal_output +1895,2546098,"utils/dataloader.py",3778,0,"",python,selection_mouse +1896,2546216,"TERMINAL",0,0,"1",,terminal_output +1897,2546273,"utils/dataloader.py",3768,12,"cycle_length",python,selection_mouse +1898,2546794,"utils/dataloader.py",3788,0,"",python,selection_mouse +1899,2546930,"utils/dataloader.py",3788,1,"4",python,selection_mouse +1900,2547079,"utils/dataloader.py",3787,2," 4",python,selection_mouse +1901,2547121,"utils/dataloader.py",3785,4," = 4",python,selection_mouse 
+1902,2547145,"utils/dataloader.py",3782,7,"int = 4",python,selection_mouse +1903,2547243,"TERMINAL",0,0,"2",,terminal_output +1904,2547568,"utils/dataloader.py",3784,0,"",python,selection_mouse +1905,2547649,"utils/dataloader.py",3782,3,"int",python,selection_mouse +1906,2547905,"utils/dataloader.py",3782,4,"int ",python,selection_mouse +1907,2547906,"utils/dataloader.py",3782,5,"int =",python,selection_mouse +1908,2547906,"utils/dataloader.py",3782,6,"int = ",python,selection_mouse +1909,2548348,"TERMINAL",0,0,"3",,terminal_output +1910,2548907,"utils/dataloader.py",3682,0,"",python,selection_mouse +1911,2549296,"TERMINAL",0,0,"4",,terminal_output +1912,2550340,"TERMINAL",0,0,"5",,terminal_output +1913,2551185,"TERMINAL",0,0,"srun",,terminal_focus +1914,2551369,"TERMINAL",0,0,"6",,terminal_output +1915,2552091,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/tester.sh ",,terminal_output +1916,2552442,"TERMINAL",0,0,"7",,terminal_output +1917,2552705,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +1918,2552827,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3337997\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0706\r\nSLURM_JOB_START_TIME=1751388043\r\nSLURM_STEP_NODELIST=hkn0706\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751391643\r\nSLURM_PMI2_SRUN_PORT=43539\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3309821\r\nSLURM_PTY_PORT=45555\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0706\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=175\r\nSLURM_NODELIST=hkn0706\r\nSLURM_SRUN_COMM_PORT=36793\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3309821\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0706\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=36793\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0706\r\n",,terminal_output +1919,2553415,"TERMINAL",0,0,"8",,terminal_output +1920,2554447,"TERMINAL",0,0,"9",,terminal_output +1921,2555012,"TERMINAL",0,0,"2025-07-01 19:08:20.423596: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751389700.436310 3353438 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751389700.440565 3353438 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when 
one has already been registered\r\nW0000 00:00:1751389700.453329 3353438 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389700.453347 3353438 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389700.453349 3353438 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389700.453351 3353438 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +1922,2555475,"TERMINAL",0,0,"20",,terminal_output +1923,2556539,"TERMINAL",0,0,"1",,terminal_output +1924,2557561,"TERMINAL",0,0,"W0000 00:00:1751389702.940572 3353438 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +1925,2557574,"TERMINAL",0,0,"2",,terminal_output +1926,2557816,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +1927,2558586,"TERMINAL",0,0,"3",,terminal_output +1928,2558600,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +1929,2559204,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250701_190824-1s0vgz12\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run tokenizer-tiny-overfit-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/1s0vgz12\r\n",,terminal_output +1930,2559608,"TERMINAL",0,0,"4",,terminal_output +1931,2560584,"TERMINAL",0,0,"2025-07-01 19:08:26.010543: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1932,2560602,"TERMINAL",0,0,"6",,terminal_output +1933,2561657,"TERMINAL",0,0,"7",,terminal_output +1934,2562683,"TERMINAL",0,0,"8",,terminal_output +1935,2563700,"TERMINAL",0,0,"9",,terminal_output +1936,2564834,"TERMINAL",0,0,"30",,terminal_output +1937,2565856,"TERMINAL",0,0,"1",,terminal_output +1938,2566882,"TERMINAL",0,0,"2",,terminal_output +1939,2567823,"TERMINAL",0,0,"3",,terminal_output +1940,2568863,"TERMINAL",0,0,"4",,terminal_output +1941,2569925,"TERMINAL",0,0,"5",,terminal_output +1942,2570977,"TERMINAL",0,0,"6",,terminal_output +1943,2571934,"TERMINAL",0,0,"7",,terminal_output +1944,2572955,"TERMINAL",0,0,"8",,terminal_output +1945,2573992,"TERMINAL",0,0,"9",,terminal_output +1946,2574142,"TERMINAL",0,0,"2025-07-01 19:08:39.580717: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1947,2575023,"TERMINAL",0,0,"40",,terminal_output +1948,2576057,"TERMINAL",0,0,"1",,terminal_output +1949,2576428,"TERMINAL",0,0,"2025-07-01 19:08:41.866695: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +1950,2577087,"TERMINAL",0,0,"2",,terminal_output +1951,2578143,"TERMINAL",0,0,"3",,terminal_output +1952,2579171,"TERMINAL",0,0,"4",,terminal_output +1953,2580295,"TERMINAL",0,0,"5",,terminal_output +1954,2580432,"TERMINAL",0,0,"Starting training from step 0...\r\n",,terminal_output +1955,2581216,"TERMINAL",0,0,"6",,terminal_output +1956,2582342,"TERMINAL",0,0,"7",,terminal_output +1957,2583368,"TERMINAL",0,0,"8",,terminal_output +1958,2583682,"TERMINAL",0,0,"Step 1\r\nStep 2\r\n",,terminal_output +1959,2583734,"TERMINAL",0,0,"Step 3\r\n",,terminal_output +1960,2583860,"TERMINAL",0,0,"Step 4\r\nStep 5\r\n",,terminal_output +1961,2583934,"TERMINAL",0,0,"Step 6\r\n",,terminal_output +1962,2584049,"TERMINAL",0,0,"Step 7\r\n",,terminal_output +1963,2584391,"TERMINAL",0,0,"Step 8\r\n",,terminal_output +1964,2584501,"TERMINAL",0,0,"9",,terminal_output +1965,2584612,"TERMINAL",0,0,"Step 9\r\n",,terminal_output +1966,2584725,"TERMINAL",0,0,"Step 10\r\n",,terminal_output +1967,2584835,"TERMINAL",0,0,"Step 11\r\n",,terminal_output +1968,2584922,"TERMINAL",0,0,"Step 12\r\nStep 13\r\nStep 14\r\nStep 15\r\nStep 16\r\nStep 17\r\n",,terminal_output +1969,2585058,"TERMINAL",0,0,"Step 18\r\nStep 19\r\nStep 20\r\nStep 21\r\nStep 22\r\nStep 23\r\nStep 24\r\nStep 25\r\nStep 26\r\nStep 27\r\nStep 28\r\n",,terminal_output +1970,2585184,"TERMINAL",0,0,"Step 29\r\nStep 30\r\nStep 31\r\nStep 32\r\nStep 33\r\nStep 34\r\n",,terminal_output +1971,2585262,"TERMINAL",0,0,"Step 35\r\nStep 36\r\nStep 37\r\nStep 38\r\nStep 39\r\nStep 40\r\nStep 41\r\nStep 42\r\nStep 43\r\nStep 44\r\n",,terminal_output +1972,2585397,"TERMINAL",0,0,"Step 45\r\nStep 46\r\nStep 47\r\nStep 48\r\nStep 49\r\nStep 50\r\nStep 51\r\nStep 52\r\nStep 53\r\nStep 54\r\nStep 55\r\n",,terminal_output +1973,2585479,"TERMINAL",0,0,"Step 56\r\nStep 57\r\nStep 58\r\nStep 59\r\nStep 60\r\nStep 61\r\n",,terminal_output +1974,2585543,"TERMINAL",0,0,"Step 62\r\nStep 63\r\n",,terminal_output +1975,2585676,"TERMINAL",0,0,"Step 64\r\nStep 65\r\nStep 66\r\nStep 67\r\nStep 68\r\n",,terminal_output +1976,2585737,"TERMINAL",0,0,"Step 69\r\nStep 70\r\nStep 71\r\n",,terminal_output +1977,2585873,"TERMINAL",0,0,"Step 72\r\nStep 73\r\nStep 74\r\nStep 75\r\nStep 76\r\nStep 77\r\n",,terminal_output +1978,2585938,"TERMINAL",0,0,"Step 78\r\nStep 79\r\nStep 80\r\nStep 81\r\n",,terminal_output +1979,2586114,"TERMINAL",0,0,"Step 82\r\nStep 83\r\nStep 84\r\nStep 85\r\nStep 86\r\nStep 87\r\nStep 88\r\nStep 89\r\nStep 90\r\nStep 91\r\nStep 92\r\nStep 93\r\n",,terminal_output +1980,2586182,"TERMINAL",0,0,"50",,terminal_output +1981,2586182,"TERMINAL",0,0,"Step 94\r\nStep 95\r\n",,terminal_output +1982,2586256,"TERMINAL",0,0,"Step 96\r\nStep 97\r\nStep 98\r\nStep 99\r\nStep 100\r\nStep 101\r\n",,terminal_output +1983,2586396,"TERMINAL",0,0,"Step 102\r\nStep 103\r\nStep 104\r\nStep 105\r\nStep 106\r\nStep 107\r\n",,terminal_output +1984,2586532,"TERMINAL",0,0,"Step 
108\r\nStep 109\r\nStep 110\r\nStep 111\r\nStep 112\r\nStep 113\r\nStep 114\r\nStep 115\r\nStep 116\r\n",,terminal_output +1985,2586598,"TERMINAL",0,0,"Step 117\r\nStep 118\r\nStep 119\r\n",,terminal_output +1986,2586690,"TERMINAL",0,0,"Step 120\r\nStep 121\r\nStep 122\r\nStep 123\r\nStep 124\r\n",,terminal_output +1987,2586750,"TERMINAL",0,0,"Step 125\r\nStep 126\r\nStep 127\r\nStep 128\r\nStep 129\r\n",,terminal_output +1988,2586879,"TERMINAL",0,0,"Step 130\r\nStep 131\r\nStep 132\r\nStep 133\r\nStep 134\r\nStep 135\r\nStep 136\r\nStep 137\r\nStep 138\r\nStep 139\r\nStep 140\r\nStep 141\r\n",,terminal_output +1989,2587207,"TERMINAL",0,0,"Step 142\r\nStep 143\r\nStep 144\r\nStep 145\r\nStep 146\r\nStep 147\r\nStep 148\r\nStep 149\r\nStep 150\r\nStep 151\r\nStep 152\r\nStep 153\r\nStep 154\r\nStep 155\r\nStep 156\r\nStep 157\r\nStep 158\r\nStep 159\r\nStep 160\r\nStep 161\r\nStep 162\r\nStep 163\r\nStep 164\r\nStep 165\r\nStep 166\r\nStep 167\r\nStep 168\r\nStep 169\r\n",,terminal_output +1990,2587292,"TERMINAL",0,0,"Step 170\r\nStep 171\r\n",,terminal_output +1991,2587353,"TERMINAL",0,0,"Step 172\r\nStep 173\r\n",,terminal_output +1992,2587474,"TERMINAL",0,0,"Step 174\r\nStep 175\r\nStep 176\r\nStep 177\r\nStep 178\r\n",,terminal_output +1993,2587619,"TERMINAL",0,0,"2\r165",,terminal_output +1994,2587619,"TERMINAL",0,0,"Step 179\r\nStep 180\r\nStep 181\r\nStep 182\r\nStep 183\r\nStep 184\r\nStep 185\r\nStep 186\r\nStep 187\r\nStep 188\r\nStep 189\r\n",,terminal_output +1995,2587684,"TERMINAL",0,0,"Step 190\r\nStep 191\r\n",,terminal_output +1996,2587755,"TERMINAL",0,0,"Step 192\r\n",,terminal_output +1997,2587812,"TERMINAL",0,0,"Step 193\r\nStep 194\r\nStep 195\r\n",,terminal_output +1998,2587946,"TERMINAL",0,0,"Step 196\r\nStep 197\r\nStep 198\r\nStep 199\r\nStep 200\r\nStep 201\r\n",,terminal_output +1999,2588060,"TERMINAL",0,0,"Step 202\r\nStep 203\r\nStep 204\r\nStep 205\r\nStep 206\r\n",,terminal_output +2000,2588156,"TERMINAL",0,0,"Step 207\r\nStep 208\r\nStep 209\r\n",,terminal_output +2001,2588293,"TERMINAL",0,0,"Step 210\r\nStep 211\r\nStep 212\r\nStep 213\r\nStep 214\r\nStep 215\r\nStep 216\r\nStep 217\r\nStep 218\r\nStep 219\r\nStep 220\r\n",,terminal_output +2002,2588354,"TERMINAL",0,0,"Step 221\r\nStep 222\r\nStep 223\r\n",,terminal_output +2003,2588469,"TERMINAL",0,0,"Step 224\r\nStep 225\r\nStep 226\r\nStep 227\r\nStep 228\r\n",,terminal_output +2004,2588526,"TERMINAL",0,0,"Step 229\r\nStep 230\r\nStep 231\r\n",,terminal_output +2005,2588652,"TERMINAL",0,0,"Step 232\r\nStep 233\r\nStep 234\r\nStep 235\r\nStep 236\r\n",,terminal_output +2006,2588768,"TERMINAL",0,0,"Step 237\r\nStep 238\r\nStep 239\r\nStep 240\r\nStep 241\r\n",,terminal_output +2007,2588885,"TERMINAL",0,0,"Step 242\r\nStep 243\r\nStep 244\r\nStep 245\r\nStep 246\r\nStep 247\r\nStep 248\r\nStep 249\r\nStep 250\r\nStep 251\r\n",,terminal_output +2008,2588962,"TERMINAL",0,0,"Step 252\r\nStep 253\r\nStep 254\r\nStep 255\r\n",,terminal_output +2009,2589069,"TERMINAL",0,0,"Step 256\r\n",,terminal_output +2010,2589199,"TERMINAL",0,0,"Step 257\r\nStep 258\r\nStep 259\r\nStep 260\r\nStep 261\r\nStep 262\r\nStep 263\r\nStep 264\r\nStep 265\r\nStep 266\r\nStep 267\r\nStep 268\r\n",,terminal_output +2011,2589263,"TERMINAL",0,0,"4\r23",,terminal_output +2012,2589386,"TERMINAL",0,0,"Step 269\r\nStep 270\r\nStep 271\r\nStep 272\r\nStep 273\r\nStep 274\r\nStep 275\r\n",,terminal_output +2013,2589492,"TERMINAL",0,0,"Step 276\r\nStep 277\r\nStep 278\r\nStep 279\r\nStep 280\r\nStep 281\r\n",,terminal_output 
+2014,2589588,"TERMINAL",0,0,"Step 282\r\nStep 283\r\n",,terminal_output +2015,2589656,"TERMINAL",0,0,"Step 284\r\nStep 285\r\nStep 286\r\nStep 287\r\n",,terminal_output +2016,2589715,"TERMINAL",0,0,"Step 288\r\n",,terminal_output +2017,2589787,"TERMINAL",0,0,"Step 289\r\nStep 290\r\nStep 291\r\nStep 292\r\n",,terminal_output +2018,2589848,"TERMINAL",0,0,"Step 293\r\n",,terminal_output +2019,2589971,"TERMINAL",0,0,"Step 294\r\nStep 295\r\nStep 296\r\nStep 297\r\nStep 298\r\nStep 299\r\nStep 300\r\nStep 301\r\n",,terminal_output +2020,2590083,"TERMINAL",0,0,"Step 302\r\nStep 303\r\nStep 304\r\nStep 305\r\n",,terminal_output +2021,2590219,"TERMINAL",0,0,"Step 306\r\nStep 307\r\nStep 308\r\nStep 309\r\nStep 310\r\nStep 311\r\nStep 312\r\n",,terminal_output +2022,2590294,"TERMINAL",0,0,"Step 313\r\nStep 314\r\nStep 315\r\nStep 316\r\nStep 317\r\nStep 318\r\nStep 319\r\n",,terminal_output +2023,2590411,"TERMINAL",0,0,"Step 320\r\nStep 321\r\nStep 322\r\nStep 323\r\nStep 324\r\nStep 325\r\n",,terminal_output +2024,2590572,"TERMINAL",0,0,"Step 326\r\nStep 327\r\nStep 328\r\nStep 329\r\nStep 330\r\nStep 331\r\nStep 332\r\n",,terminal_output +2025,2590631,"TERMINAL",0,0,"Step 333\r\nStep 334\r\n",,terminal_output +2026,2590764,"TERMINAL",0,0,"Step 335\r\nStep 336\r\nStep 337\r\nStep 338\r\nStep 339\r\nStep 340\r\nStep 341\r\n",,terminal_output +2027,2590826,"TERMINAL",0,0,"Step 342\r\nStep 343\r\nStep 344\r\nStep 345\r\nStep 346\r\n",,terminal_output +2028,2590971,"TERMINAL",0,0,"Step 347\r\nStep 348\r\nStep 349\r\nStep 350\r\nStep 351\r\nStep 352\r\nStep 353\r\nStep 354\r\nStep 355\r\nStep 356\r\n",,terminal_output +2029,2591037,"TERMINAL",0,0,"Step 357\r\nStep 358\r\n",,terminal_output +2030,2591101,"TERMINAL",0,0,"Step 359\r\nStep 360\r\nStep 361\r\nStep 362\r\n",,terminal_output +2031,2591154,"TERMINAL",0,0,"Step 363\r\n",,terminal_output +2032,2591262,"TERMINAL",0,0,"Step 364\r\nStep 365\r\nStep 366\r\nStep 367\r\nStep 368\r\n",,terminal_output +2033,2591387,"TERMINAL",0,0,"Step 369\r\nStep 370\r\nStep 371\r\nStep 372\r\nStep 373\r\nStep 374\r\nStep 375\r\nStep 376\r\nStep 377\r\n",,terminal_output +2034,2591440,"TERMINAL",0,0,"Step 378\r\n",,terminal_output +2035,2591535,"TERMINAL",0,0,"5\r319",,terminal_output +2036,2591612,"TERMINAL",0,0,"Step 379\r\nStep 380\r\nStep 381\r\nStep 382\r\nStep 383\r\nStep 384\r\nStep 385\r\nStep 386\r\nStep 387\r\nStep 388\r\n",,terminal_output +2037,2591674,"TERMINAL",0,0,"Step 389\r\n",,terminal_output +2038,2591742,"TERMINAL",0,0,"Step 390\r\nStep 391\r\nStep 392\r\nStep 393\r\n",,terminal_output +2039,2591803,"TERMINAL",0,0,"Step 394\r\nStep 395\r\n",,terminal_output +2040,2591869,"TERMINAL",0,0,"Step 396\r\nStep 397\r\nStep 398\r\nStep 399\r\nStep 400\r\nStep 401\r\n",,terminal_output +2041,2591973,"TERMINAL",0,0,"Step 402\r\nStep 403\r\n",,terminal_output +2042,2592036,"TERMINAL",0,0,"Step 404\r\nStep 405\r\n",,terminal_output +2043,2592097,"TERMINAL",0,0,"Step 406\r\nStep 407\r\nStep 408\r\nStep 409\r\n",,terminal_output +2044,2592163,"TERMINAL",0,0,"Step 410\r\nStep 411\r\nStep 412\r\n",,terminal_output +2045,2592227,"TERMINAL",0,0,"Step 413\r\nStep 414\r\nStep 415\r\n",,terminal_output +2046,2592357,"TERMINAL",0,0,"Step 416\r\nStep 417\r\nStep 418\r\nStep 419\r\nStep 420\r\nStep 421\r\n",,terminal_output +2047,2592419,"TERMINAL",0,0,"Step 422\r\nStep 423\r\n",,terminal_output +2048,2592537,"TERMINAL",0,0,"Step 424\r\nStep 425\r\nStep 426\r\nStep 427\r\nStep 428\r\nStep 429\r\nStep 430\r\nStep 431\r\n",,terminal_output 
+2049,2592666,"TERMINAL",0,0,"Step 432\r\nStep 433\r\nStep 434\r\nStep 435\r\n",,terminal_output +2050,2592732,"TERMINAL",0,0,"Step 436\r\nStep 437\r\nStep 438\r\n",,terminal_output +2051,2592860,"TERMINAL",0,0,"Step 439\r\nStep 440\r\nStep 441\r\nStep 442\r\nStep 443\r\nStep 444\r\nStep 445\r\nStep 446\r\nStep 447\r\nStep 448\r\nStep 449\r\n",,terminal_output +2052,2592965,"TERMINAL",0,0,"Step 450\r\nStep 451\r\nStep 452\r\nStep 453\r\nStep 454\r\nStep 455\r\nStep 456\r\nStep 457\r\nStep 458\r\nStep 459\r\nStep 460\r\nStep 461\r\n",,terminal_output +2053,2593069,"TERMINAL",0,0,"Step 462\r\nStep 463\r\nStep 464\r\nStep 465\r\nStep 466\r\nStep 467\r\nStep 468\r\nStep 469\r\nStep 470\r\nStep 471\r\nStep 472\r\nStep 473\r\nStep 474\r\n",,terminal_output +2054,2593130,"TERMINAL",0,0,"Step 475\r\nStep 476\r\nStep 477\r\nStep 478\r\n",,terminal_output +2055,2593411,"TERMINAL",0,0,"Step 479\r\nStep 480\r\nStep 481\r\nStep 482\r\nStep 483\r\nStep 484\r\nStep 485\r\nStep 486\r\nStep 487\r\nStep 488\r\nStep 489\r\nStep 490\r\nStep 491\r\n",,terminal_output +2056,2593468,"TERMINAL",0,0,"Step 492\r\nStep 493\r\nStep 494\r\nStep 495\r\n",,terminal_output +2057,2593578,"TERMINAL",0,0,"Step 496\r\nStep 497\r\n",,terminal_output +2058,2593907,"TERMINAL",0,0,"Step 498\r\nStep 499\r\nStep 500\r\nStep 501\r\nStep 502\r\nStep 503\r\nStep 504\r\nStep 505\r\nStep 506\r\nStep 507\r\nStep 508\r\nStep 509\r\nStep 510\r\nStep 511\r\nStep 512\r\nStep 513\r\nStep 514\r\nStep 515\r\nStep 516\r\nStep 517\r\nStep 518\r\nStep 519\r\nStep 520\r\nStep 521\r\nStep 522\r\nStep 523\r\nStep 524\r\nStep 525\r\nStep 526\r\nStep 527\r\nStep 528\r\nStep 529\r\nStep 530\r\nStep 531\r\nStep 532\r\n",,terminal_output +2059,2594012,"TERMINAL",0,0,"Step 533\r\nStep 534\r\nStep 535\r\nStep 536\r\nStep 537\r\nStep 538\r\nStep 539\r\nStep 540\r\nStep 541\r\n",,terminal_output +2060,2594121,"TERMINAL",0,0,"Step 542\r\nStep 543\r\nStep 544\r\n",,terminal_output +2061,2594235,"TERMINAL",0,0,"Step 545\r\nStep 546\r\n",,terminal_output +2062,2594297,"TERMINAL",0,0,"7\r431",,terminal_output +2063,2594359,"TERMINAL",0,0,"Step 547\r\n",,terminal_output +2064,2594556,"TERMINAL",0,0,"Step 548\r\nStep 549\r\nStep 550\r\nStep 551\r\nStep 552\r\nStep 553\r\nStep 554\r\n",,terminal_output +2065,2594858,"TERMINAL",0,0,"Step 555\r\nStep 556\r\nStep 557\r\nStep 558\r\nStep 559\r\n",,terminal_output +2066,2594924,"TERMINAL",0,0,"Step 560\r\nStep 561\r\n",,terminal_output +2067,2595075,"TERMINAL",0,0,"Step 562\r\nStep 563\r\nStep 564\r\n",,terminal_output +2068,2595262,"TERMINAL",0,0,"Step 565\r\n",,terminal_output +2069,2595398,"TERMINAL",0,0,"Step 566\r\nStep 567\r\nStep 568\r\nStep 569\r\nStep 570\r\nStep 571\r\nStep 572\r\nStep 573\r\nStep 574\r\n",,terminal_output +2070,2595677,"TERMINAL",0,0,"Step 575\r\nStep 576\r\nStep 577\r\nStep 578\r\nStep 579\r\n",,terminal_output +2071,2595740,"TERMINAL",0,0,"Step 580\r\nStep 581\r\nStep 582\r\nStep 583\r\n",,terminal_output +2072,2595803,"TERMINAL",0,0,"Step 584\r\n",,terminal_output +2073,2595865,"TERMINAL",0,0,"Step 585\r\n",,terminal_output +2074,2595959,"TERMINAL",0,0,"Step 586\r\nStep 587\r\nStep 588\r\nStep 589\r\n",,terminal_output +2075,2596079,"TERMINAL",0,0,"Step 590\r\nStep 591\r\nStep 592\r\nStep 593\r\nStep 594\r\n",,terminal_output +2076,2596143,"TERMINAL",0,0,"Step 595\r\n",,terminal_output +2077,2596443,"train_tokenizer.py",0,0,"",python,tab +2078,2596533,"TERMINAL",0,0,"Step 596\r\nStep 597\r\nStep 598\r\nStep 599\r\nStep 600\r\nStep 601\r\nStep 602\r\n",,terminal_output 
+2079,2596700,"TERMINAL",0,0,"Step 603\r\nStep 604\r\nStep 605\r\nStep 606\r\nStep 607\r\nStep 608\r\nStep 609\r\nStep 610\r\nStep 611\r\nStep 612\r\nStep 613\r\nStep 614\r\nStep 615\r\nStep 616\r\nStep 617\r\nStep 618\r\nStep 619\r\n",,terminal_output +2080,2596931,"TERMINAL",0,0,"Step 620\r\nStep 621\r\nStep 622\r\nStep 623\r\nStep 624\r\nStep 625\r\nStep 626\r\nStep 627\r\nStep 628\r\nStep 629\r\nStep 630\r\nStep 631\r\nStep 632\r\nStep 633\r\nStep 634\r\nStep 635\r\nStep 636\r\nStep 637\r\n",,terminal_output +2081,2597078,"TERMINAL",0,0,"9:00\r566",,terminal_output +2082,2597166,"TERMINAL",0,0,"Step 638\r\nStep 639\r\nStep 640\r\nStep 641\r\nStep 642\r\nStep 643\r\nStep 644\r\nStep 645\r\nStep 646\r\nStep 647\r\nStep 648\r\n",,terminal_output +2083,2597273,"TERMINAL",0,0,"Step 649\r\nStep 650\r\n",,terminal_output +2084,2597376,"TERMINAL",0,0,"Step 651\r\nStep 652\r\nStep 653\r\nStep 654\r\n",,terminal_output +2085,2597505,"TERMINAL",0,0,"Step 655\r\nStep 656\r\nStep 657\r\nStep 658\r\nStep 659\r\nStep 660\r\nStep 661\r\nStep 662\r\n",,terminal_output +2086,2597557,"TERMINAL",0,0,"Step 663\r\n",,terminal_output +2087,2597622,"TERMINAL",0,0,"Step 664\r\nStep 665\r\n",,terminal_output +2088,2597686,"TERMINAL",0,0,"Step 666\r\nStep 667\r\nStep 668\r\nStep 669\r\n",,terminal_output +2089,2597749,"TERMINAL",0,0,"Step 670\r\nStep 671\r\nStep 672\r\nStep 673\r\n",,terminal_output +2090,2598003,"TERMINAL",0,0,"Step 674\r\nStep 675\r\nStep 676\r\nStep 677\r\nStep 678\r\nStep 679\r\nStep 680\r\nStep 681\r\n",,terminal_output +2091,2598078,"TERMINAL",0,0,"Step 682\r\nStep 683\r\nStep 684\r\n",,terminal_output +2092,2598176,"TERMINAL",0,0,"Step 685\r\nStep 686\r\nStep 687\r\nStep 688\r\nStep 689\r\nStep 690\r\nStep 691\r\n",,terminal_output +2093,2598240,"TERMINAL",0,0,"Step 692\r\nStep 693\r\nStep 694\r\n",,terminal_output +2094,2598319,"TERMINAL",0,0,"Step 695\r\nStep 696\r\nStep 697\r\nStep 698\r\n",,terminal_output +2095,2598375,"TERMINAL",0,0,"Step 699\r\n",,terminal_output +2096,2598441,"TERMINAL",0,0,"Step 700\r\nStep 701\r\nStep 702\r\n",,terminal_output +2097,2598629,"TERMINAL",0,0,"Step 703\r\nStep 704\r\n",,terminal_output +2098,2598743,"TERMINAL",0,0,"Step 705\r\nStep 706\r\nStep 707\r\nStep 708\r\nStep 709\r\nStep 710\r\nStep 711\r\nStep 712\r\nStep 713\r\nStep 714\r\nStep 715\r\n",,terminal_output +2099,2598878,"TERMINAL",0,0,"Step 716\r\nStep 717\r\nStep 718\r\nStep 719\r\nStep 720\r\nStep 721\r\nStep 722\r\nStep 723\r\n",,terminal_output +2100,2599030,"TERMINAL",0,0,"Step 724\r\nStep 725\r\nStep 726\r\nStep 727\r\nStep 728\r\nStep 729\r\nStep 730\r\nStep 731\r\nStep 732\r\n",,terminal_output +2101,2599123,"TERMINAL",0,0,"Step 733\r\nStep 734\r\nStep 735\r\nStep 736\r\nStep 737\r\nStep 738\r\n",,terminal_output +2102,2599189,"TERMINAL",0,0,"Step 739\r\nStep 740\r\nStep 741\r\nStep 742\r\nStep 743\r\n",,terminal_output +2103,2599302,"TERMINAL",0,0,"Step 744\r\nStep 745\r\nStep 746\r\nStep 747\r\n",,terminal_output +2104,2599361,"TERMINAL",0,0,"Step 748\r\nStep 749\r\nStep 750\r\nStep 751\r\n",,terminal_output +2105,2599522,"TERMINAL",0,0,"Step 752\r\nStep 753\r\nStep 754\r\nStep 755\r\nStep 756\r\nStep 757\r\nStep 758\r\n",,terminal_output +2106,2599586,"TERMINAL",0,0,"Step 759\r\nStep 760\r\n",,terminal_output +2107,2599711,"TERMINAL",0,0,"Step 761\r\nStep 762\r\nStep 763\r\nStep 764\r\nStep 765\r\nStep 766\r\nStep 767\r\nStep 768\r\nStep 769\r\nStep 770\r\n",,terminal_output +2108,2599821,"TERMINAL",0,0,"Step 771\r\nStep 772\r\n",,terminal_output +2109,2599883,"TERMINAL",0,0,"Step 
773\r\nStep 774\r\n",,terminal_output +2110,2599883,"TERMINAL",0,0,"3\r685",,terminal_output +2111,2599999,"TERMINAL",0,0,"Step 775\r\nStep 776\r\nStep 777\r\nStep 778\r\nStep 779\r\nStep 780\r\n",,terminal_output +2112,2600106,"TERMINAL",0,0,"Step 781\r\nStep 782\r\nStep 783\r\nStep 784\r\n",,terminal_output +2113,2600168,"TERMINAL",0,0,"Step 785\r\n",,terminal_output +2114,2600230,"TERMINAL",0,0,"Step 786\r\nStep 787\r\n",,terminal_output +2115,2600292,"TERMINAL",0,0,"Step 788\r\nStep 789\r\nStep 790\r\n",,terminal_output +2116,2600425,"TERMINAL",0,0,"Step 791\r\nStep 792\r\nStep 793\r\nStep 794\r\nStep 795\r\nStep 796\r\nStep 797\r\nStep 798\r\nStep 799\r\nStep 800\r\n",,terminal_output +2117,2600572,"TERMINAL",0,0,"Step 801\r\nStep 802\r\nStep 803\r\n",,terminal_output +2118,2600637,"TERMINAL",0,0,"Step 804\r\nStep 805\r\nStep 806\r\n",,terminal_output +2119,2600746,"TERMINAL",0,0,"Step 807\r\nStep 808\r\nStep 809\r\nStep 810\r\nStep 811\r\nStep 812\r\nStep 813\r\nStep 814\r\nStep 815\r\n",,terminal_output +2120,2600949,"TERMINAL",0,0,"Step 816\r\nStep 817\r\nStep 818\r\nStep 819\r\nStep 820\r\nStep 821\r\nStep 822\r\nStep 823\r\nStep 824\r\n",,terminal_output +2121,2601016,"TERMINAL",0,0,"Step 825\r\nStep 826\r\nStep 827\r\n",,terminal_output +2122,2601141,"TERMINAL",0,0,"Step 828\r\nStep 829\r\nStep 830\r\nStep 831\r\nStep 832\r\nStep 833\r\n",,terminal_output +2123,2601256,"TERMINAL",0,0,"Step 834\r\nStep 835\r\nStep 836\r\n",,terminal_output +2124,2601315,"TERMINAL",0,0,"Step 837\r\nStep 838\r\nStep 839\r\nStep 840\r\nStep 841\r\nStep 842\r\nStep 843\r\n",,terminal_output +2125,2601437,"TERMINAL",0,0,"Step 844\r\nStep 845\r\nStep 846\r\nStep 847\r\nStep 848\r\n",,terminal_output +2126,2601497,"TERMINAL",0,0,"Step 849\r\nStep 850\r\nStep 851\r\n",,terminal_output +2127,2601622,"TERMINAL",0,0,"Step 852\r\nStep 853\r\nStep 854\r\nStep 855\r\nStep 856\r\nStep 857\r\nStep 858\r\n",,terminal_output +2128,2601871,"TERMINAL",0,0,"Step 859\r\nStep 860\r\nStep 861\r\nStep 862\r\nStep 863\r\nStep 864\r\nStep 865\r\nStep 866\r\nStep 867\r\nStep 868\r\nStep 869\r\n",,terminal_output +2129,2601955,"TERMINAL",0,0,"Step 870\r\nStep 871\r\nStep 872\r\n",,terminal_output +2130,2602079,"TERMINAL",0,0,"Step 873\r\nStep 874\r\nStep 875\r\nStep 876\r\n",,terminal_output +2131,2602152,"TERMINAL",0,0,"Step 877\r\nStep 878\r\nStep 879\r\nStep 880\r\n",,terminal_output +2132,2602282,"TERMINAL",0,0,"Step 881\r\nStep 882\r\nStep 883\r\nStep 884\r\n",,terminal_output +2133,2602339,"TERMINAL",0,0,"Step 885\r\nStep 886\r\nStep 887\r\n",,terminal_output +2134,2602466,"TERMINAL",0,0,"Step 888\r\nStep 889\r\nStep 890\r\nStep 891\r\nStep 892\r\nStep 893\r\nStep 894\r\n",,terminal_output +2135,2602518,"TERMINAL",0,0,"Step 895\r\n",,terminal_output +2136,2602618,"TERMINAL",0,0,"Step 896\r\nStep 897\r\nStep 898\r\nStep 899\r\nStep 900\r\nStep 901\r\nStep 902\r\nStep 903\r\nStep 904\r\nStep 905\r\nStep 906\r\nStep 907\r\nStep 908\r\n",,terminal_output +2137,2602779,"TERMINAL",0,0,"Step 909\r\nStep 910\r\nStep 911\r\nStep 912\r\nStep 913\r\nStep 914\r\nStep 915\r\nStep 916\r\nStep 917\r\nStep 918\r\nStep 919\r\nStep 920\r\nStep 921\r\nStep 922\r\nStep 923\r\nStep 924\r\n",,terminal_output +2138,2602842,"TERMINAL",0,0,"Step 925\r\nStep 926\r\n",,terminal_output +2139,2602843,"TERMINAL",0,0,"6\r821",,terminal_output +2140,2603067,"TERMINAL",0,0,"Step 927\r\nStep 928\r\nStep 929\r\nStep 930\r\nStep 931\r\nStep 932\r\nStep 933\r\nStep 934\r\nStep 935\r\nStep 936\r\nStep 937\r\nStep 938\r\nStep 939\r\nStep 940\r\nStep 
941\r\nStep 942\r\nStep 943\r\nStep 944\r\nStep 945\r\nStep 946\r\nStep 947\r\n",,terminal_output +2141,2603162,"TERMINAL",0,0,"Step 948\r\nStep 949\r\n",,terminal_output +2142,2603216,"TERMINAL",0,0,"Step 950\r\n",,terminal_output +2143,2603305,"TERMINAL",0,0,"Step 951\r\nStep 952\r\nStep 953\r\nStep 954\r\nStep 955\r\n",,terminal_output +2144,2603497,"TERMINAL",0,0,"Step 956\r\nStep 957\r\nStep 958\r\nStep 959\r\nStep 960\r\nStep 961\r\nStep 962\r\nStep 963\r\nStep 964\r\nStep 965\r\nStep 966\r\nStep 967\r\nStep 968\r\nStep 969\r\nStep 970\r\nStep 971\r\nStep 972\r\nStep 973\r\nStep 974\r\nStep 975\r\nStep 976\r\nStep 977\r\n",,terminal_output +2145,2603564,"TERMINAL",0,0,"Step 978\r\n",,terminal_output +2146,2603624,"TERMINAL",0,0,"Step 979\r\n",,terminal_output +2147,2603750,"TERMINAL",0,0,"Step 980\r\nStep 981\r\nStep 982\r\nStep 983\r\n",,terminal_output +2148,2603978,"TERMINAL",0,0,"Step 984\r\nStep 985\r\nStep 986\r\nStep 987\r\nStep 988\r\nStep 989\r\nStep 990\r\nStep 991\r\nStep 992\r\nStep 993\r\nStep 994\r\nStep 995\r\nStep 996\r\nStep 997\r\n",,terminal_output +2149,2604030,"TERMINAL",0,0,"Step 998\r\nStep 999\r\nStep 1000\r\n",,terminal_output +2150,2604176,"TERMINAL",0,0,"Step 1001\r\n",,terminal_output +2151,2604239,"TERMINAL",0,0,"Step 1002\r\n",,terminal_output +2152,2604370,"TERMINAL",0,0,"Step 1003\r\nStep 1004\r\nStep 1005\r\nStep 1006\r\nStep 1007\r\nStep 1008\r\nStep 1009\r\n",,terminal_output +2153,2604437,"TERMINAL",0,0,"Step 1010\r\nStep 1011\r\nStep 1012\r\nStep 1013\r\n",,terminal_output +2154,2604789,"TERMINAL",0,0,"Step 1014\r\nStep 1015\r\n",,terminal_output +2155,2605055,"TERMINAL",0,0,"Step 1016\r\nStep 1017\r\nStep 1018\r\nStep 1019\r\nStep 1020\r\nStep 1021\r\nStep 1022\r\nStep 1023\r\nStep 1024\r\nStep 1025\r\nStep 1026\r\nStep 1027\r\nStep 1028\r\nStep 1029\r\n",,terminal_output +2156,2605200,"TERMINAL",0,0,"Step 1030\r\nStep 1031\r\nStep 1032\r\nStep 1033\r\nStep 1034\r\n",,terminal_output +2157,2605271,"TERMINAL",0,0,"Step 1035\r\nStep 1036\r\n",,terminal_output +2158,2605414,"TERMINAL",0,0,"Step 1037\r\nStep 1038\r\nStep 1039\r\n",,terminal_output +2159,2605479,"TERMINAL",0,0,"Step 1040\r\nStep 1041\r\nStep 1042\r\n",,terminal_output +2160,2605547,"TERMINAL",0,0,"Step 1043\r\nStep 1044\r\nStep 1045\r\nStep 1046\r\n",,terminal_output +2161,2605591,"TERMINAL",0,0,"9\r988",,terminal_output +2162,2605606,"TERMINAL",0,0,"Step 1047\r\n",,terminal_output +2163,2605746,"TERMINAL",0,0,"Step 1048\r\nStep 1049\r\nStep 1050\r\nStep 1051\r\nStep 1052\r\nStep 1053\r\nStep 1054\r\n",,terminal_output +2164,2605818,"TERMINAL",0,0,"Step 1055\r\nStep 1056\r\nStep 1057\r\n",,terminal_output +2165,2605962,"TERMINAL",0,0,"Step 1058\r\nStep 1059\r\nStep 1060\r\nStep 1061\r\nStep 1062\r\nStep 1063\r\nStep 1064\r\nStep 1065\r\n",,terminal_output +2166,2606054,"TERMINAL",0,0,"Step 1066\r\n",,terminal_output +2167,2606171,"TERMINAL",0,0,"Step 1067\r\nStep 1068\r\nStep 1069\r\nStep 1070\r\nStep 1071\r\nStep 1072\r\nStep 1073\r\nStep 1074\r\nStep 1075\r\nStep 1076\r\n",,terminal_output +2168,2606229,"TERMINAL",0,0,"Step 1077\r\nStep 1078\r\n",,terminal_output +2169,2606317,"TERMINAL",0,0,"Step 1079\r\nStep 1080\r\nStep 1081\r\nStep 1082\r\n",,terminal_output +2170,2606369,"TERMINAL",0,0,"Step 1083\r\nStep 1084\r\nStep 1085\r\nStep 1086\r\n",,terminal_output +2171,2606487,"TERMINAL",0,0,"Step 1087\r\nStep 1088\r\nStep 1089\r\nStep 1090\r\nStep 1091\r\nStep 1092\r\nStep 1093\r\nStep 1094\r\n",,terminal_output +2172,2606611,"TERMINAL",0,0,"Step 1095\r\nStep 1096\r\nStep 
1097\r\nStep 1098\r\nStep 1099\r\nStep 1100\r\n",,terminal_output +2173,2606695,"TERMINAL",0,0,"Step 1101\r\nStep 1102\r\nStep 1103\r\nStep 1104\r\n",,terminal_output +2174,2606757,"TERMINAL",0,0,"Step 1105\r\nStep 1106\r\nStep 1107\r\n",,terminal_output +2175,2606870,"TERMINAL",0,0,"Step 1108\r\nStep 1109\r\n",,terminal_output +2176,2606940,"TERMINAL",0,0,"Step 1110\r\nStep 1111\r\nStep 1112\r\nStep 1113\r\nStep 1114\r\nStep 1115\r\nStep 1116\r\nStep 1117\r\n",,terminal_output +2177,2607010,"TERMINAL",0,0,"Step 1118\r\nStep 1119\r\nStep 1120\r\n",,terminal_output +2178,2607139,"TERMINAL",0,0,"Step 1121\r\nStep 1122\r\n",,terminal_output +2179,2607208,"TERMINAL",0,0,"Step 1123\r\nStep 1124\r\nStep 1125\r\nStep 1126\r\nStep 1127\r\nStep 1128\r\nStep 1129\r\n",,terminal_output +2180,2607307,"TERMINAL",0,0,"Step 1130\r\nStep 1131\r\nStep 1132\r\n",,terminal_output +2181,2607424,"TERMINAL",0,0,"Step 1133\r\nStep 1134\r\nStep 1135\r\nStep 1136\r\nStep 1137\r\nStep 1138\r\nStep 1139\r\nStep 1140\r\n",,terminal_output +2182,2607533,"TERMINAL",0,0,"Step 1141\r\nStep 1142\r\nStep 1143\r\nStep 1144\r\n",,terminal_output +2183,2607600,"TERMINAL",0,0,"Step 1145\r\nStep 1146\r\nStep 1147\r\nStep 1148\r\n",,terminal_output +2184,2607730,"TERMINAL",0,0,"Step 1149\r\nStep 1150\r\nStep 1151\r\nStep 1152\r\nStep 1153\r\n",,terminal_output +2185,2607928,"TERMINAL",0,0,"12\r1100",,terminal_output +2186,2607993,"TERMINAL",0,0,"Step 1154\r\nStep 1155\r\nStep 1156\r\nStep 1157\r\nStep 1158\r\nStep 1159\r\nStep 1160\r\nStep 1161\r\nStep 1162\r\nStep 1163\r\nStep 1164\r\nStep 1165\r\nStep 1166\r\nStep 1167\r\nStep 1168\r\n",,terminal_output +2187,2608101,"TERMINAL",0,0,"Step 1169\r\nStep 1170\r\nStep 1171\r\n",,terminal_output +2188,2608216,"TERMINAL",0,0,"Step 1172\r\nStep 1173\r\nStep 1174\r\nStep 1175\r\nStep 1176\r\nStep 1177\r\n",,terminal_output +2189,2608268,"TERMINAL",0,0,"Step 1178\r\n",,terminal_output +2190,2608376,"TERMINAL",0,0,"Step 1179\r\nStep 1180\r\n",,terminal_output +2191,2608486,"TERMINAL",0,0,"Step 1181\r\nStep 1182\r\nStep 1183\r\nStep 1184\r\nStep 1185\r\nStep 1186\r\nStep 1187\r\n",,terminal_output +2192,2608555,"TERMINAL",0,0,"Step 1188\r\nStep 1189\r\nStep 1190\r\nStep 1191\r\n",,terminal_output +2193,2608612,"TERMINAL",0,0,"Step 1192\r\n",,terminal_output +2194,2608677,"TERMINAL",0,0,"Step 1193\r\nStep 1194\r\n",,terminal_output +2195,2608810,"TERMINAL",0,0,"Step 1195\r\nStep 1196\r\nStep 1197\r\nStep 1198\r\nStep 1199\r\nStep 1200\r\nStep 1201\r\nStep 1202\r\nStep 1203\r\n",,terminal_output +2196,2608870,"TERMINAL",0,0,"Step 1204\r\nStep 1205\r\nStep 1206\r\nStep 1207\r\nStep 1208\r\n",,terminal_output +2197,2609042,"TERMINAL",0,0,"Step 1209\r\nStep 1210\r\nStep 1211\r\nStep 1212\r\n",,terminal_output +2198,2609166,"TERMINAL",0,0,"Step 1213\r\nStep 1214\r\nStep 1215\r\nStep 1216\r\nStep 1217\r\nStep 1218\r\nStep 1219\r\nStep 1220\r\nStep 1221\r\n",,terminal_output +2199,2609285,"TERMINAL",0,0,"Step 1222\r\nStep 1223\r\nStep 1224\r\nStep 1225\r\nStep 1226\r\nStep 1227\r\n",,terminal_output +2200,2609352,"TERMINAL",0,0,"Step 1228\r\nStep 1229\r\nStep 1230\r\nStep 1231\r\nStep 1232\r\nStep 1233\r\n",,terminal_output +2201,2609466,"TERMINAL",0,0,"Step 1234\r\nStep 1235\r\nStep 1236\r\nStep 1237\r\n",,terminal_output +2202,2609542,"TERMINAL",0,0,"Step 1238\r\nStep 1239\r\nStep 1240\r\nStep 1241\r\nStep 1242\r\nStep 1243\r\nStep 1244\r\nStep 1245\r\n",,terminal_output +2203,2609693,"TERMINAL",0,0,"Step 1246\r\nStep 1247\r\nStep 1248\r\nStep 1249\r\nStep 1250\r\n",,terminal_output 
+2204,2609782,"TERMINAL",0,0,"Step 1251\r\nStep 1252\r\nStep 1253\r\nStep 1254\r\n",,terminal_output +2205,2609845,"TERMINAL",0,0,"Step 1255\r\nStep 1256\r\nStep 1257\r\nStep 1258\r\n",,terminal_output +2206,2609982,"TERMINAL",0,0,"Step 1259\r\nStep 1260\r\nStep 1261\r\nStep 1262\r\nStep 1263\r\nStep 1264\r\nStep 1265\r\nStep 1266\r\n",,terminal_output +2207,2610034,"TERMINAL",0,0,"4\r1209",,terminal_output +2208,2610058,"TERMINAL",0,0,"Step 1267\r\nStep 1268\r\n",,terminal_output +2209,2610125,"TERMINAL",0,0,"Step 1269\r\nStep 1270\r\nStep 1271\r\nStep 1272\r\n",,terminal_output +2210,2610216,"TERMINAL",0,0,"Step 1273\r\nStep 1274\r\nStep 1275\r\nStep 1276\r\nStep 1277\r\nStep 1278\r\n",,terminal_output +2211,2610278,"TERMINAL",0,0,"Step 1279\r\nStep 1280\r\nStep 1281\r\n",,terminal_output +2212,2610402,"TERMINAL",0,0,"Step 1282\r\nStep 1283\r\nStep 1284\r\nStep 1285\r\nStep 1286\r\nStep 1287\r\n",,terminal_output +2213,2610512,"TERMINAL",0,0,"Step 1288\r\nStep 1289\r\n",,terminal_output +2214,2610573,"TERMINAL",0,0,"Step 1290\r\nStep 1291\r\n",,terminal_output +2215,2610633,"TERMINAL",0,0,"Step 1292\r\n",,terminal_output +2216,2610909,"TERMINAL",0,0,"Step 1293\r\nStep 1294\r\nStep 1295\r\nStep 1296\r\nStep 1297\r\nStep 1298\r\nStep 1299\r\nStep 1300\r\nStep 1301\r\nStep 1302\r\nStep 1303\r\nStep 1304\r\nStep 1305\r\nStep 1306\r\nStep 1307\r\nStep 1308\r\nStep 1309\r\nStep 1310\r\nStep 1311\r\nStep 1312\r\nStep 1313\r\nStep 1314\r\nStep 1315\r\nStep 1316\r\n",,terminal_output +2217,2611013,"TERMINAL",0,0,"Step 1317\r\n",,terminal_output +2218,2611079,"TERMINAL",0,0,"Step 1318\r\nStep 1319\r\nStep 1320\r\nStep 1321\r\nStep 1322\r\nStep 1323\r\nStep 1324\r\n",,terminal_output +2219,2611147,"TERMINAL",0,0,"Step 1325\r\n",,terminal_output +2220,2611209,"TERMINAL",0,0,"Step 1326\r\nStep 1327\r\nStep 1328\r\nStep 1329\r\nStep 1330\r\n",,terminal_output +2221,2611344,"TERMINAL",0,0,"Step 1331\r\nStep 1332\r\nStep 1333\r\nStep 1334\r\n",,terminal_output +2222,2611477,"TERMINAL",0,0,"Step 1335\r\nStep 1336\r\nStep 1337\r\nStep 1338\r\nStep 1339\r\nStep 1340\r\nStep 1341\r\nStep 1342\r\nStep 1343\r\nStep 1344\r\nStep 1345\r\nStep 1346\r\nStep 1347\r\nStep 1348\r\nStep 1349\r\nStep 1350\r\nStep 1351\r\n",,terminal_output +2223,2611540,"TERMINAL",0,0,"Step 1352\r\nStep 1353\r\nStep 1354\r\nStep 1355\r\nStep 1356\r\n",,terminal_output +2224,2611604,"TERMINAL",0,0,"Step 1357\r\nStep 1358\r\nStep 1359\r\nStep 1360\r\nStep 1361\r\n",,terminal_output +2225,2611732,"TERMINAL",0,0,"Step 1362\r\nStep 1363\r\nStep 1364\r\nStep 1365\r\nStep 1366\r\nStep 1367\r\nStep 1368\r\nStep 1369\r\nStep 1370\r\n",,terminal_output +2226,2611855,"TERMINAL",0,0,"Step 1371\r\nStep 1372\r\nStep 1373\r\nStep 1374\r\nStep 1375\r\nStep 1376\r\nStep 1377\r\nStep 1378\r\nStep 1379\r\nStep 1380\r\nStep 1381\r\nStep 1382\r\nStep 1383\r\nStep 1384\r\nStep 1385\r\nStep 1386\r\nStep 1387\r\nStep 1388\r\n",,terminal_output +2227,2611977,"TERMINAL",0,0,"Step 1389\r\nStep 1390\r\nStep 1391\r\n",,terminal_output +2228,2612086,"TERMINAL",0,0,"Step 1392\r\nStep 1393\r\n",,terminal_output +2229,2612150,"TERMINAL",0,0,"Step 1394\r\n",,terminal_output +2230,2612273,"TERMINAL",0,0,"Step 1395\r\nStep 1396\r\n",,terminal_output +2231,2612336,"TERMINAL",0,0,"Step 1397\r\nStep 1398\r\nStep 1399\r\nStep 1400\r\n",,terminal_output +2232,2612396,"TERMINAL",0,0,"Step 1401\r\nStep 1402\r\nStep 1403\r\nStep 1404\r\nStep 1405\r\n",,terminal_output +2233,2612458,"TERMINAL",0,0,"Step 1406\r\n",,terminal_output +2234,2612529,"TERMINAL",0,0,"Step 1407\r\nStep 
1408\r\nStep 1409\r\nStep 1410\r\n",,terminal_output +2235,2612593,"TERMINAL",0,0,"Step 1411\r\nStep 1412\r\n",,terminal_output +2236,2612709,"TERMINAL",0,0,"Step 1413\r\nStep 1414\r\nStep 1415\r\n",,terminal_output +2237,2612770,"TERMINAL",0,0,"Step 1416\r\nStep 1417\r\n",,terminal_output +2238,2612878,"TERMINAL",0,0,"6\r1318",,terminal_output +2239,2612959,"TERMINAL",0,0,"Step 1418\r\nStep 1419\r\nStep 1420\r\nStep 1421\r\nStep 1422\r\nStep 1423\r\nStep 1424\r\n",,terminal_output +2240,2613083,"TERMINAL",0,0,"Step 1425\r\nStep 1426\r\n",,terminal_output +2241,2613147,"TERMINAL",0,0,"Step 1427\r\nStep 1428\r\nStep 1429\r\nStep 1430\r\n",,terminal_output +2242,2613213,"TERMINAL",0,0,"Step 1431\r\nStep 1432\r\nStep 1433\r\n",,terminal_output +2243,2613284,"TERMINAL",0,0,"Step 1434\r\n",,terminal_output +2244,2613392,"TERMINAL",0,0,"Step 1435\r\nStep 1436\r\nStep 1437\r\nStep 1438\r\nStep 1439\r\n",,terminal_output +2245,2613506,"TERMINAL",0,0,"Step 1440\r\nStep 1441\r\nStep 1442\r\nStep 1443\r\nStep 1444\r\nStep 1445\r\nStep 1446\r\n",,terminal_output +2246,2613572,"TERMINAL",0,0,"Step 1447\r\nStep 1448\r\n",,terminal_output +2247,2613624,"TERMINAL",0,0,"Step 1449\r\n",,terminal_output +2248,2613868,"TERMINAL",0,0,"Step 1450\r\nStep 1451\r\nStep 1452\r\nStep 1453\r\nStep 1454\r\nStep 1455\r\nStep 1456\r\nStep 1457\r\nStep 1458\r\nStep 1459\r\nStep 1460\r\nStep 1461\r\nStep 1462\r\nStep 1463\r\nStep 1464\r\nStep 1465\r\nStep 1466\r\nStep 1467\r\nStep 1468\r\nStep 1469\r\nStep 1470\r\nStep 1471\r\nStep 1472\r\nStep 1473\r\nStep 1474\r\nStep 1475\r\nStep 1476\r\nStep 1477\r\nStep 1478\r\nStep 1479\r\nStep 1480\r\nStep 1481\r\nStep 1482\r\nStep 1483\r\n",,terminal_output +2249,2613967,"TERMINAL",0,0,"Step 1484\r\n",,terminal_output +2250,2614102,"TERMINAL",0,0,"Step 1485\r\nStep 1486\r\nStep 1487\r\nStep 1488\r\nStep 1489\r\n",,terminal_output +2251,2614165,"TERMINAL",0,0,"Step 1490\r\nStep 1491\r\nStep 1492\r\nStep 1493\r\n",,terminal_output +2252,2614272,"TERMINAL",0,0,"Step 1494\r\nStep 1495\r\nStep 1496\r\nStep 1497\r\n",,terminal_output +2253,2614334,"TERMINAL",0,0,"Step 1498\r\n",,terminal_output +2254,2614440,"TERMINAL",0,0,"Step 1499\r\nStep 1500\r\nStep 1501\r\nStep 1502\r\nStep 1503\r\n",,terminal_output +2255,2614441,"TERMINAL",0,0,"9\r1484",,terminal_output +2256,2614492,"TERMINAL",0,0,"Step 1504\r\n",,terminal_output +2257,2614558,"TERMINAL",0,0,"Step 1505\r\nStep 1506\r\nStep 1507\r\n",,terminal_output +2258,2614668,"TERMINAL",0,0,"Step 1508\r\n",,terminal_output +2259,2614737,"TERMINAL",0,0,"Step 1509\r\nStep 1510\r\nStep 1511\r\nStep 1512\r\nStep 1513\r\n",,terminal_output +2260,2614795,"TERMINAL",0,0,"Step 1514\r\nStep 1515\r\n",,terminal_output +2261,2615004,"TERMINAL",0,0,"Step 1516\r\nStep 1517\r\nStep 1518\r\nStep 1519\r\nStep 1520\r\nStep 1521\r\nStep 1522\r\nStep 1523\r\n",,terminal_output +2262,2615112,"TERMINAL",0,0,"Step 1524\r\n",,terminal_output +2263,2615221,"TERMINAL",0,0,"Step 1525\r\nStep 1526\r\n",,terminal_output +2264,2615290,"TERMINAL",0,0,"Step 1527\r\nStep 1528\r\nStep 1529\r\n",,terminal_output +2265,2615355,"TERMINAL",0,0,"Step 1530\r\nStep 1531\r\nStep 1532\r\nStep 1533\r\n",,terminal_output +2266,2615473,"TERMINAL",0,0,"Step 1534\r\n",,terminal_output +2267,2615536,"TERMINAL",0,0,"Step 1535\r\n",,terminal_output +2268,2615662,"TERMINAL",0,0,"Step 1536\r\nStep 1537\r\nStep 1538\r\nStep 1539\r\nStep 1540\r\nStep 1541\r\nStep 1542\r\nStep 1543\r\nStep 1544\r\n",,terminal_output +2269,2615793,"TERMINAL",0,0,"Step 1545\r\nStep 1546\r\n",,terminal_output 
+2270,2615849,"TERMINAL",0,0,"Step 1547\r\nStep 1548\r\nStep 1549\r\n",,terminal_output +2271,2615977,"TERMINAL",0,0,"Step 1550\r\nStep 1551\r\nStep 1552\r\nStep 1553\r\n",,terminal_output +2272,2616115,"TERMINAL",0,0,"Step 1554\r\nStep 1555\r\nStep 1556\r\nStep 1557\r\nStep 1558\r\nStep 1559\r\nStep 1560\r\n",,terminal_output +2273,2616167,"TERMINAL",0,0,"Step 1561\r\n",,terminal_output +2274,2616178,"TERMINAL",0,0,"20\r153",,terminal_output +2275,2616231,"TERMINAL",0,0,"Step 1562\r\n",,terminal_output +2276,2616368,"TERMINAL",0,0,"Step 1563\r\nStep 1564\r\nStep 1565\r\nStep 1566\r\nStep 1567\r\nStep 1568\r\nStep 1569\r\nStep 1570\r\nStep 1571\r\nStep 1572\r\nStep 1573\r\n",,terminal_output +2277,2616519,"TERMINAL",0,0,"Step 1574\r\nStep 1575\r\nStep 1576\r\nStep 1577\r\n",,terminal_output +2278,2616643,"TERMINAL",0,0,"Step 1578\r\nStep 1579\r\nStep 1580\r\nStep 1581\r\nStep 1582\r\nStep 1583\r\nStep 1584\r\nStep 1585\r\n",,terminal_output +2279,2616707,"TERMINAL",0,0,"Step 1586\r\nStep 1587\r\n",,terminal_output +2280,2616770,"TERMINAL",0,0,"Step 1588\r\nStep 1589\r\nStep 1590\r\nStep 1591\r\n",,terminal_output +2281,2616831,"TERMINAL",0,0,"Step 1592\r\nStep 1593\r\nStep 1594\r\nStep 1595\r\n",,terminal_output +2282,2616952,"TERMINAL",0,0,"Step 1596\r\nStep 1597\r\nStep 1598\r\nStep 1599\r\n",,terminal_output +2283,2617094,"TERMINAL",0,0,"Step 1600\r\nStep 1601\r\nStep 1602\r\nStep 1603\r\nStep 1604\r\nStep 1605\r\n",,terminal_output +2284,2617203,"TERMINAL",0,0,"Step 1606\r\nStep 1607\r\n",,terminal_output +2285,2617314,"TERMINAL",0,0,"Step 1608\r\nStep 1609\r\nStep 1610\r\n",,terminal_output +2286,2617424,"TERMINAL",0,0,"Step 1611\r\nStep 1612\r\nStep 1613\r\nStep 1614\r\nStep 1615\r\nStep 1616\r\n",,terminal_output +2287,2617533,"TERMINAL",0,0,"Step 1617\r\nStep 1618\r\nStep 1619\r\nStep 1620\r\nStep 1621\r\nStep 1622\r\n",,terminal_output +2288,2617800,"TERMINAL",0,0,"Step 1623\r\nStep 1624\r\nStep 1625\r\nStep 1626\r\nStep 1627\r\nStep 1628\r\nStep 1629\r\nStep 1630\r\nStep 1631\r\nStep 1632\r\nStep 1633\r\nStep 1634\r\nStep 1635\r\n",,terminal_output +2289,2617861,"TERMINAL",0,0,"Step 1636\r\nStep 1637\r\nStep 1638\r\nStep 1639\r\n",,terminal_output +2290,2617927,"TERMINAL",0,0,"Step 1640\r\nStep 1641\r\nStep 1642\r\n",,terminal_output +2291,2617997,"TERMINAL",0,0,"2\r1606",,terminal_output +2292,2618039,"TERMINAL",0,0,"Step 1643\r\nStep 1644\r\nStep 1645\r\nStep 1646\r\nStep 1647\r\nStep 1648\r\n",,terminal_output +2293,2618202,"TERMINAL",0,0,"Step 1649\r\nStep 1650\r\nStep 1651\r\nStep 1652\r\nStep 1653\r\nStep 1654\r\nStep 1655\r\nStep 1656\r\n",,terminal_output +2294,2618315,"TERMINAL",0,0,"Step 1657\r\nStep 1658\r\nStep 1659\r\n",,terminal_output +2295,2618440,"TERMINAL",0,0,"Step 1660\r\nStep 1661\r\nStep 1662\r\nStep 1663\r\nStep 1664\r\nStep 1665\r\nStep 1666\r\nStep 1667\r\nStep 1668\r\nStep 1669\r\nStep 1670\r\nStep 1671\r\n",,terminal_output +2296,2618491,"TERMINAL",0,0,"Step 1672\r\n",,terminal_output +2297,2618559,"TERMINAL",0,0,"Step 1673\r\nStep 1674\r\nStep 1675\r\nStep 1676\r\n",,terminal_output +2298,2618709,"TERMINAL",0,0,"Step 1677\r\nStep 1678\r\nStep 1679\r\nStep 1680\r\nStep 1681\r\nStep 1682\r\nStep 1683\r\nStep 1684\r\nStep 1685\r\nStep 1686\r\n",,terminal_output +2299,2618765,"TERMINAL",0,0,"Step 1687\r\nStep 1688\r\n",,terminal_output +2300,2618869,"TERMINAL",0,0,"Step 1689\r\nStep 1690\r\nStep 1691\r\nStep 1692\r\n",,terminal_output +2301,2618979,"TERMINAL",0,0,"Step 1693\r\nStep 1694\r\nStep 1695\r\n",,terminal_output +2302,2619045,"TERMINAL",0,0,"Step 
1696\r\nStep 1697\r\nStep 1698\r\n",,terminal_output +2303,2619172,"TERMINAL",0,0,"Step 1699\r\nStep 1700\r\nStep 1701\r\nStep 1702\r\nStep 1703\r\nStep 1704\r\n",,terminal_output +2304,2619440,"TERMINAL",0,0,"Step 1705\r\nStep 1706\r\nStep 1707\r\nStep 1708\r\nStep 1709\r\nStep 1710\r\nStep 1711\r\nStep 1712\r\nStep 1713\r\nStep 1714\r\nStep 1715\r\nStep 1716\r\n",,terminal_output +2305,2619503,"TERMINAL",0,0,"Step 1717\r\nStep 1718\r\nStep 1719\r\nStep 1720\r\nStep 1721\r\n",,terminal_output +2306,2619613,"TERMINAL",0,0,"Step 1722\r\nStep 1723\r\nStep 1724\r\n",,terminal_output +2307,2619665,"TERMINAL",0,0,"49",,terminal_output +2308,2619676,"TERMINAL",0,0,"Step 1725\r\nStep 1726\r\nStep 1727\r\n",,terminal_output +2309,2619788,"TERMINAL",0,0,"Step 1728\r\nStep 1729\r\nStep 1730\r\nStep 1731\r\nStep 1732\r\nStep 1733\r\n",,terminal_output +2310,2619911,"TERMINAL",0,0,"Step 1734\r\nStep 1735\r\nStep 1736\r\n",,terminal_output +2311,2620053,"TERMINAL",0,0,"Step 1737\r\nStep 1738\r\nStep 1739\r\nStep 1740\r\nStep 1741\r\nStep 1742\r\nStep 1743\r\nStep 1744\r\nStep 1745\r\n",,terminal_output +2312,2620171,"TERMINAL",0,0,"Step 1746\r\nStep 1747\r\nStep 1748\r\nStep 1749\r\n",,terminal_output +2313,2620278,"TERMINAL",0,0,"Step 1750\r\nStep 1751\r\n",,terminal_output +2314,2620431,"TERMINAL",0,0,"Step 1752\r\nStep 1753\r\nStep 1754\r\nStep 1755\r\nStep 1756\r\nStep 1757\r\nStep 1758\r\nStep 1759\r\n",,terminal_output +2315,2620550,"TERMINAL",0,0,"Step 1760\r\nStep 1761\r\nStep 1762\r\nStep 1763\r\nStep 1764\r\n",,terminal_output +2316,2620656,"TERMINAL",0,0,"Step 1765\r\nStep 1766\r\nStep 1767\r\nStep 1768\r\n",,terminal_output +2317,2620719,"TERMINAL",0,0,"Step 1769\r\nStep 1770\r\nStep 1771\r\nStep 1772\r\n",,terminal_output +2318,2620832,"TERMINAL",0,0,"Step 1773\r\nStep 1774\r\nStep 1775\r\nStep 1776\r\nStep 1777\r\nStep 1778\r\n",,terminal_output +2319,2620950,"TERMINAL",0,0,"Step 1779\r\nStep 1780\r\nStep 1781\r\nStep 1782\r\nStep 1783\r\nStep 1784\r\n",,terminal_output +2320,2621151,"TERMINAL",0,0,"Step 1785\r\nStep 1786\r\nStep 1787\r\nStep 1788\r\nStep 1789\r\nStep 1790\r\nStep 1791\r\nStep 1792\r\nStep 1793\r\nStep 1794\r\nStep 1795\r\nStep 1796\r\nStep 1797\r\nStep 1798\r\nStep 1799\r\nStep 1800\r\nStep 1801\r\nStep 1802\r\nStep 1803\r\nStep 1804\r\nStep 1805\r\nStep 1806\r\n",,terminal_output +2321,2621213,"TERMINAL",0,0,"Step 1807\r\nStep 1808\r\nStep 1809\r\n",,terminal_output +2322,2621309,"TERMINAL",0,0,"Step 1810\r\nStep 1811\r\nStep 1812\r\nStep 1813\r\n",,terminal_output +2323,2621372,"TERMINAL",0,0,"Step 1814\r\nStep 1815\r\nStep 1816\r\nStep 1817\r\n",,terminal_output +2324,2621585,"TERMINAL",0,0,"Step 1818\r\nStep 1819\r\nStep 1820\r\nStep 1821\r\nStep 1822\r\nStep 1823\r\nStep 1824\r\nStep 1825\r\nStep 1826\r\n",,terminal_output +2325,2621660,"TERMINAL",0,0,"Step 1827\r\nStep 1828\r\nStep 1829\r\nStep 1830\r\n",,terminal_output +2326,2621805,"TERMINAL",0,0,"Step 1831\r\nStep 1832\r\nStep 1833\r\nStep 1834\r\nStep 1835\r\nStep 1836\r\nStep 1837\r\nStep 1838\r\nStep 1839\r\nStep 1840\r\nStep 1841\r\nStep 1842\r\nStep 1843\r\nStep 1844\r\nStep 1845\r\nStep 1846\r\n",,terminal_output +2327,2621871,"TERMINAL",0,0,"Step 1847\r\n",,terminal_output +2328,2621921,"TERMINAL",0,0,"Step 1848\r\n",,terminal_output +2329,2621934,"TERMINAL",0,0,"6\r1769",,terminal_output +2330,2622002,"TERMINAL",0,0,"Step 1849\r\nStep 1850\r\nStep 1851\r\nStep 1852\r\nStep 1853\r\n",,terminal_output +2331,2622148,"TERMINAL",0,0,"Step 1854\r\nStep 1855\r\nStep 1856\r\n",,terminal_output 
+2332,2622279,"TERMINAL",0,0,"Step 1857\r\nStep 1858\r\nStep 1859\r\nStep 1860\r\nStep 1861\r\nStep 1862\r\n",,terminal_output +2333,2622345,"TERMINAL",0,0,"Step 1863\r\nStep 1864\r\n",,terminal_output +2334,2622407,"TERMINAL",0,0,"Step 1865\r\n",,terminal_output +2335,2622524,"TERMINAL",0,0,"Step 1866\r\nStep 1867\r\nStep 1868\r\nStep 1869\r\nStep 1870\r\nStep 1871\r\n",,terminal_output +2336,2622636,"TERMINAL",0,0,"Step 1872\r\nStep 1873\r\nStep 1874\r\nStep 1875\r\nStep 1876\r\nStep 1877\r\n",,terminal_output +2337,2622705,"TERMINAL",0,0,"Step 1878\r\nStep 1879\r\nStep 1880\r\nStep 1881\r\n",,terminal_output +2338,2622820,"TERMINAL",0,0,"Step 1882\r\nStep 1883\r\nStep 1884\r\n",,terminal_output +2339,2622973,"TERMINAL",0,0,"Step 1885\r\nStep 1886\r\nStep 1887\r\nStep 1888\r\nStep 1889\r\nStep 1890\r\nStep 1891\r\n",,terminal_output +2340,2623113,"TERMINAL",0,0,"Step 1892\r\nStep 1893\r\nStep 1894\r\nStep 1895\r\nStep 1896\r\n",,terminal_output +2341,2623167,"TERMINAL",0,0,"Step 1897\r\n",,terminal_output +2342,2623282,"TERMINAL",0,0,"Step 1898\r\nStep 1899\r\n",,terminal_output +2343,2623346,"TERMINAL",0,0,"Step 1900\r\nStep 1901\r\nStep 1902\r\nStep 1903\r\nStep 1904\r\n",,terminal_output +2344,2623427,"TERMINAL",0,0,"Step 1905\r\nStep 1906\r\nStep 1907\r\nStep 1908\r\n",,terminal_output +2345,2623547,"TERMINAL",0,0,"Step 1909\r\nStep 1910\r\nStep 1911\r\nStep 1912\r\n",,terminal_output +2346,2623613,"TERMINAL",0,0,"Step 1913\r\nStep 1914\r\n",,terminal_output +2347,2623671,"TERMINAL",0,0,"Step 1915\r\n",,terminal_output +2348,2623820,"TERMINAL",0,0,"Step 1916\r\nStep 1917\r\nStep 1918\r\nStep 1919\r\nStep 1920\r\nStep 1921\r\nStep 1922\r\n",,terminal_output +2349,2623958,"TERMINAL",0,0,"Step 1923\r\nStep 1924\r\nStep 1925\r\nStep 1926\r\nStep 1927\r\nStep 1928\r\nStep 1929\r\nStep 1930\r\nStep 1931\r\n",,terminal_output +2350,2624039,"TERMINAL",0,0,"Step 1932\r\nStep 1933\r\nStep 1934\r\nStep 1935\r\nStep 1936\r\nStep 1937\r\nStep 1938\r\nStep 1939\r\n",,terminal_output +2351,2624277,"TERMINAL",0,0,"Step 1940\r\nStep 1941\r\nStep 1942\r\nStep 1943\r\nStep 1944\r\nStep 1945\r\nStep 1946\r\nStep 1947\r\nStep 1948\r\nStep 1949\r\nStep 1950\r\nStep 1951\r\nStep 1952\r\nStep 1953\r\nStep 1954\r\nStep 1955\r\nStep 1956\r\nStep 1957\r\nStep 1958\r\nStep 1959\r\nStep 1960\r\nStep 1961\r\nStep 1962\r\nStep 1963\r\nStep 1964\r\nStep 1965\r\nStep 1966\r\nStep 1967\r\nStep 1968\r\nStep 1969\r\nStep 1970\r\nStep 1971\r\n",,terminal_output +2352,2624339,"TERMINAL",0,0,"Step 1972\r\nStep 1973\r\nStep 1974\r\nStep 1975\r\n",,terminal_output +2353,2624472,"TERMINAL",0,0,"Step 1976\r\nStep 1977\r\nStep 1978\r\nStep 1979\r\n",,terminal_output +2354,2624536,"TERMINAL",0,0,"Step 1980\r\nStep 1981\r\nStep 1982\r\nStep 1983\r\n",,terminal_output +2355,2624548,"TERMINAL",0,0,"8\r188",,terminal_output +2356,2624606,"TERMINAL",0,0,"Step 1984\r\nStep 1985\r\nStep 1986\r\n",,terminal_output +2357,2624655,"TERMINAL",0,0,"Step 1987\r\n",,terminal_output +2358,2624887,"TERMINAL",0,0,"Step 1988\r\nStep 1989\r\nStep 1990\r\nStep 1991\r\nStep 1992\r\nStep 1993\r\nStep 1994\r\n",,terminal_output +2359,2625087,"TERMINAL",0,0,"Step 1995\r\nStep 1996\r\nStep 1997\r\nStep 1998\r\n",,terminal_output +2360,2625156,"TERMINAL",0,0,"Step 1999\r\nStep 2000\r\n",,terminal_output +2361,2625300,"TERMINAL",0,0,"Step 2001\r\nStep 2002\r\nStep 2003\r\nStep 2004\r\nStep 2005\r\nStep 2006\r\n",,terminal_output +2362,2625392,"TERMINAL",0,0,"Step 2007\r\nStep 2008\r\n",,terminal_output +2363,2625459,"TERMINAL",0,0,"Step 2009\r\nStep 
2010\r\nStep 2011\r\nStep 2012\r\n",,terminal_output +2364,2625568,"TERMINAL",0,0,"Step 2013\r\nStep 2014\r\nStep 2015\r\n",,terminal_output +2365,2625695,"TERMINAL",0,0,"Step 2016\r\nStep 2017\r\nStep 2018\r\nStep 2019\r\nStep 2020\r\n",,terminal_output +2366,2625772,"TERMINAL",0,0,"Step 2021\r\n",,terminal_output +2367,2625885,"TERMINAL",0,0,"Step 2022\r\nStep 2023\r\nStep 2024\r\nStep 2025\r\n",,terminal_output +2368,2626060,"TERMINAL",0,0,"Step 2026\r\nStep 2027\r\nStep 2028\r\nStep 2029\r\n",,terminal_output +2369,2626124,"TERMINAL",0,0,"Step 2030\r\nStep 2031\r\nStep 2032\r\nStep 2033\r\n",,terminal_output +2370,2626229,"TERMINAL",0,0,"Step 2034\r\nStep 2035\r\nStep 2036\r\nStep 2037\r\nStep 2038\r\nStep 2039\r\nStep 2040\r\n",,terminal_output +2371,2626335,"TERMINAL",0,0,"Step 2041\r\nStep 2042\r\nStep 2043\r\nStep 2044\r\nStep 2045\r\nStep 2046\r\n",,terminal_output +2372,2626397,"TERMINAL",0,0,"Step 2047\r\nStep 2048\r\nStep 2049\r\nStep 2050\r\n",,terminal_output +2373,2626615,"TERMINAL",0,0,"Step 2051\r\nStep 2052\r\nStep 2053\r\nStep 2054\r\nStep 2055\r\nStep 2056\r\nStep 2057\r\nStep 2058\r\nStep 2059\r\nStep 2060\r\nStep 2061\r\nStep 2062\r\n",,terminal_output +2374,2626766,"TERMINAL",0,0,"Step 2063\r\nStep 2064\r\nStep 2065\r\nStep 2066\r\nStep 2067\r\n",,terminal_output +2375,2626819,"TERMINAL",0,0,"Step 2068\r\nStep 2069\r\n",,terminal_output +2376,2627023,"TERMINAL",0,0,"Step 2070\r\nStep 2071\r\nStep 2072\r\nStep 2073\r\nStep 2074\r\nStep 2075\r\nStep 2076\r\nStep 2077\r\nStep 2078\r\nStep 2079\r\n",,terminal_output +2377,2627085,"TERMINAL",0,0,"Step 2080\r\nStep 2081\r\n",,terminal_output +2378,2627200,"TERMINAL",0,0,"Step 2082\r\nStep 2083\r\nStep 2084\r\nStep 2085\r\nStep 2086\r\nStep 2087\r\nStep 2088\r\n",,terminal_output +2379,2627259,"TERMINAL",0,0,"Step 2089\r\nStep 2090\r\nStep 2091\r\n",,terminal_output +2380,2627322,"TERMINAL",0,0,"Step 2092\r\n",,terminal_output +2381,2627382,"TERMINAL",0,0,"Step 2093\r\nStep 2094\r\nStep 2095\r\n",,terminal_output +2382,2627445,"TERMINAL",0,0,"30\r2016",,terminal_output +2383,2627454,"TERMINAL",0,0,"Step 2096\r\nStep 2097\r\nStep 2098\r\nStep 2099\r\nStep 2100\r\nStep 2101\r\nStep 2102\r\n",,terminal_output +2384,2627519,"TERMINAL",0,0,"Step 2103\r\nStep 2104\r\nStep 2105\r\nStep 2106\r\nStep 2107\r\nStep 2108\r\n",,terminal_output +2385,2627626,"TERMINAL",0,0,"Step 2109\r\n",,terminal_output +2386,2627692,"TERMINAL",0,0,"Step 2110\r\nStep 2111\r\nStep 2112\r\n",,terminal_output +2387,2627757,"TERMINAL",0,0,"Step 2113\r\nStep 2114\r\nStep 2115\r\n",,terminal_output +2388,2627836,"TERMINAL",0,0,"Step 2116\r\nStep 2117\r\nStep 2118\r\nStep 2119\r\nStep 2120\r\n",,terminal_output +2389,2628014,"TERMINAL",0,0,"Step 2121\r\nStep 2122\r\nStep 2123\r\nStep 2124\r\nStep 2125\r\nStep 2126\r\nStep 2127\r\nStep 2128\r\nStep 2129\r\nStep 2130\r\nStep 2131\r\nStep 2132\r\n",,terminal_output +2390,2628079,"TERMINAL",0,0,"Step 2133\r\nStep 2134\r\nStep 2135\r\nStep 2136\r\n",,terminal_output +2391,2628160,"TERMINAL",0,0,"Step 2137\r\nStep 2138\r\nStep 2139\r\nStep 2140\r\nStep 2141\r\n",,terminal_output +2392,2628302,"TERMINAL",0,0,"Step 2142\r\nStep 2143\r\nStep 2144\r\nStep 2145\r\nStep 2146\r\nStep 2147\r\nStep 2148\r\nStep 2149\r\nStep 2150\r\n",,terminal_output +2393,2628368,"TERMINAL",0,0,"Step 2151\r\nStep 2152\r\nStep 2153\r\n",,terminal_output +2394,2628428,"TERMINAL",0,0,"Step 2154\r\nStep 2155\r\nStep 2156\r\n",,terminal_output +2395,2628553,"TERMINAL",0,0,"Step 2157\r\nStep 2158\r\nStep 2159\r\nStep 2160\r\nStep 2161\r\nStep 
2162\r\nStep 2163\r\n",,terminal_output +2396,2628705,"TERMINAL",0,0,"Step 2164\r\nStep 2165\r\nStep 2166\r\nStep 2167\r\nStep 2168\r\nStep 2169\r\nStep 2170\r\n",,terminal_output +2397,2628816,"TERMINAL",0,0,"Step 2171\r\nStep 2172\r\nStep 2173\r\nStep 2174\r\nStep 2175\r\nStep 2176\r\n",,terminal_output +2398,2628878,"TERMINAL",0,0,"Step 2177\r\n",,terminal_output +2399,2629005,"TERMINAL",0,0,"Step 2178\r\nStep 2179\r\nStep 2180\r\nStep 2181\r\nStep 2182\r\nStep 2183\r\nStep 2184\r\nStep 2185\r\n",,terminal_output +2400,2629056,"TERMINAL",0,0,"Step 2186\r\n",,terminal_output +2401,2629243,"TERMINAL",0,0,"Step 2187\r\nStep 2188\r\nStep 2189\r\nStep 2190\r\nStep 2191\r\nStep 2192\r\nStep 2193\r\n",,terminal_output +2402,2629402,"TERMINAL",0,0,"Step 2194\r\nStep 2195\r\nStep 2196\r\nStep 2197\r\nStep 2198\r\nStep 2199\r\nStep 2200\r\nStep 2201\r\n",,terminal_output +2403,2629525,"TERMINAL",0,0,"Step 2202\r\nStep 2203\r\nStep 2204\r\nStep 2205\r\nStep 2206\r\nStep 2207\r\n",,terminal_output +2404,2629645,"TERMINAL",0,0,"Step 2208\r\nStep 2209\r\nStep 2210\r\n",,terminal_output +2405,2629699,"TERMINAL",0,0,"Step 2211\r\n",,terminal_output +2406,2629810,"TERMINAL",0,0,"Step 2212\r\nStep 2213\r\nStep 2214\r\nStep 2215\r\nStep 2216\r\n",,terminal_output +2407,2629928,"TERMINAL",0,0,"Step 2217\r\nStep 2218\r\nStep 2219\r\nStep 2220\r\nStep 2221\r\nStep 2222\r\nStep 2223\r\n",,terminal_output +2408,2630087,"TERMINAL",0,0,"Step 2224\r\nStep 2225\r\nStep 2226\r\nStep 2227\r\nStep 2228\r\nStep 2229\r\nStep 2230\r\n",,terminal_output +2409,2630222,"TERMINAL",0,0,"Step 2231\r\nStep 2232\r\nStep 2233\r\nStep 2234\r\nStep 2235\r\nStep 2236\r\nStep 2237\r\nStep 2238\r\nStep 2239\r\nStep 2240\r\nStep 2241\r\n",,terminal_output +2410,2630393,"TERMINAL",0,0,"Step 2242\r\nStep 2243\r\nStep 2244\r\nStep 2245\r\n",,terminal_output +2411,2630522,"TERMINAL",0,0,"Step 2246\r\nStep 2247\r\nStep 2248\r\nStep 2249\r\nStep 2250\r\nStep 2251\r\n",,terminal_output +2412,2630582,"TERMINAL",0,0,"Step 2252\r\nStep 2253\r\nStep 2254\r\nStep 2255\r\nStep 2256\r\n",,terminal_output +2413,2630583,"TERMINAL",0,0,"3\r2157",,terminal_output +2414,2630695,"TERMINAL",0,0,"Step 2257\r\nStep 2258\r\nStep 2259\r\nStep 2260\r\nStep 2261\r\n",,terminal_output +2415,2630846,"TERMINAL",0,0,"Step 2262\r\nStep 2263\r\nStep 2264\r\nStep 2265\r\nStep 2266\r\nStep 2267\r\nStep 2268\r\nStep 2269\r\nStep 2270\r\nStep 2271\r\n",,terminal_output +2416,2631003,"TERMINAL",0,0,"Step 2272\r\nStep 2273\r\nStep 2274\r\nStep 2275\r\nStep 2276\r\nStep 2277\r\nStep 2278\r\nStep 2279\r\nStep 2280\r\nStep 2281\r\nStep 2282\r\nStep 2283\r\nStep 2284\r\nStep 2285\r\nStep 2286\r\n",,terminal_output +2417,2631238,"TERMINAL",0,0,"Step 2287\r\nStep 2288\r\nStep 2289\r\nStep 2290\r\nStep 2291\r\nStep 2292\r\nStep 2293\r\nStep 2294\r\nStep 2295\r\nStep 2296\r\nStep 2297\r\nStep 2298\r\nStep 2299\r\nStep 2300\r\nStep 2301\r\nStep 2302\r\nStep 2303\r\nStep 2304\r\nStep 2305\r\nStep 2306\r\nStep 2307\r\nStep 2308\r\nStep 2309\r\nStep 2310\r\nStep 2311\r\nStep 2312\r\nStep 2313\r\nStep 2314\r\nStep 2315\r\nStep 2316\r\n",,terminal_output +2418,2631362,"TERMINAL",0,0,"Step 2317\r\nStep 2318\r\nStep 2319\r\n",,terminal_output +2419,2631516,"TERMINAL",0,0,"Step 2320\r\nStep 2321\r\nStep 2322\r\nStep 2323\r\nStep 2324\r\n",,terminal_output +2420,2631625,"TERMINAL",0,0,"Step 2325\r\n",,terminal_output +2421,2631709,"TERMINAL",0,0,"Step 2326\r\nStep 2327\r\nStep 2328\r\n",,terminal_output +2422,2631777,"TERMINAL",0,0,"Step 2329\r\nStep 2330\r\nStep 2331\r\n",,terminal_output 
+2423,2632074,"TERMINAL",0,0,"Step 2332\r\nStep 2333\r\nStep 2334\r\nStep 2335\r\nStep 2336\r\nStep 2337\r\nStep 2338\r\n",,terminal_output +2424,2632162,"TERMINAL",0,0,"7\r2325",,terminal_output +2425,2632294,"TERMINAL",0,0,"Step 2339\r\nStep 2340\r\nStep 2341\r\nStep 2342\r\n",,terminal_output +2426,2632355,"TERMINAL",0,0,"Step 2343\r\nStep 2344\r\nStep 2345\r\nStep 2346\r\nStep 2347\r\nStep 2348\r\n",,terminal_output +2427,2632463,"TERMINAL",0,0,"Step 2349\r\nStep 2350\r\nStep 2351\r\n",,terminal_output +2428,2632580,"TERMINAL",0,0,"Step 2352\r\nStep 2353\r\nStep 2354\r\nStep 2355\r\nStep 2356\r\n",,terminal_output +2429,2632715,"TERMINAL",0,0,"Step 2357\r\nStep 2358\r\nStep 2359\r\nStep 2360\r\nStep 2361\r\n",,terminal_output +2430,2632778,"TERMINAL",0,0,"Step 2362\r\nStep 2363\r\n",,terminal_output +2431,2632850,"TERMINAL",0,0,"Step 2364\r\nStep 2365\r\n",,terminal_output +2432,2632969,"TERMINAL",0,0,"Step 2366\r\nStep 2367\r\nStep 2368\r\nStep 2369\r\nStep 2370\r\nStep 2371\r\nStep 2372\r\nStep 2373\r\n",,terminal_output +2433,2633056,"TERMINAL",0,0,"Step 2374\r\nStep 2375\r\nStep 2376\r\n",,terminal_output +2434,2633181,"TERMINAL",0,0,"Step 2377\r\nStep 2378\r\nStep 2379\r\nStep 2380\r\nStep 2381\r\nStep 2382\r\nStep 2383\r\nStep 2384\r\nStep 2385\r\nStep 2386\r\nStep 2387\r\nStep 2388\r\nStep 2389\r\nStep 2390\r\nStep 2391\r\n",,terminal_output +2435,2633246,"TERMINAL",0,0,"Step 2392\r\nStep 2393\r\nStep 2394\r\nStep 2395\r\nStep 2396\r\nStep 2397\r\n",,terminal_output +2436,2633308,"TERMINAL",0,0,"Step 2398\r\nStep 2399\r\nStep 2400\r\nStep 2401\r\nStep 2402\r\n",,terminal_output +2437,2633403,"TERMINAL",0,0,"Step 2403\r\nStep 2404\r\nStep 2405\r\nStep 2406\r\nStep 2407\r\nStep 2408\r\nStep 2409\r\nStep 2410\r\nStep 2411\r\n",,terminal_output +2438,2633463,"TERMINAL",0,0,"Step 2412\r\n",,terminal_output +2439,2633692,"TERMINAL",0,0,"Step 2413\r\nStep 2414\r\nStep 2415\r\nStep 2416\r\nStep 2417\r\nStep 2418\r\nStep 2419\r\nStep 2420\r\nStep 2421\r\nStep 2422\r\nStep 2423\r\nStep 2424\r\nStep 2425\r\nStep 2426\r\nStep 2427\r\nStep 2428\r\nStep 2429\r\nStep 2430\r\nStep 2431\r\nStep 2432\r\nStep 2433\r\nStep 2434\r\n",,terminal_output +2440,2633744,"TERMINAL",0,0,"Step 2435\r\n",,terminal_output +2441,2633903,"TERMINAL",0,0,"Step 2436\r\nStep 2437\r\nStep 2438\r\nStep 2439\r\nStep 2440\r\nStep 2441\r\nStep 2442\r\nStep 2443\r\nStep 2444\r\n",,terminal_output +2442,2633967,"TERMINAL",0,0,"Step 2445\r\nStep 2446\r\nStep 2447\r\nStep 2448\r\nStep 2449\r\n",,terminal_output +2443,2634078,"TERMINAL",0,0,"Step 2450\r\n",,terminal_output +2444,2634148,"TERMINAL",0,0,"Step 2451\r\nStep 2452\r\n",,terminal_output +2445,2634216,"TERMINAL",0,0,"Step 2453\r\nStep 2454\r\nStep 2455\r\n",,terminal_output +2446,2634327,"TERMINAL",0,0,"Step 2456\r\nStep 2457\r\n",,terminal_output +2447,2634393,"TERMINAL",0,0,"Step 2458\r\nStep 2459\r\nStep 2460\r\n",,terminal_output +2448,2634455,"TERMINAL",0,0,"Step 2461\r\nStep 2462\r\nStep 2463\r\nStep 2464\r\nStep 2465\r\nStep 2466\r\n",,terminal_output +2449,2634661,"TERMINAL",0,0,"Step 2467\r\nStep 2468\r\nStep 2469\r\nStep 2470\r\n",,terminal_output +2450,2634772,"TERMINAL",0,0,"892",,terminal_output +2451,2634852,"TERMINAL",0,0,"Step 2471\r\nStep 2472\r\nStep 2473\r\nStep 2474\r\n",,terminal_output +2452,2634917,"TERMINAL",0,0,"Step 2475\r\nStep 2476\r\n",,terminal_output +2453,2635171,"TERMINAL",0,0,"Step 2477\r\nStep 2478\r\nStep 2479\r\nStep 2480\r\nStep 2481\r\nStep 2482\r\n",,terminal_output +2454,2635238,"TERMINAL",0,0,"Step 2483\r\nStep 2484\r\nStep 
2485\r\n",,terminal_output +2455,2635309,"TERMINAL",0,0,"Step 2486\r\nStep 2487\r\nStep 2488\r\nStep 2489\r\n",,terminal_output +2456,2635390,"TERMINAL",0,0,"Step 2490\r\n",,terminal_output +2457,2635456,"TERMINAL",0,0,"Step 2491\r\nStep 2492\r\nStep 2493\r\nStep 2494\r\nStep 2495\r\n",,terminal_output +2458,2635563,"TERMINAL",0,0,"Step 2496\r\nStep 2497\r\nStep 2498\r\n",,terminal_output +2459,2635626,"TERMINAL",0,0,"Step 2499\r\nStep 2500\r\n",,terminal_output +2460,2635733,"TERMINAL",0,0,"Step 2501\r\nStep 2502\r\nStep 2503\r\nStep 2504\r\nStep 2505\r\nStep 2506\r\nStep 2507\r\nStep 2508\r\nStep 2509\r\n",,terminal_output +2461,2635919,"TERMINAL",0,0,"Step 2510\r\nStep 2511\r\nStep 2512\r\nStep 2513\r\nStep 2514\r\nStep 2515\r\nStep 2516\r\nStep 2517\r\n",,terminal_output +2462,2635973,"TERMINAL",0,0,"Step 2518\r\nStep 2519\r\n",,terminal_output +2463,2636079,"TERMINAL",0,0,"Step 2520\r\nStep 2521\r\nStep 2522\r\nStep 2523\r\nStep 2524\r\n",,terminal_output +2464,2636132,"TERMINAL",0,0,"Step 2525\r\n",,terminal_output +2465,2636243,"TERMINAL",0,0,"Step 2526\r\nStep 2527\r\nStep 2528\r\nStep 2529\r\n",,terminal_output +2466,2636370,"TERMINAL",0,0,"Step 2530\r\nStep 2531\r\nStep 2532\r\nStep 2533\r\nStep 2534\r\nStep 2535\r\nStep 2536\r\n",,terminal_output +2467,2636431,"TERMINAL",0,0,"Step 2537\r\nStep 2538\r\nStep 2539\r\nStep 2540\r\n",,terminal_output +2468,2636539,"TERMINAL",0,0,"Step 2541\r\nStep 2542\r\nStep 2543\r\nStep 2544\r\n",,terminal_output +2469,2636607,"TERMINAL",0,0,"Step 2545\r\n",,terminal_output +2470,2636671,"TERMINAL",0,0,"41\r2510",,terminal_output +2471,2636672,"TERMINAL",0,0,"Step 2546\r\nStep 2547\r\nStep 2548\r\nStep 2549\r\nStep 2550\r\nStep 2551\r\nStep 2552\r\n",,terminal_output +2472,2636800,"TERMINAL",0,0,"Step 2553\r\nStep 2554\r\nStep 2555\r\nStep 2556\r\nStep 2557\r\n",,terminal_output +2473,2636852,"TERMINAL",0,0,"Step 2558\r\nStep 2559\r\nStep 2560\r\n",,terminal_output +2474,2636938,"TERMINAL",0,0,"Step 2561\r\nStep 2562\r\nStep 2563\r\nStep 2564\r\n",,terminal_output +2475,2637048,"TERMINAL",0,0,"Step 2565\r\nStep 2566\r\nStep 2567\r\n",,terminal_output +2476,2637115,"TERMINAL",0,0,"Step 2568\r\nStep 2569\r\nStep 2570\r\n",,terminal_output +2477,2637222,"TERMINAL",0,0,"Step 2571\r\nStep 2572\r\nStep 2573\r\nStep 2574\r\nStep 2575\r\nStep 2576\r\nStep 2577\r\nStep 2578\r\n",,terminal_output +2478,2637285,"TERMINAL",0,0,"Step 2579\r\nStep 2580\r\n",,terminal_output +2479,2637410,"TERMINAL",0,0,"Step 2581\r\nStep 2582\r\nStep 2583\r\nStep 2584\r\nStep 2585\r\n",,terminal_output +2480,2637526,"TERMINAL",0,0,"Step 2586\r\nStep 2587\r\nStep 2588\r\nStep 2589\r\n",,terminal_output +2481,2637586,"TERMINAL",0,0,"Step 2590\r\nStep 2591\r\nStep 2592\r\nStep 2593\r\nStep 2594\r\n",,terminal_output +2482,2637686,"TERMINAL",0,0,"Step 2595\r\nStep 2596\r\nStep 2597\r\nStep 2598\r\nStep 2599\r\n",,terminal_output +2483,2637795,"TERMINAL",0,0,"Step 2600\r\nStep 2601\r\nStep 2602\r\nStep 2603\r\nStep 2604\r\nStep 2605\r\nStep 2606\r\nStep 2607\r\nStep 2608\r\nStep 2609\r\n",,terminal_output +2484,2637978,"TERMINAL",0,0,"Step 2610\r\nStep 2611\r\nStep 2612\r\nStep 2613\r\nStep 2614\r\nStep 2615\r\nStep 2616\r\nStep 2617\r\n",,terminal_output +2485,2638100,"TERMINAL",0,0,"Step 2618\r\nStep 2619\r\nStep 2620\r\nStep 2621\r\nStep 2622\r\nStep 2623\r\n",,terminal_output +2486,2638209,"TERMINAL",0,0,"Step 2624\r\nStep 2625\r\nStep 2626\r\nStep 2627\r\nStep 2628\r\nStep 2629\r\nStep 2630\r\nStep 2631\r\n",,terminal_output +2487,2638338,"TERMINAL",0,0,"Step 2632\r\nStep 
2633\r\nStep 2634\r\nStep 2635\r\nStep 2636\r\nStep 2637\r\nStep 2638\r\n",,terminal_output +2488,2638402,"TERMINAL",0,0,"Step 2639\r\nStep 2640\r\n",,terminal_output +2489,2638469,"TERMINAL",0,0,"Step 2641\r\nStep 2642\r\nStep 2643\r\nStep 2644\r\nStep 2645\r\nStep 2646\r\n",,terminal_output +2490,2638565,"TERMINAL",0,0,"Step 2647\r\nStep 2648\r\n",,terminal_output +2491,2638634,"TERMINAL",0,0,"Step 2649\r\nStep 2650\r\nStep 2651\r\nStep 2652\r\n",,terminal_output +2492,2638728,"TERMINAL",0,0,"Step 2653\r\n",,terminal_output +2493,2638878,"TERMINAL",0,0,"Step 2654\r\nStep 2655\r\nStep 2656\r\nStep 2657\r\nStep 2658\r\nStep 2659\r\nStep 2660\r\nStep 2661\r\nStep 2662\r\nStep 2663\r\nStep 2664\r\n",,terminal_output +2494,2639019,"TERMINAL",0,0,"Step 2665\r\nStep 2666\r\nStep 2667\r\nStep 2668\r\nStep 2669\r\nStep 2670\r\nStep 2671\r\n",,terminal_output +2495,2639136,"TERMINAL",0,0,"Step 2672\r\nStep 2673\r\nStep 2674\r\n",,terminal_output +2496,2639196,"TERMINAL",0,0,"Step 2675\r\nStep 2676\r\n",,terminal_output +2497,2639399,"TERMINAL",0,0,"Step 2677\r\nStep 2678\r\nStep 2679\r\nStep 2680\r\nStep 2681\r\nStep 2682\r\nStep 2683\r\nStep 2684\r\nStep 2685\r\nStep 2686\r\n",,terminal_output +2498,2639507,"TERMINAL",0,0,"3\r260",,terminal_output +2499,2639560,"TERMINAL",0,0,"Step 2687\r\nStep 2688\r\nStep 2689\r\nStep 2690\r\nStep 2691\r\n",,terminal_output +2500,2639626,"TERMINAL",0,0,"Step 2692\r\nStep 2693\r\n",,terminal_output +2501,2639688,"TERMINAL",0,0,"Step 2694\r\nStep 2695\r\n",,terminal_output +2502,2639751,"TERMINAL",0,0,"Step 2696\r\nStep 2697\r\nStep 2698\r\n",,terminal_output +2503,2639813,"TERMINAL",0,0,"Step 2699\r\nStep 2700\r\nStep 2701\r\nStep 2702\r\n",,terminal_output +2504,2639877,"TERMINAL",0,0,"Step 2703\r\nStep 2704\r\n",,terminal_output +2505,2639929,"TERMINAL",0,0,"Step 2705\r\nStep 2706\r\n",,terminal_output +2506,2640018,"TERMINAL",0,0,"Step 2707\r\nStep 2708\r\nStep 2709\r\nStep 2710\r\nStep 2711\r\nStep 2712\r\nStep 2713\r\nStep 2714\r\n",,terminal_output +2507,2640189,"TERMINAL",0,0,"Step 2715\r\nStep 2716\r\nStep 2717\r\nStep 2718\r\nStep 2719\r\nStep 2720\r\nStep 2721\r\nStep 2722\r\nStep 2723\r\nStep 2724\r\nStep 2725\r\nStep 2726\r\nStep 2727\r\nStep 2728\r\nStep 2729\r\n",,terminal_output +2508,2640254,"TERMINAL",0,0,"Step 2730\r\nStep 2731\r\nStep 2732\r\nStep 2733\r\nStep 2734\r\nStep 2735\r\nStep 2736\r\n",,terminal_output +2509,2640318,"TERMINAL",0,0,"Step 2737\r\nStep 2738\r\nStep 2739\r\n",,terminal_output +2510,2640381,"TERMINAL",0,0,"Step 2740\r\nStep 2741\r\nStep 2742\r\nStep 2743\r\n",,terminal_output +2511,2640448,"TERMINAL",0,0,"Step 2744\r\nStep 2745\r\n",,terminal_output +2512,2640573,"TERMINAL",0,0,"Step 2746\r\nStep 2747\r\nStep 2748\r\n",,terminal_output +2513,2640633,"TERMINAL",0,0,"Step 2749\r\nStep 2750\r\nStep 2751\r\n",,terminal_output +2514,2640759,"TERMINAL",0,0,"Step 2752\r\nStep 2753\r\nStep 2754\r\nStep 2755\r\nStep 2756\r\n",,terminal_output +2515,2640817,"TERMINAL",0,0,"Step 2757\r\n",,terminal_output +2516,2640989,"TERMINAL",0,0,"Step 2758\r\nStep 2759\r\nStep 2760\r\nStep 2761\r\nStep 2762\r\nStep 2763\r\nStep 2764\r\nStep 2765\r\nStep 2766\r\nStep 2767\r\nStep 2768\r\nStep 2769\r\nStep 2770\r\nStep 2771\r\nStep 2772\r\nStep 2773\r\nStep 2774\r\nStep 2775\r\nStep 2776\r\nStep 2777\r\nStep 2778\r\nStep 2779\r\n",,terminal_output +2517,2641050,"TERMINAL",0,0,"Step 2780\r\n",,terminal_output +2518,2641128,"TERMINAL",0,0,"Step 2781\r\n",,terminal_output +2519,2641255,"TERMINAL",0,0,"Step 2782\r\nStep 2783\r\n",,terminal_output 
+2520,2641322,"TERMINAL",0,0,"Step 2784\r\nStep 2785\r\nStep 2786\r\nStep 2787\r\nStep 2788\r\nStep 2789\r\n",,terminal_output +2521,2641446,"TERMINAL",0,0,"Step 2790\r\nStep 2791\r\nStep 2792\r\nStep 2793\r\nStep 2794\r\nStep 2795\r\n",,terminal_output +2522,2641609,"TERMINAL",0,0,"Step 2796\r\nStep 2797\r\n",,terminal_output +2523,2641669,"TERMINAL",0,0,"Step 2798\r\n",,terminal_output +2524,2641728,"TERMINAL",0,0,"Step 2799\r\n",,terminal_output +2525,2641797,"TERMINAL",0,0,"Step 2800\r\nStep 2801\r\n",,terminal_output +2526,2641859,"TERMINAL",0,0,"Step 2802\r\n",,terminal_output +2527,2641988,"TERMINAL",0,0,"5\r2746",,terminal_output +2528,2642041,"TERMINAL",0,0,"Step 2803\r\nStep 2804\r\nStep 2805\r\nStep 2806\r\nStep 2807\r\nStep 2808\r\n",,terminal_output +2529,2642151,"TERMINAL",0,0,"Step 2809\r\nStep 2810\r\n",,terminal_output +2530,2642215,"TERMINAL",0,0,"Step 2811\r\nStep 2812\r\nStep 2813\r\nStep 2814\r\nStep 2815\r\nStep 2816\r\n",,terminal_output +2531,2642267,"TERMINAL",0,0,"Step 2817\r\n",,terminal_output +2532,2642375,"TERMINAL",0,0,"Step 2818\r\nStep 2819\r\nStep 2820\r\nStep 2821\r\n",,terminal_output +2533,2642517,"TERMINAL",0,0,"Step 2822\r\nStep 2823\r\nStep 2824\r\nStep 2825\r\nStep 2826\r\n",,terminal_output +2534,2642652,"TERMINAL",0,0,"Step 2827\r\nStep 2828\r\nStep 2829\r\nStep 2830\r\nStep 2831\r\nStep 2832\r\nStep 2833\r\nStep 2834\r\nStep 2835\r\n",,terminal_output +2535,2642759,"TERMINAL",0,0,"Step 2836\r\nStep 2837\r\nStep 2838\r\nStep 2839\r\nStep 2840\r\n",,terminal_output +2536,2642866,"TERMINAL",0,0,"Step 2841\r\nStep 2842\r\nStep 2843\r\nStep 2844\r\n",,terminal_output +2537,2642974,"TERMINAL",0,0,"Step 2845\r\nStep 2846\r\nStep 2847\r\nStep 2848\r\nStep 2849\r\nStep 2850\r\n",,terminal_output +2538,2643101,"TERMINAL",0,0,"Step 2851\r\nStep 2852\r\nStep 2853\r\n",,terminal_output +2539,2643266,"TERMINAL",0,0,"Step 2854\r\nStep 2855\r\nStep 2856\r\nStep 2857\r\nStep 2858\r\nStep 2859\r\nStep 2860\r\nStep 2861\r\nStep 2862\r\nStep 2863\r\nStep 2864\r\nStep 2865\r\nStep 2866\r\nStep 2867\r\nStep 2868\r\nStep 2869\r\nStep 2870\r\nStep 2871\r\nStep 2872\r\nStep 2873\r\nStep 2874\r\nStep 2875\r\nStep 2876\r\n",,terminal_output +2540,2643433,"TERMINAL",0,0,"Step 2877\r\nStep 2878\r\nStep 2879\r\nStep 2880\r\nStep 2881\r\nStep 2882\r\nStep 2883\r\n",,terminal_output +2541,2643615,"TERMINAL",0,0,"8\r2851",,terminal_output +2542,2643718,"TERMINAL",0,0,"Step 2884\r\nStep 2885\r\nStep 2886\r\nStep 2887\r\nStep 2888\r\nStep 2889\r\nStep 2890\r\nStep 2891\r\nStep 2892\r\nStep 2893\r\nStep 2894\r\nStep 2895\r\nStep 2896\r\nStep 2897\r\nStep 2898\r\nStep 2899\r\nStep 2900\r\nStep 2901\r\nStep 2902\r\nStep 2903\r\nStep 2904\r\n",,terminal_output +2543,2643846,"TERMINAL",0,0,"Step 2905\r\nStep 2906\r\nStep 2907\r\nStep 2908\r\nStep 2909\r\n",,terminal_output +2544,2643901,"TERMINAL",0,0,"Step 2910\r\nStep 2911\r\nStep 2912\r\n",,terminal_output +2545,2643965,"TERMINAL",0,0,"Step 2913\r\nStep 2914\r\nStep 2915\r\nStep 2916\r\n",,terminal_output +2546,2644069,"TERMINAL",0,0,"Step 2917\r\nStep 2918\r\nStep 2919\r\nStep 2920\r\nStep 2921\r\n",,terminal_output +2547,2644177,"TERMINAL",0,0,"Step 2922\r\nStep 2923\r\n",,terminal_output +2548,2644305,"TERMINAL",0,0,"Step 2924\r\nStep 2925\r\nStep 2926\r\nStep 2927\r\nStep 2928\r\n",,terminal_output +2549,2644369,"TERMINAL",0,0,"Step 2929\r\nStep 2930\r\nStep 2931\r\n",,terminal_output +2550,2644482,"TERMINAL",0,0,"Step 2932\r\nStep 2933\r\n",,terminal_output +2551,2644547,"TERMINAL",0,0,"Step 2934\r\n",,terminal_output 
+2552,2644611,"TERMINAL",0,0,"Step 2935\r\nStep 2936\r\n",,terminal_output +2553,2644672,"TERMINAL",0,0,"Step 2937\r\n",,terminal_output +2554,2644803,"TERMINAL",0,0,"Step 2938\r\nStep 2939\r\nStep 2940\r\nStep 2941\r\nStep 2942\r\nStep 2943\r\nStep 2944\r\nStep 2945\r\nStep 2946\r\nStep 2947\r\n",,terminal_output +2555,2644898,"TERMINAL",0,0,"Step 2948\r\n",,terminal_output +2556,2644989,"TERMINAL",0,0,"Step 2949\r\nStep 2950\r\nStep 2951\r\nStep 2952\r\nStep 2953\r\nStep 2954\r\n",,terminal_output +2557,2645120,"TERMINAL",0,0,"Step 2955\r\nStep 2956\r\nStep 2957\r\n",,terminal_output +2558,2645239,"TERMINAL",0,0,"Step 2958\r\n",,terminal_output +2559,2645305,"TERMINAL",0,0,"Step 2959\r\n",,terminal_output +2560,2645382,"TERMINAL",0,0,"Step 2960\r\nStep 2961\r\nStep 2962\r\nStep 2963\r\nStep 2964\r\nStep 2965\r\nStep 2966\r\nStep 2967\r\nStep 2968\r\n",,terminal_output +2561,2645500,"TERMINAL",0,0,"Step 2969\r\nStep 2970\r\nStep 2971\r\nStep 2972\r\n",,terminal_output +2562,2645561,"TERMINAL",0,0,"Step 2973\r\nStep 2974\r\nStep 2975\r\nStep 2976\r\n",,terminal_output +2563,2645626,"TERMINAL",0,0,"Step 2977\r\nStep 2978\r\nStep 2979\r\nStep 2980\r\n",,terminal_output +2564,2645689,"TERMINAL",0,0,"Step 2981\r\n",,terminal_output +2565,2645754,"TERMINAL",0,0,"50\r2937",,terminal_output +2566,2645780,"TERMINAL",0,0,"Step 2982\r\nStep 2983\r\nStep 2984\r\nStep 2985\r\n",,terminal_output +2567,2645981,"TERMINAL",0,0,"Step 2986\r\nStep 2987\r\nStep 2988\r\nStep 2989\r\nStep 2990\r\n",,terminal_output +2568,2646078,"TERMINAL",0,0,"Step 2991\r\nStep 2992\r\nStep 2993\r\nStep 2994\r\nStep 2995\r\nStep 2996\r\nStep 2997\r\nStep 2998\r\n",,terminal_output +2569,2646289,"TERMINAL",0,0,"Step 2999\r\nStep 3000\r\nStep 3001\r\nStep 3002\r\nStep 3003\r\nStep 3004\r\nStep 3005\r\nStep 3006\r\nStep 3007\r\nStep 3008\r\nStep 3009\r\n",,terminal_output +2570,2646449,"TERMINAL",0,0,"Step 3010\r\nStep 3011\r\nStep 3012\r\nStep 3013\r\nStep 3014\r\n",,terminal_output +2571,2646514,"TERMINAL",0,0,"Step 3015\r\n",,terminal_output +2572,2646636,"TERMINAL",0,0,"Step 3016\r\nStep 3017\r\nStep 3018\r\nStep 3019\r\nStep 3020\r\nStep 3021\r\nStep 3022\r\nStep 3023\r\nStep 3024\r\nStep 3025\r\n",,terminal_output +2573,2646764,"TERMINAL",0,0,"Step 3026\r\nStep 3027\r\nStep 3028\r\nStep 3029\r\nStep 3030\r\nStep 3031\r\n",,terminal_output +2574,2646899,"TERMINAL",0,0,"Step 3032\r\nStep 3033\r\nStep 3034\r\nStep 3035\r\n",,terminal_output +2575,2646959,"TERMINAL",0,0,"Step 3036\r\nStep 3037\r\n",,terminal_output +2576,2647022,"TERMINAL",0,0,"Step 3038\r\nStep 3039\r\nStep 3040\r\nStep 3041\r\n",,terminal_output +2577,2647085,"TERMINAL",0,0,"Step 3042\r\n",,terminal_output +2578,2647228,"TERMINAL",0,0,"Step 3043\r\nStep 3044\r\nStep 3045\r\nStep 3046\r\nStep 3047\r\nStep 3048\r\nStep 3049\r\n",,terminal_output +2579,2647311,"TERMINAL",0,0,"Step 3050\r\nStep 3051\r\nStep 3052\r\n",,terminal_output +2580,2647372,"TERMINAL",0,0,"Step 3053\r\nStep 3054\r\nStep 3055\r\n",,terminal_output +2581,2647452,"TERMINAL",0,0,"Step 3056\r\nStep 3057\r\nStep 3058\r\nStep 3059\r\nStep 3060\r\nStep 3061\r\n",,terminal_output +2582,2647513,"TERMINAL",0,0,"Step 3062\r\nStep 3063\r\n",,terminal_output +2583,2647620,"TERMINAL",0,0,"Step 3064\r\nStep 3065\r\nStep 3066\r\nStep 3067\r\nStep 3068\r\nStep 3069\r\nStep 3070\r\nStep 3071\r\nStep 3072\r\nStep 3073\r\nStep 3074\r\n",,terminal_output +2584,2647684,"TERMINAL",0,0,"Step 3075\r\n",,terminal_output +2585,2647753,"TERMINAL",0,0,"2\r3032",,terminal_output +2586,2647754,"TERMINAL",0,0,"Step 
3076\r\nStep 3077\r\n",,terminal_output +2587,2647833,"TERMINAL",0,0,"Step 3078\r\nStep 3079\r\nStep 3080\r\nStep 3081\r\nStep 3082\r\nStep 3083\r\nStep 3084\r\n",,terminal_output +2588,2647911,"TERMINAL",0,0,"Step 3085\r\nStep 3086\r\nStep 3087\r\nStep 3088\r\nStep 3089\r\n",,terminal_output +2589,2648054,"TERMINAL",0,0,"Step 3090\r\nStep 3091\r\nStep 3092\r\nStep 3093\r\nStep 3094\r\n",,terminal_output +2590,2648114,"TERMINAL",0,0,"Step 3095\r\nStep 3096\r\nStep 3097\r\nStep 3098\r\n",,terminal_output +2591,2648253,"TERMINAL",0,0,"Step 3099\r\nStep 3100\r\nStep 3101\r\nStep 3102\r\nStep 3103\r\nStep 3104\r\nStep 3105\r\nStep 3106\r\nStep 3107\r\nStep 3108\r\n",,terminal_output +2592,2648314,"TERMINAL",0,0,"Step 3109\r\n",,terminal_output +2593,2648434,"TERMINAL",0,0,"Step 3110\r\nStep 3111\r\nStep 3112\r\nStep 3113\r\nStep 3114\r\n",,terminal_output +2594,2648582,"TERMINAL",0,0,"Step 3115\r\nStep 3116\r\nStep 3117\r\nStep 3118\r\nStep 3119\r\nStep 3120\r\n",,terminal_output +2595,2648669,"TERMINAL",0,0,"Step 3121\r\nStep 3122\r\nStep 3123\r\n",,terminal_output +2596,2648777,"TERMINAL",0,0,"Step 3124\r\nStep 3125\r\n",,terminal_output +2597,2648839,"TERMINAL",0,0,"Step 3126\r\nStep 3127\r\nStep 3128\r\nStep 3129\r\n",,terminal_output +2598,2648905,"TERMINAL",0,0,"Step 3130\r\nStep 3131\r\nStep 3132\r\n",,terminal_output +2599,2649044,"TERMINAL",0,0,"Step 3133\r\nStep 3134\r\nStep 3135\r\nStep 3136\r\nStep 3137\r\nStep 3138\r\nStep 3139\r\n",,terminal_output +2600,2649108,"TERMINAL",0,0,"Step 3140\r\nStep 3141\r\n",,terminal_output +2601,2649172,"TERMINAL",0,0,"Step 3142\r\n",,terminal_output +2602,2649312,"TERMINAL",0,0,"Step 3143\r\nStep 3144\r\nStep 3145\r\nStep 3146\r\nStep 3147\r\nStep 3148\r\nStep 3149\r\n",,terminal_output +2603,2649419,"TERMINAL",0,0,"Step 3150\r\nStep 3151\r\nStep 3152\r\nStep 3153\r\nStep 3154\r\n",,terminal_output +2604,2649472,"TERMINAL",0,0,"Step 3155\r\n",,terminal_output +2605,2649596,"TERMINAL",0,0,"Step 3156\r\nStep 3157\r\nStep 3158\r\nStep 3159\r\nStep 3160\r\nStep 3161\r\n",,terminal_output +2606,2649744,"TERMINAL",0,0,"Step 3162\r\nStep 3163\r\nStep 3164\r\nStep 3165\r\nStep 3166\r\nStep 3167\r\nStep 3168\r\n",,terminal_output +2607,2649850,"TERMINAL",0,0,"Step 3169\r\nStep 3170\r\nStep 3171\r\nStep 3172\r\n",,terminal_output +2608,2649912,"TERMINAL",0,0,"Step 3173\r\nStep 3174\r\n",,terminal_output +2609,2649978,"TERMINAL",0,0,"Step 3175\r\nStep 3176\r\nStep 3177\r\nStep 3178\r\nStep 3179\r\n",,terminal_output +2610,2650142,"TERMINAL",0,0,"Step 3180\r\nStep 3181\r\nStep 3182\r\nStep 3183\r\nStep 3184\r\nStep 3185\r\nStep 3186\r\nStep 3187\r\nStep 3188\r\nStep 3189\r\n",,terminal_output +2611,2650228,"TERMINAL",0,0,"Step 3190\r\nStep 3191\r\nStep 3192\r\nStep 3193\r\nStep 3194\r\nStep 3195\r\nStep 3196\r\nStep 3197\r\nStep 3198\r\nStep 3199\r\nStep 3200\r\nStep 3201\r\nStep 3202\r\nStep 3203\r\n",,terminal_output +2612,2650343,"TERMINAL",0,0,"Step 3204\r\nStep 3205\r\nStep 3206\r\nStep 3207\r\n",,terminal_output +2613,2650397,"TERMINAL",0,0,"Step 3208\r\nStep 3209\r\nStep 3210\r\nStep 3211\r\n",,terminal_output +2614,2650557,"TERMINAL",0,0,"4\r3126",,terminal_output +2615,2650643,"TERMINAL",0,0,"Step 3212\r\nStep 3213\r\nStep 3214\r\nStep 3215\r\nStep 3216\r\nStep 3217\r\n",,terminal_output +2616,2650709,"TERMINAL",0,0,"Step 3218\r\nStep 3219\r\nStep 3220\r\nStep 3221\r\nStep 3222\r\nStep 3223\r\n",,terminal_output +2617,2650796,"TERMINAL",0,0,"Step 3224\r\nStep 3225\r\n",,terminal_output +2618,2650912,"TERMINAL",0,0,"Step 3226\r\nStep 3227\r\nStep 
3228\r\n",,terminal_output +2619,2650970,"TERMINAL",0,0,"Step 3229\r\n",,terminal_output +2620,2651257,"TERMINAL",0,0,"Step 3230\r\nStep 3231\r\nStep 3232\r\nStep 3233\r\nStep 3234\r\nStep 3235\r\nStep 3236\r\nStep 3237\r\nStep 3238\r\nStep 3239\r\nStep 3240\r\nStep 3241\r\nStep 3242\r\nStep 3243\r\nStep 3244\r\nStep 3245\r\nStep 3246\r\nStep 3247\r\nStep 3248\r\nStep 3249\r\n",,terminal_output +2621,2651423,"TERMINAL",0,0,"Step 3250\r\nStep 3251\r\nStep 3252\r\nStep 3253\r\nStep 3254\r\nStep 3255\r\nStep 3256\r\nStep 3257\r\nStep 3258\r\n",,terminal_output +2622,2651518,"TERMINAL",0,0,"Step 3259\r\nStep 3260\r\n",,terminal_output +2623,2651581,"TERMINAL",0,0,"Step 3261\r\n",,terminal_output +2624,2651704,"TERMINAL",0,0,"Step 3262\r\nStep 3263\r\nStep 3264\r\nStep 3265\r\nStep 3266\r\nStep 3267\r\nStep 3268\r\nStep 3269\r\n",,terminal_output +2625,2651840,"TERMINAL",0,0,"Step 3270\r\nStep 3271\r\nStep 3272\r\nStep 3273\r\nStep 3274\r\n",,terminal_output +2626,2651893,"TERMINAL",0,0,"Step 3275\r\nStep 3276\r\n",,terminal_output +2627,2652014,"TERMINAL",0,0,"Step 3277\r\nStep 3278\r\nStep 3279\r\nStep 3280\r\nStep 3281\r\n",,terminal_output +2628,2652124,"TERMINAL",0,0,"Step 3282\r\n",,terminal_output +2629,2652187,"TERMINAL",0,0,"Step 3283\r\n",,terminal_output +2630,2652353,"TERMINAL",0,0,"Step 3284\r\nStep 3285\r\nStep 3286\r\nStep 3287\r\nStep 3288\r\nStep 3289\r\nStep 3290\r\nStep 3291\r\nStep 3292\r\n",,terminal_output +2631,2652467,"TERMINAL",0,0,"Step 3293\r\nStep 3294\r\nStep 3295\r\nStep 3296\r\n",,terminal_output +2632,2652539,"TERMINAL",0,0,"Step 3297\r\nStep 3298\r\n",,terminal_output +2633,2652647,"TERMINAL",0,0,"Step 3299\r\nStep 3300\r\nStep 3301\r\nStep 3302\r\nStep 3303\r\n",,terminal_output +2634,2652765,"TERMINAL",0,0,"Step 3304\r\nStep 3305\r\nStep 3306\r\nStep 3307\r\n",,terminal_output +2635,2652889,"TERMINAL",0,0,"Step 3308\r\nStep 3309\r\nStep 3310\r\n",,terminal_output +2636,2652978,"TERMINAL",0,0,"Step 3311\r\nStep 3312\r\nStep 3313\r\n",,terminal_output +2637,2653040,"TERMINAL",0,0,"6\r3262",,terminal_output +2638,2653128,"TERMINAL",0,0,"Step 3314\r\nStep 3315\r\nStep 3316\r\nStep 3317\r\nStep 3318\r\nStep 3319\r\nStep 3320\r\nStep 3321\r\nStep 3322\r\nStep 3323\r\nStep 3324\r\nStep 3325\r\nStep 3326\r\nStep 3327\r\nStep 3328\r\nStep 3329\r\nStep 3330\r\nStep 3331\r\n",,terminal_output +2639,2653234,"TERMINAL",0,0,"Step 3332\r\nStep 3333\r\nStep 3334\r\nStep 3335\r\n",,terminal_output +2640,2653353,"TERMINAL",0,0,"Step 3336\r\nStep 3337\r\nStep 3338\r\nStep 3339\r\nStep 3340\r\nStep 3341\r\nStep 3342\r\n",,terminal_output +2641,2653455,"TERMINAL",0,0,"Step 3343\r\n",,terminal_output +2642,2653582,"TERMINAL",0,0,"Step 3344\r\nStep 3345\r\nStep 3346\r\nStep 3347\r\n",,terminal_output +2643,2653706,"TERMINAL",0,0,"Step 3348\r\nStep 3349\r\nStep 3350\r\nStep 3351\r\nStep 3352\r\nStep 3353\r\nStep 3354\r\nStep 3355\r\nStep 3356\r\nStep 3357\r\n",,terminal_output +2644,2653959,"TERMINAL",0,0,"Step 3358\r\nStep 3359\r\nStep 3360\r\nStep 3361\r\nStep 3362\r\nStep 3363\r\nStep 3364\r\nStep 3365\r\nStep 3366\r\nStep 3367\r\nStep 3368\r\nStep 3369\r\nStep 3370\r\nStep 3371\r\nStep 3372\r\nStep 3373\r\nStep 3374\r\nStep 3375\r\nStep 3376\r\nStep 3377\r\nStep 3378\r\nStep 3379\r\n",,terminal_output +2645,2654026,"TERMINAL",0,0,"Step 3380\r\nStep 3381\r\n",,terminal_output +2646,2654085,"TERMINAL",0,0,"Step 3382\r\n",,terminal_output +2647,2654149,"TERMINAL",0,0,"Step 3383\r\n",,terminal_output +2648,2654248,"TERMINAL",0,0,"Step 3384\r\nStep 3385\r\nStep 3386\r\nStep 
3387\r\nStep 3388\r\nStep 3389\r\nStep 3390\r\nStep 3391\r\n",,terminal_output +2649,2654310,"TERMINAL",0,0,"Step 3392\r\n",,terminal_output +2650,2654437,"TERMINAL",0,0,"Step 3393\r\nStep 3394\r\nStep 3395\r\nStep 3396\r\nStep 3397\r\n",,terminal_output +2651,2654499,"TERMINAL",0,0,"Step 3398\r\nStep 3399\r\n",,terminal_output +2652,2654606,"TERMINAL",0,0,"Step 3400\r\nStep 3401\r\nStep 3402\r\n",,terminal_output +2653,2654798,"TERMINAL",0,0,"Step 3403\r\nStep 3404\r\nStep 3405\r\nStep 3406\r\nStep 3407\r\nStep 3408\r\n",,terminal_output +2654,2654928,"TERMINAL",0,0,"Step 3409\r\n",,terminal_output +2655,2654995,"TERMINAL",0,0,"Step 3410\r\nStep 3411\r\nStep 3412\r\nStep 3413\r\nStep 3414\r\nStep 3415\r\nStep 3416\r\n",,terminal_output +2656,2655102,"TERMINAL",0,0,"Step 3417\r\n",,terminal_output +2657,2655218,"TERMINAL",0,0,"Step 3418\r\nStep 3419\r\nStep 3420\r\nStep 3421\r\n",,terminal_output +2658,2655311,"TERMINAL",0,0,"Step 3422\r\nStep 3423\r\nStep 3424\r\nStep 3425\r\n",,terminal_output +2659,2655364,"TERMINAL",0,0,"Step 3426\r\n",,terminal_output +2660,2655531,"TERMINAL",0,0,"Step 3427\r\nStep 3428\r\nStep 3429\r\nStep 3430\r\nStep 3431\r\nStep 3432\r\nStep 3433\r\n",,terminal_output +2661,2655668,"TERMINAL",0,0,"Step 3434\r\nStep 3435\r\nStep 3436\r\nStep 3437\r\n",,terminal_output +2662,2655731,"TERMINAL",0,0,"Step 3438\r\nStep 3439\r\nStep 3440\r\nStep 3441\r\nStep 3442\r\nStep 3443\r\n",,terminal_output +2663,2655795,"TERMINAL",0,0,"Step 3444\r\nStep 3445\r\nStep 3446\r\n",,terminal_output +2664,2655808,"TERMINAL",0,0,"9\r3383",,terminal_output +2665,2655863,"TERMINAL",0,0,"Step 3447\r\nStep 3448\r\nStep 3449\r\n",,terminal_output +2666,2655950,"TERMINAL",0,0,"Step 3450\r\nStep 3451\r\nStep 3452\r\nStep 3453\r\nStep 3454\r\n",,terminal_output +2667,2656097,"TERMINAL",0,0,"Step 3455\r\nStep 3456\r\nStep 3457\r\nStep 3458\r\nStep 3459\r\nStep 3460\r\nStep 3461\r\n",,terminal_output +2668,2656221,"TERMINAL",0,0,"Step 3462\r\nStep 3463\r\nStep 3464\r\nStep 3465\r\nStep 3466\r\n",,terminal_output +2669,2656302,"TERMINAL",0,0,"Step 3467\r\nStep 3468\r\nStep 3469\r\nStep 3470\r\n",,terminal_output +2670,2656413,"TERMINAL",0,0,"Step 3471\r\nStep 3472\r\nStep 3473\r\nStep 3474\r\n",,terminal_output +2671,2656528,"TERMINAL",0,0,"Step 3475\r\nStep 3476\r\nStep 3477\r\n",,terminal_output +2672,2656587,"TERMINAL",0,0,"Step 3478\r\nStep 3479\r\n",,terminal_output +2673,2656652,"TERMINAL",0,0,"Step 3480\r\n",,terminal_output +2674,2656782,"TERMINAL",0,0,"Step 3481\r\nStep 3482\r\nStep 3483\r\nStep 3484\r\nStep 3485\r\nStep 3486\r\nStep 3487\r\nStep 3488\r\n",,terminal_output +2675,2656906,"TERMINAL",0,0,"Step 3489\r\nStep 3490\r\nStep 3491\r\nStep 3492\r\nStep 3493\r\nStep 3494\r\n",,terminal_output +2676,2657029,"TERMINAL",0,0,"Step 3495\r\nStep 3496\r\nStep 3497\r\n",,terminal_output +2677,2657128,"TERMINAL",0,0,"Step 3498\r\nStep 3499\r\nStep 3500\r\nStep 3501\r\nStep 3502\r\n",,terminal_output +2678,2657182,"TERMINAL",0,0,"Step 3503\r\nStep 3504\r\n",,terminal_output +2679,2657288,"TERMINAL",0,0,"Step 3505\r\nStep 3506\r\nStep 3507\r\nStep 3508\r\nStep 3509\r\nStep 3510\r\nStep 3511\r\n",,terminal_output +2680,2657349,"TERMINAL",0,0,"Step 3512\r\n",,terminal_output +2681,2657511,"TERMINAL",0,0,"Step 3513\r\nStep 3514\r\nStep 3515\r\nStep 3516\r\nStep 3517\r\nStep 3518\r\nStep 3519\r\nStep 3520\r\nStep 3521\r\nStep 3522\r\n",,terminal_output +2682,2657695,"TERMINAL",0,0,"Step 3523\r\nStep 3524\r\nStep 3525\r\nStep 3526\r\n",,terminal_output +2683,2657760,"TERMINAL",0,0,"Step 3527\r\nStep 
3528\r\nStep 3529\r\nStep 3530\r\nStep 3531\r\nStep 3532\r\nStep 3533\r\n",,terminal_output +2684,2657823,"TERMINAL",0,0,"Step 3534\r\nStep 3535\r\nStep 3536\r\nStep 3537\r\n",,terminal_output +2685,2657886,"TERMINAL",0,0,"Step 3538\r\n",,terminal_output +2686,2657964,"TERMINAL",0,0,"Step 3539\r\nStep 3540\r\nStep 3541\r\n",,terminal_output +2687,2658029,"TERMINAL",0,0,"Step 3542\r\nStep 3543\r\n",,terminal_output +2688,2658127,"TERMINAL",0,0,"Step 3544\r\nStep 3545\r\nStep 3546\r\nStep 3547\r\nStep 3548\r\nStep 3549\r\nStep 3550\r\nStep 3551\r\nStep 3552\r\nStep 3553\r\n",,terminal_output +2689,2658261,"TERMINAL",0,0,"Step 3554\r\nStep 3555\r\nStep 3556\r\nStep 3557\r\nStep 3558\r\nStep 3559\r\nStep 3560\r\nStep 3561\r\n",,terminal_output +2690,2658320,"TERMINAL",0,0,"Step 3562\r\n",,terminal_output +2691,2658384,"TERMINAL",0,0,"Step 3563\r\nStep 3564\r\nStep 3565\r\n",,terminal_output +2692,2658475,"TERMINAL",0,0,"Step 3566\r\nStep 3567\r\nStep 3568\r\nStep 3569\r\nStep 3570\r\nStep 3571\r\nStep 3572\r\n",,terminal_output +2693,2658539,"TERMINAL",0,0,"Step 3573\r\nStep 3574\r\nStep 3575\r\n",,terminal_output +2694,2658604,"TERMINAL",0,0,"Step 3576\r\nStep 3577\r\nStep 3578\r\n",,terminal_output +2695,2658736,"TERMINAL",0,0,"Step 3579\r\nStep 3580\r\nStep 3581\r\nStep 3582\r\nStep 3583\r\nStep 3584\r\nStep 3585\r\nStep 3586\r\nStep 3587\r\n",,terminal_output +2696,2658789,"TERMINAL",0,0,"Step 3588\r\n",,terminal_output +2697,2658814,"TERMINAL",0,0,"10:02\r3489",,terminal_output +2698,2658880,"TERMINAL",0,0,"Step 3589\r\nStep 3590\r\nStep 3591\r\n",,terminal_output +2699,2658945,"TERMINAL",0,0,"Step 3592\r\nStep 3593\r\n",,terminal_output +2700,2659055,"TERMINAL",0,0,"Step 3594\r\nStep 3595\r\nStep 3596\r\nStep 3597\r\nStep 3598\r\nStep 3599\r\nStep 3600\r\nStep 3601\r\n",,terminal_output +2701,2659121,"TERMINAL",0,0,"Step 3602\r\n",,terminal_output +2702,2659275,"TERMINAL",0,0,"Step 3603\r\nStep 3604\r\nStep 3605\r\nStep 3606\r\nStep 3607\r\nStep 3608\r\nStep 3609\r\nStep 3610\r\nStep 3611\r\nStep 3612\r\n",,terminal_output +2703,2659399,"TERMINAL",0,0,"Step 3613\r\nStep 3614\r\nStep 3615\r\nStep 3616\r\n",,terminal_output +2704,2659540,"TERMINAL",0,0,"Step 3617\r\nStep 3618\r\nStep 3619\r\nStep 3620\r\nStep 3621\r\nStep 3622\r\nStep 3623\r\nStep 3624\r\nStep 3625\r\nStep 3626\r\nStep 3627\r\nStep 3628\r\nStep 3629\r\nStep 3630\r\nStep 3631\r\nStep 3632\r\nStep 3633\r\nStep 3634\r\nStep 3635\r\n",,terminal_output +2705,2659604,"TERMINAL",0,0,"Step 3636\r\nStep 3637\r\nStep 3638\r\nStep 3639\r\nStep 3640\r\nStep 3641\r\n",,terminal_output +2706,2659667,"TERMINAL",0,0,"Step 3642\r\nStep 3643\r\nStep 3644\r\nStep 3645\r\n",,terminal_output +2707,2659779,"TERMINAL",0,0,"Step 3646\r\nStep 3647\r\n",,terminal_output +2708,2659890,"TERMINAL",0,0,"Step 3648\r\nStep 3649\r\nStep 3650\r\n",,terminal_output +2709,2660020,"TERMINAL",0,0,"Step 3651\r\nStep 3652\r\nStep 3653\r\nStep 3654\r\nStep 3655\r\nStep 3656\r\n",,terminal_output +2710,2660138,"TERMINAL",0,0,"Step 3657\r\n",,terminal_output +2711,2660347,"TERMINAL",0,0,"Step 3658\r\nStep 3659\r\nStep 3660\r\nStep 3661\r\nStep 3662\r\nStep 3663\r\nStep 3664\r\nStep 3665\r\nStep 3666\r\nStep 3667\r\nStep 3668\r\nStep 3669\r\n",,terminal_output +2712,2660401,"TERMINAL",0,0,"Step 3670\r\nStep 3671\r\nStep 3672\r\n",,terminal_output +2713,2660533,"TERMINAL",0,0,"Step 3673\r\nStep 3674\r\n",,terminal_output +2714,2660639,"TERMINAL",0,0,"Step 3675\r\nStep 3676\r\nStep 3677\r\nStep 3678\r\n",,terminal_output +2715,2660703,"TERMINAL",0,0,"Step 
3679\r\n",,terminal_output +2716,2660800,"TERMINAL",0,0,"Step 3680\r\nStep 3681\r\nStep 3682\r\nStep 3683\r\nStep 3684\r\nStep 3685\r\n",,terminal_output +2717,2660960,"TERMINAL",0,0,"Step 3686\r\nStep 3687\r\nStep 3688\r\nStep 3689\r\n",,terminal_output +2718,2661026,"TERMINAL",0,0,"Step 3690\r\nStep 3691\r\n",,terminal_output +2719,2661136,"TERMINAL",0,0,"Step 3692\r\n",,terminal_output +2720,2661364,"TERMINAL",0,0,"Step 3693\r\nStep 3694\r\nStep 3695\r\nStep 3696\r\nStep 3697\r\nStep 3698\r\nStep 3699\r\nStep 3700\r\nStep 3701\r\nStep 3702\r\nStep 3703\r\nStep 3704\r\nStep 3705\r\nStep 3706\r\nStep 3707\r\nStep 3708\r\nStep 3709\r\nStep 3710\r\nStep 3711\r\nStep 3712\r\nStep 3713\r\nStep 3714\r\nStep 3715\r\nStep 3716\r\nStep 3717\r\nStep 3718\r\nStep 3719\r\nStep 3720\r\n",,terminal_output +2721,2661475,"TERMINAL",0,0,"Step 3721\r\n",,terminal_output +2722,2661559,"TERMINAL",0,0,"Step 3722\r\nStep 3723\r\nStep 3724\r\n",,terminal_output +2723,2661623,"TERMINAL",0,0,"Step 3725\r\nStep 3726\r\nStep 3727\r\nStep 3728\r\n",,terminal_output +2724,2661635,"TERMINAL",0,0,"5\r3648",,terminal_output +2725,2661742,"TERMINAL",0,0,"Step 3729\r\n",,terminal_output +2726,2661810,"TERMINAL",0,0,"Step 3730\r\nStep 3731\r\nStep 3732\r\nStep 3733\r\n",,terminal_output +2727,2661866,"TERMINAL",0,0,"Step 3734\r\nStep 3735\r\nStep 3736\r\n",,terminal_output +2728,2662109,"TERMINAL",0,0,"Step 3737\r\nStep 3738\r\nStep 3739\r\nStep 3740\r\nStep 3741\r\nStep 3742\r\nStep 3743\r\nStep 3744\r\nStep 3745\r\nStep 3746\r\n",,terminal_output +2729,2662229,"TERMINAL",0,0,"Step 3747\r\nStep 3748\r\n",,terminal_output +2730,2662341,"TERMINAL",0,0,"Step 3749\r\nStep 3750\r\nStep 3751\r\nStep 3752\r\nStep 3753\r\n",,terminal_output +2731,2662529,"TERMINAL",0,0,"Step 3754\r\nStep 3755\r\nStep 3756\r\nStep 3757\r\nStep 3758\r\nStep 3759\r\nStep 3760\r\nStep 3761\r\nStep 3762\r\n",,terminal_output +2732,2662657,"TERMINAL",0,0,"Step 3763\r\nStep 3764\r\nStep 3765\r\n",,terminal_output +2733,2662775,"TERMINAL",0,0,"Step 3766\r\nStep 3767\r\nStep 3768\r\nStep 3769\r\nStep 3770\r\nStep 3771\r\nStep 3772\r\n",,terminal_output +2734,2662839,"TERMINAL",0,0,"Step 3773\r\nStep 3774\r\nStep 3775\r\n",,terminal_output +2735,2662952,"TERMINAL",0,0,"Step 3776\r\nStep 3777\r\nStep 3778\r\n",,terminal_output +2736,2663092,"TERMINAL",0,0,"Step 3779\r\nStep 3780\r\nStep 3781\r\nStep 3782\r\nStep 3783\r\nStep 3784\r\nStep 3785\r\nStep 3786\r\nStep 3787\r\nStep 3788\r\nStep 3789\r\nStep 3790\r\n",,terminal_output +2737,2663155,"TERMINAL",0,0,"Step 3791\r\nStep 3792\r\n",,terminal_output +2738,2663289,"TERMINAL",0,0,"Step 3793\r\nStep 3794\r\nStep 3795\r\nStep 3796\r\nStep 3797\r\nStep 3798\r\nStep 3799\r\n",,terminal_output +2739,2663350,"TERMINAL",0,0,"Step 3800\r\nStep 3801\r\nStep 3802\r\n",,terminal_output +2740,2663479,"TERMINAL",0,0,"Step 3803\r\nStep 3804\r\nStep 3805\r\nStep 3806\r\nStep 3807\r\nStep 3808\r\nStep 3809\r\nStep 3810\r\nStep 3811\r\nStep 3812\r\nStep 3813\r\nStep 3814\r\nStep 3815\r\nStep 3816\r\nStep 3817\r\nStep 3818\r\nStep 3819\r\nStep 3820\r\n",,terminal_output +2741,2663596,"TERMINAL",0,0,"Step 3821\r\nStep 3822\r\nStep 3823\r\n",,terminal_output +2742,2663845,"TERMINAL",0,0,"Step 3824\r\nStep 3825\r\nStep 3826\r\nStep 3827\r\nStep 3828\r\nStep 3829\r\nStep 3830\r\nStep 3831\r\nStep 3832\r\nStep 3833\r\nStep 3834\r\nStep 3835\r\nStep 3836\r\nStep 3837\r\nStep 3838\r\nStep 3839\r\nStep 3840\r\nStep 3841\r\nStep 3842\r\nStep 3843\r\nStep 3844\r\nStep 3845\r\nStep 3846\r\nStep 3847\r\nStep 3848\r\nStep 
3849\r\n",,terminal_output +2743,2663919,"TERMINAL",0,0,"Step 3850\r\n",,terminal_output +2744,2664004,"TERMINAL",0,0,"Step 3851\r\nStep 3852\r\nStep 3853\r\n",,terminal_output +2745,2664065,"TERMINAL",0,0,"Step 3854\r\n",,terminal_output +2746,2664156,"TERMINAL",0,0,"Step 3855\r\nStep 3856\r\n",,terminal_output +2747,2664348,"TERMINAL",0,0,"Step 3857\r\nStep 3858\r\nStep 3859\r\nStep 3860\r\nStep 3861\r\nStep 3862\r\nStep 3863\r\n",,terminal_output +2748,2664490,"TERMINAL",0,0,"Step 3864\r\nStep 3865\r\n",,terminal_output +2749,2664589,"TERMINAL",0,0,"Step 3866\r\nStep 3867\r\nStep 3868\r\nStep 3869\r\n",,terminal_output +2750,2664788,"TERMINAL",0,0,"8\r3766",,terminal_output +2751,2664809,"TERMINAL",0,0,"Step 3870\r\nStep 3871\r\nStep 3872\r\nStep 3873\r\n",,terminal_output +2752,2664931,"TERMINAL",0,0,"Step 3874\r\n",,terminal_output +2753,2665050,"TERMINAL",0,0,"Step 3875\r\nStep 3876\r\nStep 3877\r\nStep 3878\r\nStep 3879\r\n",,terminal_output +2754,2665110,"TERMINAL",0,0,"Step 3880\r\nStep 3881\r\nStep 3882\r\nStep 3883\r\nStep 3884\r\nStep 3885\r\n",,terminal_output +2755,2665248,"TERMINAL",0,0,"Step 3886\r\nStep 3887\r\nStep 3888\r\nStep 3889\r\nStep 3890\r\nStep 3891\r\n",,terminal_output +2756,2665356,"TERMINAL",0,0,"Step 3892\r\nStep 3893\r\nStep 3894\r\nStep 3895\r\n",,terminal_output +2757,2665413,"TERMINAL",0,0,"Step 3896\r\nStep 3897\r\n",,terminal_output +2758,2665469,"TERMINAL",0,0,"Step 3898\r\nStep 3899\r\n",,terminal_output +2759,2665529,"TERMINAL",0,0,"Step 3900\r\n",,terminal_output +2760,2665591,"TERMINAL",0,0,"Step 3901\r\nStep 3902\r\n",,terminal_output +2761,2665660,"TERMINAL",0,0,"Step 3903\r\nStep 3904\r\nStep 3905\r\nStep 3906\r\nStep 3907\r\nStep 3908\r\n",,terminal_output +2762,2665719,"TERMINAL",0,0,"Step 3909\r\n",,terminal_output +2763,2665771,"TERMINAL",0,0,"Step 3910\r\n",,terminal_output +2764,2665824,"TERMINAL",0,0,"Step 3911\r\n",,terminal_output +2765,2665940,"TERMINAL",0,0,"Step 3912\r\nStep 3913\r\nStep 3914\r\nStep 3915\r\nStep 3916\r\nStep 3917\r\nStep 3918\r\nStep 3919\r\nStep 3920\r\n",,terminal_output +2766,2666008,"TERMINAL",0,0,"Step 3921\r\nStep 3922\r\nStep 3923\r\n",,terminal_output +2767,2666118,"TERMINAL",0,0,"Step 3924\r\n",,terminal_output +2768,2666208,"TERMINAL",0,0,"Step 3925\r\nStep 3926\r\nStep 3927\r\nStep 3928\r\nStep 3929\r\n",,terminal_output +2769,2666336,"TERMINAL",0,0,"Step 3930\r\nStep 3931\r\nStep 3932\r\nStep 3933\r\nStep 3934\r\nStep 3935\r\nStep 3936\r\n",,terminal_output +2770,2666432,"TERMINAL",0,0,"Step 3937\r\nStep 3938\r\nStep 3939\r\n",,terminal_output +2771,2666529,"TERMINAL",0,0,"Step 3940\r\nStep 3941\r\nStep 3942\r\nStep 3943\r\nStep 3944\r\nStep 3945\r\n",,terminal_output +2772,2666670,"TERMINAL",0,0,"Step 3946\r\nStep 3947\r\nStep 3948\r\nStep 3949\r\nStep 3950\r\n",,terminal_output +2773,2666732,"TERMINAL",0,0,"Step 3951\r\nStep 3952\r\nStep 3953\r\nStep 3954\r\n",,terminal_output +2774,2666824,"TERMINAL",0,0,"Step 3955\r\nStep 3956\r\nStep 3957\r\nStep 3958\r\nStep 3959\r\n",,terminal_output +2775,2666910,"TERMINAL",0,0,"Step 3960\r\nStep 3961\r\nStep 3962\r\nStep 3963\r\nStep 3964\r\n",,terminal_output +2776,2667001,"TERMINAL",0,0,"Step 3965\r\nStep 3966\r\nStep 3967\r\nStep 3968\r\n",,terminal_output +2777,2667145,"TERMINAL",0,0,"Step 3969\r\nStep 3970\r\nStep 3971\r\nStep 3972\r\nStep 3973\r\nStep 3974\r\nStep 3975\r\n",,terminal_output +2778,2667242,"TERMINAL",0,0,"11\r3912",,terminal_output +2779,2667316,"TERMINAL",0,0,"Step 3976\r\nStep 3977\r\nStep 3978\r\nStep 3979\r\nStep 3980\r\nStep 
3981\r\nStep 3982\r\nStep 3983\r\nStep 3984\r\n",,terminal_output +2780,2667376,"TERMINAL",0,0,"Step 3985\r\nStep 3986\r\n",,terminal_output +2781,2667504,"TERMINAL",0,0,"Step 3987\r\nStep 3988\r\nStep 3989\r\nStep 3990\r\nStep 3991\r\nStep 3992\r\nStep 3993\r\nStep 3994\r\nStep 3995\r\nStep 3996\r\n",,terminal_output +2782,2667636,"TERMINAL",0,0,"Step 3997\r\nStep 3998\r\nStep 3999\r\nStep 4000\r\nStep 4001\r\n",,terminal_output +2783,2667702,"TERMINAL",0,0,"Step 4002\r\nStep 4003\r\nStep 4004\r\nStep 4005\r\nStep 4006\r\nStep 4007\r\nStep 4008\r\n",,terminal_output +2784,2667914,"TERMINAL",0,0,"Step 4009\r\nStep 4010\r\nStep 4011\r\nStep 4012\r\nStep 4013\r\nStep 4014\r\nStep 4015\r\nStep 4016\r\nStep 4017\r\nStep 4018\r\nStep 4019\r\nStep 4020\r\n",,terminal_output +2785,2668019,"TERMINAL",0,0,"Step 4021\r\nStep 4022\r\nStep 4023\r\nStep 4024\r\nStep 4025\r\nStep 4026\r\nStep 4027\r\n",,terminal_output +2786,2668132,"TERMINAL",0,0,"Step 4028\r\nStep 4029\r\nStep 4030\r\n",,terminal_output +2787,2668204,"TERMINAL",0,0,"Step 4031\r\nStep 4032\r\nStep 4033\r\nStep 4034\r\n",,terminal_output +2788,2668328,"TERMINAL",0,0,"Step 4035\r\nStep 4036\r\nStep 4037\r\nStep 4038\r\nStep 4039\r\nStep 4040\r\nStep 4041\r\nStep 4042\r\nStep 4043\r\n",,terminal_output +2789,2668583,"TERMINAL",0,0,"Step 4044\r\nStep 4045\r\nStep 4046\r\nStep 4047\r\nStep 4048\r\nStep 4049\r\nStep 4050\r\nStep 4051\r\nStep 4052\r\nStep 4053\r\nStep 4054\r\nStep 4055\r\nStep 4056\r\nStep 4057\r\nStep 4058\r\nStep 4059\r\nStep 4060\r\nStep 4061\r\nStep 4062\r\nStep 4063\r\nStep 4064\r\nStep 4065\r\nStep 4066\r\nStep 4067\r\n",,terminal_output +2790,2668651,"TERMINAL",0,0,"Step 4068\r\nStep 4069\r\nStep 4070\r\nStep 4071\r\nStep 4072\r\n",,terminal_output +2791,2668708,"TERMINAL",0,0,"Step 4073\r\nStep 4074\r\n",,terminal_output +2792,2668856,"TERMINAL",0,0,"Step 4075\r\nStep 4076\r\nStep 4077\r\nStep 4078\r\nStep 4079\r\n",,terminal_output +2793,2669026,"TERMINAL",0,0,"Step 4080\r\nStep 4081\r\nStep 4082\r\nStep 4083\r\nStep 4084\r\nStep 4085\r\nStep 4086\r\nStep 4087\r\nStep 4088\r\n",,terminal_output +2794,2669135,"TERMINAL",0,0,"Step 4089\r\nStep 4090\r\nStep 4091\r\nStep 4092\r\n",,terminal_output +2795,2669197,"TERMINAL",0,0,"3\r4035",,terminal_output +2796,2669264,"TERMINAL",0,0,"Step 4093\r\nStep 4094\r\nStep 4095\r\n",,terminal_output +2797,2669458,"TERMINAL",0,0,"Step 4096\r\nStep 4097\r\nStep 4098\r\nStep 4099\r\n",,terminal_output +2798,2669514,"TERMINAL",0,0,"Step 4100\r\n",,terminal_output +2799,2669615,"TERMINAL",0,0,"Step 4101\r\nStep 4102\r\nStep 4103\r\nStep 4104\r\nStep 4105\r\n",,terminal_output +2800,2669680,"TERMINAL",0,0,"Step 4106\r\nStep 4107\r\nStep 4108\r\nStep 4109\r\n",,terminal_output +2801,2669798,"TERMINAL",0,0,"Step 4110\r\nStep 4111\r\nStep 4112\r\nStep 4113\r\n",,terminal_output +2802,2669863,"TERMINAL",0,0,"Step 4114\r\nStep 4115\r\nStep 4116\r\nStep 4117\r\nStep 4118\r\n",,terminal_output +2803,2669933,"TERMINAL",0,0,"Step 4119\r\nStep 4120\r\nStep 4121\r\n",,terminal_output +2804,2669986,"TERMINAL",0,0,"Step 4122\r\nStep 4123\r\n",,terminal_output +2805,2670100,"TERMINAL",0,0,"Step 4124\r\nStep 4125\r\nStep 4126\r\nStep 4127\r\nStep 4128\r\nStep 4129\r\nStep 4130\r\nStep 4131\r\n",,terminal_output +2806,2670188,"TERMINAL",0,0,"Step 4132\r\nStep 4133\r\nStep 4134\r\nStep 4135\r\nStep 4136\r\n",,terminal_output +2807,2670277,"TERMINAL",0,0,"Step 4137\r\nStep 4138\r\nStep 4139\r\nStep 4140\r\n",,terminal_output +2808,2670370,"TERMINAL",0,0,"Step 4141\r\nStep 4142\r\nStep 4143\r\nStep 
4144\r\nStep 4145\r\nStep 4146\r\n",,terminal_output +2809,2670446,"TERMINAL",0,0,"Step 4147\r\nStep 4148\r\nStep 4149\r\nStep 4150\r\n",,terminal_output +2810,2670512,"TERMINAL",0,0,"Step 4151\r\nStep 4152\r\n",,terminal_output +2811,2670583,"TERMINAL",0,0,"Step 4153\r\nStep 4154\r\nStep 4155\r\nStep 4156\r\n",,terminal_output +2812,2670642,"TERMINAL",0,0,"Step 4157\r\nStep 4158\r\nStep 4159\r\n",,terminal_output +2813,2670755,"TERMINAL",0,0,"Step 4160\r\nStep 4161\r\nStep 4162\r\n",,terminal_output +2814,2670830,"TERMINAL",0,0,"Step 4163\r\nStep 4164\r\nStep 4165\r\nStep 4166\r\n",,terminal_output +2815,2671049,"TERMINAL",0,0,"5\r4137",,terminal_output +2816,2671095,"TERMINAL",0,0,"Step 4167\r\nStep 4168\r\nStep 4169\r\nStep 4170\r\nStep 4171\r\nStep 4172\r\nStep 4173\r\nStep 4174\r\nStep 4175\r\nStep 4176\r\nStep 4177\r\nStep 4178\r\nStep 4179\r\nStep 4180\r\nStep 4181\r\nStep 4182\r\nStep 4183\r\nStep 4184\r\n",,terminal_output +2817,2671214,"TERMINAL",0,0,"Step 4185\r\nStep 4186\r\nStep 4187\r\nStep 4188\r\nStep 4189\r\n",,terminal_output +2818,2671433,"TERMINAL",0,0,"Step 4190\r\nStep 4191\r\nStep 4192\r\nStep 4193\r\nStep 4194\r\nStep 4195\r\nStep 4196\r\nStep 4197\r\nStep 4198\r\nStep 4199\r\nStep 4200\r\nStep 4201\r\nStep 4202\r\nStep 4203\r\nStep 4204\r\nStep 4205\r\nStep 4206\r\n",,terminal_output +2819,2671538,"TERMINAL",0,0,"Step 4207\r\nStep 4208\r\nStep 4209\r\nStep 4210\r\nStep 4211\r\nStep 4212\r\nStep 4213\r\nStep 4214\r\n",,terminal_output +2820,2671664,"TERMINAL",0,0,"Step 4215\r\nStep 4216\r\nStep 4217\r\nStep 4218\r\nStep 4219\r\nStep 4220\r\nStep 4221\r\nStep 4222\r\nStep 4223\r\nStep 4224\r\nStep 4225\r\nStep 4226\r\nStep 4227\r\n",,terminal_output +2821,2671784,"TERMINAL",0,0,"Step 4228\r\nStep 4229\r\nStep 4230\r\nStep 4231\r\n",,terminal_output +2822,2672072,"TERMINAL",0,0,"Step 4232\r\nStep 4233\r\nStep 4234\r\nStep 4235\r\nStep 4236\r\nStep 4237\r\nStep 4238\r\nStep 4239\r\nStep 4240\r\nStep 4241\r\nStep 4242\r\nStep 4243\r\nStep 4244\r\nStep 4245\r\nStep 4246\r\nStep 4247\r\nStep 4248\r\nStep 4249\r\nStep 4250\r\nStep 4251\r\nStep 4252\r\n",,terminal_output +2823,2672199,"TERMINAL",0,0,"Step 4253\r\nStep 4254\r\nStep 4255\r\nStep 4256\r\nStep 4257\r\nStep 4258\r\nStep 4259\r\nStep 4260\r\n",,terminal_output +2824,2672393,"TERMINAL",0,0,"Step 4261\r\nStep 4262\r\nStep 4263\r\n",,terminal_output +2825,2672455,"TERMINAL",0,0,"Step 4264\r\nStep 4265\r\n",,terminal_output +2826,2672606,"TERMINAL",0,0,"Step 4266\r\nStep 4267\r\n",,terminal_output +2827,2672660,"TERMINAL",0,0,"Step 4268\r\nStep 4269\r\n",,terminal_output +2828,2672778,"TERMINAL",0,0,"Step 4270\r\nStep 4271\r\nStep 4272\r\nStep 4273\r\nStep 4274\r\nStep 4275\r\nStep 4276\r\n",,terminal_output +2829,2672848,"TERMINAL",0,0,"Step 4277\r\nStep 4278\r\nStep 4279\r\nStep 4280\r\nStep 4281\r\nStep 4282\r\n",,terminal_output +2830,2672866,"TERMINAL",0,0,"7\r4253",,terminal_output +2831,2672995,"TERMINAL",0,0,"Step 4283\r\nStep 4284\r\nStep 4285\r\nStep 4286\r\nStep 4287\r\nStep 4288\r\n",,terminal_output +2832,2673092,"TERMINAL",0,0,"Step 4289\r\n",,terminal_output +2833,2673165,"TERMINAL",0,0,"Step 4290\r\nStep 4291\r\nStep 4292\r\n",,terminal_output +2834,2673368,"TERMINAL",0,0,"Step 4293\r\nStep 4294\r\nStep 4295\r\nStep 4296\r\nStep 4297\r\nStep 4298\r\nStep 4299\r\nStep 4300\r\nStep 4301\r\nStep 4302\r\nStep 4303\r\nStep 4304\r\nStep 4305\r\nStep 4306\r\nStep 4307\r\nStep 4308\r\nStep 4309\r\nStep 4310\r\n",,terminal_output +2835,2673423,"TERMINAL",0,0,"Step 4311\r\nStep 4312\r\nStep 
4313\r\n",,terminal_output +2836,2673488,"TERMINAL",0,0,"Step 4314\r\n",,terminal_output +2837,2673551,"TERMINAL",0,0,"Step 4315\r\nStep 4316\r\n",,terminal_output +2838,2673697,"TERMINAL",0,0,"Step 4317\r\nStep 4318\r\nStep 4319\r\nStep 4320\r\nStep 4321\r\nStep 4322\r\nStep 4323\r\n",,terminal_output +2839,2673816,"TERMINAL",0,0,"Step 4324\r\n",,terminal_output +2840,2673904,"TERMINAL",0,0,"Step 4325\r\nStep 4326\r\nStep 4327\r\nStep 4328\r\nStep 4329\r\n",,terminal_output +2841,2674012,"TERMINAL",0,0,"Step 4330\r\nStep 4331\r\nStep 4332\r\nStep 4333\r\nStep 4334\r\nStep 4335\r\nStep 4336\r\nStep 4337\r\nStep 4338\r\n",,terminal_output +2842,2674149,"TERMINAL",0,0,"Step 4339\r\nStep 4340\r\nStep 4341\r\nStep 4342\r\nStep 4343\r\nStep 4344\r\nStep 4345\r\nStep 4346\r\n",,terminal_output +2843,2674208,"TERMINAL",0,0,"Step 4347\r\nStep 4348\r\n",,terminal_output +2844,2674302,"TERMINAL",0,0,"Step 4349\r\nStep 4350\r\nStep 4351\r\nStep 4352\r\n",,terminal_output +2845,2674355,"TERMINAL",0,0,"Step 4353\r\n",,terminal_output +2846,2674409,"TERMINAL",0,0,"Step 4354\r\nStep 4355\r\n",,terminal_output +2847,2674532,"TERMINAL",0,0,"Step 4356\r\nStep 4357\r\nStep 4358\r\n",,terminal_output +2848,2674639,"TERMINAL",0,0,"Step 4359\r\nStep 4360\r\nStep 4361\r\nStep 4362\r\nStep 4363\r\n",,terminal_output +2849,2674723,"TERMINAL",0,0,"Step 4364\r\nStep 4365\r\nStep 4366\r\nStep 4367\r\n",,terminal_output +2850,2674789,"TERMINAL",0,0,"Step 4368\r\nStep 4369\r\nStep 4370\r\nStep 4371\r\n",,terminal_output +2851,2674924,"TERMINAL",0,0,"Step 4372\r\nStep 4373\r\nStep 4374\r\nStep 4375\r\nStep 4376\r\nStep 4377\r\nStep 4378\r\n",,terminal_output +2852,2675056,"TERMINAL",0,0,"9\r4330",,terminal_output +2853,2675122,"TERMINAL",0,0,"Step 4379\r\nStep 4380\r\nStep 4381\r\nStep 4382\r\nStep 4383\r\nStep 4384\r\nStep 4385\r\nStep 4386\r\nStep 4387\r\nStep 4388\r\n",,terminal_output +2854,2675184,"TERMINAL",0,0,"Step 4389\r\n",,terminal_output +2855,2675267,"TERMINAL",0,0,"Step 4390\r\nStep 4391\r\nStep 4392\r\nStep 4393\r\n",,terminal_output +2856,2675345,"TERMINAL",0,0,"Step 4394\r\nStep 4395\r\n",,terminal_output +2857,2675413,"TERMINAL",0,0,"Step 4396\r\nStep 4397\r\nStep 4398\r\nStep 4399\r\nStep 4400\r\n",,terminal_output +2858,2675491,"TERMINAL",0,0,"Step 4401\r\nStep 4402\r\nStep 4403\r\nStep 4404\r\nStep 4405\r\nStep 4406\r\n",,terminal_output +2859,2675604,"TERMINAL",0,0,"Step 4407\r\nStep 4408\r\nStep 4409\r\nStep 4410\r\nStep 4411\r\nStep 4412\r\n",,terminal_output +2860,2675672,"TERMINAL",0,0,"Step 4413\r\nStep 4414\r\nStep 4415\r\nStep 4416\r\n",,terminal_output +2861,2675800,"TERMINAL",0,0,"Step 4417\r\nStep 4418\r\nStep 4419\r\nStep 4420\r\nStep 4421\r\nStep 4422\r\n",,terminal_output +2862,2675859,"TERMINAL",0,0,"Step 4423\r\nStep 4424\r\n",,terminal_output +2863,2675966,"TERMINAL",0,0,"Step 4425\r\nStep 4426\r\nStep 4427\r\nStep 4428\r\n",,terminal_output +2864,2676039,"TERMINAL",0,0,"Step 4429\r\nStep 4430\r\nStep 4431\r\nStep 4432\r\n",,terminal_output +2865,2676104,"TERMINAL",0,0,"Step 4433\r\nStep 4434\r\nStep 4435\r\n",,terminal_output +2866,2676160,"TERMINAL",0,0,"Step 4436\r\n",,terminal_output +2867,2676266,"TERMINAL",0,0,"Step 4437\r\nStep 4438\r\nStep 4439\r\nStep 4440\r\nStep 4441\r\n",,terminal_output +2868,2676402,"TERMINAL",0,0,"Step 4442\r\nStep 4443\r\nStep 4444\r\nStep 4445\r\nStep 4446\r\n",,terminal_output +2869,2676544,"TERMINAL",0,0,"Step 4447\r\nStep 4448\r\nStep 4449\r\nStep 4450\r\nStep 4451\r\nStep 4452\r\n",,terminal_output +2870,2676609,"TERMINAL",0,0,"Step 4453\r\nStep 
4454\r\nStep 4455\r\nStep 4456\r\n",,terminal_output +2871,2676744,"TERMINAL",0,0,"Step 4457\r\nStep 4458\r\nStep 4459\r\nStep 4460\r\nStep 4461\r\nStep 4462\r\nStep 4463\r\nStep 4464\r\nStep 4465\r\n",,terminal_output +2872,2676809,"TERMINAL",0,0,"Step 4466\r\nStep 4467\r\nStep 4468\r\nStep 4469\r\nStep 4470\r\n",,terminal_output +2873,2676898,"TERMINAL",0,0,"Step 4471\r\nStep 4472\r\nStep 4473\r\nStep 4474\r\nStep 4475\r\nStep 4476\r\nStep 4477\r\nStep 4478\r\n",,terminal_output +2874,2676995,"TERMINAL",0,0,"Step 4479\r\nStep 4480\r\nStep 4481\r\n",,terminal_output +2875,2677047,"TERMINAL",0,0,"Step 4482\r\nStep 4483\r\n",,terminal_output +2876,2677155,"TERMINAL",0,0,"Step 4484\r\nStep 4485\r\nStep 4486\r\nStep 4487\r\nStep 4488\r\nStep 4489\r\nStep 4490\r\n",,terminal_output +2877,2677256,"TERMINAL",0,0,"Step 4491\r\n",,terminal_output +2878,2677319,"TERMINAL",0,0,"Step 4492\r\n",,terminal_output +2879,2677430,"TERMINAL",0,0,"21\r4435",,terminal_output +2880,2677527,"TERMINAL",0,0,"Step 4493\r\nStep 4494\r\nStep 4495\r\nStep 4496\r\nStep 4497\r\nStep 4498\r\nStep 4499\r\nStep 4500\r\nStep 4501\r\nStep 4502\r\nStep 4503\r\nStep 4504\r\nStep 4505\r\nStep 4506\r\nStep 4507\r\nStep 4508\r\nStep 4509\r\nStep 4510\r\nStep 4511\r\nStep 4512\r\nStep 4513\r\nStep 4514\r\n",,terminal_output +2881,2677587,"TERMINAL",0,0,"Step 4515\r\n",,terminal_output +2882,2677713,"TERMINAL",0,0,"Step 4516\r\nStep 4517\r\nStep 4518\r\nStep 4519\r\nStep 4520\r\nStep 4521\r\nStep 4522\r\nStep 4523\r\n",,terminal_output +2883,2677963,"TERMINAL",0,0,"Step 4524\r\nStep 4525\r\nStep 4526\r\nStep 4527\r\nStep 4528\r\nStep 4529\r\nStep 4530\r\nStep 4531\r\nStep 4532\r\nStep 4533\r\nStep 4534\r\nStep 4535\r\nStep 4536\r\nStep 4537\r\nStep 4538\r\nStep 4539\r\nStep 4540\r\nStep 4541\r\nStep 4542\r\nStep 4543\r\nStep 4544\r\nStep 4545\r\nStep 4546\r\n",,terminal_output +2884,2678125,"TERMINAL",0,0,"Step 4547\r\nStep 4548\r\nStep 4549\r\nStep 4550\r\nStep 4551\r\nStep 4552\r\nStep 4553\r\nStep 4554\r\nStep 4555\r\nStep 4556\r\nStep 4557\r\n",,terminal_output +2885,2678192,"TERMINAL",0,0,"Step 4558\r\n",,terminal_output +2886,2678251,"TERMINAL",0,0,"Step 4559\r\nStep 4560\r\nStep 4561\r\n",,terminal_output +2887,2678316,"TERMINAL",0,0,"Step 4562\r\nStep 4563\r\n",,terminal_output +2888,2678372,"TERMINAL",0,0,"Step 4564\r\n",,terminal_output +2889,2678430,"TERMINAL",0,0,"Step 4565\r\nStep 4566\r\n",,terminal_output +2890,2678523,"TERMINAL",0,0,"Step 4567\r\nStep 4568\r\nStep 4569\r\nStep 4570\r\n",,terminal_output +2891,2678570,"TERMINAL",0,0,"Step 4571\r\nStep 4572\r\nStep 4573\r\n",,terminal_output +2892,2678658,"TERMINAL",0,0,"Step 4574\r\nStep 4575\r\nStep 4576\r\nStep 4577\r\nStep 4578\r\n",,terminal_output +2893,2678891,"TERMINAL",0,0,"Step 4579\r\nStep 4580\r\nStep 4581\r\nStep 4582\r\nStep 4583\r\nStep 4584\r\nStep 4585\r\nStep 4586\r\nStep 4587\r\nStep 4588\r\n",,terminal_output +2894,2679000,"TERMINAL",0,0,"Step 4589\r\nStep 4590\r\nStep 4591\r\n",,terminal_output +2895,2679189,"TERMINAL",0,0,"Step 4592\r\nStep 4593\r\nStep 4594\r\nStep 4595\r\nStep 4596\r\nStep 4597\r\nStep 4598\r\nStep 4599\r\nStep 4600\r\nStep 4601\r\n",,terminal_output +2896,2679295,"TERMINAL",0,0,"3\r4567",,terminal_output +2897,2679425,"TERMINAL",0,0,"Step 4602\r\nStep 4603\r\nStep 4604\r\nStep 4605\r\nStep 4606\r\nStep 4607\r\nStep 4608\r\nStep 4609\r\nStep 4610\r\nStep 4611\r\n",,terminal_output +2898,2679485,"TERMINAL",0,0,"Step 4612\r\nStep 4613\r\n",,terminal_output +2899,2679639,"TERMINAL",0,0,"Step 4614\r\nStep 4615\r\nStep 4616\r\nStep 
4617\r\nStep 4618\r\nStep 4619\r\n",,terminal_output +2900,2679744,"TERMINAL",0,0,"Step 4620\r\nStep 4621\r\nStep 4622\r\n",,terminal_output +2901,2679816,"TERMINAL",0,0,"Step 4623\r\nStep 4624\r\nStep 4625\r\nStep 4626\r\nStep 4627\r\nStep 4628\r\n",,terminal_output +2902,2679872,"TERMINAL",0,0,"Step 4629\r\nStep 4630\r\n",,terminal_output +2903,2679941,"TERMINAL",0,0,"Step 4631\r\nStep 4632\r\n",,terminal_output +2904,2680047,"TERMINAL",0,0,"Step 4633\r\nStep 4634\r\nStep 4635\r\nStep 4636\r\n",,terminal_output +2905,2680098,"TERMINAL",0,0,"Step 4637\r\n",,terminal_output +2906,2680205,"TERMINAL",0,0,"Step 4638\r\nStep 4639\r\nStep 4640\r\nStep 4641\r\nStep 4642\r\nStep 4643\r\nStep 4644\r\nStep 4645\r\n",,terminal_output +2907,2680285,"TERMINAL",0,0,"Step 4646\r\nStep 4647\r\nStep 4648\r\n",,terminal_output +2908,2680443,"TERMINAL",0,0,"Step 4649\r\nStep 4650\r\nStep 4651\r\nStep 4652\r\nStep 4653\r\nStep 4654\r\nStep 4655\r\n",,terminal_output +2909,2680676,"TERMINAL",0,0,"Step 4656\r\nStep 4657\r\nStep 4658\r\nStep 4659\r\nStep 4660\r\nStep 4661\r\nStep 4662\r\nStep 4663\r\nStep 4664\r\nStep 4665\r\nStep 4666\r\nStep 4667\r\n",,terminal_output +2910,2680843,"TERMINAL",0,0,"Step 4668\r\nStep 4669\r\nStep 4670\r\nStep 4671\r\nStep 4672\r\nStep 4673\r\n",,terminal_output +2911,2680903,"TERMINAL",0,0,"Step 4674\r\nStep 4675\r\n",,terminal_output +2912,2680990,"TERMINAL",0,0,"5\r4649",,terminal_output +2913,2681047,"TERMINAL",0,0,"Step 4676\r\nStep 4677\r\nStep 4678\r\nStep 4679\r\nStep 4680\r\n",,terminal_output +2914,2681126,"TERMINAL",0,0,"Step 4681\r\nStep 4682\r\nStep 4683\r\nStep 4684\r\nStep 4685\r\n",,terminal_output +2915,2681240,"TERMINAL",0,0,"Step 4686\r\nStep 4687\r\nStep 4688\r\nStep 4689\r\nStep 4690\r\nStep 4691\r\nStep 4692\r\nStep 4693\r\nStep 4694\r\nStep 4695\r\nStep 4696\r\n",,terminal_output +2916,2681371,"TERMINAL",0,0,"Step 4697\r\nStep 4698\r\nStep 4699\r\nStep 4700\r\nStep 4701\r\nStep 4702\r\nStep 4703\r\nStep 4704\r\nStep 4705\r\nStep 4706\r\nStep 4707\r\nStep 4708\r\nStep 4709\r\nStep 4710\r\nStep 4711\r\n",,terminal_output +2917,2681534,"TERMINAL",0,0,"Step 4712\r\nStep 4713\r\nStep 4714\r\nStep 4715\r\nStep 4716\r\nStep 4717\r\nStep 4718\r\nStep 4719\r\nStep 4720\r\nStep 4721\r\nStep 4722\r\nStep 4723\r\n",,terminal_output +2918,2681726,"TERMINAL",0,0,"Step 4724\r\nStep 4725\r\nStep 4726\r\nStep 4727\r\nStep 4728\r\nStep 4729\r\nStep 4730\r\nStep 4731\r\nStep 4732\r\nStep 4733\r\nStep 4734\r\nStep 4735\r\nStep 4736\r\n",,terminal_output +2919,2681839,"TERMINAL",0,0,"Step 4737\r\nStep 4738\r\nStep 4739\r\nStep 4740\r\nStep 4741\r\n",,terminal_output +2920,2681900,"TERMINAL",0,0,"Step 4742\r\n",,terminal_output +2921,2681967,"TERMINAL",0,0,"Step 4743\r\nStep 4744\r\n",,terminal_output +2922,2682267,"TERMINAL",0,0,"Step 4745\r\nStep 4746\r\nStep 4747\r\nStep 4748\r\nStep 4749\r\nStep 4750\r\nStep 4751\r\nStep 4752\r\nStep 4753\r\nStep 4754\r\nStep 4755\r\nStep 4756\r\nStep 4757\r\nStep 4758\r\nStep 4759\r\nStep 4760\r\nStep 4761\r\nStep 4762\r\nStep 4763\r\nStep 4764\r\n",,terminal_output +2923,2682379,"TERMINAL",0,0,"Step 4765\r\nStep 4766\r\nStep 4767\r\n",,terminal_output +2924,2682449,"TERMINAL",0,0,"Step 4768\r\nStep 4769\r\nStep 4770\r\nStep 4771\r\n",,terminal_output +2925,2682556,"TERMINAL",0,0,"Step 4772\r\n",,terminal_output +2926,2682664,"TERMINAL",0,0,"Step 4773\r\n",,terminal_output +2927,2682777,"TERMINAL",0,0,"Step 4774\r\nStep 4775\r\n",,terminal_output +2928,2682875,"TERMINAL",0,0,"Step 4776\r\n",,terminal_output 
+2929,2683020,"TERMINAL",0,0,"Step 4777\r\n",,terminal_output +2930,2683159,"TERMINAL",0,0,"Step 4778\r\nStep 4779\r\nStep 4780\r\nStep 4781\r\nStep 4782\r\nStep 4783\r\nStep 4784\r\nStep 4785\r\nStep 4786\r\n",,terminal_output +2931,2683243,"TERMINAL",0,0,"Step 4787\r\nStep 4788\r\nStep 4789\r\nStep 4790\r\nStep 4791\r\n",,terminal_output +2932,2683373,"TERMINAL",0,0,"7\r4746",,terminal_output +2933,2683405,"TERMINAL",0,0,"Step 4792\r\nStep 4793\r\nStep 4794\r\nStep 4795\r\nStep 4796\r\n",,terminal_output +2934,2683617,"TERMINAL",0,0,"Step 4797\r\nStep 4798\r\nStep 4799\r\nStep 4800\r\nStep 4801\r\nStep 4802\r\nStep 4803\r\nStep 4804\r\nStep 4805\r\nStep 4806\r\nStep 4807\r\n",,terminal_output +2935,2683690,"TERMINAL",0,0,"Step 4808\r\nStep 4809\r\n",,terminal_output +2936,2683756,"TERMINAL",0,0,"Step 4810\r\nStep 4811\r\nStep 4812\r\nStep 4813\r\nStep 4814\r\nStep 4815\r\n",,terminal_output +2937,2683941,"TERMINAL",0,0,"Step 4816\r\nStep 4817\r\nStep 4818\r\nStep 4819\r\nStep 4820\r\nStep 4821\r\nStep 4822\r\nStep 4823\r\nStep 4824\r\nStep 4825\r\n",,terminal_output +2938,2684052,"TERMINAL",0,0,"Step 4826\r\nStep 4827\r\nStep 4828\r\n",,terminal_output +2939,2684115,"TERMINAL",0,0,"Step 4829\r\nStep 4830\r\nStep 4831\r\n",,terminal_output +2940,2684236,"TERMINAL",0,0,"Step 4832\r\nStep 4833\r\nStep 4834\r\nStep 4835\r\nStep 4836\r\nStep 4837\r\n",,terminal_output +2941,2684293,"TERMINAL",0,0,"Step 4838\r\n",,terminal_output +2942,2684358,"TERMINAL",0,0,"Step 4839\r\n",,terminal_output +2943,2684498,"TERMINAL",0,0,"Step 4840\r\nStep 4841\r\nStep 4842\r\nStep 4843\r\nStep 4844\r\nStep 4845\r\nStep 4846\r\nStep 4847\r\nStep 4848\r\nStep 4849\r\nStep 4850\r\nStep 4851\r\nStep 4852\r\nStep 4853\r\n",,terminal_output +2944,2684617,"TERMINAL",0,0,"Step 4854\r\nStep 4855\r\nStep 4856\r\nStep 4857\r\nStep 4858\r\nStep 4859\r\nStep 4860\r\nStep 4861\r\n",,terminal_output +2945,2684669,"TERMINAL",0,0,"Step 4862\r\n",,terminal_output +2946,2684748,"TERMINAL",0,0,"Step 4863\r\nStep 4864\r\nStep 4865\r\n",,terminal_output +2947,2684849,"TERMINAL",0,0,"Step 4866\r\nStep 4867\r\nStep 4868\r\nStep 4869\r\nStep 4870\r\nStep 4871\r\nStep 4872\r\nStep 4873\r\nStep 4874\r\nStep 4875\r\nStep 4876\r\n",,terminal_output +2948,2684946,"TERMINAL",0,0,"Step 4877\r\nStep 4878\r\nStep 4879\r\nStep 4880\r\nStep 4881\r\nStep 4882\r\n",,terminal_output +2949,2685002,"TERMINAL",0,0,"Step 4883\r\n",,terminal_output +2950,2685121,"TERMINAL",0,0,"Step 4884\r\nStep 4885\r\nStep 4886\r\nStep 4887\r\nStep 4888\r\nStep 4889\r\n",,terminal_output +2951,2685231,"TERMINAL",0,0,"Step 4890\r\n",,terminal_output +2952,2685298,"TERMINAL",0,0,"Step 4891\r\nStep 4892\r\nStep 4893\r\nStep 4894\r\nStep 4895\r\nStep 4896\r\nStep 4897\r\nStep 4898\r\n",,terminal_output +2953,2685395,"TERMINAL",0,0,"Step 4899\r\nStep 4900\r\nStep 4901\r\n",,terminal_output +2954,2685812,"TERMINAL",0,0,"9\r4840",,terminal_output +2955,2685812,"TERMINAL",0,0,"Step 4902\r\nStep 4903\r\nStep 4904\r\nStep 4905\r\nStep 4906\r\nStep 4907\r\nStep 4908\r\nStep 4909\r\nStep 4910\r\nStep 4911\r\nStep 4912\r\nStep 4913\r\nStep 4914\r\nStep 4915\r\nStep 4916\r\nStep 4917\r\nStep 4918\r\nStep 4919\r\nStep 4920\r\nStep 4921\r\nStep 4922\r\nStep 4923\r\nStep 4924\r\n",,terminal_output +2956,2685907,"TERMINAL",0,0,"Step 4925\r\nStep 4926\r\nStep 4927\r\nStep 4928\r\nStep 4929\r\nStep 4930\r\nStep 4931\r\nStep 4932\r\n",,terminal_output +2957,2685984,"TERMINAL",0,0,"Step 4933\r\nStep 4934\r\nStep 4935\r\nStep 4936\r\nStep 4937\r\n",,terminal_output 
+2958,2686158,"TERMINAL",0,0,"Step 4938\r\nStep 4939\r\nStep 4940\r\nStep 4941\r\nStep 4942\r\nStep 4943\r\nStep 4944\r\nStep 4945\r\n",,terminal_output +2959,2686221,"TERMINAL",0,0,"Step 4946\r\n",,terminal_output +2960,2686403,"TERMINAL",0,0,"Step 4947\r\nStep 4948\r\nStep 4949\r\nStep 4950\r\nStep 4951\r\nStep 4952\r\nStep 4953\r\nStep 4954\r\nStep 4955\r\nStep 4956\r\nStep 4957\r\n",,terminal_output +2961,2686747,"TERMINAL",0,0,"Step 4958\r\nStep 4959\r\nStep 4960\r\nStep 4961\r\nStep 4962\r\nStep 4963\r\nStep 4964\r\nStep 4965\r\nStep 4966\r\nStep 4967\r\nStep 4968\r\n",,terminal_output +2962,2686808,"TERMINAL",0,0,"Step 4969\r\nStep 4970\r\nStep 4971\r\nStep 4972\r\nStep 4973\r\n",,terminal_output +2963,2686932,"TERMINAL",0,0,"Step 4974\r\nStep 4975\r\nStep 4976\r\nStep 4977\r\nStep 4978\r\nStep 4979\r\nStep 4980\r\nStep 4981\r\nStep 4982\r\n",,terminal_output +2964,2687027,"TERMINAL",0,0,"Step 4983\r\nStep 4984\r\nStep 4985\r\nStep 4986\r\nStep 4987\r\nStep 4988\r\nStep 4989\r\nStep 4990\r\nStep 4991\r\nStep 4992\r\nStep 4993\r\nStep 4994\r\nStep 4995\r\nStep 4996\r\n",,terminal_output +2965,2687107,"TERMINAL",0,0,"Step 4997\r\nStep 4998\r\nStep 4999\r\nStep 5000\r\nStep 5001\r\n",,terminal_output +2966,2687221,"TERMINAL",0,0,"Step 5002\r\nStep 5003\r\nStep 5004\r\n",,terminal_output +2967,2687341,"TERMINAL",0,0,"Step 5005\r\n",,terminal_output +2968,2687401,"TERMINAL",0,0,"Step 5006\r\nStep 5007\r\nStep 5008\r\nStep 5009\r\nStep 5010\r\nStep 5011\r\nStep 5012\r\nStep 5013\r\n",,terminal_output +2969,2687566,"TERMINAL",0,0,"Step 5014\r\n",,terminal_output +2970,2687771,"TERMINAL",0,0,"Step 5015\r\nStep 5016\r\nStep 5017\r\nStep 5018\r\nStep 5019\r\nStep 5020\r\nStep 5021\r\nStep 5022\r\nStep 5023\r\nStep 5024\r\n",,terminal_output +2971,2687832,"TERMINAL",0,0,"Step 5025\r\nStep 5026\r\nStep 5027\r\nStep 5028\r\nStep 5029\r\nStep 5030\r\n",,terminal_output +2972,2687874,"TERMINAL",0,0,"31\r4962",,terminal_output +2973,2687931,"TERMINAL",0,0,"Step 5031\r\nStep 5032\r\nStep 5033\r\nStep 5034\r\n",,terminal_output +2974,2687983,"TERMINAL",0,0,"Step 5035\r\n",,terminal_output +2975,2688050,"TERMINAL",0,0,"Step 5036\r\nStep 5037\r\nStep 5038\r\n",,terminal_output +2976,2688112,"TERMINAL",0,0,"Step 5039\r\n",,terminal_output +2977,2688232,"TERMINAL",0,0,"Step 5040\r\nStep 5041\r\nStep 5042\r\nStep 5043\r\nStep 5044\r\n",,terminal_output +2978,2688294,"TERMINAL",0,0,"Step 5045\r\nStep 5046\r\n",,terminal_output +2979,2688405,"TERMINAL",0,0,"Step 5047\r\nStep 5048\r\nStep 5049\r\nStep 5050\r\n",,terminal_output +2980,2688469,"TERMINAL",0,0,"Step 5051\r\nStep 5052\r\n",,terminal_output +2981,2688595,"TERMINAL",0,0,"Step 5053\r\nStep 5054\r\nStep 5055\r\nStep 5056\r\nStep 5057\r\nStep 5058\r\n",,terminal_output +2982,2688711,"TERMINAL",0,0,"Step 5059\r\nStep 5060\r\nStep 5061\r\n",,terminal_output +2983,2688771,"TERMINAL",0,0,"Step 5062\r\nStep 5063\r\n",,terminal_output +2984,2688836,"TERMINAL",0,0,"Step 5064\r\nStep 5065\r\nStep 5066\r\nStep 5067\r\nStep 5068\r\n",,terminal_output +2985,2688983,"TERMINAL",0,0,"Step 5069\r\nStep 5070\r\nStep 5071\r\n",,terminal_output +2986,2689045,"TERMINAL",0,0,"Step 5072\r\nStep 5073\r\nStep 5074\r\n",,terminal_output +2987,2689217,"TERMINAL",0,0,"Step 5075\r\nStep 5076\r\nStep 5077\r\nStep 5078\r\nStep 5079\r\nStep 5080\r\nStep 5081\r\nStep 5082\r\n",,terminal_output +2988,2689281,"TERMINAL",0,0,"Step 5083\r\nStep 5084\r\nStep 5085\r\n",,terminal_output +2989,2689409,"TERMINAL",0,0,"Step 5086\r\nStep 5087\r\nStep 5088\r\nStep 5089\r\nStep 5090\r\nStep 
5091\r\nStep 5092\r\nStep 5093\r\n",,terminal_output +2990,2689469,"TERMINAL",0,0,"Step 5094\r\nStep 5095\r\n",,terminal_output +2991,2689635,"TERMINAL",0,0,"Step 5096\r\nStep 5097\r\nStep 5098\r\nStep 5099\r\nStep 5100\r\nStep 5101\r\nStep 5102\r\nStep 5103\r\n",,terminal_output +2992,2689731,"TERMINAL",0,0,"Step 5104\r\nStep 5105\r\nStep 5106\r\nStep 5107\r\n",,terminal_output +2993,2689793,"TERMINAL",0,0,"Step 5108\r\nStep 5109\r\n",,terminal_output +2994,2689925,"TERMINAL",0,0,"4\r5071",,terminal_output +2995,2689961,"TERMINAL",0,0,"Step 5110\r\nStep 5111\r\nStep 5112\r\nStep 5113\r\nStep 5114\r\nStep 5115\r\nStep 5116\r\nStep 5117\r\nStep 5118\r\n",,terminal_output +2996,2690026,"TERMINAL",0,0,"Step 5119\r\nStep 5120\r\n",,terminal_output +2997,2690093,"TERMINAL",0,0,"Step 5121\r\nStep 5122\r\nStep 5123\r\nStep 5124\r\nStep 5125\r\nStep 5126\r\nStep 5127\r\n",,terminal_output +2998,2690156,"TERMINAL",0,0,"Step 5128\r\n",,terminal_output +2999,2690363,"TERMINAL",0,0,"Step 5129\r\nStep 5130\r\nStep 5131\r\nStep 5132\r\nStep 5133\r\nStep 5134\r\nStep 5135\r\nStep 5136\r\nStep 5137\r\nStep 5138\r\nStep 5139\r\nStep 5140\r\nStep 5141\r\nStep 5142\r\nStep 5143\r\nStep 5144\r\nStep 5145\r\nStep 5146\r\nStep 5147\r\nStep 5148\r\nStep 5149\r\nStep 5150\r\nStep 5151\r\n",,terminal_output +3000,2690470,"TERMINAL",0,0,"Step 5152\r\nStep 5153\r\nStep 5154\r\n",,terminal_output +3001,2690536,"TERMINAL",0,0,"Step 5155\r\n",,terminal_output +3002,2690716,"TERMINAL",0,0,"Step 5156\r\nStep 5157\r\nStep 5158\r\nStep 5159\r\nStep 5160\r\nStep 5161\r\nStep 5162\r\nStep 5163\r\n",,terminal_output +3003,2690807,"TERMINAL",0,0,"Step 5164\r\n",,terminal_output +3004,2690953,"TERMINAL",0,0,"Step 5165\r\nStep 5166\r\nStep 5167\r\nStep 5168\r\nStep 5169\r\nStep 5170\r\nStep 5171\r\nStep 5172\r\n",,terminal_output +3005,2691060,"TERMINAL",0,0,"Step 5173\r\nStep 5174\r\nStep 5175\r\nStep 5176\r\nStep 5177\r\nStep 5178\r\nStep 5179\r\nStep 5180\r\n",,terminal_output +3006,2691315,"TERMINAL",0,0,"Step 5181\r\nStep 5182\r\nStep 5183\r\nStep 5184\r\nStep 5185\r\nStep 5186\r\nStep 5187\r\nStep 5188\r\nStep 5189\r\nStep 5190\r\nStep 5191\r\nStep 5192\r\nStep 5193\r\nStep 5194\r\nStep 5195\r\nStep 5196\r\nStep 5197\r\nStep 5198\r\nStep 5199\r\nStep 5200\r\nStep 5201\r\nStep 5202\r\nStep 5203\r\n",,terminal_output +3007,2691379,"TERMINAL",0,0,"Step 5204\r\nStep 5205\r\nStep 5206\r\n",,terminal_output +3008,2691450,"TERMINAL",0,0,"Step 5207\r\nStep 5208\r\nStep 5209\r\n",,terminal_output +3009,2691512,"TERMINAL",0,0,"Step 5210\r\nStep 5211\r\nStep 5212\r\n",,terminal_output +3010,2691626,"TERMINAL",0,0,"Step 5213\r\nStep 5214\r\n",,terminal_output +3011,2691693,"TERMINAL",0,0,"Step 5215\r\n",,terminal_output +3012,2691839,"TERMINAL",0,0,"Step 5216\r\nStep 5217\r\nStep 5218\r\nStep 5219\r\nStep 5220\r\n",,terminal_output +3013,2691959,"TERMINAL",0,0,"6\r5174",,terminal_output +3014,2691992,"TERMINAL",0,0,"Step 5221\r\nStep 5222\r\nStep 5223\r\nStep 5224\r\nStep 5225\r\nStep 5226\r\nStep 5227\r\n",,terminal_output +3015,2692046,"TERMINAL",0,0,"Step 5228\r\n",,terminal_output +3016,2692099,"TERMINAL",0,0,"Step 5229\r\n",,terminal_output +3017,2692205,"TERMINAL",0,0,"Step 5230\r\nStep 5231\r\nStep 5232\r\n",,terminal_output +3018,2692258,"TERMINAL",0,0,"Step 5233\r\n",,terminal_output +3019,2692322,"TERMINAL",0,0,"Step 5234\r\n",,terminal_output +3020,2692386,"TERMINAL",0,0,"Step 5235\r\nStep 5236\r\nStep 5237\r\nStep 5238\r\nStep 5239\r\n",,terminal_output +3021,2692481,"TERMINAL",0,0,"Step 5240\r\n",,terminal_output 
+3022,2692560,"TERMINAL",0,0,"Step 5241\r\nStep 5242\r\nStep 5243\r\nStep 5244\r\nStep 5245\r\nStep 5246\r\n",,terminal_output +3023,2692622,"TERMINAL",0,0,"Step 5247\r\n",,terminal_output +3024,2692818,"TERMINAL",0,0,"Step 5248\r\nStep 5249\r\nStep 5250\r\nStep 5251\r\nStep 5252\r\nStep 5253\r\nStep 5254\r\nStep 5255\r\nStep 5256\r\nStep 5257\r\nStep 5258\r\nStep 5259\r\nStep 5260\r\nStep 5261\r\nStep 5262\r\nStep 5263\r\nStep 5264\r\nStep 5265\r\nStep 5266\r\nStep 5267\r\n",,terminal_output +3025,2692949,"TERMINAL",0,0,"Step 5268\r\nStep 5269\r\nStep 5270\r\nStep 5271\r\nStep 5272\r\nStep 5273\r\nStep 5274\r\nStep 5275\r\nStep 5276\r\nStep 5277\r\n",,terminal_output +3026,2693125,"TERMINAL",0,0,"Step 5278\r\nStep 5279\r\nStep 5280\r\nStep 5281\r\nStep 5282\r\nStep 5283\r\nStep 5284\r\nStep 5285\r\nStep 5286\r\nStep 5287\r\nStep 5288\r\nStep 5289\r\nStep 5290\r\nStep 5291\r\n",,terminal_output +3027,2693256,"TERMINAL",0,0,"Step 5292\r\nStep 5293\r\n",,terminal_output +3028,2693384,"TERMINAL",0,0,"Step 5294\r\nStep 5295\r\nStep 5296\r\nStep 5297\r\nStep 5298\r\n",,terminal_output +3029,2693450,"TERMINAL",0,0,"Step 5299\r\nStep 5300\r\nStep 5301\r\nStep 5302\r\n",,terminal_output +3030,2693522,"TERMINAL",0,0,"Step 5303\r\n",,terminal_output +3031,2693636,"TERMINAL",0,0,"Step 5304\r\nStep 5305\r\nStep 5306\r\nStep 5307\r\n",,terminal_output +3032,2693696,"TERMINAL",0,0,"Step 5308\r\n",,terminal_output +3033,2693833,"TERMINAL",0,0,"Step 5309\r\nStep 5310\r\nStep 5311\r\nStep 5312\r\nStep 5313\r\n",,terminal_output +3034,2693934,"TERMINAL",0,0,"Step 5314\r\nStep 5315\r\nStep 5316\r\nStep 5317\r\nStep 5318\r\nStep 5319\r\nStep 5320\r\n",,terminal_output +3035,2694199,"train_tokenizer.py",0,0,"",python,tab +3036,2694397,"TERMINAL",0,0,"8\r528",,terminal_output +3037,2694448,"TERMINAL",0,0,"Step 5321\r\nStep 5322\r\nStep 5323\r\nStep 5324\r\nStep 5325\r\nStep 5326\r\nStep 5327\r\nStep 5328\r\nStep 5329\r\nStep 5330\r\nStep 5331\r\nStep 5332\r\nStep 5333\r\nStep 5334\r\nStep 5335\r\n",,terminal_output +3038,2694643,"TERMINAL",0,0,"Step 5336\r\nStep 5337\r\nStep 5338\r\nStep 5339\r\n",,terminal_output +3039,2694719,"TERMINAL",0,0,"Step 5340\r\nStep 5341\r\nStep 5342\r\nStep 5343\r\nStep 5344\r\nStep 5345\r\nStep 5346\r\n",,terminal_output +3040,2694856,"TERMINAL",0,0,"Step 5347\r\nStep 5348\r\nStep 5349\r\nStep 5350\r\nStep 5351\r\nStep 5352\r\nStep 5353\r\n",,terminal_output +3041,2695091,"TERMINAL",0,0,"Step 5354\r\nStep 5355\r\nStep 5356\r\nStep 5357\r\nStep 5358\r\nStep 5359\r\nStep 5360\r\nStep 5361\r\nStep 5362\r\nStep 5363\r\nStep 5364\r\nStep 5365\r\nStep 5366\r\n",,terminal_output +3042,2695238,"TERMINAL",0,0,"Step 5367\r\nStep 5368\r\nStep 5369\r\nStep 5370\r\nStep 5371\r\nStep 5372\r\n",,terminal_output +3043,2695436,"TERMINAL",0,0,"Step 5373\r\nStep 5374\r\nStep 5375\r\nStep 5376\r\nStep 5377\r\nStep 5378\r\nStep 5379\r\nStep 5380\r\nStep 5381\r\n",,terminal_output +3044,2695613,"TERMINAL",0,0,"Step 5382\r\nStep 5383\r\nStep 5384\r\nStep 5385\r\nStep 5386\r\nStep 5387\r\nStep 5388\r\nStep 5389\r\n",,terminal_output +3045,2695743,"TERMINAL",0,0,"Step 5390\r\nStep 5391\r\nStep 5392\r\nStep 5393\r\nStep 5394\r\nStep 5395\r\n",,terminal_output +3046,2696065,"TERMINAL",0,0,"Step 5396\r\nStep 5397\r\nStep 5398\r\nStep 5399\r\nStep 5400\r\nStep 5401\r\nStep 5402\r\nStep 5403\r\nStep 5404\r\nStep 5405\r\nStep 5406\r\nStep 5407\r\nStep 5408\r\nStep 5409\r\nStep 5410\r\nStep 5411\r\nStep 5412\r\nStep 5413\r\nStep 5414\r\nStep 5415\r\nStep 5416\r\nStep 5417\r\nStep 5418\r\nStep 5419\r\nStep 
5420\r\nStep 5421\r\nStep 5422\r\n",,terminal_output +3047,2696177,"TERMINAL",0,0,"Step 5423\r\n",,terminal_output +3048,2696274,"TERMINAL",0,0,"Step 5424\r\nStep 5425\r\nStep 5426\r\nStep 5427\r\nStep 5428\r\nStep 5429\r\n",,terminal_output +3049,2696365,"TERMINAL",0,0,"Step 5430\r\nStep 5431\r\nStep 5432\r\n",,terminal_output +3050,2696527,"TERMINAL",0,0,"40\r5380",,terminal_output +3051,2696527,"TERMINAL",0,0,"Step 5433\r\nStep 5434\r\nStep 5435\r\nStep 5436\r\nStep 5437\r\nStep 5438\r\nStep 5439\r\n",,terminal_output +3052,2696610,"TERMINAL",0,0,"Step 5440\r\nStep 5441\r\n",,terminal_output +3053,2696731,"TERMINAL",0,0,"Step 5442\r\nStep 5443\r\nStep 5444\r\nStep 5445\r\nStep 5446\r\nStep 5447\r\n",,terminal_output +3054,2696787,"TERMINAL",0,0,"Step 5448\r\nStep 5449\r\n",,terminal_output +3055,2696895,"TERMINAL",0,0,"Step 5450\r\nStep 5451\r\nStep 5452\r\nStep 5453\r\n",,terminal_output +3056,2697135,"TERMINAL",0,0,"Step 5454\r\nStep 5455\r\nStep 5456\r\nStep 5457\r\nStep 5458\r\nStep 5459\r\nStep 5460\r\nStep 5461\r\nStep 5462\r\nStep 5463\r\n",,terminal_output +3057,2697307,"TERMINAL",0,0,"Step 5464\r\nStep 5465\r\n",,terminal_output +3058,2697307,"TERMINAL",0,0,"Step 5466\r\n",,terminal_output +3059,2697330,"TERMINAL",0,0,"Step 5467\r\nStep 5468\r\nStep 5469\r\nStep 5470\r\nStep 5471\r\n",,terminal_output +3060,2697440,"TERMINAL",0,0,"Step 5472\r\nStep 5473\r\nStep 5474\r\n",,terminal_output +3061,2697503,"TERMINAL",0,0,"Step 5475\r\nStep 5476\r\nStep 5477\r\nStep 5478\r\n",,terminal_output +3062,2697682,"TERMINAL",0,0,"Step 5479\r\nStep 5480\r\nStep 5481\r\nStep 5482\r\nStep 5483\r\nStep 5484\r\nStep 5485\r\nStep 5486\r\nStep 5487\r\nStep 5488\r\nStep 5489\r\nStep 5490\r\nStep 5491\r\nStep 5492\r\n",,terminal_output +3063,2697747,"TERMINAL",0,0,"Step 5493\r\n",,terminal_output +3064,2697872,"TERMINAL",0,0,"Step 5494\r\nStep 5495\r\nStep 5496\r\nStep 5497\r\nStep 5498\r\nStep 5499\r\n",,terminal_output +3065,2697945,"TERMINAL",0,0,"Step 5500\r\nStep 5501\r\nStep 5502\r\nStep 5503\r\n",,terminal_output +3066,2698117,"train_tokenizer.py",7278,0,"",python,selection_mouse +3067,2698118,"TERMINAL",0,0,"Step 5504\r\nStep 5505\r\nStep 5506\r\nStep 5507\r\nStep 5508\r\nStep 5509\r\nStep 5510\r\nStep 5511\r\nStep 5512\r\nStep 5513\r\nStep 5514\r\n",,terminal_output +3068,2698181,"TERMINAL",0,0,"Step 5515\r\nStep 5516\r\n",,terminal_output +3069,2698288,"TERMINAL",0,0,"Step 5517\r\nStep 5518\r\nStep 5519\r\nStep 5520\r\nStep 5521\r\nStep 5522\r\nStep 5523\r\n",,terminal_output +3070,2698402,"TERMINAL",0,0,"Step 5524\r\nStep 5525\r\nStep 5526\r\nStep 5527\r\nStep 5528\r\nStep 5529\r\n",,terminal_output +3071,2698469,"TERMINAL",0,0,"Step 5530\r\nStep 5531\r\nStep 5532\r\nStep 5533\r\n",,terminal_output +3072,2698533,"TERMINAL",0,0,"2\r5484",,terminal_output +3073,2698600,"TERMINAL",0,0,"Step 5534\r\nStep 5535\r\nStep 5536\r\nStep 5537\r\n",,terminal_output +3074,2698651,"TERMINAL",0,0,"Step 5538\r\n",,terminal_output +3075,2698762,"TERMINAL",0,0,"Step 5539\r\nStep 5540\r\nStep 5541\r\nStep 5542\r\nStep 5543\r\nStep 5544\r\n",,terminal_output +3076,2698824,"TERMINAL",0,0,"Step 5545\r\n",,terminal_output +3077,2699011,"TERMINAL",0,0,"Step 5546\r\nStep 5547\r\nStep 5548\r\nStep 5549\r\nStep 5550\r\nStep 5551\r\nStep 5552\r\nStep 5553\r\nStep 5554\r\nStep 5555\r\nStep 5556\r\n",,terminal_output +3078,2699134,"TERMINAL",0,0,"Step 5557\r\nStep 5558\r\nStep 5559\r\nStep 5560\r\nStep 5561\r\nStep 5562\r\n",,terminal_output +3079,2699418,"TERMINAL",0,0,"Step 5563\r\nStep 5564\r\nStep 5565\r\nStep 
5566\r\nStep 5567\r\nStep 5568\r\nStep 5569\r\nStep 5570\r\nStep 5571\r\nStep 5572\r\nStep 5573\r\nStep 5574\r\nStep 5575\r\nStep 5576\r\nStep 5577\r\nStep 5578\r\n",,terminal_output +3080,2699472,"TERMINAL",0,0,"Step 5579\r\nStep 5580\r\n",,terminal_output +3081,2699550,"TERMINAL",0,0,"Step 5581\r\nStep 5582\r\nStep 5583\r\nStep 5584\r\n",,terminal_output +3082,2699680,"TERMINAL",0,0,"Step 5585\r\nStep 5586\r\nStep 5587\r\nStep 5588\r\nStep 5589\r\nStep 5590\r\nStep 5591\r\n",,terminal_output +3083,2699937,"TERMINAL",0,0,"Step 5592\r\nStep 5593\r\nStep 5594\r\nStep 5595\r\nStep 5596\r\nStep 5597\r\nStep 5598\r\nStep 5599\r\nStep 5600\r\nStep 5601\r\nStep 5602\r\nStep 5603\r\nStep 5604\r\nStep 5605\r\nStep 5606\r\nStep 5607\r\nStep 5608\r\nStep 5609\r\nStep 5610\r\nStep 5611\r\nStep 5612\r\nStep 5613\r\nStep 5614\r\nStep 5615\r\nStep 5616\r\n",,terminal_output +3084,2700152,"TERMINAL",0,0,"Step 5617\r\nStep 5618\r\nStep 5619\r\nStep 5620\r\nStep 5621\r\nStep 5622\r\nStep 5623\r\nStep 5624\r\nStep 5625\r\nStep 5626\r\n",,terminal_output +3085,2700216,"TERMINAL",0,0,"Step 5627\r\nStep 5628\r\nStep 5629\r\nStep 5630\r\nStep 5631\r\nStep 5632\r\nStep 5633\r\n",,terminal_output +3086,2700275,"TERMINAL",0,0,"Step 5634\r\n",,terminal_output +3087,2700598,"TERMINAL",0,0,"Step 5635\r\nStep 5636\r\nStep 5637\r\nStep 5638\r\nStep 5639\r\nStep 5640\r\nStep 5641\r\nStep 5642\r\nStep 5643\r\nStep 5644\r\nStep 5645\r\nStep 5646\r\nStep 5647\r\nStep 5648\r\nStep 5649\r\nStep 5650\r\nStep 5651\r\nStep 5652\r\nStep 5653\r\nStep 5654\r\nStep 5655\r\nStep 5656\r\nStep 5657\r\nStep 5658\r\nStep 5659\r\n",,terminal_output +3088,2700666,"TERMINAL",0,0,"Step 5660\r\nStep 5661\r\nStep 5662\r\n",,terminal_output +3089,2700735,"TERMINAL",0,0,"Step 5663\r\nStep 5664\r\nStep 5665\r\nStep 5666\r\nStep 5667\r\n",,terminal_output +3090,2700787,"train_tokenizer.py",7066,0,"",python,selection_mouse +3091,2700939,"TERMINAL",0,0,"Step 5668\r\nStep 5669\r\nStep 5670\r\nStep 5671\r\nStep 5672\r\n",,terminal_output +3092,2701063,"TERMINAL",0,0,"Step 5673\r\nStep 5674\r\nStep 5675\r\nStep 5676\r\nStep 5677\r\nStep 5678\r\nStep 5679\r\n",,terminal_output +3093,2701116,"TERMINAL",0,0,"4\r560",,terminal_output +3094,2701293,"TERMINAL",0,0,"Step 5680\r\nStep 5681\r\nStep 5682\r\nStep 5683\r\nStep 5684\r\nStep 5685\r\nStep 5686\r\n",,terminal_output +3095,2701372,"TERMINAL",0,0,"Step 5687\r\nStep 5688\r\n",,terminal_output +3096,2701523,"TERMINAL",0,0,"Step 5689\r\n",,terminal_output +3097,2701599,"TERMINAL",0,0,"Step 5690\r\n",,terminal_output +3098,2701709,"TERMINAL",0,0,"Step 5691\r\nStep 5692\r\nStep 5693\r\nStep 5694\r\nStep 5695\r\nStep 5696\r\nStep 5697\r\nStep 5698\r\nStep 5699\r\nStep 5700\r\nStep 5701\r\nStep 5702\r\nStep 5703\r\n",,terminal_output +3099,2701762,"TERMINAL",0,0,"Step 5704\r\n",,terminal_output +3100,2701932,"TERMINAL",0,0,"Step 5705\r\nStep 5706\r\nStep 5707\r\nStep 5708\r\n",,terminal_output +3101,2701999,"TERMINAL",0,0,"Step 5709\r\nStep 5710\r\n",,terminal_output +3102,2702113,"TERMINAL",0,0,"Step 5711\r\nStep 5712\r\nStep 5713\r\nStep 5714\r\nStep 5715\r\n",,terminal_output +3103,2702175,"TERMINAL",0,0,"Step 5716\r\nStep 5717\r\n",,terminal_output +3104,2702329,"TERMINAL",0,0,"Step 5718\r\nStep 5719\r\nStep 5720\r\nStep 5721\r\nStep 5722\r\nStep 5723\r\nStep 5724\r\nStep 5725\r\nStep 5726\r\nStep 5727\r\nStep 5728\r\nStep 5729\r\nStep 5730\r\nStep 5731\r\nStep 5732\r\nStep 5733\r\n",,terminal_output +3105,2702525,"TERMINAL",0,0,"Step 5734\r\nStep 5735\r\nStep 5736\r\nStep 5737\r\nStep 5738\r\nStep 
5739\r\nStep 5740\r\nStep 5741\r\nStep 5742\r\nStep 5743\r\nStep 5744\r\nStep 5745\r\nStep 5746\r\nStep 5747\r\nStep 5748\r\nStep 5749\r\nStep 5750\r\n",,terminal_output +3106,2702717,"train_tokenizer.py",7328,0,"",python,selection_mouse +3107,2702735,"train_tokenizer.py",7327,0,"",python,selection_command +3108,2702813,"TERMINAL",0,0,"Step 5751\r\nStep 5752\r\nStep 5753\r\nStep 5754\r\nStep 5755\r\nStep 5756\r\n",,terminal_output +3109,2702899,"TERMINAL",0,0,"Step 5757\r\nStep 5758\r\nStep 5759\r\nStep 5760\r\n",,terminal_output +3110,2703026,"TERMINAL",0,0,"Step 5761\r\nStep 5762\r\nStep 5763\r\nStep 5764\r\nStep 5765\r\nStep 5766\r\nStep 5767\r\nStep 5768\r\n",,terminal_output +3111,2703079,"TERMINAL",0,0,"Step 5769\r\n",,terminal_output +3112,2703214,"TERMINAL",0,0,"7\r5725",,terminal_output +3113,2703226,"TERMINAL",0,0,"Step 5770\r\nStep 5771\r\nStep 5772\r\nStep 5773\r\n",,terminal_output +3114,2703391,"TERMINAL",0,0,"Step 5774\r\nStep 5775\r\nStep 5776\r\nStep 5777\r\nStep 5778\r\nStep 5779\r\nStep 5780\r\n",,terminal_output +3115,2703455,"TERMINAL",0,0,"Step 5781\r\n",,terminal_output +3116,2703515,"TERMINAL",0,0,"Step 5782\r\nStep 5783\r\nStep 5784\r\nStep 5785\r\nStep 5786\r\n",,terminal_output +3117,2703595,"TERMINAL",0,0,"Step 5787\r\nStep 5788\r\nStep 5789\r\n",,terminal_output +3118,2703669,"TERMINAL",0,0,"Step 5790\r\nStep 5791\r\n",,terminal_output +3119,2703761,"TERMINAL",0,0,"Step 5792\r\nStep 5793\r\nStep 5794\r\nStep 5795\r\nStep 5796\r\nStep 5797\r\n",,terminal_output +3120,2703783,"train_tokenizer.py",7279,0,"",python,selection_mouse +3121,2703860,"TERMINAL",0,0,"Step 5798\r\nStep 5799\r\nStep 5800\r\nStep 5801\r\nStep 5802\r\n",,terminal_output +3122,2704051,"TERMINAL",0,0,"Step 5803\r\nStep 5804\r\nStep 5805\r\nStep 5806\r\nStep 5807\r\nStep 5808\r\nStep 5809\r\nStep 5810\r\nStep 5811\r\nStep 5812\r\n",,terminal_output +3123,2704114,"TERMINAL",0,0,"Step 5813\r\nStep 5814\r\n",,terminal_output +3124,2704178,"TERMINAL",0,0,"Step 5815\r\nStep 5816\r\n",,terminal_output +3125,2704239,"TERMINAL",0,0,"Step 5817\r\nStep 5818\r\nStep 5819\r\n",,terminal_output +3126,2704343,"TERMINAL",0,0,"Step 5820\r\n",,terminal_output +3127,2704452,"TERMINAL",0,0,"Step 5821\r\nStep 5822\r\nStep 5823\r\nStep 5824\r\n",,terminal_output +3128,2704558,"TERMINAL",0,0,"Step 5825\r\nStep 5826\r\n",,terminal_output +3129,2704612,"TERMINAL",0,0,"Step 5827\r\nStep 5828\r\n",,terminal_output +3130,2704732,"TERMINAL",0,0,"Step 5829\r\nStep 5830\r\nStep 5831\r\nStep 5832\r\nStep 5833\r\nStep 5834\r\n",,terminal_output +3131,2704863,"TERMINAL",0,0,"Step 5835\r\nStep 5836\r\nStep 5837\r\nStep 5838\r\nStep 5839\r\nStep 5840\r\nStep 5841\r\nStep 5842\r\nStep 5843\r\nStep 5844\r\nStep 5845\r\nStep 5846\r\n",,terminal_output +3132,2704922,"train_tokenizer.py",7278,1,"",python,content +3133,2705027,"TERMINAL",0,0,"Step 5847\r\nStep 5848\r\nStep 5849\r\nStep 5850\r\nStep 5851\r\nStep 5852\r\nStep 5853\r\nStep 5854\r\nStep 5855\r\n",,terminal_output +3134,2705095,"train_tokenizer.py",7278,0,">",python,content +3135,2705096,"train_tokenizer.py",7279,0,"",python,selection_keyboard +3136,2705224,"TERMINAL",0,0,"Step 5856\r\nStep 5857\r\nStep 5858\r\nStep 5859\r\nStep 5860\r\nStep 5861\r\nStep 5862\r\nStep 5863\r\nStep 5864\r\nStep 5865\r\nStep 5866\r\nStep 5867\r\nStep 5868\r\nStep 5869\r\nStep 5870\r\nStep 5871\r\nStep 5872\r\nStep 5873\r\n",,terminal_output +3137,2705646,"TERMINAL",0,0,"9\r5831",,terminal_output +3138,2705713,"TERMINAL",0,0,"Step 5874\r\nStep 5875\r\nStep 5876\r\nStep 5877\r\nStep 5878\r\nStep 
5879\r\nStep 5880\r\nStep 5881\r\nStep 5882\r\nStep 5883\r\nStep 5884\r\nStep 5885\r\nStep 5886\r\nStep 5887\r\nStep 5888\r\nStep 5889\r\nStep 5890\r\nStep 5891\r\nStep 5892\r\nStep 5893\r\nStep 5894\r\nStep 5895\r\nStep 5896\r\nStep 5897\r\nStep 5898\r\nStep 5899\r\nStep 5900\r\nStep 5901\r\nStep 5902\r\nStep 5903\r\nStep 5904\r\nStep 5905\r\nStep 5906\r\nStep 5907\r\nStep 5908\r\nStep 5909\r\nStep 5910\r\n",,terminal_output +3139,2705838,"TERMINAL",0,0,"Step 5911\r\nStep 5912\r\nStep 5913\r\nStep 5914\r\n",,terminal_output +3140,2706048,"TERMINAL",0,0,"Step 5915\r\nStep 5916\r\nStep 5917\r\nStep 5918\r\nStep 5919\r\nStep 5920\r\n",,terminal_output +3141,2706126,"TERMINAL",0,0,"Step 5921\r\nStep 5922\r\nStep 5923\r\nStep 5924\r\nStep 5925\r\nStep 5926\r\n",,terminal_output +3142,2706199,"TERMINAL",0,0,"Step 5927\r\nStep 5928\r\n",,terminal_output +3143,2706281,"TERMINAL",0,0,"Step 5929\r\nStep 5930\r\nStep 5931\r\n",,terminal_output +3144,2706402,"TERMINAL",0,0,"Step 5932\r\nStep 5933\r\nStep 5934\r\nStep 5935\r\n",,terminal_output +3145,2706483,"TERMINAL",0,0,"Step 5936\r\nStep 5937\r\n",,terminal_output +3146,2706546,"TERMINAL",0,0,"Step 5938\r\nStep 5939\r\n",,terminal_output +3147,2706715,"TERMINAL",0,0,"Step 5940\r\nStep 5941\r\nStep 5942\r\n",,terminal_output +3148,2706780,"TERMINAL",0,0,"Step 5943\r\nStep 5944\r\n",,terminal_output +3149,2706870,"TERMINAL",0,0,"Step 5945\r\nStep 5946\r\nStep 5947\r\nStep 5948\r\nStep 5949\r\n",,terminal_output +3150,2707068,"TERMINAL",0,0,"Step 5950\r\nStep 5951\r\nStep 5952\r\nStep 5953\r\nStep 5954\r\nStep 5955\r\nStep 5956\r\nStep 5957\r\nStep 5958\r\nStep 5959\r\nStep 5960\r\nStep 5961\r\n",,terminal_output +3151,2707121,"TERMINAL",0,0,"Step 5962\r\nStep 5963\r\n",,terminal_output +3152,2707255,"TERMINAL",0,0,"Step 5964\r\nStep 5965\r\nStep 5966\r\nStep 5967\r\nStep 5968\r\nStep 5969\r\nStep 5970\r\nStep 5971\r\n",,terminal_output +3153,2707381,"train_tokenizer.py",6787,0,"",python,selection_mouse +3154,2707396,"TERMINAL",0,0,"Step 5972\r\nStep 5973\r\nStep 5974\r\n",,terminal_output +3155,2707436,"TERMINAL",0,0,"Step 5975\r\nStep 5976\r\nStep 5977\r\nStep 5978\r\n",,terminal_output +3156,2707507,"TERMINAL",0,0,"Step 5979\r\nStep 5980\r\nStep 5981\r\nStep 5982\r\n",,terminal_output +3157,2707534,"train_tokenizer.py",6787,1," ",python,selection_mouse +3158,2707593,"TERMINAL",0,0,"Step 5983\r\nStep 5984\r\nStep 5985\r\nStep 5986\r\n",,terminal_output +3159,2707652,"train_tokenizer.py",6752,36,"for videos in dataloader:\n # ",python,selection_mouse +3160,2707652,"train_tokenizer.py",6756,32,"videos in dataloader:\n # ",python,selection_mouse +3161,2707664,"TERMINAL",0,0,"Step 5987\r\n",,terminal_output +3162,2707697,"train_tokenizer.py",6763,25,"in dataloader:\n # ",python,selection_mouse +3163,2707726,"train_tokenizer.py",6765,23," dataloader:\n # ",python,selection_mouse +3164,2707737,"train_tokenizer.py",6766,22,"dataloader:\n # ",python,selection_mouse +3165,2707748,"TERMINAL",0,0,"Step 5988\r\nStep 5989\r\nStep 5990\r\nStep 5991\r\nStep 5992\r\n",,terminal_output +3166,2707981,"TERMINAL",0,0,"Step 5993\r\nStep 5994\r\nStep 5995\r\nStep 5996\r\nStep 5997\r\nStep 5998\r\nStep 5999\r\nStep 6000\r\nStep 6001\r\nStep 6002\r\nStep 6003\r\nStep 6004\r\nStep 6005\r\nStep 6006\r\nStep 6007\r\nStep 6008\r\nStep 6009\r\nStep 6010\r\nStep 6011\r\nStep 6012\r\n",,terminal_output +3167,2708046,"TERMINAL",0,0,"Step 6013\r\nStep 6014\r\nStep 6015\r\n",,terminal_output +3168,2708111,"TERMINAL",0,0,"52\r5956",,terminal_output +3169,2708181,"TERMINAL",0,0,"Step 
6016\r\nStep 6017\r\nStep 6018\r\nStep 6019\r\nStep 6020\r\nStep 6021\r\nStep 6022\r\nStep 6023\r\nStep 6024\r\n",,terminal_output +3170,2708319,"train_tokenizer.py",6775,0,"",python,selection_mouse +3171,2708319,"train_tokenizer.py",6766,10,"dataloader",python,selection_mouse +3172,2708319,"TERMINAL",0,0,"Step 6025\r\nStep 6026\r\nStep 6027\r\n",,terminal_output +3173,2708355,"TERMINAL",0,0,"Step 6028\r\nStep 6029\r\nStep 6030\r\nStep 6031\r\nStep 6032\r\nStep 6033\r\nStep 6034\r\nStep 6035\r\n",,terminal_output +3174,2708443,"TERMINAL",0,0,"Step 6036\r\nStep 6037\r\nStep 6038\r\n",,terminal_output +3175,2708551,"train_tokenizer.py",6765,11," dataloader",python,selection_mouse +3176,2708551,"train_tokenizer.py",6762,14," in dataloader",python,selection_mouse +3177,2708551,"train_tokenizer.py",6756,20,"videos in dataloader",python,selection_mouse +3178,2708566,"TERMINAL",0,0,"Step 6039\r\nStep 6040\r\nStep 6041\r\n",,terminal_output +3179,2708617,"train_tokenizer.py",6755,21," videos in dataloader",python,selection_mouse +3180,2708618,"train_tokenizer.py",6752,24,"for videos in dataloader",python,selection_mouse +3181,2708717,"train_tokenizer.py",6751,25," for videos in dataloader",python,selection_mouse +3182,2708863,"TERMINAL",0,0,"Step 6042\r\nStep 6043\r\nStep 6044\r\nStep 6045\r\nStep 6046\r\nStep 6047\r\nStep 6048\r\nStep 6049\r\nStep 6050\r\nStep 6051\r\nStep 6052\r\nStep 6053\r\nStep 6054\r\nStep 6055\r\nStep 6056\r\nStep 6057\r\nStep 6058\r\nStep 6059\r\nStep 6060\r\nStep 6061\r\nStep 6062\r\nStep 6063\r\nStep 6064\r\n",,terminal_output +3183,2708959,"TERMINAL",0,0,"Step 6065\r\nStep 6066\r\nStep 6067\r\n",,terminal_output +3184,2709050,"train_tokenizer.py",6751,0,"",python,selection_mouse +3185,2709051,"train_tokenizer.py",6744,8," ",python,selection_mouse +3186,2709074,"TERMINAL",0,0,"Step 6068\r\nStep 6069\r\nStep 6070\r\nStep 6071\r\nStep 6072\r\n",,terminal_output +3187,2709165,"TERMINAL",0,0,"Step 6073\r\nStep 6074\r\nStep 6075\r\n",,terminal_output +3188,2709226,"TERMINAL",0,0,"Step 6076\r\nStep 6077\r\n",,terminal_output +3189,2709265,"train_tokenizer.py",6744,18," for videos",python,selection_mouse +3190,2709338,"train_tokenizer.py",6744,19," for videos ",python,selection_mouse +3191,2709339,"train_tokenizer.py",6744,32," for videos in dataloader",python,selection_mouse +3192,2709339,"train_tokenizer.py",6744,33," for videos in dataloader:",python,selection_mouse +3193,2709340,"train_tokenizer.py",6744,88," for videos in dataloader:\n # npy_path = ""overfit_dir/single_sample_corner",python,selection_mouse +3194,2709345,"TERMINAL",0,0,"Step 6078\r\nStep 6079\r\n",,terminal_output +3195,2709616,"TERMINAL",0,0,"Step 6080\r\nStep 6081\r\nStep 6082\r\nStep 6083\r\nStep 6084\r\nStep 6085\r\nStep 6086\r\nStep 6087\r\nStep 6088\r\nStep 6089\r\nStep 6090\r\nStep 6091\r\nStep 6092\r\nStep 6093\r\nStep 6094\r\nStep 6095\r\nStep 6096\r\nStep 6097\r\nStep 6098\r\nStep 6099\r\nStep 6100\r\nStep 6101\r\nStep 6102\r\nStep 6103\r\nStep 6104\r\nStep 6105\r\nStep 6106\r\nStep 6107\r\n",,terminal_output +3196,2709723,"train_tokenizer.py",6823,0,"",python,selection_mouse +3197,2709752,"TERMINAL",0,0,"Step 6108\r\nStep 6109\r\nStep 6110\r\nStep 6111\r\n",,terminal_output +3198,2709869,"TERMINAL",0,0,"Step 6112\r\nStep 6113\r\nStep 6114\r\nStep 6115\r\nStep 6116\r\nStep 6117\r\nStep 6118\r\nStep 6119\r\nStep 6120\r\nStep 6121\r\nStep 6122\r\nStep 6123\r\nStep 6124\r\nStep 6125\r\nStep 6126\r\nStep 6127\r\n",,terminal_output +3199,2709982,"TERMINAL",0,0,"Step 6128\r\nStep 
6129\r\n",,terminal_output +3200,2710032,"TERMINAL",0,0,"Step 6130\r\nStep 6131\r\nStep 6132\r\nStep 6133\r\n",,terminal_output +3201,2710157,"TERMINAL",0,0,"Step 6134\r\nStep 6135\r\nStep 6136\r\n",,terminal_output +3202,2710265,"TERMINAL",0,0,"Step 6137\r\nStep 6138\r\nStep 6139\r\n",,terminal_output +3203,2710318,"TERMINAL",0,0,"4\r6084",,terminal_output +3204,2710345,"TERMINAL",0,0,"Step 6140\r\nStep 6141\r\nStep 6142\r\n",,terminal_output +3205,2710753,"TERMINAL",0,0,"Step 6143\r\nStep 6144\r\nStep 6145\r\nStep 6146\r\nStep 6147\r\nStep 6148\r\nStep 6149\r\nStep 6150\r\nStep 6151\r\nStep 6152\r\nStep 6153\r\nStep 6154\r\nStep 6155\r\nStep 6156\r\nStep 6157\r\nStep 6158\r\n",,terminal_output +3206,2710893,"TERMINAL",0,0,"Step 6159\r\nStep 6160\r\nStep 6161\r\nStep 6162\r\nStep 6163\r\nStep 6164\r\nStep 6165\r\nStep 6166\r\nStep 6167\r\nStep 6168\r\nStep 6169\r\nStep 6170\r\nStep 6171\r\nStep 6172\r\nStep 6173\r\nStep 6174\r\nStep 6175\r\nStep 6176\r\n",,terminal_output +3207,2711181,"TERMINAL",0,0,"Step 6177\r\nStep 6178\r\nStep 6179\r\nStep 6180\r\nStep 6181\r\nStep 6182\r\nStep 6183\r\nStep 6184\r\n",,terminal_output +3208,2711239,"TERMINAL",0,0,"Step 6185\r\nStep 6186\r\nStep 6187\r\n",,terminal_output +3209,2711301,"TERMINAL",0,0,"Step 6188\r\n",,terminal_output +3210,2711367,"TERMINAL",0,0,"Step 6189\r\nStep 6190\r\n",,terminal_output +3211,2711469,"TERMINAL",0,0,"Step 6191\r\nStep 6192\r\nStep 6193\r\nStep 6194\r\nStep 6195\r\n",,terminal_output +3212,2711636,"TERMINAL",0,0,"Step 6196\r\nStep 6197\r\n",,terminal_output +3213,2711702,"TERMINAL",0,0,"Step 6198\r\nStep 6199\r\nStep 6200\r\nStep 6201\r\nStep 6202\r\nStep 6203\r\nStep 6204\r\nStep 6205\r\n",,terminal_output +3214,2711759,"TERMINAL",0,0,"Step 6206\r\nStep 6207\r\nStep 6208\r\nStep 6209\r\n",,terminal_output +3215,2711885,"TERMINAL",0,0,"Step 6210\r\nStep 6211\r\nStep 6212\r\nStep 6213\r\nStep 6214\r\nStep 6215\r\nStep 6216\r\nStep 6217\r\n",,terminal_output +3216,2712059,"TERMINAL",0,0,"Step 6218\r\nStep 6219\r\nStep 6220\r\nStep 6221\r\nStep 6222\r\nStep 6223\r\n",,terminal_output +3217,2712243,"TERMINAL",0,0,"Step 6224\r\nStep 6225\r\nStep 6226\r\nStep 6227\r\nStep 6228\r\n",,terminal_output +3218,2712302,"TERMINAL",0,0,"Step 6229\r\n",,terminal_output +3219,2712380,"TERMINAL",0,0,"Step 6230\r\nStep 6231\r\nStep 6232\r\nStep 6233\r\n",,terminal_output +3220,2712478,"TERMINAL",0,0,"Step 6234\r\nStep 6235\r\nStep 6236\r\n",,terminal_output +3221,2712535,"TERMINAL",0,0,"Step 6237\r\nStep 6238\r\n",,terminal_output +3222,2712649,"TERMINAL",0,0,"Step 6239\r\nStep 6240\r\nStep 6241\r\nStep 6242\r\nStep 6243\r\n",,terminal_output +3223,2712739,"TERMINAL",0,0,"Step 6244\r\nStep 6245\r\nStep 6246\r\nStep 6247\r\nStep 6248\r\nStep 6249\r\nStep 6250\r\n",,terminal_output +3224,2712835,"TERMINAL",0,0,"Step 6251\r\n",,terminal_output +3225,2712946,"TERMINAL",0,0,"Step 6252\r\nStep 6253\r\nStep 6254\r\nStep 6255\r\nStep 6256\r\nStep 6257\r\nStep 6258\r\nStep 6259\r\n",,terminal_output +3226,2713055,"TERMINAL",0,0,"6\r6203",,terminal_output +3227,2713121,"TERMINAL",0,0,"Step 6260\r\nStep 6261\r\nStep 6262\r\nStep 6263\r\nStep 6264\r\nStep 6265\r\nStep 6266\r\n",,terminal_output +3228,2713185,"TERMINAL",0,0,"Step 6267\r\nStep 6268\r\nStep 6269\r\n",,terminal_output +3229,2713314,"TERMINAL",0,0,"Step 6270\r\nStep 6271\r\nStep 6272\r\nStep 6273\r\nStep 6274\r\nStep 6275\r\n",,terminal_output +3230,2713455,"TERMINAL",0,0,"Step 6276\r\nStep 6277\r\nStep 6278\r\nStep 6279\r\nStep 6280\r\nStep 6281\r\nStep 6282\r\nStep 
6283\r\n",,terminal_output +3231,2713519,"TERMINAL",0,0,"Step 6284\r\n",,terminal_output +3232,2713645,"TERMINAL",0,0,"Step 6285\r\nStep 6286\r\nStep 6287\r\nStep 6288\r\nStep 6289\r\nStep 6290\r\nStep 6291\r\nStep 6292\r\n",,terminal_output +3233,2713708,"TERMINAL",0,0,"Step 6293\r\nStep 6294\r\nStep 6295\r\nStep 6296\r\nStep 6297\r\n",,terminal_output +3234,2713770,"TERMINAL",0,0,"Step 6298\r\nStep 6299\r\nStep 6300\r\nStep 6301\r\n",,terminal_output +3235,2713906,"TERMINAL",0,0,"Step 6302\r\nStep 6303\r\nStep 6304\r\nStep 6305\r\nStep 6306\r\nStep 6307\r\n^C",,terminal_output +3236,2714009,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_tokenizer.py"", line 215, in \r\n for videos in dataloader:\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/dataset_ops.py"", line 4788, in __next__\r\n return nest.map_structure(to_numpy, next(self._iterator))\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/iterator_ops.py"", line 826, in __next__\r\n return self._next_internal()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/data/ops/iterator_ops.py"", line 776, in _next_internal\r\n ret = gen_dataset_ops.iterator_get_next(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tensorflow/python/ops/gen_dataset_ops.py"", line 3081, in iterator_get_next\r\n _result = pywrap_tfe.TFE_Py_FastPathExecute(\r\nKeyboardInterrupt\r\n",,terminal_output +3237,2714118,"TERMINAL",0,0,"^CException ignored in atexit callback: .teardown_atexit at 0x14f4b40dbd90>\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 94, in teardown_atexit\r\n conn.teardown(hooks.exit_code)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/lib/service_connection.py"", line 226, in teardown\r\n self._router.join()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/interface/router.py"", line 75, in join\r\n self._thread.join()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1096, in join\r\n self._wait_for_tstate_lock()\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/threading.py"", line 1116, in _wait_for_tstate_lock\r\n if lock.acquire(block, timeout):\r\nKeyboardInterrupt: \r\n",,terminal_output +3238,2714295,"TERMINAL",0,0,"^CException ignored in: .remove at 0x14f58cb02680>\r\nTraceback (most recent call last):\r\n File ""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n",,terminal_output +3239,2714454,"TERMINAL",0,0,"^C",,terminal_output +3240,2714564,"TERMINAL",0,0,"KeyboardInterrupt: \r\n",,terminal_output +3241,2714632,"TERMINAL",0,0,"^CException ignored in: .remove at 0x14f58cb02680>\r\nTraceback (most recent call last):\r\n File 
""/home/hk-project-p0023960/tum_cte0515/.local/share/uv/python/cpython-3.10.18-linux-x86_64-gnu/lib/python3.10/weakref.py"", line 370, in remove\r\n def remove(k, selfref=ref(self)):\r\nKeyboardInterrupt: \r\n",,terminal_output +3242,2714789,"TERMINAL",0,0,"^C",,terminal_output +3243,2714854,"TERMINAL",0,0,"9\r6308",,terminal_output +3244,2714982,"TERMINAL",0,0,"Exception ignored in atexit callback: \r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3168, in clean_up\r\n clear_backends()\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/jax/_src/api.py"", line 3158, in clear_backends\r\n pjit._infer_params_cached.cache_clear()\r\nKeyboardInterrupt: \r\n",,terminal_output +3245,2715565,"TERMINAL",0,0,"^C",,terminal_output +3246,2716100,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +3247,2716134,"TERMINAL",0,0,"1:01",,terminal_output +3248,2717410,"TERMINAL",0,0,"2",,terminal_output +3249,2718639,"TERMINAL",0,0,"3",,terminal_output +3250,2719878,"TERMINAL",0,0,"5",,terminal_output +3251,2720195,"train_tokenizer.py",0,0,"",python,tab +3252,2721131,"TERMINAL",0,0,"6",,terminal_output +3253,2721521,"train_tokenizer.py",7307,0,"",python,selection_mouse +3254,2722221,"train_tokenizer.py",7306,0,"",python,selection_mouse +3255,2722355,"train_tokenizer.py",7302,5,"break",python,selection_mouse +3256,2722488,"TERMINAL",0,0,"7",,terminal_output +3257,2722969,"train_tokenizer.py",7302,5,"e",python,content +3258,2722971,"train_tokenizer.py",7303,0,"",python,selection_keyboard +3259,2723188,"train_tokenizer.py",7303,0,"c",python,content +3260,2723191,"train_tokenizer.py",7304,0,"",python,selection_keyboard +3261,2723296,"train_tokenizer.py",7304,0,"i",python,content +3262,2723298,"train_tokenizer.py",7305,0,"",python,selection_keyboard +3263,2723414,"train_tokenizer.py",7305,0,"t",python,content +3264,2723416,"train_tokenizer.py",7306,0,"",python,selection_keyboard +3265,2723684,"TERMINAL",0,0,"8",,terminal_output +3266,2724357,"train_tokenizer.py",7305,1,"",python,content +3267,2724504,"train_tokenizer.py",7304,1,"",python,content +3268,2724620,"train_tokenizer.py",7303,1,"",python,content +3269,2724739,"train_tokenizer.py",7303,0,"e",python,content +3270,2724742,"train_tokenizer.py",7304,0,"",python,selection_keyboard +3271,2724946,"TERMINAL",0,0,"10",,terminal_output +3272,2724996,"train_tokenizer.py",7304,0,"x",python,content +3273,2724997,"train_tokenizer.py",7305,0,"",python,selection_keyboard +3274,2725358,"train_tokenizer.py",7304,1,"",python,content +3275,2725513,"train_tokenizer.py",7303,1,"",python,content +3276,2725738,"train_tokenizer.py",7303,0,"x",python,content +3277,2725740,"train_tokenizer.py",7304,0,"",python,selection_keyboard +3278,2726014,"train_tokenizer.py",7304,0,"i",python,content +3279,2726017,"train_tokenizer.py",7305,0,"",python,selection_keyboard +3280,2726235,"train_tokenizer.py",7305,0,"t",python,content +3281,2726236,"train_tokenizer.py",7306,0,"",python,selection_keyboard +3282,2726257,"TERMINAL",0,0,"1",,terminal_output +3283,2726831,"train_tokenizer.py",7306,0,")",python,content +3284,2726832,"train_tokenizer.py",7307,0,"",python,selection_keyboard +3285,2727333,"train_tokenizer.py",7306,1,"",python,content +3286,2727553,"TERMINAL",0,0,"2",,terminal_output 
+3287,2727971,"train_tokenizer.py",7306,0,"()",python,content +3288,2727972,"train_tokenizer.py",7307,0,"",python,selection_keyboard +3289,2728867,"TERMINAL",0,0,"3",,terminal_output +3290,2730164,"TERMINAL",0,0,"5",,terminal_output +3291,2731402,"TERMINAL",0,0,"6",,terminal_output +3292,2731659,"train_tokenizer.py",7308,0,"",python,selection_mouse +3293,2732826,"TERMINAL",0,0,"7",,terminal_output +3294,2733928,"TERMINAL",0,0,"9",,terminal_output +3295,2735129,"train_tokenizer.py",7279,0,"",python,selection_mouse +3296,2735221,"TERMINAL",0,0,"20",,terminal_output +3297,2736463,"TERMINAL",0,0,"1",,terminal_output +3298,2737768,"TERMINAL",0,0,"2",,terminal_output +3299,2739092,"TERMINAL",0,0,"4",,terminal_output +3300,2740371,"TERMINAL",0,0,"5",,terminal_output +3301,2741541,"TERMINAL",0,0,"6",,terminal_output +3302,2742908,"TERMINAL",0,0,"7",,terminal_output +3303,2744121,"TERMINAL",0,0,"9",,terminal_output +3304,2745168,"TERMINAL",0,0,"sh scripts_horeka/overfit_sample_tiny/tester.sh ",,terminal_output +3305,2745321,"TERMINAL",0,0,"30",,terminal_output +3306,2746584,"TERMINAL",0,0,"1",,terminal_output +3307,2746633,"TERMINAL",0,0,"watch",,terminal_focus +3308,2746935,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample]633;D;0",,terminal_output +3309,2748066,"TERMINAL",0,0,"ls",,terminal_command +3310,2748116,"TERMINAL",0,0,"]633;E;2025-07-01 19:11:33 ls;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C",,terminal_output +3311,2748343,"TERMINAL",0,0,"train_tokenizer_sample_0.npy train_tokenizer_sample_2136.npy train_tokenizer_sample_3272.npy train_tokenizer_sample_4408.npy train_tokenizer_sample_5544.npy\r\ntrain_tokenizer_sample_1000.npy train_tokenizer_sample_2137.npy train_tokenizer_sample_3273.npy train_tokenizer_sample_4409.npy train_tokenizer_sample_5545.npy\r\ntrain_tokenizer_sample_1001.npy train_tokenizer_sample_2138.npy train_tokenizer_sample_3274.npy train_tokenizer_sample_440.npy train_tokenizer_sample_5546.npy\r\ntrain_tokenizer_sample_1002.npy train_tokenizer_sample_2139.npy train_tokenizer_sample_3275.npy train_tokenizer_sample_4410.npy train_tokenizer_sample_5547.npy\r\ntrain_tokenizer_sample_1003.npy train_tokenizer_sample_213.npy train_tokenizer_sample_3276.npy train_tokenizer_sample_4411.npy train_tokenizer_sample_5548.npy\r\ntrain_tokenizer_sample_1004.npy train_tokenizer_sample_2140.npy train_tokenizer_sample_3277.npy train_tokenizer_sample_4412.npy train_tokenizer_sample_5549.npy\r\ntrain_tokenizer_sample_1005.npy train_tokenizer_sample_2141.npy train_tokenizer_sample_3278.npy train_tokenizer_sample_4413.npy train_tokenizer_sample_554.npy\r\ntrain_tokenizer_sample_1006.npy train_tokenizer_sample_2142.npy train_tokenizer_sample_3279.npy train_tokenizer_sample_4414.npy train_tokenizer_sample_5550.npy\r\ntrain_tokenizer_sample_1007.npy train_tokenizer_sample_2143.npy train_tokenizer_sample_327.npy train_tokenizer_sample_4415.npy train_tokenizer_sample_5551.npy\r\ntrain_tokenizer_sample_1008.npy train_tokenizer_sample_2144.npy train_tokenizer_sample_3280.npy train_tokenizer_sample_4416.npy train_tokenizer_sample_5552.npy\r\ntrain_tokenizer_sample_1009.npy train_tokenizer_sample_2145.npy train_tokenizer_sample_3281.npy train_tokenizer_sample_4417.npy train_tokenizer_sample_5553.npy\r\ntrain_tokenizer_sample_100.npy train_tokenizer_sample_2146.npy train_tokenizer_sample_3282.npy train_tokenizer_sample_4418.npy train_tokenizer_sample_5554.npy\r\ntrain_tokenizer_sample_1010.npy 
train_tokenizer_sample_2147.npy train_tokenizer_sample_3283.npy train_tokenizer_sample_4419.npy train_tokenizer_sample_5555.npy\r\ntrain_tokenizer_sample_1011.npy train_tokenizer_sample_2148.npy train_tokenizer_sample_3284.npy train_tokenizer_sample_441.npy train_tokenizer_sample_5556.npy\r\ntrain_tokenizer_sample_1012.npy train_tokenizer_sample_2149.npy train_tokenizer_sample_3285.npy train_tokenizer_sample_4420.npy train_tokenizer_sample_5557.npy\r\ntrain_tokenizer_sample_1013.npy train_tokenizer_sample_214.npy train_tokenizer_sample_3286.npy train_tokenizer_sample_4421.npy train_tokenizer_sample_5558.npy\r\ntrain_tokenizer_sample_1014.npy train_tokenizer_sample_2150.npy train_tokenizer_sample_3287.npy train_tokenizer_sample_4422.npy train_tokenizer_sample_5559.npy\r\ntrain_tokenizer_sample_1015.npy train_tokenizer_sample_2151.npy train_tokenizer_sample_3288.npy train_tokenizer_sample_4423.npy train_tokenizer_sample_555.npy\r\ntrain_tokenizer_sample_1016.npy train_tokenizer_sample_2152.npy train_tokenizer_sample_3289.npy train_tokenizer_sample_4424.npy train_tokenizer_sample_5560.npy\r\ntrain_tokenizer_sample_1017.npy train_tokenizer_sample_2153.npy train_tokenizer_sample_328.npy train_tokenizer_sample_4425.npy train_tokenizer_sample_5561.npy\r\ntrain_tokenizer_sample_1018.npy train_tokenizer_sample_2154.npy train_tokenizer_sample_3290.npy train_tokenizer_sample_4426.npy train_tokenizer_sample_5562.npy\r\ntrain_tokenizer_sample_1019.npy train_tokenizer_sample_2155.npy train_tokenizer_sample_3291.npy train_tokenizer_sample_4427.npy train_tokenizer_sample_5563.npy\r\ntrain_tokenizer_sample_101.npy train_tokenizer_sample_2156.npy train_tokenizer_sample_3292.npy train_tokenizer_sample_4428.npy train_tokenizer_sample_5564.npy\r\ntrain_tokenizer_sample_1020.npy train_tokenizer_sample_2157.npy train_tokenizer_sample_3293.npy train_tokenizer_sample_4429.npy train_tokenizer_sample_5565.npy\r\ntrain_tokenizer_sample_1021.npy train_tokenizer_sample_2158.npy train_tokenizer_sample_3294.npy train_tokenizer_sample_442.npy train_tokenizer_sample_5566.npy\r\ntrain_tokenizer_sample_1022.npy train_tokenizer_sample_2159.npy train_tokenizer_sample_3295.npy train_tokenizer_sample_4430.npy train_tokenizer_sample_5567.npy\r\ntrain_tokenizer_sample_1023.npy train_tokenizer_sample_215.npy train_tokenizer_sample_3296.npy train_tokenizer_sample_4431.npy train_tokenizer_sample_5568.npy\r\ntrain_tokenizer_sample_1024.npy train_tokenizer_sample_2160.npy train_tokenizer_sample_3297.npy train_tokenizer_sample_4432.npy train_tokenizer_sample_5569.npy\r\ntrain_tokenizer_sample_1025.npy train_tokenizer_sample_2161.npy train_tokenizer_sample_3298.npy train_tokenizer_sample_4433.npy train_tokenizer_sample_556.npy\r\ntrain_tokenizer_sample_1026.npy train_tokenizer_sample_2162.npy train_tokenizer_sample_3299.npy train_tokenizer_sample_4434.npy train_tokenizer_sample_5570.npy\r\ntrain_tokenizer_sample_1027.npy train_tokenizer_sample_2163.npy train_tokenizer_sample_329.npy train_tokenizer_sample_4435.npy train_tokenizer_sample_5571.npy\r\ntrain_tokenizer_sample_1028.npy train_tokenizer_sample_2164.npy train_tokenizer_sample_32.npy train_tokenizer_sample_4436.npy train_tokenizer_sample_5572.npy\r\ntrain_tokenizer_sample_1029.npy train_tokenizer_sample_2165.npy train_tokenizer_sample_3300.npy train_tokenizer_sample_4437.npy train_tokenizer_sample_5573.npy\r\ntrain_tokenizer_sample_102.npy train_tokenizer_sample_2166.npy train_tokenizer_sample_3301.npy train_tokenizer_sample_4438.npy 
train_tokenizer_sample_5574.npy\r\ntrain_tokenizer_sample_1030.npy train_tokenizer_sample_2167.npy train_tokenizer_sample_3302.npy train_tokenizer_sample_4439.npy train_tokenizer_sample_5575.npy\r\ntrain_tokenizer_sample_1031.npy train_tokenizer_sample_2168.npy train_tokenizer_sample_3303.npy train_tokenizer_sample_443.npy train_tokenizer_sample_5576.npy\r\ntrain_tokenizer_sample_1032.npy train_tokenizer_sample_2169.npy train_tokenizer_sample_3304.npy train_tokenizer_sample_4440.npy train_tokenizer_sample_5577.npy\r\ntrain_tokenizer_sample_1033.npy train_tokenizer_sample_216.npy train_tokenizer_sample_3305.npy train_tokenizer_sample_4441.npy train_tokenizer_sample_5578.npy\r\ntrain_tokenizer_sample_1034.npy train_tokenizer_sample_2170.npy train_tokenizer_sample_3306.npy train_tokenizer_sample_4442.npy train_tokenizer_sample_5579.npy\r\ntrain_tokenizer_sample_1035.npy train_tokenizer_sample_2171.npy train_tokenizer_sample_3307.npy train_tokenizer_sample_4443.npy train_tokenizer_sample_557.npy\r\ntrain_tokenizer_sample_1036.npy train_tokenizer_sample_2172.npy train_tokenizer_sample_3308.npy train_tokenizer_sample_4444.npy train_tokenizer_sample_5580.npy\r\ntrain_tokenizer_sample_1037.npy train_tokenizer_sample_2173.npy train_tokenizer_sample_3309.npy train_tokenizer_sample_4445.npy train_tokenizer_sample_5581.npy\r\ntrain_tokenizer_sample_1038.npy train_tokenizer_sample_2174.npy train_tokenizer_sample_330.npy train_tokenizer_sample_4446.npy train_tokenizer_sample_5582.npy\r\ntrain_tokenizer_sample_1039.npy train_tokenizer_sample_2175.npy train_tokenizer_sample_3310.npy train_tokenizer_sample_4447.npy train_tokenizer_sample_5583.npy\r\ntrain_tokenizer_sample_103.npy train_tokenizer_sample_2176.npy train_tokenizer_sample_3311.npy train_tokenizer_sample_4448.npy train_tokenizer_sample_5584.npy\r\ntrain_tokenizer_sample_1040.npy train_tokenizer_sample_2177.npy train_tokenizer_sample_3312.npy train_tokenizer_sample_4449.npy train_tokenizer_sample_5585.npy\r\ntrain_tokenizer_sample_1041.npy train_tokenizer_sample_2178.npy train_tokenizer_sample_3313.npy train_tokenizer_sample_444.npy train_tokenizer_sample_5586.npy\r\ntrain_tokenizer_sample_1042.npy train_tokenizer_sample_2179.npy train_tokenizer_sample_3314.npy train_tokenizer_sample_4450.npy train_tokenizer_sample_5587.npy\r\ntrain_tokenizer_sample_1043.npy train_tokenizer_sample_217.npy train_tokenizer_sample_3315.npy train_tokenizer_sample_4451.npy train_tokenizer_sample_5588.npy\r\ntrain_tokenizer_sample_1044.npy train_tokenizer_sample_2180.npy train_tokenizer_sample_3316.npy train_tokenizer_sample_4452.npy train_tokenizer_sample_5589.npy\r\ntrain_tokenizer_sample_1045.npy train_tokenizer_sample_2181.npy train_tokenizer_sample_3317.npy train_tokenizer_sample_4453.npy train_tokenizer_sample_558.npy\r\ntrain_tokenizer_sample_1046.npy train_tokenizer_sample_2182.npy train_tokenizer_sample_3318.npy train_tokenizer_sample_4454.npy train_tokenizer_sample_5590.npy\r\ntrain_tokenizer_sample_1047.npy train_tokenizer_sample_2183.npy train_tokenizer_sample_3319.npy train_tokenizer_sample_4455.npy train_tokenizer_sample_5591.npy\r\ntrain_tokenizer_sample_1048.npy train_tokenizer_sample_2184.npy train_tokenizer_sample_331.npy train_tokenizer_sample_4456.npy train_tokenizer_sample_5592.npy\r\ntrain_tokenizer_sample_1049.npy train_tokenizer_sample_2185.npy train_tokenizer_sample_3320.npy train_tokenizer_sample_4457.npy train_tokenizer_sample_5593.npy\r\ntrain_tokenizer_sample_104.npy train_tokenizer_sample_2186.npy train_tokenizer_sample_3321.npy 
train_tokenizer_sample_4458.npy train_tokenizer_sample_5594.npy\r\ntrain_tokenizer_sample_1050.npy train_tokenizer_sample_2187.npy train_tokenizer_sample_3322.npy train_tokenizer_sample_4459.npy train_tokenizer_sample_5595.npy\r\ntrain_tokenizer_sample_1051.npy train_tokenizer_sample_2188.npy train_tokenizer_sample_3323.npy train_tokenizer_sample_445.npy train_tokenizer_sample_5596.npy\r\ntrain_tokenizer_sample_1052.npy train_tokenizer_sample_2189.npy train_tokenizer_sample_3324.npy train_tokenizer_sample_4460.npy train_tokenizer_sample_5597.npy\r\ntrain_tokenizer_sample_1053.npy train_tokenizer_sample_218.npy train_tokenizer_sample_3325.npy train_tokenizer_sample_4461.npy train_tokenizer_sample_5598.npy\r\ntrain_tokenizer_sample_1054.npy train_tokenizer_sample_2190.npy train_tokenizer_sample_3326.npy train_tokenizer_sample_4462.npy train_tokenizer_sample_5599.npy\r\ntrain_tokenizer_sample_1055.npy train_tokenizer_sample_2191.npy train_tokenizer_sample_3327.npy train_tokenizer_sample_4463.npy train_tokenizer_sample_559.npy\r\ntrain_tokenizer_sample_1056.npy train_tokenizer_sample_2192.npy train_tokenizer_sample_3328.npy train_tokenizer_sample_4464.npy train_tokenizer_sample_55.npy\r\ntrain_tokenizer_sample_1057.npy train_tokenizer_sample_2193.npy train_tokenizer_sample_3329.npy train_tokenizer_sample_4465.npy train_tokenizer_sample_5600.npy\r\ntrain_tokenizer_sample_1058.npy train_tokenizer_sample_2194.npy train_tokenizer_sample_332.npy train_tokenizer_sample_4466.npy train_tokenizer_sample_5601.npy\r\ntrain_tokenizer_sample_1059.npy train_tokenizer_sample_2195.npy train_tokenizer_sample_3330.npy train_tokenizer_sample_4467.npy train_tokenizer_sample_5602.npy\r\ntrain_tokenizer_sample_105.npy train_tokenizer_sample_2196.npy train_tokenizer_sample_3331.npy train_tokenizer_sample_4468.npy train_tokenizer_sample_5603.npy\r\ntrain_tokenizer_sample_1060.npy train_tokenizer_sample_2197.npy train_tokenizer_sample_3332.npy train_tokenizer_sample_4469.npy train_tokenizer_sample_5604.npy\r\ntrain_tokenizer_sample_1061.npy train_tokenizer_sample_2198.npy train_tokenizer_sample_3333.npy train_tokenizer_sample_446.npy train_tokenizer_sample_5605.npy\r\ntrain_tokenizer_sample_1062.npy train_tokenizer_sample_2199.npy train_tokenizer_sample_3334.npy train_tokenizer_sample_4470.npy train_tokenizer_sample_5606.npy\r\ntrain_tokenizer_sample_1063.npy train_tokenizer_sample_219.npy train_tokenizer_sample_3335.npy train_tokenizer_sample_4471.npy train_tokenizer_sample_5607.npy\r\ntrain_tokenizer_sample_1064.npy train_tokenizer_sample_21.npy train_tokenizer_sample_3336.npy train_tokenizer_sample_4472.npy train_tokenizer_sample_5608.npy\r\ntrain_tokenizer_sample_1065.npy train_tokenizer_sample_2200.npy train_tokenizer_sample_3337.npy train_tokenizer_sample_4473.npy train_tokenizer_sample_5609.npy\r\ntrain_tokenizer_sample_1066.npy train_tokenizer_sample_2201.npy train_tokenizer_sample_3338.npy train_tokenizer_sample_4474.npy train_tokenizer_sample_560.npy\r\ntrain_tokenizer_sample_1067.npy train_tokenizer_sample_2202.npy train_tokenizer_sample_3339.npy train_tokenizer_sample_4475.npy train_tokenizer_sample_5610.npy\r\ntrain_tokenizer_sample_1068.npy train_tokenizer_sample_2203.npy train_tokenizer_sample_333.npy train_tokenizer_sample_4476.npy train_tokenizer_sample_5611.npy\r\ntrain_tokenizer_sample_1069.npy train_tokenizer_sample_2204.npy train_tokenizer_sample_3340.npy train_tokenizer_sample_4477.npy train_tokenizer_sample_5612.npy\r\ntrain_tokenizer_sample_106.npy train_tokenizer_sample_2205.npy 
train_tokenizer_sample_3341.npy train_tokenizer_sample_4478.npy train_tokenizer_sample_5613.npy\r\ntrain_tokenizer_sample_1070.npy train_tokenizer_sample_2206.npy train_tokenizer_sample_3342.npy train_tokenizer_sample_4479.npy train_tokenizer_sample_5614.npy\r\ntrain_tokenizer_sample_1071.npy train_tokenizer_sample_2207.npy train_tokenizer_sample_3343.npy train_tokenizer_sample_447.npy train_tokenizer_sample_5615.npy\r\ntrain_tokenizer_sample_1072.npy train_tokenizer_sample_2208.npy train_tokenizer_sample_3344.npy train_tokenizer_sample_4480.npy train_tokenizer_sample_5616.npy\r\ntrain_tokenizer_sample_1073.npy train_tokenizer_sample_2209.npy train_tokenizer_sample_3345.npy train_tokenizer_sample_4481.npy train_tokenizer_sample_5617.npy\r\ntrain_tokenizer_sample_1074.npy train_tokenizer_sample_220.npy train_tokenizer_sample_3346.npy train_tokenizer_sample_4482.npy train_tokenizer_sample_5618.npy\r\ntrain_tokenizer_sample_1075.npy train_tokenizer_sample_2210.npy train_tokenizer_sample_3347.npy train_tokenizer_sample_4483.npy train_tokenizer_sample_5619.npy\r\ntrain_tokenizer_sample_1076.npy train_tokenizer_sample_2211.npy train_tokenizer_sample_3348.npy train_tokenizer_sample_4484.npy train_tokenizer_sample_561.npy\r\ntrain_tokenizer_sample_1077.npy train_tokenizer_sample_2212.npy train_tokenizer_sample_3349.npy train_tokenizer_sample_4485.npy train_tokenizer_sample_5620.npy\r\ntrain_tokenizer_sample_1078.npy train_tokenizer_sample_2213.npy train_tokenizer_sample_334.npy train_tokenizer_sample_4486.npy train_tokenizer_sample_5621.npy\r\ntrain_tokenizer_sample_1079.npy train_tokenizer_sample_2214.npy train_tokenizer_sample_3350.npy train_tokenizer_sample_4487.npy train_tokenizer_sample_5622.npy\r\ntrain_tokenizer_sample_107.npy train_tokenizer_sample_2215.npy train_tokenizer_sample_3351.npy train_tokenizer_sample_4488.npy train_tokenizer_sample_5623.npy\r\ntrain_tokenizer_sample_1080.npy train_tokenizer_sample_2216.npy train_tokenizer_sample_3352.npy train_tokenizer_sample_4489.npy train_tokenizer_sample_5624.npy\r\ntrain_tokenizer_sample_1081.npy train_tokenizer_sample_2217.npy train_tokenizer_sample_3353.npy train_tokenizer_sample_448.npy train_tokenizer_sample_5625.npy\r\ntrain_tokenizer_sample_1082.npy train_tokenizer_sample_2218.npy train_tokenizer_sample_3354.npy train_tokenizer_sample_4490.npy train_tokenizer_sample_5626.npy\r\ntrain_tokenizer_sample_1083.npy train_tokenizer_sample_2219.npy train_tokenizer_sample_3355.npy train_tokenizer_sample_4491.npy train_tokenizer_sample_5627.npy\r\ntrain_tokenizer_sample_1084.npy train_tokenizer_sample_221.npy train_tokenizer_sample_3356.npy train_tokenizer_sample_4492.npy train_tokenizer_sample_5628.npy\r\ntrain_tokenizer_sample_1085.npy train_tokenizer_sample_2220.npy train_tokenizer_sample_3357.npy train_tokenizer_sample_4493.npy train_tokenizer_sample_5629.npy\r\ntrain_tokenizer_sample_1086.npy train_tokenizer_sample_2221.npy train_tokenizer_sample_3358.npy train_tokenizer_sample_4494.npy train_tokenizer_sample_562.npy\r\ntrain_tokenizer_sample_1087.npy train_tokenizer_sample_2222.npy train_tokenizer_sample_3359.npy train_tokenizer_sample_4495.npy train_tokenizer_sample_5630.npy\r\ntrain_tokenizer_sample_1088.npy train_tokenizer_sample_2223.npy train_tokenizer_sample_335.npy train_tokenizer_sample_4496.npy train_tokenizer_sample_5631.npy\r\ntrain_tokenizer_sample_1089.npy train_tokenizer_sample_2224.npy train_tokenizer_sample_3360.npy train_tokenizer_sample_4497.npy train_tokenizer_sample_5632.npy\r\ntrain_tokenizer_sample_108.npy 
train_tokenizer_sample_2225.npy train_tokenizer_sample_3361.npy train_tokenizer_sample_4498.npy train_tokenizer_sample_5633.npy\r\ntrain_tokenizer_sample_1090.npy train_tokenizer_sample_2226.npy train_tokenizer_sample_3362.npy train_tokenizer_sample_4499.npy train_tokenizer_sample_5634.npy\r\ntrain_tokenizer_sample_1091.npy train_tokenizer_sample_2227.npy train_tokenizer_sample_3363.npy train_tokenizer_sample_449.npy train_tokenizer_sample_5635.npy\r\ntrain_tokenizer_sample_1092.npy train_tokenizer_sample_2228.npy train_tokenizer_sample_3364.npy train_tokenizer_sample_44.npy train_tokenizer_sample_5636.npy\r\ntrain_tokenizer_sample_1093.npy train_tokenizer_sample_2229.npy train_tokenizer_sample_3365.npy train_tokenizer_sample_4500.npy train_tokenizer_sample_5637.npy\r\ntrain_tokenizer_sample_1094.npy train_tokenizer_sample_222.npy train_tokenizer_sample_3366.npy train_tokenizer_sample_4501.npy train_tokenizer_sample_5638.npy\r\ntrain_tokenizer_sample_1095.npy train_tokenizer_sample_2230.npy train_tokenizer_sample_3367.npy train_tokenizer_sample_4502.npy train_tokenizer_sample_5639.npy\r\ntrain_tokenizer_sample_1096.npy train_tokenizer_sample_2231.npy train_tokenizer_sample_3368.npy train_tokenizer_sample_4503.npy train_tokenizer_sample_563.npy\r\ntrain_tokenizer_sample_1097.npy train_tokenizer_sample_2232.npy train_tokenizer_sample_3369.npy train_tokenizer_sample_4504.npy train_tokenizer_sample_5640.npy\r\ntrain_tokenizer_sample_1098.npy train_tokenizer_sample_2233.npy train_tokenizer_sample_336.npy train_tokenizer_sample_4505.npy train_tokenizer_sample_5641.npy\r\ntrain_tokenizer_sample_1099.npy train_tokenizer_sample_2234.npy train_tokenizer_sample_3370.npy train_tokenizer_sample_4506.npy train_tokenizer_sample_5642.npy\r\ntrain_tokenizer_sample_109.npy train_tokenizer_sample_2235.npy train_tokenizer_sample_3371.npy train_tokenizer_sample_4507.npy train_tokenizer_sample_5643.npy\r\ntrain_tokenizer_sample_10.npy train_tokenizer_sample_2236.npy train_tokenizer_sample_3372.npy train_tokenizer_sample_4508.npy train_tokenizer_sample_5644.npy\r\ntrain_tokenizer_sample_1100.npy train_tokenizer_sample_2237.npy train_tokenizer_sample_3373.npy train_tokenizer_sample_4509.npy train_tokenizer_sample_5645.npy\r\ntrain_tokenizer_sample_1101.npy train_tokenizer_sample_2238.npy train_tokenizer_sample_3374.npy train_tokenizer_sample_450.npy train_tokenizer_sample_5646.npy\r\ntrain_tokenizer_sample_1102.npy train_tokenizer_sample_2239.npy train_tokenizer_sample_3375.npy train_tokenizer_sample_4510.npy train_tokenizer_sample_5647.npy\r\ntrain_tokenizer_sample_1103.npy train_tokenizer_sample_223.npy train_tokenizer_sample_3376.npy train_tokenizer_sample_4511.npy train_tokenizer_sample_5648.npy\r\ntrain_tokenizer_sample_1104.npy train_tokenizer_sample_2240.npy train_tokenizer_sample_3377.npy train_tokenizer_sample_4512.npy train_tokenizer_sample_5649.npy\r\ntrain_tokenizer_sample_1105.npy train_tokenizer_sample_2241.npy train_tokenizer_sample_3378.npy train_tokenizer_sample_4513.npy train_tokenizer_sample_564.npy\r\ntrain_tokenizer_sample_1106.npy train_tokenizer_sample_2242.npy train_tokenizer_sample_3379.npy train_tokenizer_sample_4514.npy train_tokenizer_sample_5650.npy\r\ntrain_tokenizer_sample_1107.npy train_tokenizer_sample_2243.npy train_tokenizer_sample_337.npy train_tokenizer_sample_4515.npy train_tokenizer_sample_5651.npy\r\ntrain_tokenizer_sample_1108.npy train_tokenizer_sample_2244.npy train_tokenizer_sample_3380.npy train_tokenizer_sample_4516.npy 
train_tokenizer_sample_5652.npy\r\ntrain_tokenizer_sample_1109.npy train_tokenizer_sample_2245.npy train_tokenizer_sample_3381.npy train_tokenizer_sample_4517.npy train_tokenizer_sample_5653.npy\r\ntrain_tokenizer_sample_110.npy train_tokenizer_sample_2246.npy train_tokenizer_sample_3382.npy train_tokenizer_sample_4518.npy train_tokenizer_sample_5654.npy\r\ntrain_tokenizer_sample_1110.npy train_tokenizer_sample_2247.npy train_tokenizer_sample_3383.npy train_tokenizer_sample_4519.npy train_tokenizer_sample_5655.npy\r\ntrain_tokenizer_sample_1111.npy train_tokenizer_sample_2248.npy train_tokenizer_sample_3384.npy train_tokenizer_sample_451.npy train_tokenizer_sample_5656.npy\r\ntrain_tokenizer_sample_1112.npy train_tokenizer_sample_2249.npy train_tokenizer_sample_3385.npy train_tokenizer_sample_4520.npy train_tokenizer_sample_5657.npy\r\ntrain_tokenizer_sample_1113.npy train_tokenizer_sample_224.npy train_tokenizer_sample_3386.npy train_tokenizer_sample_4521.npy train_tokenizer_sample_5658.npy\r\ntrain_tokenizer_sample_1114.npy train_tokenizer_sample_2250.npy train_tokenizer_sample_3387.npy train_tokenizer_sample_4522.npy train_tokenizer_sample_5659.npy\r\ntrain_tokenizer_sample_1115.npy train_tokenizer_sample_2251.npy train_tokenizer_sample_3388.npy train_tokenizer_sample_4523.npy train_tokenizer_sample_565.npy\r\ntrain_tokenizer_sample_1116.npy train_tokenizer_sample_2252.npy train_tokenizer_sample_3389.npy train_tokenizer_sample_4524.npy train_tokenizer_sample_5660.npy\r\ntrain_tokenizer_sample_1117.npy train_tokenizer_sample_2253.npy train_tokenizer_sample_338.npy train_tokenizer_sample_4525.npy train_tokenizer_sample_5661.npy\r\ntrain_tokenizer_sample_1118.npy train_tokenizer_sample_2254.npy train_tokenizer_sample_3390.npy train_tokenizer_sample_4526.npy train_tokenizer_sample_5662.npy\r\ntrain_tokenizer_sample_1119.npy train_tokenizer_sample_2255.npy train_tokenizer_sample_3391.npy train_tokenizer_sample_4527.npy train_tokenizer_sample_5663.npy\r\ntrain_tokenizer_sample_111.npy train_tokenizer_sample_2256.npy train_tokenizer_sample_3392.npy train_tokenizer_sample_4528.npy train_tokenizer_sample_5664.npy\r\ntrain_tokenizer_sample_1120.npy train_tokenizer_sample_2257.npy train_tokenizer_sample_3393.npy train_tokenizer_sample_4529.npy train_tokenizer_sample_5665.npy\r\ntrain_tokenizer_sample_1121.npy train_tokenizer_sample_2258.npy train_tokenizer_sample_3394.npy train_tokenizer_sample_452.npy train_tokenizer_sample_5666.npy\r\ntrain_tokenizer_sample_1122.npy train_tokenizer_sample_2259.npy train_tokenizer_sample_3395.npy train_tokenizer_sample_4530.npy train_tokenizer_sample_5667.npy\r\ntrain_tokenizer_sample_1123.npy train_tokenizer_sample_225.npy train_tokenizer_sample_3396.npy train_tokenizer_sample_4531.npy train_tokenizer_sample_5668.npy\r\ntrain_tokenizer_sample_1124.npy train_tokenizer_sample_2260.npy train_tokenizer_sample_3397.npy train_tokenizer_sample_4532.npy train_tokenizer_sample_5669.npy\r\ntrain_tokenizer_sample_1125.npy train_tokenizer_sample_2261.npy train_tokenizer_sample_3398.npy train_tokenizer_sample_4533.npy train_tokenizer_sample_566.npy\r\ntrain_tokenizer_sample_1126.npy train_tokenizer_sample_2262.npy train_tokenizer_sample_3399.npy train_tokenizer_sample_4534.npy train_tokenizer_sample_5670.npy\r\ntrain_tokenizer_sample_1127.npy train_tokenizer_sample_2263.npy train_tokenizer_sample_339.npy train_tokenizer_sample_4535.npy train_tokenizer_sample_5671.npy\r\ntrain_tokenizer_sample_1128.npy train_tokenizer_sample_2264.npy train_tokenizer_sample_33.npy 
[... terminal `ls` output truncated: directory listing of train_tokenizer_sample_*.npy files (samples ~112 through ~6082 in this excerpt) ...]
train_tokenizer_sample_3811.npy train_tokenizer_sample_4948.npy train_tokenizer_sample_6083.npy\r\ntrain_tokenizer_sample_1540.npy train_tokenizer_sample_2677.npy train_tokenizer_sample_3812.npy train_tokenizer_sample_4949.npy train_tokenizer_sample_6084.npy\r\ntrain_tokenizer_sample_1541.npy train_tokenizer_sample_2678.npy train_tokenizer_sample_3813.npy train_tokenizer_sample_494.npy train_tokenizer_sample_6085.npy\r\ntrain_tokenizer_sample_1542.npy train_tokenizer_sample_2679.npy train_tokenizer_sample_3814.npy train_tokenizer_sample_4950.npy train_tokenizer_sample_6086.npy\r\ntrain_tokenizer_sample_1543.npy train_tokenizer_sample_267.npy train_tokenizer_sample_3815.npy train_tokenizer_sample_4951.npy train_tokenizer_sample_6087.npy\r\ntrain_tokenizer_sample_1544.npy train_tokenizer_sample_2680.npy train_tokenizer_sample_3816.npy train_tokenizer_sample_4952.npy train_tokenizer_sample_6088.npy\r\ntrain_tokenizer_sample_1545.npy train_tokenizer_sample_2681.npy train_tokenizer_sample_3817.npy train_tokenizer_sample_4953.npy train_tokenizer_sample_6089.npy\r\ntrain_tokenizer_sample_1546.npy train_tokenizer_sample_2682.npy train_tokenizer_sample_3818.npy train_tokenizer_sample_4954.npy train_tokenizer_sample_608.npy\r\ntrain_tokenizer_sample_1547.npy train_tokenizer_sample_2683.npy train_tokenizer_sample_3819.npy train_tokenizer_sample_4955.npy train_tokenizer_sample_6090.npy\r\ntrain_tokenizer_sample_1548.npy train_tokenizer_sample_2684.npy train_tokenizer_sample_381.npy train_tokenizer_sample_4956.npy train_tokenizer_sample_6091.npy\r\ntrain_tokenizer_sample_1549.npy train_tokenizer_sample_2685.npy train_tokenizer_sample_3820.npy train_tokenizer_sample_4957.npy train_tokenizer_sample_6092.npy\r\ntrain_tokenizer_sample_154.npy train_tokenizer_sample_2686.npy train_tokenizer_sample_3821.npy train_tokenizer_sample_4958.npy train_tokenizer_sample_6093.npy\r\ntrain_tokenizer_sample_1550.npy train_tokenizer_sample_2687.npy train_tokenizer_sample_3822.npy train_tokenizer_sample_4959.npy train_tokenizer_sample_6094.npy\r\ntrain_tokenizer_sample_1551.npy train_tokenizer_sample_2688.npy train_tokenizer_sample_3823.npy train_tokenizer_sample_495.npy train_tokenizer_sample_6095.npy\r\ntrain_tokenizer_sample_1552.npy train_tokenizer_sample_2689.npy train_tokenizer_sample_3824.npy train_tokenizer_sample_4960.npy train_tokenizer_sample_6096.npy\r\ntrain_tokenizer_sample_1553.npy train_tokenizer_sample_268.npy train_tokenizer_sample_3825.npy train_tokenizer_sample_4961.npy train_tokenizer_sample_6097.npy\r\ntrain_tokenizer_sample_1554.npy train_tokenizer_sample_2690.npy train_tokenizer_sample_3826.npy train_tokenizer_sample_4962.npy train_tokenizer_sample_6098.npy\r\ntrain_tokenizer_sample_1555.npy train_tokenizer_sample_2691.npy train_tokenizer_sample_3827.npy train_tokenizer_sample_4963.npy train_tokenizer_sample_6099.npy\r\ntrain_tokenizer_sample_1556.npy train_tokenizer_sample_2692.npy train_tokenizer_sample_3828.npy train_tokenizer_sample_4964.npy train_tokenizer_sample_609.npy\r\ntrain_tokenizer_sample_1557.npy train_tokenizer_sample_2693.npy train_tokenizer_sample_3829.npy train_tokenizer_sample_4965.npy train_tokenizer_sample_60.npy\r\ntrain_tokenizer_sample_1558.npy train_tokenizer_sample_2694.npy train_tokenizer_sample_382.npy train_tokenizer_sample_4966.npy train_tokenizer_sample_6100.npy\r\ntrain_tokenizer_sample_1559.npy train_tokenizer_sample_2695.npy train_tokenizer_sample_3830.npy train_tokenizer_sample_4967.npy train_tokenizer_sample_6101.npy\r\ntrain_tokenizer_sample_155.npy 
train_tokenizer_sample_2696.npy train_tokenizer_sample_3831.npy train_tokenizer_sample_4968.npy train_tokenizer_sample_6102.npy\r\ntrain_tokenizer_sample_1560.npy train_tokenizer_sample_2697.npy train_tokenizer_sample_3832.npy train_tokenizer_sample_4969.npy train_tokenizer_sample_6103.npy\r\ntrain_tokenizer_sample_1561.npy train_tokenizer_sample_2698.npy train_tokenizer_sample_3833.npy train_tokenizer_sample_496.npy train_tokenizer_sample_6104.npy\r\ntrain_tokenizer_sample_1562.npy train_tokenizer_sample_2699.npy train_tokenizer_sample_3834.npy train_tokenizer_sample_4970.npy train_tokenizer_sample_6105.npy\r\ntrain_tokenizer_sample_1563.npy train_tokenizer_sample_269.npy train_tokenizer_sample_3835.npy train_tokenizer_sample_4971.npy train_tokenizer_sample_6106.npy\r\ntrain_tokenizer_sample_1564.npy train_tokenizer_sample_26.npy train_tokenizer_sample_3836.npy train_tokenizer_sample_4972.npy train_tokenizer_sample_6107.npy\r\ntrain_tokenizer_sample_1565.npy train_tokenizer_sample_2700.npy train_tokenizer_sample_3837.npy train_tokenizer_sample_4973.npy train_tokenizer_sample_6108.npy\r\ntrain_tokenizer_sample_1566.npy train_tokenizer_sample_2701.npy train_tokenizer_sample_3838.npy train_tokenizer_sample_4974.npy train_tokenizer_sample_6109.npy\r\ntrain_tokenizer_sample_1567.npy train_tokenizer_sample_2702.npy train_tokenizer_sample_3839.npy train_tokenizer_sample_4975.npy train_tokenizer_sample_610.npy\r\ntrain_tokenizer_sample_1568.npy train_tokenizer_sample_2703.npy train_tokenizer_sample_383.npy train_tokenizer_sample_4976.npy train_tokenizer_sample_6110.npy\r\ntrain_tokenizer_sample_1569.npy train_tokenizer_sample_2704.npy train_tokenizer_sample_3840.npy train_tokenizer_sample_4977.npy train_tokenizer_sample_6111.npy\r\ntrain_tokenizer_sample_156.npy train_tokenizer_sample_2705.npy train_tokenizer_sample_3841.npy train_tokenizer_sample_4978.npy train_tokenizer_sample_6112.npy\r\ntrain_tokenizer_sample_1570.npy train_tokenizer_sample_2706.npy train_tokenizer_sample_3842.npy train_tokenizer_sample_4979.npy train_tokenizer_sample_6113.npy\r\ntrain_tokenizer_sample_1571.npy train_tokenizer_sample_2707.npy train_tokenizer_sample_3843.npy train_tokenizer_sample_497.npy train_tokenizer_sample_6114.npy\r\ntrain_tokenizer_sample_1572.npy train_tokenizer_sample_2708.npy train_tokenizer_sample_3844.npy train_tokenizer_sample_4980.npy train_tokenizer_sample_6115.npy\r\ntrain_tokenizer_sample_1573.npy train_tokenizer_sample_2709.npy train_tokenizer_sample_3845.npy train_tokenizer_sample_4981.npy train_tokenizer_sample_6116.npy\r\ntrain_tokenizer_sample_1574.npy train_tokenizer_sample_270.npy train_tokenizer_sample_3846.npy train_tokenizer_sample_4982.npy train_tokenizer_sample_6117.npy\r\ntrain_tokenizer_sample_1575.npy train_tokenizer_sample_2710.npy train_tokenizer_sample_3847.npy train_tokenizer_sample_4983.npy train_tokenizer_sample_6118.npy\r\ntrain_tokenizer_sample_1576.npy train_tokenizer_sample_2711.npy train_tokenizer_sample_3848.npy train_tokenizer_sample_4984.npy train_tokenizer_sample_6119.npy\r\ntrain_tokenizer_sample_1577.npy train_tokenizer_sample_2712.npy train_tokenizer_sample_3849.npy train_tokenizer_sample_4985.npy train_tokenizer_sample_611.npy\r\ntrain_tokenizer_sample_1578.npy train_tokenizer_sample_2713.npy train_tokenizer_sample_384.npy train_tokenizer_sample_4986.npy train_tokenizer_sample_6120.npy\r\ntrain_tokenizer_sample_1579.npy train_tokenizer_sample_2714.npy train_tokenizer_sample_3850.npy train_tokenizer_sample_4987.npy 
train_tokenizer_sample_6121.npy\r\ntrain_tokenizer_sample_157.npy train_tokenizer_sample_2715.npy train_tokenizer_sample_3851.npy train_tokenizer_sample_4988.npy train_tokenizer_sample_6122.npy\r\ntrain_tokenizer_sample_1580.npy train_tokenizer_sample_2716.npy train_tokenizer_sample_3852.npy train_tokenizer_sample_4989.npy train_tokenizer_sample_6123.npy\r\ntrain_tokenizer_sample_1581.npy train_tokenizer_sample_2717.npy train_tokenizer_sample_3853.npy train_tokenizer_sample_498.npy train_tokenizer_sample_6124.npy\r\ntrain_tokenizer_sample_1582.npy train_tokenizer_sample_2718.npy train_tokenizer_sample_3854.npy train_tokenizer_sample_4990.npy train_tokenizer_sample_6125.npy\r\ntrain_tokenizer_sample_1583.npy train_tokenizer_sample_2719.npy train_tokenizer_sample_3855.npy train_tokenizer_sample_4991.npy train_tokenizer_sample_6126.npy\r\ntrain_tokenizer_sample_1584.npy train_tokenizer_sample_271.npy train_tokenizer_sample_3856.npy train_tokenizer_sample_4992.npy train_tokenizer_sample_6127.npy\r\ntrain_tokenizer_sample_1585.npy train_tokenizer_sample_2720.npy train_tokenizer_sample_3857.npy train_tokenizer_sample_4993.npy train_tokenizer_sample_6128.npy\r\ntrain_tokenizer_sample_1586.npy train_tokenizer_sample_2721.npy train_tokenizer_sample_3858.npy train_tokenizer_sample_4994.npy train_tokenizer_sample_6129.npy\r\ntrain_tokenizer_sample_1587.npy train_tokenizer_sample_2722.npy train_tokenizer_sample_3859.npy train_tokenizer_sample_4995.npy train_tokenizer_sample_612.npy\r\ntrain_tokenizer_sample_1588.npy train_tokenizer_sample_2723.npy train_tokenizer_sample_385.npy train_tokenizer_sample_4996.npy train_tokenizer_sample_6130.npy\r\ntrain_tokenizer_sample_1589.npy train_tokenizer_sample_2724.npy train_tokenizer_sample_3860.npy train_tokenizer_sample_4997.npy train_tokenizer_sample_6131.npy\r\ntrain_tokenizer_sample_158.npy train_tokenizer_sample_2725.npy train_tokenizer_sample_3861.npy train_tokenizer_sample_4998.npy train_tokenizer_sample_6132.npy\r\ntrain_tokenizer_sample_1590.npy train_tokenizer_sample_2726.npy train_tokenizer_sample_3862.npy train_tokenizer_sample_4999.npy train_tokenizer_sample_6133.npy\r\ntrain_tokenizer_sample_1591.npy train_tokenizer_sample_2727.npy train_tokenizer_sample_3863.npy train_tokenizer_sample_499.npy train_tokenizer_sample_6134.npy\r\ntrain_tokenizer_sample_1592.npy train_tokenizer_sample_2728.npy train_tokenizer_sample_3864.npy train_tokenizer_sample_49.npy train_tokenizer_sample_6135.npy\r\ntrain_tokenizer_sample_1593.npy train_tokenizer_sample_2729.npy train_tokenizer_sample_3865.npy train_tokenizer_sample_4.npy train_tokenizer_sample_6136.npy\r\ntrain_tokenizer_sample_1594.npy train_tokenizer_sample_272.npy train_tokenizer_sample_3866.npy train_tokenizer_sample_5000.npy train_tokenizer_sample_6137.npy\r\ntrain_tokenizer_sample_1595.npy train_tokenizer_sample_2730.npy train_tokenizer_sample_3867.npy train_tokenizer_sample_5001.npy train_tokenizer_sample_6138.npy\r\ntrain_tokenizer_sample_1596.npy train_tokenizer_sample_2731.npy train_tokenizer_sample_3868.npy train_tokenizer_sample_5002.npy train_tokenizer_sample_6139.npy\r\ntrain_tokenizer_sample_1597.npy train_tokenizer_sample_2732.npy train_tokenizer_sample_3869.npy train_tokenizer_sample_5003.npy train_tokenizer_sample_613.npy\r\ntrain_tokenizer_sample_1598.npy train_tokenizer_sample_2733.npy train_tokenizer_sample_386.npy train_tokenizer_sample_5004.npy train_tokenizer_sample_6140.npy\r\ntrain_tokenizer_sample_1599.npy train_tokenizer_sample_2734.npy train_tokenizer_sample_3870.npy 
train_tokenizer_sample_5005.npy train_tokenizer_sample_6141.npy\r\ntrain_tokenizer_sample_159.npy train_tokenizer_sample_2735.npy train_tokenizer_sample_3871.npy train_tokenizer_sample_5006.npy train_tokenizer_sample_6142.npy\r\ntrain_tokenizer_sample_15.npy train_tokenizer_sample_2736.npy train_tokenizer_sample_3872.npy train_tokenizer_sample_5007.npy train_tokenizer_sample_6143.npy\r\ntrain_tokenizer_sample_1600.npy train_tokenizer_sample_2737.npy train_tokenizer_sample_3873.npy train_tokenizer_sample_5008.npy train_tokenizer_sample_6144.npy\r\ntrain_tokenizer_sample_1601.npy train_tokenizer_sample_2738.npy train_tokenizer_sample_3874.npy train_tokenizer_sample_5009.npy train_tokenizer_sample_6145.npy\r\ntrain_tokenizer_sample_1602.npy train_tokenizer_sample_2739.npy train_tokenizer_sample_3875.npy train_tokenizer_sample_500.npy train_tokenizer_sample_6146.npy\r\ntrain_tokenizer_sample_1603.npy train_tokenizer_sample_273.npy train_tokenizer_sample_3876.npy train_tokenizer_sample_5010.npy train_tokenizer_sample_6147.npy\r\ntrain_tokenizer_sample_1604.npy train_tokenizer_sample_2740.npy train_tokenizer_sample_3877.npy train_tokenizer_sample_5011.npy train_tokenizer_sample_6148.npy\r\ntrain_tokenizer_sample_1605.npy train_tokenizer_sample_2741.npy train_tokenizer_sample_3878.npy train_tokenizer_sample_5012.npy train_tokenizer_sample_6149.npy\r\ntrain_tokenizer_sample_1606.npy train_tokenizer_sample_2742.npy train_tokenizer_sample_3879.npy train_tokenizer_sample_5013.npy train_tokenizer_sample_614.npy\r\ntrain_tokenizer_sample_1607.npy train_tokenizer_sample_2743.npy train_tokenizer_sample_387.npy train_tokenizer_sample_5014.npy train_tokenizer_sample_6150.npy\r\ntrain_tokenizer_sample_1608.npy train_tokenizer_sample_2744.npy train_tokenizer_sample_3880.npy train_tokenizer_sample_5015.npy train_tokenizer_sample_6151.npy\r\ntrain_tokenizer_sample_1609.npy train_tokenizer_sample_2745.npy train_tokenizer_sample_3881.npy train_tokenizer_sample_5016.npy train_tokenizer_sample_6152.npy\r\ntrain_tokenizer_sample_160.npy train_tokenizer_sample_2746.npy train_tokenizer_sample_3882.npy train_tokenizer_sample_5017.npy train_tokenizer_sample_6153.npy\r\ntrain_tokenizer_sample_1610.npy train_tokenizer_sample_2747.npy train_tokenizer_sample_3883.npy train_tokenizer_sample_5018.npy train_tokenizer_sample_6154.npy\r\ntrain_tokenizer_sample_1611.npy train_tokenizer_sample_2748.npy train_tokenizer_sample_3884.npy train_tokenizer_sample_5019.npy train_tokenizer_sample_6155.npy\r\ntrain_tokenizer_sample_1612.npy train_tokenizer_sample_2749.npy train_tokenizer_sample_3885.npy train_tokenizer_sample_501.npy train_tokenizer_sample_6156.npy\r\ntrain_tokenizer_sample_1613.npy train_tokenizer_sample_274.npy train_tokenizer_sample_3886.npy train_tokenizer_sample_5020.npy train_tokenizer_sample_6157.npy\r\ntrain_tokenizer_sample_1614.npy train_tokenizer_sample_2750.npy train_tokenizer_sample_3887.npy train_tokenizer_sample_5021.npy train_tokenizer_sample_6158.npy\r\ntrain_tokenizer_sample_1615.npy train_tokenizer_sample_2751.npy train_tokenizer_sample_3888.npy train_tokenizer_sample_5022.npy train_tokenizer_sample_6159.npy\r\ntrain_tokenizer_sample_1616.npy train_tokenizer_sample_2752.npy train_tokenizer_sample_3889.npy train_tokenizer_sample_5023.npy train_tokenizer_sample_615.npy\r\ntrain_tokenizer_sample_1617.npy train_tokenizer_sample_2753.npy train_tokenizer_sample_388.npy train_tokenizer_sample_5024.npy train_tokenizer_sample_6160.npy\r\ntrain_tokenizer_sample_1618.npy train_tokenizer_sample_2754.npy 
train_tokenizer_sample_3890.npy train_tokenizer_sample_5025.npy train_tokenizer_sample_6161.npy\r\ntrain_tokenizer_sample_1619.npy train_tokenizer_sample_2755.npy train_tokenizer_sample_3891.npy train_tokenizer_sample_5026.npy train_tokenizer_sample_6162.npy\r\ntrain_tokenizer_sample_161.npy train_tokenizer_sample_2756.npy train_tokenizer_sample_3892.npy train_tokenizer_sample_5027.npy train_tokenizer_sample_6163.npy\r\ntrain_tokenizer_sample_1620.npy train_tokenizer_sample_2757.npy train_tokenizer_sample_3893.npy train_tokenizer_sample_5028.npy train_tokenizer_sample_6164.npy\r\ntrain_tokenizer_sample_1621.npy train_tokenizer_sample_2758.npy train_tokenizer_sample_3894.npy train_tokenizer_sample_5029.npy train_tokenizer_sample_6165.npy\r\ntrain_tokenizer_sample_1622.npy train_tokenizer_sample_2759.npy train_tokenizer_sample_3895.npy train_tokenizer_sample_502.npy train_tokenizer_sample_6166.npy\r\ntrain_tokenizer_sample_1623.npy train_tokenizer_sample_275.npy train_tokenizer_sample_3896.npy train_tokenizer_sample_5030.npy train_tokenizer_sample_6167.npy\r\ntrain_tokenizer_sample_1624.npy train_tokenizer_sample_2760.npy train_tokenizer_sample_3897.npy train_tokenizer_sample_5031.npy train_tokenizer_sample_6168.npy\r\ntrain_tokenizer_sample_1625.npy train_tokenizer_sample_2761.npy train_tokenizer_sample_3898.npy train_tokenizer_sample_5032.npy train_tokenizer_sample_6169.npy\r\ntrain_tokenizer_sample_1626.npy train_tokenizer_sample_2762.npy train_tokenizer_sample_3899.npy train_tokenizer_sample_5033.npy train_tokenizer_sample_616.npy\r\ntrain_tokenizer_sample_1627.npy train_tokenizer_sample_2763.npy train_tokenizer_sample_389.npy train_tokenizer_sample_5034.npy train_tokenizer_sample_6170.npy\r\ntrain_tokenizer_sample_1628.npy train_tokenizer_sample_2764.npy train_tokenizer_sample_38.npy train_tokenizer_sample_5035.npy train_tokenizer_sample_6171.npy\r\ntrain_tokenizer_sample_1629.npy train_tokenizer_sample_2765.npy train_tokenizer_sample_3900.npy train_tokenizer_sample_5036.npy train_tokenizer_sample_6172.npy\r\ntrain_tokenizer_sample_162.npy train_tokenizer_sample_2766.npy train_tokenizer_sample_3901.npy train_tokenizer_sample_5037.npy train_tokenizer_sample_6173.npy\r\ntrain_tokenizer_sample_1630.npy train_tokenizer_sample_2767.npy train_tokenizer_sample_3902.npy train_tokenizer_sample_5038.npy train_tokenizer_sample_6174.npy\r\ntrain_tokenizer_sample_1631.npy train_tokenizer_sample_2768.npy train_tokenizer_sample_3903.npy train_tokenizer_sample_5039.npy train_tokenizer_sample_6175.npy\r\ntrain_tokenizer_sample_1632.npy train_tokenizer_sample_2769.npy train_tokenizer_sample_3904.npy train_tokenizer_sample_503.npy train_tokenizer_sample_6176.npy\r\ntrain_tokenizer_sample_1633.npy train_tokenizer_sample_276.npy train_tokenizer_sample_3905.npy train_tokenizer_sample_5040.npy train_tokenizer_sample_6177.npy\r\ntrain_tokenizer_sample_1634.npy train_tokenizer_sample_2770.npy train_tokenizer_sample_3906.npy train_tokenizer_sample_5041.npy train_tokenizer_sample_6178.npy\r\ntrain_tokenizer_sample_1635.npy train_tokenizer_sample_2771.npy train_tokenizer_sample_3907.npy train_tokenizer_sample_5042.npy train_tokenizer_sample_6179.npy\r\ntrain_tokenizer_sample_1636.npy train_tokenizer_sample_2772.npy train_tokenizer_sample_3908.npy train_tokenizer_sample_5043.npy train_tokenizer_sample_617.npy\r\ntrain_tokenizer_sample_1637.npy train_tokenizer_sample_2773.npy train_tokenizer_sample_3909.npy train_tokenizer_sample_5044.npy train_tokenizer_sample_6180.npy\r\ntrain_tokenizer_sample_1638.npy 
train_tokenizer_sample_2774.npy train_tokenizer_sample_390.npy train_tokenizer_sample_5045.npy train_tokenizer_sample_6181.npy\r\ntrain_tokenizer_sample_1639.npy train_tokenizer_sample_2775.npy train_tokenizer_sample_3910.npy train_tokenizer_sample_5046.npy train_tokenizer_sample_6182.npy\r\ntrain_tokenizer_sample_163.npy train_tokenizer_sample_2776.npy train_tokenizer_sample_3911.npy train_tokenizer_sample_5047.npy train_tokenizer_sample_6183.npy\r\ntrain_tokenizer_sample_1640.npy train_tokenizer_sample_2777.npy train_tokenizer_sample_3912.npy train_tokenizer_sample_5048.npy train_tokenizer_sample_6184.npy\r\ntrain_tokenizer_sample_1641.npy train_tokenizer_sample_2778.npy train_tokenizer_sample_3913.npy train_tokenizer_sample_5049.npy train_tokenizer_sample_6185.npy\r\ntrain_tokenizer_sample_1642.npy train_tokenizer_sample_2779.npy train_tokenizer_sample_3914.npy train_tokenizer_sample_504.npy train_tokenizer_sample_6186.npy\r\ntrain_tokenizer_sample_1643.npy train_tokenizer_sample_277.npy train_tokenizer_sample_3915.npy train_tokenizer_sample_5050.npy train_tokenizer_sample_6187.npy\r\ntrain_tokenizer_sample_1644.npy train_tokenizer_sample_2780.npy train_tokenizer_sample_3916.npy train_tokenizer_sample_5051.npy train_tokenizer_sample_6188.npy\r\ntrain_tokenizer_sample_1645.npy train_tokenizer_sample_2781.npy train_tokenizer_sample_3917.npy train_tokenizer_sample_5052.npy train_tokenizer_sample_6189.npy\r\ntrain_tokenizer_sample_1646.npy train_tokenizer_sample_2782.npy train_tokenizer_sample_3918.npy train_tokenizer_sample_5053.npy train_tokenizer_sample_618.npy\r\ntrain_tokenizer_sample_1647.npy train_tokenizer_sample_2783.npy train_tokenizer_sample_3919.npy train_tokenizer_sample_5054.npy train_tokenizer_sample_6190.npy\r\ntrain_tokenizer_sample_1648.npy train_tokenizer_sample_2784.npy train_tokenizer_sample_391.npy train_tokenizer_sample_5055.npy train_tokenizer_sample_6191.npy\r\ntrain_tokenizer_sample_1649.npy train_tokenizer_sample_2785.npy train_tokenizer_sample_3920.npy train_tokenizer_sample_5056.npy train_tokenizer_sample_6192.npy\r\ntrain_tokenizer_sample_164.npy train_tokenizer_sample_2786.npy train_tokenizer_sample_3921.npy train_tokenizer_sample_5057.npy train_tokenizer_sample_6193.npy\r\ntrain_tokenizer_sample_1650.npy train_tokenizer_sample_2787.npy train_tokenizer_sample_3922.npy train_tokenizer_sample_5058.npy train_tokenizer_sample_6194.npy\r\ntrain_tokenizer_sample_1651.npy train_tokenizer_sample_2788.npy train_tokenizer_sample_3923.npy train_tokenizer_sample_5059.npy train_tokenizer_sample_6195.npy\r\ntrain_tokenizer_sample_1652.npy train_tokenizer_sample_2789.npy train_tokenizer_sample_3924.npy train_tokenizer_sample_505.npy train_tokenizer_sample_6196.npy\r\ntrain_tokenizer_sample_1653.npy train_tokenizer_sample_278.npy train_tokenizer_sample_3925.npy train_tokenizer_sample_5060.npy train_tokenizer_sample_6197.npy\r\ntrain_tokenizer_sample_1654.npy train_tokenizer_sample_2790.npy train_tokenizer_sample_3926.npy train_tokenizer_sample_5061.npy train_tokenizer_sample_6198.npy\r\ntrain_tokenizer_sample_1655.npy train_tokenizer_sample_2791.npy train_tokenizer_sample_3927.npy train_tokenizer_sample_5062.npy train_tokenizer_sample_6199.npy\r\ntrain_tokenizer_sample_1656.npy train_tokenizer_sample_2792.npy train_tokenizer_sample_3928.npy train_tokenizer_sample_5063.npy train_tokenizer_sample_619.npy\r\ntrain_tokenizer_sample_1657.npy train_tokenizer_sample_2793.npy train_tokenizer_sample_3929.npy train_tokenizer_sample_5064.npy 
train_tokenizer_sample_61.npy\r\ntrain_tokenizer_sample_1658.npy train_tokenizer_sample_2794.npy train_tokenizer_sample_392.npy train_tokenizer_sample_5065.npy train_tokenizer_sample_6200.npy\r\ntrain_tokenizer_sample_1659.npy train_tokenizer_sample_2795.npy train_tokenizer_sample_3930.npy train_tokenizer_sample_5066.npy train_tokenizer_sample_6201.npy\r\ntrain_tokenizer_sample_165.npy train_tokenizer_sample_2796.npy train_tokenizer_sample_3931.npy train_tokenizer_sample_5067.npy train_tokenizer_sample_6202.npy\r\ntrain_tokenizer_sample_1660.npy train_tokenizer_sample_2797.npy train_tokenizer_sample_3932.npy train_tokenizer_sample_5068.npy train_tokenizer_sample_6203.npy\r\ntrain_tokenizer_sample_1661.npy train_tokenizer_sample_2798.npy train_tokenizer_sample_3933.npy train_tokenizer_sample_5069.npy train_tokenizer_sample_6204.npy\r\ntrain_tokenizer_sample_1662.npy train_tokenizer_sample_2799.npy train_tokenizer_sample_3934.npy train_tokenizer_sample_506.npy train_tokenizer_sample_6205.npy\r\ntrain_tokenizer_sample_1663.npy train_tokenizer_sample_279.npy train_tokenizer_sample_3935.npy train_tokenizer_sample_5070.npy train_tokenizer_sample_6206.npy\r\ntrain_tokenizer_sample_1664.npy train_tokenizer_sample_27.npy train_tokenizer_sample_3936.npy train_tokenizer_sample_5071.npy train_tokenizer_sample_6207.npy\r\ntrain_tokenizer_sample_1665.npy train_tokenizer_sample_2800.npy train_tokenizer_sample_3937.npy train_tokenizer_sample_5072.npy train_tokenizer_sample_6208.npy\r\ntrain_tokenizer_sample_1666.npy train_tokenizer_sample_2801.npy train_tokenizer_sample_3938.npy train_tokenizer_sample_5073.npy train_tokenizer_sample_6209.npy\r\ntrain_tokenizer_sample_1667.npy train_tokenizer_sample_2802.npy train_tokenizer_sample_3939.npy train_tokenizer_sample_5074.npy train_tokenizer_sample_620.npy\r\ntrain_tokenizer_sample_1668.npy train_tokenizer_sample_2803.npy train_tokenizer_sample_393.npy train_tokenizer_sample_5075.npy train_tokenizer_sample_6210.npy\r\ntrain_tokenizer_sample_1669.npy train_tokenizer_sample_2804.npy train_tokenizer_sample_3940.npy train_tokenizer_sample_5076.npy train_tokenizer_sample_6211.npy\r\ntrain_tokenizer_sample_166.npy train_tokenizer_sample_2805.npy train_tokenizer_sample_3941.npy train_tokenizer_sample_5077.npy train_tokenizer_sample_6212.npy\r\ntrain_tokenizer_sample_1670.npy train_tokenizer_sample_2806.npy train_tokenizer_sample_3942.npy train_tokenizer_sample_5078.npy train_tokenizer_sample_6213.npy\r\ntrain_tokenizer_sample_1671.npy train_tokenizer_sample_2807.npy train_tokenizer_sample_3943.npy train_tokenizer_sample_5079.npy train_tokenizer_sample_6214.npy\r\ntrain_tokenizer_sample_1672.npy train_tokenizer_sample_2808.npy train_tokenizer_sample_3944.npy train_tokenizer_sample_507.npy train_tokenizer_sample_6215.npy\r\ntrain_tokenizer_sample_1673.npy train_tokenizer_sample_2809.npy train_tokenizer_sample_3945.npy train_tokenizer_sample_5080.npy train_tokenizer_sample_6216.npy\r\ntrain_tokenizer_sample_1674.npy train_tokenizer_sample_280.npy train_tokenizer_sample_3946.npy train_tokenizer_sample_5081.npy train_tokenizer_sample_6217.npy\r\ntrain_tokenizer_sample_1675.npy train_tokenizer_sample_2810.npy train_tokenizer_sample_3947.npy train_tokenizer_sample_5082.npy train_tokenizer_sample_6218.npy\r\ntrain_tokenizer_sample_1676.npy train_tokenizer_sample_2811.npy train_tokenizer_sample_3948.npy train_tokenizer_sample_5083.npy train_tokenizer_sample_6219.npy\r\ntrain_tokenizer_sample_1677.npy train_tokenizer_sample_2812.npy train_tokenizer_sample_3949.npy 
train_tokenizer_sample_5084.npy train_tokenizer_sample_621.npy\r\ntrain_tokenizer_sample_1678.npy train_tokenizer_sample_2813.npy train_tokenizer_sample_394.npy train_tokenizer_sample_5085.npy train_tokenizer_sample_6220.npy\r\ntrain_tokenizer_sample_1679.npy train_tokenizer_sample_2814.npy train_tokenizer_sample_3950.npy train_tokenizer_sample_5086.npy train_tokenizer_sample_6221.npy\r\ntrain_tokenizer_sample_167.npy train_tokenizer_sample_2815.npy train_tokenizer_sample_3951.npy train_tokenizer_sample_5087.npy train_tokenizer_sample_6222.npy\r\ntrain_tokenizer_sample_1680.npy train_tokenizer_sample_2816.npy train_tokenizer_sample_3952.npy train_tokenizer_sample_5088.npy train_tokenizer_sample_6223.npy\r\ntrain_tokenizer_sample_1681.npy train_tokenizer_sample_2817.npy train_tokenizer_sample_3953.npy train_tokenizer_sample_5089.npy train_tokenizer_sample_6224.npy\r\ntrain_tokenizer_sample_1682.npy train_tokenizer_sample_2818.npy train_tokenizer_sample_3954.npy train_tokenizer_sample_508.npy train_tokenizer_sample_6225.npy\r\ntrain_tokenizer_sample_1683.npy train_tokenizer_sample_2819.npy train_tokenizer_sample_3955.npy train_tokenizer_sample_5090.npy train_tokenizer_sample_6226.npy\r\ntrain_tokenizer_sample_1684.npy train_tokenizer_sample_281.npy train_tokenizer_sample_3956.npy train_tokenizer_sample_5091.npy train_tokenizer_sample_6227.npy\r\ntrain_tokenizer_sample_1685.npy train_tokenizer_sample_2820.npy train_tokenizer_sample_3957.npy train_tokenizer_sample_5092.npy train_tokenizer_sample_6228.npy\r\ntrain_tokenizer_sample_1686.npy train_tokenizer_sample_2821.npy train_tokenizer_sample_3958.npy train_tokenizer_sample_5093.npy train_tokenizer_sample_6229.npy\r\ntrain_tokenizer_sample_1687.npy train_tokenizer_sample_2822.npy train_tokenizer_sample_3959.npy train_tokenizer_sample_5094.npy train_tokenizer_sample_622.npy\r\ntrain_tokenizer_sample_1688.npy train_tokenizer_sample_2823.npy train_tokenizer_sample_395.npy train_tokenizer_sample_5095.npy train_tokenizer_sample_6230.npy\r\ntrain_tokenizer_sample_1689.npy train_tokenizer_sample_2824.npy train_tokenizer_sample_3960.npy train_tokenizer_sample_5096.npy train_tokenizer_sample_6231.npy\r\ntrain_tokenizer_sample_168.npy train_tokenizer_sample_2825.npy train_tokenizer_sample_3961.npy train_tokenizer_sample_5097.npy train_tokenizer_sample_6232.npy\r\ntrain_tokenizer_sample_1690.npy train_tokenizer_sample_2826.npy train_tokenizer_sample_3962.npy train_tokenizer_sample_5098.npy train_tokenizer_sample_6233.npy\r\ntrain_tokenizer_sample_1691.npy train_tokenizer_sample_2827.npy train_tokenizer_sample_3963.npy train_tokenizer_sample_5099.npy train_tokenizer_sample_6234.npy\r\ntrain_tokenizer_sample_1692.npy train_tokenizer_sample_2828.npy train_tokenizer_sample_3964.npy train_tokenizer_sample_509.npy train_tokenizer_sample_6235.npy\r\ntrain_tokenizer_sample_1693.npy train_tokenizer_sample_2829.npy train_tokenizer_sample_3965.npy train_tokenizer_sample_50.npy train_tokenizer_sample_6236.npy\r\ntrain_tokenizer_sample_1694.npy train_tokenizer_sample_282.npy train_tokenizer_sample_3966.npy train_tokenizer_sample_5100.npy train_tokenizer_sample_6237.npy\r\ntrain_tokenizer_sample_1695.npy train_tokenizer_sample_2830.npy train_tokenizer_sample_3967.npy train_tokenizer_sample_5101.npy train_tokenizer_sample_6238.npy\r\ntrain_tokenizer_sample_1696.npy train_tokenizer_sample_2831.npy train_tokenizer_sample_3968.npy train_tokenizer_sample_5102.npy train_tokenizer_sample_6239.npy\r\ntrain_tokenizer_sample_1697.npy train_tokenizer_sample_2832.npy 
train_tokenizer_sample_3969.npy train_tokenizer_sample_5103.npy train_tokenizer_sample_623.npy\r\ntrain_tokenizer_sample_1698.npy train_tokenizer_sample_2833.npy train_tokenizer_sample_396.npy train_tokenizer_sample_5104.npy train_tokenizer_sample_6240.npy\r\ntrain_tokenizer_sample_1699.npy train_tokenizer_sample_2834.npy train_tokenizer_sample_3970.npy train_tokenizer_sample_5105.npy train_tokenizer_sample_6241.npy\r\ntrain_tokenizer_sample_169.npy train_tokenizer_sample_2835.npy train_tokenizer_sample_3971.npy train_tokenizer_sample_5106.npy train_tokenizer_sample_6242.npy\r\ntrain_tokenizer_sample_16.npy train_tokenizer_sample_2836.npy train_tokenizer_sample_3972.npy train_tokenizer_sample_5107.npy train_tokenizer_sample_6243.npy\r\ntrain_tokenizer_sample_1700.npy train_tokenizer_sample_2837.npy train_tokenizer_sample_3973.npy train_tokenizer_sample_5108.npy train_tokenizer_sample_6244.npy\r\ntrain_tokenizer_sample_1701.npy train_tokenizer_sample_2838.npy train_tokenizer_sample_3974.npy train_tokenizer_sample_5109.npy train_tokenizer_sample_6245.npy\r\ntrain_tokenizer_sample_1702.npy train_tokenizer_sample_2839.npy train_tokenizer_sample_3975.npy train_tokenizer_sample_510.npy train_tokenizer_sample_6246.npy\r\ntrain_tokenizer_sample_1703.npy train_tokenizer_sample_283.npy train_tokenizer_sample_3976.npy train_tokenizer_sample_5110.npy train_tokenizer_sample_6247.npy\r\ntrain_tokenizer_sample_1704.npy train_tokenizer_sample_2840.npy train_tokenizer_sample_3977.npy train_tokenizer_sample_5111.npy train_tokenizer_sample_6248.npy\r\ntrain_tokenizer_sample_1705.npy train_tokenizer_sample_2841.npy train_tokenizer_sample_3978.npy train_tokenizer_sample_5112.npy train_tokenizer_sample_6249.npy\r\ntrain_tokenizer_sample_1706.npy train_tokenizer_sample_2842.npy train_tokenizer_sample_3979.npy train_tokenizer_sample_5113.npy train_tokenizer_sample_624.npy\r\ntrain_tokenizer_sample_1707.npy train_tokenizer_sample_2843.npy train_tokenizer_sample_397.npy train_tokenizer_sample_5114.npy train_tokenizer_sample_6250.npy\r\ntrain_tokenizer_sample_1708.npy train_tokenizer_sample_2844.npy train_tokenizer_sample_3980.npy train_tokenizer_sample_5115.npy train_tokenizer_sample_6251.npy\r\ntrain_tokenizer_sample_1709.npy train_tokenizer_sample_2845.npy train_tokenizer_sample_3981.npy train_tokenizer_sample_5116.npy train_tokenizer_sample_6252.npy\r\ntrain_tokenizer_sample_170.npy train_tokenizer_sample_2846.npy train_tokenizer_sample_3982.npy train_tokenizer_sample_5117.npy train_tokenizer_sample_6253.npy\r\ntrain_tokenizer_sample_1710.npy train_tokenizer_sample_2847.npy train_tokenizer_sample_3983.npy train_tokenizer_sample_5118.npy train_tokenizer_sample_6254.npy\r\ntrain_tokenizer_sample_1711.npy train_tokenizer_sample_2848.npy train_tokenizer_sample_3984.npy train_tokenizer_sample_5119.npy train_tokenizer_sample_6255.npy\r\ntrain_tokenizer_sample_1712.npy train_tokenizer_sample_2849.npy train_tokenizer_sample_3985.npy train_tokenizer_sample_511.npy train_tokenizer_sample_6256.npy\r\ntrain_tokenizer_sample_1713.npy train_tokenizer_sample_284.npy train_tokenizer_sample_3986.npy train_tokenizer_sample_5120.npy train_tokenizer_sample_6257.npy\r\ntrain_tokenizer_sample_1714.npy train_tokenizer_sample_2850.npy train_tokenizer_sample_3987.npy train_tokenizer_sample_5121.npy train_tokenizer_sample_6258.npy\r\ntrain_tokenizer_sample_1715.npy train_tokenizer_sample_2851.npy train_tokenizer_sample_3988.npy train_tokenizer_sample_5122.npy train_tokenizer_sample_6259.npy\r\ntrain_tokenizer_sample_1716.npy 
train_tokenizer_sample_2852.npy train_tokenizer_sample_3989.npy train_tokenizer_sample_5123.npy train_tokenizer_sample_625.npy\r\ntrain_tokenizer_sample_1717.npy train_tokenizer_sample_2853.npy train_tokenizer_sample_398.npy train_tokenizer_sample_5124.npy train_tokenizer_sample_6260.npy\r\ntrain_tokenizer_sample_1718.npy train_tokenizer_sample_2854.npy train_tokenizer_sample_3990.npy train_tokenizer_sample_5125.npy train_tokenizer_sample_6261.npy\r\ntrain_tokenizer_sample_1719.npy train_tokenizer_sample_2855.npy train_tokenizer_sample_3991.npy train_tokenizer_sample_5126.npy train_tokenizer_sample_6262.npy\r\ntrain_tokenizer_sample_171.npy train_tokenizer_sample_2856.npy train_tokenizer_sample_3992.npy train_tokenizer_sample_5127.npy train_tokenizer_sample_6263.npy\r\ntrain_tokenizer_sample_1720.npy train_tokenizer_sample_2857.npy train_tokenizer_sample_3993.npy train_tokenizer_sample_5128.npy train_tokenizer_sample_6264.npy\r\ntrain_tokenizer_sample_1721.npy train_tokenizer_sample_2858.npy train_tokenizer_sample_3994.npy train_tokenizer_sample_5129.npy train_tokenizer_sample_6265.npy\r\ntrain_tokenizer_sample_1722.npy train_tokenizer_sample_2859.npy train_tokenizer_sample_3995.npy train_tokenizer_sample_512.npy train_tokenizer_sample_6266.npy\r\ntrain_tokenizer_sample_1723.npy train_tokenizer_sample_285.npy train_tokenizer_sample_3996.npy train_tokenizer_sample_5130.npy train_tokenizer_sample_6267.npy\r\ntrain_tokenizer_sample_1724.npy train_tokenizer_sample_2860.npy train_tokenizer_sample_3997.npy train_tokenizer_sample_5131.npy train_tokenizer_sample_6268.npy\r\ntrain_tokenizer_sample_1725.npy train_tokenizer_sample_2861.npy train_tokenizer_sample_3998.npy train_tokenizer_sample_5132.npy train_tokenizer_sample_6269.npy\r\ntrain_tokenizer_sample_1726.npy train_tokenizer_sample_2862.npy train_tokenizer_sample_3999.npy train_tokenizer_sample_5133.npy train_tokenizer_sample_626.npy\r\ntrain_tokenizer_sample_1727.npy train_tokenizer_sample_2863.npy train_tokenizer_sample_399.npy train_tokenizer_sample_5134.npy train_tokenizer_sample_6270.npy\r\ntrain_tokenizer_sample_1728.npy train_tokenizer_sample_2864.npy train_tokenizer_sample_39.npy train_tokenizer_sample_5135.npy train_tokenizer_sample_6271.npy\r\ntrain_tokenizer_sample_1729.npy train_tokenizer_sample_2865.npy train_tokenizer_sample_3.npy train_tokenizer_sample_5136.npy train_tokenizer_sample_6272.npy\r\ntrain_tokenizer_sample_172.npy train_tokenizer_sample_2866.npy train_tokenizer_sample_4000.npy train_tokenizer_sample_5137.npy train_tokenizer_sample_6273.npy\r\ntrain_tokenizer_sample_1730.npy train_tokenizer_sample_2867.npy train_tokenizer_sample_4001.npy train_tokenizer_sample_5138.npy train_tokenizer_sample_6274.npy\r\ntrain_tokenizer_sample_1731.npy train_tokenizer_sample_2868.npy train_tokenizer_sample_4002.npy train_tokenizer_sample_5139.npy train_tokenizer_sample_6275.npy\r\ntrain_tokenizer_sample_1732.npy train_tokenizer_sample_2869.npy train_tokenizer_sample_4003.npy train_tokenizer_sample_513.npy train_tokenizer_sample_6276.npy\r\ntrain_tokenizer_sample_1733.npy train_tokenizer_sample_286.npy train_tokenizer_sample_4004.npy train_tokenizer_sample_5140.npy train_tokenizer_sample_6277.npy\r\ntrain_tokenizer_sample_1734.npy train_tokenizer_sample_2870.npy train_tokenizer_sample_4005.npy train_tokenizer_sample_5141.npy train_tokenizer_sample_6278.npy\r\ntrain_tokenizer_sample_1735.npy train_tokenizer_sample_2871.npy train_tokenizer_sample_4006.npy train_tokenizer_sample_5142.npy 
train_tokenizer_sample_6279.npy\r\ntrain_tokenizer_sample_1736.npy train_tokenizer_sample_2872.npy train_tokenizer_sample_4007.npy train_tokenizer_sample_5143.npy train_tokenizer_sample_627.npy\r\ntrain_tokenizer_sample_1737.npy train_tokenizer_sample_2873.npy train_tokenizer_sample_4008.npy train_tokenizer_sample_5144.npy train_tokenizer_sample_6280.npy\r\ntrain_tokenizer_sample_1738.npy train_tokenizer_sample_2874.npy train_tokenizer_sample_4009.npy train_tokenizer_sample_5145.npy train_tokenizer_sample_6281.npy\r\ntrain_tokenizer_sample_1739.npy train_tokenizer_sample_2875.npy train_tokenizer_sample_400.npy train_tokenizer_sample_5146.npy train_tokenizer_sample_6282.npy\r\ntrain_tokenizer_sample_173.npy train_tokenizer_sample_2876.npy train_tokenizer_sample_4010.npy train_tokenizer_sample_5147.npy train_tokenizer_sample_6283.npy\r\ntrain_tokenizer_sample_1740.npy train_tokenizer_sample_2877.npy train_tokenizer_sample_4011.npy train_tokenizer_sample_5148.npy train_tokenizer_sample_6284.npy\r\ntrain_tokenizer_sample_1741.npy train_tokenizer_sample_2878.npy train_tokenizer_sample_4012.npy train_tokenizer_sample_5149.npy train_tokenizer_sample_6285.npy\r\ntrain_tokenizer_sample_1742.npy train_tokenizer_sample_2879.npy train_tokenizer_sample_4013.npy train_tokenizer_sample_514.npy train_tokenizer_sample_6286.npy\r\ntrain_tokenizer_sample_1743.npy train_tokenizer_sample_287.npy train_tokenizer_sample_4014.npy train_tokenizer_sample_5150.npy train_tokenizer_sample_6287.npy\r\ntrain_tokenizer_sample_1744.npy train_tokenizer_sample_2880.npy train_tokenizer_sample_4015.npy train_tokenizer_sample_5151.npy train_tokenizer_sample_6288.npy\r\ntrain_tokenizer_sample_1745.npy train_tokenizer_sample_2881.npy train_tokenizer_sample_4016.npy train_tokenizer_sample_5152.npy train_tokenizer_sample_6289.npy\r\ntrain_tokenizer_sample_1746.npy train_tokenizer_sample_2882.npy train_tokenizer_sample_4017.npy train_tokenizer_sample_5153.npy train_tokenizer_sample_628.npy\r\ntrain_tokenizer_sample_1747.npy train_tokenizer_sample_2883.npy train_tokenizer_sample_4018.npy train_tokenizer_sample_5154.npy train_tokenizer_sample_6290.npy\r\ntrain_tokenizer_sample_1748.npy train_tokenizer_sample_2884.npy train_tokenizer_sample_4019.npy train_tokenizer_sample_5155.npy train_tokenizer_sample_6291.npy\r\ntrain_tokenizer_sample_1749.npy train_tokenizer_sample_2885.npy train_tokenizer_sample_401.npy train_tokenizer_sample_5156.npy train_tokenizer_sample_6292.npy\r\ntrain_tokenizer_sample_174.npy train_tokenizer_sample_2886.npy train_tokenizer_sample_4020.npy train_tokenizer_sample_5157.npy train_tokenizer_sample_6293.npy\r\ntrain_tokenizer_sample_1750.npy train_tokenizer_sample_2887.npy train_tokenizer_sample_4021.npy train_tokenizer_sample_5158.npy train_tokenizer_sample_6294.npy\r\ntrain_tokenizer_sample_1751.npy train_tokenizer_sample_2888.npy train_tokenizer_sample_4022.npy train_tokenizer_sample_5159.npy train_tokenizer_sample_6295.npy\r\ntrain_tokenizer_sample_1752.npy train_tokenizer_sample_2889.npy train_tokenizer_sample_4023.npy train_tokenizer_sample_515.npy train_tokenizer_sample_6296.npy\r\ntrain_tokenizer_sample_1753.npy train_tokenizer_sample_288.npy train_tokenizer_sample_4024.npy train_tokenizer_sample_5160.npy train_tokenizer_sample_6297.npy\r\ntrain_tokenizer_sample_1754.npy train_tokenizer_sample_2890.npy train_tokenizer_sample_4025.npy train_tokenizer_sample_5161.npy train_tokenizer_sample_6298.npy\r\ntrain_tokenizer_sample_1755.npy train_tokenizer_sample_2891.npy train_tokenizer_sample_4026.npy 
train_tokenizer_sample_5162.npy train_tokenizer_sample_6299.npy\r\ntrain_tokenizer_sample_1756.npy train_tokenizer_sample_2892.npy train_tokenizer_sample_4027.npy train_tokenizer_sample_5163.npy train_tokenizer_sample_629.npy\r\ntrain_tokenizer_sample_1757.npy train_tokenizer_sample_2893.npy train_tokenizer_sample_4028.npy train_tokenizer_sample_5164.npy train_tokenizer_sample_62.npy\r\ntrain_tokenizer_sample_1758.npy train_tokenizer_sample_2894.npy train_tokenizer_sample_4029.npy train_tokenizer_sample_5165.npy train_tokenizer_sample_6300.npy\r\ntrain_tokenizer_sample_1759.npy train_tokenizer_sample_2895.npy train_tokenizer_sample_402.npy train_tokenizer_sample_5166.npy train_tokenizer_sample_6301.npy\r\ntrain_tokenizer_sample_175.npy train_tokenizer_sample_2896.npy train_tokenizer_sample_4030.npy train_tokenizer_sample_5167.npy train_tokenizer_sample_6302.npy\r\ntrain_tokenizer_sample_1760.npy train_tokenizer_sample_2897.npy train_tokenizer_sample_4031.npy train_tokenizer_sample_5168.npy train_tokenizer_sample_6303.npy\r\ntrain_tokenizer_sample_1761.npy train_tokenizer_sample_2898.npy train_tokenizer_sample_4032.npy train_tokenizer_sample_5169.npy train_tokenizer_sample_6304.npy\r\ntrain_tokenizer_sample_1762.npy train_tokenizer_sample_2899.npy train_tokenizer_sample_4033.npy train_tokenizer_sample_516.npy train_tokenizer_sample_6305.npy\r\ntrain_tokenizer_sample_1763.npy train_tokenizer_sample_289.npy train_tokenizer_sample_4034.npy train_tokenizer_sample_5170.npy train_tokenizer_sample_6306.npy\r\ntrain_tokenizer_sample_1764.npy train_tokenizer_sample_28.npy train_tokenizer_sample_4035.npy train_tokenizer_sample_5171.npy train_tokenizer_sample_630.npy\r\ntrain_tokenizer_sample_1765.npy train_tokenizer_sample_2900.npy train_tokenizer_sample_4036.npy train_tokenizer_sample_5172.npy train_tokenizer_sample_631.npy\r\ntrain_tokenizer_sample_1766.npy train_tokenizer_sample_2901.npy train_tokenizer_sample_4037.npy train_tokenizer_sample_5173.npy train_tokenizer_sample_632.npy\r\ntrain_tokenizer_sample_1767.npy train_tokenizer_sample_2902.npy train_tokenizer_sample_4038.npy train_tokenizer_sample_5174.npy train_tokenizer_sample_633.npy\r\ntrain_tokenizer_sample_1768.npy train_tokenizer_sample_2903.npy train_tokenizer_sample_4039.npy train_tokenizer_sample_5175.npy train_tokenizer_sample_634.npy\r\ntrain_tokenizer_sample_1769.npy train_tokenizer_sample_2904.npy train_tokenizer_sample_403.npy train_tokenizer_sample_5176.npy train_tokenizer_sample_635.npy\r\ntrain_tokenizer_sample_176.npy train_tokenizer_sample_2905.npy train_tokenizer_sample_4040.npy train_tokenizer_sample_5177.npy train_tokenizer_sample_636.npy\r\ntrain_tokenizer_sample_1770.npy train_tokenizer_sample_2906.npy train_tokenizer_sample_4041.npy train_tokenizer_sample_5178.npy train_tokenizer_sample_637.npy\r\ntrain_tokenizer_sample_1771.npy train_tokenizer_sample_2907.npy train_tokenizer_sample_4042.npy train_tokenizer_sample_5179.npy train_tokenizer_sample_638.npy\r\ntrain_tokenizer_sample_1772.npy train_tokenizer_sample_2908.npy train_tokenizer_sample_4043.npy train_tokenizer_sample_517.npy train_tokenizer_sample_639.npy\r\ntrain_tokenizer_sample_1773.npy train_tokenizer_sample_2909.npy train_tokenizer_sample_4044.npy train_tokenizer_sample_5180.npy train_tokenizer_sample_63.npy\r\ntrain_tokenizer_sample_1774.npy train_tokenizer_sample_290.npy train_tokenizer_sample_4045.npy train_tokenizer_sample_5181.npy train_tokenizer_sample_640.npy\r\ntrain_tokenizer_sample_1775.npy train_tokenizer_sample_2910.npy 
train_tokenizer_sample_4046.npy train_tokenizer_sample_5182.npy train_tokenizer_sample_641.npy\r\ntrain_tokenizer_sample_1776.npy train_tokenizer_sample_2911.npy train_tokenizer_sample_4047.npy train_tokenizer_sample_5183.npy train_tokenizer_sample_642.npy\r\ntrain_tokenizer_sample_1777.npy train_tokenizer_sample_2912.npy train_tokenizer_sample_4048.npy train_tokenizer_sample_5184.npy train_tokenizer_sample_643.npy\r\ntrain_tokenizer_sample_1778.npy train_tokenizer_sample_2913.npy train_tokenizer_sample_4049.npy train_tokenizer_sample_5185.npy train_tokenizer_sample_644.npy\r\ntrain_tokenizer_sample_1779.npy train_tokenizer_sample_2914.npy train_tokenizer_sample_404.npy train_tokenizer_sample_5186.npy train_tokenizer_sample_645.npy\r\ntrain_tokenizer_sample_177.npy train_tokenizer_sample_2915.npy train_tokenizer_sample_4050.npy train_tokenizer_sample_5187.npy train_tokenizer_sample_646.npy\r\ntrain_tokenizer_sample_1780.npy train_tokenizer_sample_2916.npy train_tokenizer_sample_4051.npy train_tokenizer_sample_5188.npy train_tokenizer_sample_647.npy\r\ntrain_tokenizer_sample_1781.npy train_tokenizer_sample_2917.npy train_tokenizer_sample_4052.npy train_tokenizer_sample_5189.npy train_tokenizer_sample_648.npy\r\ntrain_tokenizer_sample_1782.npy train_tokenizer_sample_2918.npy train_tokenizer_sample_4053.npy train_tokenizer_sample_518.npy train_tokenizer_sample_649.npy\r\ntrain_tokenizer_sample_1783.npy train_tokenizer_sample_2919.npy train_tokenizer_sample_4054.npy train_tokenizer_sample_5190.npy train_tokenizer_sample_64.npy\r\ntrain_tokenizer_sample_1784.npy train_tokenizer_sample_291.npy train_tokenizer_sample_4055.npy train_tokenizer_sample_5191.npy train_tokenizer_sample_650.npy\r\ntrain_tokenizer_sample_1785.npy train_tokenizer_sample_2920.npy train_tokenizer_sample_4056.npy train_tokenizer_sample_5192.npy train_tokenizer_sample_651.npy\r\ntrain_tokenizer_sample_1786.npy train_tokenizer_sample_2921.npy train_tokenizer_sample_4057.npy train_tokenizer_sample_5193.npy train_tokenizer_sample_652.npy\r\ntrain_tokenizer_sample_1787.npy train_tokenizer_sample_2922.npy train_tokenizer_sample_4058.npy train_tokenizer_sample_5194.npy train_tokenizer_sample_653.npy\r\ntrain_tokenizer_sample_1788.npy train_tokenizer_sample_2923.npy train_tokenizer_sample_4059.npy train_tokenizer_sample_5195.npy train_tokenizer_sample_654.npy\r\ntrain_tokenizer_sample_1789.npy train_tokenizer_sample_2924.npy train_tokenizer_sample_405.npy train_tokenizer_sample_5196.npy train_tokenizer_sample_655.npy\r\ntrain_tokenizer_sample_178.npy train_tokenizer_sample_2925.npy train_tokenizer_sample_4060.npy train_tokenizer_sample_5197.npy train_tokenizer_sample_656.npy\r\ntrain_tokenizer_sample_1790.npy train_tokenizer_sample_2926.npy train_tokenizer_sample_4061.npy train_tokenizer_sample_5198.npy train_tokenizer_sample_657.npy\r\ntrain_tokenizer_sample_1791.npy train_tokenizer_sample_2927.npy train_tokenizer_sample_4062.npy train_tokenizer_sample_5199.npy train_tokenizer_sample_658.npy\r\ntrain_tokenizer_sample_1792.npy train_tokenizer_sample_2928.npy train_tokenizer_sample_4063.npy train_tokenizer_sample_519.npy train_tokenizer_sample_659.npy\r\ntrain_tokenizer_sample_1793.npy train_tokenizer_sample_2929.npy train_tokenizer_sample_4064.npy train_tokenizer_sample_51.npy train_tokenizer_sample_65.npy\r\ntrain_tokenizer_sample_1794.npy train_tokenizer_sample_292.npy train_tokenizer_sample_4065.npy train_tokenizer_sample_5200.npy train_tokenizer_sample_660.npy\r\ntrain_tokenizer_sample_1795.npy train_tokenizer_sample_2930.npy 
[… ls output truncated: several thousand additional train_tokenizer_sample_*.npy entries …]\r\n",,terminal_output
+3312,2750759,"TERMINAL",0,0,"rm *.npy",,terminal_command
+3313,2750808,"TERMINAL",0,0,"]633;E;2025-07-01 19:11:36 rm *.npy;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C",,terminal_output
+3314,2774715,"TERMINAL",0,0,"]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample]633;D;0",,terminal_output
+3315,2778143,"TERMINAL",0,0,"srun",,terminal_focus
+3316,2778834,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output
+3317,2778970,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3337997\r\nSLURM_JOB_GPUS=1\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0706\r\nSLURM_JOB_START_TIME=1751388043\r\nSLURM_STEP_NODELIST=hkn0706\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1751391643\r\nSLURM_PMI2_SRUN_PORT=43539\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3309821\r\nSLURM_PTY_PORT=45555\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.201\r\nSLURM_PTY_WIN_ROW=36\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0706\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.201\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_MEM_PER_NODE=51200\r\nSLURM_PTY_WIN_COL=175\r\nSLURM_NODELIST=hkn0706\r\nSLURM_SRUN_COMM_PORT=36793\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1993.localdomain\r\nSLURM_JOB_ID=3309821\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0706\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=36793\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0706\r\n",,terminal_output +3318,2781611,"train_tokenizer.py",0,0,"",python,tab +3319,2785583,"train_tokenizer.py",7306,0,"",python,selection_mouse +3320,2785754,"train_tokenizer.py",7302,4,"exit",python,selection_mouse +3321,2786555,"train_tokenizer.py",7303,0,"",python,selection_mouse +3322,2787111,"TERMINAL",0,0,"2025-07-01 19:12:12.519821: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\r\nWARNING: All log messages before absl::InitializeLog() is called are written to STDERR\r\nE0000 00:00:1751389932.533170 3355207 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\r\nE0000 00:00:1751389932.537896 3355207 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\r\nW0000 00:00:1751389932.550932 3355207 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389932.550949 3355207 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389932.550951 3355207 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\r\nW0000 00:00:1751389932.550953 3355207 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\r\n",,terminal_output +3323,2787209,"train_tokenizer.py",7308,0,"",python,selection_mouse +3324,2791221,"TERMINAL",0,0,"bash",,terminal_focus +3325,2793131,"TERMINAL",0,0,"python",,terminal_command +3326,2793166,"TERMINAL",0,0,"]633;E;2025-07-01 19:12:18 python;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C",,terminal_output +3327,2793329,"TERMINAL",0,0,"Python 3.10.18 (main, Jun 4 2025, 17:36:27) [Clang 20.1.4 ] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n",,terminal_output +3328,2793596,"TERMINAL",0,0,">>> ",,terminal_output +3329,2794084,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +3330,2794319,"TERMINAL",0,0,"[?25lx[?25h[?25li[?25h",,terminal_output +3331,2794628,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +3332,2795318,"TERMINAL",0,0,"[?25l([?25h",,terminal_output +3333,2795372,"TERMINAL",0,0,"[?25l)[?25h",,terminal_output +3334,2795613,"TERMINAL",0,0,"\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir/single_sample]633;D;0",,terminal_output +3335,2796565,"TERMINAL",0,0,"W0000 00:00:1751389941.968666 3355207 gpu_device.cc:2341] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\r\nSkipping registering GPU devices...\r\n",,terminal_output +3336,2796873,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +3337,2797676,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +3338,2798287,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250701_191223-rz4wjk4f\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run tokenizer-tiny-overfit-0000\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/rz4wjk4f\r\n",,terminal_output +3339,2798682,"train_tokenizer.py",7933,0,"",python,selection_mouse +3340,2799945,"TERMINAL",0,0,"2025-07-01 19:12:25.353426: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3341,2800361,"TERMINAL",0,0,"srun",,terminal_focus +3342,2813670,"TERMINAL",0,0,"2025-07-01 19:12:39.011464: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3343,2815914,"TERMINAL",0,0,"2025-07-01 19:12:41.351858: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +3344,2819913,"TERMINAL",0,0,"Starting training from step 0...\r\n",,terminal_output +3345,2823144,"TERMINAL",0,0,"Step 1\r\nStep 2\r\n",,terminal_output +3346,2823362,"TERMINAL",0,0,"Step 3\r\n",,terminal_output +3347,2823420,"TERMINAL",0,0,"Step 4\r\nStep 5\r\nStep 6\r\n",,terminal_output +3348,2823493,"TERMINAL",0,0,"Step 7\r\nStep 8\r\n",,terminal_output +3349,2823845,"TERMINAL",0,0,"Step 9\r\n",,terminal_output +3350,2824022,"TERMINAL",0,0,"Step 10\r\n",,terminal_output +3351,2824196,"TERMINAL",0,0,"Step 11\r\n",,terminal_output +3352,2824326,"TERMINAL",0,0,"Step 12\r\nStep 13\r\nStep 14\r\nStep 15\r\nStep 16\r\nStep 17\r\nStep 18\r\nStep 19\r\n",,terminal_output +3353,2824672,"TERMINAL",0,0,"Step 20\r\nStep 21\r\nStep 22\r\nStep 23\r\nStep 24\r\nStep 25\r\nStep 26\r\nStep 27\r\nStep 28\r\nStep 29\r\nStep 30\r\nStep 31\r\nStep 32\r\nStep 33\r\nStep 34\r\nStep 35\r\nStep 36\r\nStep 37\r\nStep 38\r\nStep 39\r\nStep 40\r\nStep 41\r\nStep 42\r\nStep 43\r\nStep 44\r\nStep 45\r\nStep 46\r\nStep 47\r\nStep 48\r\nStep 49\r\nStep 50\r\nStep 51\r\nStep 52\r\nStep 53\r\nStep 54\r\nStep 55\r\nStep 56\r\nStep 57\r\nStep 58\r\nStep 59\r\nStep 60\r\nStep 61\r\nStep 62\r\nStep 63\r\nStep 64\r\nStep 65\r\nStep 66\r\nStep 67\r\nStep 68\r\nStep 69\r\nStep 70\r\nStep 71\r\nStep 72\r\nStep 73\r\n",,terminal_output +3354,2824739,"TERMINAL",0,0,"Step 74\r\nStep 75\r\nStep 76\r\nStep 77\r\n",,terminal_output +3355,2824809,"TERMINAL",0,0,"Step 78\r\nStep 79\r\nStep 80\r\nStep 81\r\nStep 82\r\nStep 83\r\nStep 84\r\nStep 85\r\nStep 86\r\n",,terminal_output +3356,2824929,"TERMINAL",0,0,"Step 87\r\nStep 88\r\nStep 89\r\nStep 90\r\nStep 91\r\nStep 92\r\n",,terminal_output +3357,2825037,"TERMINAL",0,0,"Step 93\r\nStep 94\r\nStep 95\r\n",,terminal_output +3358,2825098,"TERMINAL",0,0,"Step 96\r\n",,terminal_output +3359,2825161,"TERMINAL",0,0,"Step 97\r\nStep 98\r\nStep 99\r\n",,terminal_output +3360,2825269,"TERMINAL",0,0,"Step 100\r\nStep 101\r\nStep 102\r\nStep 103\r\nStep 104\r\nStep 105\r\nStep 106\r\nStep 107\r\n",,terminal_output +3361,2825345,"TERMINAL",0,0,"Step 108\r\nStep 109\r\nStep 110\r\n",,terminal_output +3362,2825426,"TERMINAL",0,0,"Step 111\r\nStep 112\r\nStep 113\r\n",,terminal_output +3363,2825482,"TERMINAL",0,0,"Step 114\r\nStep 115\r\n",,terminal_output +3364,2825535,"TERMINAL",0,0,"Step 116\r\n",,terminal_output +3365,2825643,"TERMINAL",0,0,"Step 117\r\nStep 118\r\nStep 119\r\nStep 120\r\nStep 121\r\nStep 122\r\n",,terminal_output +3366,2825750,"TERMINAL",0,0,"Step 123\r\nStep 124\r\nStep 125\r\n",,terminal_output +3367,2825814,"TERMINAL",0,0,"Step 126\r\nStep 127\r\nStep 128\r\n",,terminal_output +3368,2825923,"TERMINAL",0,0,"Step 129\r\nStep 130\r\nStep 131\r\nStep 132\r\nStep 133\r\nStep 134\r\nStep 135\r\nStep 136\r\n",,terminal_output +3369,2826084,"TERMINAL",0,0,"Step 137\r\nStep 138\r\nStep 139\r\nStep 140\r\nStep 141\r\nStep 142\r\nStep 143\r\nStep 144\r\nStep 145\r\nStep 146\r\n",,terminal_output +3370,2826255,"TERMINAL",0,0,"Step 147\r\nStep 148\r\nStep 149\r\nStep 150\r\nStep 151\r\nStep 152\r\nStep 153\r\n",,terminal_output +3371,2826315,"TERMINAL",0,0,"Step 154\r\n",,terminal_output +3372,2826411,"TERMINAL",0,0,"Step 155\r\nStep 156\r\nStep 157\r\nStep 158\r\nStep 159\r\nStep 160\r\nStep 161\r\n",,terminal_output +3373,2826520,"TERMINAL",0,0,"Step 162\r\nStep 163\r\nStep 164\r\nStep 165\r\nStep 166\r\n",,terminal_output 
+3374,2826582,"TERMINAL",0,0,"Step 167\r\n",,terminal_output +3375,2826656,"TERMINAL",0,0,"Step 168\r\nStep 169\r\nStep 170\r\n",,terminal_output +3376,2826782,"TERMINAL",0,0,"Step 171\r\nStep 172\r\nStep 173\r\nStep 174\r\nStep 175\r\nStep 176\r\n",,terminal_output +3377,2826897,"TERMINAL",0,0,"Step 177\r\nStep 178\r\nStep 179\r\nStep 180\r\n",,terminal_output +3378,2826958,"TERMINAL",0,0,"Step 181\r\nStep 182\r\nStep 183\r\nStep 184\r\n",,terminal_output +3379,2827108,"TERMINAL",0,0,"Step 185\r\nStep 186\r\nStep 187\r\nStep 188\r\nStep 189\r\nStep 190\r\nStep 191\r\nStep 192\r\nStep 193\r\nStep 194\r\n",,terminal_output +3380,2827181,"TERMINAL",0,0,"Step 195\r\nStep 196\r\n",,terminal_output +3381,2827274,"TERMINAL",0,0,"Step 197\r\n",,terminal_output +3382,2827336,"TERMINAL",0,0,"Step 198\r\nStep 199\r\nStep 200\r\nStep 201\r\n",,terminal_output +3383,2827401,"TERMINAL",0,0,"Step 202\r\nStep 203\r\nStep 204\r\n",,terminal_output +3384,2827491,"TERMINAL",0,0,"Step 205\r\nStep 206\r\nStep 207\r\nStep 208\r\nStep 209\r\nStep 210\r\nStep 211\r\n",,terminal_output +3385,2827544,"TERMINAL",0,0,"Step 212\r\n",,terminal_output +3386,2827622,"TERMINAL",0,0,"Step 213\r\nStep 214\r\nStep 215\r\nStep 216\r\nStep 217\r\nStep 218\r\n",,terminal_output +3387,2827749,"TERMINAL",0,0,"Step 219\r\nStep 220\r\nStep 221\r\n",,terminal_output +3388,2827809,"TERMINAL",0,0,"Step 222\r\nStep 223\r\nStep 224\r\nStep 225\r\n",,terminal_output +3389,2827939,"TERMINAL",0,0,"Step 226\r\nStep 227\r\nStep 228\r\nStep 229\r\nStep 230\r\nStep 231\r\n",,terminal_output +3390,2828000,"TERMINAL",0,0,"Step 232\r\nStep 233\r\nStep 234\r\n",,terminal_output +3391,2828065,"TERMINAL",0,0,"Step 235\r\n",,terminal_output +3392,2828127,"TERMINAL",0,0,"Step 236\r\nStep 237\r\nStep 238\r\nStep 239\r\n",,terminal_output +3393,2828189,"TERMINAL",0,0,"Step 240\r\nStep 241\r\nStep 242\r\nStep 243\r\nStep 244\r\nStep 245\r\nStep 246\r\n",,terminal_output +3394,2828255,"TERMINAL",0,0,"Step 247\r\nStep 248\r\n",,terminal_output +3395,2828319,"TERMINAL",0,0,"Step 249\r\nStep 250\r\nStep 251\r\nStep 252\r\n",,terminal_output +3396,2828426,"TERMINAL",0,0,"Step 253\r\nStep 254\r\nStep 255\r\nStep 256\r\nStep 257\r\n",,terminal_output +3397,2828544,"TERMINAL",0,0,"Step 258\r\nStep 259\r\nStep 260\r\nStep 261\r\nStep 262\r\n",,terminal_output +3398,2828665,"TERMINAL",0,0,"Step 263\r\nStep 264\r\nStep 265\r\nStep 266\r\nStep 267\r\nStep 268\r\nStep 269\r\n",,terminal_output +3399,2828730,"TERMINAL",0,0,"Step 270\r\nStep 271\r\nStep 272\r\nStep 273\r\n",,terminal_output +3400,2828885,"TERMINAL",0,0,"Step 274\r\nStep 275\r\nStep 276\r\nStep 277\r\n",,terminal_output +3401,2829012,"TERMINAL",0,0,"Step 278\r\nStep 279\r\nStep 280\r\nStep 281\r\nStep 282\r\n",,terminal_output +3402,2829075,"TERMINAL",0,0,"Step 283\r\nStep 284\r\n",,terminal_output +3403,2829230,"TERMINAL",0,0,"Step 285\r\nStep 286\r\n",,terminal_output +3404,2829343,"TERMINAL",0,0,"Step 287\r\nStep 288\r\nStep 289\r\nStep 290\r\nStep 291\r\nStep 292\r\nStep 293\r\n",,terminal_output +3405,2829403,"TERMINAL",0,0,"Step 294\r\nStep 295\r\nStep 296\r\n",,terminal_output +3406,2829466,"TERMINAL",0,0,"Step 297\r\nStep 298\r\n",,terminal_output +3407,2829592,"TERMINAL",0,0,"Step 299\r\nStep 300\r\nStep 301\r\nStep 302\r\nStep 303\r\nStep 304\r\nStep 305\r\n",,terminal_output +3408,2829697,"TERMINAL",0,0,"Step 306\r\nStep 307\r\nStep 308\r\nStep 309\r\nStep 310\r\nStep 311\r\nStep 312\r\nStep 313\r\nStep 314\r\n",,terminal_output +3409,2829775,"TERMINAL",0,0,"Step 315\r\nStep 316\r\nStep 
317\r\nStep 318\r\n",,terminal_output +3410,2829837,"TERMINAL",0,0,"Step 319\r\nStep 320\r\nStep 321\r\nStep 322\r\nStep 323\r\n",,terminal_output +3411,2829906,"TERMINAL",0,0,"Step 324\r\nStep 325\r\n",,terminal_output +3412,2829969,"TERMINAL",0,0,"Step 326\r\nStep 327\r\n",,terminal_output +3413,2830075,"TERMINAL",0,0,"Step 328\r\nStep 329\r\nStep 330\r\nStep 331\r\n",,terminal_output +3414,2830151,"TERMINAL",0,0,"Step 332\r\nStep 333\r\nStep 334\r\nStep 335\r\nStep 336\r\n",,terminal_output +3415,2830366,"TERMINAL",0,0,"Step 337\r\nStep 338\r\nStep 339\r\nStep 340\r\nStep 341\r\nStep 342\r\nStep 343\r\nStep 344\r\nStep 345\r\nStep 346\r\nStep 347\r\nStep 348\r\nStep 349\r\nStep 350\r\n",,terminal_output +3416,2830511,"TERMINAL",0,0,"Step 351\r\nStep 352\r\nStep 353\r\nStep 354\r\nStep 355\r\nStep 356\r\nStep 357\r\nStep 358\r\nStep 359\r\n",,terminal_output +3417,2830563,"TERMINAL",0,0,"Step 360\r\n",,terminal_output +3418,2830678,"TERMINAL",0,0,"Step 361\r\nStep 362\r\nStep 363\r\n",,terminal_output +3419,2830789,"TERMINAL",0,0,"Step 364\r\nStep 365\r\nStep 366\r\nStep 367\r\nStep 368\r\nStep 369\r\nStep 370\r\nStep 371\r\n",,terminal_output +3420,2830846,"TERMINAL",0,0,"Step 372\r\nStep 373\r\n",,terminal_output +3421,2830906,"TERMINAL",0,0,"Step 374\r\nStep 375\r\n",,terminal_output +3422,2831043,"TERMINAL",0,0,"Step 376\r\nStep 377\r\nStep 378\r\nStep 379\r\nStep 380\r\nStep 381\r\nStep 382\r\n",,terminal_output +3423,2831103,"TERMINAL",0,0,"Step 383\r\nStep 384\r\nStep 385\r\nStep 386\r\n",,terminal_output +3424,2831211,"TERMINAL",0,0,"Step 387\r\nStep 388\r\nStep 389\r\nStep 390\r\nStep 391\r\nStep 392\r\n",,terminal_output +3425,2831307,"TERMINAL",0,0,"Step 393\r\nStep 394\r\n",,terminal_output +3426,2831362,"TERMINAL",0,0,"Step 395\r\nStep 396\r\nStep 397\r\nStep 398\r\nStep 399\r\n",,terminal_output +3427,2831470,"TERMINAL",0,0,"Step 400\r\nStep 401\r\nStep 402\r\nStep 403\r\n",,terminal_output +3428,2831539,"TERMINAL",0,0,"Step 404\r\nStep 405\r\nStep 406\r\nStep 407\r\n",,terminal_output +3429,2831682,"TERMINAL",0,0,"Step 408\r\nStep 409\r\nStep 410\r\nStep 411\r\nStep 412\r\nStep 413\r\nStep 414\r\n",,terminal_output +3430,2831746,"TERMINAL",0,0,"Step 415\r\nStep 416\r\n",,terminal_output +3431,2831805,"TERMINAL",0,0,"Step 417\r\nStep 418\r\n",,terminal_output +3432,2831936,"TERMINAL",0,0,"Step 419\r\nStep 420\r\nStep 421\r\nStep 422\r\nStep 423\r\nStep 424\r\nStep 425\r\n",,terminal_output +3433,2831988,"TERMINAL",0,0,"Step 426\r\n",,terminal_output +3434,2832065,"TERMINAL",0,0,"Step 427\r\nStep 428\r\nStep 429\r\nStep 430\r\n",,terminal_output +3435,2832130,"TERMINAL",0,0,"Step 431\r\n",,terminal_output +3436,2832238,"TERMINAL",0,0,"Step 432\r\nStep 433\r\nStep 434\r\nStep 435\r\nStep 436\r\nStep 437\r\nStep 438\r\nStep 439\r\nStep 440\r\nStep 441\r\nStep 442\r\nStep 443\r\nStep 444\r\nStep 445\r\n",,terminal_output +3437,2832460,"TERMINAL",0,0,"Step 446\r\nStep 447\r\nStep 448\r\nStep 449\r\nStep 450\r\nStep 451\r\nStep 452\r\nStep 453\r\nStep 454\r\nStep 455\r\nStep 456\r\nStep 457\r\nStep 458\r\nStep 459\r\nStep 460\r\nStep 461\r\nStep 462\r\nStep 463\r\nStep 464\r\n",,terminal_output +3438,2832616,"TERMINAL",0,0,"Step 465\r\nStep 466\r\nStep 467\r\nStep 468\r\n",,terminal_output +3439,2832681,"TERMINAL",0,0,"Step 469\r\n",,terminal_output +3440,2832881,"TERMINAL",0,0,"Step 470\r\nStep 471\r\nStep 472\r\nStep 473\r\nStep 474\r\nStep 475\r\nStep 476\r\nStep 477\r\nStep 478\r\n",,terminal_output +3441,2833127,"TERMINAL",0,0,"Step 479\r\nStep 480\r\nStep 481\r\nStep 
482\r\nStep 483\r\nStep 484\r\nStep 485\r\nStep 486\r\nStep 487\r\nStep 488\r\nStep 489\r\nStep 490\r\nStep 491\r\nStep 492\r\nStep 493\r\nStep 494\r\nStep 495\r\nStep 496\r\nStep 497\r\nStep 498\r\nStep 499\r\nStep 500\r\nStep 501\r\nStep 502\r\nStep 503\r\nStep 504\r\nStep 505\r\nStep 506\r\nStep 507\r\nStep 508\r\nStep 509\r\nStep 510\r\nStep 511\r\nStep 512\r\n",,terminal_output +3442,2833243,"TERMINAL",0,0,"Step 513\r\n",,terminal_output +3443,2833311,"TERMINAL",0,0,"Step 514\r\n",,terminal_output +3444,2833381,"TERMINAL",0,0,"Step 515\r\nStep 516\r\n",,terminal_output +3445,2833579,"TERMINAL",0,0,"Step 517\r\nStep 518\r\nStep 519\r\nStep 520\r\nStep 521\r\nStep 522\r\nStep 523\r\nStep 524\r\nStep 525\r\nStep 526\r\nStep 527\r\nStep 528\r\nStep 529\r\n",,terminal_output +3446,2833644,"TERMINAL",0,0,"Step 530\r\nStep 531\r\nStep 532\r\nStep 533\r\nStep 534\r\nStep 535\r\nStep 536\r\nStep 537\r\nStep 538\r\nStep 539\r\n",,terminal_output +3447,2833776,"TERMINAL",0,0,"Step 540\r\nStep 541\r\nStep 542\r\nStep 543\r\n",,terminal_output +3448,2833829,"TERMINAL",0,0,"Step 544\r\n",,terminal_output +3449,2833918,"TERMINAL",0,0,"Step 545\r\nStep 546\r\nStep 547\r\nStep 548\r\n",,terminal_output +3450,2833971,"TERMINAL",0,0,"Step 549\r\n",,terminal_output +3451,2834025,"TERMINAL",0,0,"Step 550\r\n",,terminal_output +3452,2834122,"TERMINAL",0,0,"Step 551\r\n",,terminal_output +3453,2834186,"TERMINAL",0,0,"Step 552\r\nStep 553\r\nStep 554\r\nStep 555\r\n",,terminal_output +3454,2834249,"TERMINAL",0,0,"Step 556\r\n",,terminal_output +3455,2834439,"TERMINAL",0,0,"Step 557\r\nStep 558\r\n",,terminal_output +3456,2834548,"TERMINAL",0,0,"Step 559\r\nStep 560\r\nStep 561\r\nStep 562\r\nStep 563\r\nStep 564\r\n",,terminal_output +3457,2834646,"TERMINAL",0,0,"Step 565\r\n",,terminal_output +3458,2834780,"TERMINAL",0,0,"Step 566\r\nStep 567\r\nStep 568\r\nStep 569\r\n",,terminal_output +3459,2834912,"TERMINAL",0,0,"Step 570\r\n",,terminal_output +3460,2835014,"TERMINAL",0,0,"Step 571\r\nStep 572\r\nStep 573\r\nStep 574\r\n",,terminal_output +3461,2835134,"TERMINAL",0,0,"Step 575\r\nStep 576\r\nStep 577\r\nStep 578\r\nStep 579\r\nStep 580\r\n",,terminal_output +3462,2835190,"TERMINAL",0,0,"Step 581\r\n",,terminal_output +3463,2835243,"TERMINAL",0,0,"Step 582\r\n",,terminal_output +3464,2835579,"TERMINAL",0,0,"Step 583\r\nStep 584\r\nStep 585\r\nStep 586\r\nStep 587\r\nStep 588\r\nStep 589\r\nStep 590\r\nStep 591\r\nStep 592\r\nStep 593\r\nStep 594\r\nStep 595\r\n",,terminal_output +3465,2835703,"TERMINAL",0,0,"Step 596\r\nStep 597\r\n",,terminal_output +3466,2835814,"TERMINAL",0,0,"Step 598\r\nStep 599\r\nStep 600\r\nStep 601\r\nStep 602\r\nStep 603\r\nStep 604\r\n",,terminal_output +3467,2835895,"TERMINAL",0,0,"Step 605\r\nStep 606\r\nStep 607\r\nStep 608\r\n",,terminal_output +3468,2836059,"TERMINAL",0,0,"Step 609\r\nStep 610\r\nStep 611\r\nStep 612\r\nStep 613\r\nStep 614\r\nStep 615\r\nStep 616\r\n",,terminal_output +3469,2836119,"TERMINAL",0,0,"Step 617\r\nStep 618\r\n",,terminal_output +3470,2836228,"TERMINAL",0,0,"Step 619\r\nStep 620\r\nStep 621\r\nStep 622\r\nStep 623\r\n",,terminal_output +3471,2836359,"TERMINAL",0,0,"Step 624\r\nStep 625\r\nStep 626\r\nStep 627\r\nStep 628\r\nStep 629\r\nStep 630\r\nStep 631\r\nStep 632\r\n",,terminal_output +3472,2836490,"TERMINAL",0,0,"Step 633\r\nStep 634\r\nStep 635\r\nStep 636\r\n",,terminal_output +3473,2836593,"TERMINAL",0,0,"Step 637\r\nStep 638\r\nStep 639\r\nStep 640\r\n",,terminal_output +3474,2836656,"TERMINAL",0,0,"Step 641\r\nStep 
642\r\n",,terminal_output +3475,2836756,"TERMINAL",0,0,"Step 643\r\nStep 644\r\nStep 645\r\nStep 646\r\nStep 647\r\n",,terminal_output +3476,2836929,"TERMINAL",0,0,"Step 648\r\nStep 649\r\nStep 650\r\nStep 651\r\nStep 652\r\nStep 653\r\nStep 654\r\nStep 655\r\nStep 656\r\n",,terminal_output +3477,2837072,"TERMINAL",0,0,"Step 657\r\nStep 658\r\nStep 659\r\nStep 660\r\nStep 661\r\nStep 662\r\nStep 663\r\n",,terminal_output +3478,2837206,"TERMINAL",0,0,"Step 664\r\nStep 665\r\nStep 666\r\n",,terminal_output +3479,2837270,"TERMINAL",0,0,"Step 667\r\nStep 668\r\nStep 669\r\n",,terminal_output +3480,2837379,"TERMINAL",0,0,"Step 670\r\nStep 671\r\nStep 672\r\nStep 673\r\n",,terminal_output +3481,2837439,"TERMINAL",0,0,"Step 674\r\nStep 675\r\nStep 676\r\n",,terminal_output +3482,2837582,"TERMINAL",0,0,"Step 677\r\nStep 678\r\nStep 679\r\nStep 680\r\nStep 681\r\nStep 682\r\nStep 683\r\nStep 684\r\n",,terminal_output +3483,2837649,"TERMINAL",0,0,"Step 685\r\nStep 686\r\n",,terminal_output +3484,2837709,"TERMINAL",0,0,"Step 687\r\nStep 688\r\nStep 689\r\n",,terminal_output +3485,2837818,"TERMINAL",0,0,"Step 690\r\nStep 691\r\nStep 692\r\nStep 693\r\n",,terminal_output +3486,2837880,"TERMINAL",0,0,"Step 694\r\nStep 695\r\nStep 696\r\n",,terminal_output +3487,2837951,"TERMINAL",0,0,"Step 697\r\nStep 698\r\n",,terminal_output +3488,2838039,"TERMINAL",0,0,"Step 699\r\nStep 700\r\nStep 701\r\nStep 702\r\n",,terminal_output +3489,2838120,"TERMINAL",0,0,"Step 703\r\nStep 704\r\nStep 705\r\nStep 706\r\nStep 707\r\nStep 708\r\nStep 709\r\n",,terminal_output +3490,2838253,"TERMINAL",0,0,"Step 710\r\nStep 711\r\nStep 712\r\n",,terminal_output +3491,2838306,"TERMINAL",0,0,"Step 713\r\nStep 714\r\nStep 715\r\nStep 716\r\nStep 717\r\n",,terminal_output +3492,2838520,"TERMINAL",0,0,"Step 718\r\nStep 719\r\nStep 720\r\nStep 721\r\nStep 722\r\nStep 723\r\nStep 724\r\nStep 725\r\nStep 726\r\n",,terminal_output +3493,2838633,"TERMINAL",0,0,"Step 727\r\nStep 728\r\nStep 729\r\n",,terminal_output +3494,2838702,"TERMINAL",0,0,"Step 730\r\nStep 731\r\nStep 732\r\nStep 733\r\nStep 734\r\n",,terminal_output +3495,2838878,"TERMINAL",0,0,"Step 735\r\nStep 736\r\nStep 737\r\nStep 738\r\nStep 739\r\nStep 740\r\nStep 741\r\nStep 742\r\nStep 743\r\nStep 744\r\nStep 745\r\n",,terminal_output +3496,2839016,"TERMINAL",0,0,"Step 746\r\nStep 747\r\nStep 748\r\nStep 749\r\nStep 750\r\nStep 751\r\nStep 752\r\n",,terminal_output +3497,2839079,"TERMINAL",0,0,"Step 753\r\nStep 754\r\nStep 755\r\n",,terminal_output +3498,2839245,"TERMINAL",0,0,"Step 756\r\nStep 757\r\nStep 758\r\nStep 759\r\nStep 760\r\nStep 761\r\nStep 762\r\n",,terminal_output +3499,2839313,"TERMINAL",0,0,"Step 763\r\nStep 764\r\nStep 765\r\n",,terminal_output +3500,2839369,"TERMINAL",0,0,"Step 766\r\nStep 767\r\nStep 768\r\nStep 769\r\nStep 770\r\n",,terminal_output +3501,2839478,"TERMINAL",0,0,"Step 771\r\nStep 772\r\nStep 773\r\nStep 774\r\nStep 775\r\n",,terminal_output +3502,2839667,"TERMINAL",0,0,"Step 776\r\nStep 777\r\nStep 778\r\nStep 779\r\nStep 780\r\nStep 781\r\n",,terminal_output +3503,2839757,"TERMINAL",0,0,"Step 782\r\nStep 783\r\nStep 784\r\nStep 785\r\nStep 786\r\nStep 787\r\nStep 788\r\nStep 789\r\n",,terminal_output +3504,2839822,"TERMINAL",0,0,"Step 790\r\n",,terminal_output +3505,2839947,"TERMINAL",0,0,"Step 791\r\nStep 792\r\nStep 793\r\nStep 794\r\nStep 795\r\nStep 796\r\n",,terminal_output +3506,2840050,"TERMINAL",0,0,"Step 797\r\nStep 798\r\nStep 799\r\n",,terminal_output +3507,2840107,"TERMINAL",0,0,"Step 800\r\nStep 801\r\nStep 802\r\nStep 
803\r\n",,terminal_output +3508,2840171,"TERMINAL",0,0,"Step 804\r\nStep 805\r\nStep 806\r\n",,terminal_output +3509,2840235,"TERMINAL",0,0,"Step 807\r\n",,terminal_output +3510,2840298,"TERMINAL",0,0,"Step 808\r\nStep 809\r\nStep 810\r\n",,terminal_output +3511,2840417,"TERMINAL",0,0,"Step 811\r\nStep 812\r\nStep 813\r\nStep 814\r\nStep 815\r\nStep 816\r\nStep 817\r\nStep 818\r\nStep 819\r\n",,terminal_output +3512,2840592,"TERMINAL",0,0,"Step 820\r\nStep 821\r\nStep 822\r\nStep 823\r\nStep 824\r\nStep 825\r\nStep 826\r\n",,terminal_output +3513,2840700,"TERMINAL",0,0,"Step 827\r\nStep 828\r\nStep 829\r\nStep 830\r\nStep 831\r\nStep 832\r\n",,terminal_output +3514,2840810,"TERMINAL",0,0,"Step 833\r\nStep 834\r\nStep 835\r\nStep 836\r\nStep 837\r\n",,terminal_output +3515,2840874,"TERMINAL",0,0,"Step 838\r\n",,terminal_output +3516,2840938,"TERMINAL",0,0,"Step 839\r\nStep 840\r\nStep 841\r\nStep 842\r\nStep 843\r\nStep 844\r\n",,terminal_output +3517,2841021,"TERMINAL",0,0,"Step 845\r\nStep 846\r\nStep 847\r\nStep 848\r\nStep 849\r\n",,terminal_output +3518,2841118,"TERMINAL",0,0,"Step 850\r\n",,terminal_output +3519,2841177,"TERMINAL",0,0,"Step 851\r\nStep 852\r\nStep 853\r\nStep 854\r\n",,terminal_output +3520,2841245,"TERMINAL",0,0,"Step 855\r\nStep 856\r\n",,terminal_output +3521,2841362,"TERMINAL",0,0,"Step 857\r\nStep 858\r\nStep 859\r\nStep 860\r\nStep 861\r\nStep 862\r\n",,terminal_output +3522,2841475,"TERMINAL",0,0,"Step 863\r\nStep 864\r\nStep 865\r\nStep 866\r\nStep 867\r\nStep 868\r\n",,terminal_output +3523,2841540,"TERMINAL",0,0,"Step 869\r\nStep 870\r\nStep 871\r\nStep 872\r\n",,terminal_output +3524,2841614,"TERMINAL",0,0,"Step 873\r\nStep 874\r\nStep 875\r\nStep 876\r\n",,terminal_output +3525,2841665,"TERMINAL",0,0,"Step 877\r\n",,terminal_output +3526,2841772,"TERMINAL",0,0,"Step 878\r\nStep 879\r\nStep 880\r\nStep 881\r\n",,terminal_output +3527,2841860,"TERMINAL",0,0,"Step 882\r\nStep 883\r\nStep 884\r\n",,terminal_output +3528,2841983,"TERMINAL",0,0,"Step 885\r\nStep 886\r\nStep 887\r\nStep 888\r\nStep 889\r\nStep 890\r\nStep 891\r\n",,terminal_output +3529,2842058,"TERMINAL",0,0,"Step 892\r\nStep 893\r\nStep 894\r\nStep 895\r\nStep 896\r\nStep 897\r\n",,terminal_output +3530,2842196,"TERMINAL",0,0,"Step 898\r\nStep 899\r\nStep 900\r\nStep 901\r\nStep 902\r\nStep 903\r\nStep 904\r\nStep 905\r\nStep 906\r\nStep 907\r\n",,terminal_output +3531,2842366,"TERMINAL",0,0,"Step 908\r\nStep 909\r\nStep 910\r\nStep 911\r\nStep 912\r\nStep 913\r\nStep 914\r\nStep 915\r\nStep 916\r\nStep 917\r\nStep 918\r\nStep 919\r\nStep 920\r\nStep 921\r\nStep 922\r\nStep 923\r\nStep 924\r\nStep 925\r\n",,terminal_output +3532,2842488,"TERMINAL",0,0,"Step 926\r\nStep 927\r\nStep 928\r\n",,terminal_output +3533,2842557,"TERMINAL",0,0,"Step 929\r\nStep 930\r\nStep 931\r\n",,terminal_output +3534,2842773,"TERMINAL",0,0,"Step 932\r\nStep 933\r\nStep 934\r\nStep 935\r\nStep 936\r\nStep 937\r\nStep 938\r\nStep 939\r\nStep 940\r\nStep 941\r\nStep 942\r\nStep 943\r\nStep 944\r\nStep 945\r\nStep 946\r\nStep 947\r\nStep 948\r\nStep 949\r\n",,terminal_output +3535,2842958,"TERMINAL",0,0,"Step 950\r\nStep 951\r\nStep 952\r\nStep 953\r\nStep 954\r\nStep 955\r\nStep 956\r\nStep 957\r\n",,terminal_output +3536,2843448,"TERMINAL",0,0,"Step 958\r\nStep 959\r\nStep 960\r\nStep 961\r\nStep 962\r\nStep 963\r\nStep 964\r\nStep 965\r\nStep 966\r\nStep 967\r\nStep 968\r\nStep 969\r\nStep 970\r\nStep 971\r\nStep 972\r\nStep 973\r\nStep 974\r\nStep 975\r\nStep 976\r\nStep 977\r\nStep 978\r\nStep 979\r\nStep 
980\r\nStep 981\r\nStep 982\r\nStep 983\r\nStep 984\r\nStep 985\r\nStep 986\r\nStep 987\r\nStep 988\r\nStep 989\r\nStep 990\r\nStep 991\r\nStep 992\r\nStep 993\r\nStep 994\r\nStep 995\r\n",,terminal_output +3537,2843512,"TERMINAL",0,0,"Step 996\r\nStep 997\r\n",,terminal_output +3538,2843692,"TERMINAL",0,0,"Step 998\r\nStep 999\r\n",,terminal_output +3539,2843819,"TERMINAL",0,0,"Step 1000\r\nStep 1001\r\nStep 1002\r\nStep 1003\r\nStep 1004\r\nStep 1005\r\n",,terminal_output +3540,2844003,"TERMINAL",0,0,"Step 1006\r\nStep 1007\r\n",,terminal_output +3541,2844141,"TERMINAL",0,0,"Step 1008\r\nStep 1009\r\nStep 1010\r\nStep 1011\r\nStep 1012\r\n",,terminal_output +3542,2844212,"TERMINAL",0,0,"Step 1013\r\n",,terminal_output +3543,2844538,"TERMINAL",0,0,"Step 1014\r\nStep 1015\r\nStep 1016\r\nStep 1017\r\nStep 1018\r\nStep 1019\r\nStep 1020\r\n",,terminal_output +3544,2844599,"TERMINAL",0,0,"Step 1021\r\nStep 1022\r\nStep 1023\r\n",,terminal_output +3545,2844661,"TERMINAL",0,0,"Step 1024\r\n",,terminal_output +3546,2844772,"TERMINAL",0,0,"Step 1025\r\nStep 1026\r\nStep 1027\r\nStep 1028\r\nStep 1029\r\nStep 1030\r\n",,terminal_output +3547,2844837,"TERMINAL",0,0,"Step 1031\r\nStep 1032\r\n",,terminal_output +3548,2844945,"TERMINAL",0,0,"Step 1033\r\nStep 1034\r\nStep 1035\r\nStep 1036\r\n",,terminal_output +3549,2845013,"TERMINAL",0,0,"Step 1037\r\nStep 1038\r\nStep 1039\r\n",,terminal_output +3550,2845121,"TERMINAL",0,0,"Step 1040\r\nStep 1041\r\nStep 1042\r\nStep 1043\r\n",,terminal_output +3551,2845185,"TERMINAL",0,0,"Step 1044\r\nStep 1045\r\n",,terminal_output +3552,2845256,"TERMINAL",0,0,"Step 1046\r\nStep 1047\r\nStep 1048\r\nStep 1049\r\n",,terminal_output +3553,2845312,"TERMINAL",0,0,"Step 1050\r\nStep 1051\r\n",,terminal_output +3554,2845392,"TERMINAL",0,0,"Step 1052\r\nStep 1053\r\nStep 1054\r\nStep 1055\r\nStep 1056\r\nStep 1057\r\n",,terminal_output +3555,2845581,"TERMINAL",0,0,"Step 1058\r\nStep 1059\r\nStep 1060\r\nStep 1061\r\nStep 1062\r\nStep 1063\r\nStep 1064\r\nStep 1065\r\nStep 1066\r\nStep 1067\r\nStep 1068\r\n",,terminal_output +3556,2845628,"TERMINAL",0,0,"Step 1069\r\n",,terminal_output +3557,2845880,"TERMINAL",0,0,"Step 1070\r\nStep 1071\r\nStep 1072\r\nStep 1073\r\nStep 1074\r\nStep 1075\r\nStep 1076\r\nStep 1077\r\nStep 1078\r\nStep 1079\r\n",,terminal_output +3558,2845987,"TERMINAL",0,0,"Step 1080\r\nStep 1081\r\nStep 1082\r\nStep 1083\r\nStep 1084\r\nStep 1085\r\n",,terminal_output +3559,2846041,"TERMINAL",0,0,"Step 1086\r\n",,terminal_output +3560,2846156,"TERMINAL",0,0,"Step 1087\r\nStep 1088\r\nStep 1089\r\nStep 1090\r\nStep 1091\r\n",,terminal_output +3561,2846228,"TERMINAL",0,0,"Step 1092\r\nStep 1093\r\nStep 1094\r\nStep 1095\r\nStep 1096\r\n",,terminal_output +3562,2846342,"TERMINAL",0,0,"Step 1097\r\nStep 1098\r\nStep 1099\r\nStep 1100\r\n",,terminal_output +3563,2846451,"TERMINAL",0,0,"Step 1101\r\nStep 1102\r\nStep 1103\r\nStep 1104\r\nStep 1105\r\nStep 1106\r\nStep 1107\r\nStep 1108\r\nStep 1109\r\nStep 1110\r\n",,terminal_output +3564,2846534,"TERMINAL",0,0,"Step 1111\r\nStep 1112\r\nStep 1113\r\nStep 1114\r\n",,terminal_output +3565,2846590,"TERMINAL",0,0,"Step 1115\r\nStep 1116\r\n",,terminal_output +3566,2846660,"TERMINAL",0,0,"Step 1117\r\nStep 1118\r\n",,terminal_output +3567,2846732,"TERMINAL",0,0,"Step 1119\r\nStep 1120\r\nStep 1121\r\nStep 1122\r\nStep 1123\r\n",,terminal_output +3568,2846795,"TERMINAL",0,0,"Step 1124\r\nStep 1125\r\nStep 1126\r\nStep 1127\r\n",,terminal_output +3569,2846948,"TERMINAL",0,0,"Step 1128\r\nStep 1129\r\nStep 
1130\r\nStep 1131\r\nStep 1132\r\nStep 1133\r\nStep 1134\r\nStep 1135\r\n",,terminal_output +3570,2847077,"TERMINAL",0,0,"Step 1136\r\nStep 1137\r\nStep 1138\r\nStep 1139\r\nStep 1140\r\nStep 1141\r\n",,terminal_output +3571,2847208,"TERMINAL",0,0,"Step 1142\r\nStep 1143\r\nStep 1144\r\nStep 1145\r\n",,terminal_output +3572,2847332,"TERMINAL",0,0,"Step 1146\r\nStep 1147\r\nStep 1148\r\nStep 1149\r\nStep 1150\r\n",,terminal_output +3573,2847458,"TERMINAL",0,0,"Step 1151\r\nStep 1152\r\nStep 1153\r\nStep 1154\r\nStep 1155\r\nStep 1156\r\nStep 1157\r\nStep 1158\r\nStep 1159\r\nStep 1160\r\nStep 1161\r\nStep 1162\r\n",,terminal_output +3574,2847511,"TERMINAL",0,0,"Step 1163\r\n",,terminal_output +3575,2847635,"TERMINAL",0,0,"Step 1164\r\nStep 1165\r\nStep 1166\r\nStep 1167\r\nStep 1168\r\n",,terminal_output +3576,2847747,"TERMINAL",0,0,"Step 1169\r\nStep 1170\r\nStep 1171\r\nStep 1172\r\n",,terminal_output +3577,2847858,"TERMINAL",0,0,"Step 1173\r\nStep 1174\r\n",,terminal_output +3578,2847921,"TERMINAL",0,0,"Step 1175\r\nStep 1176\r\n",,terminal_output +3579,2847992,"TERMINAL",0,0,"Step 1177\r\n",,terminal_output +3580,2848054,"TERMINAL",0,0,"Step 1178\r\nStep 1179\r\nStep 1180\r\n",,terminal_output +3581,2848117,"TERMINAL",0,0,"Step 1181\r\nStep 1182\r\nStep 1183\r\nStep 1184\r\nStep 1185\r\n",,terminal_output +3582,2848183,"TERMINAL",0,0,"Step 1186\r\nStep 1187\r\n",,terminal_output +3583,2848293,"TERMINAL",0,0,"Step 1188\r\nStep 1189\r\nStep 1190\r\nStep 1191\r\nStep 1192\r\nStep 1193\r\nStep 1194\r\nStep 1195\r\nStep 1196\r\n",,terminal_output +3584,2848355,"TERMINAL",0,0,"Step 1197\r\n",,terminal_output +3585,2848547,"TERMINAL",0,0,"Step 1198\r\nStep 1199\r\nStep 1200\r\nStep 1201\r\nStep 1202\r\nStep 1203\r\nStep 1204\r\nStep 1205\r\nStep 1206\r\nStep 1207\r\n",,terminal_output +3586,2848609,"TERMINAL",0,0,"Step 1208\r\n",,terminal_output +3587,2848738,"TERMINAL",0,0,"Step 1209\r\nStep 1210\r\nStep 1211\r\nStep 1212\r\nStep 1213\r\nStep 1214\r\nStep 1215\r\nStep 1216\r\nStep 1217\r\nStep 1218\r\nStep 1219\r\n",,terminal_output +3588,2849057,"TERMINAL",0,0,"Step 1220\r\nStep 1221\r\nStep 1222\r\nStep 1223\r\nStep 1224\r\nStep 1225\r\nStep 1226\r\nStep 1227\r\nStep 1228\r\nStep 1229\r\nStep 1230\r\nStep 1231\r\nStep 1232\r\nStep 1233\r\n",,terminal_output +3589,2849173,"TERMINAL",0,0,"Step 1234\r\nStep 1235\r\nStep 1236\r\n",,terminal_output +3590,2849237,"TERMINAL",0,0,"Step 1237\r\nStep 1238\r\nStep 1239\r\nStep 1240\r\nStep 1241\r\nStep 1242\r\nStep 1243\r\n",,terminal_output +3591,2849343,"TERMINAL",0,0,"Step 1244\r\nStep 1245\r\nStep 1246\r\n",,terminal_output +3592,2849458,"TERMINAL",0,0,"Step 1247\r\nStep 1248\r\nStep 1249\r\nStep 1250\r\nStep 1251\r\n",,terminal_output +3593,2849571,"TERMINAL",0,0,"Step 1252\r\nStep 1253\r\nStep 1254\r\nStep 1255\r\nStep 1256\r\nStep 1257\r\nStep 1258\r\nStep 1259\r\n",,terminal_output +3594,2849699,"TERMINAL",0,0,"Step 1260\r\nStep 1261\r\nStep 1262\r\nStep 1263\r\nStep 1264\r\nStep 1265\r\n",,terminal_output +3595,2849794,"TERMINAL",0,0,"Step 1266\r\n",,terminal_output +3596,2849919,"TERMINAL",0,0,"Step 1267\r\nStep 1268\r\nStep 1269\r\nStep 1270\r\nStep 1271\r\nStep 1272\r\nStep 1273\r\nStep 1274\r\n",,terminal_output +3597,2849987,"TERMINAL",0,0,"Step 1275\r\nStep 1276\r\nStep 1277\r\nStep 1278\r\nStep 1279\r\n",,terminal_output +3598,2850051,"TERMINAL",0,0,"Step 1280\r\nStep 1281\r\n",,terminal_output +3599,2850181,"TERMINAL",0,0,"Step 1282\r\nStep 1283\r\nStep 1284\r\nStep 1285\r\nStep 1286\r\nStep 1287\r\nStep 1288\r\n",,terminal_output 
+3600,2850310,"TERMINAL",0,0,"Step 1289\r\nStep 1290\r\nStep 1291\r\nStep 1292\r\nStep 1293\r\nStep 1294\r\n",,terminal_output +3601,2850370,"TERMINAL",0,0,"Step 1295\r\nStep 1296\r\nStep 1297\r\nStep 1298\r\nStep 1299\r\nStep 1300\r\nStep 1301\r\nStep 1302\r\n",,terminal_output +3602,2850436,"TERMINAL",0,0,"Step 1303\r\nStep 1304\r\n",,terminal_output +3603,2850514,"TERMINAL",0,0,"Step 1305\r\nStep 1306\r\nStep 1307\r\nStep 1308\r\n",,terminal_output +3604,2850626,"TERMINAL",0,0,"Step 1309\r\nStep 1310\r\nStep 1311\r\nStep 1312\r\n",,terminal_output +3605,2850691,"TERMINAL",0,0,"Step 1313\r\nStep 1314\r\nStep 1315\r\nStep 1316\r\nStep 1317\r\nStep 1318\r\n",,terminal_output +3606,2850816,"TERMINAL",0,0,"Step 1319\r\nStep 1320\r\nStep 1321\r\n",,terminal_output +3607,2850877,"TERMINAL",0,0,"Step 1322\r\nStep 1323\r\nStep 1324\r\n",,terminal_output +3608,2851014,"TERMINAL",0,0,"Step 1325\r\nStep 1326\r\nStep 1327\r\nStep 1328\r\nStep 1329\r\nStep 1330\r\nStep 1331\r\nStep 1332\r\n",,terminal_output +3609,2851121,"TERMINAL",0,0,"Step 1333\r\nStep 1334\r\nStep 1335\r\nStep 1336\r\nStep 1337\r\n",,terminal_output +3610,2851252,"TERMINAL",0,0,"Step 1338\r\nStep 1339\r\nStep 1340\r\nStep 1341\r\nStep 1342\r\nStep 1343\r\nStep 1344\r\n",,terminal_output +3611,2851346,"TERMINAL",0,0,"Step 1345\r\nStep 1346\r\nStep 1347\r\nStep 1348\r\nStep 1349\r\nStep 1350\r\nStep 1351\r\nStep 1352\r\nStep 1353\r\n",,terminal_output +3612,2851487,"TERMINAL",0,0,"Step 1354\r\nStep 1355\r\nStep 1356\r\nStep 1357\r\nStep 1358\r\nStep 1359\r\nStep 1360\r\nStep 1361\r\nStep 1362\r\nStep 1363\r\nStep 1364\r\nStep 1365\r\nStep 1366\r\nStep 1367\r\nStep 1368\r\nStep 1369\r\nStep 1370\r\nStep 1371\r\nStep 1372\r\nStep 1373\r\nStep 1374\r\nStep 1375\r\nStep 1376\r\nStep 1377\r\nStep 1378\r\nStep 1379\r\nStep 1380\r\n",,terminal_output +3613,2851550,"TERMINAL",0,0,"Step 1381\r\nStep 1382\r\nStep 1383\r\nStep 1384\r\nStep 1385\r\nStep 1386\r\nStep 1387\r\n",,terminal_output +3614,2851660,"TERMINAL",0,0,"Step 1388\r\nStep 1389\r\nStep 1390\r\n",,terminal_output +3615,2851845,"TERMINAL",0,0,"Step 1391\r\nStep 1392\r\nStep 1393\r\n",,terminal_output +3616,2851913,"TERMINAL",0,0,"Step 1394\r\nStep 1395\r\nStep 1396\r\nStep 1397\r\n",,terminal_output +3617,2852072,"TERMINAL",0,0,"Step 1398\r\nStep 1399\r\n",,terminal_output +3618,2852185,"TERMINAL",0,0,"Step 1400\r\nStep 1401\r\n",,terminal_output +3619,2852307,"TERMINAL",0,0,"Step 1402\r\nStep 1403\r\nStep 1404\r\nStep 1405\r\nStep 1406\r\nStep 1407\r\nStep 1408\r\nStep 1409\r\n",,terminal_output +3620,2852371,"TERMINAL",0,0,"Step 1410\r\n",,terminal_output +3621,2852486,"TERMINAL",0,0,"Step 1411\r\nStep 1412\r\nStep 1413\r\nStep 1414\r\n",,terminal_output +3622,2852579,"TERMINAL",0,0,"Step 1415\r\nStep 1416\r\nStep 1417\r\nStep 1418\r\n",,terminal_output +3623,2852706,"TERMINAL",0,0,"Step 1419\r\nStep 1420\r\nStep 1421\r\nStep 1422\r\n",,terminal_output +3624,2852776,"TERMINAL",0,0,"Step 1423\r\nStep 1424\r\n",,terminal_output +3625,2852887,"TERMINAL",0,0,"Step 1425\r\nStep 1426\r\nStep 1427\r\n",,terminal_output +3626,2853031,"TERMINAL",0,0,"Step 1428\r\nStep 1429\r\nStep 1430\r\nStep 1431\r\nStep 1432\r\nStep 1433\r\n",,terminal_output +3627,2853093,"TERMINAL",0,0,"Step 1434\r\n",,terminal_output +3628,2853203,"TERMINAL",0,0,"Step 1435\r\nStep 1436\r\nStep 1437\r\nStep 1438\r\nStep 1439\r\nStep 1440\r\nStep 1441\r\n",,terminal_output +3629,2853512,"TERMINAL",0,0,"Step 1442\r\nStep 1443\r\nStep 1444\r\nStep 1445\r\nStep 1446\r\nStep 1447\r\nStep 1448\r\nStep 1449\r\nStep 
1450\r\nStep 1451\r\nStep 1452\r\nStep 1453\r\nStep 1454\r\nStep 1455\r\nStep 1456\r\nStep 1457\r\nStep 1458\r\nStep 1459\r\nStep 1460\r\nStep 1461\r\nStep 1462\r\nStep 1463\r\nStep 1464\r\nStep 1465\r\nStep 1466\r\nStep 1467\r\n",,terminal_output +3630,2853638,"TERMINAL",0,0,"Step 1468\r\nStep 1469\r\nStep 1470\r\nStep 1471\r\n",,terminal_output +3631,2853736,"TERMINAL",0,0,"Step 1472\r\nStep 1473\r\nStep 1474\r\nStep 1475\r\nStep 1476\r\nStep 1477\r\nStep 1478\r\nStep 1479\r\nStep 1480\r\nStep 1481\r\nStep 1482\r\nStep 1483\r\nStep 1484\r\nStep 1485\r\nStep 1486\r\nStep 1487\r\nStep 1488\r\n",,terminal_output +3632,2853811,"TERMINAL",0,0,"Step 1489\r\n",,terminal_output +3633,2853949,"TERMINAL",0,0,"Step 1490\r\nStep 1491\r\nStep 1492\r\nStep 1493\r\nStep 1494\r\n",,terminal_output +3634,2854012,"TERMINAL",0,0,"Step 1495\r\nStep 1496\r\n",,terminal_output +3635,2854140,"TERMINAL",0,0,"Step 1497\r\nStep 1498\r\nStep 1499\r\n",,terminal_output +3636,2854207,"TERMINAL",0,0,"Step 1500\r\nStep 1501\r\nStep 1502\r\n",,terminal_output +3637,2854339,"TERMINAL",0,0,"Step 1503\r\nStep 1504\r\nStep 1505\r\nStep 1506\r\nStep 1507\r\n",,terminal_output +3638,2854415,"TERMINAL",0,0,"Step 1508\r\nStep 1509\r\nStep 1510\r\nStep 1511\r\nStep 1512\r\n",,terminal_output +3639,2854558,"TERMINAL",0,0,"Step 1513\r\nStep 1514\r\n",,terminal_output +3640,2854618,"TERMINAL",0,0,"Step 1515\r\nStep 1516\r\n",,terminal_output +3641,2854753,"TERMINAL",0,0,"Step 1517\r\nStep 1518\r\n",,terminal_output +3642,2854815,"TERMINAL",0,0,"Step 1519\r\nStep 1520\r\nStep 1521\r\nStep 1522\r\n",,terminal_output +3643,2855093,"TERMINAL",0,0,"Step 1523\r\nStep 1524\r\nStep 1525\r\nStep 1526\r\nStep 1527\r\nStep 1528\r\nStep 1529\r\nStep 1530\r\n",,terminal_output +3644,2855155,"TERMINAL",0,0,"Step 1531\r\n",,terminal_output +3645,2855207,"TERMINAL",0,0,"Step 1532\r\n",,terminal_output +3646,2855418,"TERMINAL",0,0,"Step 1533\r\nStep 1534\r\nStep 1535\r\nStep 1536\r\nStep 1537\r\nStep 1538\r\nStep 1539\r\nStep 1540\r\nStep 1541\r\nStep 1542\r\n",,terminal_output +3647,2855483,"TERMINAL",0,0,"Step 1543\r\nStep 1544\r\n",,terminal_output +3648,2855612,"TERMINAL",0,0,"Step 1545\r\nStep 1546\r\nStep 1547\r\nStep 1548\r\n",,terminal_output +3649,2855747,"TERMINAL",0,0,"Step 1549\r\nStep 1550\r\nStep 1551\r\nStep 1552\r\n",,terminal_output +3650,2855811,"TERMINAL",0,0,"Step 1553\r\nStep 1554\r\nStep 1555\r\n",,terminal_output +3651,2855882,"TERMINAL",0,0,"Step 1556\r\nStep 1557\r\nStep 1558\r\nStep 1559\r\n",,terminal_output +3652,2855934,"TERMINAL",0,0,"Step 1560\r\nStep 1561\r\n",,terminal_output +3653,2856089,"TERMINAL",0,0,"Step 1562\r\nStep 1563\r\nStep 1564\r\nStep 1565\r\nStep 1566\r\nStep 1567\r\nStep 1568\r\nStep 1569\r\nStep 1570\r\n",,terminal_output +3654,2856149,"TERMINAL",0,0,"Step 1571\r\n",,terminal_output +3655,2856258,"TERMINAL",0,0,"Step 1572\r\nStep 1573\r\nStep 1574\r\nStep 1575\r\nStep 1576\r\nStep 1577\r\n",,terminal_output +3656,2856368,"TERMINAL",0,0,"Step 1578\r\nStep 1579\r\nStep 1580\r\nStep 1581\r\n",,terminal_output +3657,2856491,"TERMINAL",0,0,"Step 1582\r\nStep 1583\r\nStep 1584\r\nStep 1585\r\nStep 1586\r\nStep 1587\r\n",,terminal_output +3658,2856553,"TERMINAL",0,0,"Step 1588\r\nStep 1589\r\n",,terminal_output +3659,2856617,"TERMINAL",0,0,"Step 1590\r\nStep 1591\r\nStep 1592\r\nStep 1593\r\n",,terminal_output +3660,2856745,"TERMINAL",0,0,"Step 1594\r\nStep 1595\r\nStep 1596\r\nStep 1597\r\nStep 1598\r\n",,terminal_output +3661,2856807,"TERMINAL",0,0,"Step 1599\r\nStep 1600\r\nStep 1601\r\nStep 
1602\r\nStep 1603\r\n",,terminal_output +3662,2856915,"TERMINAL",0,0,"Step 1604\r\nStep 1605\r\nStep 1606\r\n",,terminal_output +3663,2857090,"TERMINAL",0,0,"Step 1607\r\nStep 1608\r\nStep 1609\r\nStep 1610\r\nStep 1611\r\nStep 1612\r\nStep 1613\r\nStep 1614\r\n",,terminal_output +3664,2857153,"TERMINAL",0,0,"Step 1615\r\nStep 1616\r\n",,terminal_output +3665,2857204,"TERMINAL",0,0,"Step 1617\r\n",,terminal_output +3666,2857305,"TERMINAL",0,0,"Step 1618\r\nStep 1619\r\nStep 1620\r\nStep 1621\r\nStep 1622\r\nStep 1623\r\n",,terminal_output +3667,2857419,"TERMINAL",0,0,"Step 1624\r\nStep 1625\r\nStep 1626\r\nStep 1627\r\nStep 1628\r\nStep 1629\r\n",,terminal_output +3668,2857473,"TERMINAL",0,0,"Step 1630\r\n",,terminal_output +3669,2857600,"TERMINAL",0,0,"Step 1631\r\nStep 1632\r\nStep 1633\r\nStep 1634\r\nStep 1635\r\nStep 1636\r\nStep 1637\r\nStep 1638\r\nStep 1639\r\n",,terminal_output +3670,2857695,"TERMINAL",0,0,"Step 1640\r\n",,terminal_output +3671,2857760,"TERMINAL",0,0,"Step 1641\r\nStep 1642\r\n",,terminal_output +3672,2857874,"TERMINAL",0,0,"Step 1643\r\nStep 1644\r\nStep 1645\r\nStep 1646\r\nStep 1647\r\nStep 1648\r\nStep 1649\r\nStep 1650\r\nStep 1651\r\nStep 1652\r\n",,terminal_output +3673,2857941,"TERMINAL",0,0,"Step 1653\r\nStep 1654\r\nStep 1655\r\nStep 1656\r\n",,terminal_output +3674,2858113,"TERMINAL",0,0,"Step 1657\r\nStep 1658\r\nStep 1659\r\nStep 1660\r\nStep 1661\r\nStep 1662\r\nStep 1663\r\nStep 1664\r\nStep 1665\r\nStep 1666\r\nStep 1667\r\n",,terminal_output +3675,2858236,"TERMINAL",0,0,"Step 1668\r\nStep 1669\r\nStep 1670\r\nStep 1671\r\nStep 1672\r\nStep 1673\r\nStep 1674\r\nStep 1675\r\n",,terminal_output +3676,2858363,"TERMINAL",0,0,"Step 1676\r\nStep 1677\r\nStep 1678\r\nStep 1679\r\nStep 1680\r\nStep 1681\r\n",,terminal_output +3677,2858472,"TERMINAL",0,0,"Step 1682\r\nStep 1683\r\nStep 1684\r\nStep 1685\r\nStep 1686\r\nStep 1687\r\nStep 1688\r\nStep 1689\r\n",,terminal_output +3678,2858582,"TERMINAL",0,0,"Step 1690\r\nStep 1691\r\nStep 1692\r\nStep 1693\r\nStep 1694\r\n",,terminal_output +3679,2858694,"TERMINAL",0,0,"Step 1695\r\nStep 1696\r\nStep 1697\r\nStep 1698\r\nStep 1699\r\nStep 1700\r\n",,terminal_output +3680,2858811,"TERMINAL",0,0,"Step 1701\r\nStep 1702\r\nStep 1703\r\nStep 1704\r\n",,terminal_output +3681,2858866,"TERMINAL",0,0,"Step 1705\r\nStep 1706\r\n",,terminal_output +3682,2858968,"TERMINAL",0,0,"Step 1707\r\nStep 1708\r\nStep 1709\r\nStep 1710\r\nStep 1711\r\n",,terminal_output +3683,2859137,"TERMINAL",0,0,"Step 1712\r\nStep 1713\r\nStep 1714\r\nStep 1715\r\nStep 1716\r\nStep 1717\r\nStep 1718\r\nStep 1719\r\nStep 1720\r\nStep 1721\r\n",,terminal_output +3684,2859205,"TERMINAL",0,0,"Step 1722\r\nStep 1723\r\n",,terminal_output +3685,2859302,"TERMINAL",0,0,"Step 1724\r\n",,terminal_output +3686,2859370,"TERMINAL",0,0,"Step 1725\r\nStep 1726\r\nStep 1727\r\nStep 1728\r\nStep 1729\r\nStep 1730\r\n",,terminal_output +3687,2859487,"TERMINAL",0,0,"Step 1731\r\nStep 1732\r\n",,terminal_output +3688,2859547,"TERMINAL",0,0,"Step 1733\r\nStep 1734\r\nStep 1735\r\n",,terminal_output +3689,2859610,"TERMINAL",0,0,"Step 1736\r\n",,terminal_output +3690,2859735,"TERMINAL",0,0,"Step 1737\r\nStep 1738\r\nStep 1739\r\nStep 1740\r\nStep 1741\r\nStep 1742\r\nStep 1743\r\n",,terminal_output +3691,2859800,"TERMINAL",0,0,"Step 1744\r\n",,terminal_output +3692,2859863,"TERMINAL",0,0,"Step 1745\r\nStep 1746\r\nStep 1747\r\n",,terminal_output +3693,2859922,"TERMINAL",0,0,"Step 1748\r\n",,terminal_output +3694,2860011,"TERMINAL",0,0,"Step 1749\r\nStep 1750\r\nStep 
1751\r\nStep 1752\r\n",,terminal_output +3695,2860074,"TERMINAL",0,0,"Step 1753\r\nStep 1754\r\n",,terminal_output +3696,2860150,"TERMINAL",0,0,"Step 1755\r\nStep 1756\r\nStep 1757\r\nStep 1758\r\nStep 1759\r\nStep 1760\r\nStep 1761\r\n",,terminal_output +3697,2860259,"TERMINAL",0,0,"Step 1762\r\nStep 1763\r\nStep 1764\r\nStep 1765\r\n",,terminal_output +3698,2860390,"TERMINAL",0,0,"Step 1766\r\nStep 1767\r\nStep 1768\r\nStep 1769\r\nStep 1770\r\nStep 1771\r\nStep 1772\r\n",,terminal_output +3699,2860454,"TERMINAL",0,0,"Step 1773\r\nStep 1774\r\nStep 1775\r\nStep 1776\r\n",,terminal_output +3700,2860563,"TERMINAL",0,0,"Step 1777\r\nStep 1778\r\nStep 1779\r\nStep 1780\r\nStep 1781\r\nStep 1782\r\nStep 1783\r\n",,terminal_output +3701,2860646,"TERMINAL",0,0,"Step 1784\r\nStep 1785\r\nStep 1786\r\nStep 1787\r\n",,terminal_output +3702,2860720,"TERMINAL",0,0,"Step 1788\r\nStep 1789\r\nStep 1790\r\n",,terminal_output +3703,2860788,"TERMINAL",0,0,"Step 1791\r\n",,terminal_output +3704,2860927,"TERMINAL",0,0,"Step 1792\r\nStep 1793\r\nStep 1794\r\nStep 1795\r\nStep 1796\r\nStep 1797\r\nStep 1798\r\nStep 1799\r\n",,terminal_output +3705,2861367,"TERMINAL",0,0,"Step 1800\r\nStep 1801\r\nStep 1802\r\nStep 1803\r\nStep 1804\r\nStep 1805\r\nStep 1806\r\nStep 1807\r\nStep 1808\r\nStep 1809\r\nStep 1810\r\nStep 1811\r\nStep 1812\r\nStep 1813\r\nStep 1814\r\nStep 1815\r\nStep 1816\r\nStep 1817\r\nStep 1818\r\nStep 1819\r\nStep 1820\r\nStep 1821\r\nStep 1822\r\nStep 1823\r\nStep 1824\r\nStep 1825\r\nStep 1826\r\nStep 1827\r\nStep 1828\r\nStep 1829\r\nStep 1830\r\nStep 1831\r\nStep 1832\r\nStep 1833\r\nStep 1834\r\nStep 1835\r\nStep 1836\r\nStep 1837\r\nStep 1838\r\nStep 1839\r\nStep 1840\r\nStep 1841\r\nStep 1842\r\nStep 1843\r\nStep 1844\r\n",,terminal_output +3706,2861422,"TERMINAL",0,0,"Step 1845\r\nStep 1846\r\n",,terminal_output +3707,2861594,"TERMINAL",0,0,"Step 1847\r\n",,terminal_output +3708,2861666,"TERMINAL",0,0,"Step 1848\r\nStep 1849\r\nStep 1850\r\nStep 1851\r\nStep 1852\r\nStep 1853\r\nStep 1854\r\n",,terminal_output +3709,2861830,"TERMINAL",0,0,"Step 1855\r\nStep 1856\r\n",,terminal_output +3710,2861918,"TERMINAL",0,0,"Step 1857\r\nStep 1858\r\nStep 1859\r\n",,terminal_output +3711,2861985,"TERMINAL",0,0,"Step 1860\r\nStep 1861\r\nStep 1862\r\nStep 1863\r\nStep 1864\r\n",,terminal_output +3712,2862062,"TERMINAL",0,0,"Step 1865\r\nStep 1866\r\n",,terminal_output +3713,2862125,"TERMINAL",0,0,"Step 1867\r\nStep 1868\r\nStep 1869\r\n",,terminal_output +3714,2862235,"TERMINAL",0,0,"Step 1870\r\nStep 1871\r\nStep 1872\r\n",,terminal_output +3715,2862390,"TERMINAL",0,0,"Step 1873\r\nStep 1874\r\nStep 1875\r\nStep 1876\r\nStep 1877\r\n",,terminal_output +3716,2862454,"TERMINAL",0,0,"Step 1878\r\n",,terminal_output +3717,2862651,"TERMINAL",0,0,"Step 1879\r\nStep 1880\r\nStep 1881\r\nStep 1882\r\nStep 1883\r\nStep 1884\r\nStep 1885\r\nStep 1886\r\n",,terminal_output +3718,2862703,"TERMINAL",0,0,"Step 1887\r\n",,terminal_output +3719,2862878,"TERMINAL",0,0,"Step 1888\r\nStep 1889\r\nStep 1890\r\nStep 1891\r\nStep 1892\r\nStep 1893\r\nStep 1894\r\nStep 1895\r\nStep 1896\r\nStep 1897\r\n",,terminal_output +3720,2862995,"TERMINAL",0,0,"Step 1898\r\nStep 1899\r\nStep 1900\r\nStep 1901\r\n",,terminal_output +3721,2863059,"TERMINAL",0,0,"Step 1902\r\nStep 1903\r\nStep 1904\r\nStep 1905\r\nStep 1906\r\n",,terminal_output +3722,2863168,"TERMINAL",0,0,"Step 1907\r\nStep 1908\r\n",,terminal_output +3723,2863229,"TERMINAL",0,0,"Step 1909\r\nStep 1910\r\nStep 1911\r\n",,terminal_output 
+3724,2863313,"TERMINAL",0,0,"Step 1912\r\nStep 1913\r\n",,terminal_output +3725,2863379,"TERMINAL",0,0,"Step 1914\r\nStep 1915\r\nStep 1916\r\nStep 1917\r\nStep 1918\r\nStep 1919\r\n",,terminal_output +3726,2863645,"TERMINAL",0,0,"Step 1920\r\nStep 1921\r\nStep 1922\r\nStep 1923\r\nStep 1924\r\nStep 1925\r\nStep 1926\r\nStep 1927\r\nStep 1928\r\nStep 1929\r\nStep 1930\r\nStep 1931\r\nStep 1932\r\nStep 1933\r\nStep 1934\r\nStep 1935\r\nStep 1936\r\nStep 1937\r\nStep 1938\r\nStep 1939\r\nStep 1940\r\nStep 1941\r\nStep 1942\r\nStep 1943\r\nStep 1944\r\nStep 1945\r\nStep 1946\r\n",,terminal_output +3727,2863704,"TERMINAL",0,0,"Step 1947\r\nStep 1948\r\nStep 1949\r\nStep 1950\r\nStep 1951\r\nStep 1952\r\nStep 1953\r\nStep 1954\r\nStep 1955\r\nStep 1956\r\nStep 1957\r\nStep 1958\r\n",,terminal_output +3728,2863785,"TERMINAL",0,0,"Step 1959\r\nStep 1960\r\nStep 1961\r\nStep 1962\r\nStep 1963\r\nStep 1964\r\n",,terminal_output +3729,2864098,"TERMINAL",0,0,"Step 1965\r\nStep 1966\r\nStep 1967\r\nStep 1968\r\nStep 1969\r\nStep 1970\r\nStep 1971\r\nStep 1972\r\nStep 1973\r\nStep 1974\r\nStep 1975\r\nStep 1976\r\n",,terminal_output +3730,2864159,"TERMINAL",0,0,"Step 1977\r\nStep 1978\r\n",,terminal_output +3731,2864220,"TERMINAL",0,0,"Step 1979\r\nStep 1980\r\nStep 1981\r\nStep 1982\r\nStep 1983\r\n",,terminal_output +3732,2864327,"TERMINAL",0,0,"Step 1984\r\nStep 1985\r\n",,terminal_output +3733,2864406,"TERMINAL",0,0,"Step 1986\r\nStep 1987\r\nStep 1988\r\nStep 1989\r\n",,terminal_output +3734,2864502,"TERMINAL",0,0,"Step 1990\r\n",,terminal_output +3735,2864555,"TERMINAL",0,0,"Step 1991\r\n",,terminal_output +3736,2864705,"TERMINAL",0,0,"Step 1992\r\nStep 1993\r\nStep 1994\r\nStep 1995\r\n",,terminal_output +3737,2864761,"TERMINAL",0,0,"Step 1996\r\n",,terminal_output +3738,2864814,"TERMINAL",0,0,"Step 1997\r\n",,terminal_output +3739,2864877,"TERMINAL",0,0,"Step 1998\r\n",,terminal_output +3740,2864941,"TERMINAL",0,0,"Step 1999\r\n",,terminal_output +3741,2865049,"TERMINAL",0,0,"Step 2000\r\nStep 2001\r\nStep 2002\r\n",,terminal_output +3742,2865119,"TERMINAL",0,0,"Step 2003\r\nStep 2004\r\n",,terminal_output +3743,2865296,"TERMINAL",0,0,"Step 2005\r\nStep 2006\r\nStep 2007\r\nStep 2008\r\nStep 2009\r\nStep 2010\r\nStep 2011\r\nStep 2012\r\nStep 2013\r\nStep 2014\r\n",,terminal_output +3744,2865418,"TERMINAL",0,0,"Step 2015\r\nStep 2016\r\n",,terminal_output +3745,2865528,"TERMINAL",0,0,"Step 2017\r\nStep 2018\r\nStep 2019\r\nStep 2020\r\nStep 2021\r\n",,terminal_output +3746,2865593,"TERMINAL",0,0,"Step 2022\r\nStep 2023\r\nStep 2024\r\n",,terminal_output +3747,2865656,"TERMINAL",0,0,"Step 2025\r\nStep 2026\r\nStep 2027\r\n",,terminal_output +3748,2865718,"TERMINAL",0,0,"Step 2028\r\n",,terminal_output +3749,2865854,"TERMINAL",0,0,"Step 2029\r\nStep 2030\r\nStep 2031\r\nStep 2032\r\nStep 2033\r\nStep 2034\r\nStep 2035\r\nStep 2036\r\nStep 2037\r\n",,terminal_output +3750,2865976,"TERMINAL",0,0,"Step 2038\r\nStep 2039\r\n",,terminal_output +3751,2866063,"TERMINAL",0,0,"Step 2040\r\nStep 2041\r\nStep 2042\r\nStep 2043\r\nStep 2044\r\nStep 2045\r\nStep 2046\r\nStep 2047\r\n",,terminal_output +3752,2866177,"TERMINAL",0,0,"Step 2048\r\nStep 2049\r\nStep 2050\r\nStep 2051\r\nStep 2052\r\nStep 2053\r\n",,terminal_output +3753,2866293,"TERMINAL",0,0,"Step 2054\r\nStep 2055\r\nStep 2056\r\nStep 2057\r\nStep 2058\r\n",,terminal_output +3754,2866450,"TERMINAL",0,0,"Step 2059\r\nStep 2060\r\nStep 2061\r\nStep 2062\r\nStep 2063\r\nStep 2064\r\nStep 2065\r\nStep 2066\r\nStep 2067\r\nStep 
2068\r\n",,terminal_output +3755,2866566,"TERMINAL",0,0,"Step 2069\r\nStep 2070\r\nStep 2071\r\nStep 2072\r\nStep 2073\r\n",,terminal_output +3756,2866681,"TERMINAL",0,0,"Step 2074\r\nStep 2075\r\nStep 2076\r\nStep 2077\r\nStep 2078\r\nStep 2079\r\n",,terminal_output +3757,2866825,"TERMINAL",0,0,"Step 2080\r\nStep 2081\r\nStep 2082\r\nStep 2083\r\nStep 2084\r\n",,terminal_output +3758,2866942,"TERMINAL",0,0,"Step 2085\r\nStep 2086\r\nStep 2087\r\nStep 2088\r\nStep 2089\r\nStep 2090\r\nStep 2091\r\n",,terminal_output +3759,2867001,"TERMINAL",0,0,"Step 2092\r\n",,terminal_output +3760,2867072,"TERMINAL",0,0,"Step 2093\r\nStep 2094\r\nStep 2095\r\nStep 2096\r\n",,terminal_output +3761,2867164,"TERMINAL",0,0,"Step 2097\r\nStep 2098\r\nStep 2099\r\nStep 2100\r\nStep 2101\r\nStep 2102\r\nStep 2103\r\nStep 2104\r\nStep 2105\r\nStep 2106\r\nStep 2107\r\n",,terminal_output +3762,2867305,"TERMINAL",0,0,"Step 2108\r\nStep 2109\r\nStep 2110\r\nStep 2111\r\nStep 2112\r\n",,terminal_output +3763,2867422,"TERMINAL",0,0,"Step 2113\r\nStep 2114\r\nStep 2115\r\n",,terminal_output +3764,2867489,"TERMINAL",0,0,"Step 2116\r\nStep 2117\r\nStep 2118\r\nStep 2119\r\nStep 2120\r\n",,terminal_output +3765,2867609,"TERMINAL",0,0,"Step 2121\r\nStep 2122\r\nStep 2123\r\nStep 2124\r\nStep 2125\r\nStep 2126\r\nStep 2127\r\nStep 2128\r\nStep 2129\r\n",,terminal_output +3766,2867719,"TERMINAL",0,0,"Step 2130\r\nStep 2131\r\nStep 2132\r\n",,terminal_output +3767,2867849,"TERMINAL",0,0,"Step 2133\r\nStep 2134\r\nStep 2135\r\nStep 2136\r\nStep 2137\r\nStep 2138\r\nStep 2139\r\nStep 2140\r\nStep 2141\r\nStep 2142\r\n",,terminal_output +3768,2867925,"TERMINAL",0,0,"Step 2143\r\nStep 2144\r\nStep 2145\r\nStep 2146\r\nStep 2147\r\n",,terminal_output +3769,2868029,"TERMINAL",0,0,"Step 2148\r\nStep 2149\r\nStep 2150\r\nStep 2151\r\nStep 2152\r\n",,terminal_output +3770,2868136,"TERMINAL",0,0,"Step 2153\r\nStep 2154\r\nStep 2155\r\nStep 2156\r\nStep 2157\r\nStep 2158\r\nStep 2159\r\n",,terminal_output +3771,2868243,"TERMINAL",0,0,"Step 2160\r\nStep 2161\r\nStep 2162\r\nStep 2163\r\n",,terminal_output +3772,2868355,"TERMINAL",0,0,"Step 2164\r\nStep 2165\r\nStep 2166\r\nStep 2167\r\nStep 2168\r\nStep 2169\r\nStep 2170\r\n",,terminal_output +3773,2868416,"TERMINAL",0,0,"Step 2171\r\n",,terminal_output +3774,2868479,"TERMINAL",0,0,"Step 2172\r\nStep 2173\r\nStep 2174\r\nStep 2175\r\nStep 2176\r\n",,terminal_output +3775,2868542,"TERMINAL",0,0,"Step 2177\r\n",,terminal_output +3776,2868629,"TERMINAL",0,0,"Step 2178\r\nStep 2179\r\n",,terminal_output +3777,2868708,"TERMINAL",0,0,"Step 2180\r\nStep 2181\r\nStep 2182\r\nStep 2183\r\nStep 2184\r\nStep 2185\r\nStep 2186\r\n",,terminal_output +3778,2868891,"TERMINAL",0,0,"Step 2187\r\nStep 2188\r\nStep 2189\r\nStep 2190\r\nStep 2191\r\nStep 2192\r\nStep 2193\r\n",,terminal_output +3779,2869000,"TERMINAL",0,0,"Step 2194\r\nStep 2195\r\nStep 2196\r\nStep 2197\r\nStep 2198\r\nStep 2199\r\n",,terminal_output +3780,2869128,"TERMINAL",0,0,"Step 2200\r\nStep 2201\r\nStep 2202\r\nStep 2203\r\n",,terminal_output +3781,2869238,"TERMINAL",0,0,"Step 2204\r\nStep 2205\r\nStep 2206\r\nStep 2207\r\nStep 2208\r\n",,terminal_output +3782,2869297,"TERMINAL",0,0,"Step 2209\r\n",,terminal_output +3783,2869423,"TERMINAL",0,0,"Step 2210\r\nStep 2211\r\nStep 2212\r\nStep 2213\r\nStep 2214\r\n",,terminal_output +3784,2869487,"TERMINAL",0,0,"Step 2215\r\nStep 2216\r\nStep 2217\r\nStep 2218\r\n",,terminal_output +3785,2869550,"TERMINAL",0,0,"Step 2219\r\n",,terminal_output +3786,2869701,"TERMINAL",0,0,"Step 
2220\r\nStep 2221\r\nStep 2222\r\nStep 2223\r\nStep 2224\r\nStep 2225\r\nStep 2226\r\n",,terminal_output +3787,2869775,"TERMINAL",0,0,"Step 2227\r\nStep 2228\r\nStep 2229\r\nStep 2230\r\nStep 2231\r\nStep 2232\r\n",,terminal_output +3788,2869887,"TERMINAL",0,0,"Step 2233\r\nStep 2234\r\nStep 2235\r\nStep 2236\r\nStep 2237\r\nStep 2238\r\nStep 2239\r\n",,terminal_output +3789,2870007,"TERMINAL",0,0,"Step 2240\r\nStep 2241\r\nStep 2242\r\nStep 2243\r\nStep 2244\r\n",,terminal_output +3790,2870251,"TERMINAL",0,0,"Step 2245\r\nStep 2246\r\nStep 2247\r\nStep 2248\r\nStep 2249\r\nStep 2250\r\nStep 2251\r\nStep 2252\r\nStep 2253\r\nStep 2254\r\nStep 2255\r\nStep 2256\r\nStep 2257\r\n",,terminal_output +3791,2870371,"TERMINAL",0,0,"Step 2258\r\nStep 2259\r\nStep 2260\r\nStep 2261\r\nStep 2262\r\n",,terminal_output +3792,2870558,"TERMINAL",0,0,"Step 2263\r\nStep 2264\r\nStep 2265\r\nStep 2266\r\nStep 2267\r\nStep 2268\r\nStep 2269\r\nStep 2270\r\nStep 2271\r\nStep 2272\r\nStep 2273\r\n",,terminal_output +3793,2870827,"TERMINAL",0,0,"Step 2274\r\nStep 2275\r\nStep 2276\r\nStep 2277\r\nStep 2278\r\nStep 2279\r\nStep 2280\r\nStep 2281\r\nStep 2282\r\nStep 2283\r\nStep 2284\r\nStep 2285\r\nStep 2286\r\nStep 2287\r\nStep 2288\r\nStep 2289\r\nStep 2290\r\nStep 2291\r\nStep 2292\r\nStep 2293\r\nStep 2294\r\nStep 2295\r\nStep 2296\r\nStep 2297\r\nStep 2298\r\nStep 2299\r\nStep 2300\r\nStep 2301\r\nStep 2302\r\nStep 2303\r\nStep 2304\r\nStep 2305\r\nStep 2306\r\nStep 2307\r\nStep 2308\r\nStep 2309\r\n",,terminal_output +3794,2870895,"TERMINAL",0,0,"Step 2310\r\nStep 2311\r\nStep 2312\r\n",,terminal_output +3795,2871011,"TERMINAL",0,0,"Step 2313\r\nStep 2314\r\nStep 2315\r\nStep 2316\r\nStep 2317\r\nStep 2318\r\n",,terminal_output +3796,2871118,"TERMINAL",0,0,"Step 2319\r\n",,terminal_output +3797,2871277,"TERMINAL",0,0,"Step 2320\r\nStep 2321\r\nStep 2322\r\nStep 2323\r\nStep 2324\r\n",,terminal_output +3798,2871343,"TERMINAL",0,0,"Step 2325\r\nStep 2326\r\n",,terminal_output +3799,2871454,"TERMINAL",0,0,"Step 2327\r\nStep 2328\r\nStep 2329\r\n",,terminal_output +3800,2871512,"TERMINAL",0,0,"Step 2330\r\nStep 2331\r\nStep 2332\r\n",,terminal_output +3801,2871609,"TERMINAL",0,0,"Step 2333\r\nStep 2334\r\n",,terminal_output +3802,2871672,"TERMINAL",0,0,"Step 2335\r\nStep 2336\r\n",,terminal_output +3803,2871859,"TERMINAL",0,0,"Step 2337\r\nStep 2338\r\nStep 2339\r\nStep 2340\r\nStep 2341\r\nStep 2342\r\nStep 2343\r\nStep 2344\r\nStep 2345\r\n",,terminal_output +3804,2872064,"TERMINAL",0,0,"Step 2346\r\nStep 2347\r\nStep 2348\r\nStep 2349\r\nStep 2350\r\nStep 2351\r\n",,terminal_output +3805,2872279,"TERMINAL",0,0,"Step 2352\r\nStep 2353\r\nStep 2354\r\nStep 2355\r\nStep 2356\r\nStep 2357\r\nStep 2358\r\nStep 2359\r\n",,terminal_output +3806,2872332,"TERMINAL",0,0,"Step 2360\r\nStep 2361\r\n",,terminal_output +3807,2872445,"TERMINAL",0,0,"Step 2362\r\nStep 2363\r\nStep 2364\r\nStep 2365\r\nStep 2366\r\nStep 2367\r\n",,terminal_output +3808,2872555,"TERMINAL",0,0,"Step 2368\r\nStep 2369\r\nStep 2370\r\n",,terminal_output +3809,2872751,"TERMINAL",0,0,"Step 2371\r\nStep 2372\r\nStep 2373\r\nStep 2374\r\nStep 2375\r\nStep 2376\r\nStep 2377\r\nStep 2378\r\nStep 2379\r\nStep 2380\r\nStep 2381\r\nStep 2382\r\nStep 2383\r\nStep 2384\r\n",,terminal_output +3810,2872920,"TERMINAL",0,0,"Step 2385\r\nStep 2386\r\nStep 2387\r\nStep 2388\r\nStep 2389\r\nStep 2390\r\nStep 2391\r\nStep 2392\r\nStep 2393\r\nStep 2394\r\nStep 2395\r\nStep 2396\r\n",,terminal_output +3811,2872983,"TERMINAL",0,0,"Step 2397\r\nStep 2398\r\nStep 
2399\r\nStep 2400\r\nStep 2401\r\nStep 2402\r\nStep 2403\r\nStep 2404\r\nStep 2405\r\n",,terminal_output +3812,2873133,"TERMINAL",0,0,"Step 2406\r\nStep 2407\r\nStep 2408\r\nStep 2409\r\nStep 2410\r\nStep 2411\r\nStep 2412\r\nStep 2413\r\n",,terminal_output +3813,2873370,"TERMINAL",0,0,"Step 2414\r\nStep 2415\r\nStep 2416\r\nStep 2417\r\nStep 2418\r\nStep 2419\r\nStep 2420\r\nStep 2421\r\nStep 2422\r\nStep 2423\r\nStep 2424\r\nStep 2425\r\nStep 2426\r\nStep 2427\r\nStep 2428\r\n",,terminal_output +3814,2873440,"TERMINAL",0,0,"Step 2429\r\nStep 2430\r\nStep 2431\r\nStep 2432\r\nStep 2433\r\nStep 2434\r\nStep 2435\r\nStep 2436\r\n",,terminal_output +3815,2873599,"TERMINAL",0,0,"Step 2437\r\n",,terminal_output +3816,2873729,"TERMINAL",0,0,"Step 2438\r\nStep 2439\r\nStep 2440\r\nStep 2441\r\nStep 2442\r\nStep 2443\r\nStep 2444\r\nStep 2445\r\nStep 2446\r\nStep 2447\r\nStep 2448\r\nStep 2449\r\nStep 2450\r\nStep 2451\r\nStep 2452\r\nStep 2453\r\n",,terminal_output +3817,2874030,"TERMINAL",0,0,"Step 2454\r\nStep 2455\r\nStep 2456\r\nStep 2457\r\nStep 2458\r\nStep 2459\r\nStep 2460\r\nStep 2461\r\nStep 2462\r\nStep 2463\r\nStep 2464\r\nStep 2465\r\n",,terminal_output +3818,2874162,"TERMINAL",0,0,"Step 2466\r\n",,terminal_output +3819,2874226,"TERMINAL",0,0,"Step 2467\r\nStep 2468\r\nStep 2469\r\n",,terminal_output +3820,2874289,"TERMINAL",0,0,"Step 2470\r\nStep 2471\r\nStep 2472\r\n",,terminal_output +3821,2874468,"TERMINAL",0,0,"Step 2473\r\nStep 2474\r\nStep 2475\r\nStep 2476\r\n",,terminal_output +3822,2874579,"TERMINAL",0,0,"Step 2477\r\nStep 2478\r\nStep 2479\r\n",,terminal_output +3823,2874705,"TERMINAL",0,0,"Step 2480\r\nStep 2481\r\nStep 2482\r\nStep 2483\r\nStep 2484\r\n",,terminal_output +3824,2874815,"TERMINAL",0,0,"Step 2485\r\n",,terminal_output +3825,2874946,"TERMINAL",0,0,"Step 2486\r\nStep 2487\r\nStep 2488\r\nStep 2489\r\nStep 2490\r\n",,terminal_output +3826,2875132,"TERMINAL",0,0,"Step 2491\r\nStep 2492\r\nStep 2493\r\nStep 2494\r\nStep 2495\r\n",,terminal_output +3827,2875270,"TERMINAL",0,0,"Step 2496\r\nStep 2497\r\nStep 2498\r\nStep 2499\r\n",,terminal_output +3828,2875325,"TERMINAL",0,0,"Step 2500\r\nStep 2501\r\nStep 2502\r\nStep 2503\r\n",,terminal_output +3829,2875398,"TERMINAL",0,0,"Step 2504\r\nStep 2505\r\nStep 2506\r\nStep 2507\r\n",,terminal_output +3830,2875456,"TERMINAL",0,0,"Step 2508\r\nStep 2509\r\nStep 2510\r\n",,terminal_output +3831,2875590,"TERMINAL",0,0,"Step 2511\r\nStep 2512\r\nStep 2513\r\nStep 2514\r\nStep 2515\r\n",,terminal_output +3832,2875651,"TERMINAL",0,0,"Step 2516\r\nStep 2517\r\nStep 2518\r\nStep 2519\r\n",,terminal_output +3833,2875758,"TERMINAL",0,0,"Step 2520\r\nStep 2521\r\n",,terminal_output +3834,2875866,"TERMINAL",0,0,"Step 2522\r\nStep 2523\r\nStep 2524\r\nStep 2525\r\n",,terminal_output +3835,2875983,"TERMINAL",0,0,"Step 2526\r\nStep 2527\r\nStep 2528\r\nStep 2529\r\nStep 2530\r\nStep 2531\r\nStep 2532\r\nStep 2533\r\n",,terminal_output +3836,2876057,"TERMINAL",0,0,"Step 2534\r\nStep 2535\r\nStep 2536\r\nStep 2537\r\n",,terminal_output +3837,2876178,"TERMINAL",0,0,"Step 2538\r\nStep 2539\r\nStep 2540\r\nStep 2541\r\nStep 2542\r\n",,terminal_output +3838,2876327,"TERMINAL",0,0,"Step 2543\r\nStep 2544\r\nStep 2545\r\nStep 2546\r\nStep 2547\r\nStep 2548\r\nStep 2549\r\nStep 2550\r\n",,terminal_output +3839,2876399,"TERMINAL",0,0,"Step 2551\r\nStep 2552\r\nStep 2553\r\nStep 2554\r\n",,terminal_output +3840,2876489,"TERMINAL",0,0,"Step 2555\r\nStep 2556\r\nStep 2557\r\nStep 2558\r\nStep 2559\r\n",,terminal_output 
+3841,2876556,"TERMINAL",0,0,"Step 2560\r\nStep 2561\r\nStep 2562\r\n",,terminal_output +3842,2876670,"TERMINAL",0,0,"Step 2563\r\nStep 2564\r\nStep 2565\r\nStep 2566\r\n",,terminal_output +3843,2876734,"TERMINAL",0,0,"Step 2567\r\nStep 2568\r\n",,terminal_output +3844,2876796,"TERMINAL",0,0,"Step 2569\r\nStep 2570\r\n",,terminal_output +3845,2876892,"TERMINAL",0,0,"Step 2571\r\nStep 2572\r\nStep 2573\r\n",,terminal_output +3846,2877129,"TERMINAL",0,0,"Step 2574\r\nStep 2575\r\nStep 2576\r\nStep 2577\r\nStep 2578\r\nStep 2579\r\nStep 2580\r\nStep 2581\r\nStep 2582\r\nStep 2583\r\nStep 2584\r\nStep 2585\r\nStep 2586\r\nStep 2587\r\nStep 2588\r\nStep 2589\r\n",,terminal_output +3847,2877244,"TERMINAL",0,0,"Step 2590\r\nStep 2591\r\nStep 2592\r\nStep 2593\r\nStep 2594\r\n",,terminal_output +3848,2877395,"TERMINAL",0,0,"Step 2595\r\nStep 2596\r\nStep 2597\r\nStep 2598\r\nStep 2599\r\nStep 2600\r\nStep 2601\r\n",,terminal_output +3849,2877459,"TERMINAL",0,0,"Step 2602\r\nStep 2603\r\nStep 2604\r\nStep 2605\r\n",,terminal_output +3850,2877579,"TERMINAL",0,0,"Step 2606\r\nStep 2607\r\nStep 2608\r\nStep 2609\r\nStep 2610\r\nStep 2611\r\nStep 2612\r\nStep 2613\r\n",,terminal_output +3851,2877687,"TERMINAL",0,0,"Step 2614\r\nStep 2615\r\nStep 2616\r\nStep 2617\r\nStep 2618\r\nStep 2619\r\nStep 2620\r\n",,terminal_output +3852,2877800,"TERMINAL",0,0,"Step 2621\r\nStep 2622\r\nStep 2623\r\nStep 2624\r\nStep 2625\r\n",,terminal_output +3853,2877927,"TERMINAL",0,0,"Step 2626\r\nStep 2627\r\nStep 2628\r\nStep 2629\r\nStep 2630\r\nStep 2631\r\nStep 2632\r\nStep 2633\r\nStep 2634\r\nStep 2635\r\nStep 2636\r\n",,terminal_output +3854,2878049,"TERMINAL",0,0,"Step 2637\r\nStep 2638\r\nStep 2639\r\nStep 2640\r\nStep 2641\r\n",,terminal_output +3855,2878106,"TERMINAL",0,0,"Step 2642\r\nStep 2643\r\nStep 2644\r\nStep 2645\r\n",,terminal_output +3856,2878296,"TERMINAL",0,0,"Step 2646\r\nStep 2647\r\nStep 2648\r\nStep 2649\r\nStep 2650\r\nStep 2651\r\nStep 2652\r\n",,terminal_output +3857,2878483,"TERMINAL",0,0,"Step 2653\r\nStep 2654\r\nStep 2655\r\nStep 2656\r\nStep 2657\r\nStep 2658\r\nStep 2659\r\nStep 2660\r\nStep 2661\r\nStep 2662\r\n",,terminal_output +3858,2878602,"TERMINAL",0,0,"Step 2663\r\nStep 2664\r\nStep 2665\r\n",,terminal_output +3859,2878810,"TERMINAL",0,0,"Step 2666\r\nStep 2667\r\nStep 2668\r\nStep 2669\r\nStep 2670\r\nStep 2671\r\nStep 2672\r\nStep 2673\r\nStep 2674\r\n",,terminal_output +3860,2878990,"TERMINAL",0,0,"Step 2675\r\nStep 2676\r\nStep 2677\r\nStep 2678\r\nStep 2679\r\nStep 2680\r\nStep 2681\r\nStep 2682\r\nStep 2683\r\n",,terminal_output +3861,2879057,"TERMINAL",0,0,"Step 2684\r\n",,terminal_output +3862,2879120,"TERMINAL",0,0,"Step 2685\r\nStep 2686\r\n",,terminal_output +3863,2879191,"TERMINAL",0,0,"Step 2687\r\nStep 2688\r\nStep 2689\r\nStep 2690\r\n",,terminal_output +3864,2879244,"TERMINAL",0,0,"Step 2691\r\n",,terminal_output +3865,2879373,"TERMINAL",0,0,"Step 2692\r\nStep 2693\r\nStep 2694\r\nStep 2695\r\n",,terminal_output +3866,2879433,"TERMINAL",0,0,"Step 2696\r\nStep 2697\r\nStep 2698\r\nStep 2699\r\nStep 2700\r\n",,terminal_output +3867,2879497,"TERMINAL",0,0,"Step 2701\r\nStep 2702\r\n",,terminal_output +3868,2879560,"TERMINAL",0,0,"Step 2703\r\n",,terminal_output +3869,2879623,"TERMINAL",0,0,"Step 2704\r\nStep 2705\r\nStep 2706\r\n",,terminal_output +3870,2879687,"TERMINAL",0,0,"Step 2707\r\nStep 2708\r\nStep 2709\r\nStep 2710\r\nStep 2711\r\nStep 2712\r\nStep 2713\r\nStep 2714\r\nStep 2715\r\nStep 2716\r\n",,terminal_output +3871,2879773,"TERMINAL",0,0,"Step 
2717\r\nStep 2718\r\nStep 2719\r\nStep 2720\r\nStep 2721\r\nStep 2722\r\nStep 2723\r\nStep 2724\r\nStep 2725\r\nStep 2726\r\nStep 2727\r\nStep 2728\r\n",,terminal_output +3872,2879824,"TERMINAL",0,0,"Step 2729\r\n",,terminal_output +3873,2879930,"TERMINAL",0,0,"Step 2730\r\nStep 2731\r\n",,terminal_output +3874,2879998,"TERMINAL",0,0,"Step 2732\r\nStep 2733\r\nStep 2734\r\nStep 2735\r\nStep 2736\r\nStep 2737\r\nStep 2738\r\nStep 2739\r\n",,terminal_output +3875,2880105,"TERMINAL",0,0,"Step 2740\r\nStep 2741\r\nStep 2742\r\n",,terminal_output +3876,2880299,"TERMINAL",0,0,"Step 2743\r\nStep 2744\r\nStep 2745\r\nStep 2746\r\nStep 2747\r\nStep 2748\r\nStep 2749\r\nStep 2750\r\nStep 2751\r\nStep 2752\r\nStep 2753\r\nStep 2754\r\nStep 2755\r\nStep 2756\r\nStep 2757\r\nStep 2758\r\nStep 2759\r\nStep 2760\r\nStep 2761\r\n",,terminal_output +3877,2880387,"TERMINAL",0,0,"Step 2762\r\nStep 2763\r\nStep 2764\r\nStep 2765\r\n",,terminal_output +3878,2880556,"TERMINAL",0,0,"Step 2766\r\nStep 2767\r\nStep 2768\r\n",,terminal_output +3879,2880672,"TERMINAL",0,0,"Step 2769\r\nStep 2770\r\nStep 2771\r\nStep 2772\r\nStep 2773\r\nStep 2774\r\nStep 2775\r\n",,terminal_output +3880,2880724,"TERMINAL",0,0,"Step 2776\r\n",,terminal_output +3881,2880907,"TERMINAL",0,0,"Step 2777\r\nStep 2778\r\nStep 2779\r\nStep 2780\r\nStep 2781\r\nStep 2782\r\nStep 2783\r\nStep 2784\r\nStep 2785\r\n",,terminal_output +3882,2881078,"TERMINAL",0,0,"Step 2786\r\nStep 2787\r\nStep 2788\r\nStep 2789\r\n",,terminal_output +3883,2881148,"TERMINAL",0,0,"Step 2790\r\nStep 2791\r\n",,terminal_output +3884,2881320,"TERMINAL",0,0,"Step 2792\r\n",,terminal_output +3885,2881432,"TERMINAL",0,0,"Step 2793\r\nStep 2794\r\nStep 2795\r\n",,terminal_output +3886,2881546,"TERMINAL",0,0,"Step 2796\r\nStep 2797\r\nStep 2798\r\nStep 2799\r\n",,terminal_output +3887,2881625,"TERMINAL",0,0,"Step 2800\r\nStep 2801\r\nStep 2802\r\nStep 2803\r\n",,terminal_output +3888,2881733,"TERMINAL",0,0,"Step 2804\r\nStep 2805\r\nStep 2806\r\n",,terminal_output +3889,2881829,"TERMINAL",0,0,"Step 2807\r\nStep 2808\r\n",,terminal_output +3890,2881895,"TERMINAL",0,0,"Step 2809\r\nStep 2810\r\nStep 2811\r\nStep 2812\r\n",,terminal_output +3891,2882022,"TERMINAL",0,0,"Step 2813\r\nStep 2814\r\nStep 2815\r\nStep 2816\r\nStep 2817\r\n",,terminal_output +3892,2882086,"TERMINAL",0,0,"Step 2818\r\n",,terminal_output +3893,2882200,"TERMINAL",0,0,"Step 2819\r\nStep 2820\r\nStep 2821\r\nStep 2822\r\n",,terminal_output +3894,2882263,"TERMINAL",0,0,"Step 2823\r\nStep 2824\r\nStep 2825\r\nStep 2826\r\nStep 2827\r\n",,terminal_output +3895,2882323,"TERMINAL",0,0,"Step 2828\r\nStep 2829\r\nStep 2830\r\n",,terminal_output +3896,2882433,"TERMINAL",0,0,"Step 2831\r\nStep 2832\r\nStep 2833\r\nStep 2834\r\nStep 2835\r\n",,terminal_output +3897,2882493,"TERMINAL",0,0,"Step 2836\r\nStep 2837\r\n",,terminal_output +3898,2882567,"TERMINAL",0,0,"Step 2838\r\nStep 2839\r\n",,terminal_output +3899,2882759,"TERMINAL",0,0,"Step 2840\r\nStep 2841\r\nStep 2842\r\nStep 2843\r\nStep 2844\r\nStep 2845\r\nStep 2846\r\nStep 2847\r\nStep 2848\r\nStep 2849\r\nStep 2850\r\nStep 2851\r\nStep 2852\r\n",,terminal_output +3900,2882909,"TERMINAL",0,0,"Step 2853\r\nStep 2854\r\nStep 2855\r\nStep 2856\r\nStep 2857\r\nStep 2858\r\nStep 2859\r\nStep 2860\r\n",,terminal_output +3901,2882961,"TERMINAL",0,0,"Step 2861\r\n",,terminal_output +3902,2883168,"TERMINAL",0,0,"Step 2862\r\nStep 2863\r\nStep 2864\r\nStep 2865\r\nStep 2866\r\nStep 2867\r\nStep 2868\r\nStep 2869\r\nStep 2870\r\nStep 2871\r\nStep 2872\r\nStep 
2873\r\nStep 2874\r\nStep 2875\r\nStep 2876\r\nStep 2877\r\nStep 2878\r\n",,terminal_output +3903,2883386,"TERMINAL",0,0,"Step 2879\r\nStep 2880\r\nStep 2881\r\nStep 2882\r\nStep 2883\r\nStep 2884\r\nStep 2885\r\nStep 2886\r\nStep 2887\r\nStep 2888\r\nStep 2889\r\nStep 2890\r\nStep 2891\r\nStep 2892\r\nStep 2893\r\nStep 2894\r\nStep 2895\r\nStep 2896\r\nStep 2897\r\nStep 2898\r\nStep 2899\r\nStep 2900\r\nStep 2901\r\nStep 2902\r\nStep 2903\r\nStep 2904\r\nStep 2905\r\n",,terminal_output +3904,2883455,"TERMINAL",0,0,"Step 2906\r\nStep 2907\r\nStep 2908\r\nStep 2909\r\n",,terminal_output +3905,2883579,"TERMINAL",0,0,"Step 2910\r\nStep 2911\r\n",,terminal_output +3906,2883645,"TERMINAL",0,0,"Step 2912\r\nStep 2913\r\n",,terminal_output +3907,2883699,"TERMINAL",0,0,"Step 2914\r\n",,terminal_output +3908,2884031,"TERMINAL",0,0,"Step 2915\r\nStep 2916\r\nStep 2917\r\nStep 2918\r\nStep 2919\r\nStep 2920\r\nStep 2921\r\nStep 2922\r\nStep 2923\r\nStep 2924\r\nStep 2925\r\nStep 2926\r\n",,terminal_output +3909,2884103,"TERMINAL",0,0,"Step 2927\r\nStep 2928\r\nStep 2929\r\n",,terminal_output +3910,2884159,"TERMINAL",0,0,"Step 2930\r\n",,terminal_output +3911,2884329,"TERMINAL",0,0,"Step 2931\r\nStep 2932\r\nStep 2933\r\nStep 2934\r\n",,terminal_output +3912,2884393,"TERMINAL",0,0,"Step 2935\r\nStep 2936\r\nStep 2937\r\nStep 2938\r\nStep 2939\r\nStep 2940\r\n",,terminal_output +3913,2884453,"TERMINAL",0,0,"Step 2941\r\nStep 2942\r\n",,terminal_output +3914,2884552,"TERMINAL",0,0,"Step 2943\r\nStep 2944\r\nStep 2945\r\n",,terminal_output +3915,2884665,"TERMINAL",0,0,"Step 2946\r\nStep 2947\r\nStep 2948\r\n",,terminal_output +3916,2884779,"TERMINAL",0,0,"Step 2949\r\nStep 2950\r\nStep 2951\r\n",,terminal_output +3917,2884843,"TERMINAL",0,0,"Step 2952\r\n",,terminal_output +3918,2884907,"TERMINAL",0,0,"Step 2953\r\n",,terminal_output +3919,2885052,"TERMINAL",0,0,"Step 2954\r\nStep 2955\r\nStep 2956\r\nStep 2957\r\nStep 2958\r\nStep 2959\r\nStep 2960\r\nStep 2961\r\nStep 2962\r\nStep 2963\r\nStep 2964\r\n",,terminal_output +3920,2885166,"TERMINAL",0,0,"Step 2965\r\nStep 2966\r\nStep 2967\r\nStep 2968\r\n",,terminal_output +3921,2885292,"TERMINAL",0,0,"Step 2969\r\nStep 2970\r\nStep 2971\r\nStep 2972\r\nStep 2973\r\nStep 2974\r\nStep 2975\r\nStep 2976\r\n",,terminal_output +3922,2885360,"TERMINAL",0,0,"Step 2977\r\nStep 2978\r\n",,terminal_output +3923,2885420,"TERMINAL",0,0,"Step 2979\r\nStep 2980\r\n",,terminal_output +3924,2885486,"TERMINAL",0,0,"Step 2981\r\n",,terminal_output +3925,2885546,"TERMINAL",0,0,"Step 2982\r\nStep 2983\r\nStep 2984\r\nStep 2985\r\n",,terminal_output +3926,2885656,"TERMINAL",0,0,"Step 2986\r\nStep 2987\r\nStep 2988\r\nStep 2989\r\n",,terminal_output +3927,2885721,"TERMINAL",0,0,"Step 2990\r\nStep 2991\r\nStep 2992\r\nStep 2993\r\n",,terminal_output +3928,2885841,"TERMINAL",0,0,"Step 2994\r\nStep 2995\r\nStep 2996\r\nStep 2997\r\nStep 2998\r\nStep 2999\r\nStep 3000\r\nStep 3001\r\n",,terminal_output +3929,2885939,"TERMINAL",0,0,"Step 3002\r\nStep 3003\r\nStep 3004\r\nStep 3005\r\nStep 3006\r\n",,terminal_output +3930,2886048,"TERMINAL",0,0,"Step 3007\r\nStep 3008\r\nStep 3009\r\nStep 3010\r\n",,terminal_output +3931,2886200,"TERMINAL",0,0,"Step 3011\r\nStep 3012\r\nStep 3013\r\nStep 3014\r\nStep 3015\r\n",,terminal_output +3932,2886332,"TERMINAL",0,0,"Step 3016\r\nStep 3017\r\nStep 3018\r\nStep 3019\r\n",,terminal_output +3933,2886393,"TERMINAL",0,0,"Step 3020\r\nStep 3021\r\nStep 3022\r\nStep 3023\r\nStep 3024\r\nStep 3025\r\nStep 3026\r\n",,terminal_output 
+3934,2886578,"TERMINAL",0,0,"Step 3027\r\nStep 3028\r\nStep 3029\r\nStep 3030\r\nStep 3031\r\nStep 3032\r\nStep 3033\r\nStep 3034\r\nStep 3035\r\nStep 3036\r\n",,terminal_output +3935,2886694,"TERMINAL",0,0,"Step 3037\r\nStep 3038\r\nStep 3039\r\n",,terminal_output +3936,2886807,"TERMINAL",0,0,"Step 3040\r\nStep 3041\r\nStep 3042\r\nStep 3043\r\n",,terminal_output +3937,2886870,"TERMINAL",0,0,"Step 3044\r\nStep 3045\r\n",,terminal_output +3938,2886933,"TERMINAL",0,0,"Step 3046\r\nStep 3047\r\nStep 3048\r\n",,terminal_output +3939,2887036,"TERMINAL",0,0,"Step 3049\r\nStep 3050\r\nStep 3051\r\nStep 3052\r\nStep 3053\r\nStep 3054\r\nStep 3055\r\nStep 3056\r\n",,terminal_output +3940,2887159,"TERMINAL",0,0,"Step 3057\r\nStep 3058\r\nStep 3059\r\nStep 3060\r\nStep 3061\r\nStep 3062\r\nStep 3063\r\nStep 3064\r\nStep 3065\r\n",,terminal_output +3941,2887274,"TERMINAL",0,0,"Step 3066\r\nStep 3067\r\nStep 3068\r\nStep 3069\r\n",,terminal_output +3942,2887368,"TERMINAL",0,0,"Step 3070\r\nStep 3071\r\nStep 3072\r\nStep 3073\r\nStep 3074\r\nStep 3075\r\n",,terminal_output +3943,2887530,"TERMINAL",0,0,"Step 3076\r\nStep 3077\r\nStep 3078\r\nStep 3079\r\nStep 3080\r\nStep 3081\r\nStep 3082\r\nStep 3083\r\nStep 3084\r\n",,terminal_output +3944,2887582,"TERMINAL",0,0,"Step 3085\r\nStep 3086\r\n",,terminal_output +3945,2887696,"TERMINAL",0,0,"Step 3087\r\nStep 3088\r\nStep 3089\r\n",,terminal_output +3946,2887760,"TERMINAL",0,0,"Step 3090\r\nStep 3091\r\nStep 3092\r\nStep 3093\r\nStep 3094\r\nStep 3095\r\n",,terminal_output +3947,2887869,"TERMINAL",0,0,"Step 3096\r\nStep 3097\r\nStep 3098\r\nStep 3099\r\nStep 3100\r\n",,terminal_output +3948,2887932,"TERMINAL",0,0,"Step 3101\r\nStep 3102\r\nStep 3103\r\n",,terminal_output +3949,2888065,"TERMINAL",0,0,"Step 3104\r\nStep 3105\r\nStep 3106\r\nStep 3107\r\nStep 3108\r\n",,terminal_output +3950,2888127,"TERMINAL",0,0,"Step 3109\r\nStep 3110\r\n",,terminal_output +3951,2888253,"TERMINAL",0,0,"Step 3111\r\nStep 3112\r\nStep 3113\r\nStep 3114\r\nStep 3115\r\nStep 3116\r\nStep 3117\r\n",,terminal_output +3952,2888317,"TERMINAL",0,0,"Step 3118\r\nStep 3119\r\nStep 3120\r\n",,terminal_output +3953,2888383,"TERMINAL",0,0,"Step 3121\r\nStep 3122\r\nStep 3123\r\n",,terminal_output +3954,2888565,"TERMINAL",0,0,"Step 3124\r\nStep 3125\r\nStep 3126\r\nStep 3127\r\nStep 3128\r\nStep 3129\r\nStep 3130\r\n",,terminal_output +3955,2888629,"TERMINAL",0,0,"Step 3131\r\nStep 3132\r\n",,terminal_output +3956,2888692,"TERMINAL",0,0,"Step 3133\r\n",,terminal_output +3957,2888834,"TERMINAL",0,0,"Step 3134\r\nStep 3135\r\nStep 3136\r\nStep 3137\r\nStep 3138\r\nStep 3139\r\nStep 3140\r\nStep 3141\r\n",,terminal_output +3958,2888888,"TERMINAL",0,0,"Step 3142\r\nStep 3143\r\n",,terminal_output +3959,2888941,"TERMINAL",0,0,"Step 3144\r\nStep 3145\r\n",,terminal_output +3960,2889066,"TERMINAL",0,0,"Step 3146\r\nStep 3147\r\nStep 3148\r\nStep 3149\r\n",,terminal_output +3961,2889193,"TERMINAL",0,0,"Step 3150\r\nStep 3151\r\nStep 3152\r\nStep 3153\r\nStep 3154\r\nStep 3155\r\n",,terminal_output +3962,2889321,"TERMINAL",0,0,"Step 3156\r\nStep 3157\r\nStep 3158\r\nStep 3159\r\nStep 3160\r\nStep 3161\r\nStep 3162\r\nStep 3163\r\n",,terminal_output +3963,2889449,"TERMINAL",0,0,"Step 3164\r\nStep 3165\r\nStep 3166\r\nStep 3167\r\nStep 3168\r\n",,terminal_output +3964,2889558,"TERMINAL",0,0,"Step 3169\r\nStep 3170\r\nStep 3171\r\n",,terminal_output +3965,2889666,"TERMINAL",0,0,"Step 3172\r\nStep 3173\r\nStep 3174\r\nStep 3175\r\n",,terminal_output +3966,2889717,"TERMINAL",0,0,"Step 3176\r\nStep 
3177\r\n",,terminal_output +3967,2889841,"TERMINAL",0,0,"Step 3178\r\nStep 3179\r\nStep 3180\r\nStep 3181\r\nStep 3182\r\nStep 3183\r\nStep 3184\r\n",,terminal_output +3968,2890057,"TERMINAL",0,0,"Step 3185\r\nStep 3186\r\nStep 3187\r\nStep 3188\r\nStep 3189\r\nStep 3190\r\nStep 3191\r\n",,terminal_output +3969,2890260,"TERMINAL",0,0,"Step 3192\r\nStep 3193\r\nStep 3194\r\nStep 3195\r\nStep 3196\r\nStep 3197\r\nStep 3198\r\nStep 3199\r\nStep 3200\r\nStep 3201\r\nStep 3202\r\nStep 3203\r\nStep 3204\r\nStep 3205\r\nStep 3206\r\nStep 3207\r\nStep 3208\r\nStep 3209\r\nStep 3210\r\nStep 3211\r\nStep 3212\r\nStep 3213\r\nStep 3214\r\nStep 3215\r\nStep 3216\r\nStep 3217\r\n",,terminal_output +3970,2890505,"TERMINAL",0,0,"Step 3218\r\nStep 3219\r\nStep 3220\r\nStep 3221\r\nStep 3222\r\nStep 3223\r\nStep 3224\r\n",,terminal_output +3971,2890557,"TERMINAL",0,0,"Step 3225\r\n",,terminal_output +3972,2890623,"TERMINAL",0,0,"Step 3226\r\nStep 3227\r\n",,terminal_output +3973,2890750,"TERMINAL",0,0,"Step 3228\r\nStep 3229\r\nStep 3230\r\nStep 3231\r\nStep 3232\r\nStep 3233\r\nStep 3234\r\nStep 3235\r\nStep 3236\r\nStep 3237\r\nStep 3238\r\nStep 3239\r\nStep 3240\r\nStep 3241\r\n",,terminal_output +3974,2890812,"TERMINAL",0,0,"Step 3242\r\nStep 3243\r\nStep 3244\r\nStep 3245\r\nStep 3246\r\nStep 3247\r\n",,terminal_output +3975,2890910,"TERMINAL",0,0,"Step 3248\r\n",,terminal_output +3976,2891073,"TERMINAL",0,0,"Step 3249\r\nStep 3250\r\nStep 3251\r\nStep 3252\r\n",,terminal_output +3977,2891124,"TERMINAL",0,0,"Step 3253\r\nStep 3254\r\n",,terminal_output +3978,2891333,"TERMINAL",0,0,"Step 3255\r\nStep 3256\r\nStep 3257\r\nStep 3258\r\nStep 3259\r\nStep 3260\r\nStep 3261\r\nStep 3262\r\n",,terminal_output +3979,2891459,"TERMINAL",0,0,"Step 3263\r\nStep 3264\r\nStep 3265\r\n",,terminal_output +3980,2891585,"TERMINAL",0,0,"Step 3266\r\nStep 3267\r\nStep 3268\r\n",,terminal_output +3981,2891697,"TERMINAL",0,0,"Step 3269\r\nStep 3270\r\nStep 3271\r\n",,terminal_output +3982,2891770,"TERMINAL",0,0,"Step 3272\r\nStep 3273\r\nStep 3274\r\nStep 3275\r\nStep 3276\r\nStep 3277\r\n",,terminal_output +3983,2891865,"TERMINAL",0,0,"Step 3278\r\nStep 3279\r\n",,terminal_output +3984,2892028,"TERMINAL",0,0,"Step 3280\r\nStep 3281\r\nStep 3282\r\nStep 3283\r\nStep 3284\r\nStep 3285\r\nStep 3286\r\nStep 3287\r\nStep 3288\r\n",,terminal_output +3985,2892092,"TERMINAL",0,0,"Step 3289\r\nStep 3290\r\nStep 3291\r\n",,terminal_output +3986,2892201,"TERMINAL",0,0,"Step 3292\r\nStep 3293\r\nStep 3294\r\nStep 3295\r\n",,terminal_output +3987,2892256,"TERMINAL",0,0,"Step 3296\r\n",,terminal_output +3988,2892319,"TERMINAL",0,0,"Step 3297\r\nStep 3298\r\nStep 3299\r\n",,terminal_output +3989,2892386,"TERMINAL",0,0,"Step 3300\r\nStep 3301\r\n",,terminal_output +3990,2892493,"TERMINAL",0,0,"Step 3302\r\nStep 3303\r\nStep 3304\r\n",,terminal_output +3991,2892557,"TERMINAL",0,0,"Step 3305\r\nStep 3306\r\nStep 3307\r\n",,terminal_output +3992,2892622,"TERMINAL",0,0,"Step 3308\r\n",,terminal_output +3993,2892738,"TERMINAL",0,0,"Step 3309\r\nStep 3310\r\nStep 3311\r\nStep 3312\r\nStep 3313\r\n",,terminal_output +3994,2892882,"TERMINAL",0,0,"Step 3314\r\nStep 3315\r\nStep 3316\r\nStep 3317\r\nStep 3318\r\nStep 3319\r\nStep 3320\r\nStep 3321\r\nStep 3322\r\nStep 3323\r\nStep 3324\r\nStep 3325\r\nStep 3326\r\nStep 3327\r\nStep 3328\r\nStep 3329\r\nStep 3330\r\nStep 3331\r\nStep 3332\r\n",,terminal_output +3995,2892946,"TERMINAL",0,0,"Step 3333\r\nStep 3334\r\nStep 3335\r\n",,terminal_output +3996,2893007,"TERMINAL",0,0,"Step 3336\r\nStep 
3337\r\n",,terminal_output +3997,2893123,"TERMINAL",0,0,"Step 3338\r\nStep 3339\r\nStep 3340\r\nStep 3341\r\n",,terminal_output +3998,2893235,"TERMINAL",0,0,"Step 3342\r\nStep 3343\r\n",,terminal_output +3999,2893300,"TERMINAL",0,0,"Step 3344\r\nStep 3345\r\nStep 3346\r\nStep 3347\r\n",,terminal_output +4000,2893360,"TERMINAL",0,0,"Step 3348\r\nStep 3349\r\n",,terminal_output +4001,2893467,"TERMINAL",0,0,"Step 3350\r\n",,terminal_output +4002,2893532,"TERMINAL",0,0,"Step 3351\r\nStep 3352\r\n",,terminal_output +4003,2893596,"TERMINAL",0,0,"Step 3353\r\n",,terminal_output +4004,2893770,"TERMINAL",0,0,"Step 3354\r\nStep 3355\r\nStep 3356\r\nStep 3357\r\nStep 3358\r\nStep 3359\r\nStep 3360\r\nStep 3361\r\nStep 3362\r\nStep 3363\r\nStep 3364\r\nStep 3365\r\nStep 3366\r\nStep 3367\r\nStep 3368\r\nStep 3369\r\nStep 3370\r\nStep 3371\r\nStep 3372\r\nStep 3373\r\nStep 3374\r\nStep 3375\r\nStep 3376\r\nStep 3377\r\n",,terminal_output +4005,2893879,"TERMINAL",0,0,"Step 3378\r\nStep 3379\r\nStep 3380\r\n",,terminal_output +4006,2893946,"TERMINAL",0,0,"Step 3381\r\nStep 3382\r\nStep 3383\r\nStep 3384\r\n",,terminal_output +4007,2894009,"TERMINAL",0,0,"Step 3385\r\n",,terminal_output +4008,2894075,"TERMINAL",0,0,"Step 3386\r\nStep 3387\r\n",,terminal_output +4009,2894296,"TERMINAL",0,0,"Step 3388\r\nStep 3389\r\nStep 3390\r\nStep 3391\r\nStep 3392\r\nStep 3393\r\nStep 3394\r\nStep 3395\r\n",,terminal_output +4010,2894383,"TERMINAL",0,0,"Step 3396\r\nStep 3397\r\nStep 3398\r\n",,terminal_output +4011,2894451,"TERMINAL",0,0,"Step 3399\r\nStep 3400\r\nStep 3401\r\n",,terminal_output +4012,2894551,"TERMINAL",0,0,"Step 3402\r\nStep 3403\r\nStep 3404\r\n",,terminal_output +4013,2894698,"TERMINAL",0,0,"Step 3405\r\nStep 3406\r\nStep 3407\r\nStep 3408\r\n",,terminal_output +4014,2894763,"TERMINAL",0,0,"Step 3409\r\nStep 3410\r\nStep 3411\r\nStep 3412\r\n",,terminal_output +4015,2894944,"TERMINAL",0,0,"Step 3413\r\nStep 3414\r\nStep 3415\r\nStep 3416\r\nStep 3417\r\nStep 3418\r\nStep 3419\r\nStep 3420\r\nStep 3421\r\n",,terminal_output +4016,2895063,"TERMINAL",0,0,"Step 3422\r\nStep 3423\r\nStep 3424\r\n",,terminal_output +4017,2895130,"TERMINAL",0,0,"Step 3425\r\nStep 3426\r\nStep 3427\r\n",,terminal_output +4018,2895276,"TERMINAL",0,0,"Step 3428\r\nStep 3429\r\nStep 3430\r\nStep 3431\r\n",,terminal_output +4019,2895365,"TERMINAL",0,0,"Step 3432\r\nStep 3433\r\nStep 3434\r\n",,terminal_output +4020,2895432,"TERMINAL",0,0,"Step 3435\r\nStep 3436\r\n",,terminal_output +4021,2895536,"TERMINAL",0,0,"Step 3437\r\nStep 3438\r\nStep 3439\r\nStep 3440\r\nStep 3441\r\n",,terminal_output +4022,2895673,"TERMINAL",0,0,"Step 3442\r\nStep 3443\r\nStep 3444\r\nStep 3445\r\nStep 3446\r\n",,terminal_output +4023,2895790,"TERMINAL",0,0,"Step 3447\r\nStep 3448\r\nStep 3449\r\n",,terminal_output +4024,2895848,"TERMINAL",0,0,"Step 3450\r\n",,terminal_output +4025,2895913,"TERMINAL",0,0,"Step 3451\r\nStep 3452\r\nStep 3453\r\nStep 3454\r\n",,terminal_output +4026,2895987,"TERMINAL",0,0,"Step 3455\r\nStep 3456\r\nStep 3457\r\nStep 3458\r\nStep 3459\r\nStep 3460\r\n",,terminal_output +4027,2896110,"TERMINAL",0,0,"Step 3461\r\nStep 3462\r\nStep 3463\r\nStep 3464\r\nStep 3465\r\nStep 3466\r\n",,terminal_output +4028,2896177,"TERMINAL",0,0,"Step 3467\r\nStep 3468\r\nStep 3469\r\n",,terminal_output +4029,2896303,"TERMINAL",0,0,"Step 3470\r\nStep 3471\r\nStep 3472\r\nStep 3473\r\nStep 3474\r\nStep 3475\r\n",,terminal_output +4030,2896453,"TERMINAL",0,0,"Step 3476\r\nStep 3477\r\nStep 3478\r\nStep 3479\r\nStep 3480\r\nStep 3481\r\nStep 
3482\r\nStep 3483\r\n",,terminal_output +4031,2896607,"TERMINAL",0,0,"Step 3484\r\nStep 3485\r\nStep 3486\r\n",,terminal_output +4032,2896702,"TERMINAL",0,0,"Step 3487\r\nStep 3488\r\nStep 3489\r\n",,terminal_output +4033,2896826,"TERMINAL",0,0,"Step 3490\r\nStep 3491\r\nStep 3492\r\nStep 3493\r\nStep 3494\r\nStep 3495\r\n",,terminal_output +4034,2896910,"TERMINAL",0,0,"Step 3496\r\nStep 3497\r\nStep 3498\r\nStep 3499\r\nStep 3500\r\nStep 3501\r\nStep 3502\r\n",,terminal_output +4035,2897080,"TERMINAL",0,0,"Step 3503\r\nStep 3504\r\nStep 3505\r\nStep 3506\r\nStep 3507\r\nStep 3508\r\nStep 3509\r\n",,terminal_output +4036,2897187,"TERMINAL",0,0,"Step 3510\r\nStep 3511\r\n",,terminal_output +4037,2897309,"TERMINAL",0,0,"Step 3512\r\nStep 3513\r\nStep 3514\r\nStep 3515\r\nStep 3516\r\nStep 3517\r\nStep 3518\r\nStep 3519\r\nStep 3520\r\n",,terminal_output +4038,2897430,"TERMINAL",0,0,"Step 3521\r\nStep 3522\r\nStep 3523\r\nStep 3524\r\nStep 3525\r\n",,terminal_output +4039,2897610,"TERMINAL",0,0,"Step 3526\r\nStep 3527\r\nStep 3528\r\nStep 3529\r\nStep 3530\r\n",,terminal_output +4040,2897673,"TERMINAL",0,0,"Step 3531\r\nStep 3532\r\nStep 3533\r\nStep 3534\r\nStep 3535\r\nStep 3536\r\nStep 3537\r\n",,terminal_output +4041,2897785,"TERMINAL",0,0,"Step 3538\r\nStep 3539\r\nStep 3540\r\nStep 3541\r\nStep 3542\r\nStep 3543\r\n",,terminal_output +4042,2897894,"TERMINAL",0,0,"Step 3544\r\nStep 3545\r\nStep 3546\r\n",,terminal_output +4043,2897942,"TERMINAL",0,0,"Step 3547\r\nStep 3548\r\n",,terminal_output +4044,2898122,"TERMINAL",0,0,"Step 3549\r\nStep 3550\r\nStep 3551\r\nStep 3552\r\nStep 3553\r\nStep 3554\r\nStep 3555\r\nStep 3556\r\nStep 3557\r\n",,terminal_output +4045,2898188,"TERMINAL",0,0,"Step 3558\r\nStep 3559\r\nStep 3560\r\n",,terminal_output +4046,2898349,"TERMINAL",0,0,"Step 3561\r\nStep 3562\r\nStep 3563\r\nStep 3564\r\nStep 3565\r\nStep 3566\r\nStep 3567\r\nStep 3568\r\n",,terminal_output +4047,2898411,"TERMINAL",0,0,"Step 3569\r\nStep 3570\r\nStep 3571\r\n",,terminal_output +4048,2898509,"TERMINAL",0,0,"Step 3572\r\nStep 3573\r\nStep 3574\r\nStep 3575\r\nStep 3576\r\n",,terminal_output +4049,2898640,"TERMINAL",0,0,"Step 3577\r\nStep 3578\r\nStep 3579\r\nStep 3580\r\nStep 3581\r\n",,terminal_output +4050,2898707,"TERMINAL",0,0,"Step 3582\r\nStep 3583\r\nStep 3584\r\nStep 3585\r\nStep 3586\r\nStep 3587\r\nStep 3588\r\n",,terminal_output +4051,2898816,"TERMINAL",0,0,"Step 3589\r\nStep 3590\r\nStep 3591\r\nStep 3592\r\nStep 3593\r\n",,terminal_output +4052,2898882,"TERMINAL",0,0,"Step 3594\r\nStep 3595\r\nStep 3596\r\n",,terminal_output +4053,2898946,"TERMINAL",0,0,"Step 3597\r\n",,terminal_output +4054,2899103,"TERMINAL",0,0,"Step 3598\r\nStep 3599\r\nStep 3600\r\nStep 3601\r\nStep 3602\r\nStep 3603\r\nStep 3604\r\nStep 3605\r\nStep 3606\r\n",,terminal_output +4055,2899163,"TERMINAL",0,0,"Step 3607\r\n",,terminal_output +4056,2899234,"TERMINAL",0,0,"Step 3608\r\nStep 3609\r\nStep 3610\r\n",,terminal_output +4057,2899299,"TERMINAL",0,0,"Step 3611\r\nStep 3612\r\nStep 3613\r\nStep 3614\r\nStep 3615\r\nStep 3616\r\nStep 3617\r\nStep 3618\r\n",,terminal_output +4058,2899408,"TERMINAL",0,0,"Step 3619\r\nStep 3620\r\nStep 3621\r\n",,terminal_output +4059,2899558,"TERMINAL",0,0,"Step 3622\r\nStep 3623\r\nStep 3624\r\nStep 3625\r\nStep 3626\r\nStep 3627\r\nStep 3628\r\nStep 3629\r\nStep 3630\r\nStep 3631\r\nStep 3632\r\nStep 3633\r\nStep 3634\r\nStep 3635\r\nStep 3636\r\n",,terminal_output +4060,2899622,"TERMINAL",0,0,"Step 3637\r\nStep 3638\r\nStep 3639\r\nStep 3640\r\nStep 3641\r\nStep 
3642\r\nStep 3643\r\n",,terminal_output +4061,2899712,"TERMINAL",0,0,"Step 3644\r\nStep 3645\r\nStep 3646\r\nStep 3647\r\nStep 3648\r\nStep 3649\r\nStep 3650\r\n",,terminal_output +4062,2899817,"TERMINAL",0,0,"Step 3651\r\nStep 3652\r\nStep 3653\r\n",,terminal_output +4063,2899957,"TERMINAL",0,0,"Step 3654\r\nStep 3655\r\nStep 3656\r\nStep 3657\r\n",,terminal_output +4064,2900080,"TERMINAL",0,0,"Step 3658\r\nStep 3659\r\nStep 3660\r\n",,terminal_output +4065,2900146,"TERMINAL",0,0,"Step 3661\r\nStep 3662\r\nStep 3663\r\nStep 3664\r\n",,terminal_output +4066,2900392,"TERMINAL",0,0,"Step 3665\r\nStep 3666\r\nStep 3667\r\nStep 3668\r\nStep 3669\r\nStep 3670\r\nStep 3671\r\nStep 3672\r\nStep 3673\r\nStep 3674\r\n",,terminal_output +4067,2900504,"TERMINAL",0,0,"Step 3675\r\n",,terminal_output +4068,2900726,"TERMINAL",0,0,"Step 3676\r\nStep 3677\r\nStep 3678\r\nStep 3679\r\nStep 3680\r\nStep 3681\r\nStep 3682\r\nStep 3683\r\nStep 3684\r\nStep 3685\r\nStep 3686\r\nStep 3687\r\nStep 3688\r\nStep 3689\r\nStep 3690\r\nStep 3691\r\nStep 3692\r\nStep 3693\r\nStep 3694\r\nStep 3695\r\nStep 3696\r\nStep 3697\r\nStep 3698\r\nStep 3699\r\nStep 3700\r\nStep 3701\r\n",,terminal_output +4069,2900781,"TERMINAL",0,0,"Step 3702\r\nStep 3703\r\n",,terminal_output +4070,2900942,"TERMINAL",0,0,"Step 3704\r\nStep 3705\r\nStep 3706\r\nStep 3707\r\n",,terminal_output +4071,2900994,"TERMINAL",0,0,"Step 3708\r\n",,terminal_output +4072,2901168,"TERMINAL",0,0,"Step 3709\r\nStep 3710\r\nStep 3711\r\nStep 3712\r\n",,terminal_output +4073,2901226,"TERMINAL",0,0,"Step 3713\r\nStep 3714\r\n",,terminal_output +4074,2901410,"TERMINAL",0,0,"Step 3715\r\nStep 3716\r\nStep 3717\r\nStep 3718\r\nStep 3719\r\nStep 3720\r\nStep 3721\r\nStep 3722\r\n",,terminal_output +4075,2901544,"TERMINAL",0,0,"Step 3723\r\n",,terminal_output +4076,2901659,"TERMINAL",0,0,"Step 3724\r\nStep 3725\r\nStep 3726\r\nStep 3727\r\nStep 3728\r\n",,terminal_output +4077,2901713,"TERMINAL",0,0,"Step 3729\r\nStep 3730\r\n",,terminal_output +4078,2901780,"TERMINAL",0,0,"Step 3731\r\nStep 3732\r\n",,terminal_output +4079,2901844,"TERMINAL",0,0,"Step 3733\r\nStep 3734\r\n",,terminal_output +4080,2901906,"TERMINAL",0,0,"Step 3735\r\nStep 3736\r\nStep 3737\r\nStep 3738\r\n",,terminal_output +4081,2901969,"TERMINAL",0,0,"Step 3739\r\nStep 3740\r\nStep 3741\r\n",,terminal_output +4082,2902048,"TERMINAL",0,0,"Step 3742\r\nStep 3743\r\n",,terminal_output +4083,2902112,"TERMINAL",0,0,"Step 3744\r\n",,terminal_output +4084,2902164,"TERMINAL",0,0,"Step 3745\r\nStep 3746\r\n",,terminal_output +4085,2902270,"TERMINAL",0,0,"Step 3747\r\nStep 3748\r\nStep 3749\r\n",,terminal_output +4086,2902334,"TERMINAL",0,0,"Step 3750\r\nStep 3751\r\nStep 3752\r\nStep 3753\r\nStep 3754\r\n",,terminal_output +4087,2902408,"TERMINAL",0,0,"Step 3755\r\n",,terminal_output +4088,2902466,"TERMINAL",0,0,"Step 3756\r\nStep 3757\r\nStep 3758\r\nStep 3759\r\nStep 3760\r\nStep 3761\r\nStep 3762\r\n",,terminal_output +4089,2902568,"TERMINAL",0,0,"Step 3763\r\nStep 3764\r\nStep 3765\r\nStep 3766\r\nStep 3767\r\nStep 3768\r\n",,terminal_output +4090,2902696,"TERMINAL",0,0,"Step 3769\r\nStep 3770\r\nStep 3771\r\nStep 3772\r\nStep 3773\r\nStep 3774\r\n",,terminal_output +4091,2902828,"TERMINAL",0,0,"Step 3775\r\nStep 3776\r\nStep 3777\r\nStep 3778\r\nStep 3779\r\n",,terminal_output +4092,2902893,"TERMINAL",0,0,"Step 3780\r\nStep 3781\r\nStep 3782\r\n",,terminal_output +4093,2902956,"TERMINAL",0,0,"Step 3783\r\nStep 3784\r\nStep 3785\r\nStep 3786\r\nStep 3787\r\n",,terminal_output 
+4094,2903040,"TERMINAL",0,0,"Step 3788\r\nStep 3789\r\nStep 3790\r\nStep 3791\r\n",,terminal_output +4095,2903135,"TERMINAL",0,0,"Step 3792\r\nStep 3793\r\n",,terminal_output +4096,2903217,"TERMINAL",0,0,"Step 3794\r\nStep 3795\r\nStep 3796\r\nStep 3797\r\nStep 3798\r\nStep 3799\r\nStep 3800\r\n",,terminal_output +4097,2903284,"TERMINAL",0,0,"Step 3801\r\nStep 3802\r\nStep 3803\r\n",,terminal_output +4098,2903348,"TERMINAL",0,0,"Step 3804\r\nStep 3805\r\n",,terminal_output +4099,2903414,"TERMINAL",0,0,"Step 3806\r\nStep 3807\r\n",,terminal_output +4100,2903523,"TERMINAL",0,0,"Step 3808\r\nStep 3809\r\nStep 3810\r\nStep 3811\r\nStep 3812\r\nStep 3813\r\nStep 3814\r\nStep 3815\r\nStep 3816\r\nStep 3817\r\nStep 3818\r\nStep 3819\r\nStep 3820\r\nStep 3821\r\n",,terminal_output +4101,2903589,"TERMINAL",0,0,"Step 3822\r\nStep 3823\r\nStep 3824\r\nStep 3825\r\nStep 3826\r\nStep 3827\r\nStep 3828\r\n",,terminal_output +4102,2903712,"TERMINAL",0,0,"Step 3829\r\nStep 3830\r\nStep 3831\r\nStep 3832\r\nStep 3833\r\nStep 3834\r\nStep 3835\r\nStep 3836\r\nStep 3837\r\nStep 3838\r\nStep 3839\r\nStep 3840\r\nStep 3841\r\nStep 3842\r\nStep 3843\r\nStep 3844\r\nStep 3845\r\nStep 3846\r\n",,terminal_output +4103,2903774,"TERMINAL",0,0,"Step 3847\r\nStep 3848\r\n",,terminal_output +4104,2903836,"TERMINAL",0,0,"Step 3849\r\nStep 3850\r\n",,terminal_output +4105,2903903,"TERMINAL",0,0,"Step 3851\r\nStep 3852\r\n",,terminal_output +4106,2903967,"TERMINAL",0,0,"Step 3853\r\nStep 3854\r\nStep 3855\r\nStep 3856\r\n",,terminal_output +4107,2904208,"TERMINAL",0,0,"Step 3857\r\nStep 3858\r\nStep 3859\r\nStep 3860\r\nStep 3861\r\nStep 3862\r\nStep 3863\r\n",,terminal_output +4108,2904322,"TERMINAL",0,0,"Step 3864\r\n",,terminal_output +4109,2904396,"TERMINAL",0,0,"Step 3865\r\nStep 3866\r\n",,terminal_output +4110,2904474,"TERMINAL",0,0,"Step 3867\r\nStep 3868\r\nStep 3869\r\n",,terminal_output +4111,2904534,"TERMINAL",0,0,"Step 3870\r\n",,terminal_output +4112,2904584,"TERMINAL",0,0,"Step 3871\r\nStep 3872\r\n",,terminal_output +4113,2904766,"TERMINAL",0,0,"Step 3873\r\n",,terminal_output +4114,2904909,"TERMINAL",0,0,"Step 3874\r\nStep 3875\r\nStep 3876\r\nStep 3877\r\nStep 3878\r\nStep 3879\r\nStep 3880\r\nStep 3881\r\nStep 3882\r\nStep 3883\r\nStep 3884\r\nStep 3885\r\nStep 3886\r\n",,terminal_output +4115,2905006,"TERMINAL",0,0,"Step 3887\r\nStep 3888\r\n",,terminal_output +4116,2905132,"TERMINAL",0,0,"Step 3889\r\nStep 3890\r\nStep 3891\r\nStep 3892\r\nStep 3893\r\n",,terminal_output +4117,2905248,"TERMINAL",0,0,"Step 3894\r\nStep 3895\r\nStep 3896\r\n",,terminal_output +4118,2905395,"TERMINAL",0,0,"Step 3897\r\nStep 3898\r\nStep 3899\r\nStep 3900\r\nStep 3901\r\n",,terminal_output +4119,2905649,"TERMINAL",0,0,"Step 3902\r\nStep 3903\r\nStep 3904\r\nStep 3905\r\nStep 3906\r\nStep 3907\r\nStep 3908\r\nStep 3909\r\n",,terminal_output +4120,2905773,"TERMINAL",0,0,"Step 3910\r\nStep 3911\r\nStep 3912\r\nStep 3913\r\nStep 3914\r\nStep 3915\r\n",,terminal_output +4121,2905833,"TERMINAL",0,0,"Step 3916\r\nStep 3917\r\nStep 3918\r\nStep 3919\r\nStep 3920\r\n",,terminal_output +4122,2905940,"TERMINAL",0,0,"Step 3921\r\nStep 3922\r\nStep 3923\r\nStep 3924\r\nStep 3925\r\n",,terminal_output +4123,2906087,"TERMINAL",0,0,"Step 3926\r\nStep 3927\r\nStep 3928\r\nStep 3929\r\nStep 3930\r\n",,terminal_output +4124,2906150,"TERMINAL",0,0,"Step 3931\r\n",,terminal_output +4125,2906209,"TERMINAL",0,0,"Step 3932\r\nStep 3933\r\nStep 3934\r\n",,terminal_output +4126,2906293,"TERMINAL",0,0,"Step 3935\r\nStep 3936\r\nStep 3937\r\nStep 
3938\r\n",,terminal_output +4127,2906488,"TERMINAL",0,0,"Step 3939\r\nStep 3940\r\nStep 3941\r\nStep 3942\r\nStep 3943\r\nStep 3944\r\nStep 3945\r\nStep 3946\r\nStep 3947\r\nStep 3948\r\nStep 3949\r\nStep 3950\r\nStep 3951\r\n",,terminal_output +4128,2906585,"TERMINAL",0,0,"Step 3952\r\nStep 3953\r\n",,terminal_output +4129,2906711,"TERMINAL",0,0,"Step 3954\r\nStep 3955\r\nStep 3956\r\nStep 3957\r\nStep 3958\r\nStep 3959\r\nStep 3960\r\nStep 3961\r\n",,terminal_output +4130,2906945,"TERMINAL",0,0,"Step 3962\r\nStep 3963\r\nStep 3964\r\nStep 3965\r\nStep 3966\r\nStep 3967\r\nStep 3968\r\nStep 3969\r\nStep 3970\r\n",,terminal_output +4131,2907035,"TERMINAL",0,0,"Step 3971\r\nStep 3972\r\nStep 3973\r\nStep 3974\r\nStep 3975\r\nStep 3976\r\nStep 3977\r\nStep 3978\r\nStep 3979\r\nStep 3980\r\n",,terminal_output +4132,2907147,"TERMINAL",0,0,"Step 3981\r\nStep 3982\r\nStep 3983\r\nStep 3984\r\n",,terminal_output +4133,2907198,"TERMINAL",0,0,"Step 3985\r\n",,terminal_output +4134,2907305,"TERMINAL",0,0,"Step 3986\r\nStep 3987\r\nStep 3988\r\nStep 3989\r\n",,terminal_output +4135,2907414,"TERMINAL",0,0,"Step 3990\r\nStep 3991\r\nStep 3992\r\nStep 3993\r\n",,terminal_output +4136,2907475,"TERMINAL",0,0,"Step 3994\r\nStep 3995\r\nStep 3996\r\nStep 3997\r\n",,terminal_output +4137,2907583,"TERMINAL",0,0,"Step 3998\r\nStep 3999\r\nStep 4000\r\nStep 4001\r\n",,terminal_output +4138,2907683,"TERMINAL",0,0,"Step 4002\r\nStep 4003\r\nStep 4004\r\nStep 4005\r\nStep 4006\r\nStep 4007\r\nStep 4008\r\n",,terminal_output +4139,2907740,"TERMINAL",0,0,"Step 4009\r\n",,terminal_output +4140,2907876,"TERMINAL",0,0,"Step 4010\r\nStep 4011\r\nStep 4012\r\nStep 4013\r\nStep 4014\r\nStep 4015\r\nStep 4016\r\nStep 4017\r\nStep 4018\r\n",,terminal_output +4141,2907992,"TERMINAL",0,0,"Step 4019\r\nStep 4020\r\nStep 4021\r\nStep 4022\r\nStep 4023\r\n",,terminal_output +4142,2908213,"TERMINAL",0,0,"Step 4024\r\nStep 4025\r\nStep 4026\r\nStep 4027\r\nStep 4028\r\nStep 4029\r\nStep 4030\r\nStep 4031\r\nStep 4032\r\nStep 4033\r\nStep 4034\r\nStep 4035\r\nStep 4036\r\nStep 4037\r\nStep 4038\r\nStep 4039\r\n",,terminal_output +4143,2908319,"TERMINAL",0,0,"Step 4040\r\nStep 4041\r\nStep 4042\r\nStep 4043\r\nStep 4044\r\n",,terminal_output +4144,2908506,"TERMINAL",0,0,"Step 4045\r\nStep 4046\r\nStep 4047\r\nStep 4048\r\nStep 4049\r\nStep 4050\r\nStep 4051\r\nStep 4052\r\nStep 4053\r\nStep 4054\r\nStep 4055\r\n",,terminal_output +4145,2908660,"TERMINAL",0,0,"Step 4056\r\nStep 4057\r\nStep 4058\r\nStep 4059\r\nStep 4060\r\nStep 4061\r\nStep 4062\r\nStep 4063\r\nStep 4064\r\n",,terminal_output +4146,2908880,"TERMINAL",0,0,"Step 4065\r\nStep 4066\r\nStep 4067\r\nStep 4068\r\nStep 4069\r\nStep 4070\r\nStep 4071\r\nStep 4072\r\nStep 4073\r\nStep 4074\r\nStep 4075\r\nStep 4076\r\nStep 4077\r\nStep 4078\r\nStep 4079\r\nStep 4080\r\nStep 4081\r\nStep 4082\r\nStep 4083\r\nStep 4084\r\nStep 4085\r\nStep 4086\r\nStep 4087\r\nStep 4088\r\n",,terminal_output +4147,2909001,"TERMINAL",0,0,"Step 4089\r\nStep 4090\r\nStep 4091\r\nStep 4092\r\nStep 4093\r\nStep 4094\r\n",,terminal_output +4148,2909065,"TERMINAL",0,0,"Step 4095\r\nStep 4096\r\n",,terminal_output +4149,2909129,"TERMINAL",0,0,"Step 4097\r\nStep 4098\r\nStep 4099\r\n",,terminal_output +4150,2909310,"TERMINAL",0,0,"Step 4100\r\nStep 4101\r\nStep 4102\r\nStep 4103\r\nStep 4104\r\n",,terminal_output +4151,2909428,"TERMINAL",0,0,"Step 4105\r\nStep 4106\r\nStep 4107\r\nStep 4108\r\n",,terminal_output +4152,2909494,"TERMINAL",0,0,"Step 4109\r\n",,terminal_output 
+4153,2909558,"TERMINAL",0,0,"Step 4110\r\nStep 4111\r\n",,terminal_output +4154,2909673,"TERMINAL",0,0,"Step 4112\r\nStep 4113\r\nStep 4114\r\n",,terminal_output +4155,2909813,"TERMINAL",0,0,"Step 4115\r\nStep 4116\r\nStep 4117\r\nStep 4118\r\nStep 4119\r\nStep 4120\r\nStep 4121\r\nStep 4122\r\n",,terminal_output +4156,2909943,"TERMINAL",0,0,"Step 4123\r\nStep 4124\r\nStep 4125\r\nStep 4126\r\nStep 4127\r\nStep 4128\r\nStep 4129\r\n",,terminal_output +4157,2910037,"TERMINAL",0,0,"Step 4130\r\n",,terminal_output +4158,2910188,"TERMINAL",0,0,"Step 4131\r\nStep 4132\r\nStep 4133\r\nStep 4134\r\nStep 4135\r\nStep 4136\r\nStep 4137\r\nStep 4138\r\n",,terminal_output +4159,2910270,"TERMINAL",0,0,"Step 4139\r\nStep 4140\r\nStep 4141\r\nStep 4142\r\nStep 4143\r\nStep 4144\r\n",,terminal_output +4160,2910426,"TERMINAL",0,0,"Step 4145\r\nStep 4146\r\nStep 4147\r\nStep 4148\r\nStep 4149\r\nStep 4150\r\nStep 4151\r\nStep 4152\r\nStep 4153\r\nStep 4154\r\nStep 4155\r\nStep 4156\r\nStep 4157\r\nStep 4158\r\nStep 4159\r\nStep 4160\r\n",,terminal_output +4161,2910608,"TERMINAL",0,0,"Step 4161\r\nStep 4162\r\nStep 4163\r\nStep 4164\r\nStep 4165\r\nStep 4166\r\nStep 4167\r\nStep 4168\r\nStep 4169\r\nStep 4170\r\nStep 4171\r\nStep 4172\r\n",,terminal_output +4162,2910733,"TERMINAL",0,0,"Step 4173\r\n",,terminal_output +4163,2910812,"TERMINAL",0,0,"Step 4174\r\nStep 4175\r\nStep 4176\r\nStep 4177\r\nStep 4178\r\nStep 4179\r\n",,terminal_output +4164,2910954,"TERMINAL",0,0,"Step 4180\r\nStep 4181\r\nStep 4182\r\nStep 4183\r\nStep 4184\r\n",,terminal_output +4165,2911007,"TERMINAL",0,0,"Step 4185\r\nStep 4186\r\nStep 4187\r\n",,terminal_output +4166,2911159,"TERMINAL",0,0,"Step 4188\r\nStep 4189\r\nStep 4190\r\nStep 4191\r\n",,terminal_output +4167,2911225,"TERMINAL",0,0,"Step 4192\r\nStep 4193\r\n",,terminal_output +4168,2911284,"TERMINAL",0,0,"Step 4194\r\nStep 4195\r\nStep 4196\r\n",,terminal_output +4169,2911472,"TERMINAL",0,0,"Step 4197\r\nStep 4198\r\nStep 4199\r\nStep 4200\r\nStep 4201\r\nStep 4202\r\nStep 4203\r\nStep 4204\r\nStep 4205\r\nStep 4206\r\nStep 4207\r\n",,terminal_output +4170,2911540,"TERMINAL",0,0,"Step 4208\r\nStep 4209\r\nStep 4210\r\nStep 4211\r\n",,terminal_output +4171,2911650,"TERMINAL",0,0,"Step 4212\r\nStep 4213\r\nStep 4214\r\nStep 4215\r\n",,terminal_output +4172,2911750,"TERMINAL",0,0,"Step 4216\r\nStep 4217\r\nStep 4218\r\nStep 4219\r\n",,terminal_output +4173,2911813,"TERMINAL",0,0,"Step 4220\r\nStep 4221\r\n",,terminal_output +4174,2911869,"TERMINAL",0,0,"Step 4222\r\nStep 4223\r\n",,terminal_output +4175,2911930,"TERMINAL",0,0,"Step 4224\r\nStep 4225\r\nStep 4226\r\n",,terminal_output +4176,2912035,"TERMINAL",0,0,"Step 4227\r\nStep 4228\r\nStep 4229\r\nStep 4230\r\nStep 4231\r\nStep 4232\r\nStep 4233\r\nStep 4234\r\n",,terminal_output +4177,2912094,"TERMINAL",0,0,"Step 4235\r\n",,terminal_output +4178,2912313,"TERMINAL",0,0,"Step 4236\r\nStep 4237\r\nStep 4238\r\nStep 4239\r\nStep 4240\r\nStep 4241\r\nStep 4242\r\nStep 4243\r\nStep 4244\r\nStep 4245\r\nStep 4246\r\nStep 4247\r\nStep 4248\r\nStep 4249\r\nStep 4250\r\nStep 4251\r\nStep 4252\r\nStep 4253\r\nStep 4254\r\nStep 4255\r\nStep 4256\r\nStep 4257\r\nStep 4258\r\nStep 4259\r\nStep 4260\r\nStep 4261\r\n",,terminal_output +4179,2912376,"TERMINAL",0,0,"Step 4262\r\nStep 4263\r\nStep 4264\r\n",,terminal_output +4180,2912485,"TERMINAL",0,0,"Step 4265\r\nStep 4266\r\n",,terminal_output +4181,2912549,"TERMINAL",0,0,"Step 4267\r\nStep 4268\r\nStep 4269\r\n",,terminal_output +4182,2912626,"TERMINAL",0,0,"Step 4270\r\nStep 
4271\r\nStep 4272\r\nStep 4273\r\n",,terminal_output +4183,2912722,"TERMINAL",0,0,"Step 4274\r\nStep 4275\r\nStep 4276\r\n",,terminal_output +4184,2912793,"TERMINAL",0,0,"Step 4277\r\nStep 4278\r\nStep 4279\r\nStep 4280\r\n",,terminal_output +4185,2912871,"TERMINAL",0,0,"Step 4281\r\nStep 4282\r\nStep 4283\r\nStep 4284\r\n",,terminal_output +4186,2912978,"TERMINAL",0,0,"Step 4285\r\n",,terminal_output +4187,2913079,"TERMINAL",0,0,"Step 4286\r\nStep 4287\r\nStep 4288\r\nStep 4289\r\nStep 4290\r\nStep 4291\r\nStep 4292\r\n",,terminal_output +4188,2913198,"TERMINAL",0,0,"Step 4293\r\n",,terminal_output +4189,2913445,"TERMINAL",0,0,"Step 4294\r\nStep 4295\r\nStep 4296\r\nStep 4297\r\nStep 4298\r\nStep 4299\r\nStep 4300\r\nStep 4301\r\nStep 4302\r\nStep 4303\r\nStep 4304\r\nStep 4305\r\nStep 4306\r\nStep 4307\r\nStep 4308\r\nStep 4309\r\nStep 4310\r\nStep 4311\r\nStep 4312\r\nStep 4313\r\nStep 4314\r\nStep 4315\r\nStep 4316\r\nStep 4317\r\nStep 4318\r\nStep 4319\r\nStep 4320\r\n",,terminal_output +4190,2913719,"TERMINAL",0,0,"Step 4321\r\nStep 4322\r\nStep 4323\r\nStep 4324\r\nStep 4325\r\nStep 4326\r\nStep 4327\r\nStep 4328\r\nStep 4329\r\nStep 4330\r\n",,terminal_output +4191,2913781,"TERMINAL",0,0,"Step 4331\r\n",,terminal_output +4192,2913908,"TERMINAL",0,0,"Step 4332\r\nStep 4333\r\nStep 4334\r\nStep 4335\r\nStep 4336\r\n",,terminal_output +4193,2914041,"TERMINAL",0,0,"Step 4337\r\nStep 4338\r\nStep 4339\r\nStep 4340\r\nStep 4341\r\nStep 4342\r\nStep 4343\r\n",,terminal_output +4194,2914212,"TERMINAL",0,0,"Step 4344\r\nStep 4345\r\nStep 4346\r\nStep 4347\r\nStep 4348\r\nStep 4349\r\nStep 4350\r\nStep 4351\r\n",,terminal_output +4195,2914273,"TERMINAL",0,0,"Step 4352\r\nStep 4353\r\nStep 4354\r\n",,terminal_output +4196,2914417,"TERMINAL",0,0,"Step 4355\r\nStep 4356\r\nStep 4357\r\n",,terminal_output +4197,2914483,"TERMINAL",0,0,"Step 4358\r\nStep 4359\r\nStep 4360\r\n",,terminal_output +4198,2914556,"TERMINAL",0,0,"Step 4361\r\nStep 4362\r\nStep 4363\r\nStep 4364\r\nStep 4365\r\nStep 4366\r\nStep 4367\r\nStep 4368\r\n",,terminal_output +4199,2914714,"TERMINAL",0,0,"Step 4369\r\nStep 4370\r\nStep 4371\r\nStep 4372\r\nStep 4373\r\n",,terminal_output +4200,2914814,"TERMINAL",0,0,"Step 4374\r\nStep 4375\r\nStep 4376\r\nStep 4377\r\n",,terminal_output +4201,2914897,"TERMINAL",0,0,"Step 4378\r\nStep 4379\r\nStep 4380\r\nStep 4381\r\nStep 4382\r\nStep 4383\r\n",,terminal_output +4202,2915008,"TERMINAL",0,0,"Step 4384\r\nStep 4385\r\nStep 4386\r\nStep 4387\r\n",,terminal_output +4203,2915151,"TERMINAL",0,0,"Step 4388\r\nStep 4389\r\nStep 4390\r\nStep 4391\r\nStep 4392\r\nStep 4393\r\nStep 4394\r\nStep 4395\r\n",,terminal_output +4204,2915260,"TERMINAL",0,0,"Step 4396\r\nStep 4397\r\nStep 4398\r\nStep 4399\r\n",,terminal_output +4205,2915323,"TERMINAL",0,0,"Step 4400\r\nStep 4401\r\nStep 4402\r\n",,terminal_output +4206,2915375,"TERMINAL",0,0,"Step 4403\r\nStep 4404\r\n",,terminal_output +4207,2915453,"TERMINAL",0,0,"Step 4405\r\nStep 4406\r\nStep 4407\r\nStep 4408\r\nStep 4409\r\n",,terminal_output +4208,2915570,"TERMINAL",0,0,"Step 4410\r\nStep 4411\r\nStep 4412\r\nStep 4413\r\nStep 4414\r\nStep 4415\r\n",,terminal_output +4209,2915630,"TERMINAL",0,0,"Step 4416\r\nStep 4417\r\n",,terminal_output +4210,2915800,"TERMINAL",0,0,"Step 4418\r\nStep 4419\r\nStep 4420\r\nStep 4421\r\nStep 4422\r\nStep 4423\r\nStep 4424\r\nStep 4425\r\nStep 4426\r\nStep 4427\r\nStep 4428\r\nStep 4429\r\nStep 4430\r\n",,terminal_output +4211,2915862,"TERMINAL",0,0,"Step 4431\r\nStep 4432\r\n",,terminal_output 
+4212,2915925,"TERMINAL",0,0,"Step 4433\r\n",,terminal_output +4213,2916053,"TERMINAL",0,0,"Step 4434\r\nStep 4435\r\nStep 4436\r\nStep 4437\r\nStep 4438\r\nStep 4439\r\nStep 4440\r\n",,terminal_output +4214,2916162,"TERMINAL",0,0,"Step 4441\r\nStep 4442\r\nStep 4443\r\n",,terminal_output +4215,2916277,"TERMINAL",0,0,"Step 4444\r\nStep 4445\r\nStep 4446\r\nStep 4447\r\nStep 4448\r\nStep 4449\r\nStep 4450\r\n",,terminal_output +4216,2916513,"TERMINAL",0,0,"Step 4451\r\nStep 4452\r\nStep 4453\r\nStep 4454\r\nStep 4455\r\nStep 4456\r\nStep 4457\r\nStep 4458\r\nStep 4459\r\nStep 4460\r\nStep 4461\r\nStep 4462\r\n",,terminal_output +4217,2916576,"TERMINAL",0,0,"Step 4463\r\nStep 4464\r\nStep 4465\r\nStep 4466\r\nStep 4467\r\n",,terminal_output +4218,2916680,"TERMINAL",0,0,"Step 4468\r\nStep 4469\r\nStep 4470\r\nStep 4471\r\nStep 4472\r\n",,terminal_output +4219,2916812,"TERMINAL",0,0,"Step 4473\r\nStep 4474\r\nStep 4475\r\nStep 4476\r\nStep 4477\r\nStep 4478\r\nStep 4479\r\nStep 4480\r\nStep 4481\r\nStep 4482\r\n",,terminal_output +4220,2916924,"TERMINAL",0,0,"Step 4483\r\nStep 4484\r\nStep 4485\r\nStep 4486\r\nStep 4487\r\n",,terminal_output +4221,2917051,"TERMINAL",0,0,"Step 4488\r\nStep 4489\r\nStep 4490\r\nStep 4491\r\nStep 4492\r\nStep 4493\r\nStep 4494\r\nStep 4495\r\nStep 4496\r\n",,terminal_output +4222,2917146,"TERMINAL",0,0,"Step 4497\r\nStep 4498\r\nStep 4499\r\n",,terminal_output +4223,2917207,"TERMINAL",0,0,"Step 4500\r\nStep 4501\r\nStep 4502\r\nStep 4503\r\n",,terminal_output +4224,2917267,"TERMINAL",0,0,"Step 4504\r\nStep 4505\r\nStep 4506\r\nStep 4507\r\nStep 4508\r\n",,terminal_output +4225,2917403,"TERMINAL",0,0,"Step 4509\r\nStep 4510\r\nStep 4511\r\nStep 4512\r\nStep 4513\r\nStep 4514\r\nStep 4515\r\nStep 4516\r\nStep 4517\r\nStep 4518\r\nStep 4519\r\nStep 4520\r\nStep 4521\r\n",,terminal_output +4226,2917515,"TERMINAL",0,0,"Step 4522\r\n",,terminal_output +4227,2917580,"TERMINAL",0,0,"Step 4523\r\nStep 4524\r\n",,terminal_output +4228,2917641,"TERMINAL",0,0,"Step 4525\r\nStep 4526\r\nStep 4527\r\nStep 4528\r\nStep 4529\r\n",,terminal_output +4229,2917762,"TERMINAL",0,0,"Step 4530\r\nStep 4531\r\nStep 4532\r\nStep 4533\r\nStep 4534\r\nStep 4535\r\n",,terminal_output +4230,2917894,"TERMINAL",0,0,"Step 4536\r\nStep 4537\r\nStep 4538\r\nStep 4539\r\nStep 4540\r\nStep 4541\r\nStep 4542\r\nStep 4543\r\n",,terminal_output +4231,2918059,"TERMINAL",0,0,"Step 4544\r\nStep 4545\r\nStep 4546\r\nStep 4547\r\nStep 4548\r\nStep 4549\r\n",,terminal_output +4232,2918183,"TERMINAL",0,0,"Step 4550\r\nStep 4551\r\nStep 4552\r\nStep 4553\r\nStep 4554\r\nStep 4555\r\n",,terminal_output +4233,2918323,"TERMINAL",0,0,"Step 4556\r\nStep 4557\r\nStep 4558\r\nStep 4559\r\nStep 4560\r\nStep 4561\r\nStep 4562\r\nStep 4563\r\nStep 4564\r\nStep 4565\r\nStep 4566\r\nStep 4567\r\nStep 4568\r\nStep 4569\r\nStep 4570\r\nStep 4571\r\n",,terminal_output +4234,2918404,"TERMINAL",0,0,"Step 4572\r\nStep 4573\r\nStep 4574\r\nStep 4575\r\n",,terminal_output +4235,2918590,"TERMINAL",0,0,"Step 4576\r\nStep 4577\r\nStep 4578\r\nStep 4579\r\nStep 4580\r\nStep 4581\r\nStep 4582\r\nStep 4583\r\nStep 4584\r\nStep 4585\r\n",,terminal_output +4236,2918736,"TERMINAL",0,0,"Step 4586\r\nStep 4587\r\nStep 4588\r\nStep 4589\r\n",,terminal_output +4237,2918797,"TERMINAL",0,0,"Step 4590\r\nStep 4591\r\nStep 4592\r\n",,terminal_output +4238,2918860,"TERMINAL",0,0,"Step 4593\r\nStep 4594\r\n",,terminal_output +4239,2918942,"TERMINAL",0,0,"Step 4595\r\nStep 4596\r\nStep 4597\r\nStep 4598\r\nStep 4599\r\nStep 4600\r\nStep 
4601\r\n",,terminal_output +4240,2919114,"TERMINAL",0,0,"Step 4602\r\nStep 4603\r\nStep 4604\r\nStep 4605\r\nStep 4606\r\nStep 4607\r\nStep 4608\r\n",,terminal_output +4241,2919182,"TERMINAL",0,0,"Step 4609\r\nStep 4610\r\nStep 4611\r\nStep 4612\r\nStep 4613\r\nStep 4614\r\n",,terminal_output +4242,2919361,"TERMINAL",0,0,"Step 4615\r\nStep 4616\r\nStep 4617\r\nStep 4618\r\n",,terminal_output +4243,2919422,"TERMINAL",0,0,"Step 4619\r\nStep 4620\r\nStep 4621\r\nStep 4622\r\nStep 4623\r\n",,terminal_output +4244,2919483,"TERMINAL",0,0,"Step 4624\r\nStep 4625\r\nStep 4626\r\n",,terminal_output +4245,2919544,"TERMINAL",0,0,"Step 4627\r\n",,terminal_output +4246,2919668,"TERMINAL",0,0,"Step 4628\r\nStep 4629\r\nStep 4630\r\nStep 4631\r\n",,terminal_output +4247,2919730,"TERMINAL",0,0,"Step 4632\r\nStep 4633\r\nStep 4634\r\nStep 4635\r\n",,terminal_output +4248,2919907,"TERMINAL",0,0,"Step 4636\r\nStep 4637\r\nStep 4638\r\nStep 4639\r\nStep 4640\r\nStep 4641\r\nStep 4642\r\n",,terminal_output +4249,2920067,"TERMINAL",0,0,"Step 4643\r\nStep 4644\r\nStep 4645\r\nStep 4646\r\nStep 4647\r\nStep 4648\r\nStep 4649\r\n",,terminal_output +4250,2920175,"TERMINAL",0,0,"Step 4650\r\nStep 4651\r\nStep 4652\r\nStep 4653\r\nStep 4654\r\nStep 4655\r\nStep 4656\r\nStep 4657\r\nStep 4658\r\nStep 4659\r\nStep 4660\r\nStep 4661\r\n",,terminal_output +4251,2920239,"TERMINAL",0,0,"Step 4662\r\nStep 4663\r\nStep 4664\r\nStep 4665\r\n",,terminal_output +4252,2920427,"TERMINAL",0,0,"Step 4666\r\nStep 4667\r\nStep 4668\r\nStep 4669\r\nStep 4670\r\nStep 4671\r\nStep 4672\r\nStep 4673\r\nStep 4674\r\nStep 4675\r\n",,terminal_output +4253,2920492,"TERMINAL",0,0,"Step 4676\r\nStep 4677\r\nStep 4678\r\nStep 4679\r\n",,terminal_output +4254,2920555,"TERMINAL",0,0,"Step 4680\r\nStep 4681\r\n",,terminal_output +4255,2920689,"TERMINAL",0,0,"Step 4682\r\nStep 4683\r\nStep 4684\r\n",,terminal_output +4256,2920811,"TERMINAL",0,0,"Step 4685\r\nStep 4686\r\nStep 4687\r\nStep 4688\r\nStep 4689\r\n",,terminal_output +4257,2920920,"TERMINAL",0,0,"Step 4690\r\nStep 4691\r\n",,terminal_output +4258,2920994,"TERMINAL",0,0,"Step 4692\r\nStep 4693\r\nStep 4694\r\n",,terminal_output +4259,2921090,"TERMINAL",0,0,"Step 4695\r\nStep 4696\r\nStep 4697\r\n",,terminal_output +4260,2921162,"TERMINAL",0,0,"Step 4698\r\nStep 4699\r\nStep 4700\r\n",,terminal_output +4261,2921273,"TERMINAL",0,0,"Step 4701\r\nStep 4702\r\nStep 4703\r\nStep 4704\r\nStep 4705\r\nStep 4706\r\nStep 4707\r\n",,terminal_output +4262,2921333,"TERMINAL",0,0,"Step 4708\r\n",,terminal_output +4263,2921384,"TERMINAL",0,0,"Step 4709\r\nStep 4710\r\n",,terminal_output +4264,2921499,"TERMINAL",0,0,"Step 4711\r\nStep 4712\r\nStep 4713\r\nStep 4714\r\n",,terminal_output +4265,2921559,"TERMINAL",0,0,"Step 4715\r\n",,terminal_output +4266,2921692,"TERMINAL",0,0,"Step 4716\r\nStep 4717\r\nStep 4718\r\nStep 4719\r\nStep 4720\r\nStep 4721\r\nStep 4722\r\n",,terminal_output +4267,2921837,"TERMINAL",0,0,"Step 4723\r\nStep 4724\r\nStep 4725\r\nStep 4726\r\nStep 4727\r\nStep 4728\r\nStep 4729\r\nStep 4730\r\nStep 4731\r\nStep 4732\r\nStep 4733\r\nStep 4734\r\nStep 4735\r\n",,terminal_output +4268,2921909,"TERMINAL",0,0,"Step 4736\r\nStep 4737\r\nStep 4738\r\nStep 4739\r\nStep 4740\r\nStep 4741\r\nStep 4742\r\nStep 4743\r\nStep 4744\r\nStep 4745\r\nStep 4746\r\n",,terminal_output +4269,2922076,"TERMINAL",0,0,"Step 4747\r\nStep 4748\r\nStep 4749\r\nStep 4750\r\nStep 4751\r\n",,terminal_output +4270,2922337,"TERMINAL",0,0,"Step 4752\r\nStep 4753\r\nStep 4754\r\nStep 4755\r\nStep 4756\r\nStep 
4757\r\nStep 4758\r\nStep 4759\r\nStep 4760\r\nStep 4761\r\nStep 4762\r\nStep 4763\r\nStep 4764\r\nStep 4765\r\nStep 4766\r\nStep 4767\r\nStep 4768\r\nStep 4769\r\nStep 4770\r\nStep 4771\r\nStep 4772\r\nStep 4773\r\nStep 4774\r\nStep 4775\r\n",,terminal_output +4271,2922453,"TERMINAL",0,0,"Step 4776\r\nStep 4777\r\n",,terminal_output +4272,2922566,"TERMINAL",0,0,"Step 4778\r\nStep 4779\r\nStep 4780\r\nStep 4781\r\n",,terminal_output +4273,2922628,"TERMINAL",0,0,"Step 4782\r\nStep 4783\r\n",,terminal_output +4274,2922736,"TERMINAL",0,0,"Step 4784\r\nStep 4785\r\nStep 4786\r\n",,terminal_output +4275,2922852,"TERMINAL",0,0,"Step 4787\r\nStep 4788\r\n",,terminal_output +4276,2922905,"TERMINAL",0,0,"Step 4789\r\n",,terminal_output +4277,2923034,"TERMINAL",0,0,"Step 4790\r\nStep 4791\r\nStep 4792\r\nStep 4793\r\n",,terminal_output +4278,2923099,"TERMINAL",0,0,"Step 4794\r\n",,terminal_output +4279,2923162,"TERMINAL",0,0,"Step 4795\r\n",,terminal_output +4280,2923226,"TERMINAL",0,0,"Step 4796\r\nStep 4797\r\nStep 4798\r\n",,terminal_output +4281,2923289,"TERMINAL",0,0,"Step 4799\r\n",,terminal_output +4282,2923415,"TERMINAL",0,0,"Step 4800\r\nStep 4801\r\nStep 4802\r\nStep 4803\r\nStep 4804\r\nStep 4805\r\nStep 4806\r\nStep 4807\r\n",,terminal_output +4283,2923525,"TERMINAL",0,0,"Step 4808\r\nStep 4809\r\nStep 4810\r\n",,terminal_output +4284,2923641,"TERMINAL",0,0,"Step 4811\r\nStep 4812\r\nStep 4813\r\nStep 4814\r\nStep 4815\r\nStep 4816\r\nStep 4817\r\nStep 4818\r\nStep 4819\r\nStep 4820\r\n",,terminal_output +4285,2923703,"TERMINAL",0,0,"Step 4821\r\nStep 4822\r\nStep 4823\r\n",,terminal_output +4286,2923808,"TERMINAL",0,0,"Step 4824\r\nStep 4825\r\nStep 4826\r\nStep 4827\r\nStep 4828\r\nStep 4829\r\n",,terminal_output +4287,2923864,"TERMINAL",0,0,"Step 4830\r\nStep 4831\r\nStep 4832\r\nStep 4833\r\nStep 4834\r\nStep 4835\r\n",,terminal_output +4288,2923971,"TERMINAL",0,0,"Step 4836\r\nStep 4837\r\nStep 4838\r\nStep 4839\r\n",,terminal_output +4289,2924037,"TERMINAL",0,0,"Step 4840\r\nStep 4841\r\nStep 4842\r\nStep 4843\r\nStep 4844\r\nStep 4845\r\n",,terminal_output +4290,2924219,"TERMINAL",0,0,"Step 4846\r\nStep 4847\r\nStep 4848\r\n",,terminal_output +4291,2924294,"TERMINAL",0,0,"Step 4849\r\nStep 4850\r\nStep 4851\r\nStep 4852\r\nStep 4853\r\nStep 4854\r\nStep 4855\r\n",,terminal_output +4292,2924355,"TERMINAL",0,0,"Step 4856\r\nStep 4857\r\nStep 4858\r\nStep 4859\r\n",,terminal_output +4293,2924419,"TERMINAL",0,0,"Step 4860\r\nStep 4861\r\nStep 4862\r\nStep 4863\r\n",,terminal_output +4294,2924487,"TERMINAL",0,0,"Step 4864\r\nStep 4865\r\nStep 4866\r\n",,terminal_output +4295,2924542,"TERMINAL",0,0,"Step 4867\r\nStep 4868\r\n",,terminal_output +4296,2924699,"TERMINAL",0,0,"Step 4869\r\nStep 4870\r\nStep 4871\r\nStep 4872\r\nStep 4873\r\nStep 4874\r\nStep 4875\r\nStep 4876\r\nStep 4877\r\nStep 4878\r\n",,terminal_output +4297,2924757,"TERMINAL",0,0,"Step 4879\r\nStep 4880\r\nStep 4881\r\nStep 4882\r\n",,terminal_output +4298,2924879,"TERMINAL",0,0,"Step 4883\r\nStep 4884\r\nStep 4885\r\nStep 4886\r\nStep 4887\r\nStep 4888\r\nStep 4889\r\n",,terminal_output +4299,2924952,"TERMINAL",0,0,"Step 4890\r\nStep 4891\r\nStep 4892\r\nStep 4893\r\nStep 4894\r\n",,terminal_output +4300,2925063,"TERMINAL",0,0,"Step 4895\r\nStep 4896\r\nStep 4897\r\nStep 4898\r\nStep 4899\r\n",,terminal_output +4301,2925186,"TERMINAL",0,0,"Step 4900\r\nStep 4901\r\nStep 4902\r\nStep 4903\r\nStep 4904\r\nStep 4905\r\nStep 4906\r\nStep 4907\r\nStep 4908\r\nStep 4909\r\n",,terminal_output 
+4302,2925256,"TERMINAL",0,0,"Step 4910\r\nStep 4911\r\nStep 4912\r\nStep 4913\r\n",,terminal_output +4303,2925322,"TERMINAL",0,0,"Step 4914\r\nStep 4915\r\nStep 4916\r\n",,terminal_output +4304,2925386,"TERMINAL",0,0,"Step 4917\r\nStep 4918\r\nStep 4919\r\nStep 4920\r\n",,terminal_output +4305,2925450,"TERMINAL",0,0,"Step 4921\r\nStep 4922\r\nStep 4923\r\n",,terminal_output +4306,2925528,"TERMINAL",0,0,"Step 4924\r\nStep 4925\r\nStep 4926\r\n",,terminal_output +4307,2925715,"TERMINAL",0,0,"Step 4927\r\nStep 4928\r\nStep 4929\r\nStep 4930\r\nStep 4931\r\nStep 4932\r\nStep 4933\r\nStep 4934\r\nStep 4935\r\nStep 4936\r\nStep 4937\r\nStep 4938\r\n",,terminal_output +4308,2925843,"TERMINAL",0,0,"Step 4939\r\nStep 4940\r\nStep 4941\r\nStep 4942\r\n",,terminal_output +4309,2925907,"TERMINAL",0,0,"Step 4943\r\nStep 4944\r\nStep 4945\r\nStep 4946\r\n",,terminal_output +4310,2925970,"TERMINAL",0,0,"Step 4947\r\n",,terminal_output +4311,2926087,"TERMINAL",0,0,"Step 4948\r\nStep 4949\r\nStep 4950\r\nStep 4951\r\nStep 4952\r\nStep 4953\r\nStep 4954\r\nStep 4955\r\n",,terminal_output +4312,2926196,"TERMINAL",0,0,"Step 4956\r\nStep 4957\r\nStep 4958\r\nStep 4959\r\n",,terminal_output +4313,2926260,"TERMINAL",0,0,"Step 4960\r\nStep 4961\r\n",,terminal_output +4314,2926319,"TERMINAL",0,0,"Step 4962\r\nStep 4963\r\nStep 4964\r\n",,terminal_output +4315,2926426,"TERMINAL",0,0,"Step 4965\r\nStep 4966\r\nStep 4967\r\nStep 4968\r\n",,terminal_output +4316,2926498,"TERMINAL",0,0,"Step 4969\r\nStep 4970\r\nStep 4971\r\n",,terminal_output +4317,2926562,"TERMINAL",0,0,"Step 4972\r\nStep 4973\r\n",,terminal_output +4318,2926625,"TERMINAL",0,0,"Step 4974\r\nStep 4975\r\nStep 4976\r\nStep 4977\r\n",,terminal_output +4319,2926686,"TERMINAL",0,0,"Step 4978\r\nStep 4979\r\n",,terminal_output +4320,2926813,"TERMINAL",0,0,"Step 4980\r\nStep 4981\r\nStep 4982\r\nStep 4983\r\nStep 4984\r\nStep 4985\r\nStep 4986\r\n",,terminal_output +4321,2926876,"TERMINAL",0,0,"Step 4987\r\n",,terminal_output +4322,2926946,"TERMINAL",0,0,"Step 4988\r\nStep 4989\r\nStep 4990\r\nStep 4991\r\nStep 4992\r\n",,terminal_output +4323,2927103,"TERMINAL",0,0,"Step 4993\r\nStep 4994\r\nStep 4995\r\nStep 4996\r\nStep 4997\r\nStep 4998\r\nStep 4999\r\nStep 5000\r\nStep 5001\r\n",,terminal_output +4324,2928679,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run tokenizer-tiny-overfit-0000 at: https://wandb.ai/instant-uv/jafar/runs/rz4wjk4f\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250701_191223-rz4wjk4f/logs\r\n",,terminal_output +4325,2930401,"TERMINAL",0,0,"]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +4326,3040560,"scripts_horeka/overfit_sample_tiny/sample.sh",0,0,"",shellscript,tab +4327,3042891,"scripts_horeka/overfit_sample_tiny/sample.sh",1130,0,"",shellscript,selection_mouse +4328,3056861,"TERMINAL",0,0,"bash",,terminal_focus +4329,3060228,"TERMINAL",0,0,"cd ..",,terminal_command +4330,3060253,"TERMINAL",0,0,"]633;E;2025-07-01 19:16:45 cd ..;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir]633;D;0",,terminal_output +4331,3060557,"TERMINAL",0,0,"ls",,terminal_command +4332,3060608,"TERMINAL",0,0,"]633;E;2025-07-01 19:16:45 ls;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C",,terminal_output +4333,3060647,"TERMINAL",0,0,"single_batch_12_elems.npy single_batch_3_elems.npy single_batch_6_elems.npy single_sample single_sample_axe.npy 
single_sample_corner.npy\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/overfit_dir]633;D;0",,terminal_output +4334,3061918,"TERMINAL",0,0,"cd ..",,terminal_command +4335,3061943,"TERMINAL",0,0,"]633;E;2025-07-01 19:16:47 cd ..;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data]633;D;0",,terminal_output +4336,3063325,"TERMINAL",0,0,"cd ..",,terminal_command +4337,3064702,"TERMINAL",0,0,"cd checkpoints/",,terminal_command +4338,3065059,"TERMINAL",0,0,"ls",,terminal_command +4339,3065149,"TERMINAL",0,0,"]633;E;2025-07-01 19:16:50 ls;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C0000 3290391 3292258 3292334 3294601 3296574 3297582 3299016 3299258 3300663 3301031 debug tokenizer_ckpt_dir\r\n3290283 3290392 3292328 3292335 3294602 3296575 3297586 3299062 3299259 3300672 3306801 dyn train_lam_minecraft_overfit_sample\r\n3290284 3290439 3292329 3292336 3294603 3297569 3297606 3299063 3299272 3301025 3307618 dynamics_ckpt_dir train_tokenizer_minecraft_overfit_sample\r\n3290295 3290440 3292330 3292337 3296502 3297575 3297671 3299065 3299579 3301026 3307619 lam\r\n3290296 3291405 3292331 3292338 3296540 3297576 3297693 3299066 3300233 3301027 3309662 lam-1-action\r\n3290366 3292213 3292332 3292339 3296571 3297577 3297706 3299068 3300290 3301029 3309663 lam_ckpt_dir\r\n3290367 3292221 3292333 3294600 3296573 3297578 3297727 3299069 3300658 3301030 3309699 tokenizer\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints]633;D;0",,terminal_output +4340,3073097,"TERMINAL",0,0,"cd 3309699/genie_1751384516_",,terminal_command +4341,3076466,"TERMINAL",0,0,"cd 3309699/",,terminal_command +4342,3077744,"TERMINAL",0,0,"ls",,terminal_command +4343,3077774,"TERMINAL",0,0,"]633;E;2025-07-01 19:17:03 ls;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;Cgenie_1751384516_1000 genie_1751384516_2000 genie_1751384516_3000 genie_1751384516_4000 genie_1751384516_500 genie_1751384516_59500 genie_1751384516_69500\r\ngenie_1751384516_10000 genie_1751384516_20000 genie_1751384516_30000 genie_1751384516_40000 genie_1751384516_5000 genie_1751384516_6000 genie_1751384516_7000\r\ngenie_1751384516_10500 genie_1751384516_20500 genie_1751384516_30500 genie_1751384516_40500 genie_1751384516_50000 genie_1751384516_60000 genie_1751384516_70000\r\ngenie_1751384516_11000 genie_1751384516_21000 genie_1751384516_31000 genie_1751384516_41000 genie_1751384516_50500 genie_1751384516_60500 genie_1751384516_70500\r\ngenie_1751384516_11500 genie_1751384516_21500 genie_1751384516_31500 genie_1751384516_41500 genie_1751384516_51000 genie_1751384516_61000 genie_1751384516_71000\r\ngenie_1751384516_12000 genie_1751384516_22000 genie_1751384516_32000 genie_1751384516_42000 genie_1751384516_51500 genie_1751384516_61500 genie_1751384516_71500\r\ngenie_1751384516_12500 genie_1751384516_22500 genie_1751384516_32500 genie_1751384516_42500 genie_1751384516_52000 genie_1751384516_62000 genie_1751384516_72000\r\ngenie_1751384516_13000 genie_1751384516_23000 genie_1751384516_33000 genie_1751384516_43000 genie_1751384516_52500 genie_1751384516_62500 genie_1751384516_72500\r\ngenie_1751384516_13500 genie_1751384516_23500 genie_1751384516_33500 genie_1751384516_43500 genie_1751384516_53000 genie_1751384516_63000 genie_1751384516_73000\r\ngenie_1751384516_14000 genie_1751384516_24000 genie_1751384516_34000 genie_1751384516_44000 genie_1751384516_53500 genie_1751384516_63500 genie_1751384516_73500\r\ngenie_1751384516_14500 
genie_1751384516_24500 genie_1751384516_34500 genie_1751384516_44500 genie_1751384516_54000 genie_1751384516_64000 genie_1751384516_74000\r\ngenie_1751384516_1500 genie_1751384516_2500 genie_1751384516_3500 genie_1751384516_4500 genie_1751384516_54500 genie_1751384516_64500 genie_1751384516_74500\r\ngenie_1751384516_15000 genie_1751384516_25000 genie_1751384516_35000 genie_1751384516_45000 genie_1751384516_5500 genie_1751384516_6500 genie_1751384516_7500\r\ngenie_1751384516_15500 genie_1751384516_25500 genie_1751384516_35500 genie_1751384516_45500 genie_1751384516_55000 genie_1751384516_65000 genie_1751384516_8000\r\ngenie_1751384516_16000 genie_1751384516_26000 genie_1751384516_36000 genie_1751384516_46000 genie_1751384516_55500 genie_1751384516_65500 genie_1751384516_8500\r\ngenie_1751384516_16500 genie_1751384516_26500 genie_1751384516_36500 genie_1751384516_46500 genie_1751384516_56000 genie_1751384516_66000 genie_1751384516_9000\r\ngenie_1751384516_17000 genie_1751384516_27000 genie_1751384516_37000 genie_1751384516_47000 genie_1751384516_56500 genie_1751384516_66500 genie_1751384516_9500\r\ngenie_1751384516_17500 genie_1751384516_27500 genie_1751384516_37500 genie_1751384516_47500 genie_1751384516_57000 genie_1751384516_67000\r\ngenie_1751384516_18000 genie_1751384516_28000 genie_1751384516_38000 genie_1751384516_48000 genie_1751384516_57500 genie_1751384516_67500\r\ngenie_1751384516_18500 genie_1751384516_28500 genie_1751384516_38500 genie_1751384516_48500 genie_1751384516_58000 genie_1751384516_68000\r\ngenie_1751384516_19000 genie_1751384516_29000 genie_1751384516_39000 genie_1751384516_49000 genie_1751384516_58500 genie_1751384516_68500\r\ngenie_1751384516_19500 genie_1751384516_29500 genie_1751384516_39500 genie_1751384516_49500 genie_1751384516_59000 genie_1751384516_69000\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699]633;D;0",,terminal_output +4344,3103549,"TERMINAL",0,0,"ls genie_1751384516_74500/",,terminal_command +4345,3103585,"TERMINAL",0,0,"]633;E;2025-07-01 19:17:28 ls genie_1751384516_74500/;ea2a9ac7-e5a0-488a-9623-9429487e0dac]633;C",,terminal_output +4346,3103694,"TERMINAL",0,0,"array_metadatas _CHECKPOINT_METADATA d manifest.ocdbt _METADATA ocdbt.process_0 _sharding\r\n]0;tum_cte0515@hkn1993:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699]633;D;0",,terminal_output +4347,3106615,"TERMINAL",0,0,"cd genie_1751384516_74500/",,terminal_command +4348,3108627,"TERMINAL",0,0,"pwd",,terminal_command +4349,3111835,"scripts_horeka/overfit_sample_tiny/sample.sh",1310,0,"",shellscript,selection_mouse +4350,3111852,"scripts_horeka/overfit_sample_tiny/sample.sh",1309,0,"",shellscript,selection_command +4351,3111981,"scripts_horeka/overfit_sample_tiny/sample.sh",1305,4,"1700",shellscript,selection_mouse +4352,3111982,"scripts_horeka/overfit_sample_tiny/sample.sh",1296,13,"51381954_1700",shellscript,selection_mouse +4353,3112022,"scripts_horeka/overfit_sample_tiny/sample.sh",1287,22,"/genie_1751381954_1700",shellscript,selection_mouse +4354,3112023,"scripts_horeka/overfit_sample_tiny/sample.sh",1305,5,"17000",shellscript,selection_command +4355,3112023,"scripts_horeka/overfit_sample_tiny/sample.sh",1310,1,"\n",shellscript,selection_mouse +4356,3112455,"scripts_horeka/overfit_sample_tiny/sample.sh",1222,88,"ork/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000",shellscript,selection_mouse 
+4357,3112455,"scripts_horeka/overfit_sample_tiny/sample.sh",1221,89,"work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000",shellscript,selection_mouse +4358,3112456,"scripts_horeka/overfit_sample_tiny/sample.sh",1220,90,"/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000",shellscript,selection_mouse +4359,3112456,"scripts_horeka/overfit_sample_tiny/sample.sh",1219,91,"s/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000",shellscript,selection_mouse +4360,3112525,"scripts_horeka/overfit_sample_tiny/sample.sh",1218,92,"fs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000",shellscript,selection_mouse +4361,3112726,"scripts_horeka/overfit_sample_tiny/sample.sh",1217,93,"kfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000",shellscript,selection_mouse +4362,3112727,"scripts_horeka/overfit_sample_tiny/sample.sh",1216,94,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000",shellscript,selection_mouse +4363,3112743,"scripts_horeka/overfit_sample_tiny/sample.sh",1215,95,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000",shellscript,selection_mouse +4364,3112794,"scripts_horeka/overfit_sample_tiny/sample.sh",1214,96,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000",shellscript,selection_mouse +4365,3113025,"scripts_horeka/overfit_sample_tiny/sample.sh",1215,95,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000",shellscript,selection_mouse +4366,3113952,"scripts_horeka/overfit_sample_tiny/sample.sh",1215,95,"",shellscript,content +4367,3114010,"scripts_horeka/overfit_sample_tiny/sample.sh",1214,0,"",shellscript,selection_command +4368,3114206,"scripts_horeka/overfit_sample_tiny/sample.sh",1215,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/0000/genie_1751381954_17000",shellscript,content +4369,3114225,"scripts_horeka/overfit_sample_tiny/sample.sh",1215,0,"",shellscript,selection_command +4370,3117801,"scripts_horeka/overfit_sample_tiny/sample.sh",1215,95,"",shellscript,content +4371,3117820,"scripts_horeka/overfit_sample_tiny/sample.sh",1214,0,"",shellscript,selection_command +4372,3119113,"scripts_horeka/overfit_sample_tiny/sample.sh",1215,0,"",shellscript,selection_command +4373,3119870,"scripts_horeka/overfit_sample_tiny/sample.sh",1215,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500",shellscript,content +4374,3122703,"scripts_horeka/overfit_sample_tiny/sample.sh",1313,0,"/",shellscript,content +4375,3122704,"scripts_horeka/overfit_sample_tiny/sample.sh",1314,0,"",shellscript,selection_keyboard +4376,3123203,"scripts_horeka/overfit_sample_tiny/sample.sh",1313,0,"",shellscript,selection_command +4377,3124304,"scripts_horeka/overfit_sample_tiny/sample.sh",1386,0,"",shellscript,selection_mouse +4378,3124308,"scripts_horeka/overfit_sample_tiny/sample.sh",1385,0,"",shellscript,selection_command +4379,3130393,"sample.py",0,0,"",python,tab +4380,3133900,"sample.py",4946,0,"",python,selection_mouse +4381,3136126,"sample.py",4945,0,"",python,selection_command +4382,3136704,"sample.py",4930,0,"",python,selection_command +4383,3137707,"sample.py",4930,0,"#",python,content +4384,3137708,"sample.py",4931,0,"",python,selection_keyboard 
+4385,3137787,"sample.py",4931,0," ",python,content +4386,3137788,"sample.py",4932,0,"",python,selection_keyboard +4387,3137992,"sample.py",4868,0,"",python,selection_command +4388,3138361,"sample.py",4867,1,"",python,content +4389,3138503,"sample.py",4866,1,"",python,content +4390,3139572,"sample.py",4927,0,"",python,selection_mouse +4391,3141433,"TERMINAL",0,0,"srun",,terminal_focus +4392,3142903,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +4393,3143269,"TERMINAL",0,0,"[?25l[49@s': sh scripts_horeka/overfit_sample_tiny/tester.sh[?25h",,terminal_output +4394,3143473,"TERMINAL",0,0,"[?25lhs\r[1@a': sh scripts_horeka/overfit_sample_tiny/tester.s[?25h",,terminal_output +4395,3143569,"TERMINAL",0,0,"\r[1@m': sh scripts_horeka/overfit_sam",,terminal_output +4396,3143768,"TERMINAL",0,0,"[?25ls\r[8@failed reverse-i-search)`saml': sh scripts_horeka/overfit_sam[?25h",,terminal_output +4397,3145023,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +4398,3145731,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +4399,3145951,"TERMINAL",0,0,"[?25l[49@s': sh scripts_horeka/overfit_sample_tiny/tester.sh[?25h",,terminal_output +4400,3146152,"TERMINAL",0,0,"[?25ls\r[1@a': sh scripts_horeka/overfit_sample_tiny/tester.s[?25h",,terminal_output +4401,3146263,"TERMINAL",0,0,"[?25ls\r[1@m': sh scripts_horeka/overfit_sam[?25h",,terminal_output +4402,3146841,"TERMINAL",0,0,"[?25ls\r[1@p': sh scripts_horeka/overfit_samp[?25h",,terminal_output +4403,3146908,"TERMINAL",0,0,"[?25ls\r[1@l': sh scripts_horeka/overfit_sampl[?25h",,terminal_output +4404,3147049,"TERMINAL",0,0,"[?25ls\r[1@e': sh scripts_horeka/overfit_sample[?25h",,terminal_output +4405,3147988,"TERMINAL",0,0,"[?25ls\r.': sh scripts_horeka/overfit_sample_tiny/sample.sh[?25h",,terminal_output +4406,3148919,"TERMINAL",0,0,"\rjafar) [tum_cte0515@hkn0706 jafar]$ sh scripts_horeka/overfit_sample_tiny/sample.sh\r\n[?2004l\r",,terminal_output +4407,3149066,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/3309699/genie_1751384516_74500/\r\n",,terminal_output +4408,3152652,"TERMINAL",0,0,"2025-07-01 19:18:18.082804: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4409,3156812,"TERMINAL",0,0,"2025-07-01 19:18:22.240511: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4410,3165111,"TERMINAL",0,0,"2025-07-01 19:18:30.459489: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4411,3172122,"TERMINAL",0,0,"2025-07-01 19:18:37.559995: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4412,3178920,"TERMINAL",0,0,"2025-07-01 19:18:44.358837: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4413,3184872,"TERMINAL",0,0,"2025-07-01 19:18:50.231464: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4414,3187944,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1251: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +4415,3198932,"TERMINAL",0,0,"====================================================================================================\r\nFrame 1\r\n====================================================================================================\r\n",,terminal_output +4416,3199481,"TERMINAL",0,0,"2025-07-01 19:19:04.918958: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4417,3202486,"TERMINAL",0,0,"2025-07-01 19:19:07.842028: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4418,3210151,"TERMINAL",0,0,"2025-07-01 19:19:15.589577: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4419,3213616,"TERMINAL",0,0,"2025-07-01 19:19:19.039230: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4420,3218988,"TERMINAL",0,0,"2025-07-01 19:19:24.425877: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4421,3221428,"TERMINAL",0,0,"====================================================================================================\r\nFrame 2\r\n====================================================================================================\r\n",,terminal_output +4422,3221943,"TERMINAL",0,0,"2025-07-01 19:19:27.374934: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4423,3225014,"TERMINAL",0,0,"2025-07-01 19:19:30.396211: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4424,3233003,"TERMINAL",0,0,"2025-07-01 19:19:38.341188: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4425,3235868,"TERMINAL",0,0,"2025-07-01 19:19:41.201428: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4426,3241879,"TERMINAL",0,0,"====================================================================================================\r\nFrame 3\r\n====================================================================================================\r\n",,terminal_output +4427,3242456,"TERMINAL",0,0,"2025-07-01 19:19:47.895246: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4428,3245496,"TERMINAL",0,0,"2025-07-01 19:19:50.930785: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4429,3253892,"TERMINAL",0,0,"2025-07-01 19:19:59.330684: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4430,3256860,"TERMINAL",0,0,"2025-07-01 19:20:02.242268: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4431,3263723,"TERMINAL",0,0,"====================================================================================================\r\nFrame 4\r\n====================================================================================================\r\n",,terminal_output +4432,3264249,"TERMINAL",0,0,"2025-07-01 19:20:09.687419: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4433,3267510,"TERMINAL",0,0,"2025-07-01 19:20:12.869740: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4434,3276249,"TERMINAL",0,0,"2025-07-01 19:20:21.630696: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4435,3279009,"TERMINAL",0,0,"2025-07-01 19:20:24.447624: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4436,3285942,"TERMINAL",0,0,"====================================================================================================\r\nFrame 5\r\n====================================================================================================\r\n",,terminal_output +4437,3286557,"TERMINAL",0,0,"2025-07-01 19:20:31.978798: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4438,3289569,"TERMINAL",0,0,"2025-07-01 19:20:35.007614: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4439,3298482,"TERMINAL",0,0,"2025-07-01 19:20:43.893615: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4440,3301333,"TERMINAL",0,0,"2025-07-01 19:20:46.772637: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4441,3308362,"TERMINAL",0,0,"====================================================================================================\r\nFrame 6\r\n====================================================================================================\r\n",,terminal_output +4442,3308996,"TERMINAL",0,0,"2025-07-01 19:20:54.434309: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4443,3312171,"TERMINAL",0,0,"2025-07-01 19:20:57.557554: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4444,3320853,"TERMINAL",0,0,"2025-07-01 19:21:06.292677: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4445,3323744,"TERMINAL",0,0,"2025-07-01 19:21:09.071898: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4446,3330641,"TERMINAL",0,0,"====================================================================================================\r\nFrame 7\r\n====================================================================================================\r\n",,terminal_output +4447,3331305,"TERMINAL",0,0,"2025-07-01 19:21:16.740291: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4448,3334341,"TERMINAL",0,0,"2025-07-01 19:21:19.763525: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4449,3343185,"TERMINAL",0,0,"2025-07-01 19:21:28.557268: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4450,3346153,"TERMINAL",0,0,"2025-07-01 19:21:31.580079: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4451,3353544,"TERMINAL",0,0,"====================================================================================================\r\nFrame 8\r\n====================================================================================================\r\n",,terminal_output +4452,3354245,"TERMINAL",0,0,"2025-07-01 19:21:39.643612: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4453,3357121,"TERMINAL",0,0,"2025-07-01 19:21:42.560030: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4454,3367042,"TERMINAL",0,0,"2025-07-01 19:21:52.454032: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4455,3369912,"TERMINAL",0,0,"2025-07-01 19:21:55.315654: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4456,3377377,"TERMINAL",0,0,"====================================================================================================\r\nFrame 9\r\n====================================================================================================\r\n",,terminal_output +4457,3378104,"TERMINAL",0,0,"2025-07-01 19:22:03.494631: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4458,3390168,"TERMINAL",0,0,"2025-07-01 19:22:15.607221: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4459,3393216,"TERMINAL",0,0,"2025-07-01 19:22:18.624525: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4460,3401245,"TERMINAL",0,0,"====================================================================================================\r\nFrame 10\r\n====================================================================================================\r\n",,terminal_output +4461,3401860,"TERMINAL",0,0,"2025-07-01 19:22:27.261701: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4462,3413637,"TERMINAL",0,0,"2025-07-01 19:22:39.022962: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4463,3416527,"TERMINAL",0,0,"2025-07-01 19:22:41.966974: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4464,3424984,"TERMINAL",0,0,"====================================================================================================\r\nFrame 11\r\n====================================================================================================\r\n",,terminal_output +4465,3425714,"TERMINAL",0,0,"2025-07-01 19:22:51.153900: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4466,3437756,"TERMINAL",0,0,"2025-07-01 19:23:03.154220: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4467,3440878,"TERMINAL",0,0,"2025-07-01 19:23:06.269910: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4468,3449579,"TERMINAL",0,0,"====================================================================================================\r\nFrame 12\r\n====================================================================================================\r\n",,terminal_output +4469,3450295,"TERMINAL",0,0,"2025-07-01 19:23:15.672822: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4470,3462175,"TERMINAL",0,0,"2025-07-01 19:23:27.580238: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4471,3465680,"TERMINAL",0,0,"2025-07-01 19:23:31.110358: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4472,3474256,"TERMINAL",0,0,"====================================================================================================\r\nFrame 13\r\n====================================================================================================\r\n",,terminal_output +4473,3474976,"TERMINAL",0,0,"2025-07-01 19:23:40.331184: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4474,3487009,"TERMINAL",0,0,"2025-07-01 19:23:52.445922: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4475,3490130,"TERMINAL",0,0,"2025-07-01 19:23:55.470528: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4476,3498738,"TERMINAL",0,0,"====================================================================================================\r\nFrame 14\r\n====================================================================================================\r\n",,terminal_output +4477,3499426,"TERMINAL",0,0,"2025-07-01 19:24:04.863400: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4478,3511635,"TERMINAL",0,0,"2025-07-01 19:24:16.985874: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4479,3514534,"TERMINAL",0,0,"2025-07-01 19:24:19.972623: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4480,3523723,"TERMINAL",0,0,"====================================================================================================\r\nFrame 15\r\n====================================================================================================\r\n",,terminal_output +4481,3524426,"TERMINAL",0,0,"2025-07-01 19:24:29.861311: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4482,3536872,"TERMINAL",0,0,"2025-07-01 19:24:42.309456: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4483,3540213,"TERMINAL",0,0,"2025-07-01 19:24:45.650340: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +4484,3551886,"TERMINAL",0,0,"SSIM: 0.6456081867218018\r\n",,terminal_output +4485,3556368,"TERMINAL",0,0,"]0;tum_cte0515@hkn0706:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0706 jafar]$ ",,terminal_output +4486,4525565,"TERMINAL",0,0,"salloc: Job 3309821 has exceeded its time limit and its allocation has been revoked.\n",,terminal_output +4487,4525632,"TERMINAL",0,0,"srun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3309821.interactive ON hkn0706 CANCELLED AT 2025-07-01T19:41:11 DUE TO TIME LIMIT ***\r\n",,terminal_output +4488,4555293,"TERMINAL",0,0,"srun: error: hkn0706: task 0: Killed\r\n]0;tum_cte0515@hkn1993:~/Projects/jafar]633;D;137",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5e1c58f1-93d2-473f-9eaf-a2de01442cff1758800954786-2025_09_25-13.49.57.480/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5e1c58f1-93d2-473f-9eaf-a2de01442cff1758800954786-2025_09_25-13.49.57.480/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..ea159269606980f95eab28360e28e99e73f17f5d --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-5e1c58f1-93d2-473f-9eaf-a2de01442cff1758800954786-2025_09_25-13.49.57.480/source.csv @@ -0,0 +1,268 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,4,"jasmine/models/dynamics.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer, Transformer\n\n\nclass DynamicsMaskGIT(nnx.Module):\n """"""\n MaskGIT dynamics model\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n L: latent dimension\n V: vocabulary size (number of latents)\n """"""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n mask_limit: float,\n 
param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n batch_size = vid_embed_BTNM.shape[0]\n _rng_prob, *_rngs_mask = jax.random.split(batch[""mask_rng""], batch_size + 1)\n mask_prob = jax.random.uniform(\n _rng_prob, shape=(batch_size,), minval=self.mask_limit\n )\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n\n # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n 
video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp1M = jnp.concatenate(\n [padded_act_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp1V = self.transformer(vid_embed_BTNp1M)\n logits_BTNV = logits_BTNp1V[:, :, :-1]\n return logits_BTNV, jnp.ones_like(video_tokens_BTN)\n",python,tab +2,732,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"1:49:57 PM [info] Activating crowd-code\n1:49:57 PM [info] Recording started\n1:49:57 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,1087,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"1:49:57 PM [info] Git repository found\n1:49:57 PM [info] Git provider initialized successfully\n1:49:57 PM [info] Initial git state: [object Object]\n",Log,content +4,6981,"TERMINAL",0,0,"bash",,terminal_focus +5,11991,"TERMINAL",0,0,"queue",,terminal_command +6,12012,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Sep 25 13:50:09 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)",,terminal_output +7,12913,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +8,21218,"TERMINAL",0,0,"salloc --time=05:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +9,21287,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3520566\r\nsalloc: job 3520566 queued and waiting for resources\r\n",,terminal_output +10,26666,"TERMINAL",0,0,"salloc: job 3520566 has been allocated resources\r\nsalloc: Granted job allocation 3520566\r\nsalloc: Waiting for resource configuration\r\n",,terminal_output +11,53262,"TERMINAL",0,0,"salloc: Nodes hkn0801 are ready for job\r\n",,terminal_output +12,53922,"TERMINAL",0,0,"]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h[tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +13,80884,"TERMINAL",0,0,"so",,terminal_output +14,80963,"TERMINAL",0,0,"u",,terminal_output +15,81017,"TERMINAL",0,0,"r",,terminal_output +16,81231,"TERMINAL",0,0,"c",,terminal_output +17,81353,"TERMINAL",0,0,"e",,terminal_output +18,81418,"TERMINAL",0,0," ",,terminal_output +19,81615,"TERMINAL",0,0,".",,terminal_output +20,81736,"TERMINAL",0,0,"",,terminal_output +21,82521,"TERMINAL",0,0,"v",,terminal_output +22,82753,"TERMINAL",0,0,"e",,terminal_output +23,82846,"TERMINAL",0,0,"n",,terminal_output +24,82953,"TERMINAL",0,0,"v/",,terminal_output +25,83176,"TERMINAL",0,0,"b",,terminal_output +26,83303,"TERMINAL",0,0,"a",,terminal_output +27,83664,"TERMINAL",0,0,"",,terminal_output +28,83813,"TERMINAL",0,0,"i",,terminal_output +29,83923,"TERMINAL",0,0,"n/",,terminal_output +30,84413,"TERMINAL",0,0,"ac",,terminal_output +31,84549,"TERMINAL",0,0,"tivate",,terminal_output +32,85001,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +33,85218,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +34,86479,"TERMINAL",0,0,"q",,terminal_output +35,86549,"TERMINAL",0,0,"u",,terminal_output +36,86664,"TERMINAL",0,0,"eu",,terminal_output +37,86813,"TERMINAL",0,0,"e",,terminal_output +38,86879,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +39,86948,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue 
--mehkn0801.localdomain: Thu Sep 25 13:51:24 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3520566 accelerat interact tum_cte0 R\t1:01\t 1 hkn0801",,terminal_output +40,87731,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +41,88231,"TERMINAL",0,0,"c",,terminal_output +42,88284,"TERMINAL",0,0,"l",,terminal_output +43,88350,"TERMINAL",0,0,"e",,terminal_output +44,88515,"TERMINAL",0,0,"a",,terminal_output +45,88684,"TERMINAL",0,0,"r",,terminal_output +46,88766,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +47,91357,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0801 jasmine]$ \r(jasmine) [tum_cte0515@hkn0801 jasmine]$ \r(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +48,91969,"jasmine/models/dynamics.py",0,0,"",python,tab +49,91972,"jasmine/models/dynamics.py",2538,0,"",python,selection_mouse +50,92574,"jasmine/models/dynamics.py",2927,0,"",python,selection_mouse +51,93139,"jasmine/models/dynamics.py",2860,0,"",python,selection_mouse +52,93960,"jasmine/models/dynamics.py",2926,0,"",python,selection_mouse +53,93975,"jasmine/models/dynamics.py",2925,0,"",python,selection_command +54,144844,"TERMINAL",0,0,"q",,terminal_output +55,144954,"TERMINAL",0,0,"u",,terminal_output +56,145025,"TERMINAL",0,0,"e",,terminal_output +57,145132,"TERMINAL",0,0,"u",,terminal_output +58,145197,"TERMINAL",0,0,"e",,terminal_output +59,145368,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0801.localdomain: Thu Sep 25 13:52:22 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3520566 accelerat interact tum_cte0 R\t1:59\t 1 hkn0801",,terminal_output +60,146193,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +61,147631,"TERMINAL",0,0,"g",,terminal_output +62,147695,"TERMINAL",0,0,"i",,terminal_output +63,147769,"TERMINAL",0,0,"t",,terminal_output +64,147833,"TERMINAL",0,0," ",,terminal_output +65,147977,"TERMINAL",0,0,"b",,terminal_output +66,148073,"TERMINAL",0,0,"r",,terminal_output +67,148298,"TERMINAL",0,0,"a",,terminal_output +68,148385,"TERMINAL",0,0,"c",,terminal_output +69,148727,"TERMINAL",0,0,"",,terminal_output +70,149401,"TERMINAL",0,0,"n",,terminal_output +71,149512,"TERMINAL",0,0,"c",,terminal_output +72,149885,"TERMINAL",0,0,"h",,terminal_output +73,150141,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +74,150381,"TERMINAL",0,0," action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n* 
main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n new-arch-sampling\r\n preprocess_video\r\n refactor-full-frame-val-loss\r\n refactor-tmp\r\n remove-restore-branching\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-ali-branch\r\n sample-from-different-topologies\r\n sampling-script-add-metrics\r\n sampling-startframe-indexing-fix\r\n speedup-tfrecord-preprocessing\r\n train_lam_coinrun_ablation_wsd_3e-6_28747\r\n val-loss\r\n\r[?1l>]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +75,152630,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\n\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/train_dyn_default_gt_actions_breakout_longer_smaller_lr/3519530\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python jasmine/sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=12 \\n --start_frame=4 \\n --image_height=10 \\n --image_width=10 \\n --dyna_type=maskgit \\n --lam_patch_size=4 \\n --no-print-action-indices \\n --use_gt_actions \\n --output_dir ""gifs/50k/gt-actions""",shellscript,tab +76,153858,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",770,0,"",shellscript,selection_mouse +77,154737,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",770,0,"\n",shellscript,content +78,155739,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",770,0,"",shellscript,selection_mouse +79,156088,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",770,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/train_dyn_default_gt_actions_breakout_longer/3519698",shellscript,content +80,156887,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1071,0,"",shellscript,selection_mouse +81,157929,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",915,0,"",shellscript,selection_command +82,158165,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",914,0,"\n",shellscript,content +83,159368,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",916,0,"",shellscript,selection_command +84,160343,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",916,0,"#",shellscript,content +85,160345,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",917,0,"",shellscript,selection_keyboard +86,160830,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",917,0," ",shellscript,content +87,160831,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",918,0,"",shellscript,selection_keyboard 
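The srun invocation in the sbatch above passes flags such as --dyna_type=maskgit, --no-print-action-indices, and --use_gt_actions, which tyro.cli maps onto fields of the Args dataclass in jasmine/sample.py (boolean fields gain paired --flag / --no-flag forms). A reduced stand-in sketch with only a few of those fields to show the mapping; flag spellings in the comment follow the sbatch above.

from dataclasses import dataclass
import tyro

@dataclass
class Args:
    # Subset of the fields defined in jasmine/sample.py.
    dyna_type: str = "maskgit"
    batch_size: int = 1
    print_action_indices: bool = True  # disabled via --no-print-action-indices
    use_gt_actions: bool = False       # enabled via --use_gt_actions

if __name__ == "__main__":
    # e.g. python demo.py --dyna_type=maskgit --batch_size=12 \
    #          --no-print-action-indices --use_gt_actions
    args = tyro.cli(Args)
    print(args)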
+88,160967,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",917,0,"",shellscript,selection_command +89,245053,"TERMINAL",0,0,"g",,terminal_output +90,245177,"TERMINAL",0,0,"t",,terminal_output +91,245888,"TERMINAL",0,0,"",,terminal_output +92,246100,"TERMINAL",0,0,"i",,terminal_output +93,246188,"TERMINAL",0,0,"t",,terminal_output +94,246349,"TERMINAL",0,0," ",,terminal_output +95,246424,"TERMINAL",0,0,"c",,terminal_output +96,246555,"TERMINAL",0,0,"he",,terminal_output +97,246692,"TERMINAL",0,0,"c",,terminal_output +98,246745,"TERMINAL",0,0,"k",,terminal_output +99,246921,"TERMINAL",0,0,"o",,terminal_output +100,246985,"TERMINAL",0,0,"u",,terminal_output +101,247049,"TERMINAL",0,0,"t",,terminal_output +102,247114,"TERMINAL",0,0," ",,terminal_output +103,247497,"TERMINAL",0,0,"add-noise-to-combat-exposure-bias",,terminal_output +104,247717,"TERMINAL",0,0,"add-noise-to-combat-exposure-bias\r\n[?2004l\r",,terminal_output +105,248450,"TERMINAL",0,0,"Switched to branch 'add-noise-to-combat-exposure-bias'\r\nYour branch is ahead of 'origin/add-noise-to-combat-exposure-bias' by 3 commits.\r\n (use ""git push"" to publish your local commits)\r\n]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +106,250505,"",0,0,"Switched from branch 'main' to 'add-noise-to-combat-exposure-bias'",,git_branch_checkout +107,275721,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\n\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/train_dyn_default_gt_actions_breakout_longer/3519698\n\n# /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/train_dyn_default_gt_actions_breakout_longer_smaller_lr/3519530\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python jasmine/sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=12 \\n --start_frame=4 \\n --image_height=10 \\n --image_width=10 \\n --dyna_type=maskgit \\n --lam_patch_size=4 \\n --no-print-action-indices \\n --use_gt_actions \\n --output_dir ""gifs/50k/gt-actions""",shellscript,tab +108,275722,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1487,0,"",shellscript,selection_mouse +109,275723,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1486,0,"",shellscript,selection_command +110,275729,"TERMINAL",0,0,"bash",,terminal_focus +111,275787,"TERMINAL",0,0,"git git^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +112,279744,"TERMINAL",0,0,"srun",,terminal_focus +113,281646,"TERMINAL",0,0,"g",,terminal_output 
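The checkout above switches to the add-noise-to-combat-exposure-bias branch; together with the noise_level and noise_buckets arguments that sample.py threads into Genie, this suggests training-time corruption of conditioning frames so the model learns to recover from imperfect inputs. A speculative sketch of that general technique under those assumptions; the function and its exact noise schedule are invented for illustration, not the repository's actual implementation.

import jax
import jax.numpy as jnp

def add_bucketed_noise(rng, frames, max_noise_level=0.1, noise_buckets=10):
    # Hypothetical helper: draw one of `noise_buckets` evenly spaced noise
    # levels and add Gaussian noise at that level to the frames.
    rng_bucket, rng_noise = jax.random.split(rng)
    bucket = jax.random.randint(rng_bucket, (), 0, noise_buckets)
    level = max_noise_level * bucket / (noise_buckets - 1)
    noise = jax.random.normal(rng_noise, frames.shape) * level
    return jnp.clip(frames + noise, 0.0, 1.0), bucket

# Usage on a dummy video batch (B, T, H, W, C) in [0, 1]:
frames = jnp.zeros((2, 16, 64, 64, 3))
noisy, bucket = add_bucketed_noise(jax.random.key(0), frames)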
+114,281705,"TERMINAL",0,0,"i",,terminal_output +115,281777,"TERMINAL",0,0,"t",,terminal_output +116,281834,"TERMINAL",0,0," ",,terminal_output +117,282004,"TERMINAL",0,0,"st",,terminal_output +118,282157,"TERMINAL",0,0,"a",,terminal_output +119,282210,"TERMINAL",0,0,"t",,terminal_output +120,282274,"TERMINAL",0,0,"u",,terminal_output +121,282538,"TERMINAL",0,0,"s",,terminal_output +122,282611,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +123,282782,"TERMINAL",0,0,"On branch add-noise-to-combat-exposure-bias\r\nYour branch is ahead of 'origin/add-noise-to-combat-exposure-bias' by 3 commits.\r\n (use ""git push"" to publish your local commits)\r\n\r\n",,terminal_output +124,282859,"TERMINAL",0,0,"Last commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff2.diff\r\n\tinput_pipeline/\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/\r\n\tuv.lock\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +125,294827,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +126,294828,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1475,0,"",shellscript,selection_mouse +127,295978,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1475,0,"-",shellscript,content +128,295980,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1476,0,"",shellscript,selection_keyboard +129,296561,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1476,0,"w",shellscript,content +130,296563,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1477,0,"",shellscript,selection_keyboard +131,296654,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1477,0,"-",shellscript,content +132,296655,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1478,0,"",shellscript,selection_keyboard +133,297206,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1478,0,"n",shellscript,content +134,297207,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1479,0,"",shellscript,selection_keyboard +135,297496,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1479,0,"i",shellscript,content +136,297497,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1480,0,"",shellscript,selection_keyboard +137,297797,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1479,1,"",shellscript,content +138,298013,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1479,0,"p",shellscript,content +139,298015,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1480,0,"",shellscript,selection_keyboard +140,298115,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1480,0,"i",shellscript,content 
+141,298117,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1481,0,"",shellscript,selection_keyboard +142,298415,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1480,1,"",shellscript,content +143,298540,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1479,1,"",shellscript,content +144,298713,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1479,0,"o",shellscript,content +145,298715,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1480,0,"",shellscript,selection_keyboard +146,298844,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1480,0,"i",shellscript,content +147,298846,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1481,0,"",shellscript,selection_keyboard +148,298877,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1481,0,"s",shellscript,content +149,298878,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1482,0,"",shellscript,selection_keyboard +150,299003,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1482,0,"e",shellscript,content +151,299004,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1483,0,"",shellscript,selection_keyboard +152,308200,"TERMINAL",0,0,"q",,terminal_output +153,308306,"TERMINAL",0,0,"u",,terminal_output +154,308385,"TERMINAL",0,0,"e",,terminal_output +155,308467,"TERMINAL",0,0,"ue",,terminal_output +156,308698,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0801.localdomain: Thu Sep 25 13:55:06 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3520566 accelerat interact tum_cte0 R\t4:43\t 1 hkn0801",,terminal_output +157,309613,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +158,310080,"TERMINAL",0,0,"c",,terminal_output +159,310144,"TERMINAL",0,0,"l",,terminal_output +160,310315,"TERMINAL",0,0,"e",,terminal_output +161,310520,"TERMINAL",0,0,"a",,terminal_output +162,310634,"TERMINAL",0,0,"r",,terminal_output +163,310784,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +164,316438,"TERMINAL",0,0,"s",,terminal_output +165,316543,"TERMINAL",0,0,"h",,terminal_output +166,316636,"TERMINAL",0,0," ",,terminal_output +167,317338,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +168,317731,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch\r\n[?2004l\r",,terminal_output +169,317863,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/train_dyn_default_gt_actions_breakout_longer/3519698\r\n",,terminal_output +170,317994,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +171,333523,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +172,363785,"TERMINAL",0,0,"Per-frame SSIM:\r\n [0.30596483 0.414882 0.5163348 0.51084375 0.5155197 0.5155652\r\n 0.51382315 0.4914374 0.45811233 0.4443463 0.44947392 0.4618884 ]\r\nPer-frame PSNR:\r\n [18.745987 18.47467 18.217417 18.06475 18.032337 17.890877 17.707855\r\n 17.542704 17.384125 17.292942 17.300459 17.316257]\r\nSSIM: 0.4665159583091736\r\nPSNR: 17.83086585998535\r\n",,terminal_output +173,364404,"TERMINAL",0,0,"W0925 13:56:01.732225 2006205 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.177:63542: Failed to connect to remote host: Connection refused\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.177:63542: Failed to connect to remote host: Connection refused"", grpc_status:14}\r\n",,terminal_output +174,365078,"TERMINAL",0,0,"]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +175,372087,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +176,372088,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1425,0,"",shellscript,selection_mouse +177,372588,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1487,0,"",shellscript,selection_mouse +178,373273,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1425,0,"",shellscript,selection_mouse +179,373893,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1448,0,"",shellscript,selection_mouse +180,442282,"jasmine/sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n print_action_indices: bool = True\n output_dir: str = ""gifs/""\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n noise_level: float = 0.0\n noise_buckets: int = 10\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension 
keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n max_noise_level=0.0,\n noise_buckets=args.noise_buckets,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.ModelAndOptimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if 
x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n\n gt = gt_video.clip(0, 1)[:, args.start_frame :]\n recon = recon_video_BSHWC.clip(0, 1)[:, args.start_frame :]\n\n ssim_vmap = jax.vmap(pix.ssim, in_axes=(0, 0))\n psnr_vmap = jax.vmap(pix.psnr, in_axes=(0, 0))\n ssim = ssim_vmap(gt, recon)\n psnr = psnr_vmap(gt, recon)\n per_frame_ssim = ssim.mean(0)\n per_frame_psnr = psnr.mean(0)\n avg_ssim = ssim.mean()\n avg_psnr = psnr.mean()\n\n print(""Per-frame SSIM:\n"", per_frame_ssim)\n print(""Per-frame PSNR:\n"", per_frame_psnr)\n\n print(f""SSIM: {avg_ssim}"")\n print(f""PSNR: {avg_psnr}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B = batch[""videos""].shape[0]\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(\n batch[""actions""][:, :-1], (B, args.seq_len - 1, 1)\n )\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n if args.print_action_indices:\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n\n os.makedirs(args.output_dir, exist_ok=True)\n imgs[0].save(\n os.path.join(args.output_dir, f""generation_{time.time()}.gif""),\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +181,454590,"jasmine/sample.py",5519,0,"",python,selection_mouse +182,454728,"jasmine/sample.py",4823,0,"",python,selection_command +183,457080,"jasmine/sample.py",5118,0,"",python,selection_mouse +184,457562,"jasmine/genie.py",0,0,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: 
int,\n latent_action_dim: int,\n num_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n max_noise_level: float,\n noise_buckets: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_actions = num_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.use_gt_actions:\n self.action_embed = nnx.Embed(\n self.num_actions, self.latent_action_dim, rngs=rngs\n )\n self.lam = None\n else:\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_embed = None\n if self.dyna_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n max_noise_level=self.max_noise_level,\n 
noise_buckets=self.noise_buckets,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n latent_actions_BTm11L = None\n action_embeddings_BTm11L = None\n if self.use_gt_actions:\n assert self.action_embed is not None\n action_indices_E = None\n action_embeddings_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n action_embeddings_BTm11L = action_embeddings_BT1L[:, :-1]\n else:\n assert self.lam is not None\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=(\n action_embeddings_BTm11L\n if self.use_gt_actions\n else latent_actions_BTm11L\n ),\n )\n outputs[""mask_rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs)\n outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, 0.0, maskgit_steps, temperature, sample_argmax\n )\n elif self.dyna_type == ""causal"":\n return self.sample_causal(batch, seq_len, temperature, sample_argmax)\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n noise_level: float,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = 
self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, self.num_patch_latents)\n )\n noise_level = jnp.array(noise_level)\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n # TODO mihir\n\n rng, _rng_noise = jax.random.split(rng)\n noise_level_111 = noise_level.reshape(1, 1, 1)\n noise_level_B11 = jnp.tile(noise_level_111, (B, 1, 1))\n noise_bucket_idx_B11 = jnp.floor(\n (noise_level_B11 / self.max_noise_level) * self.noise_buckets\n ).astype(jnp.int32)\n noise_level_embed_B11M = dynamics_maskgit.noise_level_embed(\n noise_bucket_idx_B11\n )\n noise_level_embed_BS1M = jnp.tile(noise_level_embed_B11M, (1, S, 1, 1))\n vid_embed_BSNM += jnp.expand_dims(noise_level_B11, -1)\n\n vid_embed_BSNp2M = jnp.concatenate(\n [act_embed_BS1M, noise_level_embed_BS1M, vid_embed_BSNM], axis=2\n )\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNp2V = (\n dynamics_maskgit.transformer(vid_embed_BSNp2M) / step_temp\n )\n final_logits_BSNV = final_logits_BSNp2V[:, :, 2:]\n\n # --- Sample new tokens for final frame ---\n if 
sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across 
timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_causal = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n final_logits_BTNp1V = (\n dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp1V[:, step_t, step_n, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, step_n].set(final_logits_BV)\n\n new_carry = (rng, 
token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n assert self.lam is not None\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.ModelAndOptimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.ModelAndOptimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.ModelAndOptimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n 
abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.ModelAndOptimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # Reinitialize the optimizer states\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +185,457564,"jasmine/genie.py",7548,0,"",python,selection_command +186,460084,"jasmine/genie.py",7881,0,"",python,selection_mouse +187,460257,"jasmine/genie.py",7881,1,"0",python,selection_mouse +188,460258,"jasmine/genie.py",7881,2,"0.",python,selection_mouse +189,460304,"jasmine/genie.py",7881,3,"0.0",python,selection_mouse +190,460682,"jasmine/genie.py",7884,0,"",python,selection_mouse +191,461284,"jasmine/genie.py",7881,0,"",python,selection_mouse +192,461418,"jasmine/genie.py",7881,1,"0",python,selection_mouse +193,461475,"jasmine/genie.py",7881,2,"0.",python,selection_mouse +194,461475,"jasmine/genie.py",7881,3,"0.0",python,selection_mouse +195,461861,"jasmine/genie.py",7884,0,"",python,selection_mouse +196,468902,"jasmine/genie.py",7883,1,"",python,content +197,469934,"jasmine/genie.py",7883,0,"3",python,content +198,469935,"jasmine/genie.py",7884,0,"",python,selection_keyboard +199,471302,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +200,472807,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +201,472919,"TERMINAL",0,0,"Sampling from checkpoint: 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/train_dyn_default_gt_actions_breakout_longer/3519698\r\n",,terminal_output +202,473043,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +203,481577,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +204,511471,"TERMINAL",0,0,"Per-frame SSIM:\r\n [0.30596483 0.414882 0.5163348 0.51084375 0.5155197 0.5155652\r\n 0.51382315 0.4914374 0.45811233 0.4443463 0.44947392 0.4618884 ]\r\nPer-frame PSNR:\r\n [18.745987 18.47467 18.217417 18.06475 18.032337 17.890877 17.707855\r\n 17.542704 17.384125 17.292942 17.300459 17.316257]\r\nSSIM: 0.4665159583091736\r\nPSNR: 17.83086585998535\r\n",,terminal_output +205,511990,"TERMINAL",0,0,"W0925 13:58:29.386010 2007888 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugstr job_name: ""jax_worker"": CANCELLED: CANCELLED\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""CANCELLED"", grpc_status:1} [type.googleapis.com/tensorflow.DerivedStatus='']\r\n",,terminal_output +206,512521,"TERMINAL",0,0,"]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +207,541605,"jasmine/genie.py",0,0,"",python,tab +208,541607,"jasmine/genie.py",7883,0,"",python,selection_mouse +209,542863,"jasmine/genie.py",7883,1,"",python,content +210,542996,"jasmine/genie.py",7883,0,"7",python,content +211,542998,"jasmine/genie.py",7884,0,"",python,selection_keyboard +212,545050,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +213,545231,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +214,545329,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/train_dyn_default_gt_actions_breakout_longer/3519698\r\n",,terminal_output +215,545463,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +216,548159,"jasmine/genie.py",0,0,"",python,tab +217,548160,"jasmine/genie.py",7841,0,"",python,selection_mouse +218,548757,"jasmine/genie.py",8167,0,"",python,selection_command +219,553802,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +220,555890,"jasmine/genie.py",8269,0,"",python,selection_mouse +221,556022,"jasmine/genie.py",8264,11,"noise_level",python,selection_mouse +222,583516,"TERMINAL",0,0,"Per-frame SSIM:\r\n [0.30596483 0.414882 0.5163348 0.51084375 0.5155197 0.5155652\r\n 0.51382315 0.4914374 0.45811233 0.4443463 0.44947392 0.4618884 ]\r\nPer-frame PSNR:\r\n [18.745987 18.47467 18.217417 18.06475 18.032337 17.890877 17.707855\r\n 17.542704 17.384125 17.292942 17.300459 17.316257]\r\nSSIM: 0.4665159583091736\r\nPSNR: 17.83086585998535\r\n",,terminal_output +223,584098,"TERMINAL",0,0,"W0925 13:59:41.451424 2009277 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.177:63542: Failed to connect to remote host: Connection refused\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.177:63542: Failed to connect to remote host: Connection refused"", grpc_status:14}\r\n",,terminal_output +224,584621,"TERMINAL",0,0,"]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +225,759719,"jasmine/genie.py",13893,0,"",python,selection_mouse +226,814183,"jasmine/genie.py",0,0,"",python,tab +227,814184,"jasmine/genie.py",13869,0,"",python,selection_mouse +228,823620,"jasmine/genie.py",14116,0,"",python,selection_mouse +229,893246,"TERMINAL",0,0,"q",,terminal_output +230,893311,"TERMINAL",0,0,"u",,terminal_output +231,893434,"TERMINAL",0,0,"e",,terminal_output +232,893487,"TERMINAL",0,0,"u",,terminal_output +233,893627,"TERMINAL",0,0,"e",,terminal_output +234,893813,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0801.localdomain: Thu Sep 25 14:04:51 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3520566 accelerat interact tum_cte0 R14:28\t 1 hkn0801",,terminal_output +235,894871,"TERMINAL",0,0,"29",,terminal_output +236,894980,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0801:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0801 jasmine]$ ",,terminal_output +237,900148,"jasmine/genie.py",0,0,"",python,tab +238,900149,"jasmine/genie.py",14080,0,"",python,selection_mouse +239,900647,"jasmine/genie.py",14079,0,"",python,selection_command +240,900965,"jasmine/genie.py",14115,0,"",python,selection_command +241,901139,"jasmine/genie.py",14165,0,"",python,selection_command +242,901286,"jasmine/genie.py",14227,0,"",python,selection_command +243,901585,"jasmine/genie.py",14316,0,"",python,selection_command +244,901908,"jasmine/genie.py",14373,0,"",python,selection_command +245,902094,"jasmine/genie.py",14435,0,"",python,selection_command +246,902232,"jasmine/genie.py",14461,0,"",python,selection_command +247,902395,"jasmine/genie.py",14508,0,"",python,selection_command +248,902561,"jasmine/genie.py",14559,0,"",python,selection_command +249,902711,"jasmine/genie.py",14621,0,"",python,selection_command +250,902867,"jasmine/genie.py",14685,0,"",python,selection_command +251,903018,"jasmine/genie.py",14747,0,"",python,selection_command +252,903161,"jasmine/genie.py",14777,0,"",python,selection_command 
+253,903344,"jasmine/genie.py",14779,0,"",python,selection_command +254,903485,"jasmine/genie.py",14812,0,"",python,selection_command +255,903626,"jasmine/genie.py",14874,0,"",python,selection_command +256,903785,"jasmine/genie.py",14954,0,"",python,selection_command +257,904314,"jasmine/genie.py",15012,0,"",python,selection_command +258,904340,"jasmine/genie.py",15026,0,"",python,selection_command +259,904414,"jasmine/genie.py",15053,0,"",python,selection_command +260,904645,"jasmine/genie.py",15115,0,"",python,selection_command +261,904840,"jasmine/genie.py",15160,0,"",python,selection_command +262,905342,"jasmine/genie.py",15174,0,"",python,selection_command +263,905374,"jasmine/genie.py",15236,0,"",python,selection_command +264,905440,"jasmine/genie.py",15313,0,"",python,selection_command +265,905488,"jasmine/genie.py",15397,0,"",python,selection_command +266,905520,"jasmine/genie.py",15471,0,"",python,selection_command +267,905521,"jasmine/genie.py",15547,0,"",python,selection_command diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6791460b-ec38-4da2-872f-193943c12d601753274780799-2025_07_23-17.17.23.114/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6791460b-ec38-4da2-872f-193943c12d601753274780799-2025_07_23-17.17.23.114/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..4c806d89e18558b5c6af501da0301b1881c01a64 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-6791460b-ec38-4da2-872f-193943c12d601753274780799-2025_07_23-17.17.23.114/source.csv @@ -0,0 +1,1087 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,6,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:15:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskprob_fix_8_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=12 \\n --init_lr=0 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskprob-fix-dev-$slurm_job_id \\n --tags dynamics maskprob-fix \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=768 \\n --dyna_num_blocks=16 \\n 
--dyna_num_heads=12\n",shellscript,tab +2,1047,"TERMINAL",0,0,"ls",,terminal_command +3,1065,"TERMINAL",0,0,"]633;E;2025-07-23 17:17:24 ls;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;C0000 3290392 3292329 3292337 3296540 3297577 3297727 3299258 3300672 3307618 3311672 3316022 interactive train_dynamics_lr_schedule_const train_tokenizer_minecraft_overfit_sample\r\n3290283 3290439 3292330 3292338 3296571 3297578 3299016 3299259 3301025 3307619 3313562 big-runs lam train_dynamics_lr_schedule_cos wrap\r\n3290284 3290440 3292331 3292339 3296573 3297582 3299062 3299272 3301026 3309662 3313563 causal lam-1-action train_dynamics_lr_schedule_wsd\r\n3290295 3291405 3292332 3294600 3296574 3297586 3299063 3299579 3301027 3309663 3313564 checkpoints_alfred lam_ckpt_dir train_dyn_new_arch-bugfixed-spatial-shift\r\n3290296 3292213 3292333 3294601 3296575 3297606 3299065 3300233 3301029 3309699 3313565 coinrun lam_main_test train_dyn_new_arch-bugfixed-temporal-shift\r\n3290366 3292221 3292334 3294602 3297569 3297671 3299066 3300290 3301030 3310436 3313570 debug maskgit-maskprob-fix train_dyn_yolorun_new_arch\r\n3290367 3292258 3292335 3294603 3297575 3297693 3299068 3300658 3301031 3310437 3313571 dyn tokenizer train_lam_minecraft_overfit_sample\r\n3290391 3292328 3292336 3296502 3297576 3297706 3299069 3300663 3306801 3311671 3313572 dynamics_ckpt_dir tokenizer_ckpt_dir train_tokenizer_batch_size_scaling_16_node\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints]633;D;0",,terminal_output +4,3195,"TERMINAL",0,0,"cd maskgit-maskprob-fix/",,terminal_command +5,3523,"TERMINAL",0,0,"l",,terminal_command +6,4098,"TERMINAL",0,0,"ls",,terminal_command +7,4137,"TERMINAL",0,0,"]633;E;2025-07-23 17:17:27 ls;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;Cinteractive train_dynamics_maskprob_fix_8_node\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix]633;D;0",,terminal_output +8,6310,"TERMINAL",0,0,"cd interactive/",,terminal_command +9,6699,"TERMINAL",0,0,"ls",,terminal_command +10,6726,"TERMINAL",0,0,"]633;E;2025-07-23 17:17:29 ls;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;C3371005\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/interactive]633;D;0",,terminal_output +11,8537,"TERMINAL",0,0,"rm -rf 3371005/",,terminal_command +12,8552,"TERMINAL",0,0,"]633;E;2025-07-23 17:17:31 rm -rf 3371005/;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/interactive]633;D;0",,terminal_output +13,9562,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1369,0,"",shellscript,selection_mouse +14,9565,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1368,0,"",shellscript,selection_command +15,10343,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1397,0,"",shellscript,selection_mouse +16,10985,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1397,1,"",shellscript,content +17,11679,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1397,1,"",shellscript,content +18,11878,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1410,0,"",shellscript,selection_command +19,12207,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1439,0,"",shellscript,selection_command 
+20,13321,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1397,0,"0",shellscript,content +21,13326,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1397,0,"",shellscript,selection_command +22,14052,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1398,0,"0",shellscript,content +23,14442,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1412,0,"",shellscript,selection_command +24,14869,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1425,0,"",shellscript,selection_command +25,15044,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1426,0,"",shellscript,selection_command +26,15529,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1427,0,"",shellscript,selection_command +27,15571,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1428,0,"",shellscript,selection_command +28,15629,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1429,0,"",shellscript,selection_command +29,15630,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1430,0,"",shellscript,selection_command +30,15669,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1431,0,"",shellscript,selection_command +31,15690,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1432,0,"",shellscript,selection_command +32,15748,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1433,0,"",shellscript,selection_command +33,15748,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1434,0,"",shellscript,selection_command +34,15787,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1435,0,"",shellscript,selection_command +35,15807,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1436,0,"",shellscript,selection_command +36,15869,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1437,0,"",shellscript,selection_command +37,15869,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1438,0,"",shellscript,selection_command +38,15942,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1439,0,"",shellscript,selection_command +39,15944,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1440,0,"",shellscript,selection_command +40,15960,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1441,0,"",shellscript,selection_command +41,15989,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1442,0,"",shellscript,selection_command +42,16021,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1443,0,"",shellscript,selection_command +43,16155,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1444,0,"",shellscript,selection_command +44,16327,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1445,0,"",shellscript,selection_command +45,16459,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1446,0,"",shellscript,selection_command +46,17141,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1446,1,"",shellscript,content +47,17315,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1446,1,"",shellscript,content 
+48,18653,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1562,0,"",shellscript,selection_mouse +49,18654,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1561,0,"",shellscript,selection_command +50,27901,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1446,0,"",shellscript,selection_mouse +51,28928,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1445,0,"",shellscript,selection_mouse +52,29070,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1444,2,"10",shellscript,selection_mouse +53,29814,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1444,0,"",shellscript,selection_mouse +54,29815,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1444,2,"10",shellscript,selection_mouse +55,30830,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1444,0,"",shellscript,selection_mouse +56,31200,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1437,0,"",shellscript,selection_mouse +57,31368,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1420,23,"log_checkpoint_interval",shellscript,selection_mouse +58,32226,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1445,0,"",shellscript,selection_mouse +59,32364,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1444,2,"10",shellscript,selection_mouse +60,33707,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1467,0,"",shellscript,selection_mouse +61,33844,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1460,8,"dynamics",shellscript,selection_mouse +62,34405,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1477,0,"",shellscript,selection_mouse +63,34567,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1469,8,"maskprob",shellscript,selection_mouse +64,35090,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1481,0,"",shellscript,selection_mouse +65,35196,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1478,3,"fix",shellscript,selection_mouse +66,35735,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1436,0,"",shellscript,selection_mouse +67,35872,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1420,23,"log_checkpoint_interval",shellscript,selection_mouse +68,36538,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1445,0,"",shellscript,selection_mouse +69,36687,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1444,2,"10",shellscript,selection_mouse +70,53238,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1413,0,"",shellscript,selection_mouse +71,53239,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1412,0,"",shellscript,selection_command +72,59293,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",955,0,"",shellscript,selection_mouse +73,59294,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",954,0,"",shellscript,selection_command +74,59899,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1413,0,"",shellscript,selection_mouse +75,59901,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1412,0,"",shellscript,selection_command 
+76,60366,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1424,0,"",shellscript,selection_command +77,60538,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1459,0,"",shellscript,selection_command +78,60702,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1512,0,"",shellscript,selection_command +79,60843,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1547,0,"",shellscript,selection_command +80,60984,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1573,0,"",shellscript,selection_command +81,61101,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1595,0,"",shellscript,selection_command +82,82617,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=12\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-modelsize-scaling/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-modelsize-scaling/%x_%j.log\n#SBATCH --job-name=train_dynamics_modelsize_scaling_180M_12_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dynamics-cotraining-modelsize-scaling/$job_name\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_tokenizer_batch_size_scaling_16_node/3321526/tokenizer_22000/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --restore_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-180M-$slurm_job_id \\n --tags dynamics modelsize-scaling 180M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,tab +83,84301,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1719,0,"",shellscript,selection_mouse +84,84431,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1718,1,"\n",shellscript,selection_mouse +85,84446,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1715,4,"=16\n",shellscript,selection_mouse +86,84468,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1658,61,"im=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,selection_mouse +87,84523,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1617,102,"ata_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,selection_mouse +88,84524,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1615,104,"-data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n 
--dyna_num_heads=16\n",shellscript,selection_mouse +89,84524,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1614,105,"--data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,selection_mouse +90,84542,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1613,106," --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,selection_mouse +91,84617,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1612,107," --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,selection_mouse +92,84704,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1611,108," --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,selection_mouse +93,84761,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1647,72," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,selection_mouse +94,84791,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1646,73," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,selection_mouse +95,84851,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1668,51," --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,selection_mouse +96,85215,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1646,73," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,selection_mouse +97,87784,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +98,89664,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1741,0,"",shellscript,selection_mouse +99,89666,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1740,0,"",shellscript,selection_command +100,89815,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1740,1,"2",shellscript,selection_mouse +101,89825,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1741,0,"",shellscript,selection_command +102,89858,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1737,4,"s=12",shellscript,selection_mouse +103,89877,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1703,38,"um_blocks=16 \\n --dyna_num_heads=12",shellscript,selection_mouse +104,89898,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1699,42,"na_num_blocks=16 \\n --dyna_num_heads=12",shellscript,selection_mouse +105,89912,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1695,46,"--dyna_num_blocks=16 \\n --dyna_num_heads=12",shellscript,selection_mouse +106,89928,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1692,49," --dyna_num_blocks=16 \\n --dyna_num_heads=12",shellscript,selection_mouse +107,89944,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1691,50," --dyna_num_blocks=16 \\n --dyna_num_heads=12",shellscript,selection_mouse +108,90231,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1670,71," --dyna_dim=768 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=12",shellscript,selection_mouse 
+109,90928,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1670,71,"",shellscript,content +110,91417,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1670,0," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16\n",shellscript,content +111,94360,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",0,0,"",shellscript,tab +112,95469,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +113,96888,"TERMINAL",0,0,"srun",,terminal_focus +114,97959,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",,terminal_output +115,98829,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=00:15:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\r\n#SBATCH --job-name=train_dynamics_maskprob_fix_8_node\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\n# tokenizer with the new structure supporting larger ffn_dim\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n $restore_ckpt_flag \\r\n --wandb_id $SLURM_JOB_ID \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=8e-5 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=10 \\r\n --name=dynamics-maskprob-fix-dev-$slurm_job_id \\r\n --tags dynamics maskprob-fix \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir \\r\n --dyna_dim=1024 \\r\n --dyna_num_blocks=16 \\r\n --dyna_num_heads=16\r\n\r\n",,terminal_output 
+116,98957,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1952006\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1753283209\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753286809\r\nSLURM_PMI2_SRUN_PORT=34677\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3371005\r\nSLURM_PTY_PORT=45451\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=45\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=252\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=33207\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3371005\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=33207\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +117,99071,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +118,100026,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +119,101270,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1742,0,"",shellscript,selection_mouse +120,104512,"TERMINAL",0,0,"bash",,terminal_focus +121,110121,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +122,112231,"TERMINAL",0,0,"2025-07-23 17:19:15.270281: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +123,122363,"TERMINAL",0,0,"srun",,terminal_focus +124,139829,"TERMINAL",0,0,"2025-07-23 17:19:42.788188: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +125,157223,"TERMINAL",0,0,"2025-07-23 17:20:00.244671: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +126,160312,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +127,161226,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250723_172003-3371005\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Resuming run dynamics-maskprob-fix-dev-3371005\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/3371005\r\n",,terminal_output +128,161948,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nwandb: ERROR Attempted to change value of key ""model_param_count"" from {'lam': 17229792, 'total': 179219920, 'dynamics': 128239872, 'tokenizer': 33750256} to {'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 204791808, 'total': 255771856}\r\nwandb: ERROR If you really want to do this, pass allow_val_change=True to config.update()\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_dynamics.py"", line 233, in <module>\r\n wandb.config.update({""model_param_count"": param_counts})\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_config.py"", line 187, in update\r\n sanitized = self._update(d, allow_val_change)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_config.py"", line 180, in _update\r\n sanitized = self._sanitize_dict(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_config.py"", line 267, in _sanitize_dict\r\n k, v = self._sanitize(k, v, allow_val_change)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/wandb/sdk/wandb_config.py"", line 288, in _sanitize\r\n raise config_util.ConfigError(\r\nwandb.sdk.lib.config_util.ConfigError: Attempted to change value of key ""model_param_count"" from {'lam': 17229792, 'total': 179219920, 'dynamics': 128239872, 'tokenizer': 33750256} to {'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 204791808, 'total': 255771856}\r\nIf you really want to do this, pass allow_val_change=True to config.update()\r\n",,terminal_output +129,163281,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run dynamics-maskprob-fix-dev-3371005 at: https://wandb.ai/instant-uv/jafar/runs/3371005\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250723_172003-3371005/logs\r\n",,terminal_output +130,164713,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output
+131,200006,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +132,201018,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1328,0,"",shellscript,selection_mouse +133,201496,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1266,0,"",shellscript,selection_mouse +134,203199,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1265,0,"",shellscript,selection_command +135,205466,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1221,56,"",shellscript,content +136,205471,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1225,0,"",shellscript,selection_command +137,209746,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",,terminal_output +138,210290,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=00:15:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\r\n#SBATCH --job-name=train_dynamics_maskprob_fix_8_node\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\n# tokenizer with the new structure supporting larger ffn_dim\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=8e-5 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=10 \\r\n --name=dynamics-maskprob-fix-dev-$slurm_job_id \\r\n --tags dynamics maskprob-fix \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir \\r\n --dyna_dim=1024 \\r\n --dyna_num_blocks=16 \\r\n --dyna_num_heads=16\r\n\r\n",,terminal_output 
+139,210407,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1952006\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1753283209\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753286809\r\nSLURM_PMI2_SRUN_PORT=34677\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3371005\r\nSLURM_PTY_PORT=45451\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=45\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=252\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=33207\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3371005\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=33207\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +140,210517,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +141,212526,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +142,214672,"TERMINAL",0,0,"2025-07-23 17:20:57.629774: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +143,241296,"TERMINAL",0,0,"2025-07-23 17:21:24.329400: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +144,259234,"TERMINAL",0,0,"2025-07-23 17:21:42.162340: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +145,262325,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +146,262995,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250723_172145-9vscb3jf\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-maskprob-fix-dev-3371005\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/9vscb3jf\r\n",,terminal_output +147,278867,"TERMINAL",0,0,"bash",,terminal_focus +148,280205,"TERMINAL",0,0,"queue",,terminal_command +149,280243,"TERMINAL",0,0,"]633;E;2025-07-23 17:22:03 queue;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;C",,terminal_output +150,280360,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Wed Jul 23 17:22:03 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3370822 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3370952 accelerat interact tum_cte0 R32:38\t 2 hkn[0717,0735]3371005 dev_accel interact tum_cte0 R15:14\t 1 hkn0402",,terminal_output +151,281388,"TERMINAL",0,0,"495",,terminal_output +152,282416,"TERMINAL",0,0,"5406",,terminal_output +153,283487,"TERMINAL",0,0,"617",,terminal_output +154,284407,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/interactive]633;D;0",,terminal_output +155,298643,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 51000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/051000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 52000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/052000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 53000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/053000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/020000/metrics/metrics not found.\r\n",,terminal_output +156,300656,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 204791808, 'total': 255771856}\r\n",,terminal_output +157,304598,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +158,304706,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +159,304787,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +160,328820,"TERMINAL",0,0,"2025-07-23 17:22:51.779471: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:22:51.780367: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:22:51.780899: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:22:51.781836: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:22:51.781859: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:22:51.783317: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +161,474722,"TERMINAL",0,0,"srun",,terminal_focus +162,477894,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3371005.2 task 0: running\r\n",,terminal_output +163,478036,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3371005.2\r\nsrun: forcing job termination\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-7:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-5:\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3371005.2 ON hkn0402 CANCELLED AT 2025-07-23T17:25:21 ***\r\n",,terminal_output +164,478231,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3371005.2\r\nsrun: job abort in progress\r\n",,terminal_output +165,478408,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3371005.2\r\n",,terminal_output +166,478624,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3371005.2\r\n",,terminal_output +167,478694,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +168,479040,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +169,483002,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +170,552545,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1687,0,"",shellscript,selection_mouse +171,576085,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +172,576508,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +173,576583,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +174,576654,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +175,576762,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +176,576892,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0402.localdomain: Wed Jul 23 17:26:59 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3370822 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3370952 accelerat interact tum_cte0 R37:34\t 2 hkn[0717,0735]3371005 dev_accel interact tum_cte0 R20:10\t 1 hkn0402",,terminal_output +177,577898,"TERMINAL",0,0,"7:0051",,terminal_output +178,578496,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +179,589370,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +180,594340,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",0,0,"",shellscript,tab +181,595854,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=12\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-modelsize-scaling/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-modelsize-scaling/%x_%j.log\n#SBATCH --job-name=train_dynamics_modelsize_scaling_270M_16_node\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dynamics-cotraining-modelsize-scaling/$job_name\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_tokenizer_batch_size_scaling_16_node/3321526/tokenizer_22000/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=96 \\n --min_lr=0 \\n --max_lr=1.5e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-modelsize-scaling-270M-$slurm_job_id \\n --tags dynamics modelsize-scaling 270M \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir \\n --dyna_dim=1024 \\n --dyna_num_blocks=24 \\n --dyna_num_heads=16\n",shellscript,tab +182,600225,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +183,602528,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",0,0,"",shellscript,tab +184,605170,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",0,0,"",shellscript,tab +185,606781,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",0,0,"",shellscript,tab +186,607889,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1697,0,"",shellscript,selection_mouse +187,607890,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1696,0,"",shellscript,selection_command +188,608017,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1696,1,"6",shellscript,selection_mouse +189,608021,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1697,0,"",shellscript,selection_command +190,608044,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1696,1,"6",shellscript,selection_mouse +191,608104,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1693,4,"s=16",shellscript,selection_mouse +192,608105,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1687,10,"m_heads=16",shellscript,selection_mouse +193,608105,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1658,39,"num_blocks=24 \\n --dyna_num_heads=16",shellscript,selection_mouse +194,608106,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1655,42,"na_num_blocks=24 \\n --dyna_num_heads=16",shellscript,selection_mouse +195,608119,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1653,44,"dyna_num_blocks=24 \\n --dyna_num_heads=16",shellscript,selection_mouse +196,608137,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1652,45,"-dyna_num_blocks=24 \\n --dyna_num_heads=16",shellscript,selection_mouse +197,608196,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1651,46,"--dyna_num_blocks=24 \\n --dyna_num_heads=16",shellscript,selection_mouse +198,608198,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1650,47," --dyna_num_blocks=24 \\n --dyna_num_heads=16",shellscript,selection_mouse +199,608199,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1649,48," 
--dyna_num_blocks=24 \\n --dyna_num_heads=16",shellscript,selection_mouse +200,608202,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1648,49," --dyna_num_blocks=24 \\n --dyna_num_heads=16",shellscript,selection_mouse +201,608265,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1647,50," --dyna_num_blocks=24 \\n --dyna_num_heads=16",shellscript,selection_mouse +202,608284,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch",1625,72," --dyna_dim=1024 \\n --dyna_num_blocks=24 \\n --dyna_num_heads=16",shellscript,selection_mouse +203,611139,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +204,612131,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1686,0,"",shellscript,selection_mouse +205,612133,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1685,0,"",shellscript,selection_command +206,612242,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1685,1,"6",shellscript,selection_mouse +207,612244,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1686,0,"",shellscript,selection_command +208,612288,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1682,4,"s=16",shellscript,selection_mouse +209,612305,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1678,8,"heads=16",shellscript,selection_mouse +210,612362,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1645,41,"a_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +211,612363,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1643,43,"yna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +212,612363,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1641,45,"-dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +213,612373,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1639,47," --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +214,612417,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1638,48," --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +215,612482,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1637,49," --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +216,612553,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1614,72," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +217,613326,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1614,72,"",shellscript,content +218,613839,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1614,0," --dyna_dim=1024 \\n --dyna_num_blocks=24 \\n --dyna_num_heads=16",shellscript,content +219,643684,"TERMINAL",0,0,"queue",,terminal_output +220,644023,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",,terminal_output +221,644828,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=00:15:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\r\n#SBATCH 
--error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\r\n#SBATCH --job-name=train_dynamics_maskprob_fix_8_node\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\n# tokenizer with the new structure supporting larger ffn_dim\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=8e-5 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=10 \\r\n --name=dynamics-maskprob-fix-dev-$slurm_job_id \\r\n --tags dynamics maskprob-fix \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir \\r\n --dyna_dim=1024 \\r\n --dyna_num_blocks=24 \\r\n --dyna_num_heads=16\r\n\r\n",,terminal_output +222,644989,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1952006\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1753283209\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753286809\r\nSLURM_PMI2_SRUN_PORT=34677\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3371005\r\nSLURM_PTY_PORT=45451\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=45\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=252\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=33207\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3371005\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=33207\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +223,645105,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +224,654198,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +225,656397,"TERMINAL",0,0,"2025-07-23 17:28:19.441055: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +226,683581,"TERMINAL",0,0,"2025-07-23 17:28:46.536485: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +227,702106,"TERMINAL",0,0,"2025-07-23 17:29:05.072210: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +228,705069,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +229,705787,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250723_172908-2g56o49q\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-maskprob-fix-dev-3371005\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/2g56o49q\r\n",,terminal_output +230,708452,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 10\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/interactive/3371005/000010/metrics/metrics not found.\r\n",,terminal_output +231,741422,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 40000\r\nWARNING:absl:Missing metrics for step 51000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/040000/metrics/metrics not found.\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/051000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 53000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/053000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 52000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/052000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/020000/metrics/metrics not 
found.\r\n",,terminal_output +232,743285,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 305594368, 'total': 356574416}\r\n",,terminal_output +233,747131,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +234,747285,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +235,747404,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +236,777265,"TERMINAL",0,0,"2025-07-23 17:30:20.294126: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:30:20.294986: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:30:20.295550: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:30:20.296526: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:30:20.296559: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:30:20.298061: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +237,817611,"TERMINAL",0,0,"2025-07-23 17:31:00.573163: W external/xla/xla/tsl/framework/bfc_allocator.cc:310] Allocator (GPU_0_bfc) ran out of memory trying to allocate 25.53GiB with freed_by_count=0. The caller indicates that this is not a failure, but this may mean that there could be performance gains if more memory were available.\r\n",,terminal_output +238,1112318,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3371005.3 task 0: running\r\n",,terminal_output +239,1112499,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3371005.3\r\nsrun: forcing job termination\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-5:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-7:\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3371005.3 ON hkn0402 CANCELLED AT 2025-07-23T17:35:55 ***\r\n",,terminal_output +240,1112680,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3371005.3\r\nsrun: job abort in progress\r\n",,terminal_output +241,1112884,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3371005.3\r\n",,terminal_output +242,1113153,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +243,1120092,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: 
bool = True\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\ndef _sampling_wrapper(module, batch):\n return module.sample(\n batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax\n )\n\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie))\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(params, batch)\n return generated_vid\n\n\n# --- Get video + latent actions ---\narray_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n]\ndataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n)\nvideo_batch = next(iter(dataloader))\n# Get latent actions for all videos in the batch\nbatch = dict(videos=video_batch)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(video_batch.shape[0], args.seq_len - 1, 1)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\ntrue_videos = (video_batch * 255).astype(np.uint8)\npred_videos = (vid * 255).astype(np.uint8)\nvideo_comparison = np.zeros((2, *vid.shape), dtype=np.uint8)\nvideo_comparison[0] = true_videos[:, : args.seq_len]\nvideo_comparison[1] = pred_videos\nframes = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in frames]\n# Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\nfor t, img in enumerate(imgs[1:]):\n d = 
ImageDraw.Draw(img)\n for row in range(action_batch.shape[0]):\n action = action_batch[row, t, 0]\n y_offset = row * video_batch.shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +244,1127148,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +245,1127514,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1688,0,"",shellscript,selection_mouse +246,1128621,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1614,72,"",shellscript,content +247,1129175,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1614,0," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,content +248,1130040,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1688,0,"",shellscript,selection_mouse +249,1190639,"train_dynamics.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.lr_utils import get_lr_schedule\nfrom utils.parameter_utils import count_parameters_by_component\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype: jnp.dtype = jnp.float32\n dtype: jnp.dtype = jnp.bfloat16\n use_flash_attention: bool = True\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n wandb_id: str = """"\n\n\nargs = tyro.cli(Args)\n\n\ndef dynamics_loss_fn(params, state, inputs):\n """"""Compute masked dynamics loss""""""\n inputs[""videos""] = inputs[""videos""].astype(args.dtype) / 255.0\n outputs = 
state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean() # type: ignore\n ssim = pix.ssim(gt, recon).mean() # type: ignore\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]), size=args.num_latent_actions, fill_value=0\n )\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]), size=args.num_patch_latents, fill_value=0\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_lam=codebook_usage_lam,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n """"""Update state and compute metrics""""""\n grad_fn = jax.value_and_grad(dynamics_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n 
use_flash_attention=args.use_flash_attention,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n dummy_inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=args.dtype,\n ),\n action=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len), dtype=args.dtype\n ),\n mask_rng=_rng,\n )\n rng, _rng = jax.random.split(rng)\n init_params = genie.init(_rng, dummy_inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n train_state = TrainState.create(apply_fn=genie.apply, params=init_params, tx=tx)\n\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n step = 0\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardSave, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointSave, grain.checkpoint.CheckpointHandler) # type: ignore\n handler_registry.add(""dataloader_state"", grain.checkpoint.CheckpointRestore, grain.checkpoint.CheckpointHandler) # type: ignore\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n # --- Create DataLoaderIterator from dataloader ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n\n # --- Restore checkpoint ---\n if args.restore_ckpt:\n # Restore full dynamics model\n abstract_train_state = jax.tree_util.tree_map(\n ocp.utils.to_shape_dtype_struct, train_state\n 
)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_train_state),\n dataloader_state=grain.checkpoint.CheckpointRestore(grain_iterator),\n ),\n )\n train_state = restored[""model_state""]\n grain_iterator = restored[""dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n train_state = restore_genie_components(\n train_state, replicated_sharding, grain_iterator, dummy_inputs, rng, args\n )\n\n # --- TRAIN LOOP ---\n dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in grain_iterator) # type: ignore\n while step < args.num_steps:\n for videos in dataloader:\n # --- Train step ---\n rng, _rng, _rng_dropout, _rng_mask = jax.random.split(rng, 4)\n\n inputs = dict(\n videos=videos,\n rng=_rng,\n dropout_rng=_rng_dropout,\n mask_rng=_rng_mask,\n )\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n metrics[""lr""] = lr_schedule(step)\n print(f""Step {step}, loss: {loss}"")\n step += 1\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n wandb.log(\n {\n ""loss"": loss,\n ""step"": step,\n **metrics,\n }\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n checkpoint_manager.save(\n step,\n args=ocp.args.Composite(\n model_state=ocp.args.StandardSave(train_state),\n dataloader_state=grain.checkpoint.CheckpointSave(\n grain_iterator\n ),\n ),\n )\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n checkpoint_manager.close()\n",python,tab +250,1196104,"train_dynamics.py",1508,0,"",python,selection_mouse +251,1196275,"train_dynamics.py",1503,11,"lam_ffn_dim",python,selection_mouse +252,1197526,"train_dynamics.py",1780,0,"",python,selection_mouse +253,1197692,"train_dynamics.py",1776,15,"dyna_num_blocks",python,selection_mouse +254,1198193,"train_dynamics.py",1753,0,"",python,selection_mouse +255,1198334,"train_dynamics.py",1747,12,"dyna_ffn_dim",python,selection_mouse +256,1215047,"train_dynamics.py",1798,0,"",python,selection_mouse +257,1215214,"train_dynamics.py",1798,1," ",python,selection_mouse +258,1215666,"train_dynamics.py",1795,0,"",python,selection_mouse +259,1215830,"train_dynamics.py",1793,3,"int",python,selection_mouse +260,1216301,"train_dynamics.py",1769,0,"",python,selection_mouse +261,1216462,"train_dynamics.py",1767,4,"2048",python,selection_mouse +262,1548697,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",,terminal_output +263,1550613,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +264,1551591,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1662,0,"",shellscript,selection_mouse 
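Aside: the dynamics_loss_fn captured in the train_dynamics.py snapshot above reduces to a masked cross-entropy, where per-token cross-entropy is weighted by the MaskGIT mask and averaged over masked positions only. A minimal, self-contained sketch of that reduction follows; all shapes and values are hypothetical and for illustration only, not part of the recorded code.
import jax
import jax.numpy as jnp
import optax

def masked_ce(logits, labels, mask):
    # logits: (B, T, N, V) token logits; labels: (B, T, N) ground-truth token ids;
    # mask: (B, T, N) boolean, True where a token was masked and must be predicted.
    ce = optax.softmax_cross_entropy_with_integer_labels(logits, labels)
    return (mask * ce).sum() / mask.sum()  # mean over masked positions only

k1, k2, k3 = jax.random.split(jax.random.PRNGKey(0), 3)
logits = jax.random.normal(k1, (2, 4, 8, 16))       # hypothetical vocab of 16 token ids
labels = jax.random.randint(k2, (2, 4, 8), 0, 16)
mask = jax.random.bernoulli(k3, 0.5, (2, 4, 8))
print(masked_ce(logits, labels, mask))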
+265,2096936,"train_dynamics.py",0,0,"",python,tab +266,2098874,"train_dynamics.py",1809,0,"",python,selection_mouse +267,2099046,"train_dynamics.py",1805,14,"dyna_num_heads",python,selection_mouse +268,2100072,"train_dynamics.py",1810,0,"",python,selection_mouse +269,2103845,"train_dynamics.py",5378,0,"",python,selection_mouse +270,2104104,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom flax.training.train_state import TrainState\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\nimport grain\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n tokenizer_ffn_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n lam_ffn_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n lam_co_train: bool\n # --- Dynamics ---\n dyna_dim: int\n dyna_ffn_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n latent_actions = jax.lax.cond(\n self.lam_co_train,\n lambda: lam_outputs[""z_q""],\n lambda: jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=latent_actions,\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n outputs[""lam_indices""] = lam_outputs[""indices""]\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n 
seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) applying a temporal causal mask within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: patches per frame\n S: sequence length\n A: action space\n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn, initial_carry, timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1)\n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- 
Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jax.random.categorical(_rng, final_logits)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: jax.sharding.NamedSharding,\n grain_iterator: grain.DataLoaderIterator,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.StandardRestore, ocp.handlers.StandardCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n abstract_sharded_tokenizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_tokenizer_state),\n ),\n )[""model_state""]\n restored_tokenizer_params = restored_tokenizer.params[""params""]\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n 
options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n )\n lam_init_params = dummy_lam.init(_rng, inputs)\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n abstract_sharded_lam_state = _create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n restored_lam = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.StandardRestore(abstract_sharded_lam_state),\n ),\n )[""model_state""]\n restored_lam_params = restored_lam.params[""params""]\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately discarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n train_state.params[""params""][""lam""].update(restored_lam_params)\n lam_checkpoint_manager.close()\n\n return train_state\n\n\ndef _create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +271,2106589,"genie.py",901,0,"",python,selection_mouse +272,2106746,"genie.py",894,14,"dyna_num_heads",python,selection_mouse +273,2107353,"genie.py",901,0,"",python,selection_mouse +274,2115313,"genie.py",2300,0,"",python,selection_mouse +275,2117545,"genie.py",2259,0,"",python,selection_mouse +276,2117839,"models/dynamics.py",0,0,"from typing import Dict, Any\n\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom utils.nn import STTransformer\n\n\nclass DynamicsMaskGIT(nn.Module):\n """"""MaskGIT dynamics model""""""\n\n model_dim: int\n ffn_dim: int\n num_latents: int\n num_blocks: int\n num_heads: int\n dropout: float\n mask_limit: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.dynamics = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.patch_embed = nn.Embed(self.num_latents, self.model_dim)\n self.mask_token = self.param(\n ""mask_token"",\n nn.initializers.lecun_uniform(),\n (1, 1, 1, self.model_dim),\n )\n self.action_up = nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n # --- Mask videos ---\n vid_embed = 
self.patch_embed(batch[""video_tokens""])\n if training:\n batch_size = vid_embed.shape[0]\n rng1, rng2 = jax.random.split(batch[""mask_rng""])\n mask_prob = jax.random.uniform(\n rng1, shape=(batch_size,), minval=self.mask_limit\n )\n mask_rngs = jax.random.split(rng2, batch_size)\n per_sample_shape = vid_embed.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(mask_rngs, mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed = jnp.where(jnp.expand_dims(mask, -1), self.mask_token, vid_embed)\n else:\n mask = None\n\n # --- Predict transition ---\n act_embed = self.action_up(batch[""latent_actions""])\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n logits = self.dynamics(vid_embed)\n return dict(token_logits=logits, mask=mask)\n",python,tab +277,2128958,"models/dynamics.py",463,0,"",python,selection_mouse +278,2129256,"utils/nn.py",0,0,"import math\nfrom typing import Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass STBlock(nn.Module):\n dim: int\n ffn_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n z = nn.Dense(\n self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: 
int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) 
l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n **kwargs\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +279,2130724,"utils/nn.py",3203,0,"",python,selection_mouse +280,2130885,"utils/nn.py",3198,9,"num_heads",python,selection_mouse +281,2134797,"utils/nn.py",3951,0,"",python,selection_mouse +282,2136714,"utils/nn.py",1456,0,"",python,selection_mouse +283,2136716,"utils/nn.py",1455,0,"",python,selection_command +284,2136838,"utils/nn.py",1456,0,"",python,selection_mouse +285,2136853,"utils/nn.py",1455,0,"",python,selection_command +286,2137456,"utils/nn.py",1482,0,"",python,selection_mouse +287,2137587,"utils/nn.py",1472,18,"MultiHeadAttention",python,selection_mouse +288,2138810,"utils/nn.py",1546,0,"",python,selection_mouse +289,2138926,"utils/nn.py",1542,12,"qkv_features",python,selection_mouse +290,2144901,"utils/nn.py",1435,0,"",python,selection_mouse +291,2145434,"utils/nn.py",1456,0,"",python,selection_mouse +292,2145435,"utils/nn.py",1455,0,"",python,selection_command +293,2145973,"utils/nn.py",1480,0,"",python,selection_mouse +294,2146786,".venv/lib/python3.10/site-packages/flax/linen/attention.py",0,0,"# Copyright 2024 The Flax Authors.\n#\n# Licensed under the Apache License, Version 2.0 (the ""License"");\n# you may not use this file except in compliance with the License.\n# You may obtain a copy of the License at\n#\n# http://www.apache.org/licenses/LICENSE-2.0\n#\n# Unless required by applicable law or agreed to in writing, software\n# distributed under the License is distributed on an ""AS IS"" BASIS,\n# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.\n# See the License for the specific language governing permissions and\n# limitations under the License.\n\n""""""Attention core modules for Flax.""""""\nfrom __future__ import annotations\n\nimport functools\nimport inspect\nimport warnings\nfrom typing import Any, overload\nfrom collections.abc import Callable\n\nimport jax\nimport jax.numpy as jnp\nfrom jax import lax, random\n\nfrom flax.linen import initializers\nfrom flax.linen.dtypes import promote_dtype\nfrom flax.linen.linear import (\n DenseGeneral,\n default_kernel_init,\n)\nfrom flax.linen.module import Module, compact, 
merge_param\nfrom flax.linen.normalization import LayerNorm\nfrom flax.typing import (\n Array,\n PRNGKey,\n Dtype,\n Shape as Shape,\n Initializer,\n PrecisionLike,\n DotGeneralT,\n)\n\n\ndef dot_product_attention_weights(\n query: Array,\n key: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: PRNGKey | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n force_fp32_for_softmax: bool = False,\n einsum_dot_general: Callable[..., Array] | None = None,\n einsum: Callable[..., Array] | None = None,\n):\n """"""Computes dot-product attention weights given query and key.\n\n Used by :func:`dot_product_attention`, which is what you'll most likely use.\n But if you want access to the attention weights for introspection, then\n you can directly call this function and call einsum yourself.\n\n Args:\n query: queries for calculating attention with shape of ``[batch...,\n q_length, num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is ``False``.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs and params)\n precision: numerical precision of the computation see ``jax.lax.Precision``\n for details.\n module: the Module that will sow the attention weights into the\n 'intermediates' collection. Remember to mark 'intermediates' as mutable\n via ``mutable=['intermediates']`` in order to have that collection\n returned. If ``module`` is None, the attention weights will not be sowed.\n force_fp32_for_softmax: bool, whether to force the softmax to be computed in\n fp32. This is useful for mixed-precision training where higher precision\n is desired for numerical stability.\n einsum_dot_general: the dot_general to use in einsum.\n einsum: If unspecified, default `jnp.einsum` will be used. This argument is\n mutually exclusive with `precision` and `einsum_dot_general`.\n\n Raises:\n ValueError: if both `precision`/`einsum_dot_general` and `einsum` are\n specified.\n\n Returns:\n Output of shape ``[batch..., num_heads, q_length, kv_length]``.\n """"""\n if (precision or einsum_dot_general) and einsum:\n raise ValueError(\n 'precision/einsum_dot_general and einsum are mutually exclusive. 
Please'\n ' specify only one of them.'\n )\n if not einsum:\n einsum = functools.partial(\n jnp.einsum,\n precision=precision,\n _dot_general=einsum_dot_general\n if einsum_dot_general\n else jax.lax.dot_general,\n )\n\n query, key = promote_dtype(query, key, dtype=dtype)\n dtype = query.dtype\n\n assert query.ndim == key.ndim, 'q, k must have same rank.'\n assert query.shape[:-3] == key.shape[:-3], 'q, k batch dims must match.'\n assert query.shape[-2] == key.shape[-2], 'q, k num_heads must match.'\n assert query.shape[-1] == key.shape[-1], 'q, k depths must match.'\n\n # calculate attention matrix\n depth = query.shape[-1]\n query = query / jnp.sqrt(depth).astype(dtype)\n # attn weight shape is (batch..., num_heads, q_length, kv_length)\n attn_weights = einsum('...qhd,...khd->...hqk', query, key)\n\n # apply attention bias: masking, dropout, proximity bias, etc.\n if bias is not None:\n attn_weights = attn_weights + bias\n # apply attention mask\n if mask is not None:\n big_neg = jnp.finfo(dtype).min\n attn_weights = jnp.where(mask, attn_weights, big_neg)\n\n # normalize the attention weights\n if force_fp32_for_softmax and dtype != jnp.float32:\n attn_weights = jax.nn.softmax(attn_weights.astype(jnp.float32))\n else:\n attn_weights = jax.nn.softmax(attn_weights).astype(dtype)\n\n if module:\n module.sow('intermediates', 'attention_weights', attn_weights)\n\n # apply attention dropout\n if not deterministic and dropout_rate > 0.0:\n keep_prob = 1.0 - dropout_rate\n if broadcast_dropout:\n # dropout is broadcast across the batch + head dimensions\n dropout_shape = tuple([1] * (key.ndim - 2)) + attn_weights.shape[-2:]\n keep = random.bernoulli(dropout_rng, keep_prob, dropout_shape) # type: ignore\n else:\n keep = random.bernoulli(dropout_rng, keep_prob, attn_weights.shape) # type: ignore\n multiplier = keep.astype(dtype) / jnp.asarray(keep_prob, dtype=dtype)\n attn_weights = attn_weights * multiplier\n\n return attn_weights\n\n\ndef dot_product_attention(\n query: Array,\n key: Array,\n value: Array,\n bias: Array | None = None,\n mask: Array | None = None,\n broadcast_dropout: bool = True,\n dropout_rng: PRNGKey | None = None,\n dropout_rate: float = 0.0,\n deterministic: bool = False,\n dtype: Dtype | None = None,\n precision: PrecisionLike = None,\n module: Module | None = None,\n force_fp32_for_softmax: bool = False,\n einsum_dot_general: Callable[..., Array] | None = None,\n qk_attn_weights_einsum: Callable[..., Array] | None = None,\n attn_weights_value_einsum: Callable[..., Array] | None = None,\n):\n """"""Computes dot-product attention given query, key, and value.\n\n This is the core function for applying attention based on\n https://arxiv.org/abs/1706.03762. It calculates the attention weights given\n query and key and combines the values using the attention weights.\n\n .. note::\n ``query``, ``key``, ``value`` needn't have any batch dimensions.\n\n Args:\n query: queries for calculating attention with shape of ``[batch...,\n q_length, num_heads, qk_depth_per_head]``.\n key: keys for calculating attention with shape of ``[batch..., kv_length,\n num_heads, qk_depth_per_head]``.\n value: values to be used in attention with shape of ``[batch..., kv_length,\n num_heads, v_depth_per_head]``.\n bias: bias for the attention weights. This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks, padding masks, proximity bias, etc.\n mask: mask for the attention weights. 
This should be broadcastable to the\n shape ``[batch..., num_heads, q_length, kv_length]``. This can be used for\n incorporating causal masks. Attention weights are masked out if their\n corresponding mask value is ``False``.\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rng: JAX PRNGKey: to be used for dropout\n dropout_rate: dropout rate\n deterministic: bool, deterministic or not (to apply dropout)\n dtype: the dtype of the computation (default: infer from inputs)\n precision: numerical precision of the computation see ``jax.lax.Precision`\n for details.\n module: the Module that will sow the attention weights into the\n 'intermediates' collection. Remember to mark 'intermediates' as mutable\n via ``mutable=['intermediates']`` in order to have that collection\n returned. If ``module`` is None, the attention weights will not be sowed.\n force_fp32_for_softmax: bool, whether to force the softmax to be computed in\n fp32. This is useful for mixed-precision training where higher precision\n is desired for numerical stability.\n einsum_dot_general: the dot_general to use in `jnp.einsum`.\n qk_attn_weights_einsum: the einsum for computing the attention weights. When\n unspecified, the default `jnp.einsum` will be used. This argument is\n mutually exclusive with `precision` and `einsum_dot_general`.\n attn_weights_value_einsum: the einsum for computing the product of the\n attention weights and the values. When unspecified, the default\n `jnp.einsum` will be used. This argument is mutually exclusive with\n `precision` and `einsum_dot_general`.\n\n Returns:\n Output of shape ``[batch..., q_length, num_heads, v_depth_per_head]``.\n\n Raises:\n ValueError: if both `precision`/`einsum_dot_general` and\n `qk_attn_weights_einsum`/`attn_weights_value_einsum` are\n specified.\n """"""\n if (qk_attn_weights_einsum and not attn_weights_value_einsum) or (\n not qk_attn_weights_einsum and attn_weights_value_einsum\n ):\n raise ValueError(\n 'qk_attn_weights_einsum and attn_weights_value_einsum must be specified'\n ' together.'\n )\n if (precision or einsum_dot_general) and (\n qk_attn_weights_einsum or attn_weights_value_einsum\n ):\n raise ValueError(\n 'precision/einsum_dot_general and'\n ' qk_attn_weights_einsum/attn_weights_value_einsum are mutually'\n ' exclusive. 
Please specify only one of them.'\n )\n\n query, key, value = promote_dtype(query, key, value, dtype=dtype)\n dtype = query.dtype\n assert key.ndim == query.ndim == value.ndim, 'q, k, v must have same rank.'\n assert (\n query.shape[:-3] == key.shape[:-3] == value.shape[:-3]\n ), 'q, k, v batch dims must match.'\n assert (\n query.shape[-2] == key.shape[-2] == value.shape[-2]\n ), 'q, k, v num_heads must match.'\n assert key.shape[-3] == value.shape[-3], 'k, v lengths must match.'\n\n # compute attention weights\n attn_weights = dot_product_attention_weights(\n query,\n key,\n bias,\n mask,\n broadcast_dropout,\n dropout_rng,\n dropout_rate,\n deterministic,\n dtype,\n precision,\n module,\n force_fp32_for_softmax,\n einsum_dot_general=einsum_dot_general,\n einsum=qk_attn_weights_einsum,\n )\n if not attn_weights_value_einsum:\n attn_weights_value_einsum = functools.partial(\n jnp.einsum,\n precision=precision,\n _dot_general=einsum_dot_general\n if einsum_dot_general\n else jax.lax.dot_general,\n )\n # return weighted sum over values for each query position\n return attn_weights_value_einsum(\n '...hqk,...khd->...qhd',\n attn_weights,\n value,\n )\n\n\nclass MultiHeadDotProductAttention(Module):\n """"""Multi-head dot-product attention.\n\n Example usage::\n\n >>> import flax.linen as nn\n >>> import jax\n\n >>> layer = nn.MultiHeadDotProductAttention(num_heads=8, qkv_features=16)\n >>> key1, key2, key3, key4, key5, key6 = jax.random.split(jax.random.key(0), 6)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = jax.random.uniform(key1, shape), jax.random.uniform(key2, shape), jax.random.uniform(key3, shape)\n >>> variables = layer.init(jax.random.key(0), q)\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer.apply(variables, q, k, v)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=k, inputs_v=k)\n >>> out = layer.apply(variables, q, k)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=q) and layer.apply(variables, inputs_q=q, inputs_k=q, inputs_v=q)\n >>> out = layer.apply(variables, q)\n\n >>> attention_kwargs = dict(\n ... num_heads=8,\n ... qkv_features=16,\n ... kernel_init=nn.initializers.ones,\n ... bias_init=nn.initializers.zeros,\n ... dropout_rate=0.5,\n ... deterministic=False,\n ... )\n >>> class Module(nn.Module):\n ... attention_kwargs: dict\n ...\n ... @nn.compact\n ... def __call__(self, x, dropout_rng=None):\n ... out1 = nn.MultiHeadDotProductAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... out2 = nn.MultiHeadDotProductAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... return out1, out2\n >>> module = Module(attention_kwargs)\n >>> variables = module.init({'params': key1, 'dropout': key2}, q)\n\n >>> # out1 and out2 are different.\n >>> out1, out2 = module.apply(variables, q, rngs={'dropout': key3})\n >>> # out3 and out4 are different.\n >>> # out1 and out3 are different. out2 and out4 are different.\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key4})\n >>> # out1 and out2 are the same.\n >>> out1, out2 = module.apply(variables, q, dropout_rng=key5)\n >>> # out1 and out2 are the same as out3 and out4.\n >>> # providing a `dropout_rng` arg will take precedence over the `rngs` arg in `.apply`\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key6}, dropout_rng=key5)\n\n Attributes:\n num_heads: Number of attention heads. Features (i.e. 
inputs_q.shape[-1])\n should be divisible by the number of heads.\n dtype: The dtype of the computation (default: infer from inputs and params)\n param_dtype: The dtype passed to parameter initializers (default: float32)\n qkv_features: Dimension of the key, query, and value.\n out_features: Dimension of the last projection\n broadcast_dropout: Use a broadcasted dropout along batch dims.\n dropout_rate: Dropout rate.\n deterministic: If False, the attention weight is masked randomly using\n dropout, whereas if True, the attention weights are deterministic.\n precision: Numerical precision of the computation see ``jax.lax.Precision``\n for details.\n kernel_init: Initializer for the kernel of the Dense layers.\n out_kernel_init: Optional Initializer for the kernel of the output Dense layer,\n if None, ``kernel_init`` will be used.\n bias_init: Initializer for the bias of the Dense layers.\n out_bias_init: Optional Initializer for the bias of the output Dense layer,\n if None, ``bias_init`` will be used.\n use_bias: Whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. Accepts query,\n key, value, and returns output of shape ``[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: Whether to prepare and use an autoregressive cache.\n normalize_qk: Should QK normalization be applied (arxiv.org/abs/2302.05442).\n qk_attn_weights_einsum_cls: factory function to create the einsum for\n computing the attention weights.\n attn_weights_value_einsum_cls: factory function to create the einsum for\n computing the product of the attention weights and the values.\n """"""\n\n num_heads: int\n dtype: Dtype | None = None\n param_dtype: Dtype = jnp.float32\n qkv_features: int | None = None\n out_features: int | None = None\n broadcast_dropout: bool = True\n dropout_rate: float = 0.0\n deterministic: bool | None = None\n precision: PrecisionLike = None\n kernel_init: Initializer = default_kernel_init\n out_kernel_init: Initializer | None = None\n bias_init: Initializer = initializers.zeros_init()\n out_bias_init: Initializer | None = None\n use_bias: bool = True\n attention_fn: Callable[..., Array] = dot_product_attention\n decode: bool = False\n normalize_qk: bool = False\n force_fp32_for_softmax: bool = False\n # Deprecated, will be removed.\n qkv_dot_general: DotGeneralT | None = None\n out_dot_general: DotGeneralT | None = None\n qkv_dot_general_cls: Any = None\n out_dot_general_cls: Any = None\n qk_attn_weights_einsum_cls: Callable[..., Callable[..., Array]] | None = None\n attn_weights_value_einsum_cls: Callable[..., Callable[..., Array]] | None = (\n None\n )\n\n @overload\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n ...\n\n @overload\n def __call__(\n self,\n inputs_q: Array,\n *,\n inputs_kv: Array | None = None,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n ...\n\n @compact\n def __call__(\n self,\n inputs_q: Array,\n inputs_k: Array | None = None,\n inputs_v: Array | None = None,\n *,\n inputs_kv: Array | None = None,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n """"""Applies multi-head dot product attention on the input data.\n\n 
Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n If both inputs_k and inputs_v are None, they will both copy the value of\n inputs_q (self attention).\n If only inputs_v is None, it will copy the value of inputs_k.\n\n Args:\n inputs_q: input queries of shape ``[batch_sizes..., length, features]``.\n inputs_k: key of shape ``[batch_sizes..., length, features]``. If None,\n inputs_k will copy the value of inputs_q.\n inputs_v: values of shape ``[batch_sizes..., length, features]``. If None,\n inputs_v will copy the value of inputs_k.\n inputs_kv: key/values of shape ``[batch_sizes..., length, features]``. If\n None, inputs_kv will copy the value of inputs_q. This arg will be\n deprecated soon. Use inputs_k and inputs_v instead.\n mask: attention mask of shape ``[batch_sizes..., num_heads, query_length,\n key/value_length]``. Attention weights are masked out if their\n corresponding mask value is ``False``.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n dropout_rng: optional rng key to pass to the attention layer's dropout\n mask. Otherwise, self.make_rng('dropout') is used instead.\n sow_weights: if ``True``, the attention weights are sowed into the\n 'intermediates' collection. Remember to mark 'intermediates' as\n mutable via ``mutable=['intermediates']`` in order to have that\n collection returned.\n\n Returns:\n output of shape ``[batch_sizes..., length, features]``.\n """"""\n if inputs_kv is not None:\n if inputs_k is not None or inputs_v is not None:\n raise ValueError(\n 'If either `inputs_k` or `inputs_v` is not None, '\n '`inputs_kv` must be None. If `inputs_kv` is not None, both `inputs_k` '\n 'and `inputs_v` must be None. We recommend using `inputs_k` and '\n '`inputs_v` args, since `inputs_kv` will be deprecated soon. See '\n 'https://github.com/google/flax/discussions/3389 for more '\n 'information.'\n )\n inputs_k = inputs_v = inputs_kv\n warnings.warn(\n 'The inputs_kv arg will be deprecated soon. '\n 'Use inputs_k and inputs_v instead. See '\n 'https://github.com/google/flax/discussions/3389 '\n 'for more information.',\n DeprecationWarning,\n )\n else:\n if inputs_k is None:\n if inputs_v is not None:\n raise ValueError(\n '`inputs_k` cannot be None if `inputs_v` is not None. '\n 'To have both `inputs_k` and `inputs_v` be the same value, pass in the '\n 'value to `inputs_k` and leave `inputs_v` as None.'\n )\n inputs_k = inputs_q\n if inputs_v is None:\n inputs_v = inputs_k\n elif inputs_v.shape[-1] == inputs_v.shape[-2]:\n warnings.warn(\n f'You are passing an array of shape {inputs_v.shape} '\n 'to the `inputs_v` arg, when you may have intended '\n 'to pass it to the `mask` arg. As of Flax version '\n '0.7.4, the function signature of '\n ""MultiHeadDotProductAttention's `__call__` method ""\n 'has changed to `__call__(inputs_q, inputs_k=None, '\n 'inputs_v=None, *, inputs_kv=None, mask=None, '\n 'deterministic=None)`. Use the kwarg `mask` instead. 
'\n 'See https://github.com/google/flax/discussions/3389 '\n 'and read the docstring for more information.',\n DeprecationWarning,\n )\n\n features = self.out_features or inputs_q.shape[-1]\n qkv_features = self.qkv_features or inputs_q.shape[-1]\n assert qkv_features % self.num_heads == 0, (\n f'Memory dimension ({qkv_features}) must be divisible by number of'\n f' heads ({self.num_heads}).'\n )\n head_dim = qkv_features // self.num_heads\n\n dense = functools.partial(\n DenseGeneral,\n axis=-1,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n features=(self.num_heads, head_dim),\n kernel_init=self.kernel_init,\n bias_init=self.bias_init,\n use_bias=self.use_bias,\n precision=self.precision,\n dot_general=self.qkv_dot_general,\n dot_general_cls=self.qkv_dot_general_cls,\n )\n # project inputs_q to multi-headed q/k/v\n # dimensions are then [batch..., length, n_heads, n_features_per_head]\n query, key, value = (\n dense(name='query')(inputs_q),\n dense(name='key')(inputs_k),\n dense(name='value')(inputs_v),\n )\n\n if self.normalize_qk:\n # Normalizing query and key projections stabilizes training with higher\n # LR. See ViT-22B paper http://arxiv.org/abs/2302.05442 for analysis.\n query = LayerNorm(\n name='query_ln',\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n )(query) # type: ignore[call-arg]\n key = LayerNorm(\n name='key_ln',\n use_bias=False,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n )(key) # type: ignore[call-arg]\n\n # During fast autoregressive decoding, we feed one position at a time,\n # and cache the keys and values step by step.\n if self.decode:\n # detect if we're initializing by absence of existing cache data.\n is_initialized = self.has_variable('cache', 'cached_key')\n cached_key = self.variable(\n 'cache', 'cached_key', jnp.zeros, key.shape, key.dtype\n )\n cached_value = self.variable(\n 'cache', 'cached_value', jnp.zeros, value.shape, value.dtype\n )\n cache_index = self.variable(\n 'cache', 'cache_index', lambda: jnp.array(0, dtype=jnp.int32)\n )\n if is_initialized:\n (\n *batch_dims,\n max_length,\n num_heads,\n depth_per_head,\n ) = cached_key.value.shape\n # shape check of cached keys against query input\n expected_shape = tuple(batch_dims) + (1, num_heads, depth_per_head)\n if expected_shape != query.shape:\n raise ValueError(\n 'Autoregressive cache shape error, '\n 'expected query shape %s instead got %s.'\n % (expected_shape, query.shape)\n )\n # update key, value caches with our new 1d spatial slices\n cur_index = cache_index.value\n zero = jnp.array(0, dtype=lax.dtype(cur_index.dtype))\n indices: tuple[int | jax.Array, ...] 
= (zero,) * len(\n batch_dims\n ) + (\n cur_index,\n zero,\n zero,\n )\n key = lax.dynamic_update_slice(cached_key.value, key, indices)\n value = lax.dynamic_update_slice(cached_value.value, value, indices)\n cached_key.value = key\n cached_value.value = value\n cache_index.value = cache_index.value + 1\n # causal mask for cached decoder self-attention:\n # our single query position should only attend to those key\n # positions that have already been generated and cached,\n # not the remaining zero elements.\n mask = combine_masks(\n mask,\n jnp.broadcast_to(\n jnp.arange(max_length) <= cur_index,\n tuple(batch_dims) + (1, 1, max_length),\n ),\n )\n\n if (\n self.dropout_rate > 0.0\n ): # Require `deterministic` only if using dropout.\n m_deterministic = merge_param(\n 'deterministic', self.deterministic, deterministic\n )\n if not m_deterministic and dropout_rng is None:\n dropout_rng = self.make_rng('dropout')\n else:\n m_deterministic = True\n\n # `qk_attn_weights_einsum` and `attn_weights_value_einsum` are optional\n # arguments that can be used to override the default `jnp.einsum`. They\n # exist for quantized einsum support in AQT.\n qk_attn_weights_einsum = (\n self.qk_attn_weights_einsum_cls()\n if self.qk_attn_weights_einsum_cls\n else None\n )\n attn_weights_value_einsum = (\n self.attn_weights_value_einsum_cls()\n if self.attn_weights_value_einsum_cls\n else None\n )\n # apply attention\n attn_args = (query, key, value)\n # This kwargs list match the default nn.dot_product_attention.\n # For custom `attention_fn`s, invalid kwargs will be filtered.\n attn_kwargs = dict(\n mask=mask,\n dropout_rng=dropout_rng,\n dropout_rate=self.dropout_rate,\n broadcast_dropout=self.broadcast_dropout,\n deterministic=m_deterministic,\n dtype=self.dtype,\n precision=self.precision,\n force_fp32_for_softmax=self.force_fp32_for_softmax,\n qk_attn_weights_einsum=qk_attn_weights_einsum,\n attn_weights_value_einsum=attn_weights_value_einsum,\n )\n attn_kwargs = {\n k: v\n for k, v in attn_kwargs.items()\n if k in inspect.signature(self.attention_fn).parameters\n }\n if sow_weights:\n x = self.attention_fn(*attn_args, **attn_kwargs, module=self)\n else:\n x = self.attention_fn(*attn_args, **attn_kwargs)\n # back to the original inputs dimensions\n out = DenseGeneral(\n features=features,\n axis=(-2, -1),\n kernel_init=self.out_kernel_init or self.kernel_init,\n bias_init=self.out_bias_init or self.bias_init,\n use_bias=self.use_bias,\n dtype=self.dtype,\n param_dtype=self.param_dtype,\n precision=self.precision,\n dot_general=self.out_dot_general,\n dot_general_cls=self.out_dot_general_cls,\n name='out', # type: ignore[call-arg]\n )(x)\n return out\n\n\nclass MultiHeadAttention(MultiHeadDotProductAttention):\n """"""Multi-head dot-product attention.\n Alias for ``MultiHeadDotProductAttention``.\n\n **NOTE**: ``MultiHeadAttention`` is a wrapper of ``MultiHeadDotProductAttention``,\n and so their implementations are identical. However ``MultiHeadAttention`` layers\n will, by default, be named ``MultiHeadAttention_{index}``, whereas ``MultiHeadDotProductAttention``\n will be named ``MultiHeadDotProductAttention_{index}``. 
Therefore, this could affect\n checkpointing, param collection names and RNG threading (since the layer name is\n used when generating new RNG's) within the module.\n\n Example usage::\n\n >>> import flax.linen as nn\n >>> import jax\n\n >>> layer = nn.MultiHeadAttention(num_heads=8, qkv_features=16)\n >>> key1, key2, key3, key4, key5, key6 = jax.random.split(jax.random.key(0), 6)\n >>> shape = (4, 3, 2, 5)\n >>> q, k, v = jax.random.uniform(key1, shape), jax.random.uniform(key2, shape), jax.random.uniform(key3, shape)\n >>> variables = layer.init(jax.random.key(0), q)\n\n >>> # different inputs for inputs_q, inputs_k and inputs_v\n >>> out = layer.apply(variables, q, k, v)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=k, inputs_v=k)\n >>> out = layer.apply(variables, q, k)\n >>> # equivalent to layer.apply(variables, inputs_q=q, inputs_k=q) and layer.apply(variables, inputs_q=q, inputs_k=q, inputs_v=q)\n >>> out = layer.apply(variables, q)\n\n >>> attention_kwargs = dict(\n ... num_heads=8,\n ... qkv_features=16,\n ... kernel_init=nn.initializers.ones,\n ... bias_init=nn.initializers.zeros,\n ... dropout_rate=0.5,\n ... deterministic=False,\n ... )\n >>> class Module(nn.Module):\n ... attention_kwargs: dict\n ...\n ... @nn.compact\n ... def __call__(self, x, dropout_rng=None):\n ... out1 = nn.MultiHeadAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... out2 = nn.MultiHeadAttention(**self.attention_kwargs)(x, dropout_rng=dropout_rng)\n ... return out1, out2\n >>> module = Module(attention_kwargs)\n >>> variables = module.init({'params': key1, 'dropout': key2}, q)\n\n >>> # out1 and out2 are different.\n >>> out1, out2 = module.apply(variables, q, rngs={'dropout': key3})\n >>> # out3 and out4 are different.\n >>> # out1 and out3 are different. out2 and out4 are different.\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key4})\n >>> # out1 and out2 are the same.\n >>> out1, out2 = module.apply(variables, q, dropout_rng=key5)\n >>> # out1 and out2 are the same as out3 and out4.\n >>> # providing a `dropout_rng` arg will take precedence over the `rngs` arg in `.apply`\n >>> out3, out4 = module.apply(variables, q, rngs={'dropout': key6}, dropout_rng=key5)\n\n Attributes:\n num_heads: number of attention heads. Features (i.e. inputs_q.shape[-1])\n should be divisible by the number of heads.\n dtype: the dtype of the computation (default: infer from inputs and params)\n param_dtype: the dtype passed to parameter initializers (default: float32)\n qkv_features: dimension of the key, query, and value.\n out_features: dimension of the last projection\n broadcast_dropout: bool: use a broadcasted dropout along batch dims.\n dropout_rate: dropout rate\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n precision: numerical precision of the computation see ``jax.lax.Precision``\n for details.\n kernel_init: initializer for the kernel of the Dense layers.\n bias_init: initializer for the bias of the Dense layers.\n use_bias: bool: whether pointwise QKVO dense transforms use bias.\n attention_fn: dot_product_attention or compatible function. 
Accepts query,\n key, value, and returns output of shape ``[bs, dim1, dim2, ..., dimN,,\n num_heads, value_channels]``\n decode: whether to prepare and use an autoregressive cache.\n normalize_qk: should QK normalization be applied (arxiv.org/abs/2302.05442).\n """"""\n\n\nclass SelfAttention(MultiHeadDotProductAttention):\n """"""Self-attention special case of multi-head dot-product attention.\n This layer is deprecated in favor of ``MultiHeadDotProductAttention``.\n\n Example usage::\n >>> import flax.linen as nn\n >>> import jax, jax.numpy as jnp\n >>> layer = nn.MultiHeadDotProductAttention(num_heads=8, qkv_features=16)\n >>> variables = layer.init(jax.random.key(0), jnp.ones((4, 3, 2, 5)))\n """"""\n\n @compact\n def __call__( # type: ignore\n self,\n inputs_q: Array,\n mask: Array | None = None,\n deterministic: bool | None = None,\n dropout_rng: PRNGKey | None = None,\n sow_weights: bool = False,\n ):\n """"""Applies multi-head dot product self-attention on the input data.\n\n Projects the inputs into multi-headed query, key, and value vectors,\n applies dot-product attention and project the results to an output vector.\n\n Args:\n inputs_q: input queries of shape ``[batch_sizes..., length, features]``.\n mask: attention mask of shape ``[batch_sizes..., num_heads, query_length,\n key/value_length]``. Attention weights are masked out if their\n corresponding mask value is ``False``.\n deterministic: if false, the attention weight is masked randomly using\n dropout, whereas if true, the attention weights are deterministic.\n\n Returns:\n output of shape ``[batch_sizes..., length, features]``.\n """"""\n warnings.warn(\n 'SelfAttention will be deprecated soon. Use '\n '`MultiHeadDotProductAttention.__call__(inputs_q)` instead. '\n 'See https://github.com/google/flax/discussions/3389 '\n 'for more information.',\n DeprecationWarning,\n )\n return super().__call__(\n inputs_q,\n mask=mask,\n deterministic=deterministic,\n dropout_rng=dropout_rng,\n sow_weights=sow_weights,\n )\n\n\n# mask-making utility functions\n\n\ndef make_attention_mask(\n query_input: Array,\n key_input: Array,\n pairwise_fn: Callable[..., Any] = jnp.multiply,\n extra_batch_dims: int = 0,\n dtype: Dtype = jnp.float32,\n):\n """"""Mask-making helper for attention weights.\n\n In case of 1d inputs (i.e., ``[batch..., len_q]``, ``[batch..., len_kv]``, the\n attention weights will be ``[batch..., heads, len_q, len_kv]`` and this\n function will produce ``[batch..., 1, len_q, len_kv]``.\n\n Args:\n query_input: a batched, flat input of query_length size\n key_input: a batched, flat input of key_length size\n pairwise_fn: broadcasting elementwise comparison function\n extra_batch_dims: number of extra batch dims to add singleton axes for, none\n by default\n dtype: mask return dtype\n\n Returns:\n A ``[batch..., 1, len_q, len_kv]`` shaped mask for 1d attention.\n """"""\n mask = pairwise_fn(\n jnp.expand_dims(query_input, axis=-1), jnp.expand_dims(key_input, axis=-2)\n )\n mask = jnp.expand_dims(mask, axis=-3)\n mask = jnp.expand_dims(mask, axis=tuple(range(extra_batch_dims)))\n return mask.astype(dtype)\n\n\ndef make_causal_mask(\n x: Array, extra_batch_dims: int = 0, dtype: Dtype = jnp.float32\n) -> Array:\n """"""Make a causal mask for self-attention.\n\n In case of 1d inputs (i.e., ``[batch..., len]``, the self-attention weights\n will be ``[batch..., heads, len, len]`` and this function will produce a\n causal mask of shape ``[batch..., 1, len, len]``.\n\n Args:\n x: input array of shape ``[batch..., len]``\n 
extra_batch_dims: number of batch dims to add singleton axes for, none by\n default\n dtype: mask return dtype\n\n Returns:\n A ``[batch..., 1, len, len]`` shaped causal mask for 1d attention.\n """"""\n idxs = jnp.broadcast_to(jnp.arange(x.shape[-1], dtype=jnp.int32), x.shape)\n return make_attention_mask(\n idxs,\n idxs,\n jnp.greater_equal,\n extra_batch_dims=extra_batch_dims,\n dtype=dtype,\n )\n\n\ndef combine_masks(\n *masks: Array | None, dtype: Dtype = jnp.float32\n) -> Array | None:\n """"""Combine attention masks.\n\n Args:\n *masks: set of attention mask arguments to combine, some can be None.\n dtype: dtype for the returned mask.\n\n Returns:\n Combined mask, reduced by logical and, returns None if no masks given.\n """"""\n masks_list = [m for m in masks if m is not None]\n if not masks_list:\n return None\n assert all(\n map(lambda x: x.ndim == masks_list[0].ndim, masks_list)\n ), f'masks must have same rank: {tuple(map(lambda x: x.ndim, masks_list))}'\n mask, *other_masks = masks_list\n for other_mask in other_masks:\n mask = jnp.logical_and(mask, other_mask)\n return mask.astype(dtype)\n",python,tab +295,2149700,".venv/lib/python3.10/site-packages/flax/linen/attention.py",27284,0,"",python,selection_mouse +296,2152086,".venv/lib/python3.10/site-packages/flax/linen/attention.py",14408,0,"",python,selection_mouse +297,2153019,".venv/lib/python3.10/site-packages/flax/linen/attention.py",14228,0,"",python,selection_mouse +298,2158842,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15461,0,"",python,selection_mouse +299,2158968,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15456,26,"qk_attn_weights_einsum_cls",python,selection_mouse +300,2167602,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16099,0,"",python,selection_mouse +301,2171913,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16223,0,"",python,selection_mouse +302,2172476,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16333,0,"",python,selection_mouse +303,2173170,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16456,0,"",python,selection_mouse +304,2176065,".venv/lib/python3.10/site-packages/flax/linen/attention.py",17617,0,"",python,selection_command +305,2177176,".venv/lib/python3.10/site-packages/flax/linen/attention.py",17699,0,"",python,selection_command +306,2178288,".venv/lib/python3.10/site-packages/flax/linen/attention.py",18613,0,"",python,selection_command +307,2180247,".venv/lib/python3.10/site-packages/flax/linen/attention.py",20920,0,"",python,selection_command +308,2181715,".venv/lib/python3.10/site-packages/flax/linen/attention.py",21465,0,"",python,selection_command +309,2182372,".venv/lib/python3.10/site-packages/flax/linen/attention.py",21562,0,"",python,selection_command +310,2183080,".venv/lib/python3.10/site-packages/flax/linen/attention.py",21465,0,"",python,selection_command +311,2183284,".venv/lib/python3.10/site-packages/flax/linen/attention.py",20920,0,"",python,selection_command +312,2183505,".venv/lib/python3.10/site-packages/flax/linen/attention.py",18613,0,"",python,selection_command +313,2183702,".venv/lib/python3.10/site-packages/flax/linen/attention.py",17699,0,"",python,selection_command +314,2184517,".venv/lib/python3.10/site-packages/flax/linen/attention.py",18613,0,"",python,selection_command +315,2190365,".venv/lib/python3.10/site-packages/flax/linen/attention.py",16389,0,"",python,selection_mouse 
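The long row above captures a read-through of flax/linen/attention.py, ending at the mask helpers. A minimal sketch of how those helpers compose, assuming flax and jax as installed in the recorded .venv (shapes follow the docstrings above; this is not code from the repo):

import jax.numpy as jnp
import flax.linen as nn

x = jnp.ones((2, 4))                          # [batch, len] token ids (toy values)
causal = nn.make_causal_mask(x)               # [2, 1, 4, 4]; position i attends to j <= i
pad = nn.make_attention_mask(x != 0, x != 0)  # [2, 1, 4, 4] padding mask from nonzero ids
mask = nn.combine_masks(causal, pad)          # logical-and of both masks, float32
print(causal.shape, mask.shape)               # (2, 1, 4, 4) (2, 1, 4, 4)

Per the docstring above, combine_masks returns None when given no masks, so optional masks can be threaded through unconditionally.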
+316,2191891,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15726,0,"",python,selection_mouse +317,2192691,".venv/lib/python3.10/site-packages/flax/linen/attention.py",15724,0,"",python,selection_command +318,2195618,"utils/nn.py",0,0,"",python,tab +319,2335928,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +320,2345354,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",0,0,"",shellscript,tab +321,2346111,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1718,0,"",shellscript,selection_mouse +322,2346113,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1717,0,"",shellscript,selection_command +323,2346280,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1717,1,"6",shellscript,selection_mouse +324,2346287,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1718,0,"",shellscript,selection_command +325,2346307,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1718,1,"\n",shellscript,selection_mouse +326,2346328,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1714,4,"s=16",shellscript,selection_mouse +327,2346348,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1709,9,"_heads=16",shellscript,selection_mouse +328,2346362,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1706,12,"num_heads=16",shellscript,selection_mouse +329,2346394,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1676,42,"na_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +330,2346410,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1673,45,"-dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +331,2346424,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1650,68,"--dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +332,2346440,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1649,69," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +333,2346455,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1648,70," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +334,2346526,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1647,71," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +335,2346594,"slurm/jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch",1646,72," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +336,2350969,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +337,2351953,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1686,0,"",shellscript,selection_mouse +338,2352112,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1683,3,"=16",shellscript,selection_mouse +339,2352131,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1674,12,"num_heads=16",shellscript,selection_mouse +340,2352188,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1642,44,"dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse 
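The selections above land on --dyna_dim=1024, --dyna_num_blocks=16 and --dyna_num_heads=16 in the sbatch launch lines; the assertion in the attention module just read requires the feature width to divide evenly by the head count. A quick check with those recorded values (standalone Python, not from the repo):

dyna_dim, dyna_num_heads = 1024, 16
assert dyna_dim % dyna_num_heads == 0, "qkv features must be divisible by num_heads"
head_dim = dyna_dim // dyna_num_heads  # 64 features per head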
+341,2352188,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1638,48," --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +342,2352189,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1636,50," --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +343,2352254,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1614,72," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,selection_mouse +344,2352701,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1614,72,"",shellscript,content +345,2353377,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1614,0," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16",shellscript,content +346,2354758,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1686,0,"\n ",shellscript,content +347,2355289,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1687,4,"",shellscript,content +348,2355631,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1686,1,"",shellscript,content +349,2356282,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1686,0," ",shellscript,content +350,2356283,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1687,0,"",shellscript,selection_keyboard +351,2356572,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1687,0,"\",shellscript,content +352,2356572,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1688,0,"",shellscript,selection_keyboard +353,2356753,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1688,0,"\n ",shellscript,content +354,2357163,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1693,0,"-",shellscript,content +355,2357164,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1694,0,"",shellscript,selection_keyboard +356,2357299,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1694,0,"-",shellscript,content +357,2357300,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1695,0,"",shellscript,selection_keyboard +358,2359550,"train_dynamics.py",0,0,"",python,tab +359,2364518,"train_dynamics.py",1436,0,"",python,selection_mouse +360,2367053,"train_dynamics.py",1750,0,"",python,selection_mouse +361,2367214,"train_dynamics.py",1747,12,"dyna_ffn_dim",python,selection_mouse +362,2370863,"models/tokenizer.py",0,0,"from typing import Dict, Any, Tuple\n\nimport flax.linen as nn\nimport jax.numpy as jnp\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass TokenizerVQVAE(nn.Module):\n """"""ST-ViVit VQ-VAE""""""\n\n in_dim: int\n model_dim: int\n ffn_dim: int\n latent_dim: int\n num_latents: int\n patch_size: int\n num_blocks: int\n num_heads: int\n dropout: float\n codebook_dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n def setup(self):\n self.encoder = STTransformer(\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n )\n self.out_dim = self.in_dim * self.patch_size**2\n self.decoder = STTransformer(\n self.model_dim,\n 
self.ffn_dim,\n self.out_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n H, W = batch[""videos""].shape[2:4]\n outputs = self.vq_encode(batch[""videos""], training)\n recon = self.decoder(outputs[""z_q""]) # (B, T, H_down * W_down, C)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n outputs[""recon""] = unpatchify(recon, self.patch_size, H, W)\n return outputs\n\n def vq_encode(self, videos: Any, training: bool = True) -> Dict[str, Any]:\n # --- Preprocess + encode ---\n B, T = videos.shape[:2]\n x = patchify(videos, self.patch_size)\n N = x.shape[2]\n x = self.encoder(x) # (B, T, N, E)\n\n # --- Vector quantize ---\n x = x.reshape(B * T * N, self.latent_dim)\n z_q, z, emb, indices = self.vq(x, training)\n z_q = z_q.reshape(B, T, N, self.latent_dim)\n indices = indices.reshape(B, T, N)\n return dict(z_q=z_q, z=z, emb=emb, indices=indices)\n\n def decode(self, indices: Any, video_hw: Tuple[int, int]):\n z = self.vq.codebook[indices]\n recon = self.decoder(z)\n recon = recon.astype(jnp.float32)\n recon = nn.sigmoid(recon)\n recon = recon.astype(self.dtype)\n return unpatchify(recon, self.patch_size, *video_hw)\n",python,tab +363,2372167,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +364,2372748,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1695,0,"dyna_ffn_dim",shellscript,content +365,2373876,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1707,0,"=",shellscript,content +366,2373878,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1708,0,"",shellscript,selection_keyboard +367,2375072,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1630,0,"",shellscript,selection_mouse +368,2375187,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1629,4,"1024",shellscript,selection_mouse +369,2376856,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1629,4,"4",shellscript,content +370,2376857,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1630,0,"",shellscript,selection_keyboard +371,2376914,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1630,0,"0",shellscript,content +372,2376915,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1631,0,"",shellscript,selection_keyboard +373,2377976,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1631,0,"4",shellscript,content +374,2377977,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1632,0,"",shellscript,selection_keyboard +375,2377977,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1632,0,"8",shellscript,content +376,2377977,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1633,0,"",shellscript,selection_keyboard +377,2379040,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1631,2,"",shellscript,content +378,2379472,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1629,2,"1024",shellscript,content +379,2380969,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1708,0,"",shellscript,selection_mouse +380,2387584,"TERMINAL",0,0,"bash",,terminal_focus 
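The TokenizerVQVAE module viewed above patchifies each frame before encoding and decodes back to in_dim * patch_size**2 channels per patch. A sketch of that shape bookkeeping (the 64x64 frame size here is illustrative, not read from the recorded configs):

H, W, C, P = 64, 64, 3, 16                # frame height/width, channels, patch size
tokens_per_frame = (H // P) * (W // P)    # N patches produced by patchify
decoder_out_dim = C * P ** 2              # matches self.in_dim * self.patch_size**2
print(tokens_per_frame, decoder_out_dim)  # 16 768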
+381,2388700,"TERMINAL",0,0,"python",,terminal_command +382,2388736,"TERMINAL",0,0,"]633;E;2025-07-23 17:57:11 python;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;C",,terminal_output +383,2389059,"TERMINAL",0,0,"Python 3.10.18 (main, Jun 4 2025, 17:36:27) [Clang 20.1.4 ] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n",,terminal_output +384,2389575,"TERMINAL",0,0,">>> ",,terminal_output +385,2390490,"TERMINAL",0,0,"[?25l10[?25h[?25l0[?25h",,terminal_output +386,2391892,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +387,2392056,"TERMINAL",0,0,"[?25l4[?25h",,terminal_output +388,2392317,"TERMINAL",0,0,"[?25l*[?25h",,terminal_output +389,2392581,"TERMINAL",0,0,"[?25l4[?25h",,terminal_output +390,2392985,"TERMINAL",0,0,"\r\n4096\r\n>>> ",,terminal_output +391,2394690,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1709,0,"",shellscript,selection_mouse +392,2395147,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1708,0,"",shellscript,selection_mouse +393,2395931,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1708,0,"4",shellscript,content +394,2395932,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1709,0,"",shellscript,selection_keyboard +395,2396519,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1709,0,"0",shellscript,content +396,2396520,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1710,0,"",shellscript,selection_keyboard +397,2396860,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1710,0,"9",shellscript,content +398,2396861,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1711,0,"",shellscript,selection_keyboard +399,2397231,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1711,0,"4",shellscript,content +400,2397232,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1712,0,"",shellscript,selection_keyboard +401,2398659,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1711,1,"",shellscript,content +402,2399152,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1711,0,"6",shellscript,content +403,2399153,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1712,0,"",shellscript,selection_keyboard +404,2400459,"TERMINAL",0,0,"^D\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/interactive]633;D;0",,terminal_output +405,2405707,"TERMINAL",0,0,"srun",,terminal_focus +406,2406902,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=00:15:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\r\n#SBATCH --job-name=train_dynamics_maskprob_fix_8_node\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource 
.venv/bin/activate\r\n\r\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\n# tokenizer with the new structure supporting larger ffn_dim\r\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\r\n\r\nenv | grep SLURM\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=12 \\r\n --init_lr=0 \\r\n --max_lr=8e-5 \\r\n --log_image_interval=1000 \\r\n --log \\r\n --log_checkpoint_interval=10 \\r\n --name=dynamics-maskprob-fix-dev-$slurm_job_id \\r\n --tags dynamics maskprob-fix \\r\n --entity instant-uv \\r\n --project jafar \\r\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\r\n --data_dir $array_records_dir \\r\n --dyna_dim=1024 \\r\n --dyna_num_blocks=16 \\r\n --dyna_num_heads=16 \\r\n --dyna_ffn_dim=4096\r\n\r\n",,terminal_output +407,2407031,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1952006\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1753283209\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1753286809\r\nSLURM_PMI2_SRUN_PORT=34677\r\nSLURM_CPUS_ON_NODE=6\r\nSLURM_JOB_CPUS_PER_NODE=6\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=5\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3371005\r\nSLURM_PTY_PORT=45451\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=45\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=5\r\nSLURM_NTASKS=1\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=252\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=33207\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NPROCS=1\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3371005\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_NTASKS_PER_NODE=1\r\nSLURM_STEP_LAUNCHER_PORT=33207\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +408,2407169,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +409,2411689,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +410,2413803,"TERMINAL",0,0,"2025-07-23 17:57:36.841110: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +411,2441618,"TERMINAL",0,0,"2025-07-23 17:58:04.629344: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +412,2460020,"TERMINAL",0,0,"2025-07-23 17:58:22.910346: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +413,2462981,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +414,2463710,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.19.11\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/wandb/run-20250723_175825-9qi5alo6\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run dynamics-maskprob-fix-dev-3371005\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/9qi5alo6\r\n",,terminal_output +415,2465972,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 40\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/interactive/3371005/000040/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 30\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/interactive/3371005/000030/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 20\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/interactive/3371005/000020/metrics/metrics not found.\r\n",,terminal_output +416,2501064,"TERMINAL",0,0,"WARNING:absl:Missing metrics for step 51000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/051000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 40000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/040000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 53000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/053000/metrics/metrics not 
found.\r\nWARNING:absl:Missing metrics for step 20000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/020000/metrics/metrics not found.\r\nWARNING:absl:Missing metrics for step 52000\r\nERROR:absl:File /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/052000/metrics/metrics not found.\r\n",,terminal_output +417,2503294,"TERMINAL",0,0,"Running on 1 devices.\r\nCounting all components: ['tokenizer', 'lam', 'dynamics']\r\nParameter counts:\r\n{'tokenizer': 33750256, 'lam': 17229792, 'dynamics': 271933440, 'total': 322913488}\r\n",,terminal_output +418,2507253,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. 
We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +419,2507351,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +420,2507470,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `param-dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/.venv/lib/python3.10/site-packages/tyro/_parsers.py:347: UserWarning: The field `dtype` is annotated with type ``, but the default value `` has type ``. We'll try to handle this gracefully, but it may cause unexpected behavior.\r\n warnings.warn(message)\r\n",,terminal_output +421,2532961,"TERMINAL",0,0,"2025-07-23 17:59:35.999155: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:59:36.000000: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. 
Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:59:36.000557: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:59:36.001477: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:59:36.001509: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n2025-07-23 17:59:36.002988: W external/xla/xla/service/gpu/autotuning/dot_search_space.cc:200] All configs were filtered out because none of them sufficiently match the hints. Maybe the hints set does not contain a good representative set of valid configs?Working around this by using the full hints set instead.\r\n",,terminal_output +422,2765611,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3371005.4 task 0: running\r\n",,terminal_output +423,2765815,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3371005.4\r\nsrun: forcing job termination\r\nProcess SpawnProcess-2:\r\nProcess SpawnProcess-3:\r\nProcess SpawnProcess-1:\r\nProcess SpawnProcess-6:\r\nProcess SpawnProcess-4:\r\nProcess SpawnProcess-8:\r\nProcess SpawnProcess-5:\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nProcess SpawnProcess-7:\r\nslurmstepd: error: *** STEP 3371005.4 ON hkn0402 CANCELLED AT 2025-07-23T18:03:28 ***\r\n",,terminal_output +424,2765963,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3371005.4\r\nsrun: job abort in progress\r\n",,terminal_output +425,2766200,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3371005.4\r\nsrun: job abort in progress\r\n]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +426,2799151,"TERMINAL",0,0,"bash",,terminal_focus +427,2799657,"TERMINAL",0,0,"srun",,terminal_focus +428,2800204,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +429,2800353,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +430,2800468,"TERMINAL",0,0,"[?25le[?25h[?25lu[?25h",,terminal_output +431,2800624,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +432,2800759,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0402.localdomain: Wed Jul 23 18:04:03 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3370822 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3370952 accelerat interact tum_cte0 R 1:14:38\t 2 hkn[0717,0735]3371005 dev_accel interact tum_cte0 R57:14\t 1 hkn0402",,terminal_output +433,2801767,"TERMINAL",0,0,"495",,terminal_output +434,2802785,"TERMINAL",0,0,"5406",,terminal_output +435,2803885,"TERMINAL",0,0,"617",,terminal_output +436,2804287,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",0,0,"",shellscript,tab +437,2804836,"TERMINAL",0,0,"728",,terminal_output 
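The throwaway REPL earlier above sanity-checks 1024 * 4 = 4096 before --dyna_ffn_dim=4096 is typed into the script: the FFN width is pinned at 4x the model width, the standard transformer ratio. The same check as a hypothetical helper (the function name is illustrative, not from the repo):

def ffn_dim_for(model_dim: int, ratio: int = 4) -> int:
    # FFN hidden width as a fixed multiple of the model width
    return model_dim * ratio

assert ffn_dim_for(1024) == 4096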
+438,2805418,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1713,0,"",shellscript,selection_mouse +439,2805614,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1706,7,"m=4096\n",shellscript,selection_mouse +440,2805627,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1703,10,"_dim=4096\n",shellscript,selection_mouse +441,2805641,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1701,12,"fn_dim=4096\n",shellscript,selection_mouse +442,2805664,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1699,14,"_ffn_dim=4096\n",shellscript,selection_mouse +443,2805683,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1671,42,"na_num_heads=16 \\n --dyna_ffn_dim=4096\n",shellscript,selection_mouse +444,2805698,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1670,43,"yna_num_heads=16 \\n --dyna_ffn_dim=4096\n",shellscript,selection_mouse +445,2805713,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1668,45,"-dyna_num_heads=16 \\n --dyna_ffn_dim=4096\n",shellscript,selection_mouse +446,2805731,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1667,46,"--dyna_num_heads=16 \\n --dyna_ffn_dim=4096\n",shellscript,selection_mouse +447,2805748,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1666,47," --dyna_num_heads=16 \\n --dyna_ffn_dim=4096\n",shellscript,selection_mouse +448,2805776,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1665,48," --dyna_num_heads=16 \\n --dyna_ffn_dim=4096\n",shellscript,selection_mouse +449,2805806,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1664,49," --dyna_num_heads=16 \\n --dyna_ffn_dim=4096\n",shellscript,selection_mouse +450,2805864,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1663,50," --dyna_num_heads=16 \\n --dyna_ffn_dim=4096\n",shellscript,selection_mouse +451,2805865,"TERMINAL",0,0,"839",,terminal_output +452,2806009,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1636,77," --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096\n",shellscript,selection_mouse +453,2806275,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch",1614,99," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096\n",shellscript,selection_mouse +454,2806863,"TERMINAL",0,0,"9420",,terminal_output +455,2807875,"TERMINAL",0,0,"1051",,terminal_output +456,2808899,"TERMINAL",0,0,"162",,terminal_output +457,2809157,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=8\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/maskgit-maskprob-fix/dynamics-cotraining/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskprob_fix_8_node\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., 
touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/maskgit-maskprob-fix/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# tokenizer with the new structure supporting larger ffn_dim\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_tokenizer_lr_sweep_1e-4_larger_ffn/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=384 \\n --init_lr=0 \\n --max_lr=8e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-maskprob-fix-8-node-$slurm_job_id \\n --tags dynamics maskprob-fix 8-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +458,2809924,"TERMINAL",0,0,"273",,terminal_output +459,2810231,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2345,0,"",shellscript,selection_mouse +460,2810247,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2344,0,"",shellscript,selection_command +461,2810924,"TERMINAL",0,0,"384",,terminal_output +462,2811632,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2260,0,"",shellscript,selection_mouse +463,2811647,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2259,0,"",shellscript,selection_command +464,2811975,"TERMINAL",0,0,"495",,terminal_output +465,2812931,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2260,0,"\n ",shellscript,content +466,2813018,"TERMINAL",0,0,"5517",,terminal_output +467,2813367,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2265,0," --dyna_dim=1024 \\n --dyna_num_blocks=16 \\n --dyna_num_heads=16 \\n --dyna_ffn_dim=4096\n",shellscript,content +468,2813981,"TERMINAL",0,0,"728",,terminal_output +469,2814986,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2363,0,"",shellscript,selection_mouse +470,2815002,"TERMINAL",0,0,"839",,terminal_output +471,2815440,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2363,0," ",shellscript,content +472,2815442,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2364,0,"",shellscript,selection_keyboard +473,2815977,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2364,0,"\",shellscript,content +474,2815978,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2365,0,"",shellscript,selection_keyboard +475,2816047,"TERMINAL",0,0,"9430",,terminal_output +476,2816397,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2364,0,"",shellscript,selection_command 
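The 8-node sbatch above ends by backgrounding the srun step (child_pid=$!) and blocking in wait; bash only runs a trap handler promptly while the shell sits in the wait builtin, whereas a foreground child would delay the USR1 requeue handler until it exits. An illustrative Python analogue of that trap-and-wait structure (not code from the repo):

import signal
import subprocess
import sys

def on_usr1(signum, frame):
    # stand-in for the requeue_job handler in the sbatch script
    print("checkpoint and requeue here")
    sys.exit(0)

signal.signal(signal.SIGUSR1, on_usr1)
child = subprocess.Popen(["sleep", "1000"])  # stand-in for the srun training step
child.wait()  # the handler can still run while we block here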
+477,2816554,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2366,0,"",shellscript,selection_command +478,2816887,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2366,1,"",shellscript,content +479,2816893,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2370,0,"",shellscript,selection_command +480,2816957,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2344,0,"",shellscript,selection_command +481,2817041,"TERMINAL",0,0,"2051",,terminal_output +482,2817116,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2318,0,"",shellscript,selection_command +483,2817270,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2291,0,"",shellscript,selection_command +484,2817400,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2265,0,"",shellscript,selection_command +485,2817723,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2269,0,"",shellscript,selection_command +486,2817989,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2269,1,"",shellscript,content +487,2818056,"TERMINAL",0,0,"162",,terminal_output +488,2819104,"TERMINAL",0,0,"273",,terminal_output +489,2819490,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2269,0,"-",shellscript,content +490,2819491,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2270,0,"",shellscript,selection_keyboard +491,2820060,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2269,0,"",shellscript,selection_command +492,2820112,"TERMINAL",0,0,"384",,terminal_output +493,2820192,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2265,4,"",shellscript,content +494,2820816,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2264,0,"",shellscript,selection_command +495,2821161,"TERMINAL",0,0,"495",,terminal_output +496,2821389,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2361,0,"",shellscript,selection_mouse +497,2821419,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",2360,0,"",shellscript,selection_command +498,2822213,"TERMINAL",0,0,"55:006",,terminal_output +499,2823142,"TERMINAL",0,0,"617",,terminal_output +500,2824175,"TERMINAL",0,0,"728",,terminal_output +501,2825286,"TERMINAL",0,0,"839",,terminal_output +502,2826209,"TERMINAL",0,0,"9440",,terminal_output +503,2826708,"TERMINAL",0,0,"bash",,terminal_focus +504,2827230,"TERMINAL",0,0,"3051",,terminal_output +505,2828238,"TERMINAL",0,0,"162",,terminal_output +506,2828645,"TERMINAL",0,0,"srun",,terminal_focus +507,2829256,"TERMINAL",0,0,"273",,terminal_output +508,2830115,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +509,2831028,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +510,2831096,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +511,2831213,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +512,2831296,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +513,2831406,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +514,2831506,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0402.localdomain: Wed Jul 23 18:04:34 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3370822 accelerat train_dy tum_cte0 PD\t0:00\t 8 (Priority)3370952 accelerat interact tum_cte0 R 1:15:09\t 2 hkn[0717,0735]3371005 dev_accel interact tum_cte0 R57:45\t 1 hkn0402",,terminal_output 
+515,2832555,"TERMINAL",0,0,"5106",,terminal_output +516,2833585,"TERMINAL",0,0,"617",,terminal_output +517,2834336,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +518,2834725,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +519,2834859,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +520,2835049,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +521,2835127,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +522,2835271,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +523,2835390,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +524,2835473,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +525,2835581,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +526,2836222,"TERMINAL",0,0,"3370822",,terminal_output +527,2837000,"TERMINAL",0,0,"3370822\r\n[?2004l\r]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +528,2837879,"TERMINAL",0,0,"[?25lq[?25h",,terminal_output +529,2837988,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +530,2838078,"TERMINAL",0,0,"[?25le[?25h[?25lu[?25h",,terminal_output +531,2838214,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +532,2838308,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0402.localdomain: Wed Jul 23 18:04:41 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3370952 accelerat interact tum_cte0 R 1:15:16\t 2 hkn[0717,0735]3371005 dev_accel interact tum_cte0 R57:52\t 1 hkn0402",,terminal_output +533,2839419,"TERMINAL",0,0,"273",,terminal_output +534,2840449,"TERMINAL",0,0,"384",,terminal_output +535,2841350,"TERMINAL",0,0,"495",,terminal_output +536,2842363,"TERMINAL",0,0,"5206",,terminal_output +537,2843380,"TERMINAL",0,0,"617",,terminal_output +538,2844402,"TERMINAL",0,0,"728",,terminal_output +539,2845416,"TERMINAL",0,0,"839",,terminal_output +540,2846485,"TERMINAL",0,0,"948:00",,terminal_output +541,2847455,"TERMINAL",0,0,"5051",,terminal_output +542,2848532,"TERMINAL",0,0,"162",,terminal_output +543,2849554,"TERMINAL",0,0,"273",,terminal_output +544,2850575,"TERMINAL",0,0,"384",,terminal_output +545,2851532,"TERMINAL",0,0,"495",,terminal_output +546,2852559,"TERMINAL",0,0,"5306",,terminal_output +547,2853658,"TERMINAL",0,0,"617",,terminal_output +548,2854595,"TERMINAL",0,0,"728",,terminal_output +549,2855605,"TERMINAL",0,0,"839",,terminal_output +550,2856724,"TERMINAL",0,0,"9410",,terminal_output +551,2857645,"TERMINAL",0,0,"5:0051",,terminal_output +552,2858668,"TERMINAL",0,0,"162",,terminal_output +553,2859679,"TERMINAL",0,0,"273",,terminal_output +554,2860719,"TERMINAL",0,0,"384",,terminal_output +555,2861710,"TERMINAL",0,0,"495",,terminal_output +556,2862724,"TERMINAL",0,0,"5406",,terminal_output +557,2863745,"TERMINAL",0,0,"617",,terminal_output +558,2864816,"TERMINAL",0,0,"728",,terminal_output +559,2865795,"TERMINAL",0,0,"839",,terminal_output +560,2866859,"TERMINAL",0,0,"9420",,terminal_output +561,2867847,"TERMINAL",0,0,"1051",,terminal_output +562,2868848,"TERMINAL",0,0,"162",,terminal_output +563,2869937,"TERMINAL",0,0,"273",,terminal_output +564,2870883,"TERMINAL",0,0,"384",,terminal_output +565,2871912,"TERMINAL",0,0,"495",,terminal_output +566,2872919,"TERMINAL",0,0,"5506",,terminal_output +567,2873941,"TERMINAL",0,0,"617",,terminal_output +568,2874957,"TERMINAL",0,0,"728",,terminal_output +569,2875982,"TERMINAL",0,0,"8430",,terminal_output +570,2876993,"TERMINAL",0,0,"2051",,terminal_output +571,2878019,"TERMINAL",0,0,"162",,terminal_output +572,2879029,"TERMINAL",0,0,"273",,terminal_output 
+573,2880066,"TERMINAL",0,0,"384",,terminal_output +574,2881096,"TERMINAL",0,0,"495",,terminal_output +575,2882081,"TERMINAL",0,0,"56:006",,terminal_output +576,2883148,"TERMINAL",0,0,"617",,terminal_output +577,2884172,"TERMINAL",0,0,"728",,terminal_output +578,2885136,"TERMINAL",0,0,"839",,terminal_output +579,2886224,"TERMINAL",0,0,"9440",,terminal_output +580,2887170,"TERMINAL",0,0,"3051",,terminal_output +581,2888261,"TERMINAL",0,0,"162",,terminal_output +582,2889204,"TERMINAL",0,0,"273",,terminal_output +583,2890221,"TERMINAL",0,0,"384",,terminal_output +584,2891338,"TERMINAL",0,0,"495",,terminal_output +585,2892360,"TERMINAL",0,0,"5106",,terminal_output +586,2893279,"TERMINAL",0,0,"617",,terminal_output +587,2894287,"TERMINAL",0,0,"728",,terminal_output +588,2895327,"TERMINAL",0,0,"839",,terminal_output +589,2896326,"TERMINAL",0,0,"9450",,terminal_output +590,2897379,"TERMINAL",0,0,"4051",,terminal_output +591,2898401,"TERMINAL",0,0,"162",,terminal_output +592,2899423,"TERMINAL",0,0,"273",,terminal_output +593,2900449,"TERMINAL",0,0,"384",,terminal_output +594,2901412,"TERMINAL",0,0,"495",,terminal_output +595,2902494,"TERMINAL",0,0,"5206",,terminal_output +596,2903525,"TERMINAL",0,0,"617",,terminal_output +597,2904466,"TERMINAL",0,0,"728",,terminal_output +598,2905570,"TERMINAL",0,0,"839",,terminal_output +599,2906692,"TERMINAL",0,0,"949:00",,terminal_output +600,2907613,"TERMINAL",0,0,"5051",,terminal_output +601,2908637,"TERMINAL",0,0,"162",,terminal_output +602,2909662,"TERMINAL",0,0,"273",,terminal_output +603,2910702,"TERMINAL",0,0,"384",,terminal_output +604,2911713,"TERMINAL",0,0,"495",,terminal_output +605,2912710,"TERMINAL",0,0,"5306",,terminal_output +606,2913763,"TERMINAL",0,0,"617",,terminal_output +607,2914751,"TERMINAL",0,0,"728",,terminal_output +608,2915763,"TERMINAL",0,0,"839",,terminal_output +609,2916783,"TERMINAL",0,0,"9410",,terminal_output +610,2917807,"TERMINAL",0,0,"6:0051",,terminal_output +611,2918815,"TERMINAL",0,0,"162",,terminal_output +612,2919840,"TERMINAL",0,0,"273",,terminal_output +613,2920937,"TERMINAL",0,0,"384",,terminal_output +614,2921869,"TERMINAL",0,0,"495",,terminal_output +615,2922901,"TERMINAL",0,0,"5406",,terminal_output +616,2923909,"TERMINAL",0,0,"617",,terminal_output +617,2924923,"TERMINAL",0,0,"728",,terminal_output +618,2926001,"TERMINAL",0,0,"839",,terminal_output +619,2926954,"TERMINAL",0,0,"9420",,terminal_output +620,2927968,"TERMINAL",0,0,"1062",,terminal_output +621,2928993,"TERMINAL",0,0,"273",,terminal_output +622,2930016,"TERMINAL",0,0,"384",,terminal_output +623,2931031,"TERMINAL",0,0,"495",,terminal_output +624,2932088,"TERMINAL",0,0,"5506",,terminal_output +625,2933062,"TERMINAL",0,0,"617",,terminal_output +626,2934079,"TERMINAL",0,0,"728",,terminal_output +627,2935097,"TERMINAL",0,0,"839",,terminal_output +628,2936115,"TERMINAL",0,0,"9430",,terminal_output +629,2937138,"TERMINAL",0,0,"2051",,terminal_output +630,2938151,"TERMINAL",0,0,"162",,terminal_output +631,2939169,"TERMINAL",0,0,"273",,terminal_output +632,2940196,"TERMINAL",0,0,"384",,terminal_output +633,2941221,"TERMINAL",0,0,"495",,terminal_output +634,2942232,"TERMINAL",0,0,"57:006",,terminal_output +635,2943240,"TERMINAL",0,0,"617",,terminal_output +636,2944277,"TERMINAL",0,0,"728",,terminal_output +637,2945397,"TERMINAL",0,0,"839",,terminal_output +638,2946307,"TERMINAL",0,0,"9440",,terminal_output +639,2947354,"TERMINAL",0,0,"3051",,terminal_output +640,2948509,"TERMINAL",0,0,"162",,terminal_output +641,2949514,"TERMINAL",0,0,"273",,terminal_output 
+642,2950419,"TERMINAL",0,0,"384",,terminal_output +643,2951447,"TERMINAL",0,0,"495",,terminal_output +644,2952475,"TERMINAL",0,0,"5106",,terminal_output +645,2953430,"TERMINAL",0,0,"617",,terminal_output +646,2954519,"TERMINAL",0,0,"728",,terminal_output +647,2955539,"TERMINAL",0,0,"839",,terminal_output +648,2956573,"TERMINAL",0,0,"9450",,terminal_output +649,2957597,"TERMINAL",0,0,"4051",,terminal_output +650,2958515,"TERMINAL",0,0,"162",,terminal_output +651,2959640,"TERMINAL",0,0,"273",,terminal_output +652,2960672,"TERMINAL",0,0,"384",,terminal_output +653,2961578,"TERMINAL",0,0,"495",,terminal_output +654,2962597,"TERMINAL",0,0,"5206",,terminal_output +655,2963640,"TERMINAL",0,0,"617",,terminal_output +656,2964628,"TERMINAL",0,0,"728",,terminal_output +657,2965683,"TERMINAL",0,0,"839",,terminal_output +658,2966707,"TERMINAL",0,0,"941:00:00",,terminal_output +659,2967728,"TERMINAL",0,0,"5051",,terminal_output +660,2968751,"TERMINAL",0,0,"162",,terminal_output +661,2969721,"TERMINAL",0,0,"273",,terminal_output +662,2970849,"TERMINAL",0,0,"384",,terminal_output +663,2971028,"TERMINAL",0,0,"salloc: Job 3371005 has exceeded its time limit and its allocation has been revoked.\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3371005.interactive ON hkn0402 CANCELLED AT 2025-07-23T18:06:54 DUE TO TIME LIMIT ***\n[?1049l\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0402 jafar]$ ",,terminal_output +664,2992610,"TERMINAL",0,0,"bash",,terminal_focus +665,2993545,"TERMINAL",0,0,"srun",,terminal_focus +666,2994157,"TERMINAL",0,0,"bash",,terminal_focus +667,3000760,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Killed\r\n]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;137",,terminal_output +668,3003060,"TERMINAL",0,0,"bash",,terminal_focus +669,3004348,"TERMINAL",0,0,"queue",,terminal_command +670,3004419,"TERMINAL",0,0,"]633;E;2025-07-23 18:07:27 queue;469e5d18-6e08-4909-a55e-e2644c9abc02]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1990.localdomain: Wed Jul 23 18:07:27 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3370952 accelerat interact tum_cte0 R 1:18:02\t 2 hkn[0717,0735]3371005 dev_accel interact tum_cte0 CG 1:00:05\t 1 hkn0402",,terminal_output +671,3005467,"TERMINAL",0,0,"83",,terminal_output +672,3006535,"TERMINAL",0,0,"94",,terminal_output +673,3007456,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jafar]633;D;0",,terminal_output +674,3008904,"TERMINAL",0,0,"bash",,terminal_focus +675,3008907,"slurm/jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch",0,0,"",shellscript,tab +676,3011082,"TERMINAL",0,0,"srun",,terminal_focus +677,3013207,"TERMINAL",0,0,"bash",,terminal_focus +678,3014576,"TERMINAL",0,0,"srun",,terminal_focus +679,3105851,"slurm/utils/mihir/model_sizes.md",0,0,"# Genie 1 - Model Sizes and their configs\n\n## Tokenizer model: sizes\n\ndefault: \n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| default | 512 | 8 | 8 | 32 | 1024 | ~38M |\n\n### scaling up \n#### (not tested yet - TODO @mihir)\n\n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. 
Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| L1 | 768 | 12 | 12 | 64 | 2048 | ~80M |\n| L2 | 1024 | 12 | 16 | 128 | 2048 | ~140M |\n| L3 | 1152 | 16 | 16 | 128 | 4096 | ~200M |\n| L4 | 896 | 16 | 14 | 96 | 4096 | ~120M |\n| L5 | 1536 | 12 | 24 | 256 | 2048 | ~190M |\n\n\n### tiny models\n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| S1 | 128 | 2 | 2 | 8 | 128 | ~0.6M |\n| S2 | 192 | 2 | 3 | 16 | 128 | ~1.3M |\n| S3 | 256 | 3 | 4 | 16 | 256 | ~3.6M |\n| S4 | 320 | 4 | 5 | 24 | 256 | ~7.4M |\n| S5 | 384 | 4 | 6 | 32 | 512 | ~10M |\n\n\n## Latent Action model: sizes\ndefault: \n| Model | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|-------|-----------|------------|-----------|------------|-------------|-------------|\n| default | 512 | 8 | 8 | 32 | 6 | ~39M |\n\n### scaling up \n#### (not tested yet - TODO @mihir)\n\n| Name | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|--------------|-----------|------------|-----------|------------|-------------|-------------|\n| XL | 1024 | 12 | 16 | 64 | 12 | ~200M |\n| L | 896 | 12 | 14 | 48 | 8 | ~150M |\n| M+ | 768 | 10 | 12 | 48 | 8 | ~100M |\n| M | 640 | 10 | 10 | 32 | 8 | ~70M |\n| Base+ | 512 | 12 | 8 | 32 | 8 | ~55M |\n\n\n### tiny models\n| Name | model_dim | num_blocks | num_heads | latent_dim | num_latents | Est. Params |\n|--------------|-----------|------------|-----------|------------|-------------|-------------|\n| XS | 128 | 2 | 2 | 8 | 4 | ~0.9M |\n| S | 160 | 2 | 2 | 8 | 4 | ~1.3M |\n| S+ | 192 | 3 | 3 | 8 | 4 | ~2.4M |\n| M- | 256 | 4 | 4 | 16 | 6 | ~5.4M |\n| M | 320 | 6 | 4 | 16 | 6 | ~12M |\n\n\n## Dynamics model: sizes \n\n| Config | dyna_dim | dyna_num_blocks | dyna_num_heads | Approx. Params |\n|--------|----------|-----------------|---------------|----------------|\n| 1 | 512 | 12 | 8 | ~36M |\n| 2 | 768 | 16 | 12 | ~110M |\n| 3 | 1024 | 16 | 16 | ~180M |\n| 4 | 1024 | 24 | 16 | ~270M |\n| 5 | 1536 | 24 | 24 | ~500M |\n\n\n### tiny models\n| Config | dyna_dim | dyna_num_blocks | dyna_num_heads | Approx. 
Params |\n|--------|----------|-----------------|---------------|----------------|\n| A | 128 | 2 | 4 | ~1.5M |\n| B | 256 | 2 | 4 | ~3.5M |\n| C | 256 | 4 | 4 | ~6M |\n| D | 384 | 4 | 6 | ~12M |\n| E | 512 | 4 | 8 | ~18M |",markdown,tab +680,3117871,"slurm/utils/mihir/dataset_sizes.md",0,0,"",markdown,tab +681,3137762,"TERMINAL",0,0,"bash",,terminal_focus +682,3141897,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +683,3142583,"TERMINAL",0,0,"ls",,terminal_command +684,3142597,"TERMINAL",0,0,"]633;E;2025-07-23 18:09:45 ls;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;Ccheckpoints count_items.sh data data_new huggingface logs possibly_corrupt_files_in_this_workspace.txt scripts\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared]633;D;0",,terminal_output +685,3148663,"TERMINAL",0,0,"cd data_new/",,terminal_command +686,3149054,"TERMINAL",0,0,"ls",,terminal_command +687,3149090,"TERMINAL",0,0,"]633;E;2025-07-23 18:09:52 ls;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;C",,terminal_output +688,3149184,"TERMINAL",0,0,"open_ai_minecraft open_ai_minecraft_arrayrecords_chunked open_ai_minecraft_arrayrecords_chunked_train_val_split open_ai_minecraft_arrayrecords_w_actions_dev open_ai_minecraft_tfrecord_uncurrupted\r\nopen_ai_minecraft_actions open_ai_minecraft_arrayrecords_chunked_compressed open_ai_minecraft_arrayrecords_chunked_uncompressed open_ai_minecraft_arrayrecords_w_actions_login open_ai_minecraft_tfrecord_uncurrupted-2\r\nopen_ai_minecraft_arrayrecords open_ai_minecraft_arrayrecords_chunked_subset open_ai_minecraft_arrayrecords_w_actions open_ai_minecraft_npy\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new]633;D;0",,terminal_output +689,3159373,"TERMINAL",0,0,"cd open_ai_minecraft_arrayrecords_chunked",,terminal_command +690,3159868,"TERMINAL",0,0,"ls",,terminal_command +691,3159918,"TERMINAL",0,0,"]633;E;2025-07-23 18:10:02 ls;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;C",,terminal_output +692,3161028,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked]633;D;130",,terminal_output +693,3161188,"TERMINAL",0,0,"^C",,terminal_command +694,3161225,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;C]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked]633;D",,terminal_output +695,3173693,"TERMINAL",0,0,"ls -l | wc -l",,terminal_command +696,3173732,"TERMINAL",0,0,"]633;E;2025-07-23 18:10:16 ls -l | wc -l;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;C",,terminal_output +697,3208661,"TERMINAL",0,0,"89396\r\n]0;tum_cte0515@hkn1990:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked]633;D;0",,terminal_output +698,3223276,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics_cotraining/sqrt_lr/train_dynamics_16_nodes.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=16\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-batchsize-scaling/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/big-runs/dynamics-cotraining-batchsize-scaling/%x_%j.log\n#SBATCH --job-name=train_tokenizer_batch_size_scaling_16_node\n\n# Log the sbatch 
script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_new/open_ai_minecraft_arrayrecords_chunked\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dynamics-cotraining-batchsize-scaling/$job_name\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/train_tokenizer_batch_size_scaling_16_node/3321526/tokenizer_22000/\n\nenv | grep SLURM\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --restore_ckpt \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=768 \\n --min_lr=0 \\n --max_lr=4.2e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=dynamics-batch-size-scaling-16-node-$slurm_job_id \\n --tags dynamics batch-size-scaling 16-node \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir\n",shellscript,tab +699,3224645,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics_cotraining/sqrt_lr/train_dynamics_16_nodes.sbatch",1283,0,"",shellscript,selection_mouse +700,3224787,"slurm/jobs/mihir/horeka/batchsize_scaling/dynamics_cotraining/sqrt_lr/train_dynamics_16_nodes.sbatch",1281,3,"768",shellscript,selection_mouse +701,3227196,"slurm/utils/mihir/dataset_sizes.md",0,0,"",markdown,tab +702,3342418,"slurm/utils/mihir/dataset_sizes.md",0,0,"| Batch Size | Steps per Epoch |\n|------------|----------------|\n| 48 | 234,375 |\n| 96 | 117,187 |\n| 192 | 58,593 |\n| 384 | 29,297 |\n| 768 | 14,648 |",markdown,content +703,3343482,"slurm/utils/mihir/dataset_sizes.md",192,0,"",markdown,selection_command +704,3343698,"slurm/utils/mihir/dataset_sizes.md",160,0,"",markdown,selection_command +705,3343877,"slurm/utils/mihir/dataset_sizes.md",128,0,"",markdown,selection_command +706,3344006,"slurm/utils/mihir/dataset_sizes.md",96,0,"",markdown,selection_command +707,3344165,"slurm/utils/mihir/dataset_sizes.md",64,0,"",markdown,selection_command +708,3344306,"slurm/utils/mihir/dataset_sizes.md",31,0,"",markdown,selection_command +709,3346452,"slurm/utils/mihir/dataset_sizes.md",0,0,"\n",markdown,content +710,3346745,"slurm/utils/mihir/dataset_sizes.md",0,0,"",markdown,selection_command +711,3347466,"slurm/utils/mihir/dataset_sizes.md",0,0,"\n",markdown,content +712,3347683,"slurm/utils/mihir/dataset_sizes.md",0,0,"",markdown,selection_command +713,3348878,"slurm/utils/mihir/dataset_sizes.md",0,0,"#",markdown,content +714,3348880,"slurm/utils/mihir/dataset_sizes.md",1,0,"",markdown,selection_keyboard +715,3349020,"slurm/utils/mihir/dataset_sizes.md",1,0,"#",markdown,content +716,3349021,"slurm/utils/mihir/dataset_sizes.md",2,0,"",markdown,selection_keyboard +717,3349754,"slurm/utils/mihir/dataset_sizes.md",1,1,"",markdown,content +718,3349897,"slurm/utils/mihir/dataset_sizes.md",0,1,"",markdown,content +719,3351492,"slurm/utils/mihir/dataset_sizes.md",226,0,"",markdown,selection_mouse +720,3351638,"slurm/utils/mihir/dataset_sizes.md",160,66," |\n| 384 | 29,297 |\n| 768 | 14,648 |",markdown,selection_mouse +721,3351655,"slurm/utils/mihir/dataset_sizes.md",0,226,"\n\n| Batch Size | Steps per Epoch |\n|------------|----------------|\n| 48 | 234,375 |\n| 96 | 117,187 |\n| 192 | 58,593 |\n| 384 | 29,297 |\n| 768 | 14,648 |",markdown,selection_mouse 
+722,3353038,"slurm/utils/mihir/dataset_sizes.md",0,0,"",markdown,selection_command +723,3378262,"slurm/utils/mihir/dataset_sizes.md",0,0,"\n",markdown,content +724,3378391,"slurm/utils/mihir/dataset_sizes.md",1,0,"# Genie 1 - Dataset Size, Batch Size, and Steps per Epoch",markdown,content +725,3378770,"slurm/utils/mihir/dataset_sizes.md",60,0,"## Dataset: OpenAI Minecraft ArrayRecords (15M videos)\n",markdown,content +726,3378771,"slurm/utils/mihir/dataset_sizes.md",115,0,"\n",markdown,content +727,3379230,"slurm/utils/mihir/dataset_sizes.md",116,0,"- **Total frames:** ~3.6B (assuming 16 frames per video)\n",markdown,content +728,3379345,"slurm/utils/mihir/dataset_sizes.md",173,0,"- **Total videos:** ~15,000,000\n",markdown,content +729,3379346,"slurm/utils/mihir/dataset_sizes.md",205,0,"\n",markdown,content +730,3379367,"slurm/utils/mihir/dataset_sizes.md",206,0,"---\n",markdown,content +731,3379368,"slurm/utils/mihir/dataset_sizes.md",210,0,"\n",markdown,content +732,3379465,"slurm/utils/mihir/dataset_sizes.md",211,0,"## Batch Size vs. Steps per Epoch\n",markdown,content +733,3379466,"slurm/utils/mihir/dataset_sizes.md",245,0,"\n",markdown,content +734,3379784,"slurm/utils/mihir/dataset_sizes.md",246,0,"| Batch Size | Videos per Step | Steps per Epoch | Epoch Size (Videos) |\n",markdown,content +735,3379916,"slurm/utils/mihir/dataset_sizes.md",319,0,"|------------|----------------|-----------------|---------------------|\n",markdown,content +736,3380070,"slurm/utils/mihir/dataset_sizes.md",391,0,"| 48 | 48 | 234,375 | 11,250,000 |\n",markdown,content +737,3380220,"slurm/utils/mihir/dataset_sizes.md",463,0,"| 96 | 96 | 117,187 | 11,250,000 |\n",markdown,content +738,3380355,"slurm/utils/mihir/dataset_sizes.md",535,0,"| 192 | 192 | 58,593 | 11,250,000 |\n",markdown,content +739,3380479,"slurm/utils/mihir/dataset_sizes.md",607,0,"| 384 | 384 | 29,297 | 11,250,000 |\n",markdown,content +740,3380653,"slurm/utils/mihir/dataset_sizes.md",679,0,"| 768 | 768 | 14,648 | 11,250,000 |\n",markdown,content +741,3380719,"slurm/utils/mihir/dataset_sizes.md",751,0,"\n",markdown,content +742,3381273,"slurm/utils/mihir/dataset_sizes.md",752,0,"- **Note:** ""Epoch Size"" here is set to 11.25M videos, which is the typical training split size (75% of 15M).\n",markdown,content +743,3381591,"slurm/utils/mihir/dataset_sizes.md",862,0,"- For full-dataset epoch, multiply steps by 1.33 (to reach 15M).\n",markdown,content +744,3381592,"slurm/utils/mihir/dataset_sizes.md",927,0,"\n",markdown,content +745,3381605,"slurm/utils/mihir/dataset_sizes.md",928,0,"---\n",markdown,content +746,3381606,"slurm/utils/mihir/dataset_sizes.md",932,0,"\n",markdown,content +747,3381734,"slurm/utils/mihir/dataset_sizes.md",933,0,"## Example: Total Steps for 1 Epoch\n",markdown,content +748,3381760,"slurm/utils/mihir/dataset_sizes.md",969,0,"\n",markdown,content +749,3381946,"slurm/utils/mihir/dataset_sizes.md",970,0,"- **Steps per Epoch = Epoch Size / Batch Size**\n",markdown,content +750,3382233,"slurm/utils/mihir/dataset_sizes.md",1018,0,"- For 11,250,000 videos:\n",markdown,content +751,3382411,"slurm/utils/mihir/dataset_sizes.md",1043,0," - Batch 48: 11,250,000 / 48 = 234,375 steps\n",markdown,content +752,3382479,"slurm/utils/mihir/dataset_sizes.md",1091,0," - Batch 96: 11,250,000 / 96 = 117,187 steps\n",markdown,content +753,3382823,"slurm/utils/mihir/dataset_sizes.md",1139,0," - Batch 192: 11,250,000 / 192 = 58,593 steps\n",markdown,content +754,3382899,"slurm/utils/mihir/dataset_sizes.md",1188,0," - Batch 384: 11,250,000 / 384 
= 29,297 steps\n",markdown,content +755,3383101,"slurm/utils/mihir/dataset_sizes.md",1237,0," - Batch 768: 11,250,000 / 768 = 14,648 steps\n",markdown,content +756,3383102,"slurm/utils/mihir/dataset_sizes.md",1286,0,"\n",markdown,content +757,3383132,"slurm/utils/mihir/dataset_sizes.md",1287,0,"---\n",markdown,content +758,3383133,"slurm/utils/mihir/dataset_sizes.md",1291,0,"\n",markdown,content +759,3383150,"slurm/utils/mihir/dataset_sizes.md",1292,0,"## Reference\n",markdown,content +760,3383151,"slurm/utils/mihir/dataset_sizes.md",1305,0,"\n",markdown,content +761,3383487,"slurm/utils/mihir/dataset_sizes.md",1306,0,"- For more details on model scaling, see [model_sizes.md](model_sizes.md).\n",markdown,content +762,3383488,"slurm/utils/mihir/dataset_sizes.md",1380,225,"",markdown,content +763,3396195,"slurm/utils/mihir/dataset_sizes.md",933,447,"| Batch Size | Steps per Epoch |\n|------------|----------------|\n| 48 | 234,375 |\n| 96 | 117,187 |\n| 192 | 58,593 |\n| 384 | 29,297 |\n| 768 | 14,648 |",markdown,content +764,3396195,"slurm/utils/mihir/dataset_sizes.md",1,931,"",markdown,content +765,3407798,"slurm/utils/mihir/dataset_sizes.md",0,0,"",markdown,selection_command +766,3411813,"slurm/utils/mihir/dataset_sizes.md",0,0,"\n",markdown,content +767,3412493,"slurm/utils/mihir/dataset_sizes.md",1,0,"# Genie 1 - Dataset Size, Batch Size, and Steps per Epoch",markdown,content +768,3412627,"slurm/utils/mihir/dataset_sizes.md",60,0,"## Dataset Details\n",markdown,content +769,3412648,"slurm/utils/mihir/dataset_sizes.md",79,0,"\n",markdown,content +770,3412690,"slurm/utils/mihir/dataset_sizes.md",80,0,"- **Total video hours:** 5,000 hours\n",markdown,content +771,3413091,"slurm/utils/mihir/dataset_sizes.md",117,0,"- **Frame rate:** 10 frames per second (fps)\n",markdown,content +772,3413122,"slurm/utils/mihir/dataset_sizes.md",162,0,"- **Total frames:** \n",markdown,content +773,3413452,"slurm/utils/mihir/dataset_sizes.md",184,0," 5,000 hours × 60 min/hour × 60 sec/min × 10 fps = **180,000,000 frames**\n",markdown,content +774,3413617,"slurm/utils/mihir/dataset_sizes.md",259,0,"- **Sample length:** 16 frames per sample \n",markdown,content +775,3413789,"slurm/utils/mihir/dataset_sizes.md",303,0," (Each training sample is a 16-frame video clip)\n",markdown,content +776,3413819,"slurm/utils/mihir/dataset_sizes.md",353,0,"\n",markdown,content +777,3413857,"slurm/utils/mihir/dataset_sizes.md",354,0,"- **Total number of samples:** \n",markdown,content +778,3414430,"slurm/utils/mihir/dataset_sizes.md",387,0," 180,000,000 frames ÷ 16 frames/sample = **11,250,000 samples**\n",markdown,content +779,3414431,"slurm/utils/mihir/dataset_sizes.md",452,0,"\n",markdown,content +780,3414477,"slurm/utils/mihir/dataset_sizes.md",453,0,"---\n",markdown,content +781,3414479,"slurm/utils/mihir/dataset_sizes.md",457,0,"\n",markdown,content +782,3414480,"slurm/utils/mihir/dataset_sizes.md",458,0,"## Steps per Epoch for Different Batch Sizes\n",markdown,content +783,3414480,"slurm/utils/mihir/dataset_sizes.md",503,0,"\n",markdown,content +784,3414508,"slurm/utils/mihir/dataset_sizes.md",504,0,"| Batch Size | Steps per Epoch | Formula Used |\n",markdown,content +785,3414532,"slurm/utils/mihir/dataset_sizes.md",575,0,"|------------|----------------|-------------------------------------|\n",markdown,content +786,3414793,"slurm/utils/mihir/dataset_sizes.md",645,0,"| 48 | 234,375 | 11,250,000 ÷ 48 |\n",markdown,content +787,3414905,"slurm/utils/mihir/dataset_sizes.md",715,0,"| 96 | 117,188 | 11,250,000 ÷ 96 
|\n",markdown,content +788,3415104,"slurm/utils/mihir/dataset_sizes.md",785,0,"| 192 | 58,594 | 11,250,000 ÷ 192 |\n",markdown,content +789,3415341,"slurm/utils/mihir/dataset_sizes.md",855,0,"| 384 | 29,297 | 11,250,000 ÷ 384 |\n",markdown,content +790,3415568,"slurm/utils/mihir/dataset_sizes.md",925,0,"| 768 | 14,649 | 11,250,000 ÷ 768 |\n",markdown,content +791,3415580,"slurm/utils/mihir/dataset_sizes.md",995,0,"\n",markdown,content +792,3415759,"slurm/utils/mihir/dataset_sizes.md",996,0,"*All values are rounded up to the nearest integer.*\n",markdown,content +793,3415760,"slurm/utils/mihir/dataset_sizes.md",1048,0,"\n",markdown,content +794,3415848,"slurm/utils/mihir/dataset_sizes.md",1049,0,"---\n",markdown,content +795,3415849,"slurm/utils/mihir/dataset_sizes.md",1053,0,"\n",markdown,content +796,3415919,"slurm/utils/mihir/dataset_sizes.md",1054,0,"### Calculation Reference\n",markdown,content +797,3416034,"slurm/utils/mihir/dataset_sizes.md",1080,0,"\n",markdown,content +798,3416259,"slurm/utils/mihir/dataset_sizes.md",1081,0,"- **Steps per Epoch** = Total Samples ÷ Batch Size\n",markdown,content +799,3416276,"slurm/utils/mihir/dataset_sizes.md",1132,0,"\n",markdown,content +800,3416422,"slurm/utils/mihir/dataset_sizes.md",1133,0,"For example, with batch size 48: \n",markdown,content +801,3416665,"slurm/utils/mihir/dataset_sizes.md",1168,0,"11,250,000 ÷ 48 = 234,375 steps per epoch\n",markdown,content +802,3416666,"slurm/utils/mihir/dataset_sizes.md",1210,0,"\n",markdown,content +803,3416750,"slurm/utils/mihir/dataset_sizes.md",1211,0,"---\n",markdown,content +804,3416751,"slurm/utils/mihir/dataset_sizes.md",1215,0,"\n",markdown,content +805,3416751,"slurm/utils/mihir/dataset_sizes.md",1216,0,"## Quick Reference Table\n",markdown,content +806,3416752,"slurm/utils/mihir/dataset_sizes.md",1241,0,"\n",markdown,content +807,3416803,"slurm/utils/mihir/dataset_sizes.md",1339,0,"| 96 | 117,188 |\n",markdown,content +808,3416876,"slurm/utils/mihir/dataset_sizes.md",1371,0,"| 192 | 58,594 |\n",markdown,content +809,3416941,"slurm/utils/mihir/dataset_sizes.md",1403,64,"",markdown,content +810,3417086,"slurm/utils/mihir/dataset_sizes.md",1435,0,"| 768 | 14,649 |\n",markdown,content +811,3417087,"slurm/utils/mihir/dataset_sizes.md",1466,32,"",markdown,content +812,3433077,"slurm/utils/mihir/dataset_sizes.md",1466,0,"",markdown,selection_mouse +813,3433242,"slurm/utils/mihir/dataset_sizes.md",1428,38," |\n| 768 | 14,649 |",markdown,selection_mouse +814,3433260,"slurm/utils/mihir/dataset_sizes.md",1357,109,",188 |\n| 192 | 58,594 |\n| 384 | 29,297 |\n| 768 | 14,649 |",markdown,selection_mouse +815,3433324,"slurm/utils/mihir/dataset_sizes.md",1320,146,"| 234,375 |\n| 96 | 117,188 |\n| 192 | 58,594 |\n| 384 | 29,297 |\n| 768 | 14,649 |",markdown,selection_mouse +816,3433325,"slurm/utils/mihir/dataset_sizes.md",1282,184,"------|----------------|\n| 48 | 234,375 |\n| 96 | 117,188 |\n| 192 | 58,594 |\n| 384 | 29,297 |\n| 768 | 14,649 |",markdown,selection_mouse +817,3433325,"slurm/utils/mihir/dataset_sizes.md",1247,219,"ch Size | Steps per Epoch |\n|------------|----------------|\n| 48 | 234,375 |\n| 96 | 117,188 |\n| 192 | 58,594 |\n| 384 | 29,297 |\n| 768 | 14,649 |",markdown,selection_mouse +818,3433327,"slurm/utils/mihir/dataset_sizes.md",1246,220,"tch Size | Steps per Epoch |\n|------------|----------------|\n| 48 | 234,375 |\n| 96 | 117,188 |\n| 192 | 58,594 |\n| 384 | 29,297 |\n| 768 | 14,649 |",markdown,selection_mouse +819,3433341,"slurm/utils/mihir/dataset_sizes.md",1244,222,"Batch Size | 
Steps per Epoch |\n|------------|----------------|\n| 48 | 234,375 |\n| 96 | 117,188 |\n| 192 | 58,594 |\n| 384 | 29,297 |\n| 768 | 14,649 |",markdown,selection_mouse +820,3433411,"slurm/utils/mihir/dataset_sizes.md",1241,225,"\n| Batch Size | Steps per Epoch |\n|------------|----------------|\n| 48 | 234,375 |\n| 96 | 117,188 |\n| 192 | 58,594 |\n| 384 | 29,297 |\n| 768 | 14,649 |",markdown,selection_mouse +821,3433528,"slurm/utils/mihir/dataset_sizes.md",1216,250,"## Quick Reference Table\n\n| Batch Size | Steps per Epoch |\n|------------|----------------|\n| 48 | 234,375 |\n| 96 | 117,188 |\n| 192 | 58,594 |\n| 384 | 29,297 |\n| 768 | 14,649 |",markdown,selection_mouse +822,3435674,"slurm/utils/mihir/dataset_sizes.md",1231,0,"",markdown,selection_mouse +823,3435973,"slurm/utils/mihir/dataset_sizes.md",1210,0,"",markdown,selection_mouse +824,3443431,"slurm/utils/mihir/dataset_sizes.md",150,0,"",markdown,selection_mouse +825,3443970,"slurm/utils/mihir/dataset_sizes.md",137,0,"",markdown,selection_mouse +826,3462526,"TERMINAL",0,0,"python",,terminal_command +827,3462569,"TERMINAL",0,0,"]633;E;2025-07-23 18:15:05 python;3d19b9bf-55ca-4a1a-89a2-d3473be0b612]633;C",,terminal_output +828,3462887,"TERMINAL",0,0,"Python 3.10.18 (main, Jun 4 2025, 17:36:27) [Clang 20.1.4 ] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n",,terminal_output +829,3463690,"TERMINAL",0,0,">>> ",,terminal_output +830,3468799,"TERMINAL",0,0,"h",,terminal_output +831,3469021,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +832,3469397,"TERMINAL",0,0,"[?25lo\r[?25h",,terminal_output +833,3469509,"TERMINAL",0,0,"[?25lh\r>>> [?25h",,terminal_output +834,3469598,"TERMINAL",0,0,"",,terminal_output +835,3469708,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +836,3469837,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +837,3470085,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +838,3470303,"TERMINAL",0,0,"[?25la[?25h[?25ll[?25h",,terminal_output +839,3470697,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +840,3471288,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +841,3471455,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +842,3471531,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +843,3471609,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +844,3471814,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +845,3472393,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +846,3472628,"TERMINAL",0,0,"[?25l=[?25h",,terminal_output +847,3473437,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +848,3473700,"TERMINAL",0,0,"[?25l5[?25h[?25l0[?25h",,terminal_output +849,3473894,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +850,3474022,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +851,3474857,"TERMINAL",0,0,"\r\n>>> ",,terminal_output +852,3477951,"TERMINAL",0,0,"f",,terminal_output +853,3478175,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +854,3478244,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +855,3478540,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +856,3478622,"TERMINAL",0,0,"[?25l=[?25h",,terminal_output +857,3478781,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +858,3479145,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +859,3479209,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +860,3479536,"TERMINAL",0,0,"\r\n>>> ",,terminal_output +861,3483098,"TERMINAL",0,0,"[?25lto[?25h",,terminal_output +862,3483148,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +863,3483341,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +864,3483556,"TERMINAL",0,0,"[?25la[?25h",,terminal_output 
+865,3483611,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +866,3483918,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +867,3484057,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +868,3484186,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +869,3484391,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +870,3484495,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +871,3484620,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +872,3484736,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +873,3484960,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +874,3485142,"TERMINAL",0,0,"[?25l=[?25h",,terminal_output +875,3485212,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +876,3487875,"TERMINAL",0,0,"[?25lt[?25h[?25lo[?25h",,terminal_output +877,3488018,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +878,3488148,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +879,3488262,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +880,3488592,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +881,3489050,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +882,3489247,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +883,3489369,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +884,3489434,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +885,3489597,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +886,3489838,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +887,3490290,"TERMINAL",0,0,"[?25l*[?25h",,terminal_output +888,3490359,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +889,3490905,"TERMINAL",0,0,"[?25l6[?25h",,terminal_output +890,3491264,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +891,3492477,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +892,3492688,"TERMINAL",0,0,"[?25l*[?25h",,terminal_output +893,3493711,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +894,3493899,"TERMINAL",0,0,"[?25l6[?25h",,terminal_output +895,3494421,"TERMINAL",0,0,"[?25l0[?25h",,terminal_output +896,3495284,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +897,3495431,"TERMINAL",0,0,"[?25l*[?25h",,terminal_output +898,3495487,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +899,3496503,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +900,3496618,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +901,3496750,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +902,3498681,"TERMINAL",0,0,"\r\n>>> ",,terminal_output +903,3499425,"TERMINAL",0,0,"t",,terminal_output +904,3500202,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +905,3500274,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +906,3500474,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +907,3500569,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +908,3500893,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +909,3501113,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +910,3501182,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +911,3501391,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +912,3501457,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +913,3501583,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +914,3501743,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +915,3501811,"TERMINAL",0,0,"\r\n180000000\r\n>>> ",,terminal_output +916,3505662,"TERMINAL",0,0,"s",,terminal_output +917,3505962,"TERMINAL",0,0,"[?25la[?25h[?25lm[?25h",,terminal_output +918,3506210,"TERMINAL",0,0,"[?25lp[?25h[?25ll[?25h",,terminal_output +919,3506282,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +920,3507187,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +921,3507446,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +922,3507531,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +923,3507626,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +924,3507834,"TERMINAL",0,0,"[?25l 
[?25h",,terminal_output +925,3508028,"TERMINAL",0,0,"[?25l=[?25h",,terminal_output +926,3508078,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +927,3508526,"TERMINAL",0,0,"[?25l1[?25h[?25l6[?25h",,terminal_output +928,3510048,"TERMINAL",0,0,"\r\n>>> ",,terminal_output +929,3510693,"TERMINAL",0,0,"n",,terminal_output +930,3510898,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +931,3511051,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +932,3511412,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +933,3511693,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +934,3511939,"TERMINAL",0,0,"[?25la[?25h[?25lm[?25h",,terminal_output +935,3512169,"TERMINAL",0,0,"[?25lp[?25h[?25ll[?25h",,terminal_output +936,3512249,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +937,3512461,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +938,3512745,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +939,3512910,"TERMINAL",0,0,"[?25l=[?25h",,terminal_output +940,3513063,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +941,3514476,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +942,3514542,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +943,3514748,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +944,3514984,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +945,3515050,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +946,3515592,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +947,3515902,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +948,3516061,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +949,3516216,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +950,3516284,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +951,3516444,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +952,3516610,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +953,3517107,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +954,3517679,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +955,3518783,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +956,3519063,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +957,3521195,"TERMINAL",0,0,"[?25l \r[?25h",,terminal_output +958,3521391,"TERMINAL",0,0,"[?25l/\r[?25h",,terminal_output +959,3522034,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +960,3522145,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +961,3522391,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +962,3522457,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +963,3522663,"TERMINAL",0,0,"[?25lp[?25h[?25ll[?25h",,terminal_output +964,3522797,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +965,3523546,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +966,3523688,"TERMINAL",0,0,"[?25ll\r[?25h",,terminal_output +967,3523855,"TERMINAL",0,0,"[?25lp\r[?25h",,terminal_output +968,3523922,"TERMINAL",0,0,"[?25lm\r[?25h",,terminal_output +969,3524099,"TERMINAL",0,0,"[?25la\r[?25h",,terminal_output +970,3524230,"TERMINAL",0,0,"[?25ls\r[?25h",,terminal_output +971,3524426,"TERMINAL",0,0,"[?25l \r[?25h",,terminal_output +972,3525120,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +973,3525424,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +974,3525601,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +975,3525781,"TERMINAL",0,0,"[?25la[?25h[?25lm[?25h",,terminal_output +976,3526027,"TERMINAL",0,0,"[?25lp[?25h[?25ll[?25h",,terminal_output +977,3526092,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +978,3526452,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +979,3526739,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +980,3526889,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +981,3526957,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +982,3527898,"TERMINAL",0,0,"\r\n>>> ",,terminal_output 
+983,3528522,"TERMINAL",0,0,"n",,terminal_output +984,3528635,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +985,3528830,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +986,3529117,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +987,3529349,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +988,3529538,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +989,3529606,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +990,3529806,"TERMINAL",0,0,"[?25lp[?25h[?25ll[?25h",,terminal_output +991,3529870,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +992,3530095,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +993,3530319,"TERMINAL",0,0,"\r\n11250000\r\n>>> ",,terminal_output +994,3551389,"TERMINAL",0,0,"n",,terminal_output +995,3551501,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +996,3551715,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +997,3552203,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +998,3552478,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +999,3552588,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1000,3552749,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1001,3552884,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +1002,3553210,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1003,3556360,"TERMINAL",0,0,"[?25ls\r[?25h",,terminal_output +1004,3556539,"TERMINAL",0,0,"[?25lp\r[?25h",,terminal_output +1005,3556719,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +1006,3556784,"TERMINAL",0,0,"[?25lt\r[?25h",,terminal_output +1007,3556979,"TERMINAL",0,0,"[?25ls\r[?25h",,terminal_output +1008,3557115,"TERMINAL",0,0,"[?25l_\r[?25h",,terminal_output +1009,3557180,"TERMINAL",0,0,"[?25lm\r[?25h",,terminal_output +1010,3557350,"TERMINAL",0,0,"[?25lu\r[?25h",,terminal_output +1011,3557467,"TERMINAL",0,0,"[?25ln\r>>> [?25h",,terminal_output +1012,3557901,"TERMINAL",0,0,"[?25lfo[?25h",,terminal_output +1013,3558076,"TERMINAL",0,0,"[?25lr[?25h[?25l [?25h",,terminal_output +1014,3558693,"TERMINAL",0,0,"[?25l \r[?25h",,terminal_output +1015,3558824,"TERMINAL",0,0,"[?25lr\r[?25h",,terminal_output +1016,3558938,"TERMINAL",0,0,"[?25lo\r[?25h",,terminal_output +1017,3559009,"TERMINAL",0,0,"[?25lf\r>>> [?25h",,terminal_output +1018,3559527,"TERMINAL",0,0,"[?25lb[?25h[?25la[?25h",,terminal_output +1019,3559657,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1020,3559815,"TERMINAL",0,0,"[?25lc[?25h[?25lh[?25h",,terminal_output +1021,3560214,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +1022,3560392,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1023,3560464,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +1024,3560690,"TERMINAL",0,0,"[?25lz[?25h",,terminal_output +1025,3560753,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1026,3560949,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1027,3561155,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1028,3561335,"TERMINAL",0,0,"[?25l=[?25h",,terminal_output +1029,3561404,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1030,3561847,"TERMINAL",0,0,"[?25l[[?25h",,terminal_output +1031,3564795,"TERMINAL",0,0,"[?25l48[?25h",,terminal_output +1032,3565195,"TERMINAL",0,0,"[?25l,[?25h[?25l [?25h",,terminal_output +1033,3566578,"TERMINAL",0,0,"[?25l9[?25h",,terminal_output +1034,3566893,"TERMINAL",0,0,"[?25l6[?25h",,terminal_output +1035,3567330,"TERMINAL",0,0,"[?25l,[?25h[?25l [?25h",,terminal_output +1036,3568283,"TERMINAL",0,0,"[?25l1[?25h",,terminal_output +1037,3568988,"TERMINAL",0,0,"[?25l9[?25h",,terminal_output +1038,3569234,"TERMINAL",0,0,"[?25l2[?25h",,terminal_output +1039,3569473,"TERMINAL",0,0,"[?25l,[?25h",,terminal_output +1040,3569523,"TERMINAL",0,0,"[?25l [?25h",,terminal_output 
+1041,3571755,"TERMINAL",0,0,"[?25l3[?25h",,terminal_output +1042,3572091,"TERMINAL",0,0,"[?25l8[?25h",,terminal_output +1043,3572560,"TERMINAL",0,0,"[?25l4[?25h",,terminal_output +1044,3574164,"TERMINAL",0,0,"[?25l,[?25h",,terminal_output +1045,3574229,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1046,3574711,"TERMINAL",0,0,"[?25l7[?25h",,terminal_output +1047,3577781,"TERMINAL",0,0,"[?25l6[?25h",,terminal_output +1048,3577888,"TERMINAL",0,0,"[?25l8[?25h",,terminal_output +1049,3578533,"TERMINAL",0,0,"[?25l][?25h",,terminal_output +1050,3580922,"TERMINAL",0,0,"\r\n>>> ",,terminal_output +1051,3585262,"TERMINAL",0,0,"s",,terminal_output +1052,3585373,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +1053,3585438,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1054,3585821,"TERMINAL",0,0,"[?25le\r[?25h",,terminal_output +1055,3585928,"TERMINAL",0,0,"[?25lp\r[?25h",,terminal_output +1056,3585993,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +1057,3586252,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1058,3586317,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +1059,3586482,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1060,3587120,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +1061,3587409,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +1062,3587473,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1063,3587614,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +1064,3587871,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +1065,3588079,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1066,3588145,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +1067,3588616,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +1068,3588740,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +1069,3588803,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +1070,3588968,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1071,3589218,"TERMINAL",0,0,"[?25l=[?25h",,terminal_output +1072,3589283,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1073,3589700,"TERMINAL",0,0,"[?25l[[?25h",,terminal_output +1074,3596237,"TERMINAL",0,0,"[?25ln[?25h",,terminal_output +1075,3596366,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +1076,3596620,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +1077,3597239,"TERMINAL",0,0,"[?25l_[?25h",,terminal_output +1078,3597517,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1079,3597719,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +1080,3598257,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +1081,3598528,"TERMINAL",0,0,"[?25lp[?25h[?25ll[?25h",,terminal_output +1082,3598655,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +1083,3598849,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +1084,3599300,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +1085,3599863,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output +1086,3599932,"TERMINAL",0,0,"[?25l/[?25h",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-75084265-2b4d-4d6f-86ae-c0ab064f62491758992086879-2025_09_27-18.55.18.494/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-75084265-2b4d-4d6f-86ae-c0ab064f62491758992086879-2025_09_27-18.55.18.494/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..844702363b7f43a114ca7cd01f44a652568499a8 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-75084265-2b4d-4d6f-86ae-c0ab064f62491758992086879-2025_09_27-18.55.18.494/source.csv @@ -0,0 +1,822 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,426,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"6:55:18 PM [info] 
Activating crowd-code\n6:55:18 PM [info] Recording started\n6:55:18 PM [info] Initializing git provider using file system watchers...\n6:55:18 PM [info] Git repository found\n6:55:18 PM [info] Git provider initialized successfully\n6:55:18 PM [info] Initial git state: [object Object]\n",Log,tab +3,8895,"TERMINAL",0,0,"queue",,terminal_command +4,8973,"TERMINAL",0,0,"]633;C",,terminal_output +5,9016,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Sat Sep 27 18:55:27 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)",,terminal_output +6,10033,"TERMINAL",0,0,"8[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +7,12053,"TERMINAL",0,0,"idling",,terminal_command +8,12130,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Sat Sep 27 18:55:30 2025Partition dev_cpuonly: 12 nodes idle\rPartition cpuonly: 283 nodes idle\rPartition dev_accelerated:\t 3 nodes idle\rPartition accelerated: 67 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 2 nodes idle\rPartition accelerated-h200:\t 2 nodes idle",,terminal_output +9,13148,"TERMINAL",0,0,"1",,terminal_output +10,13303,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +11,17909,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +12,17993,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3526712\r\n",,terminal_output +13,18120,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +14,20901,"TERMINAL",0,0,"bash",,terminal_focus +15,21552,"TERMINAL",0,0,"salloc",,terminal_focus +16,22905,"TERMINAL",0,0,"s",,terminal_output +17,23161,"TERMINAL",0,0,"o",,terminal_output +18,23399,"TERMINAL",0,0,"u",,terminal_output +19,23515,"TERMINAL",0,0,"r",,terminal_output +20,23749,"TERMINAL",0,0,"c",,terminal_output +21,23871,"TERMINAL",0,0,"e",,terminal_output +22,23968,"TERMINAL",0,0," ",,terminal_output +23,24130,"TERMINAL",0,0,".",,terminal_output +24,24252,"TERMINAL",0,0,"v",,terminal_output +25,24416,"TERMINAL",0,0,"\t",,terminal_output +26,24919,"TERMINAL",0,0,"b",,terminal_output +27,25210,"TERMINAL",0,0,"i",,terminal_output +28,25363,"TERMINAL",0,0,"n",,terminal_output +29,25694,"TERMINAL",0,0,"\t",,terminal_output +30,26144,"TERMINAL",0,0,"ac",,terminal_output +31,26633,"TERMINAL",0,0,"\t",,terminal_output +32,27519,"TERMINAL",0,0,"\r\n",,terminal_output +33,31307,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\n\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/train_dyn_default_gt_actions_breakout_longer/3519698\n\n# 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/train_dyn_default_gt_actions_breakout_longer_smaller_lr/3519530\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python jasmine/sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=12 \\n --start_frame=4 \\n --image_height=10 \\n --image_width=10 \\n --dyna_type=maskgit \\n --lam_patch_size=4 \\n --no-print-action-indices \\n --use_gt_actions \\n --output_dir ""gifs/50k-w-noise/gt-actions""",shellscript,tab +34,32072,"TERMINAL",0,0,"bash",,terminal_focus +35,40642,"jasmine/sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n print_action_indices: bool = True\n output_dir: str = ""gifs/""\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n noise_level: float = 0.0\n noise_buckets: int = 10\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n max_noise_level=0.0,\n noise_buckets=args.noise_buckets,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n 
use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.ModelAndOptimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n\n gt = gt_video.clip(0, 1)[:, args.start_frame :]\n recon = recon_video_BSHWC.clip(0, 1)[:, args.start_frame :]\n\n ssim_vmap = jax.vmap(pix.ssim, in_axes=(0, 0))\n psnr_vmap = jax.vmap(pix.psnr, in_axes=(0, 0))\n ssim = ssim_vmap(gt, recon)\n psnr = psnr_vmap(gt, recon)\n per_frame_ssim = ssim.mean(0)\n per_frame_psnr = psnr.mean(0)\n avg_ssim = ssim.mean()\n avg_psnr = 
psnr.mean()\n\n print(""Per-frame SSIM:\n"", per_frame_ssim)\n print(""Per-frame PSNR:\n"", per_frame_psnr)\n\n print(f""SSIM: {avg_ssim}"")\n print(f""PSNR: {avg_psnr}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B = batch[""videos""].shape[0]\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(\n batch[""actions""][:, :-1], (B, args.seq_len - 1, 1)\n )\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n if args.print_action_indices:\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n\n os.makedirs(args.output_dir, exist_ok=True)\n imgs[0].save(\n os.path.join(args.output_dir, f""generation_{time.time()}.gif""),\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +36,45144,"TERMINAL",0,0,"salloc: Nodes hkn0706 are ready for job\r\n",,terminal_output +37,45278,"TERMINAL",0,0,"source .v\tbin\tac\t\r\n",,terminal_output +38,45933,"TERMINAL",0,0,"]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h[tum_cte0515@hkn0706 jasmine]$ source .venv/bin/activate\r\n[?2004l\r]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +39,52989,"jasmine/sample.py",4512,0,"",python,selection_mouse +40,53145,"jasmine/sample.py",4507,11,"latest_step",python,selection_mouse +41,54291,"jasmine/sample.py",4503,0,"",python,selection_mouse +42,54444,"jasmine/sample.py",4488,18,"checkpoint_manager",python,selection_mouse +43,100088,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +44,102240,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",771,0,"",shellscript,selection_mouse +45,103474,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",770,0,"",shellscript,selection_command +46,104227,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",770,144,"",shellscript,content +47,104239,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",769,0,"",shellscript,selection_command +48,104618,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",770,0,"",shellscript,selection_command +49,105229,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",770,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/train_dyn_default_breakout_longer/3520777",shellscript,content +50,109943,"TERMINAL",0,0,"srun",,terminal_focus +51,113989,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +52,113990,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1463,0,"",shellscript,selection_mouse +53,115013,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1465,0,"",shellscript,selection_mouse +54,115171,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1465,7,"actions",shellscript,selection_mouse 
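The restore flow recorded in jasmine/sample.py above is the standard Orbax + flax.nnx pattern: construct the model and a throwaway optimizer, derive an abstract (shape/dtype-only) state via nnx.eval_shape, restore the checkpoint into that abstract tree, and write the result back with nnx.update. Below is a minimal sketch of the same pattern; MyModel, the adamw hyper-parameters, and /path/to/ckpt are hypothetical stand-ins, not values from this recording.

    import flax.nnx as nnx
    import optax
    import orbax.checkpoint as ocp

    class MyModel(nnx.Module):  # hypothetical stand-in for Genie
        def __init__(self, rngs: nnx.Rngs):
            self.linear = nnx.Linear(4, 4, rngs=rngs)

    model = MyModel(nnx.Rngs(0))
    # Dummy optimizer: only its state *structure* matters for restoration.
    optimizer = nnx.ModelAndOptimizer(model, optax.adamw(1e-4))

    # Register a restore handler for the "model_state" item, as sample.py does.
    registry = ocp.handlers.DefaultCheckpointHandlerRegistry()
    registry.add(
        "model_state", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler
    )
    mngr = ocp.CheckpointManager("/path/to/ckpt", handler_registry=registry)

    # The abstract state carries only shapes and dtypes; its tree structure must
    # match the on-disk checkpoint exactly, otherwise Orbax raises the
    # structure-mismatch ValueError seen later in this recording.
    abstract_state = nnx.state(nnx.eval_shape(lambda: optimizer))
    restored = mngr.restore(
        mngr.latest_step(),
        args=ocp.args.Composite(model_state=ocp.args.PyTreeRestore(abstract_state)),
    )
    nnx.update(optimizer, restored["model_state"])

This is why flags that change the module tree (e.g. --use_gt_actions, which swaps an nnx.Embed action embedding in for the LatentActionModel) must match the configuration the checkpoint was trained with.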
+55,115421,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1464,8,"-actions",shellscript,selection_mouse +56,115545,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1462,10,"gt-actions",shellscript,selection_mouse +57,116614,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1462,10,"",shellscript,content +58,117233,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1461,0,"",shellscript,selection_command +59,117921,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1453,0,"",shellscript,selection_command +60,118090,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1452,0,"",shellscript,selection_command +61,118507,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1451,1,"",shellscript,content +62,120355,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1461,0,"",shellscript,selection_command +63,121027,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1461,0,"l",shellscript,content +64,121029,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1462,0,"",shellscript,selection_keyboard +65,121102,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1462,0,"a",shellscript,content +66,121103,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1463,0,"",shellscript,selection_keyboard +67,121212,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1463,0,"m",shellscript,content +68,121213,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1464,0,"",shellscript,selection_keyboard +69,121708,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1464,0,"-",shellscript,content +70,121709,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1465,0,"",shellscript,selection_keyboard +71,121808,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1465,0,"a",shellscript,content +72,121811,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1466,0,"",shellscript,selection_keyboard +73,122001,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1466,0,"c",shellscript,content +74,122003,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1467,0,"",shellscript,selection_keyboard +75,122183,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1467,0,"t",shellscript,content +76,122185,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1468,0,"",shellscript,selection_keyboard +77,122291,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1468,0,"i",shellscript,content +78,122293,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1469,0,"",shellscript,selection_keyboard +79,122338,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1469,0,"o",shellscript,content +80,122339,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1470,0,"",shellscript,selection_keyboard +81,122530,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1470,0,"n",shellscript,content +82,122532,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1471,0,"",shellscript,selection_keyboard +83,122550,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1471,0,"s",shellscript,content +84,122551,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1472,0,"",shellscript,selection_keyboard +85,135186,"TERMINAL",0,0,"s",,terminal_output +86,135239,"TERMINAL",0,0,"h",,terminal_output +87,135416,"TERMINAL",0,0," ",,terminal_output +88,135726,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +89,136399,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch\r\n[?2004l\r",,terminal_output +90,136565,"TERMINAL",0,0,"Sampling from checkpoint: 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/train_dyn_default_breakout_longer/3520777\r\n",,terminal_output
+91,136686,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output
+92,154446,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py"", line 156, in <module>\r\n restored = checkpoint_manager.restore(\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1673, in restore\r\n restored = self._checkpointer.restore(restore_directory, args=args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 571, in restore\r\n return super().restore(directory, *args, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 306, in restore\r\n restored = self._restore(directory, args=ckpt_args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 328, in _restore\r\n return self._handler.restore(directory, args=args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 857, in restore\r\n restored[item_name] = handler.restore(\r\n ^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/handlers/pytree_checkpoint_handler.py"", line 835, in restore\r\n return self._handler_impl.restore(directory, args=args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 949, in restore\r\n raise ValueError(\r\nValueError: User-provided restore item and on-disk value metadata tree structures do not match:\r\nmodel.action_embed:\r\n - Source: {'embedding': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=float32)}}\r\n - Target: MISSING\r\n\r\nmodel.lam:\r\n - Source: MISSING\r\n - Target: {... full LAM parameter tree (action_in, action_up, encoder blocks '0'-'3' with spatial/temporal attention, FFN, and norm ValueMetadataEntry records, input/output dense and norm layers, patch_up, vq codebook and dropout rng state) elided ...}\r\n\r\nopt_state.0.mu.action_embed:\r\n - Source: {'embedding': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=bfloat16)}}\r\n - Target: MISSING\r\n\r\nopt_state.0.mu.lam:\r\n - Source: MISSING\r\n - Target: {... same LAM parameter tree structure elided ...}\r\n\r\nopt_state.0.nu.action_embed:\r\n - Source: {'embedding': {'value': ShapeDtypeStruct(shape=(6, 32), dtype=float32)}}\r\n - Target: MISSING\r\n\r\nopt_state.0.nu.lam:\r\n - Source: MISSING\r\n - Target: {... same LAM parameter tree structure elided ...}\r\n",,terminal_output
+93,159471,"TERMINAL",0,0,"W0927 18:57:57.860337 1757038 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""Cancelling all calls"", grpc_status:14}\r\n",,terminal_output
+94,159961,"TERMINAL",0,0,"srun: error: hkn0706: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) 
[tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +95,167014,"TERMINAL",0,0,"g",,terminal_output +96,167194,"TERMINAL",0,0,"ti",,terminal_output +97,167335,"TERMINAL",0,0," ",,terminal_output +98,167484,"TERMINAL",0,0,"b",,terminal_output +99,167566,"TERMINAL",0,0,"r",,terminal_output +100,167852,"TERMINAL",0,0,"a",,terminal_output +101,167911,"TERMINAL",0,0,"n",,terminal_output +102,168202,"TERMINAL",0,0,"",,terminal_output +103,168330,"TERMINAL",0,0,"",,terminal_output +104,168438,"TERMINAL",0,0,"",,terminal_output +105,168551,"TERMINAL",0,0,"",,terminal_output +106,168726,"TERMINAL",0,0,"",,terminal_output +107,168822,"TERMINAL",0,0,"",,terminal_output +108,168953,"TERMINAL",0,0,"",,terminal_output +109,171434,"TERMINAL",0,0,"i",,terminal_output +110,171651,"TERMINAL",0,0,"t",,terminal_output +111,171821,"TERMINAL",0,0," ",,terminal_output +112,172199,"TERMINAL",0,0,"b",,terminal_output +113,172253,"TERMINAL",0,0,"r",,terminal_output +114,172511,"TERMINAL",0,0,"a",,terminal_output +115,172590,"TERMINAL",0,0,"n",,terminal_output +116,172700,"TERMINAL",0,0,"c",,terminal_output +117,172809,"TERMINAL",0,0,"h",,terminal_output +118,172998,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +119,173157,"TERMINAL",0,0," action-mapper\r\n* add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n metrics-logging-for-dynamics-model\r\n:",,terminal_output +120,174671,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +121,189920,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +122,189922,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1425,0,"",shellscript,selection_mouse +123,190378,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1424,0,"",shellscript,selection_command +124,191346,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1404,23,"",shellscript,content +125,191357,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1408,0,"",shellscript,selection_command +126,193174,"TERMINAL",0,0,"git branch",,terminal_output +127,193240,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +128,193327,"TERMINAL",0,0," action-mapper\r\n* add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n 
feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n metrics-logging-for-dynamics-model\r\n:",,terminal_output +129,195619,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +130,195837,"TERMINAL",0,0,"git branch",,terminal_output +131,196234,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +132,196983,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +133,197159,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/train_dyn_default_breakout_longer/3520777\r\n",,terminal_output +134,197233,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +135,206620,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +136,222514,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py"", line 217, in <module>\r\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py"", line 185, in _autoreg_sample\r\n generated_vid_BSHWC = _sampling_fn(genie, batch)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py"", line 172, in _sampling_fn\r\n frames, _ = model.sample(\r\n ^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py"", line 215, in sample\r\n return self.sample_maskgit(\r\n ^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py"", line 258, in sample_maskgit\r\n assert (\r\nAssertionError: Noise level must be smaller than max_noise_level.\r\n",,terminal_output +137,227798,"TERMINAL",0,0,"W0927 18:59:06.188182 1765666 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugstr job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""Cancelling all calls""}\r\n",,terminal_output +138,228352,"TERMINAL",0,0,"srun: error: hkn0706: task 0: Exited with exit code 
1\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +139,237427,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",0,0,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n max_noise_level: float,\n noise_buckets: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim = lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_actions = num_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.max_noise_level = max_noise_level\n self.noise_buckets = noise_buckets\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.use_gt_actions:\n self.action_embed = nnx.Embed(\n self.num_actions, self.latent_action_dim, rngs=rngs\n )\n self.lam = None\n else:\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_embed = None\n if self.dyna_type == ""maskgit"":\n self.dynamics = 
DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n latent_actions_BTm11L = None\n action_embeddings_BTm11L = None\n if self.use_gt_actions:\n assert self.action_embed is not None\n action_indices_E = None\n action_embeddings_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n action_embeddings_BTm11L = action_embeddings_BT1L[:, :-1]\n else:\n assert self.lam is not None\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=(\n action_embeddings_BTm11L\n if self.use_gt_actions\n else latent_actions_BTm11L\n ),\n )\n outputs[""mask_rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs)\n outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, 0.0, maskgit_steps, temperature, sample_argmax\n )\n elif self.dyna_type == ""causal"":\n return self.sample_causal(batch, seq_len, temperature, sample_argmax)\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n noise_level: float,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames 
are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n assert (\n noise_level < self.max_noise_level\n ), ""Noise level must me smaller than max_noise_level.""\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, self.num_patch_latents)\n )\n noise_level = jnp.array(noise_level)\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n max_noise_level=self.max_noise_level,\n noise_buckets=self.noise_buckets,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n # TODO mihir\n\n rng, _rng_noise = 
jax.random.split(rng)\n noise_level_111 = noise_level.reshape(1, 1, 1)\n noise_level_B11 = jnp.tile(noise_level_111, (B, 1, 1))\n noise_bucket_idx_B11 = jnp.floor(\n (noise_level_B11 / self.max_noise_level) * self.noise_buckets\n ).astype(jnp.int32)\n noise_level_embed_B11M = dynamics_maskgit.noise_level_embed(\n noise_bucket_idx_B11\n )\n noise_level_embed_BS1M = jnp.tile(noise_level_embed_B11M, (1, S, 1, 1))\n vid_embed_BSNM += jnp.expand_dims(noise_level_B11, -1)\n\n vid_embed_BSNp2M = jnp.concatenate(\n [act_embed_BS1M, noise_level_embed_BS1M, vid_embed_BSNM], axis=2\n )\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNp2V = (\n dynamics_maskgit.transformer(vid_embed_BSNp2M) / step_temp\n )\n final_logits_BSNV = final_logits_BSNp2V[:, :, 2:]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, 
timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_causal = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = 
dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n final_logits_BTNp1V = (\n dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp1V[:, step_t, step_n, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, step_n].set(final_logits_BV)\n\n new_carry = (rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n assert self.lam is not None\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.ModelAndOptimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> nnx.ModelAndOptimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n 
model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.ModelAndOptimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.ModelAndOptimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # Reinitialize the optimizer states\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +140,237431,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7809,0,"",python,selection_command +141,238981,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7883,0,"",python,selection_mouse 
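The genie.py buffer captured above drives its MaskGIT sampling loop with a cosine unmasking schedule (`unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))`, then `num_unmasked_tokens = round(N * (1.0 - unmasked_ratio))`). A minimal, self-contained sketch of how many tokens that schedule commits per step; the concrete `steps` and `N` values here are hypothetical, not taken from the recording:

```python
# Sketch of the cosine unmasking schedule used in sample_maskgit above.
# steps and N are hypothetical; N is the number of patch tokens per frame.
import math

steps, N = 25, 196

for step in range(steps):
    unmasked_ratio = math.cos(math.pi * (step + 1) / (steps * 2))
    num_unmasked = round(N * (1.0 - unmasked_ratio))
    print(f"step {step:2d}: keep {num_unmasked:3d}/{N} tokens unmasked")

# The ratio decays from cos(pi / (2 * steps)) (close to 1) down to
# cos(pi / 2) = 0, so early steps commit only the most confident tokens
# and the final step unmasks all N tokens of the frame.
```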
+142,239153,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7883,1,"0",python,selection_mouse +143,239375,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7882,2,".0",python,selection_mouse +144,239459,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7881,3,"0.0",python,selection_mouse +145,239940,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7881,0,"",python,selection_mouse +146,240390,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7881,2,"0.",python,selection_mouse +147,240439,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7881,3,"0.0",python,selection_mouse +148,240934,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7884,0,"",python,selection_mouse +149,241103,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7883,1,"0",python,selection_mouse +150,241384,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7882,2,".0",python,selection_mouse +151,241661,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7881,3,"0.0",python,selection_mouse +152,242153,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7881,0,"",python,selection_mouse +153,242734,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7881,1,"0",python,selection_mouse +154,242766,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7881,2,"0.",python,selection_mouse +155,242777,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7881,3,"0.0",python,selection_mouse +156,242860,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7881,4,"0.0,",python,selection_mouse +157,243273,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7885,0,"",python,selection_mouse +158,244269,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",7686,0,"",python,selection_mouse +159,256324,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9544,0,"",python,selection_mouse +160,256483,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9543,15,"max_noise_level",python,selection_mouse +161,257399,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9585,0,"",python,selection_mouse +162,257533,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9583,4,"must",python,selection_mouse +163,257765,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9582,5," must",python,selection_mouse +164,257807,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9577,10,"level must",python,selection_mouse +165,257914,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9576,11," level must",python,selection_mouse +166,257943,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,16,"Noise level must",python,selection_mouse +167,258397,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9574,0,"",python,selection_mouse 
+168,258400,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,5,"Noise",python,selection_mouse +169,258643,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,11,"Noise level",python,selection_mouse +170,258683,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9536,40,"< self.max_noise_level\n ), ""Noise",python,selection_mouse +171,258726,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9538,38,"self.max_noise_level\n ), ""Noise",python,selection_mouse +172,258727,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9542,34,".max_noise_level\n ), ""Noise",python,selection_mouse +173,258728,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9543,33,"max_noise_level\n ), ""Noise",python,selection_mouse +174,258909,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9558,18,"\n ), ""Noise",python,selection_mouse +175,258976,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,48,"Noise level must me smaller than max_noise_level",python,selection_mouse +176,259080,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,94,"Noise level must me smaller than max_noise_level.""\n # --- Encode videos and actions ---",python,selection_mouse +177,259447,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,48,"Noise level must me smaller than max_noise_level",python,selection_mouse +178,259870,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9617,0,"",python,selection_mouse +179,259871,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9604,15,"max_noise_level",python,selection_mouse +180,260106,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9558,61,"\n ), ""Noise level must me smaller than max_noise_level",python,selection_mouse +181,260163,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9604,15,"max_noise_level",python,selection_mouse +182,260185,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9603,16," max_noise_level",python,selection_mouse +183,260186,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9599,20,"than max_noise_level",python,selection_mouse +184,260204,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9591,28,"smaller than max_noise_level",python,selection_mouse +185,260292,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9590,29," smaller than max_noise_level",python,selection_mouse +186,260319,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9588,31,"me smaller than max_noise_level",python,selection_mouse +187,260359,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9587,32," me smaller than max_noise_level",python,selection_mouse +188,260400,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9583,36,"must me smaller than max_noise_level",python,selection_mouse +189,260444,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9582,37," must me smaller than 
max_noise_level",python,selection_mouse +190,260488,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9577,42,"level must me smaller than max_noise_level",python,selection_mouse +191,260647,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9576,43," level must me smaller than max_noise_level",python,selection_mouse +192,260677,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,48,"Noise level must me smaller than max_noise_level",python,selection_mouse +193,261164,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9574,0,"",python,selection_mouse +194,261164,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,5,"Noise",python,selection_mouse +195,261374,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,11,"Noise level",python,selection_mouse +196,261406,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,16,"Noise level must",python,selection_mouse +197,261452,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,17,"Noise level must ",python,selection_mouse +198,261453,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,27,"Noise level must me smaller",python,selection_mouse +199,261537,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,32,"Noise level must me smaller than",python,selection_mouse +200,261540,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,33,"Noise level must me smaller than ",python,selection_mouse +201,261561,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9571,48,"Noise level must me smaller than max_noise_level",python,selection_mouse +202,262064,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9613,0,"",python,selection_mouse +203,262812,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9533,0,"",python,selection_mouse +204,262957,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9524,11,"noise_level",python,selection_mouse +205,263638,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9551,0,"",python,selection_mouse +206,263799,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9543,15,"max_noise_level",python,selection_mouse +207,267889,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",9549,0,"",python,selection_mouse +208,267943,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",2288,0,"",python,selection_command +209,269077,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",2312,0,"",python,selection_mouse +210,269255,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py",2306,15,"max_noise_level",python,selection_mouse +211,273967,"jasmine/sample.py",0,0,"",python,tab +212,277755,"jasmine/sample.py",2790,0,"",python,selection_mouse +213,282998,"jasmine/sample.py",2791,0,"",python,selection_command +214,283803,"jasmine/sample.py",2790,0,"",python,selection_command +215,284019,"jasmine/sample.py",2789,0,"",python,selection_command 
+216,284622,"jasmine/sample.py",2788,1,"",python,content +217,284684,"jasmine/sample.py",2788,0,"1",python,content +218,284685,"jasmine/sample.py",2789,0,"",python,selection_keyboard +219,286211,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +220,287749,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +221,287914,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/train_dyn_default_breakout_longer/3520777\r\n",,terminal_output +222,288014,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +223,297714,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +224,337908,"TERMINAL",0,0,"Per-frame SSIM:\r\n [0.99937075 0.9982835 0.9993324 0.9990976 0.99867266 0.9990062\r\n 0.99910486 0.99886394 0.8909824 0.88095826 0.9455153 0.9742348 ]\r\nPer-frame PSNR:\r\n [52.800415 52.115204 52.03942 53.736916 52.424675 51.936172 52.852432\r\n 51.84021 40.20121 38.567196 38.773304 37.119797]\r\nSSIM: 0.9736186265945435\r\nPSNR: 47.867244720458984\r\n",,terminal_output +225,343576,"TERMINAL",0,0,"W0927 19:01:01.967914 1766958 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugstr job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""Cancelling all calls"", grpc_status:14}\r\n",,terminal_output +226,344218,"TERMINAL",0,0,"]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +227,805062,"TERMINAL",0,0,"g",,terminal_output +228,805205,"TERMINAL",0,0,"it",,terminal_output +229,805295,"TERMINAL",0,0," ",,terminal_output +230,805390,"TERMINAL",0,0,"p",,terminal_output +231,805545,"TERMINAL",0,0,"u",,terminal_output +232,805873,"TERMINAL",0,0,"l",,terminal_output +233,806089,"TERMINAL",0,0,"l",,terminal_output +234,806191,"TERMINAL",0,0,"l",,terminal_output +235,806346,"TERMINAL",0,0,"\r\n[?2004l\rgit: 'pulll' is not a git command. See 'git --help'.\r\n\r\nThe most similar command is\r\n\tpull\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +236,807257,"TERMINAL",0,0,"g",,terminal_output +237,807393,"TERMINAL",0,0,"i",,terminal_output +238,807530,"TERMINAL",0,0,"t",,terminal_output +239,807615,"TERMINAL",0,0," ",,terminal_output +240,807713,"TERMINAL",0,0,"p",,terminal_output +241,807897,"TERMINAL",0,0,"u",,terminal_output +242,808206,"TERMINAL",0,0,"l",,terminal_output +243,808362,"TERMINAL",0,0,"\r\n[?2004l\rgit: 'pul' is not a git command. 
See 'git --help'.\r\n\r\nThe most similar commands are\r\n\tpull\r\n\tpush\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +244,810214,"TERMINAL",0,0,"g",,terminal_output +245,810420,"TERMINAL",0,0,"it",,terminal_output +246,810512,"TERMINAL",0,0," ",,terminal_output +247,810671,"TERMINAL",0,0,"p",,terminal_output +248,810872,"TERMINAL",0,0,"u",,terminal_output +249,811126,"TERMINAL",0,0,"l",,terminal_output +250,811229,"TERMINAL",0,0,"l",,terminal_output +251,811526,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +252,814413,"TERMINAL",0,0,"remote: Enumerating objects: 102, done.\r\nremote: Counting objects: 100% (82/82), done.\r\nremote: Compressing objects: 100% (50/50), done.\r\nremote: Total 102 (delta 49), reused 58 (delta 32), pack-reused 20 (from 1)\r\nReceiving objects: 100% (102/102), 33.08 KiB | 445.00 KiB/s, done.\r\nResolving deltas: 100% (53/53), completed with 7 local objects.\r\n",,terminal_output +253,814682,"TERMINAL",0,0,"From github.com:p-doom/jasmine\r\n b59335a..718e533 atari-rainbow-agent-capture -> origin/atari-rainbow-agent-capture\r\n",,terminal_output +254,814758,"TERMINAL",0,0," 8993b1d..12cb0e5 main -> origin/main\r\n * [new branch] minatar-breakout-after-refactor -> origin/minatar-breakout-after-refactor\r\n * [new branch] seeding-data-generation -> origin/seeding-data-generation\r\n * [new branch] top-k-accuracy-calc -> origin/top-k-accuracy-calc\r\n * [new branch] z-loss -> origin/z-loss\r\n",,terminal_output +255,814868,"TERMINAL",0,0,"Already up to date.\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +256,877078,"TERMINAL",0,0,"gi",,terminal_output +257,877255,"TERMINAL",0,0,"t ",,terminal_output +258,877483,"TERMINAL",0,0,"ch",,terminal_output +259,877617,"TERMINAL",0,0,"e",,terminal_output +260,877756,"TERMINAL",0,0,"c",,terminal_output +261,878052,"TERMINAL",0,0,"k",,terminal_output +262,878187,"TERMINAL",0,0,"o",,terminal_output +263,878368,"TERMINAL",0,0,"ut",,terminal_output +264,878436,"TERMINAL",0,0," ",,terminal_output +265,878664,"TERMINAL",0,0,"-",,terminal_output +266,878900,"TERMINAL",0,0,"b",,terminal_output +267,879005,"TERMINAL",0,0," ",,terminal_output +268,879346,"TERMINAL",0,0,"""",,terminal_output +269,882094,"TERMINAL",0,0,"r",,terminal_output +270,882190,"TERMINAL",0,0,"u",,terminal_output +271,882242,"TERMINAL",0,0,"n",,terminal_output +272,883778,"TERMINAL",0,0,"-",,terminal_output +273,885048,"TERMINAL",0,0,"z",,terminal_output +274,885868,"TERMINAL",0,0,"l",,terminal_output +275,886048,"TERMINAL",0,0,"o",,terminal_output +276,886101,"TERMINAL",0,0,"s",,terminal_output +277,886283,"TERMINAL",0,0,"s",,terminal_output +278,886466,"TERMINAL",0,0,"-",,terminal_output +279,886648,"TERMINAL",0,0,"r",,terminal_output +280,886799,"TERMINAL",0,0,"un",,terminal_output +281,886918,"TERMINAL",0,0,"s",,terminal_output +282,887975,"TERMINAL",0,0,"",,terminal_output +283,888291,"TERMINAL",0,0,"",,terminal_output +284,888708,"TERMINAL",0,0,"",,terminal_output +285,889236,"TERMINAL",0,0,"zloss-runs",,terminal_output +286,889374,"TERMINAL",0,0,"zloss-runs",,terminal_output +287,889551,"TERMINAL",0,0,"zloss-runs",,terminal_output +288,889691,"TERMINAL",0,0,"zloss-runs",,terminal_output +289,890215,"TERMINAL",0,0,"",,terminal_output +290,890398,"TERMINAL",0,0,"",,terminal_output +291,890980,"TERMINAL",0,0,"""",,terminal_output +292,891254,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +293,891421,"TERMINAL",0,0,"Switched to a new branch 'zloss-runs'\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +294,893238,"TERMINAL",0,0,"g",,terminal_output +295,893374,"TERMINAL",0,0,"i",,terminal_output +296,893486,"TERMINAL",0,0,"t",,terminal_output +297,893548,"TERMINAL",0,0," ",,terminal_output +298,894000,"TERMINAL",0,0,"m",,terminal_output
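Earlier in this session, after the noise-level fix, the sampling run printed per-frame SSIM and PSNR for the generated video. The recording does not show the evaluation code itself; below is a minimal sketch of how such per-frame metrics could be computed with `dm_pix` (the `per_frame_metrics` helper is hypothetical; shapes follow the B/S/H/W/C dimension-key convention used in genie.py):

```python
# Sketch only: per-frame SSIM/PSNR over sampled videos, assuming dm_pix is
# available and pixel values lie in [0, 1]. Not the project's actual
# evaluation code.
import jax
import jax.numpy as jnp
import dm_pix as pix

def per_frame_metrics(gt_BSHWC: jax.Array, recon_BSHWC: jax.Array):
    B, S, H, W, C = gt_BSHWC.shape
    # dm_pix reduces over (H, W, C), so flatten batch and time first.
    gt_FHWC = gt_BSHWC.reshape(B * S, H, W, C)
    recon_FHWC = recon_BSHWC.reshape(B * S, H, W, C)
    ssim_S = pix.ssim(gt_FHWC, recon_FHWC).reshape(B, S).mean(axis=0)
    psnr_S = pix.psnr(gt_FHWC, recon_FHWC).reshape(B, S).mean(axis=0)
    return ssim_S, psnr_S  # one value per frame, averaged over the batch
```

In the recorded output, the per-frame values drop sharply at the first generated frame (SSIM ~0.89, PSNR ~40 vs. ~0.999 / ~52 on the conditioning frames), which is the expected signature of autoregressive generation error.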
+299,894096,"TERMINAL",0,0,"e",,terminal_output +300,894187,"TERMINAL",0,0,"r",,terminal_output +301,894258,"TERMINAL",0,0,"g",,terminal_output +302,894469,"TERMINAL",0,0,"e ",,terminal_output +303,895134,"TERMINAL",0,0,"t",,terminal_output +304,895341,"",0,0,"Switched from branch 'add-noise-to-combat-exposure-bias' to 'zloss-runs'",,git_branch_checkout +305,895375,"TERMINAL",0,0,"-",,terminal_output +306,895704,"TERMINAL",0,0,"l",,terminal_output +307,895916,"TERMINAL",0,0,"",,terminal_output +308,896063,"TERMINAL",0,0,"",,terminal_output +309,896238,"TERMINAL",0,0,"",,terminal_output +310,896950,"TERMINAL",0,0,"z",,terminal_output +311,897449,"TERMINAL",0,0,"-",,terminal_output +312,897790,"TERMINAL",0,0,"l",,terminal_output +313,897922,"TERMINAL",0,0,"o",,terminal_output +314,898023,"TERMINAL",0,0,"s",,terminal_output +315,898131,"TERMINAL",0,0,"s",,terminal_output +316,898234,"TERMINAL",0,0,"\r\n[?2004l\rmerge: z-loss - not something we can merge\r\n\r\nDid you mean this?\r\n\torigin/z-loss\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +317,903811,"TERMINAL",0,0,"g",,terminal_output +318,903908,"TERMINAL",0,0,"i",,terminal_output +319,904099,"TERMINAL",0,0,"t ",,terminal_output +320,904204,"TERMINAL",0,0,"c",,terminal_output +321,905111,"TERMINAL",0,0,"h",,terminal_output +322,905305,"TERMINAL",0,0,"e",,terminal_output +323,905536,"TERMINAL",0,0,"c",,terminal_output +324,905814,"TERMINAL",0,0,"k",,terminal_output +325,905896,"TERMINAL",0,0,"o",,terminal_output +326,906030,"TERMINAL",0,0,"u",,terminal_output +327,906120,"TERMINAL",0,0,"t",,terminal_output +328,906195,"TERMINAL",0,0," ",,terminal_output +329,906952,"TERMINAL",0,0,"z",,terminal_output +330,907194,"TERMINAL",0,0,"-",,terminal_output +331,907519,"TERMINAL",0,0,"l",,terminal_output +332,907734,"TERMINAL",0,0,"o",,terminal_output +333,907866,"TERMINAL",0,0,"s",,terminal_output +334,907985,"TERMINAL",0,0,"s",,terminal_output +335,908101,"TERMINAL",0,0,"\r\n[?2004l\rerror: Your local changes to the following files would be overwritten by checkout:\r\n\tjasmine/sample.py\r\nPlease commit your changes or stash them before you switch branches.\r\nAborting\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +336,908711,"TERMINAL",0,0,"g",,terminal_output +337,908802,"TERMINAL",0,0,"i",,terminal_output +338,909000,"TERMINAL",0,0,"t ",,terminal_output +339,909101,"TERMINAL",0,0,"p",,terminal_output +340,909331,"TERMINAL",0,0,"u",,terminal_output +341,909906,"TERMINAL",0,0,"",,terminal_output +342,910623,"TERMINAL",0,0,"s",,terminal_output +343,910758,"TERMINAL",0,0,"t",,terminal_output +344,910941,"TERMINAL",0,0,"a",,terminal_output +345,911011,"TERMINAL",0,0,"s",,terminal_output +346,911076,"TERMINAL",0,0,"h",,terminal_output +347,911426,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +348,911727,"TERMINAL",0,0,"Saved working directory and index state WIP on zloss-runs: fa9afac fix noise augmentation logic\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +349,912172,"TERMINAL",0,0,"git stash",,terminal_output +350,912359,"TERMINAL",0,0,"checkout z-loss",,terminal_output +351,913302,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +352,913418,"TERMINAL",0,0,"branch 'z-loss' set up to track 'origin/z-loss'.\r\nSwitched to a new branch 'z-loss'\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output 
+353,913888,"TERMINAL",0,0,"g",,terminal_output +354,914039,"TERMINAL",0,0,"i",,terminal_output +355,914116,"TERMINAL",0,0,"t",,terminal_output +356,914227,"TERMINAL",0,0," ",,terminal_output +357,914363,"TERMINAL",0,0,"p",,terminal_output +358,914623,"TERMINAL",0,0,"u",,terminal_output +359,914720,"TERMINAL",0,0,"l",,terminal_output +360,914884,"TERMINAL",0,0,"l",,terminal_output +361,915102,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +362,915378,"",0,0,"Switched from branch 'zloss-runs' to 'z-loss'",,git_branch_checkout +363,916488,"TERMINAL",0,0,"Already up to date.\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +364,917365,"TERMINAL",0,0,"g",,terminal_output +365,917480,"TERMINAL",0,0,"i",,terminal_output +366,917597,"TERMINAL",0,0,"t",,terminal_output +367,917693,"TERMINAL",0,0," ",,terminal_output +368,917872,"TERMINAL",0,0,"c",,terminal_output +369,918242,"TERMINAL",0,0,"h",,terminal_output +370,918337,"TERMINAL",0,0,"e",,terminal_output +371,918510,"TERMINAL",0,0,"c",,terminal_output +372,918595,"TERMINAL",0,0,"k",,terminal_output +373,918730,"TERMINAL",0,0,"o",,terminal_output +374,918806,"TERMINAL",0,0,"u",,terminal_output +375,918894,"TERMINAL",0,0,"t",,terminal_output +376,918993,"TERMINAL",0,0," ",,terminal_output +377,929920,"TERMINAL",0,0,"",,terminal_output +378,930373,"TERMINAL",0,0,"",,terminal_output +379,930751,"TERMINAL",0,0,"gi",,terminal_output +380,931134,"TERMINAL",0,0,"",,terminal_output +381,931321,"TERMINAL",0,0,"",,terminal_output +382,931426,"TERMINAL",0,0,"",,terminal_output +383,931681,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +384,932028,"TERMINAL",0,0,"g': git pull[1@i': gi[1@t': git",,terminal_output +385,932129,"TERMINAL",0,0," ': git pull",,terminal_output +386,932364,"TERMINAL",0,0,"c': git checkout z-loss",,terminal_output +387,932481,"TERMINAL",0,0,"[1@h': git ch",,terminal_output +388,932570,"TERMINAL",0,0,"[1@e': git che",,terminal_output +389,932654,"TERMINAL",0,0,"[1@c': git chec",,terminal_output +390,932749,"TERMINAL",0,0,"[1@k': git check",,terminal_output +391,932985,"TERMINAL",0,0,"[1@o': git checko",,terminal_output +392,933130,"TERMINAL",0,0,"[1@u': git checkou",,terminal_output +393,933224,"TERMINAL",0,0,"[1@t': git checkout",,terminal_output +394,933346,"TERMINAL",0,0," ': git checkout z-loss",,terminal_output +395,935742,"TERMINAL",0,0,"a': git checkout add-noise-to-combat-exposure-bias\r",,terminal_output +396,936483,"TERMINAL",0,0,"\rjasmine) [tum_cte0515@hkn0706 jasmine]$ git checkout add-noise-to-combat-exposure-bias\r\n\r\r\n[?2004l\rSwitched to branch 'add-noise-to-combat-exposure-bias'\r\nYour branch is up to date with 'origin/add-noise-to-combat-exposure-bias'.\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +397,937582,"TERMINAL",0,0,"g",,terminal_output +398,937683,"TERMINAL",0,0,"i",,terminal_output +399,937899,"TERMINAL",0,0,"t",,terminal_output +400,937952,"TERMINAL",0,0," ",,terminal_output +401,938304,"TERMINAL",0,0,"s",,terminal_output +402,938413,"TERMINAL",0,0,"t",,terminal_output +403,938476,"TERMINAL",0,0,"a",,terminal_output +404,938600,"TERMINAL",0,0,"s",,terminal_output +405,938693,"TERMINAL",0,0,"h",,terminal_output +406,938937,"TERMINAL",0,0," ",,terminal_output +407,939114,"TERMINAL",0,0,"p",,terminal_output +408,939300,"TERMINAL",0,0,"o",,terminal_output +409,939448,"TERMINAL",0,0,"p",,terminal_output +410,939739,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output 
+411,939901,"TERMINAL",0,0,"On branch add-noise-to-combat-exposure-bias\r\nYour branch is up to date with 'origin/add-noise-to-combat-exposure-bias'.\r\n\r\nLast commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: jasmine/sample.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdiff.diff\r\n\tdiff2.diff\r\n\tinput_pipeline/\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/\r\n\tuv.lock\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\nDropped refs/stash@{0} (f7ebe3d9d9f90d5691b9503ac5dc36c068a189ce)\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +412,940338,"",0,0,"Switched from branch 'z-loss' to 'add-noise-to-combat-exposure-bias'",,git_branch_checkout +413,940420,"TERMINAL",0,0,"ig",,terminal_output +414,940844,"TERMINAL",0,0,"",,terminal_output +415,940966,"TERMINAL",0,0,"",,terminal_output +416,941108,"TERMINAL",0,0,"g",,terminal_output +417,941205,"TERMINAL",0,0,"i",,terminal_output +418,941297,"TERMINAL",0,0,"t",,terminal_output +419,941374,"TERMINAL",0,0," ",,terminal_output +420,941456,"TERMINAL",0,0,"m",,terminal_output +421,941568,"TERMINAL",0,0,"e",,terminal_output +422,941629,"TERMINAL",0,0,"r",,terminal_output +423,941709,"TERMINAL",0,0,"g",,terminal_output +424,941839,"TERMINAL",0,0,"e",,terminal_output +425,941977,"TERMINAL",0,0," ",,terminal_output +426,942068,"TERMINAL",0,0,"z",,terminal_output +427,943349,"TERMINAL",0,0,"-",,terminal_output +428,943568,"TERMINAL",0,0,"l",,terminal_output +429,943688,"TERMINAL",0,0,"o",,terminal_output +430,943825,"TERMINAL",0,0,"s",,terminal_output +431,943960,"TERMINAL",0,0,"s\r\n[?2004l\r",,terminal_output +432,944132,"TERMINAL",0,0,"Auto-merging jasmine/train_dynamics.py\r\nhint: Waiting for your editor to close the file... 
[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""~/Projects/jasmine/.git/MERGE_MSG"" 6L, 287B▽ Pzz\[0%m [>c]10;?]11;?Merge branch 'z-loss' into add-noise-to-combat-exposure-bias\r\n# Please enter a commit message to explain why this merge is necessary,# especially if it merges an updated upstream into a topic branch.#\r\n# Lines starting with '#' will be ignored, and an empty message aborts\r\n# the commit.\r\n~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ 1,1All[?25h",,terminal_output +433,944230,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +434,945417,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +435,945495,"TERMINAL",0,0,"w",,terminal_output +436,945629,"TERMINAL",0,0,"q",,terminal_output +437,945737,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m"".git/MERGE_MSG"" 6L, 287B written\r\r\r\n[?1004l[?2004l[?1l>[?25h[>4;m[?1049l\r",,terminal_output +438,945830,"TERMINAL",0,0,"Merge made by the 'ort' strategy.\r\n data/jasmine_data/atari/atari_utils.py | 337 +++++++++\r\n data/jasmine_data/atari/generate_atari_dataset.py | 805 ++++++++++++++++++++++\r\n data/jasmine_data/atari/visualize_array_record.py | 210 ++++++\r\n .../minatar/generate_minatar_breakout_dataset.py | 175 +++++\r\n data/pyproject.toml | 6 +\r\n jasmine/train_dynamics.py | 48 +-\r\n 6 files changed, 1575 insertions(+), 6 deletions(-)\r\n create mode 100644 data/jasmine_data/atari/atari_utils.py\r\n create mode 100644 data/jasmine_data/atari/generate_atari_dataset.py\r\n create mode 100644 data/jasmine_data/atari/visualize_array_record.py\r\n create mode 100644 data/jasmine_data/minatar/generate_minatar_breakout_dataset.py\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +439,981335,"TERMINAL",0,0,"g",,terminal_output +440,981516,"TERMINAL",0,0,"it",,terminal_output +441,981589,"TERMINAL",0,0," ",,terminal_output +442,981741,"TERMINAL",0,0,"l",,terminal_output +443,981887,"TERMINAL",0,0,"o",,terminal_output +444,982161,"TERMINAL",0,0,"g",,terminal_output +445,982358,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\rcommit e6a22eea4fc104b58de635435ff0556a32908277 (HEAD -> add-noise-to-combat-exposure -bias)\r\nMerge: fa9afac ff1a87c\r\nAuthor: Mihir Mahajan \r\nDate: Sat Sep 27 19:11:02 2025 +0200\r\n\r\n Merge branch 'z-loss' into add-noise-to-combat-exposure-bias\r\n\r\ncommit ff1a87c0798e8b52b27fc0880c6a61b1431ba0d0 (origin/z-loss, z-loss)\r\nMerge: 90174e5 12cb0e5\r\nAuthor: emergenz \r\nDate: Sat Sep 27 11:55:12 2025 +0200\r\n\r\n Merge branch 'main' into z-loss\r\n\r\ncommit 90174e5b335b3b79d13c4516e883a423fae2a93c\r\nAuthor: emergenz \r\nDate: Sat Sep 27 11:50:19 2025 +0200\r\n\r\n feat: z-loss\r\n\r\ncommit 12cb0e5fb9b4aab7de6e8636001e686762006aa4 (origin/main, origin/HEAD)\r\nAuthor: Alfred Nguyen <85162596+avocadoali@users.noreply.github.com>\r\nDate: Fri Sep 26 15:50:51 2025 +0200\r\n\r\n feat: Top-k-accuracy calculation (#211)\r\n \r\n ---------\r\n \r\n Co-authored-by: Franz Srambical <79149449+emergenz@users.noreply.github.com>\r\n\r\ncommit 7caa16fa80af555d362f5ef07908a6564d828348\r\nAuthor: Franz Srambical <79149449+emergenz@users.noreply.github.com>\r\nDate: Thu Sep 25 16:57:05 2025 +0200\r\n\r\n feat: generate minatar breakout dataset (#210)\r\n:",,terminal_output +446,992873,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +447,994204,"TERMINAL",0,0,"\r(jasmine) 
[tum_cte0515@hkn0706 jasmine]$ \r(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +448,1008623,"TERMINAL",0,0,"git reset",,terminal_output +449,1008751,"TERMINAL",0,0," --hard fa9afac",,terminal_output +450,1013063,"TERMINAL",0,0,"\r(jasmine) [tum_cte0515@hkn0706 jasmine]$ git reset --hard fa9afac",,terminal_output +451,1035155,"TERMINAL",0,0,"\r\n[?2004l\rHEAD is now at fa9afac fix noise augmentation logic\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +452,1049713,"TERMINAL",0,0,"g",,terminal_output +453,1049865,"TERMINAL",0,0,"i",,terminal_output +454,1049949,"TERMINAL",0,0,"t",,terminal_output +455,1050046,"TERMINAL",0,0," ",,terminal_output +456,1050419,"TERMINAL",0,0,"s",,terminal_output +457,1050502,"TERMINAL",0,0,"i",,terminal_output +458,1050766,"TERMINAL",0,0,"",,terminal_output +459,1050880,"TERMINAL",0,0,"",,terminal_output +460,1051494,"TERMINAL",0,0,"d",,terminal_output +461,1051547,"TERMINAL",0,0,"i",,terminal_output +462,1051710,"TERMINAL",0,0,"f",,terminal_output +463,1051849,"TERMINAL",0,0,"f",,terminal_output +464,1051963,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r\r[?1l>]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +465,1055301,"jasmine/sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n print_action_indices: bool = True\n output_dir: str = ""gifs/""\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n noise_level: float = 0.0\n noise_buckets: int = 10\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n 
tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n max_noise_level=1.0,\n noise_buckets=args.noise_buckets,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.ModelAndOptimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not 
args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n\n gt = gt_video.clip(0, 1)[:, args.start_frame :]\n recon = recon_video_BSHWC.clip(0, 1)[:, args.start_frame :]\n\n ssim_vmap = jax.vmap(pix.ssim, in_axes=(0, 0))\n psnr_vmap = jax.vmap(pix.psnr, in_axes=(0, 0))\n ssim = ssim_vmap(gt, recon)\n psnr = psnr_vmap(gt, recon)\n per_frame_ssim = ssim.mean(0)\n per_frame_psnr = psnr.mean(0)\n avg_ssim = ssim.mean()\n avg_psnr = psnr.mean()\n\n print(""Per-frame SSIM:\n"", per_frame_ssim)\n print(""Per-frame PSNR:\n"", per_frame_psnr)\n\n print(f""SSIM: {avg_ssim}"")\n print(f""PSNR: {avg_psnr}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B = batch[""videos""].shape[0]\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(\n batch[""actions""][:, :-1], (B, args.seq_len - 1, 1)\n )\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n if args.print_action_indices:\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n\n os.makedirs(args.output_dir, exist_ok=True)\n imgs[0].save(\n os.path.join(args.output_dir, f""generation_{time.time()}.gif""),\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +466,1055542,"jasmine/sample.py",2764,29," max_noise_level=0.0,\n",python,content +467,1057867,"jasmine/sample.py",2789,0,"",python,selection_mouse +468,1058209,"jasmine/sample.py",2788,1,"",python,content +469,1058294,"jasmine/sample.py",2788,0,"1",python,content +470,1058296,"jasmine/sample.py",2789,0,"",python,selection_keyboard +471,1065935,"TERMINAL",0,0,"gi",,terminal_output +472,1066117,"TERMINAL",0,0,"t ",,terminal_output +473,1067153,"TERMINAL",0,0,"s",,terminal_output +474,1067349,"TERMINAL",0,0,"t",,terminal_output +475,1067551,"TERMINAL",0,0,"a",,terminal_output +476,1069108,"TERMINAL",0,0,"s",,terminal_output +477,1069163,"TERMINAL",0,0,"h",,terminal_output +478,1069705,"TERMINAL",0,0,"",,terminal_output +479,1069845,"TERMINAL",0,0,"",,terminal_output +480,1069945,"TERMINAL",0,0,"",,terminal_output +481,1070064,"TERMINAL",0,0,"",,terminal_output +482,1070215,"TERMINAL",0,0,"",,terminal_output +483,1070548,"TERMINAL",0,0,"c",,terminal_output +484,1070634,"TERMINAL",0,0,"o",,terminal_output +485,1070734,"TERMINAL",0,0,"m",,terminal_output +486,1070954,"TERMINAL",0,0,"m",,terminal_output +487,1071053,"TERMINAL",0,0,"it",,terminal_output +488,1071151,"TERMINAL",0,0," ",,terminal_output +489,1071247,"TERMINAL",0,0,"-",,terminal_output +490,1071335,"TERMINAL",0,0,"a",,terminal_output +491,1071510,"TERMINAL",0,0,"m ",,terminal_output +492,1071694,"TERMINAL",0,0,"""",,terminal_output 
+493,1072081,"TERMINAL",0,0,"a",,terminal_output +494,1072318,"TERMINAL",0,0,"d",,terminal_output +495,1072528,"TERMINAL",0,0,"d",,terminal_output +496,1072805,"TERMINAL",0,0," ",,terminal_output +497,1074498,"TERMINAL",0,0,"",,terminal_output +498,1075246,"TERMINAL",0,0,"m",,terminal_output +499,1075710,"TERMINAL",0,0,"",,terminal_output +500,1075972,"TERMINAL",0,0,"""",,terminal_output +501,1076169,"TERMINAL",0,0,"m",,terminal_output +502,1076232,"TERMINAL",0,0,"a",,terminal_output +503,1076534,"TERMINAL",0,0,"x",,terminal_output +504,1076764,"TERMINAL",0,0," ",,terminal_output +505,1078621,"TERMINAL",0,0," ",,terminal_output +506,1078888,"TERMINAL",0,0,"",,terminal_output +507,1079099,"TERMINAL",0,0,"n",,terminal_output +508,1079374,"TERMINAL",0,0,"oi",,terminal_output +509,1079504,"TERMINAL",0,0,"s",,terminal_output +510,1079623,"TERMINAL",0,0,"e",,terminal_output +511,1079704,"TERMINAL",0,0," ",,terminal_output +512,1079878,"TERMINAL",0,0,"to",,terminal_output +513,1079931,"TERMINAL",0,0," ",,terminal_output +514,1080481,"TERMINAL",0,0,"1",,terminal_output +515,1080578,"TERMINAL",0,0," ",,terminal_output +516,1080941,"TERMINAL",0,0,"i",,terminal_output +517,1081043,"TERMINAL",0,0,"n",,terminal_output +518,1081170,"TERMINAL",0,0," ",,terminal_output +519,1081252,"TERMINAL",0,0,"s",,terminal_output +520,1081470,"TERMINAL",0,0,"am",,terminal_output +521,1081696,"TERMINAL",0,0,"p",,terminal_output +522,1081958,"TERMINAL",0,0,"in",,terminal_output +523,1082134,"TERMINAL",0,0,"g",,terminal_output +524,1082737,"TERMINAL",0,0,"",,terminal_output +525,1082829,"TERMINAL",0,0,"",,terminal_output +526,1082958,"TERMINAL",0,0,"",,terminal_output +527,1083425,"TERMINAL",0,0,"l",,terminal_output +528,1083601,"TERMINAL",0,0,"i",,terminal_output +529,1083652,"TERMINAL",0,0,"n",,terminal_output +530,1083742,"TERMINAL",0,0,"g",,terminal_output +531,1083820,"TERMINAL",0,0," ",,terminal_output +532,1084006,"TERMINAL",0,0,"l \r",,terminal_output +533,1084148,"TERMINAL",0,0,"o",,terminal_output +534,1084283,"TERMINAL",0,0,"g",,terminal_output +535,1084340,"TERMINAL",0,0,"i",,terminal_output +536,1084750,"TERMINAL",0,0,"c",,terminal_output +537,1085010,"TERMINAL",0,0,"""",,terminal_output +538,1085280,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +539,1085763,"TERMINAL",0,0,"black....................................................................",,terminal_output +540,1086232,"TERMINAL",0,0,"Passed\r\n",,terminal_output +541,1086380,"TERMINAL",0,0,"[add-noise-to-combat-exposure-bias 28c4315] max noise to 1 in sampling logic\r\n 1 file changed, 1 insertion(+), 1 deletion(-)\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +542,1088336,"TERMINAL",0,0,"g",,terminal_output +543,1088408,"TERMINAL",0,0,"i",,terminal_output +544,1088549,"TERMINAL",0,0,"t ",,terminal_output +545,1090984,"TERMINAL",0,0,"s",,terminal_output +546,1091037,"TERMINAL",0,0,"c",,terminal_output +547,1091091,"TERMINAL",0,0,"h",,terminal_output +548,1091458,"TERMINAL",0,0,"",,terminal_output +549,1091670,"TERMINAL",0,0,"",,terminal_output +550,1091807,"TERMINAL",0,0,"",,terminal_output +551,1091911,"TERMINAL",0,0,"c",,terminal_output +552,1092030,"TERMINAL",0,0,"h",,terminal_output +553,1092129,"TERMINAL",0,0,"e",,terminal_output +554,1092224,"TERMINAL",0,0,"c",,terminal_output +555,1092308,"TERMINAL",0,0,"k",,terminal_output +556,1092476,"TERMINAL",0,0,"o",,terminal_output +557,1092604,"TERMINAL",0,0,"ut",,terminal_output +558,1092747,"TERMINAL",0,0," ",,terminal_output 
+559,1092895,"TERMINAL",0,0,"z",,terminal_output +560,1093867,"TERMINAL",0,0,"l",,terminal_output +561,1094085,"TERMINAL",0,0,"os",,terminal_output +562,1094269,"TERMINAL",0,0,"s",,terminal_output +563,1094366,"TERMINAL",0,0,"-",,terminal_output +564,1094567,"TERMINAL",0,0,"r",,terminal_output +565,1094656,"TERMINAL",0,0,"u",,terminal_output +566,1094758,"TERMINAL",0,0,"n",,terminal_output +567,1094946,"TERMINAL",0,0,"en",,terminal_output +568,1095201,"TERMINAL",0,0,"r",,terminal_output +569,1095720,"TERMINAL",0,0,"",,terminal_output +570,1095904,"TERMINAL",0,0,"",,terminal_output +571,1096198,"TERMINAL",0,0,"ne",,terminal_output +572,1096313,"TERMINAL",0,0,"r",,terminal_output +573,1096439,"TERMINAL",0,0,"\r\n[?2004l\rerror: pathspec 'zloss-runner' did not match any file(s) known to git\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +574,1096968,"TERMINAL",0,0,"g",,terminal_output +575,1097067,"TERMINAL",0,0,"i",,terminal_output +576,1097159,"TERMINAL",0,0,"t",,terminal_output +577,1097211,"TERMINAL",0,0," ",,terminal_output +578,1098289,"TERMINAL",0,0,"",,terminal_output +579,1099157,"TERMINAL",0,0,"",,terminal_output +580,1099520,"TERMINAL",0,0,"g",,terminal_output +581,1099573,"TERMINAL",0,0,"i",,terminal_output +582,1099848,"TERMINAL",0,0,"t",,terminal_output +583,1099980,"TERMINAL",0,0," ",,terminal_output +584,1100082,"TERMINAL",0,0,"b",,terminal_output +585,1100285,"TERMINAL",0,0,"r",,terminal_output +586,1100500,"TERMINAL",0,0,"e",,terminal_output +587,1100850,"TERMINAL",0,0,"",,terminal_output +588,1100934,"TERMINAL",0,0,"a",,terminal_output +589,1101070,"TERMINAL",0,0,"n",,terminal_output +590,1101204,"TERMINAL",0,0,"ch",,terminal_output +591,1101470,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r action-mapper\r\n* add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n metrics-logging-for-dynamics-model\r\n:",,terminal_output +592,1102622,"TERMINAL",0,0,"\r/",,terminal_output +593,1103172,"TERMINAL",0,0,"zz",,terminal_output +594,1103623,"TERMINAL",0,0,"ll",,terminal_output +595,1103787,"TERMINAL",0,0,"oo",,terminal_output +596,1103922,"TERMINAL",0,0,"ss",,terminal_output +597,1104049,"TERMINAL",0,0,"ss",,terminal_output +598,1104106,"TERMINAL",0,0,"\r action-mapper\r\n* add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n 
convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n metrics-logging-for-dynamics-model\r\n action-mapper\r\n* add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n metrics-logging-for-dynamics-model\r\n...skipping...\r\n zloss-runs\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n(END)",,terminal_output +599,1105872,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +600,1106606,"TERMINAL",0,0,"git branch",,terminal_output +601,1107047,"TERMINAL",0,0,"checkout zloss-runner",,terminal_output +602,1107631,"TERMINAL",0,0,"",,terminal_output +603,1107685,"TERMINAL",0,0,"",,terminal_output +604,1107890,"TERMINAL",0,0,"",,terminal_output +605,1108066,"TERMINAL",0,0,"s",,terminal_output +606,1108243,"TERMINAL",0,0,"\r\n[?2004l\rSwitched to branch 'zloss-runs'\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +607,1108811,"TERMINAL",0,0,"g",,terminal_output +608,1108961,"TERMINAL",0,0,"i",,terminal_output +609,1108983,"TERMINAL",0,0,"t",,terminal_output +610,1109054,"TERMINAL",0,0," ",,terminal_output +611,1109285,"TERMINAL",0,0,"me",,terminal_output +612,1109454,"TERMINAL",0,0,"rg",,terminal_output +613,1109545,"TERMINAL",0,0,"e",,terminal_output +614,1109625,"TERMINAL",0,0," ",,terminal_output +615,1110106,"TERMINAL",0,0,"z",,terminal_output +616,1110352,"",0,0,"Switched from branch 'add-noise-to-combat-exposure-bias' to 'zloss-runs'",,git_branch_checkout +617,1111364,"TERMINAL",0,0,"-",,terminal_output +618,1111649,"TERMINAL",0,0,"l",,terminal_output +619,1111808,"TERMINAL",0,0,"o",,terminal_output +620,1111882,"TERMINAL",0,0,"s",,terminal_output 
+621,1112139,"TERMINAL",0,0,"s",,terminal_output +622,1112468,"TERMINAL",0,0,"\r\n[?2004l\rAuto-merging jasmine/train_dynamics.py\r\nhint: Waiting for your editor to close the file... [?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""~/Projects/jasmine/.git/MERGE_MSG"" 6L, 264B▽ Pzz\[0%m [>c]10;?]11;?Merge branch 'z-loss' into zloss-runs\r\n# Please enter a commit message to explain why this merge is necessary,# especially if it merges an updated upstream into a topic branch.#\r\n# Lines starting with '#' will be ignored, and an empty message aborts\r\n# the commit.\r\n~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ 1,1All[?25h",,terminal_output +623,1112561,"TERMINAL",0,0,"P+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +624,1113800,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +625,1113897,"TERMINAL",0,0,"w",,terminal_output +626,1114054,"TERMINAL",0,0,"q",,terminal_output +627,1114192,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m"".git/MERGE_MSG"" 6L, 264B written\r\r\r\n[?1004l[?2004l[?1l>[?25h[>4;m[?1049l\rMerge made by the 'ort' strategy.\r\n data/jasmine_data/atari/atari_utils.py | 337 +++++++++\r\n data/jasmine_data/atari/generate_atari_dataset.py | 805 ++++++++++++++++++++++\r\n data/jasmine_data/atari/visualize_array_record.py | 210 ++++++\r\n .../minatar/generate_minatar_breakout_dataset.py | 175 +++++\r\n data/pyproject.toml | 6 +\r\n jasmine/train_dynamics.py | 48 +-\r\n 6 files changed, 1575 insertions(+), 6 deletions(-)\r\n create mode 100644 data/jasmine_data/atari/atari_utils.py\r\n create mode 100644 data/jasmine_data/atari/generate_atari_dataset.py\r\n create mode 100644 data/jasmine_data/atari/visualize_array_record.py\r\n create mode 100644 data/jasmine_data/minatar/generate_minatar_breakout_dataset.py\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +628,1118439,"TERMINAL",0,0,"bash",,terminal_focus +629,1120292,"TERMINAL",0,0,"srun",,terminal_focus +630,1158162,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/train_dyn_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --job-name=train_dyn_default_breakout_longer\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\n# 
tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\n\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518963\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=3e-5 \\n --log_image_interval=250 \\n --log_checkpoint_interval=250 \\n --dyna_type=maskgit \\n --log \\n --name=breakout-dyn-noise-lvl-default-$slurm_job_id \\n --tags dyn breakout noise-lvl default \\n --entity instant-uv \\n --project jafar \\n --patch_size 4 \\n --lam_patch_size 4 \\n --warmup_steps 100 \\n --wsd_decay_steps 1000 \\n --num_steps 5000 \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --lam_checkpoint $lam_checkpoint \\n --val_interval 500 \\n --eval_full_frame \\n",shellscript,tab +631,1164241,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/mixed_prec/train_dyn_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --job-name=train_dyn_default_breakout_longer\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\n# tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\n\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518963\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=3e-5 \\n --log_image_interval=250 \\n --log_checkpoint_interval=250 \\n --dyna_type=maskgit \\n --log \\n --name=breakout-dyn-noise-lvl-default-$slurm_job_id \\n --tags dyn breakout noise-lvl default \\n --entity instant-uv \\n --project jafar \\n --patch_size 4 \\n --lam_patch_size 4 \\n --warmup_steps 100 \\n --wsd_decay_steps 1000 \\n --num_steps 5000 \\n --data_dir $array_records_dir_train \\n 
--val_data_dir $array_records_dir_val \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --lam_checkpoint $lam_checkpoint \\n --val_interval 500 \\n --eval_full_frame \\n",shellscript,tab +632,1171205,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/mixed_prec/train_dyn_single_gpu_50k.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=06:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --job-name=train_dyn_default_breakout_longer\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\n# tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\n\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518963\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --dyna_type=maskgit \\n --log \\n --name=breakout-dyn-50ksteps-3e-5-noise-lvl-$slurm_job_id \\n --tags dyn breakout 50ksteps 3e-5 noise-lvl \\n --entity instant-uv \\n --project jafar \\n --patch_size 4 \\n --lam_patch_size 4 \\n --warmup_steps 1000 \\n --wsd_decay_steps 10000 \\n --num_steps 50000 \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --lam_checkpoint $lam_checkpoint \\n --val_interval 1000 \\n --eval_full_frame \\n",shellscript,tab +633,1182471,"jasmine/sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n print_action_indices: bool = True\n output_dir: str = ""gifs/""\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int 
= 1\n noise_level: float = 0.0\n noise_buckets: int = 10\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n max_noise_level=1.0,\n noise_buckets=args.noise_buckets,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.ModelAndOptimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- 
Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n\n gt = gt_video.clip(0, 1)[:, args.start_frame :]\n recon = recon_video_BSHWC.clip(0, 1)[:, args.start_frame :]\n\n ssim_vmap = jax.vmap(pix.ssim, in_axes=(0, 0))\n psnr_vmap = jax.vmap(pix.psnr, in_axes=(0, 0))\n ssim = ssim_vmap(gt, recon)\n psnr = psnr_vmap(gt, recon)\n per_frame_ssim = ssim.mean(0)\n per_frame_psnr = psnr.mean(0)\n avg_ssim = ssim.mean()\n avg_psnr = psnr.mean()\n\n print(""Per-frame SSIM:\n"", per_frame_ssim)\n print(""Per-frame PSNR:\n"", per_frame_psnr)\n\n print(f""SSIM: {avg_ssim}"")\n print(f""PSNR: {avg_psnr}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B = batch[""videos""].shape[0]\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(\n batch[""actions""][:, :-1], (B, args.seq_len - 1, 1)\n )\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n if args.print_action_indices:\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n\n os.makedirs(args.output_dir, exist_ok=True)\n imgs[0].save(\n os.path.join(args.output_dir, f""generation_{time.time()}.gif""),\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab 
+634,1182637,"jasmine/sample.py",2764,29," max_noise_level=0.0,\n",python,content +635,1190408,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=06:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --job-name=train_dyn_default_breakout_longer\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\n# tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\n\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518963\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log_checkpoint_interval=1000 \\n --dyna_type=maskgit \\n --log \\n --name=breakout-dyn-50ksteps-3e-5-noise-lvl-$slurm_job_id \\n --tags dyn breakout 50ksteps 3e-5 noise-lvl \\n --entity instant-uv \\n --project jafar \\n --patch_size 4 \\n --lam_patch_size 4 \\n --warmup_steps 1000 \\n --wsd_decay_steps 10000 \\n --num_steps 50000 \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --lam_checkpoint $lam_checkpoint \\n --val_interval 1000 \\n --eval_full_frame \\n",shellscript,tab +636,1194872,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2295,0,"",shellscript,selection_mouse +637,1195417,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2417,0,"",shellscript,selection_mouse +638,1202610,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1922,0,"",shellscript,selection_mouse +639,1203740,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1944,0,"",shellscript,selection_mouse +640,1205430,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1991,0,"",shellscript,selection_mouse +641,1206264,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1902,0,"",shellscript,selection_mouse 
+642,1206265,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1901,0,"",shellscript,selection_command +643,1206823,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1940,0,"",shellscript,selection_mouse +644,1207351,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1951,0,"",shellscript,selection_mouse +645,1208509,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1950,0,"",shellscript,selection_command +646,1208858,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1950,1,"-",shellscript,selection_command +647,1209092,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1950,1,"-",shellscript,selection_command +648,1209592,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1950,0,"",shellscript,selection_command +649,1211620,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2014,0,"-",shellscript,content +650,1211620,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1950,0,"-",shellscript,content +651,1211622,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1951,0,"",shellscript,selection_keyboard +652,1212304,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2016,0,"f",shellscript,content +653,1212305,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1951,0,"f",shellscript,content +654,1212305,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1952,0,"",shellscript,selection_keyboard +655,1212419,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2018,0,"u",shellscript,content +656,1212420,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1952,0,"u",shellscript,content +657,1212420,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1953,0,"",shellscript,selection_keyboard +658,1212604,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2020,0,"l",shellscript,content +659,1212604,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1953,0,"l",shellscript,content +660,1212605,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1954,0,"",shellscript,selection_keyboard +661,1212728,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2022,0,"l",shellscript,content +662,1212729,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1954,0,"l",shellscript,content +663,1212730,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1955,0,"",shellscript,selection_keyboard +664,1212933,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2024,0,"-",shellscript,content +665,1212934,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1955,0,"-",shellscript,content +666,1212934,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1956,0,"",shellscript,selection_keyboard 
+667,1213169,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2026,0,"p",shellscript,content +668,1213169,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1956,0,"p",shellscript,content +669,1213170,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1957,0,"",shellscript,selection_keyboard +670,1213317,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2028,0,"r",shellscript,content +671,1213317,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1957,0,"r",shellscript,content +672,1213318,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1958,0,"",shellscript,selection_keyboard +673,1213469,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2030,0,"e",shellscript,content +674,1213470,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1958,0,"e",shellscript,content +675,1213470,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1959,0,"",shellscript,selection_keyboard +676,1213577,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2032,0,"c",shellscript,content +677,1213577,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1959,0,"c",shellscript,content +678,1213578,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1960,0,"",shellscript,selection_keyboard +679,1213836,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2034,0,"\n ",shellscript,content +680,1213836,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1960,0,"\n ",shellscript,content +681,1214420,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2040,4,"",shellscript,content +682,1214421,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1961,4,"",shellscript,content +683,1214808,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2035,1,"",shellscript,content +684,1214808,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1960,1,"",shellscript,content +685,1215694,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",1959,0,"",shellscript,selection_command +686,1227139,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",0,0,"",shellscript,tab +687,1235488,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n 
print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n max_noise_level: float = 0.7\n noise_buckets: int = 10\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = False\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n max_noise_level=args.max_noise_level,\n noise_buckets=args.noise_buckets,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del 
genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n 
keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n 
outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over 
args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = 
val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = 
val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +688,1239116,"jasmine/train_dynamics.py",2783,0,"",python,selection_command +689,1241036,"jasmine/train_dynamics.py",2267,0,"",python,selection_command +690,1242336,"jasmine/train_dynamics.py",2258,19,"use_flash_attention",python,selection_mouse +691,1244606,"jasmine/train_dynamics.py",2260,0,"",python,selection_mouse +692,1244719,"jasmine/train_dynamics.py",2258,19,"use_flash_attention",python,selection_mouse +693,1245839,"jasmine/train_dynamics.py",2257,0,"",python,selection_mouse +694,1247612,"jasmine/train_dynamics.py",2330,0,"",python,selection_command +695,1249117,"jasmine/train_dynamics.py",2234,0,"",python,selection_mouse 
+696,1249302,"jasmine/train_dynamics.py",2233,5,"dtype",python,selection_mouse +697,1270664,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",0,0,"",shellscript,tab +698,1272956,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2436,0,"",shellscript,selection_mouse +699,1272958,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2435,0,"",shellscript,selection_command +700,1273798,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2436,0,"\n ",shellscript,content +701,1274039,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2441,0,"-",shellscript,content +702,1274040,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2442,0,"",shellscript,selection_keyboard +703,1274168,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2442,0,"-",shellscript,content +704,1274169,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2443,0,"",shellscript,selection_keyboard +705,1274824,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2443,0,"d",shellscript,content +706,1274825,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2444,0,"",shellscript,selection_keyboard +707,1275373,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2444,0,"t",shellscript,content +708,1275374,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2445,0,"",shellscript,selection_keyboard +709,1275541,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2445,0,"y",shellscript,content +710,1275542,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2446,0,"",shellscript,selection_keyboard +711,1275886,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2446,0,"p",shellscript,content +712,1275887,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2447,0,"",shellscript,selection_keyboard +713,1275989,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2447,0,"e",shellscript,content +714,1275990,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2448,0,"",shellscript,selection_keyboard +715,1277426,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2448,0," ",shellscript,content +716,1277427,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2449,0,"",shellscript,selection_keyboard +717,1278006,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2449,0,"j",shellscript,content +718,1278007,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2450,0,"",shellscript,selection_keyboard +719,1278159,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2450,0,"n",shellscript,content +720,1278160,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2451,0,"",shellscript,selection_keyboard 
+721,1278394,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2451,0,"p",shellscript,content +722,1278395,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2452,0,"",shellscript,selection_keyboard +723,1278634,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2452,0,".",shellscript,content +724,1278635,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2453,0,"",shellscript,selection_keyboard +725,1280836,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2453,0,"f",shellscript,content +726,1280837,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2454,0,"",shellscript,selection_keyboard +727,1280914,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2454,0,"l",shellscript,content +728,1280915,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2455,0,"",shellscript,selection_keyboard +729,1281274,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2455,0,"o",shellscript,content +730,1281275,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2456,0,"",shellscript,selection_keyboard +731,1281437,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2456,0,"a",shellscript,content +732,1281438,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2457,0,"",shellscript,selection_keyboard +733,1281622,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2457,0,"t",shellscript,content +734,1281623,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2458,0,"",shellscript,selection_keyboard +735,1282466,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2458,0,"3",shellscript,content +736,1282467,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2459,0,"",shellscript,selection_keyboard +737,1282573,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2459,0,"2",shellscript,content +738,1282574,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2460,0,"",shellscript,selection_keyboard +739,1283325,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2460,0," ",shellscript,content +740,1283326,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2461,0,"",shellscript,selection_keyboard +741,1283619,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2461,0,"\",shellscript,content +742,1283620,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2462,0,"",shellscript,selection_keyboard +743,1283995,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2462,0,"\n ",shellscript,content +744,1284415,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2467,0,"-",shellscript,content +745,1284416,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2468,0,"",shellscript,selection_keyboard 
+746,1284548,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2468,0,"-",shellscript,content +747,1284549,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2469,0,"",shellscript,selection_keyboard +748,1284815,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2469,0,"n",shellscript,content +749,1284816,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2470,0,"",shellscript,selection_keyboard +750,1284967,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2470,0,"o",shellscript,content +751,1284968,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2471,0,"",shellscript,selection_keyboard +752,1285960,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2471,0,"-",shellscript,content +753,1285961,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2472,0,"",shellscript,selection_keyboard +754,1286296,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2472,0,"u",shellscript,content +755,1286297,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2473,0,"",shellscript,selection_keyboard +756,1286401,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2473,0,"s",shellscript,content +757,1286402,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2474,0,"",shellscript,selection_keyboard +758,1286781,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2474,0,"e",shellscript,content +759,1286782,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2475,0,"",shellscript,selection_keyboard +760,1286867,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2475,0,"-",shellscript,content +761,1286868,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2476,0,"",shellscript,selection_keyboard +762,1287264,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2476,0,"f",shellscript,content +763,1287265,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2477,0,"",shellscript,selection_keyboard +764,1287382,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2477,0,"l",shellscript,content +765,1287383,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2478,0,"",shellscript,selection_keyboard +766,1287510,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2478,0,"a",shellscript,content +767,1287511,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2479,0,"",shellscript,selection_keyboard +768,1287665,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2479,0,"s",shellscript,content +769,1287666,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2480,0,"",shellscript,selection_keyboard +770,1287796,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2480,0,"h",shellscript,content 
+771,1287797,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2481,0,"",shellscript,selection_keyboard +772,1288149,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2481,0,"-",shellscript,content +773,1288150,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2482,0,"",shellscript,selection_keyboard +774,1288433,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2482,0,"a",shellscript,content +775,1288434,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2483,0,"",shellscript,selection_keyboard +776,1288817,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2483,0,"t",shellscript,content +777,1288818,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2484,0,"",shellscript,selection_keyboard +778,1288984,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2484,0,"t",shellscript,content +779,1288985,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2485,0,"",shellscript,selection_keyboard +780,1289214,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2485,0,"e",shellscript,content +781,1289215,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2486,0,"",shellscript,selection_keyboard +782,1289242,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2486,0,"n",shellscript,content +783,1289243,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2487,0,"",shellscript,selection_keyboard +784,1289402,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2487,0,"t",shellscript,content +785,1289403,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2488,0,"",shellscript,selection_keyboard +786,1289515,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2488,0,"i",shellscript,content +787,1289517,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2489,0,"",shellscript,selection_keyboard +788,1289582,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2489,0,"o",shellscript,content +789,1289583,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2490,0,"",shellscript,selection_keyboard +790,1289748,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2490,0,"n",shellscript,content +791,1289749,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2491,0,"",shellscript,selection_keyboard +792,1290434,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2491,0," ",shellscript,content +793,1290435,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2492,0,"",shellscript,selection_keyboard +794,1290596,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2492,0,"0",shellscript,content +795,1290597,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2493,0,"",shellscript,selection_keyboard 
+796,1291007,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2492,1,"",shellscript,content +797,1291113,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2492,0," ",shellscript,content +798,1291114,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2493,0,"",shellscript,selection_keyboard +799,1291212,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2493,0,"ß",shellscript,content +800,1291213,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2494,0,"",shellscript,selection_keyboard +801,1291657,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2493,1,"",shellscript,content +802,1291778,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2492,1,"",shellscript,content +803,1292536,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2492,0,"\",shellscript,content +804,1292537,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",2493,0,"",shellscript,selection_keyboard +805,1293956,"jasmine/sample.py",0,0,"",python,tab +806,1294688,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",0,0,"",shellscript,tab +807,1295267,"jasmine/train_dynamics.py",0,0,"",python,tab +808,1311067,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",0,0,"",shellscript,tab +809,1317343,"TERMINAL",0,0,"s",,terminal_output +810,1317559,"TERMINAL",0,0,"h",,terminal_output +811,1317739,"TERMINAL",0,0," ",,terminal_output +812,1318047,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh",,terminal_output +813,1318409,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/noise_schedule_runs/full-prec/train_dyn_single_gpu_50k.sh\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=06:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_default_breakout_longer\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\n# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\r\n# 
tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518963\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\r\n\r\nenv | grep SLURM\r\n\r\nsrun python jasmine/train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=10 \\r\n --image_width=10 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=120 \\r\n --init_lr=0 \\r\n --max_lr=3e-5 \\r\n --log_image_interval=1000 \\r\n --log_checkpoint_interval=1000 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=breakout-dyn-50ksteps-3e-5-noise-lvl-full-prec-$slurm_job_id \\r\n --tags dyn breakout 50ksteps 3e-5 noise-lvl-full-prec \\r\n --entity instant-uv \\r\n --project jafar \\r\n --patch_size 4 \\r\n --lam_patch_size 4 \\r\n --warmup_steps 1000 \\r\n --wsd_decay_steps 10000 \\r\n --num_steps 50000 \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --lam_checkpoint $lam_checkpoint \\r\n --val_interval 1000 \\r\n --eval_full_frame \\r\n --dtype jnp.float32 \\r\n --no-use-flash-attention \\r\n",,terminal_output +814,1318568,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1756481\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0706\r\nSLURM_JOB_START_TIME=1758992136\r\nSLURM_STEP_NODELIST=hkn0706\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1758995736\r\nSLURM_PMI2_SRUN_PORT=39897\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3526712\r\nSLURM_PTY_PORT=33423\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=37\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e8.hkn0706\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=85\r\nSLURM_NODELIST=hkn0706\r\nSLURM_SRUN_COMM_PORT=37081\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3526712\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0706\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=37081\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0706\r\n",,terminal_output +815,1318707,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +816,1323296,"TERMINAL",0,0,"╭─ Unrecognized options ──────────────────────────╮\r\n│ Unrecognized options: --dtype │\r\n│ ─────────────────────────────────────────────── │\r\n│ For full helptext, run train_dynamics.py --help │\r\n╰─────────────────────────────────────────────────╯\r\n",,terminal_output +817,1323511,"TERMINAL",0,0,"srun: error: hkn0706: task 0: Exited with exit code 2\r\n]0;tum_cte0515@hkn0706:~/Projects/jasmine[?2004h(jasmine) 
[tum_cte0515@hkn0706 jasmine]$ ",,terminal_output +818,1357572,"jasmine/sample.py",0,0,"",python,tab +819,1359333,"jasmine/train_dynamics.py",0,0,"",python,tab +820,1361002,"jasmine/train_dynamics.py",2235,0,"",python,selection_mouse +821,1361127,"jasmine/train_dynamics.py",2233,5,"dtype",python,selection_mouse +822,1383252,"jasmine/train_dynamics.py",2237,0,"",python,selection_command diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7b06cc31-85b7-4591-87e0-b26b0ddee2111758710564601-2025_09_24-12.43.30.174/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7b06cc31-85b7-4591-87e0-b26b0ddee2111758710564601-2025_09_24-12.43.30.174/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..407008a146db6be2ee7da857e9bfc89a37adb6ab --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-7b06cc31-85b7-4591-87e0-b26b0ddee2111758710564601-2025_09_24-12.43.30.174/source.csv @@ -0,0 +1,7089 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,738,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"12:43:30 PM [info] Activating crowd-code\n12:43:30 PM [info] Recording started\n12:43:30 PM [info] Initializing git provider using file system watchers...\n12:43:30 PM [info] Git repository found\n12:43:30 PM [info] Git provider initialized successfully\n12:43:30 PM [info] Initial git state: [object Object]\n",Log,tab +3,8484,"TERMINAL",0,0,"queue",,terminal_command +4,8602,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Wed Sep 24 12:43:38 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)",,terminal_output +5,9293,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +6,10693,"TERMINAL",0,0,"bash",,terminal_focus +7,11376,"TERMINAL",0,0,"bash",,terminal_focus +8,25300,"DONE.txt",0,0,"DONE\n",plaintext,tab +9,157305,"slurm/jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/lam/%x_%j.log\n#SBATCH --job-name=train_lam_default_breakout_long\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python train_lam.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --patch_size=4 \\n --log \\n --name=breakout-lam-default-$slurm_job_id \\n --tags lam breakout default \\n --entity instant-uv \\n --project jafar \\n --data_dir 
$array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --num_steps 5000 \\n --max_lr 3e-5 \\n --warmup_steps 500 \\n --wsd_decay_steps 1000 \\n --log_image_interval 500 \\n --log_checkpoint_interval 500 \\n --log_checkpoint_keep_period 500 \\n --val_interval 500 \",shellscript,tab +10,157308,"slurm/jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh",1301,0,"",shellscript,selection_mouse +11,157346,"slurm/jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh",1300,0,"",shellscript,selection_command +12,158015,"DONE.txt",0,0,"",plaintext,tab +13,158027,"DONE.txt",5,0,"",plaintext,selection_mouse +14,163968,"DONE.txt",0,0,"",plaintext,tab +15,175009,"TERMINAL",0,0,"bash",,terminal_focus +16,176223,"TERMINAL",0,0,"idling",,terminal_command +17,176288,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Wed Sep 24 12:46:26 2025Partition dev_cpuonly: 10 nodes idle\rPartition cpuonly: 197 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated:\t 2 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 3 nodes idle\rPartition accelerated-h200:\t 2 nodes idle",,terminal_output +18,177333,"TERMINAL",0,0,"7",,terminal_output +19,178401,"TERMINAL",0,0,"8",,terminal_output +20,178483,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +21,181128,"TERMINAL",0,0,"bash",,terminal_focus +22,182051,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +23,182125,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3519095\r\n",,terminal_output +24,182180,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +25,209220,"TERMINAL",0,0,"salloc: Nodes hkn0402 are ready for job\r\n",,terminal_output +26,210056,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h[tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +27,210967,"TERMINAL",0,0,"so",,terminal_output +28,211031,"TERMINAL",0,0,"u",,terminal_output +29,211094,"TERMINAL",0,0,"r",,terminal_output +30,211275,"TERMINAL",0,0,"c",,terminal_output +31,211439,"TERMINAL",0,0,"e",,terminal_output +32,211553,"TERMINAL",0,0," .",,terminal_output +33,211735,"TERMINAL",0,0,"v",,terminal_output +34,212524,"TERMINAL",0,0,"e",,terminal_output +35,212829,"TERMINAL",0,0,"nv/",,terminal_output +36,212998,"TERMINAL",0,0,"b",,terminal_output +37,213145,"TERMINAL",0,0,"in/",,terminal_output +38,213381,"TERMINAL",0,0,"ca",,terminal_output +39,213621,"TERMINAL",0,0,"",,terminal_output +40,214163,"TERMINAL",0,0,"",,terminal_output +41,214274,"TERMINAL",0,0,"",,terminal_output +42,214476,"TERMINAL",0,0,"a",,terminal_output +43,214581,"TERMINAL",0,0,"c",,terminal_output +44,214749,"TERMINAL",0,0,"tivate",,terminal_output +45,215207,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +46,215655,"TERMINAL",0,0,"l",,terminal_output +47,215810,"TERMINAL",0,0,"s\r\n[?2004l\r",,terminal_output +48,216096,"TERMINAL",0,0,"debug generation_1758630215.9213252.gif logs slurm\r\ndiff2.diff generation_1758643402.7346563.gif models test.py\r\ndiff.diff generation_1758646589.5360243.gif overfit_dir tests\r\nframe-knoms.png generation_1758646654.8434227.gif overfit_dir.zip train_dynamics.py\r\nframe.png genie.py __pycache__ train_lam.py\r\nframes gifs README.md train_tokenizer.py\r\ngeneration_1758628640.1116674.gif 
input_pipeline requirements-franz.txt utils\r\ngeneration_1758628700.1090345.gif killer_partition.sh requirements.txt venv_3_11\r\ngeneration_1758628818.6393511.gif killer.sh sample.py wandb\r\ngeneration_1758629742.4141676.gif LICENSE samples\r\ngeneration_1758630061.74718.gif log.log scripts_cremers\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +49,295911,"slurm/jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh",0,0,"",shellscript,tab +50,295912,"slurm/jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh",962,0,"",shellscript,selection_mouse +51,306332,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --job-name=train_dyn_default_breakout_longer\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\n# tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\n\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518843\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518843\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --image_height=10 \\n --restore-ckpt \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=3e-5 \\n --log_image_interval=500 \\n --log_checkpoint_interval=5 \\n --dyna_type=maskgit \\n --log \\n --name=breakout-dyn-default-$slurm_job_id \\n --tags dyn breakout default \\n --entity instant-uv \\n --project jafar \\n --patch_size 4 \\n --lam_patch_size 4 \\n --warmup_steps 100 \\n --wsd_decay_steps 1000 \\n --num_steps 5000 \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --lam_checkpoint $lam_checkpoint \\n --val_interval 500 \\n --eval_full_frame \\n",shellscript,tab +52,328612,"TERMINAL",0,0,"g",,terminal_output +53,328754,"TERMINAL",0,0,"i",,terminal_output +54,328755,"TERMINAL",0,0,"t",,terminal_output +55,328945,"TERMINAL",0,0," ",,terminal_output +56,329108,"TERMINAL",0,0,"b",,terminal_output +57,329120,"TERMINAL",0,0,"r",,terminal_output +58,329282,"TERMINAL",0,0,"a",,terminal_output +59,329390,"TERMINAL",0,0,"nc",,terminal_output 
+60,329517,"TERMINAL",0,0,"h",,terminal_output +61,329778,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\r",,terminal_output +62,329901,"TERMINAL",0,0," action-mapper\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n* dev\r\n dont-let-tf-see-gpu\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n metrics-logging-for-dynamics-model\r\n monkey-patch\r\n new-arch-sampling\r\n preprocess_video\r\n refactor-full-frame-val-loss\r\n refactor-tmp\r\n remove-restore-branching\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-ali-branch\r\n sample-from-different-topologies\r\n sampling-startframe-indexing-fix\r\n speedup-tfrecord-preprocessing\r\n train_lam_coinrun_ablation_wsd_3e-6_28747\r\n val-loss\r\n",,terminal_output +63,329965,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +64,337142,"TERMINAL",0,0,"bash",,terminal_focus +65,337847,"TERMINAL",0,0,"srun",,terminal_focus +66,381465,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +67,381466,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2068,0,"",shellscript,selection_mouse +68,381585,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2067,0,"",shellscript,selection_command +69,391787,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --job-name=train_dyn_default_breakout_longer\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\n\nenv | grep 
SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=3e-5 \\n --log_image_interval=250 \\n --log_checkpoint_interval=250 \\n --dyna_type=maskgit \\n --log \\n --name=breakout-dyn-default-gt-actions-$slurm_job_id \\n --tags dyn breakout default \\n --entity instant-uv \\n --project jafar \\n --patch_size 4 \\n --lam_patch_size 4 \\n --warmup_steps 100 \\n --wsd_decay_steps 1000 \\n --num_steps 5000 \\n --use_gt_actions \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --val_interval 750 \\n --eval_full_frame \\n",shellscript,tab +70,395358,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"",shellscript,tab +71,399565,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +72,401440,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"",shellscript,tab +73,402253,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1455,0,"",shellscript,selection_mouse +74,402264,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1454,0,"",shellscript,selection_command +75,402779,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1514,0,"",shellscript,selection_mouse +76,403575,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1483,0,"",shellscript,selection_mouse +77,413558,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +78,415475,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1863,0,"",shellscript,selection_mouse +79,416198,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1864,0,"",shellscript,selection_command +80,416593,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1863,1,"",shellscript,content +81,416690,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1862,1,"",shellscript,content +82,417030,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1861,1,"",shellscript,content +83,417583,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1861,0,"2",shellscript,content +84,417584,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1862,0,"",shellscript,selection_keyboard +85,417780,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1862,0,"5",shellscript,content +86,417781,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1863,0,"",shellscript,selection_keyboard +87,417843,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1863,0,"0",shellscript,content +88,417844,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1864,0,"",shellscript,selection_keyboard +89,418234,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1863,0,"",shellscript,selection_command +90,418430,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1894,0,"",shellscript,selection_command +91,418686,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1925,0,"",shellscript,selection_command +92,419172,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1937,0,"",shellscript,selection_command 
+93,419204,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1966,0,"",shellscript,selection_command +94,419265,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2014,0,"",shellscript,selection_command +95,419280,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2045,0,"",shellscript,selection_command +96,419290,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2067,0,"",shellscript,selection_command +97,419318,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2088,0,"",shellscript,selection_command +98,419354,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2113,0,"",shellscript,selection_command +99,419385,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2138,0,"",shellscript,selection_command +100,419418,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2167,0,"",shellscript,selection_command +101,419456,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2190,0,"",shellscript,selection_command +102,419554,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2219,0,"",shellscript,selection_command +103,419555,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2261,0,"",shellscript,selection_command +104,419556,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2305,0,"",shellscript,selection_command +105,419565,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2356,0,"",shellscript,selection_command +106,419733,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2391,0,"",shellscript,selection_command +107,420264,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2390,0,"",shellscript,selection_command +108,420468,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2389,0,"",shellscript,selection_command +109,420707,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2390,0,"",shellscript,selection_command +110,421997,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2389,1,"",shellscript,content +111,422101,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2388,1,"",shellscript,content +112,422367,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2387,1,"",shellscript,content +113,422858,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2387,0,"7",shellscript,content +114,422859,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2388,0,"",shellscript,selection_keyboard +115,423145,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2388,0,"5",shellscript,content +116,423146,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2389,0,"",shellscript,selection_keyboard +117,423177,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2389,0,"0",shellscript,content +118,423177,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2390,0,"",shellscript,selection_keyboard +119,423400,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2389,0,"",shellscript,selection_command +120,423571,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2350,0,"",shellscript,selection_command +121,423734,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2299,0,"",shellscript,selection_command 
+122,423876,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2255,0,"",shellscript,selection_command +123,424087,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2213,0,"",shellscript,selection_command +124,424598,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2190,0,"",shellscript,selection_command +125,424637,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2161,0,"",shellscript,selection_command +126,424658,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2136,0,"",shellscript,selection_command +127,424709,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2111,0,"",shellscript,selection_command +128,424738,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2088,0,"",shellscript,selection_command +129,424746,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2067,0,"",shellscript,selection_command +130,424823,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2042,0,"",shellscript,selection_command +131,424925,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",2008,0,"",shellscript,selection_command +132,424984,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1960,0,"",shellscript,selection_command +133,424984,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1937,0,"",shellscript,selection_command +134,425013,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1922,0,"",shellscript,selection_command +135,425013,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1888,0,"",shellscript,selection_command +136,425225,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1857,0,"",shellscript,selection_command +137,425530,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1858,0,"",shellscript,selection_command +138,425587,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1889,0,"",shellscript,selection_command +139,425901,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1890,0,"",shellscript,selection_command +140,426408,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1891,0,"",shellscript,selection_command +141,426444,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1892,0,"",shellscript,selection_command +142,426476,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1893,0,"",shellscript,selection_command +143,426504,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1894,0,"",shellscript,selection_command +144,426543,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1895,0,"",shellscript,selection_command +145,426561,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1896,0,"",shellscript,selection_command +146,426667,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1897,0,"",shellscript,selection_command +147,427447,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1897,0,"2",shellscript,content +148,427448,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1898,0,"",shellscript,selection_keyboard +149,427845,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1897,0,"",shellscript,selection_command 
+150,427968,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1898,0,"",shellscript,selection_command +151,428796,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1899,0,"",shellscript,selection_command +152,429440,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1899,0,"0",shellscript,content +153,429442,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1900,0,"",shellscript,selection_keyboard +154,429628,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1899,0,"",shellscript,selection_command +155,430004,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1927,0,"",shellscript,selection_command +156,430317,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1939,0,"",shellscript,selection_command +157,430565,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1927,0,"",shellscript,selection_command +158,430737,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1899,0,"",shellscript,selection_command +159,431150,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1865,0,"",shellscript,selection_command +160,432917,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"",shellscript,tab +161,437276,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +162,477871,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"",shellscript,tab +163,482190,"TERMINAL",0,0,"s",,terminal_output +164,482276,"TERMINAL",0,0,"h",,terminal_output +165,482383,"TERMINAL",0,0," ",,terminal_output +166,483262,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",,terminal_output +167,484811,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh ",,terminal_output +168,486987,"TERMINAL",0,0,"&",,terminal_output +169,487187,"TERMINAL",0,0,"&",,terminal_output +170,487468,"TERMINAL",0,0," ",,terminal_output +171,487779,"TERMINAL",0,0,"\",,terminal_output +172,488608,"TERMINAL",0,0,"\r\n[?2004l\r[?2004h> ",,terminal_output +173,490680,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +174,495220,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",,terminal_output +175,496026,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",,terminal_output +176,496472,"TERMINAL",0,0,"",,terminal_output +177,496529,"TERMINAL",0,0,"",,terminal_output +178,496617,"TERMINAL",0,0,"",,terminal_output +179,496680,"TERMINAL",0,0,"",,terminal_output +180,496890,"TERMINAL",0,0,"",,terminal_output +181,497049,"TERMINAL",0,0,"",,terminal_output +182,497216,"TERMINAL",0,0,"",,terminal_output +183,497536,"TERMINAL",0,0,"",,terminal_output +184,497913,"TERMINAL",0,0,"\r",,terminal_output +185,498495,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh\r",,terminal_output +186,498557,"TERMINAL",0,0,"hslurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh\r",,terminal_output +187,498621,"TERMINAL",0,0," slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh\r",,terminal_output +188,499024,"TERMINAL",0,0,"",,terminal_output +189,499193,"TERMINAL",0,0,"",,terminal_output +190,499373,"TERMINAL",0,0,"",,terminal_output +191,499536,"TERMINAL",0,0,"",,terminal_output 
+192,499697,"TERMINAL",0,0,"",,terminal_output +193,499871,"TERMINAL",0,0,"",,terminal_output +194,500040,"TERMINAL",0,0,"",,terminal_output +195,500226,"TERMINAL",0,0,"",,terminal_output +196,500460,"TERMINAL",0,0,"",,terminal_output +197,500567,"TERMINAL",0,0,"",,terminal_output +198,500747,"TERMINAL",0,0,"",,terminal_output +199,500925,"TERMINAL",0,0,"",,terminal_output +200,516726,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +201,516726,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1471,0,"",shellscript,selection_mouse +202,519771,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1471,106,"",shellscript,content +203,519810,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1470,0,"",shellscript,selection_command +204,520191,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1471,0,"",shellscript,selection_command +205,520708,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1470,0,"",shellscript,selection_command +206,521736,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1471,0,"",shellscript,selection_command +207,522267,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1471,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963",shellscript,content +208,524726,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"",shellscript,tab +209,526817,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1112,0,"",shellscript,selection_mouse +210,528012,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1112,106,"",shellscript,content +211,528059,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1111,0,"",shellscript,selection_command +212,528373,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1112,0,"",shellscript,selection_command +213,528830,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1112,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963",shellscript,content +214,530672,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +215,540389,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1349,0,"",shellscript,selection_mouse +216,541788,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1348,0,"",shellscript,selection_command +217,541998,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1349,0,"",shellscript,selection_command +218,543004,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1349,100,"",shellscript,content +219,543015,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1348,0,"",shellscript,selection_command +220,543369,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1349,0,"",shellscript,selection_command +221,543842,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1349,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518963",shellscript,content +222,544322,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1448,0,"",shellscript,selection_command 
+223,544633,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1333,0,"",shellscript,selection_command +224,544798,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1317,0,"",shellscript,selection_command +225,545130,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1085,248,"",shellscript,content +226,545677,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1084,0,"",shellscript,selection_command +227,546177,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1084,1,"",shellscript,content +228,546422,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1085,0,"",shellscript,selection_command +229,547377,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1084,0,"",shellscript,selection_command +230,548236,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1085,0,"\n",shellscript,content +231,548608,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1085,0,"# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\n# tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\n",shellscript,content +232,548637,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1317,0,"",shellscript,selection_command +233,549386,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1333,0,"",shellscript,selection_command +234,559920,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +235,559988,"TERMINAL",0,0,"#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_default_breakout_longer\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=10 \\r\n --image_width=10 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=120 \\r\n --init_lr=0 \\r\n --max_lr=3e-5 \\r\n --log_image_interval=250 \\r\n --log_checkpoint_interval=250 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=breakout-dyn-default-gt-actions-$slurm_job_id \\r\n --tags dyn breakout default \\r\n --entity instant-uv \\r\n --project jafar \\r\n --patch_size 4 \\r\n --lam_patch_size 4 \\r\n --warmup_steps 100 \\r\n 
--wsd_decay_steps 1000 \\r\n --num_steps 5000 \\r\n --use_gt_actions \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --val_interval 750 \\r\n --eval_full_frame \\r\n",,terminal_output +236,560191,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2405968\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1758710792\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1758714392\r\nSLURM_PMI2_SRUN_PORT=46725\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3519095\r\nSLURM_PTY_PORT=40611\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=52\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=131\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=46339\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3519095\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=46339\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +237,560316,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +238,567581,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3519095.0 task 0: running\r\n",,terminal_output +239,568001,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3519095.0\r\nsrun: forcing job termination\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 13, in \r\n import optax\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/optax/__init__.py"", line 23, in \r\n from optax import contrib\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/optax/contrib/__init__.py"", line 19, in \r\n from optax.contrib._acprop import acprop\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/optax/contrib/_acprop.py"", line 27, in \r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n from optax._src import combine\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/optax/_src/combine.py"", line 17, in \r\n from optax.transforms import _combining\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/optax/transforms/__init__.py"", line 19, in \r\n from optax.transforms._accumulation import ema\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib/python3.10/site-packages/optax/transforms/_accumulation.py"", line 27, in \r\nslurmstepd: error: *** STEP 3519095.0 ON hkn0402 CANCELLED AT 2025-09-24T12:52:58 ***\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +240,568896,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh && sh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",,terminal_output +241,572452,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"",shellscript,tab +242,577016,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1040,0,"",shellscript,selection_mouse +243,577996,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1040,0,"-",shellscript,content +244,577997,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1041,0,"",shellscript,selection_keyboard +245,578237,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1041,0,"g",shellscript,content +246,578238,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1042,0,"",shellscript,selection_keyboard +247,578355,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1042,0,"t",shellscript,content +248,578356,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1043,0,"",shellscript,selection_keyboard +249,578482,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1043,0,"-",shellscript,content +250,578482,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1044,0,"",shellscript,selection_keyboard +251,578676,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1044,0,"a",shellscript,content +252,578677,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1045,0,"",shellscript,selection_keyboard +253,578860,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1045,0,"c",shellscript,content +254,578861,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1046,0,"",shellscript,selection_keyboard +255,579087,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1046,0,"t",shellscript,content +256,579088,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1047,0,"",shellscript,selection_keyboard +257,579193,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1047,0,"i",shellscript,content +258,579194,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1048,0,"",shellscript,selection_keyboard +259,579310,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1048,0,"o",shellscript,content +260,579311,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1049,0,"",shellscript,selection_keyboard +261,579423,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1049,0,"n",shellscript,content +262,579424,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1050,0,"",shellscript,selection_keyboard 
+263,579513,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1050,0,"s",shellscript,content +264,579514,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1051,0,"",shellscript,selection_keyboard +265,579922,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1050,0,"",shellscript,selection_command +266,583240,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_default_breakout_longer\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=10 \\r\n --image_width=10 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=120 \\r\n --init_lr=0 \\r\n --max_lr=3e-5 \\r\n --log_image_interval=250 \\r\n --log_checkpoint_interval=250 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=breakout-dyn-default-gt-actions-$slurm_job_id \\r\n --tags dyn breakout default \\r\n --entity instant-uv \\r\n --project jafar \\r\n --patch_size 4 \\r\n --lam_patch_size 4 \\r\n --warmup_steps 100 \\r\n --wsd_decay_steps 1000 \\r\n --num_steps 5000 \\r\n --use_gt_actions \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --val_interval 750 \\r\n --eval_full_frame \\r\n",,terminal_output 
+267,583352,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2405968\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1758710792\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1758714392\r\nSLURM_PMI2_SRUN_PORT=46725\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3519095\r\nSLURM_PTY_PORT=40611\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=52\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=131\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=46339\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3519095\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=46339\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +268,583465,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +269,594863,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 24, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py"", line 568, in \r\n optimizer: nnx.ModelAndOptimizer,\r\nAttributeError: module 'flax.nnx' has no attribute 'ModelAndOptimizer'\r\n",,terminal_output +270,595015,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +271,668531,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"",shellscript,tab +272,668532,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",632,0,"",shellscript,selection_mouse +273,669294,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",608,0,"",shellscript,selection_command +274,670411,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",608,0,"#",shellscript,content +275,670412,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",609,0,"",shellscript,selection_keyboard +276,670483,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",609,0," ",shellscript,content +277,670484,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",610,0,"",shellscript,selection_keyboard +278,671048,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",609,0,"",shellscript,selection_command 
+279,671988,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +280,673210,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh && sh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",,terminal_output +281,673318,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_default_breakout_longer\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=10 \\r\n --image_width=10 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=120 \\r\n --init_lr=0 \\r\n --max_lr=3e-5 \\r\n --log_image_interval=250 \\r\n --log_checkpoint_interval=250 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=breakout-dyn-default-gt-actions-$slurm_job_id \\r\n --tags dyn breakout default \\r\n --entity instant-uv \\r\n --project jafar \\r\n --patch_size 4 \\r\n --lam_patch_size 4 \\r\n --warmup_steps 100 \\r\n --wsd_decay_steps 1000 \\r\n --num_steps 5000 \\r\n --use_gt_actions \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --val_interval 750 \\r\n --eval_full_frame \\r\n",,terminal_output 
+282,673446,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2405968\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1758710792\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1758714392\r\nSLURM_PMI2_SRUN_PORT=46725\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3519095\r\nSLURM_PTY_PORT=40611\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=52\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=131\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=46339\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3519095\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=46339\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +283,673570,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +284,675595,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 24, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py"", line 568, in \r\n optimizer: nnx.ModelAndOptimizer,\r\nAttributeError: module 'flax.nnx' has no attribute 'ModelAndOptimizer'\r\n",,terminal_output +285,675773,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +286,677183,"TERMINAL",0,0,"",,terminal_output +287,677244,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh && sh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",,terminal_output +288,678834,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_default_breakout_longer\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source 
.venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=10 \\r\n --image_width=10 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=120 \\r\n --init_lr=0 \\r\n --max_lr=3e-5 \\r\n --log_image_interval=250 \\r\n --log_checkpoint_interval=250 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=breakout-dyn-default-gt-actions-$slurm_job_id \\r\n --tags dyn breakout default \\r\n --entity instant-uv \\r\n --project jafar \\r\n --patch_size 4 \\r\n --lam_patch_size 4 \\r\n --warmup_steps 100 \\r\n --wsd_decay_steps 1000 \\r\n --num_steps 5000 \\r\n --use_gt_actions \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --val_interval 750 \\r\n --eval_full_frame \\r\n",,terminal_output +289,678952,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2405968\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1758710792\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1758714392\r\nSLURM_PMI2_SRUN_PORT=46725\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3519095\r\nSLURM_PTY_PORT=40611\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=52\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=131\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=46339\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3519095\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=46339\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +290,679065,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +291,681078,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 24, in \r\n from genie import Genie, restore_genie_components\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py"", line 568, in \r\n optimizer: nnx.ModelAndOptimizer,\r\nAttributeError: module 'flax.nnx' has no attribute 'ModelAndOptimizer'\r\n",,terminal_output +292,681233,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +293,689514,"TERMINAL",0,0,"de",,terminal_output +294,689722,"TERMINAL",0,0,"a",,terminal_output +295,689814,"TERMINAL",0,0,"c",,terminal_output +296,689935,"TERMINAL",0,0,"t",,terminal_output +297,690114,"TERMINAL",0,0,"i",,terminal_output +298,690238,"TERMINAL",0,0,"v",,terminal_output +299,690314,"TERMINAL",0,0,"a",,terminal_output +300,690406,"TERMINAL",0,0,"z",,terminal_output +301,690513,"TERMINAL",0,0,"e",,terminal_output +302,690668,"TERMINAL",0,0,"\r\n[?2004l\rbash: deactivaze: command not found...\r\n",,terminal_output +303,691756,"TERMINAL",0,0,"]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +304,692070,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +305,692417,"TERMINAL",0,0,"deactivaze",,terminal_output +306,692720,"TERMINAL",0,0,"",,terminal_output +307,692986,"TERMINAL",0,0,"",,terminal_output +308,693545,"TERMINAL",0,0,"te",,terminal_output +309,693662,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h[tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +310,694212,"TERMINAL",0,0,"s",,terminal_output +311,694369,"TERMINAL",0,0,"ou",,terminal_output +312,694430,"TERMINAL",0,0,"r",,terminal_output +313,694651,"TERMINAL",0,0,"c",,terminal_output +314,694718,"TERMINAL",0,0,"e",,terminal_output +315,694834,"TERMINAL",0,0," ",,terminal_output +316,695073,"TERMINAL",0,0,"v",,terminal_output +317,695275,"TERMINAL",0,0,"env_3_11/",,terminal_output +318,695740,"TERMINAL",0,0,"",,terminal_output +319,696229,"TERMINAL",0,0,"b",,terminal_output +320,696291,"TERMINAL",0,0,"in/",,terminal_output +321,696682,"TERMINAL",0,0,"a",,terminal_output +322,696746,"TERMINAL",0,0,"c",,terminal_output +323,696929,"TERMINAL",0,0,"tivate",,terminal_output +324,697440,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +325,698400,"TERMINAL",0,0,"source venv_3_11/bin/activate",,terminal_output +326,698589,"TERMINAL",0,0,"deactivate",,terminal_output +327,699727,"TERMINAL",0,0,"ze",,terminal_output +328,700183,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh && sh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",,terminal_output +329,703658,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_default_breakout_longer\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source 
.venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=10 \\r\n --image_width=10 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=120 \\r\n --init_lr=0 \\r\n --max_lr=3e-5 \\r\n --log_image_interval=250 \\r\n --log_checkpoint_interval=250 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=breakout-dyn-default-gt-actions-$slurm_job_id \\r\n --tags dyn breakout default \\r\n --entity instant-uv \\r\n --project jafar \\r\n --patch_size 4 \\r\n --lam_patch_size 4 \\r\n --warmup_steps 100 \\r\n --wsd_decay_steps 1000 \\r\n --num_steps 5000 \\r\n --use_gt_actions \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --val_interval 750 \\r\n --eval_full_frame \\r\n",,terminal_output +330,703782,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2405968\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1758710792\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1758714392\r\nSLURM_PMI2_SRUN_PORT=46725\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3519095\r\nSLURM_PTY_PORT=40611\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=52\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=131\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=46339\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3519095\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=46339\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +331,703926,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +332,718011,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +333,723435,"TERMINAL",0,0,"Counting all components: ['action_embed', 'dynamics', 'tokenizer']\r\n",,terminal_output 
+334,723598,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +335,723598,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1596,0,"",shellscript,selection_mouse +336,723756,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +337,724441,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.22.0\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250924_125533-ueytnxmm\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run breakout-dyn-default-gt-actions-3519095\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/ueytnxmm\r\n",,terminal_output +338,725114,"TERMINAL",0,0,"Parameter counts:\r\n{'action_embed': 192, 'dynamics': 26555904, 'tokenizer': 33750256, 'total': 60306352}\r\n",,terminal_output +339,750676,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +340,796802,"TERMINAL",0,0,"Total memory size: 1.7 GB, Output size: 0.7 GB, Temp size: 1.0 GB, Argument size: 0.7 GB, Host temp size: 0.0 GB.\r\nFLOPs: 3.479e+10, Bytes: 3.040e+10 (28.3 GB), Intensity: 1.1 FLOPs/byte\r\nStarting training from step 0...\r\n",,terminal_output +341,797257,"TERMINAL",0,0,"\r\nMemstats: After params initialized:\r\n\tUsing (GB) 0.84 / 38.7 (2.170543%) on cuda:0\r\n",,terminal_output +342,847094,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=1] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=1] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +343,847555,"TERMINAL",0,0,"Saved checkpoint at step 250\r\n",,terminal_output +344,873806,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=2] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=2] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +345,874209,"TERMINAL",0,0,"Saved checkpoint at step 500\r\n",,terminal_output +346,900308,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +347,957052,"TERMINAL",0,0,"Step 750, validation loss: 0.013664108701050282\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=3] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=3] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +348,957326,"TERMINAL",0,0,"Saved checkpoint at step 750\r\n",,terminal_output +349,1009687,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=4] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=4] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +350,1010094,"TERMINAL",0,0,"Saved checkpoint at step 1000\r\n",,terminal_output +351,1011632,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000500) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +352,1037199,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=5] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=5] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +353,1037539,"TERMINAL",0,0,"Saved checkpoint at step 1250\r\n",,terminal_output +354,1039583,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +355,1063935,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +356,1084539,"TERMINAL",0,0,"Step 1500, validation loss: 0.013969812542200089\r\n",,terminal_output +357,1084600,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=6] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=6] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +358,1085257,"TERMINAL",0,0,"Saved checkpoint at step 1500\r\n",,terminal_output +359,1087001,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000750 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/000750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +360,1112492,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=7] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=7] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +361,1112757,"TERMINAL",0,0,"Saved checkpoint at step 1750\r\n",,terminal_output +362,1114804,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +363,1140010,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=8] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=8] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +364,1140192,"TERMINAL",0,0,"Saved checkpoint at step 2000\r\n",,terminal_output +365,1142397,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +366,1167384,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +367,1188067,"TERMINAL",0,0,"Step 2250, validation loss: 0.0015086758648976684\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=9] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=9] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +368,1188374,"TERMINAL",0,0,"Saved checkpoint at step 2250\r\n",,terminal_output +369,1190474,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002250 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +370,1215310,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=10] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=10] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +371,1215919,"TERMINAL",0,0,"Saved checkpoint at step 2500\r\n",,terminal_output +372,1217522,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +373,1243468,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=11] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=11] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +374,1243654,"TERMINAL",0,0,"Saved checkpoint at step 2750\r\n",,terminal_output +375,1245906,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +376,1271420,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +377,1292013,"TERMINAL",0,0,"Step 3000, validation loss: 0.000494364881888032\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=12] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=12] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +378,1292423,"TERMINAL",0,0,"Saved checkpoint at step 3000\r\n",,terminal_output +379,1294502,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002250 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +380,1320061,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=13] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=13] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +381,1320596,"TERMINAL",0,0,"Saved checkpoint at step 3250\r\n",,terminal_output +382,1322303,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +383,1347829,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=14] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=14] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +384,1348224,"TERMINAL",0,0,"Saved checkpoint at step 3500\r\n",,terminal_output +385,1349843,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +386,1374923,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +387,1395549,"TERMINAL",0,0,"Step 3750, validation loss: 0.00034804645110853016\r\n",,terminal_output +388,1395609,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=15] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +389,1395670,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=15] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +390,1395960,"TERMINAL",0,0,"Saved checkpoint at step 3750\r\n",,terminal_output +391,1398196,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003500 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003750) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +392,1423283,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=16] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=16] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +393,1423807,"TERMINAL",0,0,"Saved checkpoint at step 4000\r\n",,terminal_output +394,1425445,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003750 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003750) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +395,1451145,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=17] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +396,1451211,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=17] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +397,1451648,"TERMINAL",0,0,"Saved checkpoint at step 4250\r\n",,terminal_output +398,1453080,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003750) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +399,1478890,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +400,1499586,"TERMINAL",0,0,"Step 4500, validation loss: 0.00025157633353956044\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=18] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=18] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +401,1499825,"TERMINAL",0,0,"Saved checkpoint at step 4500\r\n",,terminal_output +402,1501976,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/003750) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +403,1526924,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=19] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=19] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +404,1527509,"TERMINAL",0,0,"Saved checkpoint at step 4750\r\n",,terminal_output +405,1529269,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004500 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +406,1554562,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=20] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=20] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +407,1554867,"TERMINAL",0,0,"Saved checkpoint at step 5000\r\n",,terminal_output +408,1556060,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/005000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/005000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004250 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095/004250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +409,1560426,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run breakout-dyn-default-gt-actions-3519095 at: https://wandb.ai/instant-uv/jafar/runs/ueytnxmm\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250924_125533-ueytnxmm/logs\r\n",,terminal_output +410,1561218,"TERMINAL",0,0,"W0924 13:09:31.306710 2406999 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": CANCELLED: CANCELLED\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:1, grpc_message:""CANCELLED""} [type.googleapis.com/tensorflow.DerivedStatus='']\r\n",,terminal_output +411,1562756,"TERMINAL",0,0,"#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_default_breakout_longer\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\n# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\r\n# tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518963\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=10 \\r\n --restore-ckpt \\r\n --image_width=10 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=120 \\r\n --init_lr=0 \\r\n --max_lr=3e-5 \\r\n --log_image_interval=250 \\r\n --log_checkpoint_interval=250 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=breakout-dyn-default-$slurm_job_id \\r\n --tags dyn breakout default \\r\n --entity instant-uv \\r\n --project jafar \\r\n --patch_size 4 \\r\n --lam_patch_size 4 \\r\n --warmup_steps 100 \\r\n --wsd_decay_steps 1000 \\r\n --num_steps 5000 \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --lam_checkpoint $lam_checkpoint \\r\n 
--val_interval 750 \\r\n --eval_full_frame \\r\n",,terminal_output +412,1562929,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2405968\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1758710792\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1758714392\r\nSLURM_PMI2_SRUN_PORT=46725\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3519095\r\nSLURM_PTY_PORT=40611\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=52\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=131\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=46339\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3519095\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=46339\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +413,1563165,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +414,1566702,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +415,1572686,"TERMINAL",0,0,"Counting all components: ['dynamics', 'lam', 'tokenizer']\r\n",,terminal_output +416,1572961,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +417,1573841,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.22.0\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250924_130943-jmq6921a\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run breakout-dyn-default-3519095\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/jmq6921a\r\n",,terminal_output +418,1574220,"TERMINAL",0,0,"Parameter counts:\r\n{'dynamics': 26555904, 'lam': 16900976, 'tokenizer': 33750256, 'total': 77207136}\r\n",,terminal_output +419,1578942,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 791, in \r\n main(args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 446, in main\r\n restore_or_initialize_components(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 312, in restore_or_initialize_components\r\n restored = checkpoint_manager.restore(\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1628, in restore\r\n raise FileNotFoundError(f'No steps found in {self.directory}.')\r\nFileNotFoundError: No steps found in /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095.\r\n",,terminal_output +420,1579548,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run breakout-dyn-default-3519095 at: https://wandb.ai/instant-uv/jafar/runs/jmq6921a\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250924_130943-jmq6921a/logs\r\n",,terminal_output +421,1579658,"TERMINAL",0,0,"W0924 13:09:49.758065 2412337 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": CANCELLED: CANCELLED\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""CANCELLED"", grpc_status:1} [type.googleapis.com/tensorflow.DerivedStatus='']\r\n",,terminal_output +422,1580168,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0402 jasmine]$ ",,terminal_output +423,2789745,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +424,2789746,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1718,0,"",shellscript,selection_mouse +425,2789844,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1717,0,"",shellscript,selection_command +426,2791779,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1698,21,"",shellscript,content +427,2791861,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",1702,0,"",shellscript,selection_command +428,2794602,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"",shellscript,tab +429,2816740,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh && sh 
slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",,terminal_output +430,2817354,"TERMINAL",0,0,"",,terminal_output +431,2818137,"TERMINAL",0,0,"\r",,terminal_output +432,2818416,"TERMINAL",0,0,"",,terminal_output +433,2818987,"TERMINAL",0,0,"",,terminal_output +434,2819114,"TERMINAL",0,0,"\r\n\r",,terminal_output +435,2820361,"TERMINAL",0,0,"",,terminal_output +436,2821071,"TERMINAL",0,0,"sh\r\n\rlurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",,terminal_output +437,2821860,"TERMINAL",0,0,"\rh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh\r\n\rlurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh\r",,terminal_output +438,2822073,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_default_breakout_longer\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\n# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\r\n# tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518963\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=10 \\r\n --image_width=10 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=120 \\r\n --init_lr=0 \\r\n --max_lr=3e-5 \\r\n --log_image_interval=250 \\r\n --log_checkpoint_interval=250 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=breakout-dyn-default-$slurm_job_id \\r\n --tags dyn breakout default \\r\n --entity instant-uv \\r\n --project jafar \\r\n --patch_size 4 \\r\n --lam_patch_size 4 \\r\n --warmup_steps 100 \\r\n --wsd_decay_steps 1000 \\r\n --num_steps 5000 \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --lam_checkpoint $lam_checkpoint \\r\n --val_interval 750 \\r\n --eval_full_frame \\r\n",,terminal_output 
+439,2822190,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=2405968\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0402\r\nSLURM_JOB_START_TIME=1758710792\r\nSLURM_STEP_NODELIST=hkn0402\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1758714392\r\nSLURM_PMI2_SRUN_PORT=46725\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3519095\r\nSLURM_PTY_PORT=40611\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=52\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0402\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=131\r\nSLURM_NODELIST=hkn0402\r\nSLURM_SRUN_COMM_PORT=46339\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3519095\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0402\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=46339\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0402\r\n",,terminal_output +440,2822349,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +441,2822839,"TERMINAL",0,0,"bash",,terminal_focus +442,2823886,"TERMINAL",0,0,"queue",,terminal_command +443,2823963,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Wed Sep 24 13:30:34 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3519095 dev_accel interact tum_cte0 R44:02\t 1 hkn0402",,terminal_output +444,2824747,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +445,2824904,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +446,2830504,"TERMINAL",0,0,"Counting all components: ['dynamics', 'lam', 'tokenizer']\r\n",,terminal_output +447,2830911,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +448,2831638,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.22.0\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250924_133040-xzf417ru\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run breakout-dyn-default-3519095\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/xzf417ru\r\n",,terminal_output +449,2831812,"TERMINAL",0,0,"Parameter counts:\r\n{'dynamics': 26555904, 'lam': 16900976, 'tokenizer': 33750256, 'total': 77207136}\r\n",,terminal_output +450,2834272,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. 
Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +451,2905358,"TERMINAL",0,0,"Total memory size: 2.5 GB, Output size: 0.9 GB, Temp size: 1.7 GB, Argument size: 0.9 GB, Host temp size: 0.0 GB.\r\nFLOPs: 2.706e+11, Bytes: 4.687e+10 (43.7 GB), Intensity: 5.8 FLOPs/byte\r\nStarting training from step 0...\r\n",,terminal_output +452,2905847,"TERMINAL",0,0,"\r\nMemstats: After params initialized:\r\n\tUsing (GB) 1.1 / 38.7 (2.842377%) on cuda:0\r\n",,terminal_output +453,2978529,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=1] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +454,2987567,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=1] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +455,2988380,"TERMINAL",0,0,"Saved checkpoint at step 250\r\n",,terminal_output +456,3026385,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=2] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=2] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +457,3026987,"TERMINAL",0,0,"Saved checkpoint at step 500\r\n",,terminal_output +458,3064471,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +459,3126010,"TERMINAL",0,0,"Step 750, validation loss: 0.0105203902348876\r\n",,terminal_output +460,3126067,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=3] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=3] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +461,3126628,"TERMINAL",0,0,"Saved checkpoint at step 750\r\n",,terminal_output +462,3201378,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=4] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=4] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +463,3201786,"TERMINAL",0,0,"Saved checkpoint at step 1000\r\n",,terminal_output +464,3204154,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000500) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +465,3240903,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=5] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=5] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +466,3241315,"TERMINAL",0,0,"Saved checkpoint at step 1250\r\n",,terminal_output +467,3243668,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000500 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +468,3280355,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +469,3301627,"TERMINAL",0,0,"Step 1500, validation loss: 0.0016179573722183704\r\n",,terminal_output +470,3301682,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=6] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=6] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +471,3302140,"TERMINAL",0,0,"Saved checkpoint at step 1500\r\n",,terminal_output +472,3304084,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/000750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +473,3341383,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=7] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=7] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +474,3341783,"TERMINAL",0,0,"Saved checkpoint at step 1750\r\n",,terminal_output +475,3343746,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +476,3381105,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=8] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=8] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +477,3381518,"TERMINAL",0,0,"Saved checkpoint at step 2000\r\n",,terminal_output +478,3383328,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +479,3420491,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +480,3441714,"TERMINAL",0,0,"Step 2250, validation loss: 0.0007954408647492528\r\n",,terminal_output +481,3441780,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=9] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=9] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +482,3442632,"TERMINAL",0,0,"Saved checkpoint at step 2250\r\n",,terminal_output +483,3444526,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +484,3481545,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=10] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=10] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +485,3482261,"TERMINAL",0,0,"Saved checkpoint at step 2500\r\n",,terminal_output +486,3484211,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +487,3521585,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=11] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=11] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +488,3522245,"TERMINAL",0,0,"Saved checkpoint at step 2750\r\n",,terminal_output +489,3524044,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002750 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +490,3561317,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +491,3582620,"TERMINAL",0,0,"Step 3000, validation loss: 0.00035759308957494795\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=12] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=12] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +492,3583034,"TERMINAL",0,0,"Saved checkpoint at step 3000\r\n",,terminal_output +493,3585259,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +494,3622305,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=13] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=13] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +495,3622758,"TERMINAL",0,0,"Saved checkpoint at step 3250\r\n",,terminal_output +496,3624601,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +497,3662285,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=14] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=14] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +498,3662692,"TERMINAL",0,0,"Saved checkpoint at step 3500\r\n",,terminal_output +499,3664433,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002750 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +500,3701812,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +501,3723186,"TERMINAL",0,0,"Step 3750, validation loss: 0.0008963419240899384\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=15] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=15] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +502,3723828,"TERMINAL",0,0,"Saved checkpoint at step 3750\r\n",,terminal_output +503,3725615,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003750) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +504,3763460,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=16] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=16] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +505,3764175,"TERMINAL",0,0,"Saved checkpoint at step 4000\r\n",,terminal_output +506,3765914,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/004000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/004000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519095/003750) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +507,3793922,"TERMINAL",0,0,"salloc: Job 3519095 has exceeded its time limit and its allocation has been revoked.\n",,terminal_output +508,3794026,"TERMINAL",0,0,"srun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3519095.6 ON hkn0402 CANCELLED AT 2025-09-24T13:46:44 DUE TO TIME LIMIT ***\r\nslurmstepd: error: *** STEP 3519095.interactive ON hkn0402 CANCELLED AT 2025-09-24T13:46:44 DUE TO TIME LIMIT ***\r\nsrun: forcing job termination\r\nTerminated\r\n]0;tum_cte0515@hkn0402:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0402 jasmine]$ srun: got SIGCONT\r\nsrun: forcing job termination\r\n",,terminal_output +509,3794327,"TERMINAL",0,0,"W0924 13:46:44.328219 2414124 preemption_notifier.cc:89] SIGTERM caught at 2025-09-24T13:46:44.328219402+02:00\r\n",,terminal_output +510,3796087,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised 
exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +511,3796173,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +512,3796670,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nERROR:absl:Processing Failed. Shutting down.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +513,3797400,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +514,3797519,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nTraceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 791, in \r\n main(args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 629, in main\r\n for batch in dataloader_train:\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 586, in \r\n dataloader_train = (\r\n ^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/grain/_src/python/data_loader.py"", line 497, in __next__\r\n result_record = next(self._iterator)\r\n 
^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/grain/_src/python/data_loader.py"", line 393, in _iterator_with_context\r\n yield from it\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/grain/_src/python/grain_pool.py"", line 812, in __next__\r\n raise element\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/grain/_src/python/grain_pool.py"", line 636, in _process_elements_in_grain_pool\r\n for element in g_pool:\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/grain/_src/python/grain_pool.py"", line 521, in __next__\r\n raise RuntimeError(\r\nRuntimeError: Grain worker process 3 was terminated unexpectedly with exit code -15. Search the logs above for the source of the crash.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +540,3801272,"TERMINAL",0,0,"WARNING:asyncio:socket.send() 
raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +541,3801348,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +542,3801524,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +543,3801642,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +544,3801705,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +545,3801839,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() 
raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +546,3801906,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +547,3801964,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +548,3802028,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +549,3802094,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +550,3802158,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +551,3802281,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +552,3802386,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +553,3802444,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +554,3802672,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised 
exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +555,3802724,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +556,3802853,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +557,3802969,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +558,3803088,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +559,3803211,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised 
exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +560,3803424,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised 
exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised 
exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +561,3803708,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised 
exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +562,3803754,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +563,3803892,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised 
exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +564,3804149,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised 
exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +565,3804258,"TERMINAL",0,0,"WARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\nW0924 13:46:54.316249 2414161 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""Cancelling all calls""}\r\nWARNING:asyncio:socket.send() raised exception.\r\nWARNING:asyncio:socket.send() raised exception.\r\n",,terminal_output +566,3804725,"TERMINAL",0,0,"/usr/lib64/python3.11/multiprocessing/resource_tracker.py:254: UserWarning: resource_tracker: There appear to be 66 leaked shared_memory objects to clean up at shutdown\r\n warnings.warn('resource_tracker: There appear to be %d '\r\n",,terminal_output +567,3805131,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Exited with exit code 1\r\n",,terminal_output +568,3823659,"TERMINAL",0,0,"srun: error: hkn0402: task 0: Killed\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +569,4425955,"TERMINAL",0,0,"bash",,terminal_focus +570,4429939,"TERMINAL",0,0,"queue",,terminal_command +571,4430004,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Wed Sep 24 13:57:20 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)",,terminal_output +572,4430812,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +573,4451980,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +574,4452049,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3519258\r\nsalloc: job 3519258 queued and waiting for resources\r\n",,terminal_output +575,4454404,"TERMINAL",0,0,"bash",,terminal_focus +576,4455889,"TERMINAL",0,0,"idling",,terminal_command 
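The crash signature above is worth decoding: the flood of WARNING:asyncio:socket.send() raised exception. comes from an asyncio transport (most plausibly wandb's background sync process) still writing to a peer that the dying SLURM step has already torn down; the WatchJobStateAsync ... UNAVAILABLE: Cancelling all calls line is JAX's coordination service being cancelled mid-call; and the 66 leaked shared_memory objects point at dataloader worker processes that never exited cleanly. A minimal sketch for keeping the warning flood out of the SLURM logs while debugging the underlying exit (an assumed mitigation, not something the recording does):

import logging

# The repeated message is emitted on the stdlib "asyncio" logger at
# WARNING level (visible in the "WARNING:asyncio:" prefix), so raising
# that logger's threshold silences the flood without hiding the actual
# GRPC/coordination errors logged elsewhere.
logging.getLogger("asyncio").setLevel(logging.ERROR)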
+577,4455957,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Wed Sep 24 13:57:45 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 223 nodes idle\rPartition dev_accelerated:\t 0 nodes idle\rPartition accelerated:\t 2 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 1 nodes idle\rPartition large:\t 3 nodes idle\rPartition accelerated-h200:\t 2 nodes idle",,terminal_output +578,4456982,"TERMINAL",0,0,"7",,terminal_output +579,4457463,"TERMINAL",0,0,"salloc",,terminal_focus +580,4458010,"TERMINAL",0,0,"8",,terminal_output +581,4458741,"TERMINAL",0,0,"^",,terminal_output +582,4459089,"TERMINAL",0,0,"9",,terminal_output +583,4459579,"TERMINAL",0,0,"^C",,terminal_output +584,4460082,"TERMINAL",0,0,"50",,terminal_output +585,4460145,"TERMINAL",0,0,"^Csalloc: Job allocation 3519258 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +586,4461145,"TERMINAL",0,0,"1",,terminal_output +587,4462149,"TERMINAL",0,0,"2",,terminal_output +588,4463190,"TERMINAL",0,0,"3",,terminal_output +589,4464229,"TERMINAL",0,0,"4",,terminal_output +590,4465288,"TERMINAL",0,0,"5",,terminal_output +591,4465753,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +592,4465840,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3519259\r\nsalloc: job 3519259 queued and waiting for resources\r\n",,terminal_output +593,4466301,"TERMINAL",0,0,"6",,terminal_output +594,4467338,"TERMINAL",0,0,"7",,terminal_output +595,4468393,"TERMINAL",0,0,"8",,terminal_output +596,4469514,"TERMINAL",0,0,"9",,terminal_output +597,4470459,"TERMINAL",0,0,"8:00",,terminal_output +598,4471577,"TERMINAL",0,0,"1",,terminal_output +599,4471670,"TERMINAL",0,0,"^Csalloc: Job allocation 3519259 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +600,4472524,"TERMINAL",0,0,"2",,terminal_output +601,4473607,"TERMINAL",0,0,"3",,terminal_output +602,4474618,"TERMINAL",0,0,"4",,terminal_output +603,4475625,"TERMINAL",0,0,"5",,terminal_output +604,4476752,"TERMINAL",0,0,"6",,terminal_output +605,4477496,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +606,4477611,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3519260\r\nsalloc: job 3519260 queued and waiting for resources\r\n",,terminal_output +607,4477716,"TERMINAL",0,0,"7",,terminal_output +608,4478835,"TERMINAL",0,0,"8",,terminal_output +609,4479861,"TERMINAL",0,0,"9",,terminal_output +610,4480837,"TERMINAL",0,0,"10",,terminal_output +611,4481905,"TERMINAL",0,0,"1",,terminal_output +612,4482963,"TERMINAL",0,0,"2",,terminal_output +613,4483850,"TERMINAL",0,0,"^Csalloc: Job allocation 3519260 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +614,4483948,"TERMINAL",0,0,"4",,terminal_output +615,4484990,"TERMINAL",0,0,"58",,terminal_output +616,4486028,"TERMINAL",0,0,"6",,terminal_output +617,4487076,"TERMINAL",0,0,"7",,terminal_output +618,4488165,"TERMINAL",0,0,"8",,terminal_output +619,4489172,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=accelerated-h100 --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +620,4489227,"TERMINAL",0,0,"9",,terminal_output +621,4489243,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3519262\r\nsalloc: job 3519262 queued 
and waiting for resources\r\n",,terminal_output +622,4490181,"TERMINAL",0,0,"20",,terminal_output +623,4491230,"TERMINAL",0,0,"1",,terminal_output +624,4492264,"TERMINAL",0,0,"2",,terminal_output +625,4493216,"TERMINAL",0,0,"^Csalloc: Job allocation 3519262 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +626,4493352,"TERMINAL",0,0,"3",,terminal_output +627,4494383,"TERMINAL",0,0,"4",,terminal_output +628,4495396,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +629,4495493,"TERMINAL",0,0,"5",,terminal_output +630,4495493,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3519264\r\nsalloc: job 3519264 queued and waiting for resources\r\n",,terminal_output +631,4496408,"TERMINAL",0,0,"6",,terminal_output +632,4497468,"TERMINAL",0,0,"7",,terminal_output +633,4498482,"TERMINAL",0,0,"8",,terminal_output +634,4499515,"TERMINAL",0,0,"9",,terminal_output +635,4500550,"TERMINAL",0,0,"30",,terminal_output +636,4501643,"TERMINAL",0,0,"1",,terminal_output +637,4502690,"TERMINAL",0,0,"2",,terminal_output +638,4503689,"TERMINAL",0,0,"3",,terminal_output +639,4504737,"TERMINAL",0,0,"4",,terminal_output +640,4505730,"TERMINAL",0,0,"5",,terminal_output +641,4506786,"TERMINAL",0,0,"6",,terminal_output +642,4507813,"TERMINAL",0,0,"7",,terminal_output +643,4508937,"TERMINAL",0,0,"8",,terminal_output +644,4509890,"TERMINAL",0,0,"9",,terminal_output +645,4510988,"TERMINAL",0,0,"40",,terminal_output +646,4512007,"TERMINAL",0,0,"2",,terminal_output +647,4512998,"TERMINAL",0,0,"3",,terminal_output +648,4514039,"TERMINAL",0,0,"4",,terminal_output +649,4515080,"TERMINAL",0,0,"5",,terminal_output +650,4516104,"TERMINAL",0,0,"6",,terminal_output +651,4517256,"TERMINAL",0,0,"7",,terminal_output +652,4518265,"TERMINAL",0,0,"8",,terminal_output +653,4519225,"TERMINAL",0,0,"9",,terminal_output +654,4520251,"TERMINAL",0,0,"50",,terminal_output +655,4521290,"TERMINAL",0,0,"1",,terminal_output +656,4522354,"TERMINAL",0,0,"2",,terminal_output +657,4523363,"TERMINAL",0,0,"3",,terminal_output +658,4524399,"TERMINAL",0,0,"4",,terminal_output +659,4525463,"TERMINAL",0,0,"5",,terminal_output +660,4526471,"TERMINAL",0,0,"6",,terminal_output +661,4527575,"TERMINAL",0,0,"7",,terminal_output +662,4528545,"TERMINAL",0,0,"8",,terminal_output +663,4529622,"TERMINAL",0,0,"9",,terminal_output +664,4530693,"TERMINAL",0,0,"9:00",,terminal_output +665,4531671,"TERMINAL",0,0,"1",,terminal_output +666,4532798,"TERMINAL",0,0,"2",,terminal_output +667,4533819,"TERMINAL",0,0,"3",,terminal_output +668,4534843,"TERMINAL",0,0,"4",,terminal_output +669,4535803,"TERMINAL",0,0,"5",,terminal_output +670,4536895,"TERMINAL",0,0,"6",,terminal_output +671,4537933,"TERMINAL",0,0,"7",,terminal_output +672,4538930,"TERMINAL",0,0,"salloc: job 3519264 has been allocated resources\r\nsalloc: Granted job allocation 3519264\r\n",,terminal_output +673,4538957,"TERMINAL",0,0,"8 9",,terminal_output +674,4539071,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +675,4539976,"TERMINAL",0,0,"10",,terminal_output +676,4541093,"TERMINAL",0,0,"1",,terminal_output +677,4542116,"TERMINAL",0,0,"2",,terminal_output +678,4542986,"TERMINAL",0,0,"s",,terminal_output +679,4543095,"TERMINAL",0,0,"33",,terminal_output +680,4543185,"TERMINAL",0,0,"uo",,terminal_output +681,4543960,"TERMINAL",0,0,"´",,terminal_output +682,4544120,"TERMINAL",0,0,"4",,terminal_output +683,4544299,"TERMINAL",0,0," 
",,terminal_output +684,4544436,"TERMINAL",0,0," ",,terminal_output +685,4544553,"TERMINAL",0,0," ",,terminal_output +686,4544860,"TERMINAL",0,0,"o",,terminal_output +687,4545046,"TERMINAL",0,0,"u",,terminal_output +688,4545108,"TERMINAL",0,0,"r",,terminal_output +689,4545175,"TERMINAL",0,0,"5",,terminal_output +690,4545204,"TERMINAL",0,0,"c",,terminal_output +691,4545402,"TERMINAL",0,0,"e",,terminal_output +692,4545464,"TERMINAL",0,0," ",,terminal_output +693,4545586,"TERMINAL",0,0,".",,terminal_output +694,4545730,"TERMINAL",0,0,"v",,terminal_output +695,4545801,"TERMINAL",0,0,"\t",,terminal_output +696,4546100,"TERMINAL",0,0,"b",,terminal_output +697,4546207,"TERMINAL",0,0,"6",,terminal_output +698,4546887,"TERMINAL",0,0,"e",,terminal_output +699,4547348,"TERMINAL",0,0,"7",,terminal_output +700,4547582,"TERMINAL",0,0," ",,terminal_output +701,4547818,"TERMINAL",0,0,"i",,terminal_output +702,4547881,"TERMINAL",0,0,"\t",,terminal_output +703,4548260,"TERMINAL",0,0,"8",,terminal_output +704,4548951,"TERMINAL",0,0,"a",,terminal_output +705,4549062,"TERMINAL",0,0,"c",,terminal_output +706,4549299,"TERMINAL",0,0,"9",,terminal_output +707,4549719,"TERMINAL",0,0,"\t",,terminal_output +708,4550335,"TERMINAL",0,0,"20",,terminal_output +709,4551433,"TERMINAL",0,0,"1",,terminal_output +710,4552409,"TERMINAL",0,0,"2",,terminal_output +711,4553483,"TERMINAL",0,0,"3",,terminal_output +712,4554505,"TERMINAL",0,0,"4",,terminal_output +713,4555519,"TERMINAL",0,0,"5",,terminal_output +714,4556593,"TERMINAL",0,0,"6",,terminal_output +715,4557680,"TERMINAL",0,0,"7",,terminal_output +716,4558703,"TERMINAL",0,0,"8",,terminal_output +717,4559728,"TERMINAL",0,0,"9",,terminal_output +718,4560708,"TERMINAL",0,0,"30",,terminal_output +719,4561776,"TERMINAL",0,0,"1",,terminal_output +720,4562798,"TERMINAL",0,0,"2",,terminal_output +721,4563927,"TERMINAL",0,0,"3",,terminal_output +722,4564951,"TERMINAL",0,0,"4",,terminal_output +723,4565974,"TERMINAL",0,0,"5",,terminal_output +724,4566099,"TERMINAL",0,0,"salloc: Nodes hkn0401 are ready for job\r\n",,terminal_output +725,4566275,"TERMINAL",0,0,"source .v\tbi\tac\t",,terminal_output +726,4567037,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ source .venv/bin/activate",,terminal_output +727,4567038,"TERMINAL",0,0,"7",,terminal_output +728,4568049,"TERMINAL",0,0,"8",,terminal_output +729,4568615,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +730,4569047,"TERMINAL",0,0,"9",,terminal_output +731,4570068,"TERMINAL",0,0,"40",,terminal_output +732,4571122,"TERMINAL",0,0,"111",,terminal_output +733,4572231,"TERMINAL",0,0,"2",,terminal_output +734,4573077,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +735,4573280,"TERMINAL",0,0,"3",,terminal_output +736,4574292,"TERMINAL",0,0,"4",,terminal_output +737,4575256,"TERMINAL",0,0,"5",,terminal_output +738,4576269,"TERMINAL",0,0,"6",,terminal_output +739,4576968,"TERMINAL",0,0,"sh",,terminal_output +740,4577098,"TERMINAL",0,0," ",,terminal_output +741,4577301,"TERMINAL",0,0,"7",,terminal_output +742,4577429,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",,terminal_output +743,4577812,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH 
--partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_default_breakout_longer\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\n# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\r\n# tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518963\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=10 \\r\n --image_width=10 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=120 \\r\n --init_lr=0 \\r\n --max_lr=3e-5 \\r\n --log_image_interval=250 \\r\n --log_checkpoint_interval=250 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=breakout-dyn-default-$slurm_job_id \\r\n --tags dyn breakout default \\r\n --entity instant-uv \\r\n --project jafar \\r\n --patch_size 4 \\r\n --lam_patch_size 4 \\r\n --warmup_steps 100 \\r\n --wsd_decay_steps 1000 \\r\n --num_steps 5000 \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --lam_checkpoint $lam_checkpoint \\r\n --val_interval 750 \\r\n --eval_full_frame \\r\n",,terminal_output 
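The script just echoed above drives train_dynamics.py with a warmup-stable-decay learning-rate shape (--warmup_steps 100, --wsd_decay_steps 1000, --num_steps 5000, --max_lr=3e-5). A hedged optax sketch of what those flags plausibly translate to; the repo's actual schedule helper may differ in details:

import optax

def wsd_schedule(init_lr=0.0, max_lr=3e-5, warmup_steps=100,
                 decay_steps=1000, num_steps=5000, decay_end=0.0):
    # Linear warmup, constant plateau, then linear decay over the final
    # decay_steps -- the usual reading of a "wsd" schedule.
    stable_end = num_steps - decay_steps
    return optax.join_schedules(
        schedules=[
            optax.linear_schedule(init_lr, max_lr, warmup_steps),
            optax.constant_schedule(max_lr),
            optax.linear_schedule(max_lr, decay_end, decay_steps),
        ],
        boundaries=[warmup_steps, stable_end],
    )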
+744,4577939,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3429943\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1758715149\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1758718749\r\nSLURM_PMI2_SRUN_PORT=36025\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3519264\r\nSLURM_PTY_PORT=38047\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=52\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=131\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=36657\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3519264\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=36657\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +745,4578074,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +746,4578340,"TERMINAL",0,0,"8",,terminal_output +747,4579375,"TERMINAL",0,0,"9",,terminal_output +748,4580459,"TERMINAL",0,0,"50",,terminal_output +749,4581452,"TERMINAL",0,0,"1",,terminal_output +750,4582488,"TERMINAL",0,0,"2",,terminal_output +751,4583745,"TERMINAL",0,0,"3",,terminal_output +752,4584609,"TERMINAL",0,0,"4",,terminal_output +753,4585598,"TERMINAL",0,0,"5",,terminal_output +754,4586643,"TERMINAL",0,0,"6",,terminal_output +755,4587682,"TERMINAL",0,0,"7",,terminal_output +756,4588768,"TERMINAL",0,0,"8",,terminal_output +757,4589841,"TERMINAL",0,0,"9",,terminal_output +758,4590392,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/train_dynamics.py"", line 24, in \r\n from genie import Genie, restore_genie_components\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py"", line 568, in \r\n optimizer: nnx.ModelAndOptimizer,\r\nAttributeError: module 'flax.nnx' has no attribute 'ModelAndOptimizer'\r\n",,terminal_output +759,4590672,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +760,4590791,"TERMINAL",0,0,"4:00:00",,terminal_output +761,4591910,"TERMINAL",0,0,"1",,terminal_output +762,4592934,"TERMINAL",0,0,"2",,terminal_output +763,4593931,"TERMINAL",0,0,"3",,terminal_output +764,4594926,"TERMINAL",0,0,"4",,terminal_output +765,4595992,"TERMINAL",0,0,"6",,terminal_output +766,4597125,"TERMINAL",0,0,"7",,terminal_output +767,4598135,"TERMINAL",0,0,"8",,terminal_output +768,4599152,"TERMINAL",0,0,"9",,terminal_output +769,4600121,"TERMINAL",0,0,"10",,terminal_output 
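The first launch dies with AttributeError: module 'flax.nnx' has no attribute 'ModelAndOptimizer': the active virtualenv ships a Flax release from before that name existed, which is why the session switches to venv_3_11 shortly afterwards and the run then proceeds. A hypothetical compatibility shim (older Flax exposes the same role under nnx.Optimizer; this is an assumption, not the repo's fix):

import flax.nnx as nnx

# Fall back to the older name so annotations like
# `optimizer: nnx.ModelAndOptimizer` in genie.py resolve on both old
# and new Flax releases.
if not hasattr(nnx, "ModelAndOptimizer"):
    nnx.ModelAndOptimizer = nnx.Optimizer  # assumed equivalent role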
+770,4601157,"TERMINAL",0,0,"1",,terminal_output +771,4602207,"TERMINAL",0,0,"2",,terminal_output +772,4603254,"TERMINAL",0,0,"3",,terminal_output +773,4604278,"TERMINAL",0,0,"4",,terminal_output +774,4605317,"TERMINAL",0,0,"5",,terminal_output +775,4606426,"TERMINAL",0,0,"6",,terminal_output +776,4607390,"TERMINAL",0,0,"78",,terminal_output +777,4608470,"TERMINAL",0,0,"8",,terminal_output +778,4609501,"TERMINAL",0,0,"9",,terminal_output +779,4610515,"TERMINAL",0,0,"20",,terminal_output +780,4611645,"TERMINAL",0,0,"1",,terminal_output +781,4612670,"TERMINAL",0,0,"2",,terminal_output +782,4613744,"TERMINAL",0,0,"3",,terminal_output +783,4614716,"TERMINAL",0,0,"4",,terminal_output +784,4615688,"TERMINAL",0,0,"5",,terminal_output +785,4616724,"TERMINAL",0,0,"6",,terminal_output +786,4617776,"TERMINAL",0,0,"7",,terminal_output +787,4618802,"TERMINAL",0,0,"8",,terminal_output +788,4619942,"TERMINAL",0,0,"9",,terminal_output +789,4620973,"TERMINAL",0,0,"30",,terminal_output +790,4621991,"TERMINAL",0,0,"1",,terminal_output +791,4623012,"TERMINAL",0,0,"3",,terminal_output +792,4624034,"TERMINAL",0,0,"4",,terminal_output +793,4625071,"TERMINAL",0,0,"5",,terminal_output +794,4626197,"TERMINAL",0,0,"6",,terminal_output +795,4627424,"TERMINAL",0,0,"7",,terminal_output +796,4628253,"TERMINAL",0,0,"8",,terminal_output +797,4629259,"TERMINAL",0,0,"9",,terminal_output +798,4630284,"TERMINAL",0,0,"40",,terminal_output +799,4631250,"TERMINAL",0,0,"1",,terminal_output +800,4632289,"TERMINAL",0,0,"2",,terminal_output +801,4633324,"TERMINAL",0,0,"3",,terminal_output +802,4634360,"TERMINAL",0,0,"4",,terminal_output +803,4635458,"TERMINAL",0,0,"5",,terminal_output +804,4636527,"TERMINAL",0,0,"6",,terminal_output +805,4637470,"TERMINAL",0,0,"7",,terminal_output +806,4638576,"TERMINAL",0,0,"8",,terminal_output +807,4639545,"TERMINAL",0,0,"9",,terminal_output +808,4640585,"TERMINAL",0,0,"50",,terminal_output +809,4641648,"TERMINAL",0,0,"1",,terminal_output +810,4642673,"TERMINAL",0,0,"2",,terminal_output +811,4643797,"TERMINAL",0,0,"3",,terminal_output +812,4644823,"TERMINAL",0,0,"4",,terminal_output +813,4645823,"TERMINAL",0,0,"5",,terminal_output +814,4646875,"TERMINAL",0,0,"6",,terminal_output +815,4647843,"TERMINAL",0,0,"7",,terminal_output +816,4648918,"TERMINAL",0,0,"8",,terminal_output +817,4649912,"TERMINAL",0,0,"9",,terminal_output +818,4650967,"TERMINAL",0,0,"1:01",,terminal_output +819,4651986,"TERMINAL",0,0,"2",,terminal_output +820,4653038,"TERMINAL",0,0,"3",,terminal_output +821,4654142,"TERMINAL",0,0,"4",,terminal_output +822,4655164,"TERMINAL",0,0,"5",,terminal_output +823,4655243,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",,terminal_output +824,4655495,"TERMINAL",0,0,"\r",,terminal_output +825,4656130,"TERMINAL",0,0,"6",,terminal_output +826,4656843,"TERMINAL",0,0,"c",,terminal_output +827,4656906,"TERMINAL",0,0,"o",,terminal_output +828,4657044,"TERMINAL",0,0,"n",,terminal_output +829,4657160,"TERMINAL",0,0,"d",,terminal_output +830,4657227,"TERMINAL",0,0,"7",,terminal_output +831,4657370,"TERMINAL",0,0,"a",,terminal_output +832,4657412,"TERMINAL",0,0," ",,terminal_output +833,4657678,"TERMINAL",0,0,"d",,terminal_output +834,4657731,"TERMINAL",0,0,"e",,terminal_output +835,4657984,"TERMINAL",0,0,"a",,terminal_output +836,4658037,"TERMINAL",0,0,"c",,terminal_output +837,4658211,"TERMINAL",0,0,"89",,terminal_output +838,4658712,"TERMINAL",0,0,"",,terminal_output +839,4658822,"TERMINAL",0,0,"",,terminal_output 
+840,4659242,"TERMINAL",0,0,"",,terminal_output +841,4659245,"TERMINAL",0,0,"9",,terminal_output +842,4659435,"TERMINAL",0,0,"",,terminal_output +843,4659660,"TERMINAL",0,0,"",,terminal_output +844,4660225,"TERMINAL",0,0,"d",,terminal_output +845,4660290,"TERMINAL",0,0,"10",,terminal_output +846,4660333,"TERMINAL",0,0,"e",,terminal_output +847,4660396,"TERMINAL",0,0,"a",,terminal_output +848,4660572,"TERMINAL",0,0,"c",,terminal_output +849,4660634,"TERMINAL",0,0,"t",,terminal_output +850,4660818,"TERMINAL",0,0,"i",,terminal_output +851,4661028,"TERMINAL",0,0,"v",,terminal_output +852,4661076,"TERMINAL",0,0,"a",,terminal_output +853,4661154,"TERMINAL",0,0,"t",,terminal_output +854,4661271,"TERMINAL",0,0,"e",,terminal_output +855,4661378,"TERMINAL",0,0,"1",,terminal_output +856,4661475,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +857,4661651,"TERMINAL",0,0,"s",,terminal_output +858,4661714,"TERMINAL",0,0,"o",,terminal_output +859,4661827,"TERMINAL",0,0,"u",,terminal_output +860,4661891,"TERMINAL",0,0,"r",,terminal_output +861,4662080,"TERMINAL",0,0,"c",,terminal_output +862,4662201,"TERMINAL",0,0,"e",,terminal_output +863,4662264,"TERMINAL",0,0," ",,terminal_output +864,4662395,"TERMINAL",0,0,"v",,terminal_output +865,4662395,"TERMINAL",0,0,"2",,terminal_output +866,4662457,"TERMINAL",0,0,"env_3_11/",,terminal_output +867,4662998,"TERMINAL",0,0,"b",,terminal_output +868,4663060,"TERMINAL",0,0,"in/",,terminal_output +869,4663320,"TERMINAL",0,0,"a",,terminal_output +870,4663382,"TERMINAL",0,0,"c",,terminal_output +871,4663397,"TERMINAL",0,0,"33",,terminal_output +872,4663545,"TERMINAL",0,0,"tivate",,terminal_output +873,4663905,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +874,4664225,"TERMINAL",0,0,"source venv_3_11/bin/activate",,terminal_output +875,4664349,"TERMINAL",0,0,"deactivate",,terminal_output +876,4664455,"TERMINAL",0,0,"4",,terminal_output +877,4664913,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",,terminal_output +878,4665466,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=1\r\n#SBATCH --time=48:00:00\r\n#SBATCH --partition=accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\r\n#SBATCH --job-name=train_dyn_default_breakout_longer\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\n# source .venv/bin/activate\r\n\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\n# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\r\n# 
tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518963\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=10 \\r\n --image_width=10 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=120 \\r\n --init_lr=0 \\r\n --max_lr=3e-5 \\r\n --log_image_interval=250 \\r\n --log_checkpoint_interval=250 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=breakout-dyn-default-$slurm_job_id \\r\n --tags dyn breakout default \\r\n --entity instant-uv \\r\n --project jafar \\r\n --patch_size 4 \\r\n --lam_patch_size 4 \\r\n --warmup_steps 100 \\r\n --wsd_decay_steps 1000 \\r\n --num_steps 5000 \\r\n --data_dir $array_records_dir_train \\r\n --val_data_dir $array_records_dir_val \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --lam_checkpoint $lam_checkpoint \\r\n --val_interval 750 \\r\n --eval_full_frame \\r\n",,terminal_output +879,4665480,"TERMINAL",0,0,"5",,terminal_output +880,4665587,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=3429943\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1758715149\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1758718749\r\nSLURM_PMI2_SRUN_PORT=36025\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3519264\r\nSLURM_PTY_PORT=38047\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.199\r\nSLURM_PTY_WIN_ROW=52\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.199\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=131\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=36657\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1991.localdomain\r\nSLURM_JOB_ID=3519264\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=36657\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +881,4665733,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +882,4666496,"TERMINAL",0,0,"6",,terminal_output +883,4667536,"TERMINAL",0,0,"7",,terminal_output +884,4668578,"TERMINAL",0,0,"8",,terminal_output +885,4669355,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +886,4669706,"TERMINAL",0,0,"9",,terminal_output +887,4670658,"TERMINAL",0,0,"20",,terminal_output +888,4671688,"TERMINAL",0,0,"1",,terminal_output +889,4672777,"TERMINAL",0,0,"2",,terminal_output +890,4673904,"TERMINAL",0,0,"3",,terminal_output 
+891,4674826,"TERMINAL",0,0,"4",,terminal_output +892,4675528,"TERMINAL",0,0,"Counting all components: ['dynamics', 'lam', 'tokenizer']\r\n",,terminal_output +893,4675850,"TERMINAL",0,0,"5",,terminal_output +894,4675910,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +895,4676704,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.22.0\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250924_140125-hpcfm5xe\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run breakout-dyn-default-3519264\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/hpcfm5xe\r\n",,terminal_output +896,4676873,"TERMINAL",0,0,"6",,terminal_output +897,4676933,"TERMINAL",0,0,"Parameter counts:\r\n{'dynamics': 26555904, 'lam': 16900976, 'tokenizer': 33750256, 'total': 77207136}\r\n",,terminal_output +898,4677910,"TERMINAL",0,0,"7",,terminal_output +899,4678947,"TERMINAL",0,0,"9",,terminal_output +900,4679980,"TERMINAL",0,0,"30",,terminal_output +901,4681017,"TERMINAL",0,0,"1",,terminal_output +902,4681122,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +903,4682099,"TERMINAL",0,0,"2",,terminal_output +904,4683121,"TERMINAL",0,0,"3",,terminal_output +905,4684145,"TERMINAL",0,0,"4",,terminal_output +906,4685179,"TERMINAL",0,0,"5",,terminal_output +907,4686225,"TERMINAL",0,0,"6",,terminal_output +908,4687325,"TERMINAL",0,0,"7",,terminal_output +909,4688279,"TERMINAL",0,0,"8",,terminal_output +910,4689312,"TERMINAL",0,0,"9",,terminal_output +911,4690350,"TERMINAL",0,0,"40",,terminal_output +912,4691391,"TERMINAL",0,0,"1",,terminal_output +913,4692440,"TERMINAL",0,0,"2",,terminal_output +914,4693572,"TERMINAL",0,0,"3",,terminal_output +915,4694590,"TERMINAL",0,0,"4",,terminal_output +916,4695622,"TERMINAL",0,0,"5",,terminal_output +917,4696637,"TERMINAL",0,0,"6",,terminal_output +918,4697662,"TERMINAL",0,0,"7",,terminal_output +919,4698690,"TERMINAL",0,0,"8",,terminal_output +920,4699713,"TERMINAL",0,0,"9",,terminal_output +921,4700725,"TERMINAL",0,0,"50",,terminal_output +922,4701769,"TERMINAL",0,0,"1",,terminal_output +923,4702882,"TERMINAL",0,0,"2",,terminal_output +924,4703909,"TERMINAL",0,0,"3",,terminal_output +925,4704988,"TERMINAL",0,0,"4",,terminal_output +926,4705959,"TERMINAL",0,0,"5",,terminal_output +927,4707011,"TERMINAL",0,0,"7",,terminal_output +928,4707983,"TERMINAL",0,0,"8",,terminal_output +929,4709027,"TERMINAL",0,0,"9",,terminal_output +930,4710052,"TERMINAL",0,0,"2:00",,terminal_output +931,4711179,"TERMINAL",0,0,"1",,terminal_output +932,4712203,"TERMINAL",0,0,"2",,terminal_output +933,4713287,"TERMINAL",0,0,"3",,terminal_output +934,4714250,"TERMINAL",0,0,"4",,terminal_output +935,4715275,"TERMINAL",0,0,"5",,terminal_output +936,4716273,"TERMINAL",0,0,"6",,terminal_output +937,4717326,"TERMINAL",0,0,"7",,terminal_output 
+938,4718347,"TERMINAL",0,0,"8",,terminal_output +939,4719387,"TERMINAL",0,0,"9",,terminal_output +940,4720458,"TERMINAL",0,0,"10",,terminal_output +941,4721459,"TERMINAL",0,0,"1",,terminal_output +942,4722656,"TERMINAL",0,0,"2",,terminal_output +943,4723572,"TERMINAL",0,0,"3",,terminal_output +944,4724596,"TERMINAL",0,0,"4",,terminal_output +945,4725621,"TERMINAL",0,0,"5",,terminal_output +946,4726679,"TERMINAL",0,0,"6",,terminal_output +947,4727766,"TERMINAL",0,0,"7",,terminal_output +948,4728795,"TERMINAL",0,0,"88",,terminal_output +949,4729759,"TERMINAL",0,0,"9",,terminal_output +950,4730821,"TERMINAL",0,0,"20",,terminal_output +951,4731862,"TERMINAL",0,0,"1",,terminal_output +952,4732942,"TERMINAL",0,0,"2",,terminal_output +953,4733912,"TERMINAL",0,0,"3",,terminal_output +954,4735048,"TERMINAL",0,0,"5",,terminal_output +955,4735972,"TERMINAL",0,0,"6",,terminal_output +956,4737159,"TERMINAL",0,0,"7",,terminal_output +957,4738056,"TERMINAL",0,0,"8",,terminal_output +958,4739173,"TERMINAL",0,0,"9",,terminal_output +959,4740273,"TERMINAL",0,0,"30",,terminal_output +960,4741217,"TERMINAL",0,0,"1",,terminal_output +961,4742258,"TERMINAL",0,0,"2",,terminal_output +962,4743791,"TERMINAL",0,0,"3",,terminal_output +963,4744473,"TERMINAL",0,0,"4",,terminal_output +964,4745299,"TERMINAL",0,0,"5",,terminal_output +965,4746338,"TERMINAL",0,0,"6",,terminal_output +966,4747373,"TERMINAL",0,0,"7",,terminal_output +967,4748412,"TERMINAL",0,0,"8",,terminal_output +968,4749449,"TERMINAL",0,0,"9",,terminal_output +969,4750484,"TERMINAL",0,0,"40",,terminal_output +970,4751627,"TERMINAL",0,0,"1",,terminal_output +971,4752654,"TERMINAL",0,0,"2",,terminal_output +972,4753676,"TERMINAL",0,0,"3",,terminal_output +973,4754635,"TERMINAL",0,0,"4",,terminal_output +974,4755576,"TERMINAL",0,0,"Total memory size: 2.5 GB, Output size: 0.9 GB, Temp size: 1.7 GB, Argument size: 0.9 GB, Host temp size: 0.0 GB.\r\n",,terminal_output +975,4755637,"TERMINAL",0,0,"FLOPs: 1.075e+11, Bytes: 4.655e+10 (43.4 GB), Intensity: 2.3 FLOPs/byte\r\nStarting training from step 0...\r\n",,terminal_output +976,4755734,"TERMINAL",0,0,"5",,terminal_output +977,4756288,"TERMINAL",0,0,"\r\nMemstats: After params initialized:\r\n\tUsing (GB) 1.1 / 38.7 (2.842377%) on cuda:0\r\n",,terminal_output +978,4756750,"TERMINAL",0,0,"6",,terminal_output +979,4757749,"TERMINAL",0,0,"7",,terminal_output +980,4758777,"TERMINAL",0,0,"8",,terminal_output +981,4759815,"TERMINAL",0,0,"9",,terminal_output +982,4760945,"TERMINAL",0,0,"50",,terminal_output +983,4761968,"TERMINAL",0,0,"1",,terminal_output +984,4763098,"TERMINAL",0,0,"23",,terminal_output +985,4764068,"TERMINAL",0,0,"4",,terminal_output +986,4765106,"TERMINAL",0,0,"5",,terminal_output +987,4766139,"TERMINAL",0,0,"6",,terminal_output +988,4767190,"TERMINAL",0,0,"7",,terminal_output +989,4768218,"TERMINAL",0,0,"8",,terminal_output +990,4769267,"TERMINAL",0,0,"9",,terminal_output +991,4770289,"TERMINAL",0,0,"3:00",,terminal_output +992,4771326,"TERMINAL",0,0,"1",,terminal_output +993,4772363,"TERMINAL",0,0,"2",,terminal_output +994,4773440,"TERMINAL",0,0,"3",,terminal_output +995,4774462,"TERMINAL",0,0,"4",,terminal_output +996,4775476,"TERMINAL",0,0,"5",,terminal_output +997,4776612,"TERMINAL",0,0,"6",,terminal_output +998,4777642,"TERMINAL",0,0,"7",,terminal_output +999,4778661,"TERMINAL",0,0,"8",,terminal_output +1000,4779686,"TERMINAL",0,0,"9",,terminal_output +1001,4780670,"TERMINAL",0,0,"10",,terminal_output +1002,4781693,"TERMINAL",0,0,"1",,terminal_output 
+1003,4782733,"TERMINAL",0,0,"2",,terminal_output +1004,4783883,"TERMINAL",0,0,"332",,terminal_output +1005,4784890,"TERMINAL",0,0,"4",,terminal_output +1006,4785932,"TERMINAL",0,0,"5",,terminal_output +1007,4786955,"TERMINAL",0,0,"6",,terminal_output +1008,4787980,"TERMINAL",0,0,"7",,terminal_output +1009,4789006,"TERMINAL",0,0,"9",,terminal_output +1010,4790023,"TERMINAL",0,0,"20",,terminal_output +1011,4791041,"TERMINAL",0,0,"13",,terminal_output +1012,4792077,"TERMINAL",0,0,"2",,terminal_output +1013,4793203,"TERMINAL",0,0,"3",,terminal_output +1014,4794226,"TERMINAL",0,0,"4",,terminal_output +1015,4795207,"TERMINAL",0,0,"5",,terminal_output +1016,4796261,"TERMINAL",0,0,"6",,terminal_output +1017,4797297,"TERMINAL",0,0,"7",,terminal_output +1018,4798325,"TERMINAL",0,0,"8",,terminal_output +1019,4799346,"TERMINAL",0,0,"9",,terminal_output +1020,4800410,"TERMINAL",0,0,"30",,terminal_output +1021,4801407,"TERMINAL",0,0,"1",,terminal_output +1022,4802457,"TERMINAL",0,0,"2",,terminal_output +1023,4803483,"TERMINAL",0,0,"3",,terminal_output +1024,4804570,"TERMINAL",0,0,"4",,terminal_output +1025,4805564,"TERMINAL",0,0,"5",,terminal_output +1026,4806616,"TERMINAL",0,0,"6",,terminal_output +1027,4807748,"TERMINAL",0,0,"7",,terminal_output +1028,4808685,"TERMINAL",0,0,"8",,terminal_output +1029,4809814,"TERMINAL",0,0,"9",,terminal_output +1030,4810767,"TERMINAL",0,0,"40",,terminal_output +1031,4811784,"TERMINAL",0,0,"1",,terminal_output +1032,4812861,"TERMINAL",0,0,"2",,terminal_output +1033,4813851,"TERMINAL",0,0,"3",,terminal_output +1034,4814967,"TERMINAL",0,0,"4",,terminal_output +1035,4816047,"TERMINAL",0,0,"5",,terminal_output +1036,4817062,"TERMINAL",0,0,"7",,terminal_output +1037,4818093,"TERMINAL",0,0,"8",,terminal_output +1038,4819108,"TERMINAL",0,0,"9",,terminal_output +1039,4820132,"TERMINAL",0,0,"50",,terminal_output +1040,4821162,"TERMINAL",0,0,"1",,terminal_output +1041,4822152,"TERMINAL",0,0,"2",,terminal_output +1042,4823206,"TERMINAL",0,0,"3",,terminal_output +1043,4824236,"TERMINAL",0,0,"4",,terminal_output +1044,4825277,"TERMINAL",0,0,"5",,terminal_output +1045,4826381,"TERMINAL",0,0,"6",,terminal_output +1046,4827333,"TERMINAL",0,0,"7",,terminal_output +1047,4828368,"TERMINAL",0,0,"8",,terminal_output +1048,4829406,"TERMINAL",0,0,"9",,terminal_output +1049,4830462,"TERMINAL",0,0,"4:00",,terminal_output +1050,4831519,"TERMINAL",0,0,"1",,terminal_output +1051,4832623,"TERMINAL",0,0,"2",,terminal_output +1052,4833429,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=1] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=1] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1053,4833552,"TERMINAL",0,0,"3",,terminal_output +1054,4834370,"TERMINAL",0,0,"Saved checkpoint at step 250\r\n",,terminal_output +1055,4834675,"TERMINAL",0,0,"4",,terminal_output +1056,4835666,"TERMINAL",0,0,"5",,terminal_output +1057,4836722,"TERMINAL",0,0,"6",,terminal_output +1058,4837905,"train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 10000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = False\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 25000\n log_checkpoint_keep_period: int = 20000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = False\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n 
patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, 
ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n # del optimizer.model.tokenizer.vq.drop\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n # NOTE: We have to remove the (unused) tokenizer vq dropout due to flax.nnx lazily initializing modules.\n # Specifically, the first dynamics model checkpoint will contain the vq dropout module,\n # but the first full restore will fail due to nnx not initializing the module when\n # dropout is set to 0.0.\n # del 
optimizer.model.tokenizer.vq.drop\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask * ce_loss).sum() / mask.sum()\n acc = outputs[""token_logits""].argmax(-1) == outputs[""video_tokens""]\n acc = (mask * acc).sum() / mask.sum()\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_accuracy=acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, 
train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n return ce_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_F = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_F\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n 
metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n 
val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +1059,4838114,"TERMINAL",0,0,"7",,terminal_output +1060,4838910,"TERMINAL",0,0,"8",,terminal_output +1061,4839820,"TERMINAL",0,0,"9",,terminal_output +1062,4840807,"TERMINAL",0,0,"10",,terminal_output +1063,4841849,"TERMINAL",0,0,"1",,terminal_output +1064,4842243,"train_dynamics.py",5551,0,"",python,selection_mouse +1065,4842970,"TERMINAL",0,0,"2",,terminal_output +1066,4843894,"train_dynamics.py",5825,1,"u",python,selection_command +1067,4844021,"train_dynamics.py",11140,2,"us",python,selection_command 
+1068,4844035,"TERMINAL",0,0,"3",,terminal_output +1069,4844155,"train_dynamics.py",11140,3,"use",python,selection_command +1070,4845002,"TERMINAL",0,0,"5",,terminal_output +1071,4845116,"train_dynamics.py",17540,4,"use_",python,selection_command +1072,4845980,"train_dynamics.py",17540,5,"use_g",python,selection_command +1073,4846049,"TERMINAL",0,0,"6",,terminal_output +1074,4846128,"train_dynamics.py",17540,6,"use_gt",python,selection_command +1075,4846320,"train_dynamics.py",17540,7,"use_gt_",python,selection_command +1076,4846514,"train_dynamics.py",17540,8,"use_gt_a",python,selection_command +1077,4846683,"train_dynamics.py",17540,9,"use_gt_ac",python,selection_command +1078,4846849,"train_dynamics.py",17540,10,"use_gt_act",python,selection_command +1079,4846917,"train_dynamics.py",17540,11,"use_gt_acti",python,selection_command +1080,4846988,"train_dynamics.py",17540,12,"use_gt_actio",python,selection_command +1081,4847052,"TERMINAL",0,0,"77",,terminal_output +1082,4847158,"train_dynamics.py",17540,13,"use_gt_action",python,selection_command +1083,4847244,"train_dynamics.py",17540,14,"use_gt_actions",python,selection_command +1084,4848190,"TERMINAL",0,0,"832",,terminal_output +1085,4849215,"TERMINAL",0,0,"9",,terminal_output +1086,4850250,"TERMINAL",0,0,"20",,terminal_output +1087,4851217,"TERMINAL",0,0,"1",,terminal_output +1088,4852292,"TERMINAL",0,0,"2",,terminal_output +1089,4853277,"TERMINAL",0,0,"3",,terminal_output +1090,4854337,"TERMINAL",0,0,"4",,terminal_output +1091,4855349,"TERMINAL",0,0,"5",,terminal_output +1092,4856444,"TERMINAL",0,0,"6",,terminal_output +1093,4857419,"TERMINAL",0,0,"7",,terminal_output +1094,4858472,"TERMINAL",0,0,"8",,terminal_output +1095,4859489,"TERMINAL",0,0,"9",,terminal_output +1096,4860527,"TERMINAL",0,0,"30",,terminal_output +1097,4861606,"TERMINAL",0,0,"1",,terminal_output +1098,4862632,"TERMINAL",0,0,"2",,terminal_output +1099,4863656,"TERMINAL",0,0,"3",,terminal_output +1100,4864786,"TERMINAL",0,0,"4",,terminal_output +1101,4865709,"TERMINAL",0,0,"5",,terminal_output +1102,4866832,"TERMINAL",0,0,"6",,terminal_output +1103,4867858,"TERMINAL",0,0,"7",,terminal_output +1104,4868878,"TERMINAL",0,0,"8",,terminal_output +1105,4869900,"TERMINAL",0,0,"9",,terminal_output +1106,4870924,"TERMINAL",0,0,"40",,terminal_output +1107,4871948,"TERMINAL",0,0,"1",,terminal_output +1108,4872957,"TERMINAL",0,0,"3",,terminal_output +1109,4872984,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=2] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=2] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1110,4873690,"TERMINAL",0,0,"Saved checkpoint at step 500\r\n",,terminal_output +1111,4874099,"TERMINAL",0,0,"4",,terminal_output +1112,4875038,"TERMINAL",0,0,"5",,terminal_output +1113,4876150,"TERMINAL",0,0,"6",,terminal_output +1114,4877169,"TERMINAL",0,0,"7",,terminal_output +1115,4878195,"TERMINAL",0,0,"8",,terminal_output +1116,4879222,"TERMINAL",0,0,"9",,terminal_output +1117,4880221,"TERMINAL",0,0,"50",,terminal_output +1118,4881256,"TERMINAL",0,0,"1",,terminal_output +1119,4882405,"TERMINAL",0,0,"2",,terminal_output +1120,4883418,"TERMINAL",0,0,"3",,terminal_output +1121,4884435,"TERMINAL",0,0,"4",,terminal_output +1122,4885420,"TERMINAL",0,0,"5",,terminal_output +1123,4885836,"train_dynamics.py",17831,0,"",python,selection_mouse +1124,4886490,"TERMINAL",0,0,"6",,terminal_output +1125,4887524,"TERMINAL",0,0,"7",,terminal_output +1126,4888325,"train_dynamics.py",17589,0,"",python,selection_mouse +1127,4888515,"TERMINAL",0,0,"8",,terminal_output +1128,4888996,"train_dynamics.py",17545,0,"",python,selection_mouse +1129,4889545,"TERMINAL",0,0,"9",,terminal_output +1130,4890581,"TERMINAL",0,0,"5:00",,terminal_output +1131,4891617,"TERMINAL",0,0,"1",,terminal_output +1132,4892838,"TERMINAL",0,0,"2",,terminal_output +1133,4893689,"TERMINAL",0,0,"3",,terminal_output +1134,4894787,"TERMINAL",0,0,"4",,terminal_output +1135,4895762,"TERMINAL",0,0,"5",,terminal_output +1136,4896808,"TERMINAL",0,0,"6",,terminal_output +1137,4897857,"TERMINAL",0,0,"7",,terminal_output +1138,4898882,"TERMINAL",0,0,"8",,terminal_output +1139,4899907,"TERMINAL",0,0,"9",,terminal_output +1140,4901030,"TERMINAL",0,0,"11",,terminal_output +1141,4901984,"TERMINAL",0,0,"2",,terminal_output +1142,4903054,"TERMINAL",0,0,"3",,terminal_output +1143,4904055,"TERMINAL",0,0,"42",,terminal_output +1144,4905094,"TERMINAL",0,0,"53",,terminal_output +1145,4906215,"TERMINAL",0,0,"6",,terminal_output +1146,4907248,"TERMINAL",0,0,"7",,terminal_output +1147,4908300,"TERMINAL",0,0,"8",,terminal_output +1148,4909434,"TERMINAL",0,0,"9",,terminal_output +1149,4910345,"TERMINAL",0,0,"20",,terminal_output +1150,4911263,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +1151,4911327,"TERMINAL",0,0,"1",,terminal_output +1152,4912355,"TERMINAL",0,0,"2",,terminal_output +1153,4913390,"TERMINAL",0,0,"3",,terminal_output +1154,4914457,"TERMINAL",0,0,"4",,terminal_output +1155,4915467,"TERMINAL",0,0,"5",,terminal_output +1156,4916523,"TERMINAL",0,0,"6",,terminal_output +1157,4917534,"TERMINAL",0,0,"7",,terminal_output +1158,4918642,"TERMINAL",0,0,"8",,terminal_output +1159,4919606,"TERMINAL",0,0,"9",,terminal_output +1160,4920693,"TERMINAL",0,0,"30",,terminal_output +1161,4921714,"TERMINAL",0,0,"1",,terminal_output +1162,4922739,"TERMINAL",0,0,"2",,terminal_output +1163,4923754,"TERMINAL",0,0,"3",,terminal_output +1164,4924895,"TERMINAL",0,0,"4",,terminal_output +1165,4925911,"TERMINAL",0,0,"5",,terminal_output +1166,4926870,"TERMINAL",0,0,"6",,terminal_output +1167,4927915,"TERMINAL",0,0,"7",,terminal_output +1168,4928999,"TERMINAL",0,0,"9",,terminal_output +1169,4930041,"TERMINAL",0,0,"40",,terminal_output +1170,4931041,"TERMINAL",0,0,"1",,terminal_output +1171,4932163,"TERMINAL",0,0,"2",,terminal_output +1172,4933184,"TERMINAL",0,0,"3",,terminal_output +1173,4934208,"TERMINAL",0,0,"4",,terminal_output +1174,4935235,"TERMINAL",0,0,"5",,terminal_output +1175,4936205,"TERMINAL",0,0,"6",,terminal_output 
+1176,4937242,"TERMINAL",0,0,"7",,terminal_output +1177,4938406,"TERMINAL",0,0,"8",,terminal_output +1178,4939328,"TERMINAL",0,0,"9",,terminal_output +1179,4940354,"TERMINAL",0,0,"504",,terminal_output +1180,4941478,"TERMINAL",0,0,"16",,terminal_output +1181,4942503,"TERMINAL",0,0,"2",,terminal_output +1182,4943537,"TERMINAL",0,0,"3",,terminal_output +1183,4944510,"TERMINAL",0,0,"47",,terminal_output +1184,4945554,"TERMINAL",0,0,"5",,terminal_output +1185,4946614,"TERMINAL",0,0,"6",,terminal_output +1186,4947621,"TERMINAL",0,0,"7",,terminal_output +1187,4948657,"TERMINAL",0,0,"8",,terminal_output +1188,4949772,"TERMINAL",0,0,"9",,terminal_output +1189,4950736,"TERMINAL",0,0,"6:00",,terminal_output +1190,4951821,"TERMINAL",0,0,"1",,terminal_output +1191,4952846,"TERMINAL",0,0,"2",,terminal_output +1192,4953868,"TERMINAL",0,0,"3",,terminal_output +1193,4954879,"TERMINAL",0,0,"4",,terminal_output +1194,4956020,"TERMINAL",0,0,"5",,terminal_output +1195,4957043,"TERMINAL",0,0,"7",,terminal_output +1196,4958066,"TERMINAL",0,0,"8",,terminal_output +1197,4959095,"TERMINAL",0,0,"9",,terminal_output +1198,4960124,"TERMINAL",0,0,"10",,terminal_output +1199,4961117,"TERMINAL",0,0,"1",,terminal_output +1200,4962165,"TERMINAL",0,0,"2",,terminal_output +1201,4963291,"TERMINAL",0,0,"3",,terminal_output +1202,4964221,"TERMINAL",0,0,"4",,terminal_output +1203,4965337,"TERMINAL",0,0,"5",,terminal_output +1204,4966363,"TERMINAL",0,0,"6",,terminal_output +1205,4967385,"TERMINAL",0,0,"7",,terminal_output +1206,4968409,"TERMINAL",0,0,"8",,terminal_output +1207,4969440,"TERMINAL",0,0,"9",,terminal_output +1208,4970444,"TERMINAL",0,0,"20",,terminal_output +1209,4971482,"TERMINAL",0,0,"1",,terminal_output +1210,4972471,"TERMINAL",0,0,"Step 750, validation loss: 0.014559334143996239\r\n",,terminal_output +1211,4972516,"TERMINAL",0,0,"2",,terminal_output +1212,4972567,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=3] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=3] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1213,4972915,"TERMINAL",0,0,"Saved checkpoint at step 750\r\n",,terminal_output +1214,4973636,"TERMINAL",0,0,"3",,terminal_output +1215,4974593,"TERMINAL",0,0,"4",,terminal_output +1216,4975627,"TERMINAL",0,0,"5",,terminal_output +1217,4976665,"TERMINAL",0,0,"6",,terminal_output +1218,4977728,"TERMINAL",0,0,"7",,terminal_output +1219,4978755,"TERMINAL",0,0,"8",,terminal_output +1220,4979879,"TERMINAL",0,0,"9",,terminal_output +1221,4980814,"TERMINAL",0,0,"30",,terminal_output +1222,4981887,"TERMINAL",0,0,"1",,terminal_output +1223,4982951,"TERMINAL",0,0,"2",,terminal_output +1224,4983957,"TERMINAL",0,0,"3",,terminal_output +1225,4985003,"TERMINAL",0,0,"5",,terminal_output +1226,4985993,"TERMINAL",0,0,"6",,terminal_output +1227,4987174,"TERMINAL",0,0,"7",,terminal_output +1228,4988172,"TERMINAL",0,0,"8",,terminal_output +1229,4989197,"TERMINAL",0,0,"9",,terminal_output +1230,4990237,"TERMINAL",0,0,"40",,terminal_output +1231,4991275,"TERMINAL",0,0,"1",,terminal_output +1232,4992272,"TERMINAL",0,0,"2",,terminal_output +1233,4993292,"TERMINAL",0,0,"3",,terminal_output +1234,4994321,"TERMINAL",0,0,"4",,terminal_output +1235,4995343,"TERMINAL",0,0,"5",,terminal_output +1236,4996367,"TERMINAL",0,0,"6",,terminal_output +1237,4997493,"TERMINAL",0,0,"7",,terminal_output +1238,4998514,"TERMINAL",0,0,"8",,terminal_output +1239,4999504,"TERMINAL",0,0,"9",,terminal_output +1240,5000534,"TERMINAL",0,0,"50",,terminal_output +1241,5001545,"TERMINAL",0,0,"1",,terminal_output +1242,5002620,"TERMINAL",0,0,"2",,terminal_output +1243,5003620,"TERMINAL",0,0,"3",,terminal_output +1244,5004656,"TERMINAL",0,0,"4",,terminal_output +1245,5005729,"TERMINAL",0,0,"5",,terminal_output +1246,5006811,"TERMINAL",0,0,"6",,terminal_output +1247,5007834,"TERMINAL",0,0,"7",,terminal_output +1248,5008861,"TERMINAL",0,0,"8",,terminal_output +1249,5009881,"TERMINAL",0,0,"9",,terminal_output +1250,5010905,"TERMINAL",0,0,"7:00",,terminal_output +1251,5011929,"TERMINAL",0,0,"1",,terminal_output +1252,5012963,"TERMINAL",0,0,"3",,terminal_output +1253,5014081,"TERMINAL",0,0,"4",,terminal_output +1254,5015106,"TERMINAL",0,0,"5",,terminal_output +1255,5016130,"TERMINAL",0,0,"6",,terminal_output +1256,5017154,"TERMINAL",0,0,"7",,terminal_output +1257,5018179,"TERMINAL",0,0,"8",,terminal_output +1258,5019203,"TERMINAL",0,0,"9",,terminal_output +1259,5020223,"TERMINAL",0,0,"10",,terminal_output +1260,5021244,"TERMINAL",0,0,"1",,terminal_output +1261,5022281,"TERMINAL",0,0,"2",,terminal_output +1262,5023401,"TERMINAL",0,0,"3",,terminal_output +1263,5024425,"TERMINAL",0,0,"43",,terminal_output +1264,5025477,"TERMINAL",0,0,"54",,terminal_output +1265,5026574,"TERMINAL",0,0,"6",,terminal_output +1266,5027537,"TERMINAL",0,0,"7",,terminal_output +1267,5028543,"TERMINAL",0,0,"8",,terminal_output +1268,5029665,"TERMINAL",0,0,"9",,terminal_output +1269,5030736,"TERMINAL",0,0,"20",,terminal_output +1270,5031650,"TERMINAL",0,0,"1",,terminal_output +1271,5032717,"TERMINAL",0,0,"2",,terminal_output +1272,5033742,"TERMINAL",0,0,"3",,terminal_output +1273,5034876,"TERMINAL",0,0,"4",,terminal_output +1274,5035796,"TERMINAL",0,0,"5",,terminal_output +1275,5036832,"TERMINAL",0,0,"6",,terminal_output +1276,5037866,"TERMINAL",0,0,"7",,terminal_output +1277,5038964,"TERMINAL",0,0,"8",,terminal_output +1278,5039989,"TERMINAL",0,0,"30",,terminal_output +1279,5041027,"TERMINAL",0,0,"1",,terminal_output +1280,5042039,"TERMINAL",0,0,"2",,terminal_output 
+1281,5043056,"TERMINAL",0,0,"3",,terminal_output +1282,5044088,"TERMINAL",0,0,"4",,terminal_output +1283,5045211,"TERMINAL",0,0,"5",,terminal_output +1284,5046164,"TERMINAL",0,0,"6",,terminal_output +1285,5047262,"TERMINAL",0,0,"7",,terminal_output +1286,5047978,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=4] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=4] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1287,5048288,"TERMINAL",0,0,"8",,terminal_output +1288,5048288,"TERMINAL",0,0,"Saved checkpoint at step 1000\r\n",,terminal_output +1289,5049310,"TERMINAL",0,0,"9",,terminal_output +1290,5050333,"TERMINAL",0,0,"40",,terminal_output +1291,5050490,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000500 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000500) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1292,5051363,"TERMINAL",0,0,"1",,terminal_output +1293,5052481,"TERMINAL",0,0,"2",,terminal_output +1294,5053414,"TERMINAL",0,0,"3",,terminal_output +1295,5054564,"TERMINAL",0,0,"4",,terminal_output +1296,5055517,"TERMINAL",0,0,"5",,terminal_output +1297,5056586,"TERMINAL",0,0,"6",,terminal_output +1298,5057615,"TERMINAL",0,0,"77",,terminal_output +1299,5058630,"TERMINAL",0,0,"8",,terminal_output +1300,5059650,"TERMINAL",0,0,"9",,terminal_output +1301,5060684,"TERMINAL",0,0,"50",,terminal_output +1302,5061710,"TERMINAL",0,0,"1",,terminal_output +1303,5062826,"TERMINAL",0,0,"2",,terminal_output +1304,5063872,"TERMINAL",0,0,"3",,terminal_output +1305,5064823,"TERMINAL",0,0,"4",,terminal_output +1306,5065899,"TERMINAL",0,0,"5",,terminal_output +1307,5066919,"TERMINAL",0,0,"6",,terminal_output +1308,5067951,"TERMINAL",0,0,"8",,terminal_output +1309,5068976,"TERMINAL",0,0,"9",,terminal_output +1310,5070012,"TERMINAL",0,0,"8:00",,terminal_output +1311,5071049,"TERMINAL",0,0,"1",,terminal_output +1312,5072143,"TERMINAL",0,0,"2",,terminal_output +1313,5073166,"TERMINAL",0,0,"3",,terminal_output +1314,5074164,"TERMINAL",0,0,"4",,terminal_output +1315,5075200,"TERMINAL",0,0,"5",,terminal_output +1316,5076342,"TERMINAL",0,0,"6",,terminal_output +1317,5077274,"TERMINAL",0,0,"7",,terminal_output +1318,5078390,"TERMINAL",0,0,"8",,terminal_output +1319,5079417,"TERMINAL",0,0,"9",,terminal_output +1320,5080382,"TERMINAL",0,0,"10",,terminal_output +1321,5081430,"TERMINAL",0,0,"1",,terminal_output +1322,5082502,"TERMINAL",0,0,"2",,terminal_output +1323,5083533,"TERMINAL",0,0,"3",,terminal_output +1324,5084528,"TERMINAL",0,0,"4",,terminal_output +1325,5085565,"TERMINAL",0,0,"5",,terminal_output +1326,5086602,"TERMINAL",0,0,"6",,terminal_output +1327,5087637,"TERMINAL",0,0,"7",,terminal_output +1328,5087810,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=5] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=5] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1329,5088190,"TERMINAL",0,0,"Saved checkpoint at step 1250\r\n",,terminal_output +1330,5088734,"TERMINAL",0,0,"8",,terminal_output +1331,5089759,"TERMINAL",0,0,"9",,terminal_output +1332,5090358,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1333,5090754,"TERMINAL",0,0,"20",,terminal_output +1334,5091954,"TERMINAL",0,0,"1",,terminal_output +1335,5092839,"TERMINAL",0,0,"2",,terminal_output +1336,5094220,"TERMINAL",0,0,"3",,terminal_output +1337,5094978,"TERMINAL",0,0,"4",,terminal_output +1338,5096017,"TERMINAL",0,0,"5",,terminal_output +1339,5096331,"train_dynamics.py",0,0,"",python,tab +1340,5097016,"TERMINAL",0,0,"7",,terminal_output +1341,5098007,"TERMINAL",0,0,"8",,terminal_output +1342,5098878,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n 
tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n del genie.tokenizer.vq.drop\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.ModelAndOptimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = 
None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n gt = (\n gt_video[:, : recon_video_BSHWC.shape[1]]\n .clip(0, 1)\n .reshape(-1, *gt_video.shape[2:])\n )\n recon = recon_video_BSHWC.clip(0, 1).reshape(-1, *recon_video_BSHWC.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame :], recon[:, args.start_frame :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B = batch[""videos""].shape[0]\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(\n batch[""actions""][:, :-1], (B, args.seq_len - 1, 1)\n )\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +1343,5099167,"TERMINAL",0,0,"9",,terminal_output +1344,5100094,"TERMINAL",0,0,"30",,terminal_output +1345,5101147,"TERMINAL",0,0,"1",,terminal_output +1346,5102251,"TERMINAL",0,0,"2",,terminal_output +1347,5103279,"TERMINAL",0,0,"3",,terminal_output +1348,5104296,"TERMINAL",0,0,"4",,terminal_output +1349,5105324,"TERMINAL",0,0,"5",,terminal_output +1350,5106300,"TERMINAL",0,0,"6",,terminal_output +1351,5107374,"TERMINAL",0,0,"7",,terminal_output +1352,5108393,"TERMINAL",0,0,"8",,terminal_output +1353,5109420,"TERMINAL",0,0,"9",,terminal_output +1354,5110466,"TERMINAL",0,0,"40",,terminal_output +1355,5111475,"TERMINAL",0,0,"1",,terminal_output +1356,5112520,"TERMINAL",0,0,"2",,terminal_output +1357,5113546,"TERMINAL",0,0,"3",,terminal_output +1358,5114194,"sample.py",6911,0,"",python,selection_mouse +1359,5114350,"sample.py",6910,3,"}"")",python,selection_mouse +1360,5114586,"TERMINAL",0,0,"4",,terminal_output +1361,5115616,"TERMINAL",0,0,"5",,terminal_output +1362,5116652,"TERMINAL",0,0,"6",,terminal_output +1363,5118122,"TERMINAL",0,0,"7",,terminal_output +1364,5119145,"TERMINAL",0,0,"9",,terminal_output +1365,5120130,"TERMINAL",0,0,"50",,terminal_output +1366,5121182,"TERMINAL",0,0,"1",,terminal_output +1367,5122206,"TERMINAL",0,0,"2",,terminal_output +1368,5123343,"TERMINAL",0,0,"3",,terminal_output +1369,5124368,"TERMINAL",0,0,"4",,terminal_output +1370,5125326,"TERMINAL",0,0,"5",,terminal_output +1371,5126376,"TERMINAL",0,0,"6",,terminal_output +1372,5126883,"sample.py",6903,0,"",python,selection_mouse +1373,5127438,"TERMINAL",0,0,"7",,terminal_output +1374,5127491,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +1375,5127986,"sample.py",6886,0,"",python,selection_mouse 
+1376,5127988,"sample.py",6885,0,"",python,selection_command +1377,5128441,"TERMINAL",0,0,"8",,terminal_output +1378,5129589,"TERMINAL",0,0,"9",,terminal_output +1379,5130527,"TERMINAL",0,0,"9:00",,terminal_output +1380,5131564,"TERMINAL",0,0,"1",,terminal_output +1381,5132427,"sample.py",6918,0,"",python,selection_mouse +1382,5132598,"TERMINAL",0,0,"2",,terminal_output +1383,5133119,"sample.py",6914,0,"",python,selection_mouse +1384,5133638,"TERMINAL",0,0,"3",,terminal_output +1385,5134679,"TERMINAL",0,0,"4",,terminal_output +1386,5135719,"TERMINAL",0,0,"5",,terminal_output +1387,5136874,"TERMINAL",0,0,"6",,terminal_output +1388,5137801,"TERMINAL",0,0,"7",,terminal_output +1389,5138909,"TERMINAL",0,0,"8",,terminal_output +1390,5139932,"TERMINAL",0,0,"9",,terminal_output +1391,5140919,"TERMINAL",0,0,"10",,terminal_output +1392,5141960,"TERMINAL",0,0,"2",,terminal_output +1393,5143005,"TERMINAL",0,0,"3",,terminal_output +1394,5144131,"TERMINAL",0,0,"4",,terminal_output +1395,5145159,"TERMINAL",0,0,"563",,terminal_output +1396,5146188,"TERMINAL",0,0,"6",,terminal_output +1397,5147201,"TERMINAL",0,0,"7",,terminal_output +1398,5148229,"TERMINAL",0,0,"8",,terminal_output +1399,5148668,"TERMINAL",0,0,"Step 1500, validation loss: 0.001575813046656549\r\n",,terminal_output +1400,5148732,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=6] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=6] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1401,5149159,"TERMINAL",0,0,"Saved checkpoint at step 1500\r\n",,terminal_output +1402,5149268,"TERMINAL",0,0,"9",,terminal_output +1403,5150350,"TERMINAL",0,0,"20",,terminal_output +1404,5151402,"TERMINAL",0,0,"1",,terminal_output +1405,5151403,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/000750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001250 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1406,5152498,"TERMINAL",0,0,"2",,terminal_output +1407,5153450,"TERMINAL",0,0,"3",,terminal_output +1408,5154473,"TERMINAL",0,0,"4",,terminal_output +1409,5155460,"TERMINAL",0,0,"5",,terminal_output +1410,5156543,"TERMINAL",0,0,"6",,terminal_output +1411,5157545,"TERMINAL",0,0,"7",,terminal_output +1412,5158575,"TERMINAL",0,0,"8",,terminal_output +1413,5159619,"TERMINAL",0,0,"9",,terminal_output +1414,5160677,"TERMINAL",0,0,"30",,terminal_output +1415,5161694,"TERMINAL",0,0,"1",,terminal_output +1416,5162769,"TERMINAL",0,0,"2",,terminal_output +1417,5163796,"TERMINAL",0,0,"3",,terminal_output +1418,5164916,"TERMINAL",0,0,"4",,terminal_output +1419,5165866,"TERMINAL",0,0,"5",,terminal_output +1420,5166964,"TERMINAL",0,0,"6",,terminal_output +1421,5167997,"TERMINAL",0,0,"7",,terminal_output +1422,5169014,"TERMINAL",0,0,"9",,terminal_output +1423,5170003,"TERMINAL",0,0,"40",,terminal_output +1424,5171061,"TERMINAL",0,0,"1",,terminal_output +1425,5172081,"TERMINAL",0,0,"2",,terminal_output +1426,5173116,"TERMINAL",0,0,"37",,terminal_output +1427,5174239,"TERMINAL",0,0,"4",,terminal_output +1428,5175262,"TERMINAL",0,0,"5",,terminal_output +1429,5176228,"TERMINAL",0,0,"6",,terminal_output +1430,5177264,"TERMINAL",0,0,"7",,terminal_output +1431,5178300,"TERMINAL",0,0,"8",,terminal_output +1432,5179357,"TERMINAL",0,0,"9",,terminal_output +1433,5180390,"TERMINAL",0,0,"50",,terminal_output +1434,5181421,"TERMINAL",0,0,"1",,terminal_output +1435,5182540,"TERMINAL",0,0,"2",,terminal_output +1436,5183555,"TERMINAL",0,0,"3",,terminal_output +1437,5184579,"TERMINAL",0,0,"4",,terminal_output +1438,5185570,"TERMINAL",0,0,"5",,terminal_output +1439,5186690,"TERMINAL",0,0,"6",,terminal_output +1440,5187647,"TERMINAL",0,0,"7",,terminal_output +1441,5188598,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=7] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=7] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1442,5188764,"TERMINAL",0,0,"8",,terminal_output +1443,5188934,"TERMINAL",0,0,"Saved checkpoint at step 1750\r\n",,terminal_output +1444,5189721,"TERMINAL",0,0,"9",,terminal_output +1445,5190756,"TERMINAL",0,0,"10:00",,terminal_output +1446,5191133,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001750 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1447,5191793,"TERMINAL",0,0,"1",,terminal_output +1448,5192878,"TERMINAL",0,0,"2",,terminal_output +1449,5193898,"TERMINAL",0,0,"3",,terminal_output +1450,5194925,"TERMINAL",0,0,"4",,terminal_output +1451,5196048,"TERMINAL",0,0,"6",,terminal_output +1452,5197072,"TERMINAL",0,0,"7",,terminal_output +1453,5198094,"TERMINAL",0,0,"8",,terminal_output +1454,5199058,"TERMINAL",0,0,"9",,terminal_output +1455,5200145,"TERMINAL",0,0,"10",,terminal_output +1456,5201169,"TERMINAL",0,0,"1",,terminal_output +1457,5202190,"TERMINAL",0,0,"241",,terminal_output +1458,5203217,"TERMINAL",0,0,"3",,terminal_output +1459,5204238,"TERMINAL",0,0,"4",,terminal_output +1460,5205270,"TERMINAL",0,0,"5",,terminal_output +1461,5206433,"TERMINAL",0,0,"6",,terminal_output +1462,5207415,"TERMINAL",0,0,"7",,terminal_output +1463,5208439,"TERMINAL",0,0,"8",,terminal_output +1464,5209461,"TERMINAL",0,0,"9",,terminal_output +1465,5210466,"TERMINAL",0,0,"20",,terminal_output +1466,5211614,"TERMINAL",0,0,"1",,terminal_output +1467,5212561,"TERMINAL",0,0,"2",,terminal_output +1468,5213628,"TERMINAL",0,0,"3",,terminal_output +1469,5214630,"TERMINAL",0,0,"4",,terminal_output +1470,5215681,"TERMINAL",0,0,"5",,terminal_output +1471,5216704,"TERMINAL",0,0,"6",,terminal_output +1472,5217744,"TERMINAL",0,0,"7",,terminal_output +1473,5218883,"TERMINAL",0,0,"8",,terminal_output +1474,5219814,"TERMINAL",0,0,"9",,terminal_output +1475,5220929,"TERMINAL",0,0,"30",,terminal_output +1476,5221989,"TERMINAL",0,0,"1",,terminal_output +1477,5222982,"TERMINAL",0,0,"2",,terminal_output +1478,5224007,"TERMINAL",0,0,"4",,terminal_output +1479,5225029,"TERMINAL",0,0,"5",,terminal_output +1480,5226053,"TERMINAL",0,0,"6",,terminal_output +1481,5227243,"TERMINAL",0,0,"7",,terminal_output +1482,5228102,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=8] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=8] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1483,5228112,"TERMINAL",0,0,"8",,terminal_output +1484,5228625,"TERMINAL",0,0,"Saved checkpoint at step 2000\r\n",,terminal_output +1485,5229235,"TERMINAL",0,0,"9",,terminal_output +1486,5230251,"TERMINAL",0,0,"40",,terminal_output +1487,5230263,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1488,5231406,"TERMINAL",0,0,"1",,terminal_output +1489,5232354,"TERMINAL",0,0,"2",,terminal_output +1490,5233436,"TERMINAL",0,0,"3",,terminal_output +1491,5234347,"TERMINAL",0,0,"4",,terminal_output +1492,5235367,"TERMINAL",0,0,"5",,terminal_output +1493,5236496,"TERMINAL",0,0,"6",,terminal_output +1494,5237554,"TERMINAL",0,0,"7",,terminal_output +1495,5238550,"TERMINAL",0,0,"8",,terminal_output +1496,5239517,"TERMINAL",0,0,"9",,terminal_output +1497,5240574,"TERMINAL",0,0,"50",,terminal_output +1498,5241619,"TERMINAL",0,0,"1",,terminal_output +1499,5242644,"TERMINAL",0,0,"2",,terminal_output +1500,5243675,"TERMINAL",0,0,"3",,terminal_output +1501,5244709,"TERMINAL",0,0,"4",,terminal_output +1502,5245752,"TERMINAL",0,0,"5",,terminal_output +1503,5246839,"TERMINAL",0,0,"6",,terminal_output +1504,5247862,"TERMINAL",0,0,"7",,terminal_output +1505,5248887,"TERMINAL",0,0,"8",,terminal_output +1506,5249910,"TERMINAL",0,0,"9",,terminal_output +1507,5251036,"TERMINAL",0,0,"1:01",,terminal_output +1508,5252061,"TERMINAL",0,0,"2",,terminal_output +1509,5253107,"TERMINAL",0,0,"33",,terminal_output +1510,5254109,"TERMINAL",0,0,"4",,terminal_output +1511,5255132,"TERMINAL",0,0,"5",,terminal_output +1512,5256155,"TERMINAL",0,0,"6",,terminal_output +1513,5257282,"TERMINAL",0,0,"7",,terminal_output +1514,5258219,"TERMINAL",0,0,"8",,terminal_output +1515,5259333,"TERMINAL",0,0,"9 9",,terminal_output +1516,5260356,"TERMINAL",0,0,"10",,terminal_output +1517,5261379,"TERMINAL",0,0,"1",,terminal_output +1518,5262469,"TERMINAL",0,0,"2",,terminal_output +1519,5263432,"TERMINAL",0,0,"3",,terminal_output 
+1520,5264453,"TERMINAL",0,0,"4",,terminal_output +1521,5265501,"TERMINAL",0,0,"539",,terminal_output +1522,5266603,"TERMINAL",0,0,"6",,terminal_output +1523,5267423,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +1524,5267574,"TERMINAL",0,0,"7",,terminal_output +1525,5268651,"TERMINAL",0,0,"8",,terminal_output +1526,5269618,"TERMINAL",0,0,"9",,terminal_output +1527,5270683,"TERMINAL",0,0,"20",,terminal_output +1528,5271696,"TERMINAL",0,0,"1",,terminal_output +1529,5272734,"TERMINAL",0,0,"2",,terminal_output +1530,5273772,"TERMINAL",0,0,"3",,terminal_output +1531,5274819,"TERMINAL",0,0,"4",,terminal_output +1532,5275921,"TERMINAL",0,0,"5",,terminal_output +1533,5276900,"TERMINAL",0,0,"6",,terminal_output +1534,5277939,"TERMINAL",0,0,"7",,terminal_output +1535,5278992,"TERMINAL",0,0,"9",,terminal_output +1536,5280019,"TERMINAL",0,0,"30",,terminal_output +1537,5281144,"TERMINAL",0,0,"1",,terminal_output +1538,5282098,"TERMINAL",0,0,"2",,terminal_output +1539,5283192,"TERMINAL",0,0,"3",,terminal_output +1540,5284217,"TERMINAL",0,0,"4",,terminal_output +1541,5285214,"TERMINAL",0,0,"5",,terminal_output +1542,5286252,"TERMINAL",0,0,"6",,terminal_output +1543,5287305,"TERMINAL",0,0,"7",,terminal_output +1544,5288414,"TERMINAL",0,0,"8",,terminal_output +1545,5288425,"TERMINAL",0,0,"Step 2250, validation loss: 0.0011568127665668726\r\n",,terminal_output +1546,5288520,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=9] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=9] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1547,5289335,"TERMINAL",0,0,"Saved checkpoint at step 2250\r\n",,terminal_output +1548,5289387,"TERMINAL",0,0,"9",,terminal_output +1549,5290457,"TERMINAL",0,0,"40",,terminal_output +1550,5291088,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1551,5291490,"TERMINAL",0,0,"1",,terminal_output +1552,5292508,"TERMINAL",0,0,"2",,terminal_output +1553,5293551,"TERMINAL",0,0,"3",,terminal_output +1554,5294593,"TERMINAL",0,0,"4",,terminal_output +1555,5295663,"TERMINAL",0,0,"5",,terminal_output +1556,5296656,"TERMINAL",0,0,"611",,terminal_output +1557,5297694,"TERMINAL",0,0,"7",,terminal_output +1558,5298736,"TERMINAL",0,0,"8",,terminal_output +1559,5299774,"TERMINAL",0,0,"9",,terminal_output +1560,5300906,"TERMINAL",0,0,"50",,terminal_output +1561,5301932,"TERMINAL",0,0,"1",,terminal_output +1562,5302895,"TERMINAL",0,0,"2",,terminal_output +1563,5303982,"TERMINAL",0,0,"3",,terminal_output +1564,5304973,"TERMINAL",0,0,"5",,terminal_output +1565,5306029,"TERMINAL",0,0,"6",,terminal_output +1566,5307159,"TERMINAL",0,0,"7",,terminal_output +1567,5308118,"TERMINAL",0,0,"8",,terminal_output +1568,5309137,"TERMINAL",0,0,"9",,terminal_output +1569,5310225,"TERMINAL",0,0,"2:00",,terminal_output +1570,5311216,"TERMINAL",0,0,"1",,terminal_output +1571,5312279,"TERMINAL",0,0,"2",,terminal_output +1572,5313398,"TERMINAL",0,0,"3",,terminal_output +1573,5314424,"TERMINAL",0,0,"4",,terminal_output +1574,5315376,"TERMINAL",0,0,"5",,terminal_output +1575,5316471,"TERMINAL",0,0,"6",,terminal_output +1576,5317494,"TERMINAL",0,0,"7",,terminal_output +1577,5318521,"TERMINAL",0,0,"8",,terminal_output +1578,5319546,"TERMINAL",0,0,"9",,terminal_output +1579,5320589,"TERMINAL",0,0,"10",,terminal_output +1580,5321697,"TERMINAL",0,0,"1",,terminal_output +1581,5322721,"TERMINAL",0,0,"2",,terminal_output +1582,5323697,"TERMINAL",0,0,"3",,terminal_output +1583,5324780,"TERMINAL",0,0,"4",,terminal_output +1584,5325773,"TERMINAL",0,0,"5",,terminal_output +1585,5326812,"TERMINAL",0,0,"6",,terminal_output +1586,5327942,"TERMINAL",0,0,"7",,terminal_output +1587,5328455,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=10] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=10] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1588,5328971,"TERMINAL",0,0,"8",,terminal_output +1589,5329116,"TERMINAL",0,0,"Saved checkpoint at step 2500\r\n",,terminal_output +1590,5329995,"TERMINAL",0,0,"9",,terminal_output +1591,5331014,"TERMINAL",0,0,"21",,terminal_output +1592,5331067,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002250 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/001750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1593,5332044,"TERMINAL",0,0,"2",,terminal_output +1594,5333060,"TERMINAL",0,0,"3",,terminal_output +1595,5334084,"TERMINAL",0,0,"4",,terminal_output +1596,5335111,"TERMINAL",0,0,"5",,terminal_output +1597,5336135,"TERMINAL",0,0,"6",,terminal_output +1598,5337165,"TERMINAL",0,0,"7",,terminal_output +1599,5338200,"TERMINAL",0,0,"8",,terminal_output +1600,5339307,"TERMINAL",0,0,"9",,terminal_output +1601,5340275,"TERMINAL",0,0,"30",,terminal_output +1602,5341354,"TERMINAL",0,0,"1",,terminal_output +1603,5342378,"TERMINAL",0,0,"2",,terminal_output +1604,5343418,"TERMINAL",0,0,"3",,terminal_output +1605,5344528,"TERMINAL",0,0,"4",,terminal_output +1606,5345459,"TERMINAL",0,0,"5",,terminal_output +1607,5346494,"TERMINAL",0,0,"6",,terminal_output +1608,5347613,"TERMINAL",0,0,"7",,terminal_output +1609,5348565,"TERMINAL",0,0,"8",,terminal_output +1610,5349663,"TERMINAL",0,0,"9",,terminal_output +1611,5350781,"TERMINAL",0,0,"40",,terminal_output +1612,5351828,"TERMINAL",0,0,"1",,terminal_output +1613,5352722,"TERMINAL",0,0,"2",,terminal_output +1614,5353751,"TERMINAL",0,0,"3",,terminal_output +1615,5354791,"TERMINAL",0,0,"4",,terminal_output +1616,5355835,"TERMINAL",0,0,"5",,terminal_output +1617,5356918,"TERMINAL",0,0,"6",,terminal_output +1618,5357942,"TERMINAL",0,0,"7",,terminal_output +1619,5358941,"TERMINAL",0,0,"9",,terminal_output +1620,5359978,"TERMINAL",0,0,"50",,terminal_output +1621,5361120,"TERMINAL",0,0,"1",,terminal_output +1622,5362097,"TERMINAL",0,0,"2",,terminal_output +1623,5363167,"TERMINAL",0,0,"3",,terminal_output +1624,5364191,"TERMINAL",0,0,"4",,terminal_output +1625,5365161,"TERMINAL",0,0,"5",,terminal_output +1626,5366196,"TERMINAL",0,0,"6",,terminal_output +1627,5367263,"TERMINAL",0,0,"7",,terminal_output +1628,5368332,"TERMINAL",0,0,"8",,terminal_output +1629,5368431,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=11] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=11] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1630,5369107,"TERMINAL",0,0,"Saved checkpoint at step 2750\r\n",,terminal_output +1631,5369307,"TERMINAL",0,0,"9",,terminal_output +1632,5370349,"TERMINAL",0,0,"3:00",,terminal_output +1633,5370847,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1634,5371461,"TERMINAL",0,0,"1",,terminal_output +1635,5372487,"TERMINAL",0,0,"2",,terminal_output +1636,5373515,"TERMINAL",0,0,"3",,terminal_output +1637,5374533,"TERMINAL",0,0,"4",,terminal_output +1638,5375529,"TERMINAL",0,0,"5",,terminal_output +1639,5376600,"TERMINAL",0,0,"6",,terminal_output +1640,5377606,"TERMINAL",0,0,"7",,terminal_output +1641,5378737,"TERMINAL",0,0,"8",,terminal_output +1642,5379757,"TERMINAL",0,0,"9",,terminal_output +1643,5380724,"TERMINAL",0,0,"10",,terminal_output +1644,5381762,"TERMINAL",0,0,"1",,terminal_output +1645,5382799,"TERMINAL",0,0,"2",,terminal_output +1646,5383850,"TERMINAL",0,0,"3",,terminal_output +1647,5385078,"TERMINAL",0,0,"42",,terminal_output +1648,5386104,"TERMINAL",0,0,"6",,terminal_output +1649,5387131,"TERMINAL",0,0,"7",,terminal_output +1650,5388256,"TERMINAL",0,0,"8",,terminal_output +1651,5389276,"TERMINAL",0,0,"9",,terminal_output +1652,5390303,"TERMINAL",0,0,"20",,terminal_output +1653,5391349,"TERMINAL",0,0,"1",,terminal_output +1654,5392349,"TERMINAL",0,0,"2",,terminal_output +1655,5393374,"TERMINAL",0,0,"3",,terminal_output +1656,5394399,"TERMINAL",0,0,"4",,terminal_output +1657,5395457,"TERMINAL",0,0,"5",,terminal_output +1658,5396548,"TERMINAL",0,0,"6",,terminal_output +1659,5397573,"TERMINAL",0,0,"7",,terminal_output +1660,5398576,"TERMINAL",0,0,"8",,terminal_output +1661,5399664,"TERMINAL",0,0,"9",,terminal_output +1662,5400653,"TERMINAL",0,0,"30",,terminal_output +1663,5401668,"TERMINAL",0,0,"1",,terminal_output +1664,5402695,"TERMINAL",0,0,"2",,terminal_output +1665,5403773,"TERMINAL",0,0,"3",,terminal_output +1666,5404757,"TERMINAL",0,0,"4",,terminal_output 
+1667,5405865,"TERMINAL",0,0,"5",,terminal_output +1668,5406892,"TERMINAL",0,0,"6",,terminal_output +1669,5407914,"TERMINAL",0,0,"7",,terminal_output +1670,5408036,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +1671,5408942,"TERMINAL",0,0,"8",,terminal_output +1672,5410066,"TERMINAL",0,0,"40",,terminal_output +1673,5411088,"TERMINAL",0,0,"1",,terminal_output +1674,5412114,"TERMINAL",0,0,"2",,terminal_output +1675,5413138,"TERMINAL",0,0,"3",,terminal_output +1676,5414106,"TERMINAL",0,0,"4",,terminal_output +1677,5415194,"TERMINAL",0,0,"5",,terminal_output +1678,5416252,"TERMINAL",0,0,"6",,terminal_output +1679,5417254,"TERMINAL",0,0,"7",,terminal_output +1680,5418253,"TERMINAL",0,0,"8",,terminal_output +1681,5419394,"TERMINAL",0,0,"9",,terminal_output +1682,5420349,"TERMINAL",0,0,"50",,terminal_output +1683,5421466,"TERMINAL",0,0,"1",,terminal_output +1684,5422458,"TERMINAL",0,0,"2",,terminal_output +1685,5423481,"TERMINAL",0,0,"3",,terminal_output +1686,5424504,"TERMINAL",0,0,"4",,terminal_output +1687,5425523,"TERMINAL",0,0,"5",,terminal_output +1688,5426558,"TERMINAL",0,0,"6",,terminal_output +1689,5427678,"TERMINAL",0,0,"7",,terminal_output +1690,5428702,"TERMINAL",0,0,"8",,terminal_output +1691,5429107,"TERMINAL",0,0,"Step 3000, validation loss: 0.001801744569092989\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=12] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=12] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1692,5429418,"TERMINAL",0,0,"Saved checkpoint at step 3000\r\n",,terminal_output +1693,5429725,"TERMINAL",0,0,"9",,terminal_output +1694,5430752,"TERMINAL",0,0,"4:00",,terminal_output +1695,5431672,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003000 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1696,5431845,"TERMINAL",0,0,"1",,terminal_output +1697,5432790,"TERMINAL",0,0,"2",,terminal_output +1698,5433925,"TERMINAL",0,0,"3",,terminal_output +1699,5434866,"TERMINAL",0,0,"4",,terminal_output +1700,5435972,"TERMINAL",0,0,"5",,terminal_output +1701,5436938,"TERMINAL",0,0,"7",,terminal_output +1702,5438022,"TERMINAL",0,0,"8",,terminal_output +1703,5439048,"TERMINAL",0,0,"9",,terminal_output +1704,5440068,"TERMINAL",0,0,"10",,terminal_output +1705,5441191,"TERMINAL",0,0,"1",,terminal_output +1706,5442224,"TERMINAL",0,0,"2",,terminal_output +1707,5443246,"TERMINAL",0,0,"3",,terminal_output +1708,5444199,"TERMINAL",0,0,"4",,terminal_output +1709,5445301,"TERMINAL",0,0,"5",,terminal_output +1710,5446274,"TERMINAL",0,0,"6",,terminal_output +1711,5447339,"TERMINAL",0,0,"7",,terminal_output +1712,5448368,"TERMINAL",0,0,"8",,terminal_output +1713,5449373,"TERMINAL",0,0,"9",,terminal_output +1714,5450458,"TERMINAL",0,0,"20",,terminal_output +1715,5451542,"TERMINAL",0,0,"1",,terminal_output +1716,5452562,"TERMINAL",0,0,"2",,terminal_output +1717,5453595,"TERMINAL",0,0,"3",,terminal_output +1718,5454610,"TERMINAL",0,0,"4",,terminal_output +1719,5455632,"TERMINAL",0,0,"5",,terminal_output +1720,5456634,"TERMINAL",0,0,"6",,terminal_output +1721,5457709,"TERMINAL",0,0,"7",,terminal_output +1722,5458709,"TERMINAL",0,0,"8",,terminal_output +1723,5459747,"TERMINAL",0,0,"9",,terminal_output +1724,5460789,"TERMINAL",0,0,"30",,terminal_output +1725,5461827,"TERMINAL",0,0,"1",,terminal_output +1726,5462863,"TERMINAL",0,0,"2",,terminal_output +1727,5463929,"TERMINAL",0,0,"3",,terminal_output +1728,5464953,"TERMINAL",0,0,"5",,terminal_output +1729,5466080,"TERMINAL",0,0,"6",,terminal_output +1730,5467105,"TERMINAL",0,0,"7",,terminal_output +1731,5468134,"TERMINAL",0,0,"8",,terminal_output +1732,5468433,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=13] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=13] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1733,5468742,"TERMINAL",0,0,"Saved checkpoint at step 3250\r\n",,terminal_output +1734,5469152,"TERMINAL",0,0,"9",,terminal_output +1735,5470180,"TERMINAL",0,0,"40",,terminal_output +1736,5470996,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003250 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1737,5471159,"TERMINAL",0,0,"1",,terminal_output +1738,5472226,"TERMINAL",0,0,"2",,terminal_output +1739,5473246,"TERMINAL",0,0,"3",,terminal_output +1740,5474267,"TERMINAL",0,0,"4",,terminal_output +1741,5475390,"TERMINAL",0,0,"5",,terminal_output +1742,5476341,"TERMINAL",0,0,"6",,terminal_output +1743,5477447,"TERMINAL",0,0,"7",,terminal_output +1744,5478471,"TERMINAL",0,0,"8",,terminal_output +1745,5479487,"TERMINAL",0,0,"9",,terminal_output +1746,5480509,"TERMINAL",0,0,"50",,terminal_output +1747,5481642,"TERMINAL",0,0,"1",,terminal_output +1748,5482689,"TERMINAL",0,0,"2",,terminal_output +1749,5483624,"TERMINAL",0,0,"3",,terminal_output +1750,5484718,"TERMINAL",0,0,"4",,terminal_output +1751,5485740,"TERMINAL",0,0,"5",,terminal_output +1752,5486766,"TERMINAL",0,0,"6",,terminal_output +1753,5487772,"TERMINAL",0,0,"7",,terminal_output +1754,5488836,"TERMINAL",0,0,"8",,terminal_output +1755,5489863,"TERMINAL",0,0,"9",,terminal_output +1756,5490963,"TERMINAL",0,0,"5:00",,terminal_output +1757,5491978,"TERMINAL",0,0,"1",,terminal_output +1758,5493010,"TERMINAL",0,0,"3",,terminal_output +1759,5494058,"TERMINAL",0,0,"4",,terminal_output +1760,5495060,"TERMINAL",0,0,"5",,terminal_output +1761,5496085,"TERMINAL",0,0,"6",,terminal_output +1762,5497208,"TERMINAL",0,0,"7",,terminal_output +1763,5498145,"TERMINAL",0,0,"8",,terminal_output +1764,5499257,"TERMINAL",0,0,"9",,terminal_output +1765,5500286,"TERMINAL",0,0,"10",,terminal_output +1766,5501259,"TERMINAL",0,0,"1",,terminal_output +1767,5502332,"TERMINAL",0,0,"2",,terminal_output +1768,5503355,"TERMINAL",0,0,"3",,terminal_output +1769,5504371,"TERMINAL",0,0,"4",,terminal_output +1770,5505408,"TERMINAL",0,0,"5",,terminal_output +1771,5506529,"TERMINAL",0,0,"6",,terminal_output +1772,5507480,"TERMINAL",0,0,"7",,terminal_output +1773,5507700,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=14] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=14] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1774,5508166,"TERMINAL",0,0,"Saved checkpoint at step 3500\r\n",,terminal_output +1775,5508580,"TERMINAL",0,0,"8",,terminal_output +1776,5509555,"TERMINAL",0,0,"9",,terminal_output +1777,5510363,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/002750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003000) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1778,5510590,"TERMINAL",0,0,"20",,terminal_output +1779,5511650,"TERMINAL",0,0,"1",,terminal_output +1780,5512675,"TERMINAL",0,0,"2",,terminal_output +1781,5513870,"TERMINAL",0,0,"3",,terminal_output +1782,5514754,"TERMINAL",0,0,"4",,terminal_output +1783,5515794,"TERMINAL",0,0,"5",,terminal_output +1784,5516834,"TERMINAL",0,0,"6",,terminal_output +1785,5517885,"TERMINAL",0,0,"7",,terminal_output +1786,5518918,"TERMINAL",0,0,"8",,terminal_output +1787,5519943,"TERMINAL",0,0,"30",,terminal_output +1788,5520982,"TERMINAL",0,0,"1",,terminal_output +1789,5522032,"TERMINAL",0,0,"2",,terminal_output +1790,5523193,"TERMINAL",0,0,"3",,terminal_output +1791,5524140,"TERMINAL",0,0,"4",,terminal_output +1792,5525168,"TERMINAL",0,0,"5",,terminal_output +1793,5526189,"TERMINAL",0,0,"6",,terminal_output +1794,5527317,"TERMINAL",0,0,"7",,terminal_output +1795,5528339,"TERMINAL",0,0,"8",,terminal_output +1796,5529364,"TERMINAL",0,0,"9",,terminal_output +1797,5530348,"TERMINAL",0,0,"40",,terminal_output +1798,5531413,"TERMINAL",0,0,"1",,terminal_output +1799,5532434,"TERMINAL",0,0,"2",,terminal_output +1800,5533561,"TERMINAL",0,0,"3",,terminal_output +1801,5534562,"TERMINAL",0,0,"4",,terminal_output +1802,5535529,"TERMINAL",0,0,"5",,terminal_output +1803,5536669,"TERMINAL",0,0,"6",,terminal_output +1804,5537602,"TERMINAL",0,0,"7",,terminal_output +1805,5538682,"TERMINAL",0,0,"8",,terminal_output +1806,5539716,"TERMINAL",0,0,"9",,terminal_output +1807,5540730,"TERMINAL",0,0,"50",,terminal_output +1808,5541756,"TERMINAL",0,0,"1",,terminal_output +1809,5542794,"TERMINAL",0,0,"2",,terminal_output +1810,5543832,"TERMINAL",0,0,"3",,terminal_output 
+1811,5544869,"TERMINAL",0,0,"4",,terminal_output +1812,5545963,"TERMINAL",0,0,"5",,terminal_output +1813,5546848,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +1814,5546954,"TERMINAL",0,0,"7",,terminal_output +1815,5547998,"TERMINAL",0,0,"8",,terminal_output +1816,5549024,"TERMINAL",0,0,"9",,terminal_output +1817,5550149,"TERMINAL",0,0,"6:00",,terminal_output +1818,5551176,"TERMINAL",0,0,"1",,terminal_output +1819,5552218,"TERMINAL",0,0,"2",,terminal_output +1820,5553226,"TERMINAL",0,0,"3",,terminal_output +1821,5554197,"TERMINAL",0,0,"4",,terminal_output +1822,5555277,"TERMINAL",0,0,"5",,terminal_output +1823,5556273,"TERMINAL",0,0,"6",,terminal_output +1824,5557422,"TERMINAL",0,0,"7",,terminal_output +1825,5558452,"TERMINAL",0,0,"8",,terminal_output +1826,5559468,"TERMINAL",0,0,"9",,terminal_output +1827,5560461,"TERMINAL",0,0,"10",,terminal_output +1828,5561461,"TERMINAL",0,0,"1",,terminal_output +1829,5562593,"TERMINAL",0,0,"2",,terminal_output +1830,5563563,"TERMINAL",0,0,"3",,terminal_output +1831,5564566,"TERMINAL",0,0,"4",,terminal_output +1832,5565604,"TERMINAL",0,0,"5",,terminal_output +1833,5566653,"TERMINAL",0,0,"6",,terminal_output +1834,5567763,"TERMINAL",0,0,"7",,terminal_output +1835,5568082,"TERMINAL",0,0,"Step 3750, validation loss: 0.0004983035032637417\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=15] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=15] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1836,5568481,"TERMINAL",0,0,"Saved checkpoint at step 3750\r\n",,terminal_output +1837,5568726,"TERMINAL",0,0,"8",,terminal_output +1838,5569765,"TERMINAL",0,0,"9",,terminal_output +1839,5570325,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003750 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003750) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1840,5570802,"TERMINAL",0,0,"20",,terminal_output +1841,5571839,"TERMINAL",0,0,"1",,terminal_output +1842,5572875,"TERMINAL",0,0,"2",,terminal_output +1843,5573912,"TERMINAL",0,0,"3",,terminal_output +1844,5575033,"TERMINAL",0,0,"5",,terminal_output +1845,5576053,"TERMINAL",0,0,"6",,terminal_output +1846,5577085,"TERMINAL",0,0,"7",,terminal_output +1847,5578112,"TERMINAL",0,0,"8",,terminal_output +1848,5579099,"TERMINAL",0,0,"9",,terminal_output +1849,5580139,"TERMINAL",0,0,"30",,terminal_output +1850,5580864,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +1851,5581207,"TERMINAL",0,0,"1",,terminal_output +1852,5582330,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"",shellscript,tab +1853,5582455,"TERMINAL",0,0,"2",,terminal_output +1854,5583254,"TERMINAL",0,0,"3",,terminal_output +1855,5584294,"TERMINAL",0,0,"4",,terminal_output +1856,5585326,"TERMINAL",0,0,"5",,terminal_output +1857,5586399,"TERMINAL",0,0,"6",,terminal_output +1858,5587425,"TERMINAL",0,0,"7",,terminal_output +1859,5588451,"TERMINAL",0,0,"8",,terminal_output +1860,5589575,"TERMINAL",0,0,"9",,terminal_output +1861,5590546,"TERMINAL",0,0,"40",,terminal_output +1862,5591624,"TERMINAL",0,0,"1",,terminal_output +1863,5592648,"TERMINAL",0,0,"2",,terminal_output +1864,5593621,"TERMINAL",0,0,"3",,terminal_output +1865,5594716,"TERMINAL",0,0,"4",,terminal_output +1866,5595721,"TERMINAL",0,0,"5",,terminal_output +1867,5596774,"TERMINAL",0,0,"6",,terminal_output +1868,5597767,"TERMINAL",0,0,"7",,terminal_output +1869,5598798,"TERMINAL",0,0,"8",,terminal_output +1870,5599853,"TERMINAL",0,0,"9",,terminal_output +1871,5600941,"TERMINAL",0,0,"50",,terminal_output +1872,5601915,"TERMINAL",0,0,"1",,terminal_output +1873,5602952,"TERMINAL",0,0,"3",,terminal_output +1874,5604014,"TERMINAL",0,0,"4",,terminal_output +1875,5605043,"TERMINAL",0,0,"5",,terminal_output +1876,5606181,"TERMINAL",0,0,"6",,terminal_output +1877,5607256,"TERMINAL",0,0,"7",,terminal_output +1878,5608014,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=16] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=16] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1879,5608125,"TERMINAL",0,0,"8",,terminal_output +1880,5608319,"TERMINAL",0,0,"Saved checkpoint at step 4000\r\n",,terminal_output +1881,5609161,"TERMINAL",0,0,"9",,terminal_output +1882,5610264,"TERMINAL",0,0,"7:00",,terminal_output +1883,5610564,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003500 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003750) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1884,5611233,"TERMINAL",0,0,"1",,terminal_output +1885,5612306,"TERMINAL",0,0,"2",,terminal_output +1886,5613333,"TERMINAL",0,0,"3",,terminal_output +1887,5614374,"TERMINAL",0,0,"4",,terminal_output +1888,5615378,"TERMINAL",0,0,"5",,terminal_output +1889,5616514,"TERMINAL",0,0,"6",,terminal_output +1890,5617528,"TERMINAL",0,0,"7",,terminal_output +1891,5618554,"TERMINAL",0,0,"8",,terminal_output +1892,5619580,"TERMINAL",0,0,"9",,terminal_output +1893,5620569,"TERMINAL",0,0,"10",,terminal_output +1894,5621626,"TERMINAL",0,0,"1",,terminal_output +1895,5622751,"TERMINAL",0,0,"2",,terminal_output +1896,5623778,"TERMINAL",0,0,"3",,terminal_output +1897,5624739,"TERMINAL",0,0,"4",,terminal_output +1898,5625757,"TERMINAL",0,0,"5",,terminal_output +1899,5626795,"TERMINAL",0,0,"62",,terminal_output +1900,5627829,"TERMINAL",0,0,"7",,terminal_output +1901,5628897,"TERMINAL",0,0,"8",,terminal_output +1902,5629904,"TERMINAL",0,0,"9",,terminal_output +1903,5630939,"TERMINAL",0,0,"21",,terminal_output +1904,5632072,"TERMINAL",0,0,"2",,terminal_output +1905,5633095,"TERMINAL",0,0,"3",,terminal_output +1906,5634118,"TERMINAL",0,0,"4",,terminal_output +1907,5635166,"TERMINAL",0,0,"5",,terminal_output +1908,5636206,"TERMINAL",0,0,"6",,terminal_output +1909,5637192,"TERMINAL",0,0,"7",,terminal_output +1910,5638318,"TERMINAL",0,0,"8",,terminal_output +1911,5639343,"TERMINAL",0,0,"91",,terminal_output +1912,5640293,"TERMINAL",0,0,"30",,terminal_output +1913,5641335,"TERMINAL",0,0,"1",,terminal_output +1914,5642363,"TERMINAL",0,0,"2",,terminal_output +1915,5643441,"TERMINAL",0,0,"3",,terminal_output +1916,5644464,"TERMINAL",0,0,"4",,terminal_output +1917,5645472,"TERMINAL",0,0,"5",,terminal_output +1918,5646612,"TERMINAL",0,0,"6",,terminal_output +1919,5647538,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=17] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=17] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +1920,5647550,"TERMINAL",0,0,"7",,terminal_output +1921,5647843,"TERMINAL",0,0,"Saved checkpoint at step 4250\r\n",,terminal_output +1922,5648662,"TERMINAL",0,0,"8",,terminal_output +1923,5649685,"TERMINAL",0,0,"9",,terminal_output +1924,5650102,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003750 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003750) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +1925,5650754,"TERMINAL",0,0,"40",,terminal_output +1926,5651743,"TERMINAL",0,0,"1",,terminal_output +1927,5652755,"TERMINAL",0,0,"2",,terminal_output +1928,5653782,"TERMINAL",0,0,"3",,terminal_output +1929,5654911,"TERMINAL",0,0,"4",,terminal_output +1930,5655916,"TERMINAL",0,0,"5",,terminal_output +1931,5656886,"TERMINAL",0,0,"6",,terminal_output +1932,5657920,"TERMINAL",0,0,"7",,terminal_output +1933,5659012,"TERMINAL",0,0,"9",,terminal_output +1934,5659999,"TERMINAL",0,0,"50",,terminal_output +1935,5661049,"TERMINAL",0,0,"1",,terminal_output +1936,5662121,"TERMINAL",0,0,"2",,terminal_output +1937,5663204,"TERMINAL",0,0,"3",,terminal_output +1938,5664227,"TERMINAL",0,0,"4",,terminal_output +1939,5665196,"TERMINAL",0,0,"5",,terminal_output +1940,5666232,"TERMINAL",0,0,"6",,terminal_output +1941,5667298,"TERMINAL",0,0,"7",,terminal_output +1942,5668322,"TERMINAL",0,0,"8",,terminal_output +1943,5669351,"TERMINAL",0,0,"9",,terminal_output +1944,5670389,"TERMINAL",0,0,"8:00",,terminal_output +1945,5671456,"TERMINAL",0,0,"1",,terminal_output +1946,5672519,"TERMINAL",0,0,"2",,terminal_output +1947,5673508,"TERMINAL",0,0,"3",,terminal_output +1948,5674549,"TERMINAL",0,0,"4",,terminal_output +1949,5675692,"TERMINAL",0,0,"5",,terminal_output +1950,5676719,"TERMINAL",0,0,"6",,terminal_output +1951,5677753,"TERMINAL",0,0,"7",,terminal_output +1952,5678766,"TERMINAL",0,0,"8",,terminal_output +1953,5679791,"TERMINAL",0,0,"9",,terminal_output +1954,5680790,"TERMINAL",0,0,"10",,terminal_output +1955,5681842,"TERMINAL",0,0,"1",,terminal_output +1956,5682873,"TERMINAL",0,0,"2",,terminal_output +1957,5683906,"TERMINAL",0,0,"3",,terminal_output +1958,5685071,"TERMINAL",0,0,"51",,terminal_output +1959,5686028,"TERMINAL",0,0,"6",,terminal_output +1960,5687169,"TERMINAL",0,0,"7",,terminal_output +1961,5688186,"TERMINAL",0,0,"8",,terminal_output +1962,5689212,"TERMINAL",0,0,"9",,terminal_output +1963,5690235,"TERMINAL",0,0,"20",,terminal_output +1964,5691233,"TERMINAL",0,0,"1",,terminal_output +1965,5692309,"TERMINAL",0,0,"2",,terminal_output +1966,5693414,"TERMINAL",0,0,"3",,terminal_output +1967,5694356,"TERMINAL",0,0,"4",,terminal_output +1968,5695397,"TERMINAL",0,0,"5",,terminal_output +1969,5696482,"TERMINAL",0,0,"6",,terminal_output +1970,5697505,"TERMINAL",0,0,"7",,terminal_output +1971,5698516,"TERMINAL",0,0,"8",,terminal_output +1972,5699660,"TERMINAL",0,0,"9",,terminal_output +1973,5700679,"TERMINAL",0,0,"30",,terminal_output +1974,5701637,"TERMINAL",0,0,"1",,terminal_output +1975,5702728,"TERMINAL",0,0,"2",,terminal_output +1976,5703789,"TERMINAL",0,0,"3",,terminal_output +1977,5704776,"TERMINAL",0,0,"4",,terminal_output +1978,5705806,"TERMINAL",0,0,"5",,terminal_output +1979,5706844,"TERMINAL",0,0,"6",,terminal_output +1980,5707881,"TERMINAL",0,0,"7",,terminal_output +1981,5708923,"TERMINAL",0,0,"8",,terminal_output +1982,5710000,"TERMINAL",0,0,"40",,terminal_output +1983,5711023,"TERMINAL",0,0,"1",,terminal_output +1984,5712048,"TERMINAL",0,0,"2",,terminal_output +1985,5713176,"TERMINAL",0,0,"3",,terminal_output +1986,5714197,"TERMINAL",0,0,"4",,terminal_output +1987,5715221,"TERMINAL",0,0,"5",,terminal_output +1988,5716202,"TERMINAL",0,0,"6",,terminal_output +1989,5717270,"TERMINAL",0,0,"7",,terminal_output +1990,5718278,"TERMINAL",0,0,"8",,terminal_output 
+1991,5719534,"TERMINAL",0,0,"9",,terminal_output +1992,5720367,"TERMINAL",0,0,"50",,terminal_output +1993,5721479,"TERMINAL",0,0,"1",,terminal_output +1994,5722525,"TERMINAL",0,0,"2",,terminal_output +1995,5723516,"TERMINAL",0,0,"3",,terminal_output +1996,5724238,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +1997,5724522,"TERMINAL",0,0,"4",,terminal_output +1998,5725568,"TERMINAL",0,0,"5",,terminal_output +1999,5726603,"TERMINAL",0,0,"6",,terminal_output +2000,5727714,"TERMINAL",0,0,"7",,terminal_output +2001,5728743,"TERMINAL",0,0,"8",,terminal_output +2002,5729764,"TERMINAL",0,0,"9",,terminal_output +2003,5730765,"TERMINAL",0,0,"9:00",,terminal_output +2004,5731819,"TERMINAL",0,0,"1",,terminal_output +2005,5732843,"TERMINAL",0,0,"2",,terminal_output +2006,5733887,"TERMINAL",0,0,"3",,terminal_output +2007,5734928,"TERMINAL",0,0,"4",,terminal_output +2008,5736009,"TERMINAL",0,0,"6",,terminal_output +2009,5737009,"TERMINAL",0,0,"7",,terminal_output +2010,5738060,"TERMINAL",0,0,"8",,terminal_output +2011,5739084,"TERMINAL",0,0,"9",,terminal_output +2012,5740207,"TERMINAL",0,0,"10",,terminal_output +2013,5741231,"TERMINAL",0,0,"1",,terminal_output +2014,5742198,"TERMINAL",0,0,"2",,terminal_output +2015,5743232,"TERMINAL",0,0,"3",,terminal_output +2016,5744303,"TERMINAL",0,0,"4",,terminal_output +2017,5745329,"TERMINAL",0,0,"5",,terminal_output +2018,5745383,"TERMINAL",0,0,"Step 4500, validation loss: 0.0002722639183048159\r\n",,terminal_output +2019,5745468,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=18] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=18] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +2020,5746177,"TERMINAL",0,0,"Saved checkpoint at step 4500\r\n",,terminal_output +2021,5746338,"TERMINAL",0,0,"6 97",,terminal_output +2022,5747374,"TERMINAL",0,0,"7",,terminal_output +2023,5747991,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004250 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004250) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/003750) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +2024,5748421,"TERMINAL",0,0,"8",,terminal_output +2025,5749527,"TERMINAL",0,0,"9",,terminal_output +2026,5750489,"TERMINAL",0,0,"20",,terminal_output +2027,5751530,"TERMINAL",0,0,"1",,terminal_output +2028,5752567,"TERMINAL",0,0,"2",,terminal_output +2029,5753639,"TERMINAL",0,0,"3",,terminal_output +2030,5754749,"TERMINAL",0,0,"4",,terminal_output +2031,5755724,"TERMINAL",0,0,"5",,terminal_output +2032,5756801,"TERMINAL",0,0,"6",,terminal_output +2033,5757750,"TERMINAL",0,0,"7",,terminal_output +2034,5758849,"TERMINAL",0,0,"8",,terminal_output +2035,5759823,"TERMINAL",0,0,"9",,terminal_output +2036,5760864,"TERMINAL",0,0,"30",,terminal_output +2037,5761907,"TERMINAL",0,0,"1",,terminal_output +2038,5762928,"TERMINAL",0,0,"2",,terminal_output +2039,5763964,"TERMINAL",0,0,"4",,terminal_output +2040,5764999,"TERMINAL",0,0,"5",,terminal_output +2041,5766118,"TERMINAL",0,0,"6",,terminal_output +2042,5767080,"TERMINAL",0,0,"7",,terminal_output +2043,5768163,"TERMINAL",0,0,"8",,terminal_output +2044,5769167,"TERMINAL",0,0,"9",,terminal_output +2045,5770199,"TERMINAL",0,0,"40",,terminal_output +2046,5771673,"TERMINAL",0,0,"1",,terminal_output +2047,5772263,"TERMINAL",0,0,"2",,terminal_output +2048,5773301,"TERMINAL",0,0,"3",,terminal_output +2049,5774456,"TERMINAL",0,0,"4",,terminal_output +2050,5775457,"TERMINAL",0,0,"5",,terminal_output +2051,5776480,"TERMINAL",0,0,"6",,terminal_output +2052,5777586,"TERMINAL",0,0,"7",,terminal_output +2053,5778552,"TERMINAL",0,0,"8",,terminal_output +2054,5779588,"TERMINAL",0,0,"9",,terminal_output +2055,5780625,"TERMINAL",0,0,"50",,terminal_output +2056,5781679,"TERMINAL",0,0,"1",,terminal_output +2057,5782701,"TERMINAL",0,0,"2",,terminal_output +2058,5783813,"TERMINAL",0,0,"3",,terminal_output +2059,5784867,"TERMINAL",0,0,"411",,terminal_output +2060,5785337,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=19] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=19] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +2061,5785720,"TERMINAL",0,0,"Saved checkpoint at step 4750\r\n",,terminal_output +2062,5785829,"TERMINAL",0,0,"5",,terminal_output +2063,5786987,"TERMINAL",0,0,"6",,terminal_output +2064,5787617,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004750 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004250 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +2065,5787877,"TERMINAL",0,0,"7",,terminal_output +2066,5788913,"TERMINAL",0,0,"8",,terminal_output +2067,5789956,"TERMINAL",0,0,"20:00",,terminal_output +2068,5790992,"TERMINAL",0,0,"1",,terminal_output +2069,5792032,"TERMINAL",0,0,"2",,terminal_output +2070,5793238,"TERMINAL",0,0,"3",,terminal_output +2071,5794173,"TERMINAL",0,0,"4",,terminal_output +2072,5795200,"TERMINAL",0,0,"53",,terminal_output +2073,5796185,"TERMINAL",0,0,"6",,terminal_output +2074,5797254,"TERMINAL",0,0,"7",,terminal_output +2075,5798275,"TERMINAL",0,0,"8",,terminal_output +2076,5799393,"TERMINAL",0,0,"9",,terminal_output +2077,5800352,"TERMINAL",0,0,"10",,terminal_output +2078,5801367,"TERMINAL",0,0,"1",,terminal_output +2079,5802522,"TERMINAL",0,0,"2",,terminal_output +2080,5803490,"TERMINAL",0,0,"3",,terminal_output +2081,5804474,"TERMINAL",0,0,"4",,terminal_output +2082,5805512,"TERMINAL",0,0,"5",,terminal_output +2083,5806561,"TERMINAL",0,0,"6",,terminal_output +2084,5807690,"TERMINAL",0,0,"7",,terminal_output +2085,5808618,"TERMINAL",0,0,"8",,terminal_output +2086,5809737,"TERMINAL",0,0,"9",,terminal_output +2087,5810840,"TERMINAL",0,0,"20",,terminal_output +2088,5811798,"TERMINAL",0,0,"1",,terminal_output +2089,5812773,"TERMINAL",0,0,"2",,terminal_output +2090,5813960,"TERMINAL",0,0,"3",,terminal_output +2091,5814836,"TERMINAL",0,0,"4",,terminal_output +2092,5815876,"TERMINAL",0,0,"5",,terminal_output +2093,5816913,"TERMINAL",0,0,"6",,terminal_output +2094,5817945,"TERMINAL",0,0,"8",,terminal_output +2095,5819010,"TERMINAL",0,0,"9",,terminal_output +2096,5820015,"TERMINAL",0,0,"30",,terminal_output +2097,5821052,"TERMINAL",0,0,"1",,terminal_output +2098,5822086,"TERMINAL",0,0,"2",,terminal_output +2099,5823155,"TERMINAL",0,0,"3",,terminal_output +2100,5824177,"TERMINAL",0,0,"4",,terminal_output +2101,5824792,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=20] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=20] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +2102,5825200,"TERMINAL",0,0,"Saved checkpoint at step 5000\r\n",,terminal_output +2103,5825211,"TERMINAL",0,0,"5",,terminal_output +2104,5826273,"TERMINAL",0,0,"6",,terminal_output +2105,5826636,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004750 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004750) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/005000 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/005000) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004500 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004500) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004250 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264/004250) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +2106,5827273,"TERMINAL",0,0,"7",,terminal_output +2107,5828376,"TERMINAL",0,0,"8",,terminal_output +2108,5829399,"TERMINAL",0,0,"9",,terminal_output +2109,5829409,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run breakout-dyn-default-3519264 at: https://wandb.ai/instant-uv/jafar/runs/hpcfm5xe\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20250924_140125-hpcfm5xe/logs\r\n",,terminal_output +2110,5830391,"TERMINAL",0,0,"40",,terminal_output +2111,5831550,"TERMINAL",0,0,"1",,terminal_output +2112,5832576,"TERMINAL",0,0,"2",,terminal_output +2113,5833600,"TERMINAL",0,0,"3",,terminal_output +2114,5834536,"TERMINAL",0,0,"4",,terminal_output +2115,5835573,"TERMINAL",0,0,"5",,terminal_output +2116,5835632,"TERMINAL",0,0,"W0924 14:20:45.697985 3547920 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""Cancelling all calls""}\r\n",,terminal_output +2117,5836635,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2118,5836637,"TERMINAL",0,0,"6",,terminal_output +2119,5837774,"TERMINAL",0,0,"7",,terminal_output +2120,5838838,"TERMINAL",0,0,"8",,terminal_output +2121,5839809,"TERMINAL",0,0,"9",,terminal_output +2122,5840765,"TERMINAL",0,0,"50",,terminal_output +2123,5841893,"TERMINAL",0,0,"1",,terminal_output +2124,5842914,"TERMINAL",0,0,"2",,terminal_output +2125,5843953,"TERMINAL",0,0,"3",,terminal_output +2126,5844977,"TERMINAL",0,0,"4",,terminal_output +2127,5845949,"TERMINAL",0,0,"6",,terminal_output +2128,5846983,"TERMINAL",0,0,"7",,terminal_output +2129,5848018,"TERMINAL",0,0,"8",,terminal_output +2130,5849059,"TERMINAL",0,0,"9",,terminal_output +2131,5850186,"TERMINAL",0,0,"1:00",,terminal_output +2132,5851205,"TERMINAL",0,0,"1",,terminal_output +2133,5852237,"TERMINAL",0,0,"2",,terminal_output +2134,5853201,"TERMINAL",0,0,"3",,terminal_output +2135,5854280,"TERMINAL",0,0,"4",,terminal_output +2136,5855304,"TERMINAL",0,0,"5",,terminal_output +2137,5855929,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\nsource .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3513908\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python 
sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=12 \\n --start_frame=1 \\n --image_height=10 \\n --image_width=10 \\n --use_gt_actions \\n --dyna_type=maskgit",shellscript,tab +2138,5856378,"TERMINAL",0,0,"6",,terminal_output +2139,5857690,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +2140,5857694,"TERMINAL",0,0,"7",,terminal_output +2141,5858380,"TERMINAL",0,0,"8",,terminal_output +2142,5859504,"TERMINAL",0,0,"9",,terminal_output +2143,5860456,"TERMINAL",0,0,"10",,terminal_output +2144,5861554,"TERMINAL",0,0,"1",,terminal_output +2145,5862357,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +2146,5862358,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1168,0,"",shellscript,selection_mouse +2147,5862547,"TERMINAL",0,0,"2",,terminal_output +2148,5863600,"TERMINAL",0,0,"3",,terminal_output +2149,5864623,"TERMINAL",0,0,"4",,terminal_output +2150,5865750,"TERMINAL",0,0,"5",,terminal_output +2151,5866685,"TERMINAL",0,0,"60",,terminal_output +2152,5867800,"TERMINAL",0,0,"7",,terminal_output +2153,5868928,"TERMINAL",0,0,"8",,terminal_output +2154,5869849,"TERMINAL",0,0,"9",,terminal_output +2155,5870850,"TERMINAL",0,0,"20",,terminal_output +2156,5871885,"TERMINAL",0,0,"1",,terminal_output +2157,5872923,"TERMINAL",0,0,"2",,terminal_output +2158,5873990,"TERMINAL",0,0,"4",,terminal_output +2159,5875029,"TERMINAL",0,0,"5",,terminal_output +2160,5876041,"TERMINAL",0,0,"6",,terminal_output +2161,5877079,"TERMINAL",0,0,"7",,terminal_output +2162,5878141,"TERMINAL",0,0,"8",,terminal_output +2163,5879282,"TERMINAL",0,0,"9",,terminal_output +2164,5880293,"TERMINAL",0,0,"30",,terminal_output +2165,5881239,"TERMINAL",0,0,"1",,terminal_output +2166,5882345,"TERMINAL",0,0,"2",,terminal_output +2167,5883327,"TERMINAL",0,0,"3",,terminal_output +2168,5883435,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1175,0,"",shellscript,selection_mouse +2169,5883445,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1174,0,"",shellscript,selection_command +2170,5884435,"TERMINAL",0,0,"4",,terminal_output +2171,5885406,"TERMINAL",0,0,"5",,terminal_output +2172,5886537,"TERMINAL",0,0,"6",,terminal_output +2173,5887175,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +2174,5887176,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,0,"",shellscript,selection_mouse +2175,5887176,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,2,"/h",shellscript,selection_mouse +2176,5887176,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,24,"/hkfs/work/workspace/scr",shellscript,selection_mouse +2177,5887177,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,39,"/hkfs/work/workspace/scratch/tum_ind369",shellscript,selection_mouse +2178,5887177,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,51,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_sh",shellscript,selection_mouse +2179,5887177,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",709,58,"ta_breakout/breakout_episodes_perfect/val\nCHECKPOINT_PATH=",shellscript,selection_mouse +2180,5887178,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",714,53,"eakout/breakout_episodes_perfect/val\nCHECKPOINT_PATH=",shellscript,selection_mouse +2181,5887282,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,69,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/b",shellscript,selection_mouse 
+2182,5887283,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,74,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breako",shellscript,selection_mouse +2183,5887336,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,79,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dy",shellscript,selection_mouse +2184,5887389,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,82,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/i",shellscript,selection_mouse +2185,5887399,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,83,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/in",shellscript,selection_mouse +2186,5887421,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,84,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/int",shellscript,selection_mouse +2187,5887460,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,86,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/inter",shellscript,selection_mouse +2188,5887477,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,88,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interac",shellscript,selection_mouse +2189,5887500,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,90,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interacti",shellscript,selection_mouse +2190,5887531,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,91,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactiv",shellscript,selection_mouse +2191,5887551,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,93,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/",shellscript,selection_mouse +2192,5887584,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,94,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3",shellscript,selection_mouse +2193,5887603,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,95,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/35",shellscript,selection_mouse +2194,5887625,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,96,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/351",shellscript,selection_mouse +2195,5887625,"TERMINAL",0,0,"7",,terminal_output +2196,5887662,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,97,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3513",shellscript,selection_mouse +2197,5887691,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,98,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/35139",shellscript,selection_mouse +2198,5887732,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,99,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/351390",shellscript,selection_mouse +2199,5887771,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,100,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3513908",shellscript,selection_mouse +2200,5888190,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,100,"",shellscript,content 
+2201,5888235,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",766,0,"",shellscript,selection_command +2202,5888514,"TERMINAL",0,0,"8",,terminal_output +2203,5888887,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,0,"",shellscript,selection_command +2204,5889432,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095",shellscript,content +2205,5889562,"TERMINAL",0,0,"9",,terminal_output +2206,5890614,"TERMINAL",0,0,"40",,terminal_output +2207,5891003,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",879,0,"",shellscript,selection_mouse +2208,5891630,"TERMINAL",0,0,"1",,terminal_output +2209,5891668,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",796,0,"",shellscript,selection_mouse +2210,5891759,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",795,0,"",shellscript,selection_command +2211,5892687,"TERMINAL",0,0,"2",,terminal_output +2212,5893821,"TERMINAL",0,0,"3",,terminal_output +2213,5894861,"TERMINAL",0,0,"4",,terminal_output +2214,5895800,"TERMINAL",0,0,"5",,terminal_output +2215,5895890,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1090,0,"",shellscript,selection_mouse +2216,5896881,"TERMINAL",0,0,"6",,terminal_output +2217,5897906,"TERMINAL",0,0,"7",,terminal_output +2218,5898223,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1089,1,"",shellscript,content +2219,5898395,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1089,0,"4",shellscript,content +2220,5898396,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1090,0,"",shellscript,selection_keyboard +2221,5898764,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1089,0,"",shellscript,selection_command +2222,5898881,"TERMINAL",0,0,"8",,terminal_output +2223,5899921,"TERMINAL",0,0,"9",,terminal_output +2224,5900958,"TERMINAL",0,0,"51",,terminal_output +2225,5901995,"TERMINAL",0,0,"2",,terminal_output +2226,5903031,"TERMINAL",0,0,"3",,terminal_output +2227,5904067,"TERMINAL",0,0,"4",,terminal_output +2228,5905178,"TERMINAL",0,0,"5",,terminal_output +2229,5906202,"TERMINAL",0,0,"6",,terminal_output +2230,5907305,"TERMINAL",0,0,"7",,terminal_output +2231,5907797,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1186,0,"",shellscript,selection_mouse +2232,5907809,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1185,0,"",shellscript,selection_command +2233,5908218,"TERMINAL",0,0,"8",,terminal_output +2234,5908406,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1162,0,"",shellscript,selection_mouse +2235,5908416,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1161,0,"",shellscript,selection_command +2236,5909035,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1116,0,"",shellscript,selection_mouse +2237,5909094,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1115,0,"",shellscript,selection_command +2238,5909320,"TERMINAL",0,0,"9",,terminal_output +2239,5909864,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1186,0,"",shellscript,selection_mouse +2240,5909867,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1185,0,"",shellscript,selection_command +2241,5910297,"TERMINAL",0,0,"2:00",,terminal_output +2242,5911435,"TERMINAL",0,0,"1",,terminal_output +2243,5912373,"TERMINAL",0,0,"2",,terminal_output +2244,5913469,"TERMINAL",0,0,"3",,terminal_output +2245,5914493,"TERMINAL",0,0,"4",,terminal_output +2246,5915484,"TERMINAL",0,0,"5",,terminal_output 
+2247,5915780,"TERMINAL",0,0,"s",,terminal_output +2248,5915842,"TERMINAL",0,0,"h",,terminal_output +2249,5915903,"TERMINAL",0,0," ",,terminal_output +2250,5916241,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +2251,5916541,"TERMINAL",0,0,"6",,terminal_output +2252,5916550,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch\r\n[?2004l\r",,terminal_output +2253,5916705,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095\r\n",,terminal_output +2254,5916864,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2255,5917669,"TERMINAL",0,0,"7",,terminal_output +2256,5918387,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py"", line 17, in \r\n from genie import Genie\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py"", line 568, in \r\n optimizer: nnx.ModelAndOptimizer,\r\nAttributeError: module 'flax.nnx' has no attribute 'ModelAndOptimizer'\r\n",,terminal_output +2257,5918533,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2258,5918648,"TERMINAL",0,0,"8",,terminal_output +2259,5919716,"TERMINAL",0,0,"9",,terminal_output +2260,5920746,"TERMINAL",0,0,"10",,terminal_output +2261,5921765,"TERMINAL",0,0,"1",,terminal_output +2262,5922761,"TERMINAL",0,0,"2",,terminal_output +2263,5923810,"TERMINAL",0,0,"3",,terminal_output +2264,5924433,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +2265,5924434,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",631,0,"",shellscript,selection_mouse +2266,5924518,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",630,0,"",shellscript,selection_command +2267,5924869,"TERMINAL",0,0,"4",,terminal_output +2268,5925044,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",606,0,"",shellscript,selection_command +2269,5925402,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",606,0,"#",shellscript,content +2270,5925404,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",607,0,"",shellscript,selection_keyboard +2271,5925498,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",607,0," ",shellscript,content +2272,5925499,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",608,0,"",shellscript,selection_keyboard +2273,5925720,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",607,0,"",shellscript,selection_command +2274,5925883,"TERMINAL",0,0,"5",,terminal_output +2275,5926928,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +2276,5926930,"TERMINAL",0,0,"6",,terminal_output +2277,5927135,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2278,5927254,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095\r\n",,terminal_output +2279,5927360,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2280,5927947,"TERMINAL",0,0,"8",,terminal_output +2281,5928980,"TERMINAL",0,0,"9",,terminal_output +2282,5930017,"TERMINAL",0,0,"20",,terminal_output +2283,5931089,"TERMINAL",0,0,"1",,terminal_output +2284,5932091,"TERMINAL",0,0,"2",,terminal_output +2285,5933143,"TERMINAL",0,0,"3",,terminal_output 
+2286,5934166,"TERMINAL",0,0,"4",,terminal_output +2287,5935275,"TERMINAL",0,0,"5",,terminal_output +2288,5935477,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py"", line 151, in \r\n restored = checkpoint_manager.restore(\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1673, in restore\r\n restored = self._checkpointer.restore(restore_directory, args=args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 571, in restore\r\n return super().restore(directory, *args, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 306, in restore\r\n restored = self._restore(directory, args=ckpt_args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 328, in _restore\r\n return self._handler.restore(directory, args=args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 857, in restore\r\n restored[item_name] = handler.restore(\r\n ^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/handlers/pytree_checkpoint_handler.py"", line 835, in restore\r\n return self._handler_impl.restore(directory, args=args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 949, in restore\r\n raise ValueError(\r\nValueError: User-provided restore item and on-disk value metadata tree structures do not match:\r\nmodel.tokenizer.vq.drop:\r\n - Source: MISSING\r\n - Target: {'rngs': {'count': {'value': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=())}, 'key': {'value': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(2,))}}}\r\n",,terminal_output +2289,5936251,"TERMINAL",0,0,"6",,terminal_output +2290,5937279,"TERMINAL",0,0,"7",,terminal_output +2291,5938354,"TERMINAL",0,0,"8",,terminal_output +2292,5939388,"TERMINAL",0,0,"9",,terminal_output +2293,5940390,"TERMINAL",0,0,"30",,terminal_output +2294,5940595,"TERMINAL",0,0,"W0924 14:22:30.679930 3740625 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""Cancelling all calls"", grpc_status:14}\r\n",,terminal_output +2295,5941001,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited 
with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2296,5941511,"TERMINAL",0,0,"1",,terminal_output +2297,5942464,"TERMINAL",0,0,"2",,terminal_output +2298,5943503,"TERMINAL",0,0,"3",,terminal_output +2299,5944539,"TERMINAL",0,0,"4",,terminal_output +2300,5945580,"TERMINAL",0,0,"5",,terminal_output +2301,5946646,"TERMINAL",0,0,"6",,terminal_output +2302,5947671,"TERMINAL",0,0,"7",,terminal_output +2303,5948959,"sample.py",0,0,"",python,tab +2304,5949241,"TERMINAL",0,0,"8",,terminal_output +2305,5949735,"TERMINAL",0,0,"9",,terminal_output +2306,5950079,"sample.py",6743,0,"",python,selection_mouse +2307,5950767,"TERMINAL",0,0,"40",,terminal_output +2308,5951869,"TERMINAL",0,0,"1",,terminal_output +2309,5952848,"TERMINAL",0,0,"2",,terminal_output +2310,5953439,"sample.py",3155,0,"",python,selection_command +2311,5953921,"TERMINAL",0,0,"3",,terminal_output +2312,5954324,"sample.py",3151,32,"",python,content +2313,5954342,"sample.py",3155,0,"",python,selection_command +2314,5954921,"TERMINAL",0,0,"4",,terminal_output +2315,5956038,"TERMINAL",0,0,"6",,terminal_output +2316,5956455,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +2317,5956742,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2318,5956897,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095\r\n",,terminal_output +2319,5956959,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2320,5957012,"TERMINAL",0,0,"7",,terminal_output +2321,5958032,"TERMINAL",0,0,"8",,terminal_output +2322,5959069,"TERMINAL",0,0,"9",,terminal_output +2323,5960105,"TERMINAL",0,0,"50",,terminal_output +2324,5961200,"TERMINAL",0,0,"1",,terminal_output +2325,5962186,"TERMINAL",0,0,"2",,terminal_output +2326,5963214,"TERMINAL",0,0,"3",,terminal_output +2327,5964250,"TERMINAL",0,0,"4",,terminal_output +2328,5964967,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2329,5965286,"TERMINAL",0,0,"5",,terminal_output +2330,5966325,"TERMINAL",0,0,"6",,terminal_output +2331,5967441,"TERMINAL",0,0,"7",,terminal_output +2332,5968460,"TERMINAL",0,0,"8",,terminal_output +2333,5969487,"TERMINAL",0,0,"9",,terminal_output +2334,5970471,"TERMINAL",0,0,"3:00",,terminal_output +2335,5971532,"TERMINAL",0,0,"1",,terminal_output +2336,5972559,"TERMINAL",0,0,"2",,terminal_output +2337,5973682,"TERMINAL",0,0,"3",,terminal_output +2338,5974705,"TERMINAL",0,0,"48",,terminal_output +2339,5975696,"TERMINAL",0,0,"5",,terminal_output +2340,5976703,"TERMINAL",0,0,"6",,terminal_output +2341,5977783,"TERMINAL",0,0,"7",,terminal_output +2342,5978805,"TERMINAL",0,0,"8",,terminal_output +2343,5979809,"TERMINAL",0,0,"9",,terminal_output +2344,5980844,"TERMINAL",0,0,"10",,terminal_output +2345,5981978,"TERMINAL",0,0,"1",,terminal_output +2346,5982917,"TERMINAL",0,0,"2",,terminal_output +2347,5983955,"TERMINAL",0,0,"4",,terminal_output +2348,5984998,"TERMINAL",0,0,"5",,terminal_output +2349,5986027,"TERMINAL",0,0,"6",,terminal_output +2350,5987099,"TERMINAL",0,0,"72",,terminal_output +2351,5988124,"TERMINAL",0,0,"8",,terminal_output +2352,5989139,"TERMINAL",0,0,"9",,terminal_output +2353,5990272,"TERMINAL",0,0,"20",,terminal_output +2354,5991238,"TERMINAL",0,0,"1",,terminal_output +2355,5992318,"TERMINAL",0,0,"2",,terminal_output +2356,5993343,"TERMINAL",0,0,"3",,terminal_output +2357,5994368,"TERMINAL",0,0,"SSIM: 0.7272958755493164\r\n",,terminal_output +2358,5994369,"TERMINAL",0,0,"4",,terminal_output +2359,5995367,"TERMINAL",0,0,"5",,terminal_output +2360,5996416,"TERMINAL",0,0,"6",,terminal_output +2361,5997571,"TERMINAL",0,0,"7",,terminal_output +2362,5998648,"TERMINAL",0,0,"8",,terminal_output +2363,5999591,"TERMINAL",0,0,"9",,terminal_output +2364,5999923,"TERMINAL",0,0,"W0924 14:23:29.902904 3741034 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugonly job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""Cancelling all calls""}\r\n",,terminal_output +2365,6000365,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2366,6000544,"TERMINAL",0,0,"30",,terminal_output +2367,6001589,"TERMINAL",0,0,"1",,terminal_output +2368,6002661,"TERMINAL",0,0,"2",,terminal_output +2369,6003788,"TERMINAL",0,0,"3",,terminal_output +2370,6004813,"TERMINAL",0,0,"4",,terminal_output +2371,6005743,"TERMINAL",0,0,"5",,terminal_output +2372,6006769,"TERMINAL",0,0,"6",,terminal_output +2373,6007988,"TERMINAL",0,0,"7",,terminal_output +2374,6008908,"TERMINAL",0,0,"8",,terminal_output +2375,6010241,"TERMINAL",0,0,"9",,terminal_output +2376,6010994,"TERMINAL",0,0,"40",,terminal_output +2377,6012026,"TERMINAL",0,0,"2",,terminal_output +2378,6013005,"TERMINAL",0,0,"3",,terminal_output +2379,6014017,"TERMINAL",0,0,"4",,terminal_output +2380,6015052,"TERMINAL",0,0,"5",,terminal_output +2381,6016090,"TERMINAL",0,0,"6",,terminal_output +2382,6017170,"TERMINAL",0,0,"7",,terminal_output +2383,6018237,"TERMINAL",0,0,"8",,terminal_output +2384,6019249,"TERMINAL",0,0,"9",,terminal_output +2385,6020273,"TERMINAL",0,0,"50",,terminal_output 
+2386,6021273,"TERMINAL",0,0,"1",,terminal_output +2387,6022321,"TERMINAL",0,0,"2",,terminal_output +2388,6023449,"TERMINAL",0,0,"3",,terminal_output +2389,6024471,"TERMINAL",0,0,"4",,terminal_output +2390,6025457,"TERMINAL",0,0,"5",,terminal_output +2391,6026529,"TERMINAL",0,0,"6",,terminal_output +2392,6027491,"TERMINAL",0,0,"7",,terminal_output +2393,6028568,"TERMINAL",0,0,"8",,terminal_output +2394,6029594,"TERMINAL",0,0,"9",,terminal_output +2395,6030618,"TERMINAL",0,0,"4:00",,terminal_output +2396,6031641,"TERMINAL",0,0,"1",,terminal_output +2397,6032763,"TERMINAL",0,0,"2",,terminal_output +2398,6033791,"TERMINAL",0,0,"3",,terminal_output +2399,6034815,"TERMINAL",0,0,"4",,terminal_output +2400,6035802,"TERMINAL",0,0,"5",,terminal_output +2401,6036837,"TERMINAL",0,0,"6",,terminal_output +2402,6037913,"TERMINAL",0,0,"7",,terminal_output +2403,6038919,"TERMINAL",0,0,"8",,terminal_output +2404,6039366,"sample.py",0,0,"",python,tab +2405,6039941,"TERMINAL",0,0,"10",,terminal_output +2406,6040976,"TERMINAL",0,0,"1",,terminal_output +2407,6042034,"TERMINAL",0,0,"2",,terminal_output +2408,6042998,"sample.py",8012,0,"",python,selection_mouse +2409,6043116,"TERMINAL",0,0,"3",,terminal_output +2410,6043537,"sample.py",7958,0,"",python,selection_mouse +2411,6044134,"TERMINAL",0,0,"4",,terminal_output +2412,6044202,"sample.py",7955,0,"",python,selection_command +2413,6044614,"sample.py",7955,0,"#",python,content +2414,6044616,"sample.py",7956,0,"",python,selection_keyboard +2415,6044676,"sample.py",7956,0," ",python,content +2416,6044677,"sample.py",7957,0,"",python,selection_keyboard +2417,6045137,"TERMINAL",0,0,"5",,terminal_output +2418,6045200,"sample.py",7956,0,"",python,selection_command +2419,6046194,"TERMINAL",0,0,"6",,terminal_output +2420,6047219,"TERMINAL",0,0,"7",,terminal_output +2421,6048258,"TERMINAL",0,0,"8",,terminal_output +2422,6049357,"TERMINAL",0,0,"9",,terminal_output +2423,6050348,"TERMINAL",0,0,"20",,terminal_output +2424,6051405,"TERMINAL",0,0,"1",,terminal_output +2425,6052434,"TERMINAL",0,0,"2",,terminal_output +2426,6053558,"TERMINAL",0,0,"3",,terminal_output +2427,6054584,"TERMINAL",0,0,"4",,terminal_output +2428,6055532,"TERMINAL",0,0,"5",,terminal_output +2429,6056576,"TERMINAL",0,0,"6",,terminal_output +2430,6057653,"TERMINAL",0,0,"7",,terminal_output +2431,6058681,"TERMINAL",0,0,"8",,terminal_output +2432,6059689,"TERMINAL",0,0,"9",,terminal_output +2433,6060769,"TERMINAL",0,0,"30",,terminal_output +2434,6061759,"TERMINAL",0,0,"1",,terminal_output +2435,6061957,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +2436,6062874,"TERMINAL",0,0,"2",,terminal_output +2437,6063841,"TERMINAL",0,0,"3",,terminal_output +2438,6063876,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2439,6063982,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095\r\n",,terminal_output +2440,6064104,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2441,6064922,"TERMINAL",0,0,"4",,terminal_output +2442,6065949,"TERMINAL",0,0,"5",,terminal_output +2443,6066970,"TERMINAL",0,0,"7",,terminal_output +2444,6067443,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +2445,6067443,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",767,0,"",shellscript,selection_mouse +2446,6067977,"TERMINAL",0,0,"8",,terminal_output +2447,6069060,"TERMINAL",0,0,"9",,terminal_output 
+2448,6070055,"TERMINAL",0,0,"40",,terminal_output +2449,6071169,"TERMINAL",0,0,"1",,terminal_output +2450,6072054,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2451,6072165,"TERMINAL",0,0,"2",,terminal_output +2452,6073217,"TERMINAL",0,0,"3",,terminal_output +2453,6073889,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",837,0,"",shellscript,selection_mouse +2454,6074195,"TERMINAL",0,0,"4",,terminal_output +2455,6075235,"TERMINAL",0,0,"5",,terminal_output +2456,6075614,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",880,0,"\n",shellscript,content +2457,6075999,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",881,0,"\n",shellscript,content +2458,6076303,"TERMINAL",0,0,"6",,terminal_output +2459,6076550,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",882,0,"C",shellscript,content +2460,6076551,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",883,0,"",shellscript,selection_keyboard +2461,6076684,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",883,0,"H",shellscript,content +2462,6076684,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",884,0,"",shellscript,selection_keyboard +2463,6076879,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",884,0,"E",shellscript,content +2464,6076880,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",885,0,"",shellscript,selection_keyboard +2465,6077317,"TERMINAL",0,0,"7",,terminal_output +2466,6078442,"TERMINAL",0,0,"8",,terminal_output +2467,6078558,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",882,3,"CHECKPOINT_PATH",shellscript,content +2468,6079033,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",897,0,"=",shellscript,content +2469,6079034,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",898,0,"",shellscript,selection_keyboard +2470,6079402,"TERMINAL",0,0,"9",,terminal_output +2471,6079537,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",898,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264",shellscript,content +2472,6080456,"TERMINAL",0,0,"50",,terminal_output +2473,6081510,"TERMINAL",0,0,"1",,terminal_output +2474,6082532,"TERMINAL",0,0,"2",,terminal_output +2475,6083060,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1281,0,"",shellscript,selection_mouse +2476,6083543,"TERMINAL",0,0,"3",,terminal_output +2477,6083894,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1280,0,"",shellscript,selection_command +2478,6084568,"TERMINAL",0,0,"4",,terminal_output +2479,6084704,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1303,0,"",shellscript,selection_command +2480,6085035,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1306,0,"",shellscript,selection_command +2481,6085514,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1306,0," ",shellscript,content +2482,6085516,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1307,0,"",shellscript,selection_keyboard +2483,6085606,"TERMINAL",0,0,"5",,terminal_output 
+2484,6085768,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1307,0,"\",shellscript,content +2485,6085769,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1308,0,"",shellscript,selection_keyboard +2486,6086278,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1307,0,"",shellscript,selection_command +2487,6086469,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1281,0,"",shellscript,selection_command +2488,6086684,"TERMINAL",0,0,"6",,terminal_output +2489,6086907,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1260,23,"",shellscript,content +2490,6086980,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1264,0,"",shellscript,selection_command +2491,6087102,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1285,0,"\n --use_gt_actions \",shellscript,content +2492,6087102,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1290,0,"",shellscript,selection_command +2493,6087821,"TERMINAL",0,0,"7",,terminal_output +2494,6088715,"TERMINAL",0,0,"8",,terminal_output +2495,6089449,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1264,0,"",shellscript,selection_command +2496,6089763,"TERMINAL",0,0,"9",,terminal_output +2497,6089950,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1241,0,"",shellscript,selection_command +2498,6090036,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1217,0,"",shellscript,selection_command +2499,6090037,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1195,0,"",shellscript,selection_command +2500,6090050,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1173,0,"",shellscript,selection_command +2501,6090103,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1154,0,"",shellscript,selection_command +2502,6090104,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1118,0,"",shellscript,selection_command +2503,6090181,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1080,0,"",shellscript,selection_command +2504,6090182,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1056,0,"",shellscript,selection_command +2505,6090297,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1051,0,"",shellscript,selection_command +2506,6090466,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1056,0,"",shellscript,selection_command +2507,6090789,"TERMINAL",0,0,"5:00",,terminal_output +2508,6090964,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1080,0,"",shellscript,selection_command +2509,6091020,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1118,0,"",shellscript,selection_command +2510,6091028,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1154,0,"",shellscript,selection_command +2511,6091054,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1173,0,"",shellscript,selection_command +2512,6091146,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1195,0,"",shellscript,selection_command +2513,6091147,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1217,0,"",shellscript,selection_command +2514,6091407,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1241,0,"",shellscript,selection_command +2515,6091570,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1264,0,"",shellscript,selection_command +2516,6091753,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1290,0,"",shellscript,selection_command +2517,6091825,"TERMINAL",0,0,"1",,terminal_output +2518,6092875,"TERMINAL",0,0,"2",,terminal_output +2519,6093900,"TERMINAL",0,0,"3",,terminal_output +2520,6095160,"TERMINAL",0,0,"4",,terminal_output 
+2521,6095975,"TERMINAL",0,0,"6",,terminal_output +2522,6097011,"TERMINAL",0,0,"7",,terminal_output +2523,6097618,"slurm/jobs/mihir/horeka/breakout/sample_maskgit copy.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095\n\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=12 \\n --start_frame=4 \\n --image_height=10 \\n --image_width=10 \\n --dyna_type=maskgit \\n --use_gt_actions \",shellscript,tab +2524,6098052,"TERMINAL",0,0,"8",,terminal_output +2525,6099091,"TERMINAL",0,0,"9",,terminal_output +2526,6100048,"TERMINAL",0,0,"SSIM: 0.7272958755493164\r\n",,terminal_output +2527,6100155,"TERMINAL",0,0,"10",,terminal_output +2528,6100465,"TERMINAL",0,0,"W0924 14:25:10.565627 3742212 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": CANCELLED: CANCELLED\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""CANCELLED"", grpc_status:1} [type.googleapis.com/tensorflow.DerivedStatus='']\r\n",,terminal_output +2529,6101042,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch: line 38: --start_frame=4: command not found\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2530,6101162,"TERMINAL",0,0,"1",,terminal_output +2531,6102205,"TERMINAL",0,0,"2",,terminal_output +2532,6103232,"TERMINAL",0,0,"3",,terminal_output +2533,6104363,"TERMINAL",0,0,"4",,terminal_output +2534,6105348,"TERMINAL",0,0,"5",,terminal_output +2535,6106393,"TERMINAL",0,0,"6",,terminal_output +2536,6107364,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\n# source 
.venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095\n\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=12 \\n --start_frame=4 \\n --image_height=10 \\n --image_width=10 \\n --dyna_type=maskgit \\n --use_gt_actions \",shellscript,tab +2537,6107965,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +2538,6108343,"TERMINAL",0,0,"719",,terminal_output +2539,6108977,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1308,0,"",shellscript,selection_mouse +2540,6109006,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1307,0,"",shellscript,selection_command +2541,6109087,"TERMINAL",0,0,"9",,terminal_output +2542,6110086,"TERMINAL",0,0,"20",,terminal_output +2543,6110163,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1285,23,"",shellscript,content +2544,6110205,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1264,0,"",shellscript,selection_command +2545,6110379,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1241,0,"",shellscript,selection_command +2546,6110872,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1217,0,"",shellscript,selection_command +2547,6110904,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1195,0,"",shellscript,selection_command +2548,6110960,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1173,0,"",shellscript,selection_command +2549,6110969,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1154,0,"",shellscript,selection_command +2550,6110995,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1118,0,"",shellscript,selection_command +2551,6111022,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1080,0,"",shellscript,selection_command +2552,6111074,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1056,0,"",shellscript,selection_command +2553,6111075,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1051,0,"",shellscript,selection_command +2554,6111111,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1005,0,"",shellscript,selection_command +2555,6111166,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1000,0,"",shellscript,selection_command +2556,6111167,"TERMINAL",0,0,"1",,terminal_output +2557,6111173,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",999,0,"",shellscript,selection_command +2558,6111233,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",886,0,"",shellscript,selection_command +2559,6111244,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",881,0,"",shellscript,selection_command +2560,6111331,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",757,0,"",shellscript,selection_command +2561,6111523,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",639,0,"",shellscript,selection_command +2562,6111822,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",757,0,"",shellscript,selection_command +2563,6112265,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",753,128,"",shellscript,content +2564,6112269,"TERMINAL",0,0,"2",,terminal_output +2565,6113203,"TERMINAL",0,0,"3",,terminal_output 
+2566,6114330,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",0,0,"",shellscript,tab +2567,6114560,"TERMINAL",0,0,"4",,terminal_output +2568,6115287,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",904,0,"",shellscript,selection_mouse +2569,6115337,"TERMINAL",0,0,"5",,terminal_output +2570,6116456,"TERMINAL",0,0,"6",,terminal_output +2571,6117377,"TERMINAL",0,0,"7",,terminal_output +2572,6118198,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",0,0,"",shellscript,tab +2573,6118199,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",953,0,"",shellscript,selection_mouse +2574,6118432,"TERMINAL",0,0,"8",,terminal_output +2575,6118818,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",956,0,"",shellscript,selection_mouse +2576,6119438,"TERMINAL",0,0,"9",,terminal_output +2577,6119788,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",882,117,"",shellscript,content +2578,6120470,"TERMINAL",0,0,"30",,terminal_output +2579,6120601,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",882,1,"",shellscript,content +2580,6121424,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",882,1,"",shellscript,content +2581,6121499,"TERMINAL",0,0,"1",,terminal_output +2582,6122544,"TERMINAL",0,0,"2",,terminal_output +2583,6122990,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +2584,6123598,"TERMINAL",0,0,"3",,terminal_output +2585,6123952,"TERMINAL",0,0,"",,terminal_output +2586,6124417,"TERMINAL",0,0,"",,terminal_output +2587,6124622,"TERMINAL",0,0,"",,terminal_output +2588,6124633,"TERMINAL",0,0,"4",,terminal_output +2589,6124796,"TERMINAL",0,0,"",,terminal_output +2590,6124976,"TERMINAL",0,0,"",,terminal_output +2591,6125146,"TERMINAL",0,0,"",,terminal_output +2592,6125356,"TERMINAL",0,0,"",,terminal_output +2593,6125755,"TERMINAL",0,0,"5",,terminal_output +2594,6125907,"TERMINAL",0,0,"",,terminal_output +2595,6126070,"TERMINAL",0,0,"",,terminal_output +2596,6126470,"TERMINAL",0,0,"",,terminal_output +2597,6126680,"TERMINAL",0,0,"6",,terminal_output +2598,6126697,"TERMINAL",0,0,"-",,terminal_output +2599,6127722,"TERMINAL",0,0,"7",,terminal_output +2600,6127872,"TERMINAL",0,0,"g",,terminal_output +2601,6127933,"TERMINAL",0,0,"t",,terminal_output +2602,6128771,"TERMINAL",0,0,"-",,terminal_output +2603,6128783,"TERMINAL",0,0,"8",,terminal_output +2604,6128926,"TERMINAL",0,0,"a",,terminal_output +2605,6129098,"TERMINAL",0,0,"c",,terminal_output +2606,6129239,"TERMINAL",0,0,"t",,terminal_output +2607,6129301,"TERMINAL",0,0,"i",,terminal_output +2608,6129364,"TERMINAL",0,0,"o",,terminal_output +2609,6129540,"TERMINAL",0,0,"n",,terminal_output +2610,6129664,"TERMINAL",0,0,"s",,terminal_output +2611,6129731,"TERMINAL",0,0,".",,terminal_output +2612,6129837,"TERMINAL",0,0,"9",,terminal_output +2613,6130422,"TERMINAL",0,0,"s",,terminal_output +2614,6130583,"TERMINAL",0,0,"b",,terminal_output +2615,6130696,"TERMINAL",0,0,"a",,terminal_output +2616,6130843,"TERMINAL",0,0,"t",,terminal_output +2617,6130853,"TERMINAL",0,0,"40",,terminal_output +2618,6131093,"TERMINAL",0,0,"ch",,terminal_output +2619,6131290,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2620,6131410,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095\r\n",,terminal_output +2621,6131530,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output 
+2622,6131892,"TERMINAL",0,0,"1",,terminal_output +2623,6132916,"TERMINAL",0,0,"2",,terminal_output +2624,6134043,"TERMINAL",0,0,"4",,terminal_output +2625,6135075,"TERMINAL",0,0,"5",,terminal_output +2626,6136021,"TERMINAL",0,0,"6",,terminal_output +2627,6137055,"TERMINAL",0,0,"7",,terminal_output +2628,6138089,"TERMINAL",0,0,"8",,terminal_output +2629,6139137,"TERMINAL",0,0,"9",,terminal_output +2630,6139551,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2631,6140331,"TERMINAL",0,0,"50",,terminal_output +2632,6141267,"TERMINAL",0,0,"1",,terminal_output +2633,6142345,"TERMINAL",0,0,"2",,terminal_output +2634,6143274,"TERMINAL",0,0,"3",,terminal_output +2635,6144311,"TERMINAL",0,0,"4",,terminal_output +2636,6145361,"TERMINAL",0,0,"5",,terminal_output +2637,6146458,"TERMINAL",0,0,"6",,terminal_output +2638,6147437,"TERMINAL",0,0,"7",,terminal_output +2639,6148482,"TERMINAL",0,0,"8",,terminal_output +2640,6149508,"TERMINAL",0,0,"9",,terminal_output +2641,6150534,"TERMINAL",0,0,"6:00",,terminal_output +2642,6151657,"TERMINAL",0,0,"1",,terminal_output +2643,6152678,"TERMINAL",0,0,"2",,terminal_output +2644,6153706,"TERMINAL",0,0,"3",,terminal_output +2645,6154728,"TERMINAL",0,0,"4",,terminal_output +2646,6155751,"TERMINAL",0,0,"5",,terminal_output +2647,6156777,"TERMINAL",0,0,"6",,terminal_output +2648,6157801,"TERMINAL",0,0,"7",,terminal_output +2649,6158926,"TERMINAL",0,0,"8",,terminal_output +2650,6159966,"TERMINAL",0,0,"9",,terminal_output +2651,6160962,"TERMINAL",0,0,"10",,terminal_output +2652,6162048,"TERMINAL",0,0,"2",,terminal_output +2653,6163024,"TERMINAL",0,0,"3",,terminal_output +2654,6164052,"TERMINAL",0,0,"4",,terminal_output +2655,6165072,"TERMINAL",0,0,"5",,terminal_output +2656,6166079,"TERMINAL",0,0,"6",,terminal_output +2657,6167132,"TERMINAL",0,0,"7",,terminal_output +2658,6167865,"TERMINAL",0,0,"SSIM: 0.7272958755493164\r\n",,terminal_output +2659,6168171,"TERMINAL",0,0,"8",,terminal_output +2660,6168449,"TERMINAL",0,0,"W0924 14:26:18.479467 3743333 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugstr job_name: ""jax_worker"": CANCELLED: CANCELLED\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:1, grpc_message:""CANCELLED""} [type.googleapis.com/tensorflow.DerivedStatus='']\r\n",,terminal_output +2661,6168959,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2662,6169200,"TERMINAL",0,0,"9",,terminal_output +2663,6170241,"TERMINAL",0,0,"20",,terminal_output +2664,6171291,"TERMINAL",0,0,"1",,terminal_output +2665,6172338,"TERMINAL",0,0,"2",,terminal_output +2666,6173362,"TERMINAL",0,0,"3",,terminal_output +2667,6174388,"TERMINAL",0,0,"4",,terminal_output +2668,6175458,"TERMINAL",0,0,"5",,terminal_output +2669,6176536,"TERMINAL",0,0,"6",,terminal_output +2670,6177495,"TERMINAL",0,0,"7",,terminal_output +2671,6178071,"TERMINAL",0,0,"sh 
slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",,terminal_output +2672,6178585,"TERMINAL",0,0,"8",,terminal_output +2673,6179665,"TERMINAL",0,0,"9",,terminal_output +2674,6180607,"TERMINAL",0,0,"30",,terminal_output +2675,6181658,"TERMINAL",0,0,"1",,terminal_output +2676,6182741,"TERMINAL",0,0,"2",,terminal_output +2677,6183721,"TERMINAL",0,0,"3",,terminal_output +2678,6183721,"TERMINAL",0,0,"",,terminal_output +2679,6184415,"TERMINAL",0,0,"",,terminal_output +2680,6184469,"TERMINAL",0,0,"",,terminal_output +2681,6184582,"TERMINAL",0,0,"",,terminal_output +2682,6184655,"TERMINAL",0,0,"",,terminal_output +2683,6184764,"TERMINAL",0,0,"",,terminal_output +2684,6184778,"TERMINAL",0,0,"4",,terminal_output +2685,6185077,"TERMINAL",0,0,"",,terminal_output +2686,6185608,"TERMINAL",0,0,".",,terminal_output +2687,6185793,"TERMINAL",0,0,"5",,terminal_output +2688,6185941,"TERMINAL",0,0,"s",,terminal_output +2689,6186123,"TERMINAL",0,0,"",,terminal_output +2690,6186746,"TERMINAL",0,0,"b",,terminal_output +2691,6186854,"TERMINAL",0,0,"6",,terminal_output +2692,6186917,"TERMINAL",0,0,"a",,terminal_output +2693,6187563,"TERMINAL",0,0,"t",,terminal_output +2694,6187709,"TERMINAL",0,0,"c",,terminal_output +2695,6187772,"TERMINAL",0,0,"h",,terminal_output +2696,6187881,"TERMINAL",0,0,"7",,terminal_output +2697,6188911,"TERMINAL",0,0,"8",,terminal_output +2698,6188983,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2699,6189097,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264\r\n",,terminal_output +2700,6189208,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +2701,6190079,"TERMINAL",0,0,"40",,terminal_output +2702,6191081,"TERMINAL",0,0,"1",,terminal_output +2703,6192067,"TERMINAL",0,0,"2",,terminal_output +2704,6193065,"TERMINAL",0,0,"3",,terminal_output +2705,6194106,"TERMINAL",0,0,"4",,terminal_output +2706,6195190,"TERMINAL",0,0,"5",,terminal_output +2707,6196204,"TERMINAL",0,0,"6",,terminal_output +2708,6197255,"TERMINAL",0,0,"7",,terminal_output +2709,6198349,"TERMINAL",0,0,"8",,terminal_output +2710,6198761,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2711,6199325,"TERMINAL",0,0,"9",,terminal_output +2712,6200628,"TERMINAL",0,0,"50",,terminal_output +2713,6201392,"TERMINAL",0,0,"1",,terminal_output +2714,6202452,"TERMINAL",0,0,"2",,terminal_output +2715,6203485,"TERMINAL",0,0,"3",,terminal_output +2716,6204596,"TERMINAL",0,0,"4",,terminal_output +2717,6205540,"TERMINAL",0,0,"5",,terminal_output +2718,6206337,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py"", line 207, in \r\n action_batch_E = genie.vq_encode(batch, training=False)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/genie.py"", line 559, in vq_encode\r\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\r\n ^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py"", line 145, in vq_encode\r\n action_pad_BT1P = jnp.broadcast_to(\r\n ^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/jax/_src/numpy/lax_numpy.py"", line 3070, in broadcast_to\r\n return util._broadcast_to(array, shape, sharding=out_sharding)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/jax/_src/numpy/util.py"", line 292, in _broadcast_to\r\n raise ValueError(msg.format(arr_shape, shape))\r\nValueError: Incompatible shapes for broadcasting: (1, 1, 1, 48) and requested shape (12, 16, 1, 768)\r\n",,terminal_output +2719,6206448,"TERMINAL",0,0,"W0924 14:26:56.547312 3744470 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": CANCELLED: CANCELLED\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:1, grpc_message:""CANCELLED""} [type.googleapis.com/tensorflow.DerivedStatus='']\r\n",,terminal_output +2720,6206581,"TERMINAL",0,0,"6",,terminal_output +2721,6206965,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +2722,6207672,"TERMINAL",0,0,"7",,terminal_output +2723,6208692,"TERMINAL",0,0,"8",,terminal_output +2724,6209718,"TERMINAL",0,0,"9",,terminal_output +2725,6210746,"TERMINAL",0,0,"7:00",,terminal_output +2726,6211867,"TERMINAL",0,0,"1",,terminal_output +2727,6212891,"TERMINAL",0,0,"220",,terminal_output +2728,6213872,"TERMINAL",0,0,"3",,terminal_output +2729,6214903,"TERMINAL",0,0,"4",,terminal_output +2730,6215992,"TERMINAL",0,0,"6",,terminal_output +2731,6217095,"TERMINAL",0,0,"7",,terminal_output +2732,6218029,"TERMINAL",0,0,"8",,terminal_output +2733,6219071,"TERMINAL",0,0,"94",,terminal_output +2734,6220176,"TERMINAL",0,0,"10",,terminal_output +2735,6221185,"TERMINAL",0,0,"1",,terminal_output +2736,6222208,"TERMINAL",0,0,"2",,terminal_output +2737,6223233,"TERMINAL",0,0,"3",,terminal_output +2738,6224315,"TERMINAL",0,0,"4",,terminal_output +2739,6225317,"TERMINAL",0,0,"5",,terminal_output +2740,6226411,"TERMINAL",0,0,"6",,terminal_output +2741,6227437,"TERMINAL",0,0,"7",,terminal_output 
+2742,6228763,"TERMINAL",0,0,"8",,terminal_output +2743,6229788,"TERMINAL",0,0,"9",,terminal_output +2744,6230752,"TERMINAL",0,0,"20",,terminal_output +2745,6231837,"TERMINAL",0,0,"1",,terminal_output +2746,6232863,"TERMINAL",0,0,"2",,terminal_output +2747,6233886,"TERMINAL",0,0,"3",,terminal_output +2748,6234913,"TERMINAL",0,0,"4",,terminal_output +2749,6235948,"TERMINAL",0,0,"6",,terminal_output +2750,6237007,"TERMINAL",0,0,"7",,terminal_output +2751,6238048,"TERMINAL",0,0,"8",,terminal_output +2752,6239097,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.preprocess import patchify, unpatchify\nfrom utils.nn import STTransformer, VectorQuantizer\n\n\nclass LatentActionModel(nnx.Module):\n """"""Latent Action ST-ViVit VQ-VAE\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n M: model dimension\n L: latent dimension\n E: B * (T - 1)\n H: height\n W: width\n C: number of channels (n_dim)\n P: patch token dimension (patch_size^2 * C)\n\n Tm1: T - 1\n Np1: N + 1\n """"""\n\n def __init__(\n self,\n in_dim: int,\n model_dim: int,\n ffn_dim: int,\n latent_dim: int,\n num_latents: int,\n patch_size: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n codebook_dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.in_dim = in_dim\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.latent_dim = latent_dim\n self.num_latents = num_latents\n self.patch_size = patch_size\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.codebook_dropout = codebook_dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.patch_token_dim = self.in_dim * self.patch_size**2\n self.encoder = STTransformer(\n self.patch_token_dim,\n self.model_dim,\n self.ffn_dim,\n self.latent_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_in = nnx.Param(\n nnx.initializers.lecun_uniform()(\n rngs.params(), (1, 1, 1, self.patch_token_dim)\n )\n )\n self.vq = VectorQuantizer(\n self.latent_dim,\n self.num_latents,\n self.codebook_dropout,\n self.dtype,\n rngs=rngs,\n )\n self.patch_up = nnx.Linear(\n self.patch_token_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.action_up = nnx.Linear(\n self.latent_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n self.decoder = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.patch_token_dim,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n\n def __call__(\n self, batch: Dict[str, jax.Array], training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Encode + VQ ---\n H, W = batch[""videos""].shape[2:4]\n videos_BTHWC = batch[""videos""]\n outputs = self.vq_encode(videos_BTHWC, training)\n patch_BTNP = outputs[""patches""]\n z_q_BTm11L = outputs[""z_q""]\n action_BTm11M = self.action_up(z_q_BTm11L)\n patch_BTm1NM = self.patch_up(patch_BTNP[:, :-1])\n action_BTm1NM = jnp.broadcast_to(action_BTm11M, patch_BTm1NM.shape)\n video_action_patches_BTm1NM = action_BTm1NM + patch_BTm1NM\n del 
outputs[""patches""], patch_BTNP, patch_BTm1NM\n\n # --- Decode ---\n video_recon_BTm1P = self.decoder(video_action_patches_BTm1NM)\n video_recon_BTm1P = video_recon_BTm1P.astype(jnp.float32)\n video_recon_BTm1P = nnx.sigmoid(video_recon_BTm1P)\n video_recon_BTm1P = video_recon_BTm1P.astype(self.dtype)\n video_recon_BTm1HWC = unpatchify(video_recon_BTm1P, self.patch_size, H, W)\n outputs[""recon""] = video_recon_BTm1HWC\n return outputs\n\n def vq_encode(\n self, videos_BTHWC: jax.Array, training: bool = True\n ) -> Dict[str, jax.Array]:\n # --- Preprocess videos ---\n B, T = videos_BTHWC.shape[:2]\n patch_BTNP = patchify(videos_BTHWC, self.patch_size)\n action_pad_BT1P = jnp.broadcast_to(\n self.action_in.value, (B, T, 1, self.patch_token_dim)\n )\n padded_patch_BTNp1P = jnp.concatenate((action_pad_BT1P, patch_BTNP), axis=2)\n\n # --- Encode ---\n z_BTNp1L = self.encoder(padded_patch_BTNp1P)\n # Get latent action for all future frames\n z_BTm1L = z_BTNp1L[:, 1:, 0]\n\n # --- Vector quantize ---\n z_EL = z_BTm1L.reshape(B * (T - 1), self.latent_dim)\n z_q_EL, z_EL, emb_EL, indices_E = self.vq(z_EL, training)\n z_q_BTm11L = z_q_EL.reshape(B, T - 1, 1, self.latent_dim)\n return dict(\n patches=patch_BTNP, z_q=z_q_BTm11L, z=z_EL, emb=emb_EL, indices=indices_E\n )\n",python,tab +2753,6239099,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4448,0,"",python,selection_command +2754,6239385,"TERMINAL",0,0,"9",,terminal_output +2755,6240134,"TERMINAL",0,0,"30",,terminal_output +2756,6241204,"TERMINAL",0,0,"1",,terminal_output +2757,6242203,"TERMINAL",0,0,"2",,terminal_output +2758,6243306,"TERMINAL",0,0,"3",,terminal_output +2759,6244291,"TERMINAL",0,0,"4",,terminal_output +2760,6245315,"TERMINAL",0,0,"5",,terminal_output +2761,6246355,"TERMINAL",0,0,"6",,terminal_output +2762,6247401,"TERMINAL",0,0,"7",,terminal_output +2763,6247934,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",0,0,"",python,tab +2764,6248448,"TERMINAL",0,0,"8",,terminal_output +2765,6249555,"TERMINAL",0,0,"9",,terminal_output +2766,6250566,"TERMINAL",0,0,"40",,terminal_output +2767,6251549,"TERMINAL",0,0,"1",,terminal_output +2768,6252622,"TERMINAL",0,0,"2",,terminal_output +2769,6253646,"TERMINAL",0,0,"3",,terminal_output +2770,6254672,"TERMINAL",0,0,"4",,terminal_output +2771,6255694,"TERMINAL",0,0,"5",,terminal_output +2772,6256821,"TERMINAL",0,0,"6",,terminal_output +2773,6257494,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",0,0,"",python,tab +2774,6257790,"TERMINAL",0,0,"7",,terminal_output +2775,6258820,"TERMINAL",0,0,"810",,terminal_output +2776,6259327,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4548,0,"",python,selection_mouse +2777,6259700,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4541,15,"patch_token_dim",python,selection_mouse +2778,6259846,"TERMINAL",0,0,"9",,terminal_output +2779,6260908,"TERMINAL",0,0,"50",,terminal_output +2780,6261274,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4538,0,"",python,selection_mouse +2781,6261455,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4536,4,"self",python,selection_mouse +2782,6261944,"TERMINAL",0,0,"1",,terminal_output +2783,6262371,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4567,0,"",python,selection_mouse 
+2784,6262377,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4566,0,"",python,selection_command +2785,6262537,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4567,0,"",python,selection_mouse +2786,6262541,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4566,0,"",python,selection_command +2787,6262917,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4515,0,"",python,selection_mouse +2788,6262987,"TERMINAL",0,0,"3",,terminal_output +2789,6263048,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4509,9,"action_in",python,selection_mouse +2790,6264011,"TERMINAL",0,0,"4",,terminal_output +2791,6265026,"TERMINAL",0,0,"5",,terminal_output +2792,6265061,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4520,0,"",python,selection_mouse +2793,6265223,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",4519,5,"value",python,selection_mouse +2794,6266079,"TERMINAL",0,0,"6",,terminal_output +2795,6267158,"TERMINAL",0,0,"7",,terminal_output +2796,6268279,"TERMINAL",0,0,"8",,terminal_output +2797,6269174,"TERMINAL",0,0,"9",,terminal_output +2798,6270238,"TERMINAL",0,0,"8:00",,terminal_output +2799,6271250,"TERMINAL",0,0,"1",,terminal_output +2800,6272289,"TERMINAL",0,0,"2",,terminal_output +2801,6273410,"TERMINAL",0,0,"3",,terminal_output +2802,6274524,"TERMINAL",0,0,"4",,terminal_output +2803,6275511,"TERMINAL",0,0,"5",,terminal_output +2804,6276483,"TERMINAL",0,0,"6",,terminal_output +2805,6277477,"TERMINAL",0,0,"7",,terminal_output +2806,6277876,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",2090,0,"",python,selection_mouse +2807,6277961,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/models/lam.py",2084,15,"patch_token_dim",python,selection_mouse +2808,6278537,"TERMINAL",0,0,"8",,terminal_output +2809,6279660,"TERMINAL",0,0,"9",,terminal_output +2810,6280780,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",0,0,"",shellscript,tab +2811,6280781,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1093,0,"",shellscript,selection_mouse +2812,6280836,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1092,0,"",shellscript,selection_command +2813,6280895,"TERMINAL",0,0,"10",,terminal_output +2814,6281317,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1049,0,"",shellscript,selection_mouse +2815,6281325,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1048,0,"",shellscript,selection_command +2816,6281707,"TERMINAL",0,0,"1",,terminal_output +2817,6282629,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1049,0,"\n ",shellscript,content +2818,6282816,"TERMINAL",0,0,"2",,terminal_output +2819,6282889,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1054,0,"-",shellscript,content +2820,6282890,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1055,0,"",shellscript,selection_keyboard +2821,6283019,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1055,0,"-",shellscript,content +2822,6283020,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1056,0,"",shellscript,selection_keyboard +2823,6283754,"TERMINAL",0,0,"3",,terminal_output +2824,6284775,"TERMINAL",0,0,"4",,terminal_output +2825,6285770,"TERMINAL",0,0,"5",,terminal_output 
+2826,6286908,"TERMINAL",0,0,"6",,terminal_output +2827,6287917,"TERMINAL",0,0,"7",,terminal_output +2828,6288977,"TERMINAL",0,0,"8",,terminal_output +2829,6290016,"TERMINAL",0,0,"9",,terminal_output +2830,6291022,"TERMINAL",0,0,"21",,terminal_output +2831,6292049,"TERMINAL",0,0,"2",,terminal_output +2832,6293072,"TERMINAL",0,0,"3",,terminal_output +2833,6294099,"TERMINAL",0,0,"4",,terminal_output +2834,6295134,"TERMINAL",0,0,"5",,terminal_output +2835,6296156,"TERMINAL",0,0,"6",,terminal_output +2836,6297187,"TERMINAL",0,0,"7",,terminal_output +2837,6298231,"TERMINAL",0,0,"8",,terminal_output +2838,6299312,"TERMINAL",0,0,"9",,terminal_output +2839,6300340,"TERMINAL",0,0,"30",,terminal_output +2840,6301004,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1055,0,"",shellscript,selection_command +2841,6301346,"TERMINAL",0,0,"1",,terminal_output +2842,6302492,"TERMINAL",0,0,"2",,terminal_output +2843,6303517,"TERMINAL",0,0,"3",,terminal_output +2844,6304543,"TERMINAL",0,0,"4",,terminal_output +2845,6305502,"TERMINAL",0,0,"5",,terminal_output +2846,6306611,"TERMINAL",0,0,"6",,terminal_output +2847,6307613,"TERMINAL",0,0,"7",,terminal_output +2848,6308712,"TERMINAL",0,0,"8",,terminal_output +2849,6309066,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1050,7,"",shellscript,content +2850,6309113,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",1054,0,"",shellscript,selection_command +2851,6309659,"TERMINAL",0,0,"9",,terminal_output +2852,6310544,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +2853,6310739,"TERMINAL",0,0,"40",,terminal_output +2854,6311710,"TERMINAL",0,0,"1",,terminal_output +2855,6312835,"TERMINAL",0,0,"2",,terminal_output +2856,6313860,"TERMINAL",0,0,"3",,terminal_output +2857,6314451,"slurm/jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh",0,0,"",shellscript,tab +2858,6314896,"TERMINAL",0,0,"4",,terminal_output +2859,6315855,"TERMINAL",0,0,"5",,terminal_output +2860,6316210,"slurm/jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh",1317,0,"",shellscript,selection_mouse +2861,6316950,"TERMINAL",0,0,"6",,terminal_output +2862,6317953,"TERMINAL",0,0,"7",,terminal_output +2863,6318987,"TERMINAL",0,0,"9",,terminal_output +2864,6320003,"TERMINAL",0,0,"50",,terminal_output +2865,6321135,"TERMINAL",0,0,"1",,terminal_output +2866,6322070,"TERMINAL",0,0,"2",,terminal_output +2867,6323107,"TERMINAL",0,0,"3",,terminal_output +2868,6323482,"sample.py",0,0,"",python,tab +2869,6324196,"TERMINAL",0,0,"4",,terminal_output +2870,6325181,"TERMINAL",0,0,"5",,terminal_output +2871,6326221,"TERMINAL",0,0,"6",,terminal_output +2872,6327270,"TERMINAL",0,0,"7",,terminal_output +2873,6328462,"TERMINAL",0,0,"8",,terminal_output +2874,6329336,"TERMINAL",0,0,"9",,terminal_output +2875,6330374,"TERMINAL",0,0,"9:00",,terminal_output +2876,6331469,"TERMINAL",0,0,"1",,terminal_output +2877,6332494,"TERMINAL",0,0,"2",,terminal_output +2878,6333628,"TERMINAL",0,0,"3",,terminal_output +2879,6334534,"TERMINAL",0,0,"4",,terminal_output +2880,6335573,"TERMINAL",0,0,"5",,terminal_output +2881,6336696,"TERMINAL",0,0,"6",,terminal_output +2882,6337661,"TERMINAL",0,0,"7",,terminal_output +2883,6338741,"TERMINAL",0,0,"8",,terminal_output +2884,6339767,"TERMINAL",0,0,"9",,terminal_output +2885,6340830,"TERMINAL",0,0,"10",,terminal_output +2886,6342000,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab +2887,6342195,"TERMINAL",0,0,"1",,terminal_output 
+2888,6342939,"TERMINAL",0,0,"2",,terminal_output +2889,6343295,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1108,0,"",shellscript,selection_mouse +2890,6343298,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1107,0,"",shellscript,selection_command +2891,6343880,"TERMINAL",0,0,"3",,terminal_output +2892,6343991,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1084,0,"",shellscript,selection_mouse +2893,6344002,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1083,0,"",shellscript,selection_command +2894,6344790,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1157,0,"",shellscript,selection_mouse +2895,6344872,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1156,0,"",shellscript,selection_command +2896,6345029,"TERMINAL",0,0,"4",,terminal_output +2897,6345946,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1157,0,"\n ",shellscript,content +2898,6346046,"TERMINAL",0,0,"6",,terminal_output +2899,6346340,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1162,0,"-",shellscript,content +2900,6346341,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1163,0,"",shellscript,selection_keyboard +2901,6346468,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1163,0,"-",shellscript,content +2902,6346469,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1164,0,"",shellscript,selection_keyboard +2903,6346651,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1164,0,"l",shellscript,content +2904,6346652,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1165,0,"",shellscript,selection_keyboard +2905,6346806,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1165,0,"a",shellscript,content +2906,6346806,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1166,0,"",shellscript,selection_keyboard +2907,6346900,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1166,0,"m",shellscript,content +2908,6346901,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1167,0,"",shellscript,selection_keyboard +2909,6346993,"TERMINAL",0,0,"7",,terminal_output +2910,6347134,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1167,0,"_",shellscript,content +2911,6347135,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1168,0,"",shellscript,selection_keyboard +2912,6347456,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1168,0,"p",shellscript,content +2913,6347457,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1169,0,"",shellscript,selection_keyboard +2914,6347583,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1169,0,"a",shellscript,content +2915,6347584,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1170,0,"",shellscript,selection_keyboard +2916,6347767,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1170,0,"t",shellscript,content +2917,6347768,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1171,0,"",shellscript,selection_keyboard +2918,6347976,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1171,0,"c",shellscript,content +2919,6347977,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1172,0,"",shellscript,selection_keyboard +2920,6348027,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1172,0,"h",shellscript,content +2921,6348028,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1173,0,"",shellscript,selection_keyboard +2922,6348096,"TERMINAL",0,0,"8",,terminal_output +2923,6348377,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1173,0,"_",shellscript,content 
+2924,6348378,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1174,0,"",shellscript,selection_keyboard +2925,6348550,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1174,0,"s",shellscript,content +2926,6348551,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1175,0,"",shellscript,selection_keyboard +2927,6348665,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1175,0,"i",shellscript,content +2928,6348665,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1176,0,"",shellscript,selection_keyboard +2929,6348882,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1176,0,"z",shellscript,content +2930,6348883,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1177,0,"",shellscript,selection_keyboard +2931,6349011,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1177,0,"e",shellscript,content +2932,6349012,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1178,0,"",shellscript,selection_keyboard +2933,6349144,"TERMINAL",0,0,"9062",,terminal_output +2934,6349618,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1178,0,"=",shellscript,content +2935,6349619,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1179,0,"",shellscript,selection_keyboard +2936,6350022,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1179,0,"4",shellscript,content +2937,6350023,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1180,0,"",shellscript,selection_keyboard +2938,6350110,"TERMINAL",0,0,"20",,terminal_output +2939,6350224,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1180,0," ",shellscript,content +2940,6350225,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1181,0,"",shellscript,selection_keyboard +2941,6350380,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1181,0,"0",shellscript,content +2942,6350381,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1182,0,"",shellscript,selection_keyboard +2943,6350775,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1181,1,"",shellscript,content +2944,6351151,"TERMINAL",0,0,"1",,terminal_output +2945,6351304,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1181,0,"\",shellscript,content +2946,6351305,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1182,0,"",shellscript,selection_keyboard +2947,6351495,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1181,0,"",shellscript,selection_command +2948,6352192,"TERMINAL",0,0,"2",,terminal_output +2949,6353284,"TERMINAL",0,0,"3",,terminal_output +2950,6354689,"TERMINAL",0,0,"4",,terminal_output +2951,6355236,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +2952,6355345,"TERMINAL",0,0,"5",,terminal_output +2953,6355553,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +2954,6355907,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264\r\nGpuFreq=control_disabled\r\n",,terminal_output +2955,6356457,"TERMINAL",0,0,"6",,terminal_output +2956,6357395,"TERMINAL",0,0,"7",,terminal_output +2957,6358434,"TERMINAL",0,0,"8",,terminal_output +2958,6359530,"TERMINAL",0,0,"9",,terminal_output +2959,6360512,"TERMINAL",0,0,"30",,terminal_output +2960,6361578,"TERMINAL",0,0,"1",,terminal_output +2961,6362708,"TERMINAL",0,0,"2",,terminal_output +2962,6363639,"TERMINAL",0,0,"3",,terminal_output 
+2963,6364639,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/venv_3_11/lib64/python3.11/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +2964,6364693,"TERMINAL",0,0,"4",,terminal_output +2965,6365776,"TERMINAL",0,0,"5",,terminal_output +2966,6366804,"TERMINAL",0,0,"6",,terminal_output +2967,6367826,"TERMINAL",0,0,"7",,terminal_output +2968,6368834,"TERMINAL",0,0,"8",,terminal_output +2969,6369868,"TERMINAL",0,0,"9",,terminal_output +2970,6370908,"TERMINAL",0,0,"40",,terminal_output +2971,6371945,"TERMINAL",0,0,"2",,terminal_output +2972,6373054,"TERMINAL",0,0,"3",,terminal_output +2973,6374071,"TERMINAL",0,0,"4",,terminal_output +2974,6375102,"TERMINAL",0,0,"5",,terminal_output +2975,6376104,"TERMINAL",0,0,"6",,terminal_output +2976,6377147,"TERMINAL",0,0,"7",,terminal_output +2977,6378184,"TERMINAL",0,0,"8",,terminal_output +2978,6379222,"TERMINAL",0,0,"9",,terminal_output +2979,6380270,"TERMINAL",0,0,"500",,terminal_output +2980,6381306,"TERMINAL",0,0,"1",,terminal_output +2981,6382366,"TERMINAL",0,0,"2",,terminal_output +2982,6383491,"TERMINAL",0,0,"3",,terminal_output +2983,6384515,"TERMINAL",0,0,"4",,terminal_output +2984,6385483,"TERMINAL",0,0,"5",,terminal_output +2985,6386559,"TERMINAL",0,0,"6",,terminal_output +2986,6387569,"TERMINAL",0,0,"7",,terminal_output +2987,6388614,"TERMINAL",0,0,"8",,terminal_output +2988,6389745,"TERMINAL",0,0,"9",,terminal_output +2989,6390763,"TERMINAL",0,0,"30:00",,terminal_output +2990,6391785,"TERMINAL",0,0,"1",,terminal_output +2991,6392810,"TERMINAL",0,0,"2",,terminal_output +2992,6393832,"TERMINAL",0,0,"3",,terminal_output +2993,6394861,"TERMINAL",0,0,"4",,terminal_output +2994,6395869,"TERMINAL",0,0,"5",,terminal_output +2995,6397008,"TERMINAL",0,0,"6",,terminal_output +2996,6398032,"TERMINAL",0,0,"8",,terminal_output +2997,6399057,"TERMINAL",0,0,"9",,terminal_output +2998,6400080,"TERMINAL",0,0,"10",,terminal_output +2999,6401106,"TERMINAL",0,0,"1",,terminal_output +3000,6401718,"TERMINAL",0,0,"SSIM: 0.8419094085693359\r\n",,terminal_output +3001,6402131,"TERMINAL",0,0,"2",,terminal_output +3002,6403157,"TERMINAL",0,0,"3",,terminal_output +3003,6404190,"TERMINAL",0,0,"4",,terminal_output +3004,6405292,"TERMINAL",0,0,"5",,terminal_output +3005,6406284,"TERMINAL",0,0,"6",,terminal_output +3006,6407309,"TERMINAL",0,0,"W0924 14:30:17.401407 3745198 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""Cancelling all calls""}\r\n",,terminal_output +3007,6407342,"TERMINAL",0,0,"7",,terminal_output +3008,6407901,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +3009,6408375,"TERMINAL",0,0,"8",,terminal_output +3010,6409458,"TERMINAL",0,0,"9",,terminal_output +3011,6410456,"TERMINAL",0,0,"20",,terminal_output +3012,6411554,"TERMINAL",0,0,"1",,terminal_output 
+3013,6412573,"TERMINAL",0,0,"2",,terminal_output +3014,6413714,"TERMINAL",0,0,"3",,terminal_output +3015,6414621,"TERMINAL",0,0,"4",,terminal_output +3016,6415649,"TERMINAL",0,0,"5",,terminal_output +3017,6416676,"TERMINAL",0,0,"6",,terminal_output +3018,6417797,"TERMINAL",0,0,"7",,terminal_output +3019,6418826,"TERMINAL",0,0,"8",,terminal_output +3020,6419844,"TERMINAL",0,0,"9",,terminal_output +3021,6420842,"TERMINAL",0,0,"30",,terminal_output +3022,6421893,"TERMINAL",0,0,"1",,terminal_output +3023,6423018,"TERMINAL",0,0,"2",,terminal_output +3024,6424056,"TERMINAL",0,0,"4",,terminal_output +3025,6425067,"TERMINAL",0,0,"5",,terminal_output +3026,6426090,"TERMINAL",0,0,"6",,terminal_output +3027,6427118,"TERMINAL",0,0,"7",,terminal_output +3028,6428138,"TERMINAL",0,0,"8",,terminal_output +3029,6429146,"TERMINAL",0,0,"9",,terminal_output +3030,6430181,"TERMINAL",0,0,"40",,terminal_output +3031,6431221,"TERMINAL",0,0,"1",,terminal_output +3032,6432254,"TERMINAL",0,0,"2",,terminal_output +3033,6433296,"TERMINAL",0,0,"3",,terminal_output +3034,6434389,"TERMINAL",0,0,"4",,terminal_output +3035,6435359,"TERMINAL",0,0,"5",,terminal_output +3036,6436433,"TERMINAL",0,0,"6",,terminal_output +3037,6437431,"TERMINAL",0,0,"7",,terminal_output +3038,6438480,"TERMINAL",0,0,"8",,terminal_output +3039,6439511,"TERMINAL",0,0,"9",,terminal_output +3040,6440542,"TERMINAL",0,0,"50",,terminal_output +3041,6441741,"TERMINAL",0,0,"1",,terminal_output +3042,6442811,"TERMINAL",0,0,"2",,terminal_output +3043,6443816,"TERMINAL",0,0,"3",,terminal_output +3044,6444728,"TERMINAL",0,0,"4",,terminal_output +3045,6445752,"TERMINAL",0,0,"5",,terminal_output +3046,6446774,"TERMINAL",0,0,"6",,terminal_output +3047,6447903,"TERMINAL",0,0,"7",,terminal_output +3048,6448925,"TERMINAL",0,0,"8",,terminal_output +3049,6449950,"TERMINAL",0,0,"9",,terminal_output +3050,6450905,"TERMINAL",0,0,"1:00",,terminal_output +3051,6452509,"TERMINAL",0,0,"2",,terminal_output +3052,6453536,"TERMINAL",0,0,"3",,terminal_output +3053,6454564,"TERMINAL",0,0,"4",,terminal_output +3054,6455684,"TERMINAL",0,0,"5",,terminal_output +3055,6456710,"TERMINAL",0,0,"6",,terminal_output +3056,6457733,"TERMINAL",0,0,"7",,terminal_output +3057,6458757,"TERMINAL",0,0,"8",,terminal_output +3058,6459773,"TERMINAL",0,0,"9",,terminal_output +3059,6460785,"TERMINAL",0,0,"10",,terminal_output +3060,6461930,"TERMINAL",0,0,"1",,terminal_output +3061,6462984,"TERMINAL",0,0,"2",,terminal_output +3062,6463978,"TERMINAL",0,0,"3",,terminal_output +3063,6465005,"TERMINAL",0,0,"4",,terminal_output +3064,6466026,"TERMINAL",0,0,"6",,terminal_output +3065,6467066,"TERMINAL",0,0,"7",,terminal_output +3066,6468083,"TERMINAL",0,0,"8",,terminal_output +3067,6469233,"TERMINAL",0,0,"919",,terminal_output +3068,6470272,"TERMINAL",0,0,"20",,terminal_output +3069,6471314,"TERMINAL",0,0,"1",,terminal_output +3070,6472376,"TERMINAL",0,0,"2",,terminal_output +3071,6473402,"TERMINAL",0,0,"3",,terminal_output +3072,6474478,"TERMINAL",0,0,"4",,terminal_output +3073,6475455,"TERMINAL",0,0,"5",,terminal_output +3074,6476577,"TERMINAL",0,0,"6",,terminal_output +3075,6477544,"TERMINAL",0,0,"7",,terminal_output +3076,6478723,"TERMINAL",0,0,"8",,terminal_output +3077,6479647,"TERMINAL",0,0,"9",,terminal_output +3078,6480644,"TERMINAL",0,0,"30",,terminal_output +3079,6481695,"TERMINAL",0,0,"1",,terminal_output +3080,6482711,"TERMINAL",0,0,"2",,terminal_output +3081,6483843,"TERMINAL",0,0,"3",,terminal_output +3082,6484867,"TERMINAL",0,0,"4",,terminal_output 
+3083,6485830,"TERMINAL",0,0,"5",,terminal_output +3084,6486916,"TERMINAL",0,0,"6",,terminal_output +3085,6487938,"TERMINAL",0,0,"7",,terminal_output +3086,6488963,"TERMINAL",0,0,"9",,terminal_output +3087,6489977,"TERMINAL",0,0,"40",,terminal_output +3088,6491114,"TERMINAL",0,0,"1",,terminal_output +3089,6492139,"TERMINAL",0,0,"2",,terminal_output +3090,6493173,"TERMINAL",0,0,"3",,terminal_output +3091,6494169,"TERMINAL",0,0,"4",,terminal_output +3092,6495218,"TERMINAL",0,0,"5",,terminal_output +3093,6496275,"TERMINAL",0,0,"6",,terminal_output +3094,6497232,"TERMINAL",0,0,"7",,terminal_output +3095,6498283,"TERMINAL",0,0,"8",,terminal_output +3096,6499408,"TERMINAL",0,0,"9",,terminal_output +3097,6500337,"TERMINAL",0,0,"50",,terminal_output +3098,6501468,"TERMINAL",0,0,"1",,terminal_output +3099,6502482,"TERMINAL",0,0,"2",,terminal_output +3100,6503504,"TERMINAL",0,0,"3",,terminal_output +3101,6504530,"TERMINAL",0,0,"4",,terminal_output +3102,6505667,"TERMINAL",0,0,"5",,terminal_output +3103,6506559,"TERMINAL",0,0,"6",,terminal_output +3104,6507726,"TERMINAL",0,0,"7",,terminal_output +3105,6508740,"TERMINAL",0,0,"8",,terminal_output +3106,6509678,"TERMINAL",0,0,"9",,terminal_output +3107,6510822,"TERMINAL",0,0,"2:00",,terminal_output +3108,6511744,"TERMINAL",0,0,"1",,terminal_output +3109,6512839,"TERMINAL",0,0,"2",,terminal_output +3110,6513851,"TERMINAL",0,0,"3",,terminal_output +3111,6514855,"TERMINAL",0,0,"4",,terminal_output +3112,6515899,"TERMINAL",0,0,"5",,terminal_output +3113,6517021,"TERMINAL",0,0,"6",,terminal_output +3114,6518046,"TERMINAL",0,0,"8",,terminal_output +3115,6519137,"TERMINAL",0,0,"9",,terminal_output +3116,6520095,"TERMINAL",0,0,"10",,terminal_output +3117,6521118,"TERMINAL",0,0,"1",,terminal_output +3118,6522142,"TERMINAL",0,0,"2",,terminal_output +3119,6523160,"TERMINAL",0,0,"3",,terminal_output +3120,6524200,"TERMINAL",0,0,"4",,terminal_output +3121,6525236,"TERMINAL",0,0,"5",,terminal_output +3122,6526275,"TERMINAL",0,0,"6",,terminal_output +3123,6527309,"TERMINAL",0,0,"7",,terminal_output +3124,6528342,"TERMINAL",0,0,"8",,terminal_output +3125,6529375,"TERMINAL",0,0,"9",,terminal_output +3126,6530457,"TERMINAL",0,0,"20",,terminal_output +3127,6531563,"TERMINAL",0,0,"1",,terminal_output +3128,6532588,"TERMINAL",0,0,"2",,terminal_output +3129,6533641,"TERMINAL",0,0,"3",,terminal_output +3130,6534571,"TERMINAL",0,0,"4",,terminal_output +3131,6535606,"TERMINAL",0,0,"5",,terminal_output +3132,6536660,"TERMINAL",0,0,"6",,terminal_output +3133,6537684,"TERMINAL",0,0,"7",,terminal_output +3134,6538730,"TERMINAL",0,0,"8",,terminal_output +3135,6539858,"TERMINAL",0,0,"9",,terminal_output +3136,6540825,"TERMINAL",0,0,"30",,terminal_output +3137,6541905,"TERMINAL",0,0,"1",,terminal_output +3138,6542934,"TERMINAL",0,0,"2",,terminal_output +3139,6543973,"TERMINAL",0,0,"3",,terminal_output +3140,6544977,"TERMINAL",0,0,"5",,terminal_output +3141,6546003,"TERMINAL",0,0,"6",,terminal_output +3142,6547128,"TERMINAL",0,0,"7",,terminal_output +3143,6548152,"TERMINAL",0,0,"8",,terminal_output +3144,6549114,"TERMINAL",0,0,"9",,terminal_output +3145,6550151,"TERMINAL",0,0,"40",,terminal_output +3146,6551237,"TERMINAL",0,0,"1",,terminal_output +3147,6552270,"TERMINAL",0,0,"2",,terminal_output +3148,6553261,"TERMINAL",0,0,"3",,terminal_output +3149,6554310,"TERMINAL",0,0,"4",,terminal_output +3150,6555382,"TERMINAL",0,0,"5",,terminal_output +3151,6556464,"TERMINAL",0,0,"6",,terminal_output +3152,6557471,"TERMINAL",0,0,"7",,terminal_output 
+3153,6558506,"TERMINAL",0,0,"8",,terminal_output +3154,6559500,"TERMINAL",0,0,"9",,terminal_output +3155,6560591,"TERMINAL",0,0,"50",,terminal_output +3156,6561668,"TERMINAL",0,0,"1",,terminal_output +3157,6562641,"TERMINAL",0,0,"2",,terminal_output +3158,6563783,"TERMINAL",0,0,"3",,terminal_output +3159,6564684,"TERMINAL",0,0,"4",,terminal_output +3160,6565718,"TERMINAL",0,0,"5",,terminal_output +3161,6566789,"TERMINAL",0,0,"6",,terminal_output +3162,6567790,"TERMINAL",0,0,"7",,terminal_output +3163,6568907,"TERMINAL",0,0,"8",,terminal_output +3164,6569864,"TERMINAL",0,0,"9",,terminal_output +3165,6570990,"TERMINAL",0,0,"3:00",,terminal_output +3166,6571945,"TERMINAL",0,0,"2",,terminal_output +3167,6573035,"TERMINAL",0,0,"3",,terminal_output +3168,6574008,"TERMINAL",0,0,"4",,terminal_output +3169,6575083,"TERMINAL",0,0,"5",,terminal_output +3170,6576216,"TERMINAL",0,0,"6",,terminal_output +3171,6577137,"TERMINAL",0,0,"7",,terminal_output +3172,6578157,"TERMINAL",0,0,"8",,terminal_output +3173,6579200,"TERMINAL",0,0,"9",,terminal_output +3174,6580227,"TERMINAL",0,0,"10",,terminal_output +3175,6581263,"TERMINAL",0,0,"1",,terminal_output +3176,6582354,"TERMINAL",0,0,"2",,terminal_output +3177,6583379,"TERMINAL",0,0,"3",,terminal_output +3178,6584403,"TERMINAL",0,0,"4",,terminal_output +3179,6585456,"TERMINAL",0,0,"5",,terminal_output +3180,6586555,"TERMINAL",0,0,"6",,terminal_output +3181,6587578,"TERMINAL",0,0,"7",,terminal_output +3182,6588599,"TERMINAL",0,0,"8",,terminal_output +3183,6589832,"TERMINAL",0,0,"9322",,terminal_output +3184,6590812,"TERMINAL",0,0,"20",,terminal_output +3185,6591876,"TERMINAL",0,0,"1",,terminal_output +3186,6592909,"TERMINAL",0,0,"2",,terminal_output +3187,6594029,"TERMINAL",0,0,"3",,terminal_output +3188,6595055,"TERMINAL",0,0,"5",,terminal_output +3189,6595998,"TERMINAL",0,0,"6",,terminal_output +3190,6597110,"TERMINAL",0,0,"7",,terminal_output +3191,6598126,"TERMINAL",0,0,"8",,terminal_output +3192,6599149,"TERMINAL",0,0,"9",,terminal_output +3193,6600153,"TERMINAL",0,0,"30",,terminal_output +3194,6601220,"TERMINAL",0,0,"1",,terminal_output +3195,6602220,"TERMINAL",0,0,"2",,terminal_output +3196,6603275,"TERMINAL",0,0,"3",,terminal_output +3197,6604288,"TERMINAL",0,0,"4",,terminal_output +3198,6605328,"TERMINAL",0,0,"5",,terminal_output +3199,6606364,"TERMINAL",0,0,"6",,terminal_output +3200,6607442,"TERMINAL",0,0,"7",,terminal_output +3201,6608467,"TERMINAL",0,0,"8",,terminal_output +3202,6609491,"TERMINAL",0,0,"9",,terminal_output +3203,6610510,"TERMINAL",0,0,"40",,terminal_output +3204,6611641,"TERMINAL",0,0,"1",,terminal_output +3205,6612665,"TERMINAL",0,0,"2",,terminal_output +3206,6613690,"TERMINAL",0,0,"3",,terminal_output +3207,6614672,"TERMINAL",0,0,"4",,terminal_output +3208,6615739,"TERMINAL",0,0,"5",,terminal_output +3209,6616749,"TERMINAL",0,0,"6",,terminal_output +3210,6617778,"TERMINAL",0,0,"7",,terminal_output +3211,6618912,"TERMINAL",0,0,"8",,terminal_output +3212,6619989,"TERMINAL",0,0,"9",,terminal_output +3213,6620894,"TERMINAL",0,0,"50",,terminal_output +3214,6621982,"TERMINAL",0,0,"1",,terminal_output +3215,6622974,"TERMINAL",0,0,"3",,terminal_output +3216,6624014,"TERMINAL",0,0,"4",,terminal_output +3217,6625181,"TERMINAL",0,0,"5",,terminal_output +3218,6626182,"TERMINAL",0,0,"6",,terminal_output +3219,6627206,"TERMINAL",0,0,"7",,terminal_output +3220,6628285,"TERMINAL",0,0,"8",,terminal_output +3221,6629214,"TERMINAL",0,0,"9",,terminal_output +3222,6630258,"TERMINAL",0,0,"4:00",,terminal_output 
+3447,6864673,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab
+3448,6864674,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1182,0,"",shellscript,selection_mouse
+3449,6864812,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1181,0,"",shellscript,selection_command
+3451,6864998,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1182,0,"",shellscript,selection_mouse
+3452,6865011,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1181,0,"",shellscript,selection_command
+4115,7552727,"TERMINAL",0,0,"q",,terminal_output
+4117,7552887,"TERMINAL",0,0,"ue",,terminal_output
+4118,7553003,"TERMINAL",0,0,"u",,terminal_output
+4119,7553066,"TERMINAL",0,0,"e",,terminal_output
+4120,7553205,"TERMINAL",0,0,"\r\n[?2004l\r[?1049h(B[?7hEvery 1.0s: squeue --mehkn0401.localdomain: Wed Sep 24 14:49:23 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3519264 dev_accel interact tum_cte0 R50:14\t 1 hkn0401",,terminal_output
+4124,7555252,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output
+4193,7626255,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab
+4194,7626256,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1182,0,"",shellscript,selection_mouse
+4195,7626375,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1181,0,"",shellscript,selection_command
+4197,7626687,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1131,0,"",shellscript,selection_mouse
+4198,7626691,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1130,0,"",shellscript,selection_command
+4199,7627382,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1182,0,"",shellscript,selection_mouse
+4200,7627395,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1181,0,"",shellscript,selection_command
+4202,7627933,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1131,0,"",shellscript,selection_mouse
+4203,7627944,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1130,0,"",shellscript,selection_command
+4204,7628528,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1084,0,"",shellscript,selection_mouse
+4205,7628529,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1083,0,"",shellscript,selection_command
+4207,7629050,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1062,0,"",shellscript,selection_mouse
+4208,7629063,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1061,0,"",shellscript,selection_command
+4209,7629599,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1021,0,"",shellscript,selection_mouse
+4210,7629610,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1020,0,"",shellscript,selection_command
+4212,7630226,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1040,0,"",shellscript,selection_mouse
+4213,7630227,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1039,0,"",shellscript,selection_command
+4272,7689993,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"",shellscript,tab
+4273,7689993,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1182,0,"",shellscript,selection_mouse
+4274,7690140,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",1181,0,"",shellscript,selection_command
+4276,7691744,"TERMINAL",0,0,"g",,terminal_output
+4277,7691802,"TERMINAL",0,0,"i",,terminal_output
+4279,7691897,"TERMINAL",0,0,"t",,terminal_output
+4280,7691960,"TERMINAL",0,0," ",,terminal_output
+4281,7692507,"TERMINAL",0,0,"s",,terminal_output
+4282,7692700,"TERMINAL",0,0,"t",,terminal_output
+4283,7692833,"TERMINAL",0,0,"a",,terminal_output
+4285,7692947,"TERMINAL",0,0,"sh",,terminal_output
+4286,7693233,"TERMINAL",0,0," ",,terminal_output
+4287,7693619,"TERMINAL",0,0,"",,terminal_output
+4289,7694289,"TERMINAL",0,0,"",,terminal_output
+4290,7694952,"TERMINAL",0,0,"di",,terminal_output
+4292,7695091,"TERMINAL",0,0,"f",,terminal_output
+4293,7695244,"TERMINAL",0,0,"f",,terminal_output
+4294,7695367,"TERMINAL",0,0,"\r\n[?2004l\r[?1h=\rdiff --git a/requirements.txt b/requirements.txt\r\nindex b6a4550..13c75e6 100644\r\n--- a/requirements.txt\r\n+++ b/requirements.txt\r\n@@ -14,5 +14,5 @@ ffmpeg-python==0.2.0\r\n hf-transfer==0.1.9\r\n huggingface-hub[cli]>=0.34.3\r\n pre-commit>=4.2.0\r\n-procgen>=0.10.7\r\n+# procgen>=0.10.7\r\n tqdm>=4.67.1\r\n\ No newline at end of file\r\ndiff --git a/sample.py b/sample.py\r\nindex e027162..f9c0af2 100644\r\n--- a/sample.py\r\n+++ b/sample.py\r\n@@ -115,7 +115,6 @@ if __name__ == ""__main__"":\r\n rngs=rngs,\r\n )\r\n \r\n- del genie.tokenizer.vq.drop\r\n # Need to delete lam decoder for checkpoint loading\r\n if not args.use_gt_actions:\r\n assert genie.lam is not None\r\n@@ -144,7 +143,7 @@ if __name__ == ""__main__"":\r\n weight_decay=1e-4,\r\n mu_dtype=args.dtype,\r\n )\r\n- dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\r\n+ dummy_optimizer = nnx.ModelAndOptimizer(genie, dummy_tx)\r\n \r\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\r\n abstract_optimizer_state = nnx.state(abstract_optimizer)\r\n@@ -245,7 +244,7 @@ if __name__ == ""__main__"":\r\n for row in range(B):\r\n action = action_batch_BSm11[row, t, 0]\r\n y_offset = row * batch[""videos""].shape[2] + 2\r\n- d.text((2, y_offset), f""{action}"", fill=255)\r\n+ # d.text((2, y_offset), f""{action}"", fill=255)\r\n imgs[0].save(\r\n f""generation_{time.time()}.gif"",\r\n save_all=True,\r\ndiff --git a/train_dynamics.py b/train_dynamics.py\r\nindex 5f24c86..11145b7 100644\r\n--- a/train_dynamics.py\r\n+++ b/train_dynamics.py\r\n@@ -295,6 +295,7 @@ def restore_or_initialize_components(\r\n restore_step = checkpoint_manager.latest_step()\r\n if args.restore_ckpt:\r\n assert checkpoint_manager is not None\r\n+ # del optimizer.model.tokenizer.vq.drop\r\n:",,terminal_output
+4296,7696253,"TERMINAL",0,0,"\r abstract_optimizer = nnx.eval_shape(lambda: optimizer)\r\n:",,terminal_output
+4297,7696778,"TERMINAL",0,0,"\r abstract_optimizer_state = nnx.state(abstract_optimizer)\r\n:\r if val_iterator:\r\n:\r@@ -326,7 +327,7 @@ def restore_or_initialize_components(\r\n:",,terminal_output
+4298,7696835,"TERMINAL",0,0,"\r # Specifically, the first dynamics model checkpoint will contain the vq dropout module,\r\n:",,terminal_output
+4300,7697244,"TERMINAL",0,0,"\r # but the first full restore will fail due to nnx not initializing the module when\r\n:",,terminal_output
+4301,7697862,"TERMINAL",0,0,"\r # dropout is set to 0.0.\r\n:\r- del optimizer.model.tokenizer.vq.drop\r\n:\r+ # del optimizer.model.tokenizer.vq.drop\r\n:\r return step, optimizer, train_iterator, val_iterator, rng\r\n:\r \r\n:\r \r\n:",,terminal_output
+4303,7698311,"TERMINAL",0,0,"\r\r(END)",,terminal_output
+4304,7698664,"TERMINAL",0,0,"\r\r(END)",,terminal_output
+4305,7698724,"TERMINAL",0,0,"\r\r(END)",,terminal_output
+4307,7699447,"TERMINAL",0,0,"\rM\ No newline at end of file\r\n\r:",,terminal_output
+4308,7700131,"TERMINAL",0,0,"\rM tqdm>=4.67.1\r\n\r:\rM+# procgen>=0.10.7\r\n\r:\rM-procgen>=0.10.7\r\n\r:\rM pre-commit>=4.2.0\r\n\r:\rM huggingface-hub[cli]>=0.34.3\r\n\r:\rM hf-transfer==0.1.9\r\n\r:\rM@@ -14,5 +14,5 @@ ffmpeg-python==0.2.0\r\n\r:\rM+++ b/requirements.txt\r\n\r:",,terminal_output
+4309,7700221,"TERMINAL",0,0,"\rM--- a/requirements.txt\r\n\r:",,terminal_output
+4311,7700304,"TERMINAL",0,0,"\rMindex b6a4550..13c75e6 100644\r\n\r:\rMdiff --git a/requirements.txt b/requirements.txt\r\n\r:\r\r:",,terminal_output
+4312,7700725,"TERMINAL",0,0,"\r\r:",,terminal_output
+4313,7701263,"TERMINAL",0,0,"\r\r:",,terminal_output
+4336,7723691,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output
+4337,7724002,"TERMINAL",0,0,"g",,terminal_output
+4339,7724151,"TERMINAL",0,0,"it",,terminal_output
+4340,7724210,"TERMINAL",0,0," ",,terminal_output
+4341,7724388,"TERMINAL",0,0,"c",,terminal_output
+4342,7724510,"TERMINAL",0,0,"h",,terminal_output
+4343,7724654,"TERMINAL",0,0,"e",,terminal_output
+4344,7724795,"TERMINAL",0,0,"c",,terminal_output
+4345,7724849,"TERMINAL",0,0,"k",,terminal_output
+4346,7724902,"TERMINAL",0,0,"o",,terminal_output
+4347,7725021,"TERMINAL",0,0,"u",,terminal_output
+4348,7725082,"TERMINAL",0,0,"t",,terminal_output
+4350,7725207,"TERMINAL",0,0," ",,terminal_output
+4351,7725351,"TERMINAL",0,0,"-",,terminal_output
+4352,7725613,"TERMINAL",0,0,"b",,terminal_output
+4353,7725684,"TERMINAL",0,0," ",,terminal_output
+4355,7726610,"TERMINAL",0,0,"",,terminal_output
+4357,7727680,"TERMINAL",0,0,"",,terminal_output
+4359,7728627,"TERMINAL",0,0,"c",,terminal_output
+4360,7728821,"TERMINAL",0,0,"o",,terminal_output
+4361,7729047,"TERMINAL",0,0,"m",,terminal_output
+4362,7729109,"TERMINAL",0,0,"m",,terminal_output
+4363,7729230,"TERMINAL",0,0,"i",,terminal_output
+4364,7729293,"TERMINAL",0,0,"t",,terminal_output
+4366,7729408,"TERMINAL",0,0," ",,terminal_output
+4367,7729530,"TERMINAL",0,0,"-",,terminal_output
+4368,7729760,"TERMINAL",0,0,"a",,terminal_output
+4369,7729825,"TERMINAL",0,0,"m",,terminal_output
+4370,7729887,"TERMINAL",0,0," ",,terminal_output
+4371,7730100,"TERMINAL",0,0,"""",,terminal_output
+4373,7730586,"TERMINAL",0,0,"e",,terminal_output
+4374,7730941,"TERMINAL",0,0,"d",,terminal_output
+4375,7731045,"TERMINAL",0,0,"e",,terminal_output
+4376,7731206,"TERMINAL",0,0,"v",,terminal_output
+4377,7731311,"TERMINAL",0,0," ",,terminal_output
+4379,7731590,"TERMINAL",0,0,"b",,terminal_output
+4380,7731652,"TERMINAL",0,0,"r",,terminal_output
+4381,7731806,"TERMINAL",0,0,"a",,terminal_output
+4382,7731863,"TERMINAL",0,0,"n",,terminal_output
+4383,7732012,"TERMINAL",0,0,"ch",,terminal_output
+4384,7732375,"TERMINAL",0,0,"""",,terminal_output
+4386,7732600,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output
+4388,7733765,"TERMINAL",0,0,"black....................................................................",,terminal_output
+4390,7734542,"TERMINAL",0,0,"Passed\r\n",,terminal_output +4391,7734704,"TERMINAL",0,0,"[dev 37241da] dev branch\r\n 3 files changed, 5 insertions(+), 5 deletions(-)\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4392,7735264,"TERMINAL",0,0,"g",,terminal_output +4393,7735327,"TERMINAL",0,0,"i",,terminal_output +4394,7735395,"TERMINAL",0,0,"t",,terminal_output +4395,7735485,"TERMINAL",0,0," ",,terminal_output +4396,7735520,"TERMINAL",0,0,"5",,terminal_output +4397,7735719,"TERMINAL",0,0,"ch",,terminal_output +4398,7735877,"TERMINAL",0,0,"e",,terminal_output +4399,7735940,"TERMINAL",0,0,"c",,terminal_output +4400,7736003,"TERMINAL",0,0,"k",,terminal_output +4401,7736165,"TERMINAL",0,0,"o",,terminal_output +4402,7736228,"TERMINAL",0,0,"u",,terminal_output +4403,7736293,"TERMINAL",0,0,"t",,terminal_output +4404,7736359,"TERMINAL",0,0," ",,terminal_output +4405,7736523,"TERMINAL",0,0,"m",,terminal_output +4406,7736576,"TERMINAL",0,0,"6",,terminal_output +4407,7736648,"TERMINAL",0,0,"ai",,terminal_output +4408,7736708,"TERMINAL",0,0,"n",,terminal_output +4409,7736853,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4410,7737055,"TERMINAL",0,0,"Switched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4411,7737596,"TERMINAL",0,0,"7",,terminal_output +4412,7737734,"TERMINAL",0,0,"g",,terminal_output +4413,7737796,"TERMINAL",0,0,"i",,terminal_output +4414,7737916,"TERMINAL",0,0,"t",,terminal_output +4415,7737978,"TERMINAL",0,0," ",,terminal_output +4416,7738338,"TERMINAL",0,0,"p",,terminal_output +4417,7738636,"TERMINAL",0,0,"8",,terminal_output +4418,7738855,"TERMINAL",0,0,"u",,terminal_output +4419,7739038,"TERMINAL",0,0,"l",,terminal_output +4420,7739194,"TERMINAL",0,0,"l",,terminal_output +4421,7739361,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4422,7739693,"TERMINAL",0,0,"9",,terminal_output +4423,7740713,"",0,0,"Switched from branch 'dev' to 'main'",,git_branch_checkout +4424,7740820,"TERMINAL",0,0,"30",,terminal_output +4425,7741843,"TERMINAL",0,0,"1",,terminal_output +4426,7741904,"TERMINAL",0,0,"remote: Enumerating objects: 60, done.\r\nremote: Counting objects: 1% (1/58)\rremote: Counting objects: 3% (2/58)\rremote: Counting objects: 5% (3/58)\rremote: Counting objects: 6% (4/58)\rremote: Counting objects: 8% (5/58)\rremote: Counting objects: 10% (6/58)\rremote: Counting objects: 12% (7/58)\rremote: Counting objects: 13% (8/58)\rremote: Counting objects: 15% (9/58)\rremote: Counting objects: 17% (10/58)\rremote: Counting objects: 18% (11/58)\rremote: Counting objects: 20% (12/58)\rremote: Counting objects: 22% (13/58)\rremote: Counting objects: 24% (14/58)\rremote: Counting objects: 25% (15/58)\rremote: Counting objects: 27% (16/58)\rremote: Counting objects: 29% (17/58)\rremote: Counting objects: 31% (18/58)\rremote: Counting objects: 32% (19/58)\rremote: Counting objects: 34% (20/58)\rremote: Counting objects: 36% (21/58)\rremote: Counting objects: 37% (22/58)\rremote: Counting objects: 39% (23/58)\rremote: Counting objects: 41% (24/58)\rremote: Counting objects: 43% (25/58)\rremote: Counting objects: 44% (26/58)\rremote: Counting objects: 46% (27/58)\rremote: Counting objects: 48% (28/58)\rremote: Counting objects: 50% (29/58)\rremote: Counting objects: 51% (30/58)\rremote: Counting objects: 53% (31/58)\rremote: Counting objects: 55% (32/58)\rremote: Counting objects: 56% 
(33/58) [per-object counting redraws elided] remote: Counting objects: 100% (58/58), done.\r\nremote: Compressing objects: 26% (12/45)\r",,terminal_output
+4427,7742012,"TERMINAL",0,0,"[per-object compression redraws elided] remote: Compressing objects: 100% (45/45), done.\r\nremote: Total 54 (delta 20), reused 30 (delta 7), pack-reused 0 (from 0)\r\n",,terminal_output
+4428,7742070,"TERMINAL",0,0,"[per-object unpacking redraws elided]",,terminal_output
+4429,7742132,"TERMINAL",0,0,"[per-object unpacking redraws elided]",,terminal_output
+4430,7742242,"TERMINAL",0,0,"[per-object unpacking redraws elided]",,terminal_output
+4431,7742417,"TERMINAL",0,0,"[per-object unpacking redraws elided]",,terminal_output
+4432,7742552,"TERMINAL",0,0,"[per-object unpacking redraws elided]",,terminal_output
+4433,7742609,"TERMINAL",0,0,"[per-object unpacking redraws elided]",,terminal_output
+4434,7742668,"TERMINAL",0,0,"[per-object unpacking redraws elided] Unpacking objects: 100% (54/54), 27.07 KiB | 41.00 KiB/s, done.\r\n",,terminal_output
+4435,7742795,"TERMINAL",0,0,"2",,terminal_output
+4436,7742957,"TERMINAL",0,0,"From github.com:p-doom/jasmine\r\n cb0f725..0ead7ee main -> origin/main\r\n ff45657..9938b6b hotfix/full-frame-eval-only-calculate-last-frame-metrics -> origin/hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n * [new branch] uv-migration -> origin/uv-migration\r\nUpdating cb0f725..0ead7ee\r\n",,terminal_output
+4437,7743277,"TERMINAL",0,0,"Fast-forward\r\n README.md | 28 +++++++++++++++-------------\r\n data/jasmine_data/__init__.py | 1 +\r\n {input_pipeline => data/jasmine_data/coinrun}/generate_coinrun_dataset.py | 2 +-\r\n .../jasmine_data/minecraft}/huggingface/download_openai_array_records.sh | 0\r\n .../download => data/jasmine_data/minecraft}/openai/download_actions_files.py | 0\r\n {input_pipeline/download => data/jasmine_data/minecraft}/openai/download_index_files.sh | 0\r\n {input_pipeline/download => data/jasmine_data/minecraft}/openai/download_videos.py | 0\r\n {input_pipeline => data/jasmine_data}/pngs_to_array_records.py | 0\r\n {input_pipeline => data/jasmine_data}/utils.py | 0\r\n {input_pipeline => data/jasmine_data}/video_to_array_records.py | 0\r\n data/pyproject.toml | 28 ++++++++++++++++++++++++++++\r\n genie.py | 3 +--\r\n jasmine/__init__.py | 0\r\n {models => jasmine/models}/dynamics.py | 34 +++++++++++++++-------------------\r\n {models => jasmine/models}/lam.py | 0\r\n {models => jasmine/models}/tokenizer.py | 0\r\n sample.py => jasmine/sample.py | 0\r\n train_dynamics.py => jasmine/train_dynamics.py | 35
++++++++++++++++-------------------\r\n train_lam.py => jasmine/train_lam.py | 0\r\n train_tokenizer.py => jasmine/train_tokenizer.py | 0\r\n {utils => jasmine/utils}/dataloader.py | 0\r\n {utils => jasmine/utils}/nn.py | 0\r\n {utils => jasmine/utils}/preprocess.py | 0\r\n {utils => jasmine/utils}/train_utils.py | 0\r\n pyproject.toml | 24 ++++++++++++++++++++++++\r\n requirements.txt | 18 ------------------\r\n 26 files changed, 101 insertions(+), 72 deletions(-)\r\n create mode 100644 data/jasmine_data/__init__.py\r\n rename {input_pipeline => data/jasmine_data/coinrun}/generate_coinrun_dataset.py (99%)\r\n rename {input_pipeline/download => data/jasmine_data/minecraft}/huggingface/download_openai_array_records.sh (100%)\r\n rename {input_pipeline/download => data/jasmine_data/minecraft}/openai/download_actions_files.py (100%)\r\n rename {input_pipeline/download => data/jasmine_data/minecraft}/openai/download_index_files.sh (100%)\r\n rename {input_pipeline/download => data/jasmine_data/minecraft}/openai/download_videos.py (100%)\r\n rename {input_pipeline => data/jasmine_data}/pngs_to_array_records.py (100%)\r\n rename {input_pipeline => data/jasmine_data}/utils.py (100%)\r\n rename {input_pipeline => data/jasmine_data}/video_to_array_records.py (100%)\r\n create mode 100644 data/pyproject.toml\r\n create mode 100644 jasmine/__init__.py\r\n rename {models => jasmine/models}/dynamics.py (85%)\r\n rename {models => jasmine/models}/lam.py (100%)\r\n rename {models => jasmine/models}/tokenizer.py (100%)\r\n rename sample.py => jasmine/sample.py (100%)\r\n rename train_dynamics.py => jasmine/train_dynamics.py (96%)\r\n rename train_lam.py => jasmine/train_lam.py (100%)\r\n rename train_tokenizer.py => jasmine/train_tokenizer.py (100%)\r\n rename {utils => jasmine/utils}/dataloader.py (100%)\r\n rename {utils => jasmine/utils}/nn.py (100%)\r\n rename {utils => jasmine/utils}/preprocess.py (100%)\r\n rename {utils => jasmine/utils}/train_utils.py (100%)\r\n create mode 100644 pyproject.toml\r\n delete mode 100644 requirements.txt\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(venv_3_11) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4438,7743843,"TERMINAL",0,0,"3",,terminal_output +4439,7744669,"TERMINAL",0,0,"d",,terminal_output +4440,7744732,"TERMINAL",0,0,"e",,terminal_output +4441,7744876,"TERMINAL",0,0,"4",,terminal_output +4442,7744970,"TERMINAL",0,0,"a",,terminal_output +4443,7745034,"TERMINAL",0,0,"c",,terminal_output +4444,7745240,"TERMINAL",0,0,"t",,terminal_output +4445,7745441,"TERMINAL",0,0,"i",,terminal_output +4446,7745634,"TERMINAL",0,0,"v",,terminal_output +4447,7745939,"TERMINAL",0,0,"5",,terminal_output +4448,7746563,"TERMINAL",0,0,"a",,terminal_output +4449,7746626,"TERMINAL",0,0,"t",,terminal_output +4450,7746738,"TERMINAL",0,0,"e",,terminal_output +4451,7746927,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4452,7747203,"TERMINAL",0,0,"73",,terminal_output +4453,7747497,"TERMINAL",0,0,"r",,terminal_output +4454,7747609,"TERMINAL",0,0,"m",,terminal_output +4455,7747672,"TERMINAL",0,0," ",,terminal_output +4456,7747812,"TERMINAL",0,0,"-",,terminal_output +4457,7747917,"TERMINAL",0,0,"r",,terminal_output +4458,7747982,"TERMINAL",0,0,"f",,terminal_output +4459,7748088,"TERMINAL",0,0," ",,terminal_output +4460,7748198,"TERMINAL",0,0,"8",,terminal_output +4461,7748261,"TERMINAL",0,0,"m",,terminal_output +4462,7748593,"TERMINAL",0,0,"",,terminal_output 
+4463,7748904,"TERMINAL",0,0,".",,terminal_output +4464,7749056,"TERMINAL",0,0,"v",,terminal_output +4465,7749205,"TERMINAL",0,0,"env/",,terminal_output +4466,7749216,"TERMINAL",0,0,"9",,terminal_output +4467,7749747,"TERMINAL",0,0,"bin/",,terminal_output +4468,7750020,"TERMINAL",0,0,"a",,terminal_output +4469,7750081,"TERMINAL",0,0,"c",,terminal_output +4470,7750257,"TERMINAL",0,0,"tivate",,terminal_output +4471,7750258,"TERMINAL",0,0,"40",,terminal_output +4472,7750611,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4473,7750688,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4474,7751282,"TERMINAL",0,0,"1",,terminal_output +4475,7751298,"TERMINAL",0,0,"m",,terminal_output +4476,7751744,"TERMINAL",0,0,"",,terminal_output +4477,7751931,"TERMINAL",0,0,"t",,terminal_output +4478,7751993,"TERMINAL",0,0,"m",,terminal_output +4479,7752056,"TERMINAL",0,0," ",,terminal_output +4480,7752239,"TERMINAL",0,0,"-",,terminal_output +4481,7752309,"TERMINAL",0,0,"t",,terminal_output +4482,7752322,"TERMINAL",0,0,"2",,terminal_output +4483,7752385,"TERMINAL",0,0,"g",,terminal_output +4484,7752646,"TERMINAL",0,0,"",,terminal_output +4485,7752747,"TERMINAL",0,0,"",,terminal_output +4486,7752927,"TERMINAL",0,0,"",,terminal_output +4487,7753060,"TERMINAL",0,0,"",,terminal_output +4488,7753216,"TERMINAL",0,0,"",,terminal_output +4489,7753357,"TERMINAL",0,0,"",,terminal_output +4490,7753373,"TERMINAL",0,0,"3",,terminal_output +4491,7753488,"TERMINAL",0,0,"r",,terminal_output +4492,7753549,"TERMINAL",0,0,"m",,terminal_output +4493,7753614,"TERMINAL",0,0," ",,terminal_output +4494,7753973,"TERMINAL",0,0,"r",,terminal_output +4495,7754307,"TERMINAL",0,0,"",,terminal_output +4496,7754414,"TERMINAL",0,0,"4",,terminal_output +4497,7755174,"TERMINAL",0,0,"-",,terminal_output +4498,7755610,"TERMINAL",0,0,"5",,terminal_output +4499,7755757,"TERMINAL",0,0,"r",,terminal_output +4500,7755820,"TERMINAL",0,0,"f",,terminal_output +4501,7755881,"TERMINAL",0,0," ",,terminal_output +4502,7756500,"TERMINAL",0,0,"6",,terminal_output +4503,7756963,"TERMINAL",0,0,"v",,terminal_output +4504,7757255,"TERMINAL",0,0,"e",,terminal_output +4505,7757430,"TERMINAL",0,0,"n",,terminal_output +4506,7757514,"TERMINAL",0,0,"v_3_11/",,terminal_output +4507,7757533,"TERMINAL",0,0,"7",,terminal_output +4508,7758516,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4509,7758581,"TERMINAL",0,0,"8",,terminal_output +4510,7759599,"TERMINAL",0,0,"9",,terminal_output +4511,7760670,"TERMINAL",0,0,"50",,terminal_output +4512,7760704,"TERMINAL",0,0,"watch",,terminal_focus +4513,7761007,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +4514,7763626,"TERMINAL",0,0,"srun",,terminal_focus +4515,7763900,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4516,7764980,"TERMINAL",0,0,"bash",,terminal_focus +4517,7767799,"TERMINAL",0,0,"rm -rf venv_3_11/",,terminal_command +4518,7767842,"TERMINAL",0,0,"]633;C",,terminal_output +4519,7770311,"TERMINAL",0,0,"",,terminal_focus +4520,7773068,"TERMINAL",0,0,"rm",,terminal_focus +4521,7774723,"TERMINAL",0,0,"",,terminal_focus +4522,7779793,"TERMINAL",0,0,"rm -rf .venv/",,terminal_command +4523,7779844,"TERMINAL",0,0,"]633;C",,terminal_output +4524,7780480,"TERMINAL",0,0,"srun",,terminal_focus +4525,7797060,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport 
jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n 
dummy_optimizer = nnx.ModelAndOptimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n gt = (\n gt_video[:, : recon_video_BSHWC.shape[1]]\n .clip(0, 1)\n .reshape(-1, *gt_video.shape[2:])\n )\n recon = recon_video_BSHWC.clip(0, 1).reshape(-1, *recon_video_BSHWC.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame :], recon[:, args.start_frame :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B = batch[""videos""].shape[0]\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(\n batch[""actions""][:, :-1], (B, args.seq_len - 1, 1)\n )\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n # d.text((2, y_offset), f""{action}"", 
fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +4526,7809306,"sample.py",5116,0,"",python,selection_mouse +4527,7809696,"sample.py",5098,0,"",python,selection_mouse +4528,7821312,"TERMINAL",0,0,"p",,terminal_output +4529,7834140,"TERMINAL",0,0,"\r[tum_cte0515@hkn0401 jasmine]$ p",,terminal_output +4530,7846577,"jasmine/sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n del genie.tokenizer.vq.drop\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n 
handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n gt = (\n gt_video[:, : recon_video_BSHWC.shape[1]]\n .clip(0, 1)\n .reshape(-1, *gt_video.shape[2:])\n )\n recon = recon_video_BSHWC.clip(0, 1).reshape(-1, *recon_video_BSHWC.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame :], recon[:, args.start_frame :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = 
[Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B = batch[""videos""].shape[0]\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(\n batch[""actions""][:, :-1], (B, args.seq_len - 1, 1)\n )\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +4531,7852781,"TERMINAL",0,0,"rm",,terminal_focus +4532,7853122,"TERMINAL",0,0,"\r[tum_cte0515@hkn0401 jasmine]$ p\r[tum_cte0515@hkn0401 jasmine]$ p\r[tum_cte0515@hkn0401 jasmine]$ p",,terminal_output +4533,7860483,"TERMINAL",0,0,"srun",,terminal_focus +4534,7862057,"TERMINAL",0,0,"",,terminal_output +4535,7863563,"TERMINAL",0,0,"u",,terminal_output +4536,7863664,"TERMINAL",0,0,"v",,terminal_output +4537,7863895,"TERMINAL",0,0," ",,terminal_output +4538,7867550,"TERMINAL",0,0,"s",,terminal_output +4539,7867745,"TERMINAL",0,0,"y",,terminal_output +4540,7867826,"TERMINAL",0,0,"n",,terminal_output +4541,7867941,"TERMINAL",0,0,"c",,terminal_output +4542,7870933,"TERMINAL",0,0,"",,terminal_output +4543,7871089,"TERMINAL",0,0,"",,terminal_output +4544,7871237,"TERMINAL",0,0,"",,terminal_output +4545,7871631,"TERMINAL",0,0,"d",,terminal_output +4546,7872075,"TERMINAL",0,0,"",,terminal_output +4547,7872131,"TERMINAL",0,0,"c",,terminal_output +4548,7872200,"TERMINAL",0,0,"d",,terminal_output +4549,7872388,"TERMINAL",0,0," .",,terminal_output +4550,7872552,"TERMINAL",0,0,".",,terminal_output +4551,7872749,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects[?2004h[tum_cte0515@hkn0401 Projects]$ ",,terminal_output +4552,7872850,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +4553,7872862,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +4554,7874112,"TERMINAL",0,0,"c",,terminal_output +4555,7874209,"TERMINAL",0,0,"d",,terminal_output +4556,7874271,"TERMINAL",0,0," ",,terminal_output +4557,7874412,"TERMINAL",0,0,"j",,terminal_output +4558,7874475,"TERMINAL",0,0,"a",,terminal_output +4559,7874888,"TERMINAL",0,0,"",,terminal_output +4560,7875704,"TERMINAL",0,0,"a",,terminal_output +4561,7875997,"TERMINAL",0,0,"",,terminal_output +4562,7876581,"TERMINAL",0,0,"s",,terminal_output +4563,7876827,"TERMINAL",0,0,"mine",,terminal_output +4564,7877305,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4565,7878111,"TERMINAL",0,0,"u",,terminal_output +4566,7878778,"TERMINAL",0,0,"v",,terminal_output +4567,7878840,"TERMINAL",0,0," ",,terminal_output +4568,7879091,"TERMINAL",0,0,"s",,terminal_output +4569,7879309,"TERMINAL",0,0,"y",,terminal_output +4570,7879445,"TERMINAL",0,0,"nc",,terminal_output +4571,7879657,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +4572,7880039,"TERMINAL",0,0,"Using CPython 3.12.1 interpreter at: /usr/bin/python3.12\r\nCreating virtual environment at: .venv\r\n⠋ Resolving dependencies... \r⠙ Resolving dependencies... \r⠋ Resolving dependencies... \r⠙ Resolving dependencies... \r⠋ Resolving dependencies... \r⠙ Resolving dependencies... 
\r⠙ jasmine==0.1.0 ",,terminal_output +4573,7880147,"TERMINAL",0,0,"\r⠙ array-record==0.8.1 ",,terminal_output +4574,7880259,"TERMINAL",0,0,"\r⠹ array-record==0.8.1 \r⠹ dm-pix==0.4.4 \r⠹ einops==0.8.1 \r⠹ flax==0.11.2 \r⠹ grain==0.2.12 \r⠹ array-record==0.8.1 \r⠹ jax==0.7.2 ",,terminal_output +4575,7880346,"TERMINAL",0,0,"\r⠹ jax==0.7.2 \r⠹ optax==0.2.6 \r⠹ pillow==11.3.0 \r⠹ pre-commit==4.3.0 \r⠹ tyro==0.9.32 \r⠹ wandb==0.22.0 \r⠹ absl-py==2.3.1 ",,terminal_output +4576,7880460,"TERMINAL",0,0,"\r⠸ absl-py==2.3.1 ",,terminal_output +4577,7880568,"TERMINAL",0,0,"\r⠸ etils==1.13.0 ",,terminal_output +4578,7880621,"TERMINAL",0,0,"\r⠼ etils==1.13.0 ",,terminal_output +4579,7880725,"TERMINAL",0,0,"\r⠼ etils==1.13.0 \r⠼ etils==1.13.0 \r⠼ chex==0.1.91 \r⠼ msgpack==1.1.1 \r⠼ numpy==2.3.3 \r⠼ numpy==2.3.3 \r⠼ numpy==2.3.3 \r⠼ orbax-checkpoint==0.11.25 \r⠼ pyyaml==6.0.2 \r⠼ rich==14.1.0 \r⠼ tensorstore==0.1.77 \r⠼ treescope==0.1.10 ",,terminal_output +4580,7880831,"TERMINAL",0,0,"\r⠼ jax-cuda12-plugin==0.7.2 \r⠴ jax-cuda12-plugin==0.7.2 ",,terminal_output +4581,7881053,"TERMINAL",0,0,"\r⠦ jax-cuda12-plugin==0.7.2 \r⠦ jax-cuda12-pjrt==0.7.2 ",,terminal_output +4582,7881111,"TERMINAL",0,0,"\r⠦ cfgv==3.4.0 \r⠦ identify==2.6.14 \r⠦ nodeenv==1.9.1 \r⠦ virtualenv==20.34.0 ",,terminal_output +4583,7881225,"TERMINAL",0,0,"\r⠦ pydantic-core==2.33.2 \r⠦ requests==2.32.5 \r⠧ nvidia-cublas-cu12==12.9.1.4 ",,terminal_output +4584,7881337,"TERMINAL",0,0,"\r⠧ nvidia-cuda-nvrtc-cu12==12.9.86 \r⠧ nvidia-cusolver-cu12==11.7.5.82 ",,terminal_output +4585,7881392,"TERMINAL",0,0,"\r⠧ nvidia-nvshmem-cu12==3.4.5 ",,terminal_output +4586,7881454,"TERMINAL",0,0,"\r⠧ smmap==5.0.2 \rResolved 84 packages in 1.42s\r\n",,terminal_output +4587,7883388,"TERMINAL",0,0,"⠋ Preparing packages... (0/0) \r⠋ Preparing packages... (0/83) \r⠙ Preparing packages... (0/83) \r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠙ Preparing packages... (0/83) \r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠙ Preparing packages... (0/83)\r\ndistlib  ------------------------------ 0 B/458.05 KiB \r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠙ Preparing packages... (0/83)\r\ndistlib  ------------------------------ 0 B/458.05 KiB \r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠙ Preparing packages... (0/83)\r\ncertifi  ------------------------------ 0 B/157.44 KiB\r\ndistlib  ------------------------------ 0 B/458.05 KiB \r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠙ Preparing packages... (0/83)\r\ncertifi  ------------------------------ 0 B/157.44 KiB\r\ndistlib  ------------------------------ 0 B/458.05 KiB \r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠙ Preparing packages... (0/83)\r\ncertifi  ------------------------------ 0 B/157.44 KiB\r\ndistlib  ------------------------------ 0 B/458.05 KiB \r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠙ Preparing packages... 
(0/83)\r\n Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n[repeated download-progress table redraws elided: 83 packages including jax, jaxlib, flax, wandb, and the NVIDIA CUDA wheels] ",,terminal_output
+4588,7883526,"TERMINAL",0,0,"[repeated download-progress table redraws elided]",,terminal_output
+4589,7883581,"TERMINAL",0,0,"[repeated download-progress table redraws elided]",,terminal_output
+4590,7883775,"TERMINAL",0,0,"[repeated download-progress table redraws elided] nvidia-nccl-cu12  ------------------------------ 14.87 KiB/282.19
MiB\r\nnvidia-cusolver-cu12  ------------------------------ 188.35 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 222.82 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 190.82 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 46.86 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... (0/83)\r\nwrapt  ------------------------------ 85.97 KiB/85.97 KiB\r\nhumanize  ------------------------------ 0 B/125.85 KiB\r\nurllib3  ------------------------------ 77.33 KiB/126.75 KiB\r\ntyro  ------------------------------ 80.00 KiB/129.36 KiB\r\nabsl-py  ------------------------------ 61.06 KiB/132.63 KiB\r\ncharset-normalizer  ------------------------------ 140.98 KiB/148.27 KiB\r\nsimplejson  ------------------------------ 124.22 KiB/148.65 KiB\r\netils  ------------------------------ 30.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 46.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 46.88 KiB/194.62 KiB\r\ngitpython  ------------------------------ 78.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 14.91 KiB/215.79 KiB\r\nrich  ------------------------------ 46.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 176.51 KiB/314.47 KiB\r\noptax  ------------------------------ 46.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 54.81 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 221.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 31.94 KiB/434.43 KiB\r\nflax  ------------------------------ 46.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 236.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 16.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 46.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 30.91 KiB/749.55 KiB\r\npygments  ------------------------------ 62.87 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 237.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 14.91 KiB/2.35 MiB\r\njax  ------------------------------ 14.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 222.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 187.60 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 235.96 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 14.87 KiB/5.71 MiB\r\npillow  ------------------------------ 36.06 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 114.46 KiB/10.31 MiB\r\nnumpy  ------------------------------ 66.58 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 202.68 KiB/18.63 MiB\r\nwandb  ------------------------------ 62.91 KiB/18.66 MiB\r\nscipy  ------------------------------ 111.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 190.89 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 185.08 KiB/38.67 MiB\r\njaxlib  ------------------------------ 14.91 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 202.04 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 206.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 14.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 206.82 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 14.87 KiB/282.19 
MiB\r\nnvidia-cusolver-cu12  ------------------------------ 188.35 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 222.82 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 190.82 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 46.86 KiB/614.37 MiB ",,terminal_output +4591,7883888,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... (0/83)\r\nwrapt  ------------------------------ 85.97 KiB/85.97 KiB\r\nclick  ------------------------------ 0 B/104.78 KiB\r\nhumanize  ------------------------------ 16.00 KiB/125.85 KiB\r\nurllib3  ------------------------------ 77.33 KiB/126.75 KiB\r\ntyro  ------------------------------ 80.00 KiB/129.36 KiB\r\nabsl-py  ------------------------------ 61.06 KiB/132.63 KiB\r\ncharset-normalizer  ------------------------------ 148.27 KiB/148.27 KiB\r\nsimplejson  ------------------------------ 124.22 KiB/148.65 KiB\r\netils  ------------------------------ 30.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 46.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 46.88 KiB/194.62 KiB\r\ngitpython  ------------------------------ 110.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 30.91 KiB/215.79 KiB\r\nrich  ------------------------------ 46.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 176.51 KiB/314.47 KiB\r\noptax  ------------------------------ 46.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 70.81 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 237.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 47.94 KiB/434.43 KiB\r\nflax  ------------------------------ 46.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 252.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 16.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 46.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 30.91 KiB/749.55 KiB\r\npygments  ------------------------------ 62.87 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 260.88 KiB/1.91 MiB\r\narray-record  ------------------------------ 238.91 KiB/2.35 MiB\r\njax  ------------------------------ 14.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 238.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 206.91 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 251.96 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 14.87 KiB/5.71 MiB\r\npillow  ------------------------------ 53.56 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 128.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 66.58 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 213.68 KiB/18.63 MiB\r\nwandb  ------------------------------ 62.91 KiB/18.66 MiB\r\nscipy  ------------------------------ 143.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 222.89 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 206.89 KiB/38.67 MiB\r\njaxlib  ------------------------------ 14.91 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 214.41 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 222.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 30.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  
------------------------------ 237.01 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 30.87 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 192.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 254.82 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 222.82 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 78.86 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... (0/83)\r\nwrapt  ------------------------------ 85.97 KiB/85.97 KiB\r\nclick  ------------------------------ 494 B/104.78 KiB\r\nhumanize  ------------------------------ 16.00 KiB/125.85 KiB\r\nurllib3  ------------------------------ 93.22 KiB/126.75 KiB\r\ntyro  ------------------------------ 96.00 KiB/129.36 KiB\r\nabsl-py  ------------------------------ 77.06 KiB/132.63 KiB\r\ncharset-normalizer  ------------------------------ 148.27 KiB/148.27 KiB\r\nsimplejson  ------------------------------ 124.22 KiB/148.65 KiB\r\netils  ------------------------------ 46.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 62.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 62.88 KiB/194.62 KiB\r\ngitpython  ------------------------------ 110.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 30.91 KiB/215.79 KiB\r\nrich  ------------------------------ 62.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 192.51 KiB/314.47 KiB\r\noptax  ------------------------------ 62.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 91.83 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 269.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 63.94 KiB/434.43 KiB\r\nflax  ------------------------------ 46.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 284.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 16.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 46.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 30.91 KiB/749.55 KiB\r\npygments  ------------------------------ 77.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 285.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 270.91 KiB/2.35 MiB\r\njax  ------------------------------ 24.99 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 238.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 238.91 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 268.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 14.87 KiB/5.71 MiB\r\npillow  ------------------------------ 53.56 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 128.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 78.91 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 229.68 KiB/18.63 MiB\r\nwandb  ------------------------------ 62.91 KiB/18.66 MiB\r\nscipy  ------------------------------ 159.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 254.89 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 209.82 KiB/38.67 MiB\r\njaxlib  ------------------------------ 30.91 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 230.41 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 234.87 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  
------------------------------ 30.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 269.01 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 30.87 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 224.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 270.82 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 254.82 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 110.76 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... (0/83)\r\nclick  ------------------------------ 10.10 KiB/104.78 KiB\r\nhumanize  ------------------------------ 16.00 KiB/125.85 KiB\r\nurllib3  ------------------------------ 93.22 KiB/126.75 KiB\r\ntyro  ------------------------------ 96.00 KiB/129.36 KiB\r\nabsl-py  ------------------------------ 77.06 KiB/132.63 KiB\r\ncharset-normalizer  ------------------------------ 148.27 KiB/148.27 KiB\r\nsimplejson  ------------------------------ 124.22 KiB/148.65 KiB\r\netils  ------------------------------ 46.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 62.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 62.88 KiB/194.62 KiB\r\ngitpython  ------------------------------ 110.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 30.91 KiB/215.79 KiB\r\nrich  ------------------------------ 62.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 192.51 KiB/314.47 KiB\r\noptax  ------------------------------ 62.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 91.83 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 269.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 63.94 KiB/434.43 KiB\r\nflax  ------------------------------ 46.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 284.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 16.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 46.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 30.91 KiB/749.55 KiB\r\npygments  ------------------------------ 77.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 285.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 270.91 KiB/2.35 MiB\r\njax  ------------------------------ 24.99 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 238.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 238.91 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 268.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 14.87 KiB/5.71 MiB\r\npillow  ------------------------------ 53.56 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 128.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 78.91 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 245.68 KiB/18.63 MiB\r\nwandb  ------------------------------ 62.91 KiB/18.66 MiB\r\nscipy  ------------------------------ 159.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 254.89 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 209.82 KiB/38.67 MiB\r\njaxlib  ------------------------------ 30.91 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 230.41 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 234.87 KiB/126.58 
MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 30.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 269.01 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 30.87 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 224.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 286.82 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 254.82 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 110.76 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... (0/83)\r\nidna  ------------------------------ 0 B/68.79 KiB\r\nclick  ------------------------------ 26.10 KiB/104.78 KiB\r\nhumanize  ------------------------------ 16.00 KiB/125.85 KiB\r\nurllib3  ------------------------------ 93.22 KiB/126.75 KiB\r\ntyro  ------------------------------ 96.00 KiB/129.36 KiB\r\nabsl-py  ------------------------------ 77.06 KiB/132.63 KiB\r\ncharset-normalizer  ------------------------------ 148.27 KiB/148.27 KiB\r\nsimplejson  ------------------------------ 140.22 KiB/148.65 KiB\r\netils  ------------------------------ 46.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 62.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 92.25 KiB/194.62 KiB\r\ngitpython  ------------------------------ 110.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 30.91 KiB/215.79 KiB\r\nrich  ------------------------------ 62.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 205.78 KiB/314.47 KiB\r\noptax  ------------------------------ 77.19 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 91.83 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 285.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 108.49 KiB/434.43 KiB\r\nflax  ------------------------------ 46.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 300.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 32.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 62.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 46.81 KiB/749.55 KiB\r\npygments  ------------------------------ 77.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 301.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 302.91 KiB/2.35 MiB\r\njax  ------------------------------ 24.99 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 254.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 254.91 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 284.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 14.87 KiB/5.71 MiB\r\npillow  ------------------------------ 62.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 144.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 78.91 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 261.68 KiB/18.63 MiB\r\nwandb  ------------------------------ 75.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 159.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 270.79 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 225.82 KiB/38.67 MiB\r\njaxlib  ------------------------------ 301.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 262.41 KiB/85.42 
MiB\r\njax-cuda12-pjrt  ------------------------------ 250.87 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 30.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 270.82 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 30.87 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 240.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 302.82 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 270.82 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 275.68 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠼ Preparing packages... (3/83)\r\nidna  ------------------------------ 0 B/68.79 KiB\r\nclick  ------------------------------ 26.10 KiB/104.78 KiB\r\nhumanize  ------------------------------ 16.00 KiB/125.85 KiB\r\nurllib3  ------------------------------ 93.22 KiB/126.75 KiB\r\ntyro  ------------------------------ 96.00 KiB/129.36 KiB\r\nabsl-py  ------------------------------ 77.06 KiB/132.63 KiB\r\nsimplejson  ------------------------------ 140.22 KiB/148.65 KiB\r\netils  ------------------------------ 46.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 62.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 92.25 KiB/194.62 KiB\r\ngitpython  ------------------------------ 110.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 30.91 KiB/215.79 KiB\r\nrich  ------------------------------ 62.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 205.78 KiB/314.47 KiB\r\noptax  ------------------------------ 77.19 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 91.83 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 301.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 108.49 KiB/434.43 KiB\r\nflax  ------------------------------ 62.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 316.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 32.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 62.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 46.81 KiB/749.55 KiB\r\npygments  ------------------------------ 77.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 317.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 302.91 KiB/2.35 MiB\r\njax  ------------------------------ 24.99 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 254.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 254.91 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 300.53 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 14.87 KiB/5.71 MiB\r\npillow  ------------------------------ 62.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 144.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 78.91 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 261.68 KiB/18.63 MiB\r\nwandb  ------------------------------ 75.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 175.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 270.89 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 241.82 KiB/38.67 MiB\r\njaxlib  ------------------------------ 301.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 262.41 KiB/85.42 
MiB\r\njax-cuda12-pjrt  ------------------------------ 266.87 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 30.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 270.82 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 30.87 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 256.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 302.82 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 270.82 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 275.68 KiB/614.37 MiB ",,terminal_output +4592,7883983,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠼ Preparing packages... (3/83)\r\neinops  ------------------------------ 0 B/62.85 KiB\r\nidna  ------------------------------ 16.00 KiB/68.79 KiB\r\nclick  ------------------------------ 26.10 KiB/104.78 KiB\r\nhumanize  ------------------------------ 16.00 KiB/125.85 KiB\r\nurllib3  ------------------------------ 93.22 KiB/126.75 KiB\r\ntyro  ------------------------------ 96.00 KiB/129.36 KiB\r\nabsl-py  ------------------------------ 113.72 KiB/132.63 KiB\r\nsimplejson  ------------------------------ 140.22 KiB/148.65 KiB\r\netils  ------------------------------ 46.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 73.74 KiB/177.98 KiB\r\nfsspec  ------------------------------ 92.25 KiB/194.62 KiB\r\ngitpython  ------------------------------ 126.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 30.91 KiB/215.79 KiB\r\nrich  ------------------------------ 62.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 205.78 KiB/314.47 KiB\r\noptax  ------------------------------ 93.19 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 123.83 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 317.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 124.49 KiB/434.43 KiB\r\nflax  ------------------------------ 94.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 332.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 32.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 62.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 238.32 KiB/749.55 KiB\r\npygments  ------------------------------ 93.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 333.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 318.91 KiB/2.35 MiB\r\njax  ------------------------------ 24.99 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 286.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 286.91 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 316.53 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 14.87 KiB/5.71 MiB\r\npillow  ------------------------------ 78.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 144.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 94.80 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 276.22 KiB/18.63 MiB\r\nwandb  ------------------------------ 91.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 175.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 286.89 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 257.82 KiB/38.67 MiB\r\njaxlib  
------------------------------ 333.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 285.92 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 270.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 30.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 302.82 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 286.87 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 266.30 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 334.82 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 302.82 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 303.60 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠼ Preparing packages... (3/83)\r\neinops  ------------------------------ 14.88 KiB/62.85 KiB\r\nidna  ------------------------------ 16.00 KiB/68.79 KiB\r\nclick  ------------------------------ 58.10 KiB/104.78 KiB\r\nhumanize  ------------------------------ 23.79 KiB/125.85 KiB\r\nurllib3  ------------------------------ 105.58 KiB/126.75 KiB\r\ntyro  ------------------------------ 96.00 KiB/129.36 KiB\r\nabsl-py  ------------------------------ 113.72 KiB/132.63 KiB\r\nsimplejson  ------------------------------ 140.22 KiB/148.65 KiB\r\netils  ------------------------------ 46.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 78.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 92.25 KiB/194.62 KiB\r\ngitpython  ------------------------------ 142.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 46.91 KiB/215.79 KiB\r\nrich  ------------------------------ 62.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 237.78 KiB/314.47 KiB\r\noptax  ------------------------------ 93.19 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 139.83 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 365.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 124.49 KiB/434.43 KiB\r\nflax  ------------------------------ 94.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 380.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 32.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 62.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 350.32 KiB/749.55 KiB\r\npygments  ------------------------------ 109.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 381.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 366.91 KiB/2.35 MiB\r\njax  ------------------------------ 40.99 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 318.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 325.77 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 348.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 14.87 KiB/5.71 MiB\r\npillow  ------------------------------ 110.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 160.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 94.80 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 324.22 KiB/18.63 MiB\r\nwandb  ------------------------------ 107.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 191.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 334.89 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  
------------------------------ 277.49 KiB/38.67 MiB\r\njaxlib  ------------------------------ 381.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 329.43 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 315.74 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 30.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 334.73 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 334.87 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 314.20 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 382.82 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 350.82 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 350.86 KiB/614.37 MiB ",,terminal_output +4593,7884076,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠼ Preparing packages... (3/83)\r\neinops  ------------------------------ 14.88 KiB/62.85 KiB\r\nidna  ------------------------------ 32.00 KiB/68.79 KiB\r\nclick  ------------------------------ 64.00 KiB/104.78 KiB\r\nhumanize  ------------------------------ 23.79 KiB/125.85 KiB\r\nurllib3  ------------------------------ 121.58 KiB/126.75 KiB\r\ntyro  ------------------------------ 112.00 KiB/129.36 KiB\r\nabsl-py  ------------------------------ 125.06 KiB/132.63 KiB\r\nsimplejson  ------------------------------ 140.22 KiB/148.65 KiB\r\netils  ------------------------------ 62.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 94.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 108.25 KiB/194.62 KiB\r\ngitpython  ------------------------------ 158.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 46.91 KiB/215.79 KiB\r\nrich  ------------------------------ 68.21 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 253.78 KiB/314.47 KiB\r\noptax  ------------------------------ 110.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 139.83 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 381.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 140.49 KiB/434.43 KiB\r\nflax  ------------------------------ 94.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 396.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 32.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 62.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 382.32 KiB/749.55 KiB\r\npygments  ------------------------------ 109.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 413.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 389.16 KiB/2.35 MiB\r\njax  ------------------------------ 40.99 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 350.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 350.91 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 371.39 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 30.87 KiB/5.71 MiB\r\npillow  ------------------------------ 126.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 176.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 94.80 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 332.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 123.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 217.19 
KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 366.89 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 309.49 KiB/38.67 MiB\r\njaxlib  ------------------------------ 397.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 334.89 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 334.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 46.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 350.82 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 352.74 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 336.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 392.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 366.82 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 382.86 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠴ Preparing packages... (4/83)\r\neinops  ------------------------------ 30.88 KiB/62.85 KiB\r\nidna  ------------------------------ 64.00 KiB/68.79 KiB\r\nclick  ------------------------------ 96.00 KiB/104.78 KiB\r\nhumanize  ------------------------------ 23.79 KiB/125.85 KiB\r\nurllib3  ------------------------------ 121.58 KiB/126.75 KiB\r\ntyro  ------------------------------ 112.00 KiB/129.36 KiB\r\nabsl-py  ------------------------------ 132.63 KiB/132.63 KiB\r\nsimplejson  ------------------------------ 140.22 KiB/148.65 KiB\r\netils  ------------------------------ 62.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 110.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 124.25 KiB/194.62 KiB\r\ngitpython  ------------------------------ 158.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 46.91 KiB/215.79 KiB\r\nrich  ------------------------------ 78.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 253.78 KiB/314.47 KiB\r\noptax  ------------------------------ 126.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 139.83 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 413.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 157.00 KiB/434.43 KiB\r\nflax  ------------------------------ 110.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 444.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 32.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 78.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 391.85 KiB/749.55 KiB\r\npygments  ------------------------------ 109.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 429.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 400.87 KiB/2.35 MiB\r\njax  ------------------------------ 56.99 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 366.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 366.91 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 396.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 30.87 KiB/5.71 MiB\r\npillow  ------------------------------ 142.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 176.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 142.70 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 364.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 
123.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 255.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 382.89 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 334.89 KiB/38.67 MiB\r\njaxlib  ------------------------------ 425.79 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 366.89 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 366.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 46.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 366.82 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 354.70 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 353.96 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 424.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 392.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 392.56 KiB/614.37 MiB ",,terminal_output +4594,7884195,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠴ Preparing packages... (4/83)\r\neinops  ------------------------------ 30.88 KiB/62.85 KiB\r\nidna  ------------------------------ 64.00 KiB/68.79 KiB\r\nclick  ------------------------------ 104.78 KiB/104.78 KiB\r\nhumanize  ------------------------------ 23.79 KiB/125.85 KiB\r\nurllib3  ------------------------------ 121.58 KiB/126.75 KiB\r\ntyro  ------------------------------ 112.00 KiB/129.36 KiB\r\nsimplejson  ------------------------------ 140.22 KiB/148.65 KiB\r\netils  ------------------------------ 62.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 110.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 124.25 KiB/194.62 KiB\r\ngitpython  ------------------------------ 158.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 46.91 KiB/215.79 KiB\r\nrich  ------------------------------ 110.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 253.78 KiB/314.47 KiB\r\noptax  ------------------------------ 126.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 155.83 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 413.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 157.00 KiB/434.43 KiB\r\nflax  ------------------------------ 110.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 444.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 32.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 78.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 391.85 KiB/749.55 KiB\r\npygments  ------------------------------ 125.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 445.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 414.97 KiB/2.35 MiB\r\njax  ------------------------------ 94.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 366.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 382.91 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 396.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 30.87 KiB/5.71 MiB\r\npillow  ------------------------------ 158.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 192.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 158.91 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 380.36 
KiB/18.63 MiB\r\nwandb  ------------------------------ 123.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 255.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 385.54 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 334.89 KiB/38.67 MiB\r\njaxlib  ------------------------------ 441.79 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 366.89 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 366.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 46.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 382.82 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 370.70 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 353.96 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 424.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 408.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 408.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠴ Preparing packages... (4/83)\r\neinops  ------------------------------ 30.88 KiB/62.85 KiB\r\nidna  ------------------------------ 64.00 KiB/68.79 KiB\r\nclick  ------------------------------ 104.78 KiB/104.78 KiB\r\nhumanize  ------------------------------ 23.79 KiB/125.85 KiB\r\nurllib3  ------------------------------ 121.58 KiB/126.75 KiB\r\ntyro  ------------------------------ 112.00 KiB/129.36 KiB\r\nsimplejson  ------------------------------ 140.22 KiB/148.65 KiB\r\netils  ------------------------------ 62.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 110.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 124.25 KiB/194.62 KiB\r\ngitpython  ------------------------------ 158.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 46.91 KiB/215.79 KiB\r\nrich  ------------------------------ 110.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 253.78 KiB/314.47 KiB\r\noptax  ------------------------------ 126.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 155.83 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 413.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 157.00 KiB/434.43 KiB\r\nflax  ------------------------------ 110.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 444.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 32.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 78.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 391.85 KiB/749.55 KiB\r\npygments  ------------------------------ 125.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 445.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 414.97 KiB/2.35 MiB\r\njax  ------------------------------ 94.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 366.89 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 382.91 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 396.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 30.87 KiB/5.71 MiB\r\npillow  ------------------------------ 174.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 192.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 158.91 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 
380.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 123.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 255.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 385.54 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 334.89 KiB/38.67 MiB\r\njaxlib  ------------------------------ 441.79 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 366.89 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 366.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 46.83 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 382.82 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 370.70 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 353.96 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 424.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 408.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 408.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠴ Preparing packages... (4/83)\r\neinops  ------------------------------ 30.88 KiB/62.85 KiB\r\nidna  ------------------------------ 64.00 KiB/68.79 KiB\r\nclick  ------------------------------ 104.78 KiB/104.78 KiB\r\nhumanize  ------------------------------ 23.79 KiB/125.85 KiB\r\nurllib3  ------------------------------ 121.58 KiB/126.75 KiB\r\ntyro  ------------------------------ 112.00 KiB/129.36 KiB\r\nsimplejson  ------------------------------ 140.22 KiB/148.65 KiB\r\netils  ------------------------------ 62.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 110.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 124.25 KiB/194.62 KiB\r\ngitpython  ------------------------------ 158.38 KiB/203.29 KiB\r\npre-commit  ------------------------------ 46.91 KiB/215.79 KiB\r\nrich  ------------------------------ 110.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 253.78 KiB/314.47 KiB\r\noptax  ------------------------------ 126.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 155.83 KiB/361.67 KiB\r\nmsgpack  ------------------------------ 413.23 KiB/416.90 KiB\r\npydantic  ------------------------------ 157.00 KiB/434.43 KiB\r\nflax  ------------------------------ 110.92 KiB/447.36 KiB\r\ndistlib  ------------------------------ 444.97 KiB/458.05 KiB\r\ngrain  ------------------------------ 32.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 78.00 KiB/549.91 KiB\r\npyyaml  ------------------------------ 391.85 KiB/749.55 KiB\r\npygments  ------------------------------ 125.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 445.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 414.97 KiB/2.35 MiB\r\njax  ------------------------------ 94.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 371.74 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 382.91 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 396.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 30.87 KiB/5.71 MiB\r\npillow  ------------------------------ 174.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 192.00 KiB/10.31 MiB\r\nnumpy  ------------------------------ 158.91 KiB/15.87 MiB\r\ntensorstore  
------------------------------ 380.36 KiB/18.63 MiB\r\n[remaining per-package download-progress lines of this frame truncated]",,terminal_output
+4595,7884329,"TERMINAL",0,0,"[uv progress output: Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine; Preparing packages... (4/83 -> 5/83); repeated per-package download-progress frames truncated]",,terminal_output
+4596,7884393,"TERMINAL",0,0,"[uv progress output: Preparing packages... (5/83); repeated per-package download-progress frames truncated]",,terminal_output
+4597,7884503,"TERMINAL",0,0,"[uv progress output: Preparing packages... (5/83 -> 10/83); repeated per-package download-progress frames truncated]",,terminal_output
+4598,7884624,"TERMINAL",0,0,"[uv progress output: Preparing packages... (10/83); repeated per-package download-progress frames truncated]",,terminal_output
+4599,7884746,"TERMINAL",0,0,"[uv progress output: Preparing packages... (13/83); repeated per-package download-progress frames truncated]\r\nwandb  ------------------------------ 139.17 
KiB/18.66 MiB\r\nscipy  ------------------------------ 351.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 578.50 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 520.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 605.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 552.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 565.17 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 573.95 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 578.78 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 541.23 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 560.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 616.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 584.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 616.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠇ Preparing packages... (13/83)\r\nnodeenv  ------------------------------ 0 B/21.79 KiB\r\ndocstring-parser  ------------------------------ 14.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 14.87 KiB/36.58 KiB\r\nrequests  ------------------------------ 32.00 KiB/63.22 KiB\r\npackaging  ------------------------------ 32.00 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 48.00 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 64.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 55.79 KiB/125.85 KiB\r\netils  ------------------------------ 94.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 174.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 188.25 KiB/194.62 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 158.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 314.47 KiB/314.47 KiB\r\noptax  ------------------------------ 181.88 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 201.86 KiB/361.67 KiB\r\npydantic  ------------------------------ 269.00 KiB/434.43 KiB\r\nflax  ------------------------------ 222.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 112.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 173.90 KiB/549.91 KiB\r\npyyaml  ------------------------------ 606.32 KiB/749.55 KiB\r\npygments  ------------------------------ 349.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 637.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 604.47 KiB/2.35 MiB\r\njax  ------------------------------ 286.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 536.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 584.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 588.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 46.87 KiB/5.71 MiB\r\npillow  ------------------------------ 238.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 569.81 KiB/10.31 MiB\r\nnumpy  ------------------------------ 552.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 572.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 155.17 KiB/18.66 MiB\r\nscipy  
------------------------------ 351.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 578.50 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 520.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 605.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 552.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 565.17 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 573.95 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 578.78 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 541.23 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 560.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 616.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 584.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 616.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠇ Preparing packages... (13/83)\r\nnodeenv  ------------------------------ 0 B/21.79 KiB\r\ndocstring-parser  ------------------------------ 14.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 14.87 KiB/36.58 KiB\r\nrequests  ------------------------------ 32.00 KiB/63.22 KiB\r\npackaging  ------------------------------ 32.00 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 48.00 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 64.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 55.79 KiB/125.85 KiB\r\netils  ------------------------------ 94.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 174.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 188.25 KiB/194.62 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 174.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 314.47 KiB/314.47 KiB\r\noptax  ------------------------------ 181.88 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 201.86 KiB/361.67 KiB\r\npydantic  ------------------------------ 269.00 KiB/434.43 KiB\r\nflax  ------------------------------ 222.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 112.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 173.90 KiB/549.91 KiB\r\npyyaml  ------------------------------ 606.32 KiB/749.55 KiB\r\npygments  ------------------------------ 349.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 637.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 604.47 KiB/2.35 MiB\r\njax  ------------------------------ 286.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 536.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 584.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 588.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 46.87 KiB/5.71 MiB\r\npillow  ------------------------------ 238.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 569.81 KiB/10.31 MiB\r\nnumpy  ------------------------------ 552.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 572.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 155.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 351.11 
KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 578.50 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 520.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 605.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 552.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 565.17 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 573.95 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 578.78 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 541.23 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 560.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 616.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 584.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 616.56 KiB/614.37 MiB ",,terminal_output +4600,7884874,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠇ Preparing packages... (13/83)\r\nnodeenv  ------------------------------ 14.88 KiB/21.79 KiB\r\ndocstring-parser  ------------------------------ 14.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 14.87 KiB/36.58 KiB\r\nrequests  ------------------------------ 32.00 KiB/63.22 KiB\r\npackaging  ------------------------------ 48.00 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 48.00 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 64.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 55.79 KiB/125.85 KiB\r\netils  ------------------------------ 94.91 KiB/166.60 KiB\r\ntreescope  ------------------------------ 174.88 KiB/177.98 KiB\r\nfsspec  ------------------------------ 188.25 KiB/194.62 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 174.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 314.47 KiB/314.47 KiB\r\noptax  ------------------------------ 197.77 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 201.86 KiB/361.67 KiB\r\npydantic  ------------------------------ 269.00 KiB/434.43 KiB\r\nflax  ------------------------------ 222.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 112.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 173.90 KiB/549.91 KiB\r\npyyaml  ------------------------------ 638.32 KiB/749.55 KiB\r\npygments  ------------------------------ 349.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 659.28 KiB/1.91 MiB\r\narray-record  ------------------------------ 636.47 KiB/2.35 MiB\r\njax  ------------------------------ 286.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 536.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 600.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 620.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 62.87 KiB/5.71 MiB\r\npillow  ------------------------------ 238.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 601.81 KiB/10.31 MiB\r\nnumpy  ------------------------------ 584.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 603.67 KiB/18.63 MiB\r\nwandb  ------------------------------ 155.17 KiB/18.66 MiB\r\nscipy  
------------------------------ 351.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 600.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 540.04 KiB/38.67 MiB\r\njaxlib  ------------------------------ 637.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 584.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 590.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 584.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 591.24 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 561.34 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 586.89 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 648.46 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 616.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 648.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠇ Preparing packages... (13/83)\r\nnodeenv  ------------------------------ 14.88 KiB/21.79 KiB\r\ndocstring-parser  ------------------------------ 14.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 14.87 KiB/36.58 KiB\r\nrequests  ------------------------------ 32.00 KiB/63.22 KiB\r\npackaging  ------------------------------ 48.00 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 48.00 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 64.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 55.79 KiB/125.85 KiB\r\netils  ------------------------------ 94.91 KiB/166.60 KiB\r\nfsspec  ------------------------------ 188.25 KiB/194.62 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 174.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 314.47 KiB/314.47 KiB\r\noptax  ------------------------------ 197.77 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 201.86 KiB/361.67 KiB\r\npydantic  ------------------------------ 269.00 KiB/434.43 KiB\r\nflax  ------------------------------ 222.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 112.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 173.90 KiB/549.91 KiB\r\npyyaml  ------------------------------ 638.32 KiB/749.55 KiB\r\npygments  ------------------------------ 349.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 659.28 KiB/1.91 MiB\r\narray-record  ------------------------------ 636.47 KiB/2.35 MiB\r\njax  ------------------------------ 286.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 536.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 600.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 620.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 62.87 KiB/5.71 MiB\r\npillow  ------------------------------ 238.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 601.81 KiB/10.31 MiB\r\nnumpy  ------------------------------ 584.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 603.67 KiB/18.63 MiB\r\nwandb  ------------------------------ 155.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 351.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  
------------------------------ 600.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 540.04 KiB/38.67 MiB\r\njaxlib  ------------------------------ 637.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 584.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 590.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 584.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 591.24 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 561.34 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 586.89 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 648.46 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 616.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 648.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠇ Preparing packages... (13/83)\r\nnodeenv  ------------------------------ 14.88 KiB/21.79 KiB\r\ndocstring-parser  ------------------------------ 14.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 14.87 KiB/36.58 KiB\r\nrequests  ------------------------------ 32.00 KiB/63.22 KiB\r\npackaging  ------------------------------ 48.00 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 48.00 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 64.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 71.79 KiB/125.85 KiB\r\netils  ------------------------------ 94.91 KiB/166.60 KiB\r\nfsspec  ------------------------------ 194.62 KiB/194.62 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 174.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 314.47 KiB/314.47 KiB\r\noptax  ------------------------------ 197.77 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 203.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 285.00 KiB/434.43 KiB\r\nflax  ------------------------------ 229.67 KiB/447.36 KiB\r\ngrain  ------------------------------ 112.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 173.90 KiB/549.91 KiB\r\npyyaml  ------------------------------ 654.32 KiB/749.55 KiB\r\npygments  ------------------------------ 349.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 669.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 668.47 KiB/2.35 MiB\r\njax  ------------------------------ 302.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 552.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 632.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 636.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 62.87 KiB/5.71 MiB\r\npillow  ------------------------------ 238.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 633.81 KiB/10.31 MiB\r\nnumpy  ------------------------------ 616.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 619.67 KiB/18.63 MiB\r\nwandb  ------------------------------ 155.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 367.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 616.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  
------------------------------ 556.04 KiB/38.67 MiB\r\njaxlib  ------------------------------ 639.86 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 600.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 606.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 616.46 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 623.24 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 577.34 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 618.89 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 664.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 648.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 664.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠇ Preparing packages... (13/83)\r\nnodeenv  ------------------------------ 14.88 KiB/21.79 KiB\r\nsmmap  ------------------------------ 15.89 KiB/23.73 KiB\r\ndocstring-parser  ------------------------------ 14.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 14.87 KiB/36.58 KiB\r\nrequests  ------------------------------ 32.00 KiB/63.22 KiB\r\npackaging  ------------------------------ 48.00 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 64.00 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 64.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 71.79 KiB/125.85 KiB\r\netils  ------------------------------ 94.91 KiB/166.60 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 174.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 314.47 KiB/314.47 KiB\r\noptax  ------------------------------ 213.77 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 219.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 285.00 KiB/434.43 KiB\r\nflax  ------------------------------ 229.67 KiB/447.36 KiB\r\ngrain  ------------------------------ 128.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 189.90 KiB/549.91 KiB\r\npyyaml  ------------------------------ 654.32 KiB/749.55 KiB\r\npygments  ------------------------------ 349.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 669.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 668.47 KiB/2.35 MiB\r\njax  ------------------------------ 302.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 552.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 632.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 636.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 62.87 KiB/5.71 MiB\r\npillow  ------------------------------ 238.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 633.81 KiB/10.31 MiB\r\nnumpy  ------------------------------ 616.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 619.67 KiB/18.63 MiB\r\nwandb  ------------------------------ 155.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 367.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 616.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 556.04 KiB/38.67 MiB\r\njaxlib  ------------------------------ 639.86 
KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 600.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 606.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 616.46 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 623.24 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 577.34 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 618.89 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 664.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 648.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 664.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠋ Preparing packages... (17/83)\r\nnodeenv  ------------------------------ 14.88 KiB/21.79 KiB\r\nsmmap  ------------------------------ 15.89 KiB/23.73 KiB\r\ndocstring-parser  ------------------------------ 14.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 14.87 KiB/36.58 KiB\r\nrequests  ------------------------------ 47.30 KiB/63.22 KiB\r\npackaging  ------------------------------ 48.00 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 64.00 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 80.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 71.79 KiB/125.85 KiB\r\netils  ------------------------------ 110.91 KiB/166.60 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 174.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 314.47 KiB/314.47 KiB\r\noptax  ------------------------------ 213.77 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 219.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 285.00 KiB/434.43 KiB\r\nflax  ------------------------------ 229.67 KiB/447.36 KiB\r\ngrain  ------------------------------ 128.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 189.90 KiB/549.91 KiB\r\npyyaml  ------------------------------ 654.32 KiB/749.55 KiB\r\npygments  ------------------------------ 349.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 669.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 668.47 KiB/2.35 MiB\r\njax  ------------------------------ 302.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 552.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 632.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 636.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 62.87 KiB/5.71 MiB\r\npillow  ------------------------------ 238.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 633.81 KiB/10.31 MiB\r\nnumpy  ------------------------------ 616.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 619.67 KiB/18.63 MiB\r\nwandb  ------------------------------ 155.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 367.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 616.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 556.04 KiB/38.67 MiB\r\njaxlib  ------------------------------ 639.86 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 600.56 KiB/85.42 
MiB\r\njax-cuda12-pjrt  ------------------------------ 606.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 616.46 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 623.24 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 577.34 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 618.89 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 664.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 648.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 664.56 KiB/614.37 MiB ",,terminal_output +4601,7885253,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠋ Preparing packages... (17/83)\r\nplatformdirs  ------------------------------ 14.88 KiB/18.22 KiB\r\nnodeenv  ------------------------------ 21.79 KiB/21.79 KiB\r\nsmmap  ------------------------------ 15.89 KiB/23.73 KiB\r\ndocstring-parser  ------------------------------ 30.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 14.87 KiB/36.58 KiB\r\ntyping-extensions  ------------------------------ 16.00 KiB/43.57 KiB\r\ntoolz  ------------------------------ 14.88 KiB/55.06 KiB\r\nrequests  ------------------------------ 47.30 KiB/63.22 KiB\r\npackaging  ------------------------------ 63.97 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 64.00 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 80.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 71.79 KiB/125.85 KiB\r\netils  ------------------------------ 110.91 KiB/166.60 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 174.88 KiB/237.66 KiB\r\nprotobuf  ------------------------------ 314.47 KiB/314.47 KiB\r\noptax  ------------------------------ 213.77 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 219.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 285.00 KiB/434.43 KiB\r\nflax  ------------------------------ 261.67 KiB/447.36 KiB\r\ngrain  ------------------------------ 128.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 189.90 KiB/549.91 KiB\r\npyyaml  ------------------------------ 730.07 KiB/749.55 KiB\r\npygments  ------------------------------ 365.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 749.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 716.47 KiB/2.35 MiB\r\njax  ------------------------------ 318.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 584.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 696.05 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 716.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 62.87 KiB/5.71 MiB\r\npillow  ------------------------------ 270.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 681.81 KiB/10.31 MiB\r\nnumpy  ------------------------------ 692.34 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 684.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 155.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 367.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 696.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 632.56 
KiB/38.67 MiB\r\njaxlib  ------------------------------ 717.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 680.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 686.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 664.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 696.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 647.87 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 657.24 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 712.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 712.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 728.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠋ Preparing packages... (17/83)\r\nplatformdirs  ------------------------------ 14.88 KiB/18.22 KiB\r\nnodeenv  ------------------------------ 21.79 KiB/21.79 KiB\r\nsmmap  ------------------------------ 15.89 KiB/23.73 KiB\r\ndocstring-parser  ------------------------------ 30.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 14.87 KiB/36.58 KiB\r\ntyping-extensions  ------------------------------ 43.57 KiB/43.57 KiB\r\ntoolz  ------------------------------ 14.88 KiB/55.06 KiB\r\nrequests  ------------------------------ 47.30 KiB/63.22 KiB\r\npackaging  ------------------------------ 63.97 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 64.00 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 96.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 71.79 KiB/125.85 KiB\r\netils  ------------------------------ 110.91 KiB/166.60 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 174.88 KiB/237.66 KiB\r\noptax  ------------------------------ 213.77 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 219.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 285.00 KiB/434.43 KiB\r\nflax  ------------------------------ 261.67 KiB/447.36 KiB\r\ngrain  ------------------------------ 128.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 189.90 KiB/549.91 KiB\r\npyyaml  ------------------------------ 749.55 KiB/749.55 KiB\r\npygments  ------------------------------ 365.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 776.90 KiB/1.91 MiB\r\narray-record  ------------------------------ 732.47 KiB/2.35 MiB\r\njax  ------------------------------ 334.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 584.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 712.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 733.50 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 62.87 KiB/5.71 MiB\r\npillow  ------------------------------ 270.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 729.81 KiB/10.31 MiB\r\nnumpy  ------------------------------ 708.34 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 716.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 171.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 367.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 712.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  
------------------------------ 648.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 749.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 712.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 702.93 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 696.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 712.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 648.56 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 689.24 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 743.06 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 744.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 760.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠋ Preparing packages... (17/83)\r\nplatformdirs  ------------------------------ 14.88 KiB/18.22 KiB\r\nsmmap  ------------------------------ 15.89 KiB/23.73 KiB\r\ndocstring-parser  ------------------------------ 30.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 30.87 KiB/36.58 KiB\r\ntyping-extensions  ------------------------------ 43.57 KiB/43.57 KiB\r\ntoolz  ------------------------------ 14.88 KiB/55.06 KiB\r\nrequests  ------------------------------ 63.22 KiB/63.22 KiB\r\npackaging  ------------------------------ 63.97 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 64.00 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 96.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 71.79 KiB/125.85 KiB\r\netils  ------------------------------ 110.91 KiB/166.60 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 174.88 KiB/237.66 KiB\r\noptax  ------------------------------ 213.77 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 219.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 285.00 KiB/434.43 KiB\r\nflax  ------------------------------ 261.67 KiB/447.36 KiB\r\ngrain  ------------------------------ 128.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 189.90 KiB/549.91 KiB\r\npyyaml  ------------------------------ 749.55 KiB/749.55 KiB\r\npygments  ------------------------------ 365.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 792.90 KiB/1.91 MiB\r\narray-record  ------------------------------ 764.47 KiB/2.35 MiB\r\njax  ------------------------------ 334.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 600.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 728.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 749.50 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 75.55 KiB/5.71 MiB\r\npillow  ------------------------------ 286.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 732.82 KiB/10.31 MiB\r\nnumpy  ------------------------------ 712.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 732.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 171.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 367.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 728.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  
------------------------------ 664.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 762.14 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 728.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 718.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 712.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 728.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 659.57 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 699.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 759.06 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 744.65 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 776.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠋ Preparing packages... (17/83)\r\nplatformdirs  ------------------------------ 14.88 KiB/18.22 KiB\r\nsmmap  ------------------------------ 15.89 KiB/23.73 KiB\r\ndocstring-parser  ------------------------------ 30.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 30.87 KiB/36.58 KiB\r\ntyping-extensions  ------------------------------ 43.57 KiB/43.57 KiB\r\ntoolz  ------------------------------ 14.88 KiB/55.06 KiB\r\nrequests  ------------------------------ 63.22 KiB/63.22 KiB\r\npackaging  ------------------------------ 63.97 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 64.00 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 96.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 71.79 KiB/125.85 KiB\r\netils  ------------------------------ 110.91 KiB/166.60 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 190.88 KiB/237.66 KiB\r\noptax  ------------------------------ 213.77 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 219.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 285.00 KiB/434.43 KiB\r\nflax  ------------------------------ 261.67 KiB/447.36 KiB\r\ngrain  ------------------------------ 128.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 189.90 KiB/549.91 KiB\r\npyyaml  ------------------------------ 749.55 KiB/749.55 KiB\r\npygments  ------------------------------ 365.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 792.90 KiB/1.91 MiB\r\narray-record  ------------------------------ 764.47 KiB/2.35 MiB\r\njax  ------------------------------ 334.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 600.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 728.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 749.50 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 75.55 KiB/5.71 MiB\r\npillow  ------------------------------ 286.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 732.82 KiB/10.31 MiB\r\nnumpy  ------------------------------ 712.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 732.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 171.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 367.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 728.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 
664.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 762.14 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 728.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 718.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 712.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 728.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 659.57 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 699.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 759.06 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 744.65 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 776.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠋ Preparing packages... (17/83)\r\nplatformdirs  ------------------------------ 14.88 KiB/18.22 KiB\r\nsmmap  ------------------------------ 23.73 KiB/23.73 KiB\r\ndocstring-parser  ------------------------------ 30.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 30.87 KiB/36.58 KiB\r\ntyping-extensions  ------------------------------ 43.57 KiB/43.57 KiB\r\ntoolz  ------------------------------ 14.88 KiB/55.06 KiB\r\nrequests  ------------------------------ 63.22 KiB/63.22 KiB\r\npackaging  ------------------------------ 63.97 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 70.25 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 96.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 71.79 KiB/125.85 KiB\r\netils  ------------------------------ 126.91 KiB/166.60 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 190.88 KiB/237.66 KiB\r\noptax  ------------------------------ 222.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 235.72 KiB/361.67 KiB\r\npydantic  ------------------------------ 285.00 KiB/434.43 KiB\r\nflax  ------------------------------ 270.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 128.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 189.90 KiB/549.91 KiB\r\npygments  ------------------------------ 381.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 792.90 KiB/1.91 MiB\r\narray-record  ------------------------------ 764.47 KiB/2.35 MiB\r\njax  ------------------------------ 334.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 600.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 728.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 749.50 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 75.55 KiB/5.71 MiB\r\npillow  ------------------------------ 286.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 732.82 KiB/10.31 MiB\r\nnumpy  ------------------------------ 712.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 732.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 187.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 383.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 728.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 664.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 762.14 KiB/74.55 
MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 728.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 718.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 712.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 728.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 659.57 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 699.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 759.06 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 744.65 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 776.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠋ Preparing packages... (17/83)\r\nplatformdirs  ------------------------------ 14.88 KiB/18.22 KiB\r\nsmmap  ------------------------------ 23.73 KiB/23.73 KiB\r\ndocstring-parser  ------------------------------ 30.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 30.87 KiB/36.58 KiB\r\ntoolz  ------------------------------ 14.88 KiB/55.06 KiB\r\nrequests  ------------------------------ 63.22 KiB/63.22 KiB\r\npackaging  ------------------------------ 63.97 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 70.25 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 96.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 71.79 KiB/125.85 KiB\r\netils  ------------------------------ 126.91 KiB/166.60 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 190.88 KiB/237.66 KiB\r\noptax  ------------------------------ 222.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 235.72 KiB/361.67 KiB\r\npydantic  ------------------------------ 285.00 KiB/434.43 KiB\r\nflax  ------------------------------ 270.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 128.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 189.90 KiB/549.91 KiB\r\npygments  ------------------------------ 381.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 792.90 KiB/1.91 MiB\r\narray-record  ------------------------------ 764.47 KiB/2.35 MiB\r\njax  ------------------------------ 334.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 600.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 728.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 749.50 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 75.55 KiB/5.71 MiB\r\npillow  ------------------------------ 286.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 732.82 KiB/10.31 MiB\r\nnumpy  ------------------------------ 712.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 732.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 187.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 383.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 728.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 664.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 762.14 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 728.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 718.83 KiB/126.58 
MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 712.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 728.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 659.57 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 699.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 759.06 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 744.65 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 776.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠋ Preparing packages... (17/83)\r\nplatformdirs  ------------------------------ 14.88 KiB/18.22 KiB\r\nsmmap  ------------------------------ 23.73 KiB/23.73 KiB\r\ndocstring-parser  ------------------------------ 30.87 KiB/36.03 KiB\r\nimportlib-resources  ------------------------------ 30.87 KiB/36.58 KiB\r\ntoolz  ------------------------------ 14.88 KiB/55.06 KiB\r\nrequests  ------------------------------ 63.22 KiB/63.22 KiB\r\npackaging  ------------------------------ 63.97 KiB/64.91 KiB\r\nopt-einsum  ------------------------------ 70.25 KiB/70.25 KiB\r\nmarkdown-it-py  ------------------------------ 30.87 KiB/85.27 KiB\r\nchex  ------------------------------ 96.00 KiB/98.60 KiB\r\nhumanize  ------------------------------ 71.79 KiB/125.85 KiB\r\netils  ------------------------------ 126.91 KiB/166.60 KiB\r\npre-commit  ------------------------------ 77.62 KiB/215.79 KiB\r\nrich  ------------------------------ 190.88 KiB/237.66 KiB\r\noptax  ------------------------------ 222.05 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 235.72 KiB/361.67 KiB\r\npydantic  ------------------------------ 285.00 KiB/434.43 KiB\r\nflax  ------------------------------ 270.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 128.00 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 189.90 KiB/549.91 KiB\r\npygments  ------------------------------ 381.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 792.90 KiB/1.91 MiB\r\narray-record  ------------------------------ 764.47 KiB/2.35 MiB\r\njax  ------------------------------ 334.92 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 600.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 728.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 749.50 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 75.55 KiB/5.71 MiB\r\npillow  ------------------------------ 286.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 732.82 KiB/10.31 MiB\r\nnumpy  ------------------------------ 712.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 732.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 187.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 383.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 728.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 664.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 762.14 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 728.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 718.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 712.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 728.56 KiB/191.57 
MiB [... repeated uv download progress frames collapsed: Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine — ⠋/⠙ Preparing packages... (17/83 → 23/83); fetching the JAX CUDA 12 stack (jax, jaxlib, jax-cuda12-plugin, jax-cuda12-pjrt, the nvidia-*-cu12 libraries) alongside flax, optax, orbax-checkpoint, grain, wandb, tensorstore, array-record, dm-pix, and the remaining project dependencies ...]",,terminal_output
+4602,7885343,"TERMINAL",0,0,"[... further uv download progress frames collapsed: ⠙/⠹ Preparing packages... (23/83 → 29/83) ...]",,terminal_output
+4603,7885862,"TERMINAL",0,0,"[... further uv download progress frames collapsed ...] Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠹ Preparing packages... 
(29/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\nzipp  ------------------------------ 9.55 KiB/10.04 KiB\r\ntyping-inspection  ------------------------------ 14.21 KiB/14.21 KiB\r\naiofiles  ------------------------------ 15.52 KiB/15.52 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ntypeguard  ------------------------------ 32.00 KiB/34.06 KiB\r\ntoolz  ------------------------------ 55.06 KiB/55.06 KiB\r\ndm-pix  ------------------------------ 46.92 KiB/55.42 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 47.70 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 142.91 KiB/166.60 KiB\r\npre-commit  ------------------------------ 215.79 KiB/215.79 KiB\r\noptax  ------------------------------ 269.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 267.72 KiB/361.67 KiB\r\npydantic  ------------------------------ 381.00 KiB/434.43 KiB\r\nflax  ------------------------------ 350.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 191.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 254.00 KiB/549.91 KiB\r\npygments  ------------------------------ 445.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 961.58 KiB/1.91 MiB\r\narray-record  ------------------------------ 908.47 KiB/2.35 MiB\r\njax  ------------------------------ 440.56 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 648.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 904.35 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 924.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 91.55 KiB/5.71 MiB\r\npillow  ------------------------------ 974.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 914.21 KiB/10.31 MiB\r\nnumpy  ------------------------------ 856.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 878.68 KiB/18.63 MiB\r\nwandb  ------------------------------ 219.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 431.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 920.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 856.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 941.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 907.57 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 878.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 904.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 920.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 841.14 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 846.17 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 936.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 888.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 945.54 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠹ Preparing packages... 
(29/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\nzipp  ------------------------------ 9.55 KiB/10.04 KiB\r\ntyping-inspection  ------------------------------ 14.21 KiB/14.21 KiB\r\naiofiles  ------------------------------ 15.52 KiB/15.52 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ntypeguard  ------------------------------ 32.00 KiB/34.06 KiB\r\ntoolz  ------------------------------ 55.06 KiB/55.06 KiB\r\ndm-pix  ------------------------------ 46.92 KiB/55.42 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 47.70 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 142.91 KiB/166.60 KiB\r\npre-commit  ------------------------------ 215.79 KiB/215.79 KiB\r\noptax  ------------------------------ 269.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 267.72 KiB/361.67 KiB\r\npydantic  ------------------------------ 381.00 KiB/434.43 KiB\r\nflax  ------------------------------ 350.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 191.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 254.00 KiB/549.91 KiB\r\npygments  ------------------------------ 445.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 961.58 KiB/1.91 MiB\r\narray-record  ------------------------------ 908.47 KiB/2.35 MiB\r\njax  ------------------------------ 456.56 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 664.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 904.35 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 924.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 91.55 KiB/5.71 MiB\r\npillow  ------------------------------ 974.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 914.21 KiB/10.31 MiB\r\nnumpy  ------------------------------ 856.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 878.68 KiB/18.63 MiB\r\nwandb  ------------------------------ 219.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 431.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 920.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 856.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 941.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 907.57 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 878.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 904.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 920.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 841.14 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 846.17 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 936.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 888.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 945.54 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... 
(34/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\nzipp  ------------------------------ 9.55 KiB/10.04 KiB\r\ntyping-inspection  ------------------------------ 14.21 KiB/14.21 KiB\r\naiofiles  ------------------------------ 15.52 KiB/15.52 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ntypeguard  ------------------------------ 32.00 KiB/34.06 KiB\r\ntoolz  ------------------------------ 55.06 KiB/55.06 KiB\r\ndm-pix  ------------------------------ 46.92 KiB/55.42 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 47.70 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 142.91 KiB/166.60 KiB\r\noptax  ------------------------------ 269.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 267.72 KiB/361.67 KiB\r\npydantic  ------------------------------ 381.00 KiB/434.43 KiB\r\nflax  ------------------------------ 350.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 191.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 254.00 KiB/549.91 KiB\r\npygments  ------------------------------ 461.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 993.58 KiB/1.91 MiB\r\narray-record  ------------------------------ 940.47 KiB/2.35 MiB\r\njax  ------------------------------ 483.93 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 664.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 918.43 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 936.71 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 91.55 KiB/5.71 MiB\r\npillow  ------------------------------ 988.17 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 946.21 KiB/10.31 MiB\r\nnumpy  ------------------------------ 888.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 908.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 219.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 447.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 952.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 888.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 973.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 939.57 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 910.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 936.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 952.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 872.82 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 878.17 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 963.56 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 920.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 963.56 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... 
(34/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\nzipp  ------------------------------ 9.55 KiB/10.04 KiB\r\ntyping-inspection  ------------------------------ 14.21 KiB/14.21 KiB\r\naiofiles  ------------------------------ 15.52 KiB/15.52 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ntypeguard  ------------------------------ 32.00 KiB/34.06 KiB\r\ntoolz  ------------------------------ 55.06 KiB/55.06 KiB\r\ndm-pix  ------------------------------ 55.42 KiB/55.42 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 47.70 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 142.91 KiB/166.60 KiB\r\noptax  ------------------------------ 269.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 271.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 397.00 KiB/434.43 KiB\r\nflax  ------------------------------ 350.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 254.00 KiB/549.91 KiB\r\npygments  ------------------------------ 461.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1021.27 KiB/1.91 MiB\r\narray-record  ------------------------------ 972.47 KiB/2.35 MiB\r\njax  ------------------------------ 483.93 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 664.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 950.43 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 968.71 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 107.55 KiB/5.71 MiB\r\npillow  ------------------------------ 1006.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 956.71 KiB/10.31 MiB\r\nnumpy  ------------------------------ 936.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 940.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 447.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 979.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 909.70 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1005.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 963.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 940.12 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 963.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 979.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 904.56 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 904.37 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 995.86 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 963.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 989.45 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... 
(34/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\ntyping-inspection  ------------------------------ 14.21 KiB/14.21 KiB\r\naiofiles  ------------------------------ 15.52 KiB/15.52 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ntypeguard  ------------------------------ 32.00 KiB/34.06 KiB\r\ntoolz  ------------------------------ 55.06 KiB/55.06 KiB\r\ndm-pix  ------------------------------ 55.42 KiB/55.42 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 47.70 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 142.91 KiB/166.60 KiB\r\noptax  ------------------------------ 269.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 271.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 397.00 KiB/434.43 KiB\r\nflax  ------------------------------ 350.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 254.00 KiB/549.91 KiB\r\npygments  ------------------------------ 461.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1.01 MiB/1.91 MiB\r\narray-record  ------------------------------ 972.47 KiB/2.35 MiB\r\njax  ------------------------------ 483.93 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 664.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 950.43 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 972.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 107.55 KiB/5.71 MiB\r\npillow  ------------------------------ 1022.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 956.71 KiB/10.31 MiB\r\nnumpy  ------------------------------ 936.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 956.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 447.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 979.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 909.70 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1021.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 963.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 956.12 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 963.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 979.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 904.56 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 920.37 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 995.86 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 963.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 989.45 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... 
(34/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\ntyping-inspection  ------------------------------ 14.21 KiB/14.21 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ntypeguard  ------------------------------ 32.00 KiB/34.06 KiB\r\ntoolz  ------------------------------ 55.06 KiB/55.06 KiB\r\ndm-pix  ------------------------------ 55.42 KiB/55.42 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 47.70 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 142.91 KiB/166.60 KiB\r\noptax  ------------------------------ 269.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 271.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 397.00 KiB/434.43 KiB\r\nflax  ------------------------------ 350.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 254.00 KiB/549.91 KiB\r\npygments  ------------------------------ 461.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1.01 MiB/1.91 MiB\r\narray-record  ------------------------------ 972.47 KiB/2.35 MiB\r\njax  ------------------------------ 483.93 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 664.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 950.43 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 972.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 107.55 KiB/5.71 MiB\r\npillow  ------------------------------ 1022.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 956.71 KiB/10.31 MiB\r\nnumpy  ------------------------------ 936.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 956.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 447.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 979.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 909.70 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1021.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 963.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 956.12 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 963.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 979.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 904.56 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 920.37 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 995.86 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 963.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 989.45 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... 
(34/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\ntyping-inspection  ------------------------------ 14.21 KiB/14.21 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ntypeguard  ------------------------------ 32.00 KiB/34.06 KiB\r\ntoolz  ------------------------------ 55.06 KiB/55.06 KiB\r\ndm-pix  ------------------------------ 55.42 KiB/55.42 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 47.70 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 142.91 KiB/166.60 KiB\r\noptax  ------------------------------ 269.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 271.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 397.00 KiB/434.43 KiB\r\nflax  ------------------------------ 350.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 254.00 KiB/549.91 KiB\r\npygments  ------------------------------ 461.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1.01 MiB/1.91 MiB\r\narray-record  ------------------------------ 972.47 KiB/2.35 MiB\r\njax  ------------------------------ 499.93 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 664.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 950.43 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 972.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 107.55 KiB/5.71 MiB\r\npillow  ------------------------------ 1022.69 KiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 972.71 KiB/10.31 MiB\r\nnumpy  ------------------------------ 936.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 956.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 463.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 979.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 909.70 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1021.28 KiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 963.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 956.12 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 963.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 979.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 904.56 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 920.37 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 995.86 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 963.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 989.45 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... 
(34/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\ntyping-inspection  ------------------------------ 14.21 KiB/14.21 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ntypeguard  ------------------------------ 32.00 KiB/34.06 KiB\r\ntoolz  ------------------------------ 55.06 KiB/55.06 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 47.70 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 158.91 KiB/166.60 KiB\r\noptax  ------------------------------ 269.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 271.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 397.00 KiB/434.43 KiB\r\nflax  ------------------------------ 350.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 254.00 KiB/549.91 KiB\r\npygments  ------------------------------ 461.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1.03 MiB/1.91 MiB\r\narray-record  ------------------------------ 988.47 KiB/2.35 MiB\r\njax  ------------------------------ 499.93 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 664.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 963.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 988.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 815.55 KiB/5.71 MiB\r\npillow  ------------------------------ 1.01 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 988.71 KiB/10.31 MiB\r\nnumpy  ------------------------------ 952.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 972.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 463.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 995.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 912.46 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1.01 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 995.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 972.12 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 979.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 995.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 920.56 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 936.37 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 1011.86 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 979.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 1005.45 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... 
(34/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ntypeguard  ------------------------------ 32.00 KiB/34.06 KiB\r\ntoolz  ------------------------------ 55.06 KiB/55.06 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 47.70 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 158.91 KiB/166.60 KiB\r\noptax  ------------------------------ 269.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 271.83 KiB/361.67 KiB\r\npydantic  ------------------------------ 397.00 KiB/434.43 KiB\r\nflax  ------------------------------ 366.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 270.00 KiB/549.91 KiB\r\npygments  ------------------------------ 477.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1.03 MiB/1.91 MiB\r\narray-record  ------------------------------ 988.47 KiB/2.35 MiB\r\njax  ------------------------------ 499.93 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 664.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 963.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 988.63 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 815.55 KiB/5.71 MiB\r\npillow  ------------------------------ 1.01 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 988.71 KiB/10.31 MiB\r\nnumpy  ------------------------------ 952.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 972.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 463.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 995.56 KiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 912.46 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1.01 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 995.56 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 972.12 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 979.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 995.56 KiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 920.56 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 936.37 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 1011.86 KiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 979.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 1005.45 KiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... 
(34/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ntoolz  ------------------------------ 55.06 KiB/55.06 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 47.70 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 158.91 KiB/166.60 KiB\r\noptax  ------------------------------ 269.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 282.28 KiB/361.67 KiB\r\npydantic  ------------------------------ 413.00 KiB/434.43 KiB\r\nflax  ------------------------------ 366.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 270.00 KiB/549.91 KiB\r\npygments  ------------------------------ 477.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1.06 MiB/1.91 MiB\r\narray-record  ------------------------------ 1006.03 KiB/2.35 MiB\r\njax  ------------------------------ 515.93 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 664.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 995.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 1008.24 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 1005.49 KiB/5.71 MiB\r\npillow  ------------------------------ 1.04 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 1012.81 KiB/10.31 MiB\r\nnumpy  ------------------------------ 979.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 1004.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 463.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 1.00 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 944.46 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1.04 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 1019.34 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 990.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 1011.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 1.00 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 952.46 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 955.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 1.02 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 1011.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 1.00 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... 
(34/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ntoolz  ------------------------------ 55.06 KiB/55.06 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 62.32 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 158.91 KiB/166.60 KiB\r\noptax  ------------------------------ 269.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 282.28 KiB/361.67 KiB\r\npydantic  ------------------------------ 413.00 KiB/434.43 KiB\r\nflax  ------------------------------ 366.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 270.00 KiB/549.91 KiB\r\npygments  ------------------------------ 477.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1.06 MiB/1.91 MiB\r\narray-record  ------------------------------ 1006.03 KiB/2.35 MiB\r\njax  ------------------------------ 515.93 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 664.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 995.56 KiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 1008.24 KiB/5.22 MiB\r\nvirtualenv  ------------------------------ 1005.49 KiB/5.71 MiB\r\npillow  ------------------------------ 1.04 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 1012.81 KiB/10.31 MiB\r\nnumpy  ------------------------------ 979.56 KiB/15.87 MiB\r\ntensorstore  ------------------------------ 1004.36 KiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 463.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 1.00 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 944.46 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1.04 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 1019.34 KiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 990.83 KiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 1011.56 KiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 1.00 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 952.46 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 955.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 1.02 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 1011.56 KiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 1.00 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠸ Preparing packages... 
(34/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 62.32 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 103.79 KiB/125.85 KiB\r\netils  ------------------------------ 158.91 KiB/166.60 KiB\r\noptax  ------------------------------ 285.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 282.28 KiB/361.67 KiB\r\npydantic  ------------------------------ 413.00 KiB/434.43 KiB\r\nflax  ------------------------------ 366.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 292.98 KiB/549.91 KiB\r\npygments  ------------------------------ 477.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1.11 MiB/1.91 MiB\r\narray-record  ------------------------------ 1.04 MiB/2.35 MiB\r\njax  ------------------------------ 515.93 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 680.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 1.00 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 1.03 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 1.03 MiB/5.71 MiB\r\npillow  ------------------------------ 1.10 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 1.03 MiB/10.31 MiB\r\nnumpy  ------------------------------ 1.00 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 1.02 MiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 463.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 1.07 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 995.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1.08 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 1.03 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 1.01 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 1.03 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 1.07 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 1011.56 KiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 1019.00 KiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 1.07 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 1.04 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 1.07 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠼ Preparing packages... 
(43/83)\r\nmdurl  ------------------------------ 9.75 KiB/9.75 KiB\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 62.32 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 119.79 KiB/125.85 KiB\r\netils  ------------------------------ 158.91 KiB/166.60 KiB\r\noptax  ------------------------------ 285.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 298.28 KiB/361.67 KiB\r\npydantic  ------------------------------ 429.00 KiB/434.43 KiB\r\nflax  ------------------------------ 382.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 292.98 KiB/549.91 KiB\r\npygments  ------------------------------ 477.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1.12 MiB/1.91 MiB\r\narray-record  ------------------------------ 1.06 MiB/2.35 MiB\r\njax  ------------------------------ 536.56 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 680.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 1.03 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 1.06 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 1.04 MiB/5.71 MiB\r\npillow  ------------------------------ 1.12 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 1.07 MiB/10.31 MiB\r\nnumpy  ------------------------------ 1.03 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 1.03 MiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 463.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 1.08 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 1011.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1.11 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 1.05 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 1.05 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 1.05 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 1.07 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 1.00 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 1.01 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 1.08 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 1.06 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 1.08 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠼ Preparing packages... 
(43/83)\r\nfilelock  ------------------------------ 15.61 KiB/15.61 KiB\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 62.32 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 119.79 KiB/125.85 KiB\r\netils  ------------------------------ 158.91 KiB/166.60 KiB\r\noptax  ------------------------------ 285.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 298.28 KiB/361.67 KiB\r\npydantic  ------------------------------ 429.00 KiB/434.43 KiB\r\nflax  ------------------------------ 382.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 292.98 KiB/549.91 KiB\r\npygments  ------------------------------ 477.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1.12 MiB/1.91 MiB\r\narray-record  ------------------------------ 1.06 MiB/2.35 MiB\r\njax  ------------------------------ 536.56 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 680.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 1.03 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 1.06 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 1.04 MiB/5.71 MiB\r\npillow  ------------------------------ 1.12 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 1.07 MiB/10.31 MiB\r\nnumpy  ------------------------------ 1.03 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 1.03 MiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 463.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 1.08 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 1011.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1.11 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 1.05 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 1.05 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 1.05 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 1.07 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 1.00 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 1.01 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 1.08 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 1.06 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 1.08 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠼ Preparing packages... 
(43/83)\r\ngitdb  ------------------------------ 61.32 KiB/61.32 KiB\r\nattrs  ------------------------------ 62.32 KiB/62.32 KiB\r\nmarkdown-it-py  ------------------------------ 62.87 KiB/85.27 KiB\r\nhumanize  ------------------------------ 119.79 KiB/125.85 KiB\r\netils  ------------------------------ 158.91 KiB/166.60 KiB\r\noptax  ------------------------------ 285.08 KiB/359.16 KiB\r\nsentry-sdk  ------------------------------ 298.28 KiB/361.67 KiB\r\npydantic  ------------------------------ 429.00 KiB/434.43 KiB\r\nflax  ------------------------------ 382.92 KiB/447.36 KiB\r\ngrain  ------------------------------ 207.66 KiB/487.72 KiB\r\norbax-checkpoint  ------------------------------ 292.98 KiB/549.91 KiB\r\npygments  ------------------------------ 477.23 KiB/1.17 MiB\r\npydantic-core  ------------------------------ 1.12 MiB/1.91 MiB\r\narray-record  ------------------------------ 1.06 MiB/2.35 MiB\r\njax  ------------------------------ 536.56 KiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 680.56 KiB/3.33 MiB\r\nml-dtypes  ------------------------------ 1.03 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 1.06 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 1.04 MiB/5.71 MiB\r\npillow  ------------------------------ 1.12 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 1.07 MiB/10.31 MiB\r\nnumpy  ------------------------------ 1.03 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 1.03 MiB/18.63 MiB\r\nwandb  ------------------------------ 235.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 463.11 KiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 1.08 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 1011.56 KiB/38.67 MiB\r\njaxlib  ------------------------------ 1.11 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 1.05 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 1.05 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 1.05 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 1.07 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 1.00 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 1.01 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 1.08 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 1.06 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 1.08 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\n⠼ Preparing packages... 
(43/83)
[uv progress-bar residue condensed: CSV rows 4604–4610 (terminal_output, t≈7885921–7886574 ms) repeat near-identical carriage-return download frames while uv prepares 83 packages — jax, jaxlib, jax-cuda12-plugin, jax-cuda12-pjrt, flax, optax, orbax-checkpoint, grain, array-record, wandb, numpy, scipy, tensorstore, ml-dtypes, pillow, virtualenv, pygments, pydantic-core, sentry-sdk, and the NVIDIA CUDA 12 wheels (cudnn, cublas, cusparse, cusolver, nccl, cufft, nvshmem, nvjitlink, nvcc, nvrtc, cupti, runtime). Progress advances from 43/83 to 57/83, and "Building jasmine @ file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine" completes as "Built jasmine".]
⠇ Preparing packages... 
(57/83)\r\norbax-checkpoint  ------------------------------ 497.29 KiB/549.91 KiB\r\npygments  ------------------------------ 925.23 KiB/1.17 MiB\r\njax  ------------------------------ 1.25 MiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 2.47 MiB/3.33 MiB\r\nml-dtypes  ------------------------------ 2.45 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 2.51 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 2.51 MiB/5.71 MiB\r\npillow  ------------------------------ 2.60 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 2.55 MiB/10.31 MiB\r\nnumpy  ------------------------------ 2.52 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 2.48 MiB/18.63 MiB\r\nwandb  ------------------------------ 691.93 KiB/18.66 MiB\r\nscipy  ------------------------------ 2.07 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 2.52 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 2.45 MiB/38.67 MiB\r\njaxlib  ------------------------------ 2.55 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 2.47 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 2.51 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 2.48 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 2.58 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 2.39 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 2.48 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 2.54 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 2.48 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 2.58 MiB/614.37 MiB ",,terminal_output +4611,7886689,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠇ Preparing packages... 
(57/83)\r\norbax-checkpoint  ------------------------------ 513.29 KiB/549.91 KiB\r\npygments  ------------------------------ 941.23 KiB/1.17 MiB\r\njax  ------------------------------ 1.33 MiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 2.61 MiB/3.33 MiB\r\nml-dtypes  ------------------------------ 2.65 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 2.69 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 2.65 MiB/5.71 MiB\r\npillow  ------------------------------ 2.74 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 2.71 MiB/10.31 MiB\r\nnumpy  ------------------------------ 2.69 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 2.66 MiB/18.63 MiB\r\nwandb  ------------------------------ 735.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 2.40 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 2.66 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 2.61 MiB/38.67 MiB\r\njaxlib  ------------------------------ 2.72 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 2.64 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 2.70 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 2.65 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 2.77 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 2.54 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 2.65 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 2.71 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 2.62 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 2.77 MiB/614.37 MiB ",,terminal_output +4612,7886805,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠇ Preparing packages... 
(57/83)\r\norbax-checkpoint  ------------------------------ 513.29 KiB/549.91 KiB\r\npygments  ------------------------------ 1002.58 KiB/1.17 MiB\r\njax  ------------------------------ 1.35 MiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 2.79 MiB/3.33 MiB\r\nml-dtypes  ------------------------------ 2.82 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 2.87 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 2.82 MiB/5.71 MiB\r\npillow  ------------------------------ 2.93 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 2.88 MiB/10.31 MiB\r\nnumpy  ------------------------------ 2.86 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 2.85 MiB/18.63 MiB\r\nwandb  ------------------------------ 767.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 2.45 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 2.83 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 2.79 MiB/38.67 MiB\r\njaxlib  ------------------------------ 2.92 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 2.83 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 2.87 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 2.83 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 2.94 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 2.73 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 2.82 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 2.91 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 2.77 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 2.94 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... 
(58/83)\r\norbax-checkpoint  ------------------------------ 513.29 KiB/549.91 KiB\r\npygments  ------------------------------ 1002.58 KiB/1.17 MiB\r\njax  ------------------------------ 1.39 MiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 2.96 MiB/3.33 MiB\r\nml-dtypes  ------------------------------ 3.01 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 3.03 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 2.99 MiB/5.71 MiB\r\npillow  ------------------------------ 3.08 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 3.07 MiB/10.31 MiB\r\nnumpy  ------------------------------ 3.03 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 3.01 MiB/18.63 MiB\r\nwandb  ------------------------------ 799.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 2.46 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 3.02 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 2.96 MiB/38.67 MiB\r\njaxlib  ------------------------------ 3.06 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 3.02 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 3.01 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 3.04 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 3.09 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 2.90 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 2.99 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 3.09 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 2.93 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 3.09 MiB/614.37 MiB ",,terminal_output +4613,7887088,"pyproject.toml",0,0,"[project]\nname = ""jasmine""\nversion = ""0.1.0""\nrequires-python = "">=3.11""\ndependencies = [\n ""dm-pix>=0.4.3"",\n ""einops>=0.8.0"",\n ""flax>=0.10.7"",\n ""jax[cuda12]>=0.6.2"",\n ""optax>=0.2.3"",\n ""tyro>=0.8.5"",\n ""wandb>=0.17.4"",\n ""grain>=0.2.10"",\n ""array-record>=0.7.2"",\n ""pre-commit>=4.2.0"",\n ""pillow>=11.3.0"",\n]\n\n[build-system]\nrequires = [""uv_build>=0.8.22,<0.9.0""]\nbuild-backend = ""uv_build""\n\n[tool.uv.build-backend]\nmodule-root = """"",plaintext,tab +4614,7887090,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... 
(58/83)\r\norbax-checkpoint  ------------------------------ 544.21 KiB/549.91 KiB\r\npygments  ------------------------------ 1.01 MiB/1.17 MiB\r\njax  ------------------------------ 1.55 MiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 3.16 MiB/3.33 MiB\r\nml-dtypes  ------------------------------ 3.20 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 3.21 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 3.15 MiB/5.71 MiB\r\npillow  ------------------------------ 3.28 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 3.24 MiB/10.31 MiB\r\nnumpy  ------------------------------ 3.24 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 3.20 MiB/18.63 MiB\r\nwandb  ------------------------------ 831.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 2.51 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 3.19 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 3.16 MiB/38.67 MiB\r\njaxlib  ------------------------------ 3.26 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 3.17 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 3.22 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 3.24 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 3.27 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 3.08 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 3.19 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 3.27 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 3.12 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 3.30 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... 
(58/83)\r\npygments  ------------------------------ 1.01 MiB/1.17 MiB\r\njax  ------------------------------ 1.58 MiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 3.16 MiB/3.33 MiB\r\nml-dtypes  ------------------------------ 3.20 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 3.21 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 3.15 MiB/5.71 MiB\r\npillow  ------------------------------ 3.28 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 3.24 MiB/10.31 MiB\r\nnumpy  ------------------------------ 3.24 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 3.20 MiB/18.63 MiB\r\nwandb  ------------------------------ 831.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 2.51 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 3.19 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 3.16 MiB/38.67 MiB\r\njaxlib  ------------------------------ 3.26 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 3.17 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 3.22 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 3.24 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 3.27 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 3.08 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 3.19 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 3.27 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 3.12 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 3.30 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... (58/83)\r\npygments  ------------------------------ 1.04 MiB/1.17 MiB\r\njax  ------------------------------ 1.60 MiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 3.27 MiB/3.33 MiB\r\nml-dtypes  ------------------------------ 3.30 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 3.31 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 3.22 MiB/5.71 MiB\r\npillow  ------------------------------ 3.39 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 3.32 MiB/10.31 MiB\r\nnumpy  ------------------------------ 3.34 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 3.30 MiB/18.63 MiB\r\nwandb  ------------------------------ 924.04 KiB/18.66 MiB\r\nscipy  ------------------------------ 2.73 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 3.29 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 3.27 MiB/38.67 MiB\r\njaxlib  ------------------------------ 3.39 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 3.27 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 3.31 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 3.32 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 3.36 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 3.16 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 3.30 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 3.38 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 3.21 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 3.39 MiB/614.37 MiB ",,terminal_output +4615,7887090,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠙ Preparing packages... 
(59/83)\r\npygments  ------------------------------ 1.06 MiB/1.17 MiB\r\njax  ------------------------------ 1.64 MiB/2.70 MiB\r\nnvidia-cuda-runtime-cu12 ------------------------------ 3.33 MiB/3.33 MiB\r\nml-dtypes  ------------------------------ 3.51 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 3.53 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 3.42 MiB/5.71 MiB\r\npillow  ------------------------------ 3.60 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 3.52 MiB/10.31 MiB\r\nnumpy  ------------------------------ 3.55 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 3.50 MiB/18.63 MiB\r\nwandb  ------------------------------ 940.04 KiB/18.66 MiB\r\nscipy  ------------------------------ 2.95 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 3.49 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 3.47 MiB/38.67 MiB\r\njaxlib  ------------------------------ 3.58 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 3.47 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 3.51 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 3.54 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 3.58 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 3.35 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 3.51 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 3.57 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 3.41 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 3.58 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠙ Preparing packages... (59/83)\r\npygments  ------------------------------ 1.06 MiB/1.17 MiB\r\njax  ------------------------------ 1.64 MiB/2.70 MiB\r\nml-dtypes  ------------------------------ 3.51 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 3.53 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 3.42 MiB/5.71 MiB\r\npillow  ------------------------------ 3.60 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 3.52 MiB/10.31 MiB\r\nnumpy  ------------------------------ 3.55 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 3.50 MiB/18.63 MiB\r\nwandb  ------------------------------ 940.04 KiB/18.66 MiB\r\nscipy  ------------------------------ 2.95 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 3.49 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 3.47 MiB/38.67 MiB\r\njaxlib  ------------------------------ 3.58 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 3.47 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 3.51 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 3.54 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 3.58 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 3.35 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 3.51 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 3.57 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 3.41 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 3.58 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠙ Preparing packages... 
(59/83)\r\npygments  ------------------------------ 1.07 MiB/1.17 MiB\r\njax  ------------------------------ 1.67 MiB/2.70 MiB\r\nml-dtypes  ------------------------------ 3.67 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 3.70 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 3.58 MiB/5.71 MiB\r\npillow  ------------------------------ 3.75 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 3.69 MiB/10.31 MiB\r\nnumpy  ------------------------------ 3.71 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 3.68 MiB/18.63 MiB\r\nwandb  ------------------------------ 940.04 KiB/18.66 MiB\r\nscipy  ------------------------------ 3.22 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 3.67 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 3.61 MiB/38.67 MiB\r\njaxlib  ------------------------------ 3.75 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 3.63 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 3.69 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 3.70 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 3.71 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 3.53 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 3.68 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 3.73 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 3.59 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 3.76 MiB/614.37 MiB ",,terminal_output +4616,7887233,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠙ Preparing packages... (59/83)\r\npygments  ------------------------------ 1.09 MiB/1.17 MiB\r\njax  ------------------------------ 1.78 MiB/2.70 MiB\r\nml-dtypes  ------------------------------ 3.90 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 3.95 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 3.83 MiB/5.71 MiB\r\npillow  ------------------------------ 3.96 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 3.89 MiB/10.31 MiB\r\nnumpy  ------------------------------ 3.89 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 3.90 MiB/18.63 MiB\r\nwandb  ------------------------------ 959.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 3.25 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 3.89 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 3.84 MiB/38.67 MiB\r\njaxlib  ------------------------------ 3.97 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 3.86 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 3.94 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 3.93 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 3.95 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 3.74 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 3.89 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 3.97 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 3.81 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 3.98 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠙ Preparing packages... 
(59/83)\r\npygments  ------------------------------ 1.10 MiB/1.17 MiB\r\njax  ------------------------------ 1.87 MiB/2.70 MiB\r\nml-dtypes  ------------------------------ 4.11 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 4.17 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 4.07 MiB/5.71 MiB\r\npillow  ------------------------------ 4.13 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 4.08 MiB/10.31 MiB\r\nnumpy  ------------------------------ 3.93 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 4.14 MiB/18.63 MiB\r\nwandb  ------------------------------ 975.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 3.32 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 4.09 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 4.03 MiB/38.67 MiB\r\njaxlib  ------------------------------ 4.16 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 4.09 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 4.15 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 4.14 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 4.13 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 3.96 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 4.08 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 4.15 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 4.00 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 4.20 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠙ Preparing packages... (59/83)\r\npygments  ------------------------------ 1.10 MiB/1.17 MiB\r\njax  ------------------------------ 1.95 MiB/2.70 MiB\r\nml-dtypes  ------------------------------ 4.31 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 4.40 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 4.26 MiB/5.71 MiB\r\npillow  ------------------------------ 4.33 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 4.27 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.03 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 4.32 MiB/18.63 MiB\r\nwandb  ------------------------------ 991.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 3.39 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 4.30 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 4.25 MiB/38.67 MiB\r\njaxlib  ------------------------------ 4.36 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 4.28 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 4.39 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 4.34 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 4.33 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 4.15 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 4.27 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 4.34 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 4.20 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 4.38 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠹ Preparing packages... 
(60/83)\r\npygments  ------------------------------ 1.12 MiB/1.17 MiB\r\njax  ------------------------------ 1.97 MiB/2.70 MiB\r\nml-dtypes  ------------------------------ 4.48 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 4.61 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 4.42 MiB/5.71 MiB\r\npillow  ------------------------------ 4.49 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 4.45 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.06 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 4.54 MiB/18.63 MiB\r\nwandb  ------------------------------ 1023.17 KiB/18.66 MiB\r\nscipy  ------------------------------ 3.41 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 4.51 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 4.45 MiB/38.67 MiB\r\njaxlib  ------------------------------ 4.51 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 4.47 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 4.56 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 4.50 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 4.51 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 4.30 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 4.45 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 4.51 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 4.37 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 4.57 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠹ Preparing packages... (60/83)\r\npygments  ------------------------------ 1.13 MiB/1.17 MiB\r\njax  ------------------------------ 1.98 MiB/2.70 MiB\r\nml-dtypes  ------------------------------ 4.62 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 4.78 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 4.61 MiB/5.71 MiB\r\npillow  ------------------------------ 4.64 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 4.62 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.09 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 4.70 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.01 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.05 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 4.66 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 4.62 MiB/38.67 MiB\r\njaxlib  ------------------------------ 4.68 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 4.63 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 4.70 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 4.65 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 4.69 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 4.46 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 4.63 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 4.68 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 4.51 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 4.75 MiB/614.37 MiB ",,terminal_output +4617,7887373,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠹ Preparing packages... 
(60/83)\r\njax  ------------------------------ 1.98 MiB/2.70 MiB\r\nml-dtypes  ------------------------------ 4.70 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 4.86 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 4.68 MiB/5.71 MiB\r\npillow  ------------------------------ 4.72 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 4.68 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.09 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 4.78 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.01 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.06 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 4.74 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 4.70 MiB/38.67 MiB\r\njaxlib  ------------------------------ 4.73 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 4.71 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 4.76 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 4.74 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 4.77 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 4.54 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 4.70 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 4.77 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 4.59 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 4.83 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠹ Preparing packages... (60/83)\r\njax  ------------------------------ 1.99 MiB/2.70 MiB\r\nml-dtypes  ------------------------------ 4.70 MiB/4.70 MiB\r\njax-cuda12-plugin  ------------------------------ 4.91 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 4.75 MiB/5.71 MiB\r\npillow  ------------------------------ 4.77 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 4.74 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.10 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 4.82 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.01 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.08 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 4.79 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 4.73 MiB/38.67 MiB\r\njaxlib  ------------------------------ 4.78 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 4.76 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 4.83 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 4.78 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 4.83 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 4.59 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 4.76 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 4.81 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 4.63 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 4.89 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠹ Preparing packages... 
(60/83)\r\njax  ------------------------------ 1.99 MiB/2.70 MiB\r\njax-cuda12-plugin  ------------------------------ 4.91 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 4.75 MiB/5.71 MiB\r\npillow  ------------------------------ 4.77 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 4.74 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.10 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 4.82 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.01 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.12 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 4.79 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 4.73 MiB/38.67 MiB\r\njaxlib  ------------------------------ 4.78 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 4.76 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 4.83 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 4.78 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 4.83 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 4.59 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 4.76 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 4.81 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 4.63 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 4.89 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... (62/83)\r\njax  ------------------------------ 2.01 MiB/2.70 MiB\r\njax-cuda12-plugin  ------------------------------ 5.01 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 4.82 MiB/5.71 MiB\r\npillow  ------------------------------ 4.84 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 4.82 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.14 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 4.92 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.03 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.14 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 4.88 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 4.84 MiB/38.67 MiB\r\njaxlib  ------------------------------ 4.87 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 4.84 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 4.92 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 4.85 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 4.94 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 4.66 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 4.82 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 4.90 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 4.73 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 4.97 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... 
(62/83)\r\njax  ------------------------------ 2.04 MiB/2.70 MiB\r\njax-cuda12-plugin  ------------------------------ 5.15 MiB/5.22 MiB\r\nvirtualenv  ------------------------------ 4.98 MiB/5.71 MiB\r\npillow  ------------------------------ 4.97 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 4.99 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.17 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 5.07 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.04 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.32 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 5.01 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 4.96 MiB/38.67 MiB\r\njaxlib  ------------------------------ 5.06 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 5.00 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 5.08 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 4.99 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 5.08 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 4.81 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 4.99 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 5.05 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 4.88 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 5.10 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... (62/83)\r\njax  ------------------------------ 2.08 MiB/2.70 MiB\r\nvirtualenv  ------------------------------ 5.14 MiB/5.71 MiB\r\npillow  ------------------------------ 5.10 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 5.14 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.22 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 5.23 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.06 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.34 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 5.15 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 5.13 MiB/38.67 MiB\r\njaxlib  ------------------------------ 5.23 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 5.13 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 5.25 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 5.15 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 5.22 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 4.96 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 5.13 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 5.21 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 5.04 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 5.25 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... 
(62/83)\r\njax  ------------------------------ 2.08 MiB/2.70 MiB\r\nvirtualenv  ------------------------------ 5.15 MiB/5.71 MiB\r\npillow  ------------------------------ 5.10 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 5.17 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.23 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 5.25 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.06 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.34 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 5.16 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 5.13 MiB/38.67 MiB\r\njaxlib  ------------------------------ 5.23 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 5.15 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 5.25 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 5.16 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 5.24 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 4.98 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 5.15 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 5.24 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 5.07 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 5.28 MiB/614.37 MiB ",,terminal_output +4618,7887467,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... (62/83)\r\njax  ------------------------------ 2.11 MiB/2.70 MiB\r\nvirtualenv  ------------------------------ 5.27 MiB/5.71 MiB\r\npillow  ------------------------------ 5.23 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 5.28 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.33 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 5.39 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.07 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.37 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 5.32 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 5.27 MiB/38.67 MiB\r\njaxlib  ------------------------------ 5.39 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 5.26 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 5.39 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 5.27 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 5.36 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 5.10 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 5.31 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 5.38 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 5.19 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 5.41 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... 
(62/83)\r\njax  ------------------------------ 2.15 MiB/2.70 MiB\r\nvirtualenv  ------------------------------ 5.37 MiB/5.71 MiB\r\npillow  ------------------------------ 5.35 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 5.41 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.48 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 5.48 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.12 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.37 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 5.42 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 5.40 MiB/38.67 MiB\r\njaxlib  ------------------------------ 5.50 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 5.40 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 5.50 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 5.41 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 5.47 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 5.18 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 5.42 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 5.50 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 5.30 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 5.52 MiB/614.37 MiB ",,terminal_output +4619,7887535,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... (63/83)\r\njax  ------------------------------ 2.28 MiB/2.70 MiB\r\nvirtualenv  ------------------------------ 5.49 MiB/5.71 MiB\r\npillow  ------------------------------ 5.45 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 5.52 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.65 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 5.60 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.14 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.37 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 5.50 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 5.52 MiB/38.67 MiB\r\njaxlib  ------------------------------ 5.62 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 5.52 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 5.64 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 5.55 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 5.57 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 5.27 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 5.53 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 5.62 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 5.43 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 5.62 MiB/614.37 MiB ",,terminal_output +4620,7887695,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... 
(63/83)\r\njax  ------------------------------ 2.36 MiB/2.70 MiB\r\nvirtualenv  ------------------------------ 5.62 MiB/5.71 MiB\r\npillow  ------------------------------ 5.59 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 5.64 MiB/10.31 MiB\r\nnumpy  ------------------------------ 4.90 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 5.75 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.15 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.39 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 5.60 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 5.63 MiB/38.67 MiB\r\njaxlib  ------------------------------ 5.74 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 5.66 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 5.76 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 5.67 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 5.70 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 5.39 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 5.64 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 5.73 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 5.54 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 5.76 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... (63/83)\r\njax  ------------------------------ 2.40 MiB/2.70 MiB\r\nvirtualenv  ------------------------------ 5.70 MiB/5.71 MiB\r\npillow  ------------------------------ 5.71 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 5.75 MiB/10.31 MiB\r\nnumpy  ------------------------------ 5.00 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 5.82 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.17 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.39 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 5.75 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 5.73 MiB/38.67 MiB\r\njaxlib  ------------------------------ 5.86 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 5.79 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 5.90 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 5.81 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 5.82 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 5.51 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 5.80 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 5.86 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 5.67 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 5.87 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... 
(63/83)\r\njax  ------------------------------ 2.42 MiB/2.70 MiB\r\npillow  ------------------------------ 5.76 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 5.79 MiB/10.31 MiB\r\nnumpy  ------------------------------ 5.00 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 5.87 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.18 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.39 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 5.80 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 5.77 MiB/38.67 MiB\r\njaxlib  ------------------------------ 5.90 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 5.83 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 5.94 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 5.84 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 5.87 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 5.54 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 5.85 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 5.89 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 5.70 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 5.92 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... (63/83)\r\njax  ------------------------------ 2.45 MiB/2.70 MiB\r\npillow  ------------------------------ 5.82 MiB/6.34 MiB\r\nnvidia-cuda-cupti-cu12  ------------------------------ 5.81 MiB/10.31 MiB\r\nnumpy  ------------------------------ 5.01 MiB/15.87 MiB\r\ntensorstore  ------------------------------ 5.90 MiB/18.63 MiB\r\nwandb  ------------------------------ 1.20 MiB/18.66 MiB\r\nscipy  ------------------------------ 4.39 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 5.84 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 5.82 MiB/38.67 MiB\r\njaxlib  ------------------------------ 5.95 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 5.89 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 5.98 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 5.87 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 5.92 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 5.57 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 5.89 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 5.93 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 5.75 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 5.98 MiB/614.37 MiB ",,terminal_output +4621,7887772,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠴ Preparing packages... 
(64/83)\r\njax  ------------------------------ 2.58 MiB/2.70 MiB\r\n…\r\nnvidia-cudnn-cu12  ------------------------------ 6.12 MiB/614.37 MiB ",,terminal_output
[events 4622–4650 elided: near-identical uv "Preparing packages" terminal redraws; the counter advances from (64/83) to (68/83) while downloads of jax, jaxlib, numpy, scipy, pillow, tensorstore, wandb, jax-cuda12-pjrt and the nvidia-* CUDA wheels grow from roughly 6 MiB to 20 MiB each]
+4651,7890363,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... (68/83)\r\nnumpy  ------------------------------ 12.89 MiB/15.87 MiB\r\n…\r\nnvidia-cudnn-cu12  ------------------------------ 19.84 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... 
(68/83)\r\nnumpy  ------------------------------ 13.15 MiB/15.87 MiB\r\nwandb  ------------------------------ 17.38 MiB/18.66 MiB\r\nscipy  ------------------------------ 17.20 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 20.10 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 20.12 MiB/38.67 MiB\r\njaxlib  ------------------------------ 20.48 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 19.84 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 20.40 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 19.64 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 20.35 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 19.70 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 20.15 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 20.12 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 20.00 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 20.14 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... (68/83)\r\nnumpy  ------------------------------ 13.48 MiB/15.87 MiB\r\nwandb  ------------------------------ 17.71 MiB/18.66 MiB\r\nscipy  ------------------------------ 17.94 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 20.48 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 20.43 MiB/38.67 MiB\r\njaxlib  ------------------------------ 20.81 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 20.19 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 20.74 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 19.96 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 20.71 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 20.03 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 20.48 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 20.48 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 20.35 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 20.48 MiB/614.37 MiB ",,terminal_output +4652,7890460,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... (68/83)\r\nnumpy  ------------------------------ 13.81 MiB/15.87 MiB\r\nwandb  ------------------------------ 17.77 MiB/18.66 MiB\r\nscipy  ------------------------------ 19.34 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 20.79 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 20.75 MiB/38.67 MiB\r\njaxlib  ------------------------------ 21.12 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 20.47 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 21.05 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 20.29 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 21.00 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 20.37 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 20.81 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 20.84 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 20.66 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 20.80 MiB/614.37 MiB ",,terminal_output +4653,7890516,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... 
(68/83)\r\nnumpy  ------------------------------ 14.10 MiB/15.87 MiB\r\nwandb  ------------------------------ 17.79 MiB/18.66 MiB\r\nscipy  ------------------------------ 19.49 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 21.13 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 21.05 MiB/38.67 MiB\r\njaxlib  ------------------------------ 21.40 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 20.75 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 21.35 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 20.61 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 21.33 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 20.65 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 21.12 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 21.15 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 20.96 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 21.14 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠙ Preparing packages... (68/83)\r\nnumpy  ------------------------------ 14.50 MiB/15.87 MiB\r\nwandb  ------------------------------ 17.84 MiB/18.66 MiB\r\nscipy  ------------------------------ 20.24 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 21.54 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 21.47 MiB/38.67 MiB\r\njaxlib  ------------------------------ 21.79 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 21.16 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 21.69 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 21.03 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 21.71 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 21.03 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 21.50 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 21.53 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 21.33 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 21.51 MiB/614.37 MiB ",,terminal_output +4654,7890574,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠙ Preparing packages... (68/83)\r\nnumpy  ------------------------------ 14.83 MiB/15.87 MiB\r\nwandb  ------------------------------ 17.88 MiB/18.66 MiB\r\nscipy  ------------------------------ 20.25 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 21.90 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 21.82 MiB/38.67 MiB\r\njaxlib  ------------------------------ 22.14 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 21.50 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 22.02 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 21.41 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 22.04 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 21.37 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 21.87 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 21.89 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 21.68 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 21.82 MiB/614.37 MiB ",,terminal_output +4655,7890690,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠙ Preparing packages... 
(68/83)\r\nnumpy  ------------------------------ 15.19 MiB/15.87 MiB\r\nwandb  ------------------------------ 17.95 MiB/18.66 MiB\r\nscipy  ------------------------------ 20.31 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 22.28 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 22.19 MiB/38.67 MiB\r\njaxlib  ------------------------------ 22.50 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 21.89 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 22.37 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 21.75 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 22.40 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 21.77 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 22.24 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 22.25 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 22.04 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 22.21 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠙ Preparing packages... (68/83)\r\nnumpy  ------------------------------ 15.55 MiB/15.87 MiB\r\nwandb  ------------------------------ 17.98 MiB/18.66 MiB\r\nscipy  ------------------------------ 20.36 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 22.68 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 22.52 MiB/38.67 MiB\r\njaxlib  ------------------------------ 22.83 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 22.28 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 22.74 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 22.12 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 22.77 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 22.15 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 22.62 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 22.65 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 22.41 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 22.56 MiB/614.37 MiB ",,terminal_output +4656,7890771,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠹ Preparing packages... (68/83)\r\nnumpy  ------------------------------ 15.75 MiB/15.87 MiB\r\nwandb  ------------------------------ 18.02 MiB/18.66 MiB\r\nscipy  ------------------------------ 20.39 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 23.05 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 22.91 MiB/38.67 MiB\r\njaxlib  ------------------------------ 23.25 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 22.65 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 23.14 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 22.44 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 23.13 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 22.53 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 23.00 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 23.00 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 22.74 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 22.95 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠹ Preparing packages... 
(68/83)\r\nnumpy  ------------------------------ 15.87 MiB/15.87 MiB\r\nwandb  ------------------------------ 18.02 MiB/18.66 MiB\r\nscipy  ------------------------------ 20.39 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 23.43 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 23.28 MiB/38.67 MiB\r\njaxlib  ------------------------------ 23.62 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 23.04 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 23.52 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 22.82 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 23.52 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 22.91 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 23.43 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 23.39 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 23.16 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 23.36 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠹ Preparing packages... (68/83)\r\nwandb  ------------------------------ 18.02 MiB/18.66 MiB\r\nscipy  ------------------------------ 20.39 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 23.44 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 23.30 MiB/38.67 MiB\r\njaxlib  ------------------------------ 23.62 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 23.06 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 23.54 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 22.82 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 23.54 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 22.92 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 23.43 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 23.40 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 23.17 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 23.36 MiB/614.37 MiB ",,terminal_output +4657,7890841,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠹ Preparing packages... (68/83)\r\nwandb  ------------------------------ 18.08 MiB/18.66 MiB\r\nscipy  ------------------------------ 20.39 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 23.75 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 23.59 MiB/38.67 MiB\r\njaxlib  ------------------------------ 23.92 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 23.37 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 23.83 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 23.12 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 23.84 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 23.21 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 23.74 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 23.67 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 23.49 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 23.66 MiB/614.37 MiB ",,terminal_output +4658,7890899,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠹ Preparing packages... 
(68/83)\r\nwandb  ------------------------------ 18.08 MiB/18.66 MiB\r\nscipy  ------------------------------ 20.39 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 24.09 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 23.90 MiB/38.67 MiB\r\njaxlib  ------------------------------ 24.23 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 23.66 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 24.15 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 23.45 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 24.13 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 23.51 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 24.06 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 23.99 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 23.80 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 24.01 MiB/614.37 MiB ",,terminal_output +4659,7890970,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.10 MiB/18.66 MiB\r\nscipy  ------------------------------ 20.44 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 24.40 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 24.21 MiB/38.67 MiB\r\njaxlib  ------------------------------ 24.49 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 23.93 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 24.43 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 23.74 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 24.41 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 23.78 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 24.37 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 24.27 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 24.06 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 24.26 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.11 MiB/18.66 MiB\r\nscipy  ------------------------------ 21.98 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 24.71 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 24.55 MiB/38.67 MiB\r\njaxlib  ------------------------------ 24.79 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 24.25 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 24.74 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 24.03 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 24.71 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 24.11 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 24.68 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 24.59 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 24.37 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 24.61 MiB/614.37 MiB ",,terminal_output +4660,7891027,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... 
(69/83)\r\nwandb  ------------------------------ 18.11 MiB/18.66 MiB\r\nscipy  ------------------------------ 22.36 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 24.98 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 24.83 MiB/38.67 MiB\r\njaxlib  ------------------------------ 25.06 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 24.54 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 25.05 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 24.30 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 24.99 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 24.39 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 24.96 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 24.88 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 24.63 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 24.87 MiB/614.37 MiB ",,terminal_output +4661,7891131,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.15 MiB/18.66 MiB\r\nscipy  ------------------------------ 22.62 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 25.00 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 25.16 MiB/38.67 MiB\r\njaxlib  ------------------------------ 25.42 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 24.92 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 25.41 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 24.62 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 25.34 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 24.73 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 25.29 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 25.23 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 24.99 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 25.21 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.18 MiB/18.66 MiB\r\nscipy  ------------------------------ 23.53 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 25.00 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 25.51 MiB/38.67 MiB\r\njaxlib  ------------------------------ 25.71 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 25.26 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 25.76 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 24.98 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 25.70 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 25.09 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 25.62 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 25.59 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 25.29 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 25.58 MiB/614.37 MiB ",,terminal_output +4662,7891263,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... 
(69/83)\r\nwandb  ------------------------------ 18.21 MiB/18.66 MiB\r\nscipy  ------------------------------ 23.64 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 25.00 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 25.88 MiB/38.67 MiB\r\njaxlib  ------------------------------ 26.04 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 25.58 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 26.13 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 25.33 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 26.03 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 25.45 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 25.96 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 25.95 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 25.66 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 25.94 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.24 MiB/18.66 MiB\r\nscipy  ------------------------------ 24.22 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 25.31 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 26.17 MiB/38.67 MiB\r\njaxlib  ------------------------------ 26.38 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 25.92 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 26.44 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 25.63 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 26.36 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 25.74 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 26.27 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 26.25 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 25.97 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 26.24 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.29 MiB/18.66 MiB\r\nscipy  ------------------------------ 24.31 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 25.51 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 26.39 MiB/38.67 MiB\r\njaxlib  ------------------------------ 26.54 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 26.14 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 26.67 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 25.84 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 26.59 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 25.98 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 26.52 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 26.47 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 26.19 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 26.47 MiB/614.37 MiB ",,terminal_output +4663,7891327,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠴ Preparing packages... 
(69/83)\r\nwandb  ------------------------------ 18.32 MiB/18.66 MiB\r\nscipy  ------------------------------ 24.50 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 25.91 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 26.81 MiB/38.67 MiB\r\njaxlib  ------------------------------ 26.70 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 26.50 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 27.05 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 26.16 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 26.73 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 26.34 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 26.91 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 26.70 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 26.59 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 26.88 MiB/614.37 MiB ",,terminal_output +4664,7891603,"jasmine/sample.py",0,0,"",python,tab +4665,7891623,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠴ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.33 MiB/18.66 MiB\r\nscipy  ------------------------------ 24.68 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 26.21 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 27.12 MiB/38.67 MiB\r\njaxlib  ------------------------------ 27.06 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 26.84 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 27.37 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 26.48 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 27.06 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 26.69 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 27.25 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 27.02 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 26.89 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 27.23 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠴ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.35 MiB/18.66 MiB\r\nscipy  ------------------------------ 24.79 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 26.49 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 27.45 MiB/38.67 MiB\r\njaxlib  ------------------------------ 27.37 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 27.12 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 27.70 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 26.78 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 27.38 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 27.00 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 27.57 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 27.36 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 27.20 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 27.54 MiB/614.37 MiB ",,terminal_output +4666,7891711,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠴ Preparing packages... 
(69/83)\r\nwandb  ------------------------------ 18.37 MiB/18.66 MiB\r\nscipy  ------------------------------ 24.79 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 26.78 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 27.74 MiB/38.67 MiB\r\njaxlib  ------------------------------ 27.67 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 27.44 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 27.98 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 27.08 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 27.69 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 27.32 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 27.87 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 27.65 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 27.50 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 27.86 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠦ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.40 MiB/18.66 MiB\r\nscipy  ------------------------------ 25.78 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 27.06 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 28.07 MiB/38.67 MiB\r\njaxlib  ------------------------------ 27.98 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 27.73 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 28.28 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 27.36 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 27.99 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 27.61 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 28.18 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 27.96 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 27.79 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 28.17 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠦ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.46 MiB/18.66 MiB\r\nscipy  ------------------------------ 26.09 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 27.39 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 28.36 MiB/38.67 MiB\r\njaxlib  ------------------------------ 28.28 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 28.04 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 28.60 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 27.67 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 28.30 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 27.93 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 28.48 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 28.22 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 28.09 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 28.47 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠦ Preparing packages... 
(69/83)\r\nwandb  ------------------------------ 18.49 MiB/18.66 MiB\r\nscipy  ------------------------------ 26.39 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 27.73 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 28.67 MiB/38.67 MiB\r\njaxlib  ------------------------------ 28.56 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 28.35 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 28.94 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 27.99 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 28.65 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 28.20 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 28.82 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 28.56 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 28.46 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 28.83 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠦ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.52 MiB/18.66 MiB\r\nscipy  ------------------------------ 26.69 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 28.07 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 28.97 MiB/38.67 MiB\r\njaxlib  ------------------------------ 28.87 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 28.67 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 29.27 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 28.33 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 28.93 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 28.52 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 29.12 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 28.87 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 28.78 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 29.13 MiB/614.37 MiB ",,terminal_output +4667,7891818,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠧ Preparing packages... (69/83)\r\nwandb  ------------------------------ 18.55 MiB/18.66 MiB\r\nscipy  ------------------------------ 27.00 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 28.40 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 29.29 MiB/38.67 MiB\r\njaxlib  ------------------------------ 29.18 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 29.01 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 29.62 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 28.64 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 29.28 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 28.83 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 29.43 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 29.23 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 29.10 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 29.44 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠧ Preparing packages... 
(69/83)\r\nwandb  ------------------------------ 18.66 MiB/18.66 MiB\r\nscipy  ------------------------------ 27.38 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 28.78 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 29.65 MiB/38.67 MiB\r\njaxlib  ------------------------------ 29.61 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 29.38 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 29.98 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 29.07 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 29.66 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 29.24 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 29.81 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 29.64 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 29.49 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 29.81 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠧ Preparing packages... (69/83)\r\nscipy  ------------------------------ 27.42 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 28.83 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 29.69 MiB/38.67 MiB\r\njaxlib  ------------------------------ 29.62 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 29.39 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 30.02 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 29.12 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 29.69 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 29.28 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 29.84 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 29.68 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 29.53 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 29.86 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r⠧ Preparing packages... (69/83)\r\nscipy  ------------------------------ 27.68 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 29.08 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 29.95 MiB/38.67 MiB\r\njaxlib  ------------------------------ 29.89 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 29.66 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 30.30 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 29.37 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 29.97 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 29.53 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 30.09 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 29.92 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 29.75 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 30.12 MiB/614.37 MiB ",,terminal_output +4668,7891931,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠧ Preparing packages... 
(69/83)\r\nscipy  ------------------------------ 28.17 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 29.53 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 30.46 MiB/38.67 MiB\r\njaxlib  ------------------------------ 30.37 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 30.16 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 30.74 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 29.84 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 30.43 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 29.98 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 30.55 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 30.43 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 30.24 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 30.62 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r⠇ Preparing packages... (70/83)\r\nscipy  ------------------------------ 28.58 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 29.99 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 30.86 MiB/38.67 MiB\r\njaxlib  ------------------------------ 30.79 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 30.57 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 31.16 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 30.27 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 30.90 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 30.40 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 31.02 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 30.85 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 30.67 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 31.05 MiB/614.37 MiB ",,terminal_output +4669,7891995,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠇ Preparing packages... (70/83)\r\nscipy  ------------------------------ 28.96 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 30.43 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 31.26 MiB/38.67 MiB\r\njaxlib  ------------------------------ 31.20 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 31.00 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 31.55 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 30.71 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 31.12 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 30.82 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 31.40 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 31.32 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 31.07 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 31.46 MiB/614.37 MiB ",,terminal_output +4670,7892080,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠇ Preparing packages... 
(70/83)\r\nscipy  ------------------------------ 29.37 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 30.80 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 31.65 MiB/38.67 MiB\r\njaxlib  ------------------------------ 31.62 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 31.41 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 31.94 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 31.12 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 31.54 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 31.22 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 31.78 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 31.71 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 31.47 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 31.87 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r⠇ Preparing packages... (70/83)\r\nscipy  ------------------------------ 29.75 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 31.20 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 32.05 MiB/38.67 MiB\r\njaxlib  ------------------------------ 32.05 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 31.86 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 32.31 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 31.52 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 31.99 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 31.60 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 32.17 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 32.11 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 31.83 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 32.32 MiB/614.37 MiB ",,terminal_output +4671,7892143,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... (70/83)\r\nscipy  ------------------------------ 30.18 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 31.62 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 32.45 MiB/38.67 MiB\r\njaxlib  ------------------------------ 32.41 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 32.26 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 32.73 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 31.90 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 32.41 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 31.98 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 32.60 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 32.50 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 32.21 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 32.72 MiB/614.37 MiB ",,terminal_output +4672,7892196,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... 
(70/83)\r\nscipy  ------------------------------ 30.59 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 31.95 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 32.84 MiB/38.67 MiB\r\njaxlib  ------------------------------ 32.80 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 32.66 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 33.14 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 32.32 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 32.82 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 32.39 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 32.99 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 32.91 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 32.65 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 33.13 MiB/614.37 MiB ",,terminal_output +4673,7892368,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... (70/83)\r\nscipy  ------------------------------ 30.99 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 32.36 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 33.25 MiB/38.67 MiB\r\njaxlib  ------------------------------ 33.24 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 33.08 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 33.58 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 32.72 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 33.04 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 32.79 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 33.42 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 33.31 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 33.03 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 33.51 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... (70/83)\r\nscipy  ------------------------------ 31.38 MiB/34.02 MiB\r\nnvidia-nvjitlink-cu12  ------------------------------ 32.71 MiB/37.91 MiB\r\nnvidia-cuda-nvcc-cu12  ------------------------------ 33.63 MiB/38.67 MiB\r\njaxlib  ------------------------------ 33.59 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 33.45 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 33.98 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 33.11 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 33.42 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 33.17 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 33.79 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 33.67 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 33.38 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 33.87 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r\r\r\r⠙ Preparing packages... 
(70/83)\r\n[uv progress frames elided: repeated redraws of the Preparing packages... download bars for scipy, jaxlib, jax-cuda12-pjrt, nvidia-nvjitlink-cu12, nvidia-cuda-nvcc-cu12, nvidia-cuda-nvrtc-cu12, nvidia-nvshmem-cu12, nvidia-cufft-cu12, nvidia-nccl-cu12, nvidia-cusolver-cu12, nvidia-cusparse-cu12, nvidia-cublas-cu12, and nvidia-cudnn-cu12]",,terminal_output
+4674-4717,7892514-7896319,"TERMINAL",0,0,"[terminal_output records 4674-4717 condensed: spinner frames redraw the same per-package download bars while progress advances from (70/83) to (73/83); largest wheels still downloading: nvidia-cudnn-cu12 614.37 MiB, nvidia-cublas-cu12 554.32 MiB, nvidia-cusparse-cu12 349.49 MiB, nvidia-cusolver-cu12 322.45 MiB, nvidia-nccl-cu12 282.19 MiB]",,terminal_output
(73/83)\r\njaxlib  ------------------------------ 74.22 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 73.45 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 73.45 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 73.60 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 73.87 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 71.92 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 72.80 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 73.47 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 73.42 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 72.37 MiB/614.37 MiB ",,terminal_output +4718,7896382,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... (73/83)\r\njaxlib  ------------------------------ 74.33 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 73.97 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 73.94 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 74.06 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 74.35 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 72.38 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 73.28 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 73.95 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 73.92 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 72.85 MiB/614.37 MiB ",,terminal_output +4719,7896443,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... (73/83)\r\njaxlib  ------------------------------ 74.44 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 74.52 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 74.42 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 74.57 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 74.89 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 72.94 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 73.80 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 74.48 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 74.40 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 73.39 MiB/614.37 MiB ",,terminal_output +4720,7896583,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r⠸ Preparing packages... (73/83)\r\njaxlib  ------------------------------ 74.49 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 75.05 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 74.96 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 75.03 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 75.41 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 73.49 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 74.36 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 75.04 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 74.93 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 73.95 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... 
(73/83)\r\njaxlib  ------------------------------ 74.55 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 75.59 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 75.52 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 75.57 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 75.97 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 74.00 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 74.92 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 75.56 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 75.50 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 74.43 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... (73/83)\r\njaxlib  ------------------------------ 74.55 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 75.98 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 76.09 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 76.00 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 76.53 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 74.56 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 75.51 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 76.10 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 76.05 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 75.00 MiB/614.37 MiB ",,terminal_output +4721,7896720,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... (73/83)\r\njaxlib  ------------------------------ 74.55 MiB/74.55 MiB\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 76.38 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 76.67 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 76.54 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 77.06 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 75.10 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 76.06 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 76.67 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 76.57 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 75.53 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... (73/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 76.56 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 76.89 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 76.74 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 77.23 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 75.30 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 76.28 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 76.86 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 76.78 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 75.75 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r⠼ Preparing packages... 
(73/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 76.85 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 77.20 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 77.05 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 77.54 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 75.56 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 76.54 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 77.15 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 77.06 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 76.03 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r⠴ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 77.41 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 77.75 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 77.58 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 78.15 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 76.10 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 77.13 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 77.72 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 77.64 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 76.59 MiB/614.37 MiB ",,terminal_output +4722,7896791,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠴ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 77.88 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 78.27 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 78.06 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 78.65 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 76.61 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 77.64 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 78.29 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 78.15 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 77.12 MiB/614.37 MiB ",,terminal_output +4723,7896839,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠴ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 78.41 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 78.81 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 78.60 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 79.19 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 77.18 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 78.20 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 78.81 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 78.69 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 77.69 MiB/614.37 MiB ",,terminal_output +4724,7896942,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠴ Preparing packages... 
(74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 78.93 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 79.34 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 79.18 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 79.75 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 77.69 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 78.71 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 79.30 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 79.24 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 78.23 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r⠦ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 79.53 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 79.84 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 79.71 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 80.28 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 78.20 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 79.27 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 79.84 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 79.76 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 78.75 MiB/614.37 MiB ",,terminal_output +4725,7897015,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠦ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 80.06 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 80.38 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 80.23 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 80.84 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 78.71 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 79.80 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 80.35 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 80.30 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 79.30 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r⠦ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 80.55 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 80.92 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 80.74 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 81.27 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 79.23 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 80.34 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 80.87 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 80.82 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 79.82 MiB/614.37 MiB ",,terminal_output +4726,7897079,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠦ Preparing packages... 
(74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 81.08 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 81.45 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 81.27 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 81.80 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 79.73 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 80.84 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 81.39 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 81.37 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 80.36 MiB/614.37 MiB ",,terminal_output +4727,7897137,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠧ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 81.65 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 82.00 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 81.73 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 82.36 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 80.25 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 81.43 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 81.94 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 81.93 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 80.90 MiB/614.37 MiB ",,terminal_output +4728,7897216,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠧ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 82.19 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 82.56 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 82.20 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 82.93 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 80.78 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 81.94 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 82.47 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 82.48 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 81.34 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r⠧ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 82.62 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 83.00 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 82.68 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 83.35 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 81.22 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 82.42 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 82.95 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 82.93 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 81.81 MiB/614.37 MiB ",,terminal_output +4729,7897280,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠧ Preparing packages... 
(74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 83.04 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 83.39 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 83.07 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 83.77 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 81.61 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 82.82 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 83.33 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 83.35 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 82.18 MiB/614.37 MiB ",,terminal_output +4730,7897387,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠇ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 83.15 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 83.50 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 83.21 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 83.89 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 81.74 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 82.95 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 83.45 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 83.47 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 82.31 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r⠇ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 83.60 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 83.79 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 83.61 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 84.16 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 81.99 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 83.40 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 83.72 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 83.93 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 82.72 MiB/614.37 MiB ",,terminal_output +4731,7897471,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠇ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 84.05 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 84.43 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 84.00 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 84.79 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 82.57 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 83.85 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 84.34 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 84.37 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 83.19 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r⠇ Preparing packages... 
(74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 84.51 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 84.90 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 84.25 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 85.25 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 82.94 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 84.28 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 84.77 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 84.86 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 83.65 MiB/614.37 MiB ",,terminal_output +4732,7897567,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 84.94 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 85.33 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 84.68 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 85.73 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 83.31 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 84.71 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 85.22 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 85.35 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 84.10 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... (74/83)\r\nnvidia-cuda-nvrtc-cu12  ------------------------------ 85.37 MiB/85.42 MiB\r\njax-cuda12-pjrt  ------------------------------ 85.72 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 85.14 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 86.17 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 83.76 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 85.14 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 85.64 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 85.74 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 84.52 MiB/614.37 MiB ",,terminal_output +4733,7897675,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r\r⠋ Preparing packages... (74/83)\r\njax-cuda12-pjrt  ------------------------------ 85.92 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 85.31 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 86.37 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 83.95 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 85.31 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 85.83 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 85.95 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 84.71 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠋ Preparing packages... 
(74/83)\r\njax-cuda12-pjrt  ------------------------------ 85.92 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 85.32 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 86.37 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 83.95 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 85.31 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 85.83 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 85.95 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 84.71 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠋ Preparing packages... (74/83)\r\njax-cuda12-pjrt  ------------------------------ 86.39 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 85.79 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 86.85 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 84.43 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 85.77 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 86.24 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 86.41 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 85.19 MiB/614.37 MiB ",,terminal_output +4734,7897782,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠙ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 86.81 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 86.15 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 87.32 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 84.86 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 86.14 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 86.69 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 86.84 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 85.63 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠙ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 87.45 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 86.81 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 87.97 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 85.42 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 86.79 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 87.19 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 87.43 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 86.26 MiB/614.37 MiB ",,terminal_output +4735,7897886,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠙ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 88.03 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 87.43 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 88.58 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 86.03 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 87.38 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 87.83 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 88.05 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 86.88 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠙ Preparing packages... 
(75/83)\r\njax-cuda12-pjrt  ------------------------------ 88.68 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 88.09 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 89.19 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 86.56 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 88.00 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 88.48 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 88.71 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 87.50 MiB/614.37 MiB ",,terminal_output +4736,7897989,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠹ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 89.29 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 88.66 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 89.75 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 87.11 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 88.55 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 89.09 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 89.28 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 88.10 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠹ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 89.85 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 89.24 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 90.34 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 87.67 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 89.08 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 89.66 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 89.84 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 88.67 MiB/614.37 MiB ",,terminal_output +4737,7898093,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠹ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 90.76 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 90.05 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 91.03 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 88.34 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 90.03 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 90.03 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 90.03 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 89.05 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠹ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 91.13 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 90.45 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 91.45 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 88.76 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 90.44 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 90.44 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 90.42 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 89.43 MiB/614.37 MiB ",,terminal_output +4738,7898198,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠸ Preparing packages... 
(75/83)\r\njax-cuda12-pjrt  ------------------------------ 91.69 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 91.01 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 92.01 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 89.30 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 91.03 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 91.01 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 90.98 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 89.99 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠸ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 91.99 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 91.32 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 92.32 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 89.58 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 91.32 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 91.31 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 91.27 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 90.26 MiB/614.37 MiB ",,terminal_output +4739,7898282,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠸ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 92.02 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 91.35 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 92.35 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 89.61 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 91.35 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 91.34 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 91.31 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 90.31 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠼ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 92.07 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 91.40 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 92.40 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 89.66 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 91.41 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 91.39 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 91.34 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 90.34 MiB/614.37 MiB ",,terminal_output +4740,7898342,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠼ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 92.08 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 91.42 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 92.42 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 89.69 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 91.43 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 91.40 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 91.35 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 90.35 MiB/614.37 MiB ",,terminal_output +4741,7898447,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠼ Preparing packages... 
(75/83)\r\njax-cuda12-pjrt  ------------------------------ 92.13 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 91.49 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 92.50 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 89.73 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 91.48 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 91.46 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 91.42 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 90.43 MiB/614.37 MiB ",,terminal_output +4742,7898495,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠼ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 92.21 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 91.57 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 92.57 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 89.83 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 91.55 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 91.54 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 91.50 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 90.50 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠼ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 92.22 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 91.57 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 92.57 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 89.83 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 91.55 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 91.54 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 91.50 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 90.50 MiB/614.37 MiB ",,terminal_output +4743,7898670,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠴ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 92.27 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 91.63 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 92.64 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 89.87 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 91.61 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 91.61 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 91.57 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 90.57 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠴ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 92.54 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 91.93 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 92.92 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 90.15 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 91.91 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 91.88 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 91.85 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 90.85 MiB/614.37 MiB ",,terminal_output +4744,7898675,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠴ Preparing packages... 
(75/83)\r\njax-cuda12-pjrt  ------------------------------ 93.02 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 92.39 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 93.37 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 90.62 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 92.36 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 92.36 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 92.32 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 91.32 MiB/614.37 MiB ",,terminal_output +4745,7898776,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠴ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 93.69 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 93.11 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 94.00 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 91.29 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 93.07 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 93.02 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 92.91 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 92.06 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠦ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 94.36 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 93.80 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 94.68 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 91.98 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 93.75 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 93.68 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 93.62 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 92.68 MiB/614.37 MiB ",,terminal_output +4746,7898826,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠦ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 95.04 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 94.49 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 95.37 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 92.71 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 94.47 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 94.11 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 94.25 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 93.46 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠦ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 95.65 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 95.12 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 96.02 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 93.37 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 95.12 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 94.77 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 94.93 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 94.12 MiB/614.37 MiB ",,terminal_output +4747,7898889,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠦ Preparing packages... 
(75/83)\r\njax-cuda12-pjrt  ------------------------------ 96.30 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 95.78 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 96.67 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 93.98 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 95.76 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 95.42 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 95.54 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 94.78 MiB/614.37 MiB ",,terminal_output +4748,7898951,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠧ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 96.97 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 96.37 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 97.41 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 94.61 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 96.48 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 96.11 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 96.18 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 95.47 MiB/614.37 MiB ",,terminal_output +4749,7899016,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠧ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 97.66 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 97.02 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 98.09 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 95.26 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 97.14 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 96.76 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 96.84 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 96.14 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠧ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 98.34 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 97.73 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 98.81 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 95.95 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 97.81 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 97.44 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 97.50 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 96.83 MiB/614.37 MiB ",,terminal_output +4750,7899087,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠧ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 99.03 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 98.40 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 99.50 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 96.56 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 98.47 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 98.14 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 98.20 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 97.47 MiB/614.37 MiB ",,terminal_output +4751,7899189,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠇ Preparing packages... 
(75/83)\r\njax-cuda12-pjrt  ------------------------------ 99.72 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 99.06 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 100.25 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 97.24 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 99.12 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 98.82 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 98.90 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 98.06 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠇ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 100.36 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 99.75 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 100.88 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 97.87 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 99.79 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 99.49 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 99.57 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 98.70 MiB/614.37 MiB ",,terminal_output +4752,7899267,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠇ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 101.06 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 100.36 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 101.62 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 98.50 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 100.49 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 100.19 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 100.26 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 99.39 MiB/614.37 MiB \r\r\r\r\r\r\r\r\r⠇ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 101.81 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 101.10 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 102.29 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 99.15 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 101.00 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 100.87 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 100.96 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 100.15 MiB/614.37 MiB ",,terminal_output +4753,7899324,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠋ Preparing packages... (75/83)\r\njax-cuda12-pjrt  ------------------------------ 102.44 MiB/126.58 MiB\r\nnvidia-nvshmem-cu12  ------------------------------ 101.85 MiB/132.66 MiB\r\nnvidia-cufft-cu12  ------------------------------ 102.99 MiB/191.57 MiB\r\nnvidia-nccl-cu12  ------------------------------ 99.84 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 101.62 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 101.55 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 101.60 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 100.85 MiB/614.37 MiB ",,terminal_output +4754,7899427,"TERMINAL",0,0,"\r\r\r\r\r\r\r\r\r⠋ Preparing packages... 
(75/83)\r\n[~75 duplicate uv progress-bar redraws elided (records 4755-4829): ""Preparing packages..."" advanced 75/83 -> 78/83 while downloading jax-cuda12-pjrt, nvidia-nvshmem-cu12, nvidia-cufft-cu12, nvidia-nccl-cu12, nvidia-cusolver-cu12, nvidia-cusparse-cu12, nvidia-cublas-cu12, nvidia-cudnn-cu12]",,terminal_output
+4830,7905641,"TERMINAL",0,0,"\r\r\r\r\r\r⠸ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 192.34 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 192.02 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 194.35 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 196.58 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 193.20 MiB/614.37 MiB \r\r\r\r\r\r⠸ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 193.47 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 193.02 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 195.39 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 197.52 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 194.39 MiB/614.37 MiB \r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 194.46 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 194.12 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 196.49 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 198.43 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 195.46 MiB/614.37 MiB ",,terminal_output +4831,7905718,"TERMINAL",0,0,"\r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 195.57 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 195.43 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 197.57 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 199.57 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 196.18 MiB/614.37 MiB \r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 196.70 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 196.55 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 198.60 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 200.69 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 196.98 MiB/614.37 MiB \r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 197.92 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 197.00 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 199.80 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 201.93 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 198.23 MiB/614.37 MiB \r\r\r\r\r\r⠴ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 199.11 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 197.00 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 201.16 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 203.20 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 199.68 MiB/614.37 MiB ",,terminal_output +4832,7905826,"TERMINAL",0,0,"\r\r\r\r\r\r⠴ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 200.20 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 197.35 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 202.68 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 204.36 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 200.90 MiB/614.37 MiB ",,terminal_output +4833,7905879,"TERMINAL",0,0,"\r\r\r\r\r\r⠴ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 201.22 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 198.39 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 203.71 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 205.37 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 202.02 MiB/614.37 MiB \r\r\r\r\r\r⠴ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 202.36 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 199.43 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 204.81 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 206.43 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 203.08 MiB/614.37 MiB ",,terminal_output +4834,7905935,"TERMINAL",0,0,"\r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 203.43 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 200.44 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 205.92 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 207.50 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 204.09 MiB/614.37 MiB ",,terminal_output +4835,7905986,"TERMINAL",0,0,"\r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 204.50 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 201.45 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 206.85 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 208.46 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 205.35 MiB/614.37 MiB ",,terminal_output +4836,7906088,"TERMINAL",0,0,"\r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 205.51 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 202.59 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 207.95 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 209.57 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 206.39 MiB/614.37 MiB \r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 206.70 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 203.65 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 208.70 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 210.86 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 207.42 MiB/614.37 MiB ",,terminal_output +4837,7906153,"TERMINAL",0,0,"\r\r\r\r\r\r⠧ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 207.80 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 204.39 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 209.80 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 212.28 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 208.39 MiB/614.37 MiB ",,terminal_output +4838,7906219,"TERMINAL",0,0,"\r\r\r\r\r\r⠧ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 208.81 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 205.38 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 210.88 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 213.36 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 209.35 MiB/614.37 MiB \r\r\r\r\r\r⠧ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 209.52 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 206.00 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 211.56 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 214.00 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 210.06 MiB/614.37 MiB ",,terminal_output +4839,7906430,"TERMINAL",0,0,"\r\r\r\r\r\r⠧ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 210.75 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 206.95 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 212.02 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 214.63 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 211.97 MiB/614.37 MiB \r\r\r\r\r\r⠇ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 211.86 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 207.79 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 213.44 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 215.85 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 212.85 MiB/614.37 MiB \r\r\r\r\r\r⠇ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 212.92 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 208.88 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 214.73 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 216.79 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 213.81 MiB/614.37 MiB \r\r\r\r\r\r⠇ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 214.04 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 209.91 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 215.95 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 218.05 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 214.56 MiB/614.37 MiB ",,terminal_output +4840,7906483,"TERMINAL",0,0,"\r\r\r\r\r\r⠇ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 215.05 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 210.90 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 216.95 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 219.26 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 215.69 MiB/614.37 MiB ",,terminal_output +4841,7906563,"TERMINAL",0,0,"\r\r\r\r\r\r⠋ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 216.06 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 211.97 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 218.00 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 220.32 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 216.84 MiB/614.37 MiB \r\r\r\r\r\r⠋ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 217.24 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 213.08 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 219.02 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 221.46 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 217.71 MiB/614.37 MiB ",,terminal_output +4842,7906668,"TERMINAL",0,0,"\r\r\r\r\r\r⠋ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 218.17 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 214.02 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 220.11 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 222.76 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 218.72 MiB/614.37 MiB \r\r\r\r\r\r⠋ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 219.22 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 215.12 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 221.17 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 223.88 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 219.68 MiB/614.37 MiB ",,terminal_output +4843,7906830,"TERMINAL",0,0,"\r\r\r\r\r\r⠙ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 220.28 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 216.20 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 222.24 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 225.00 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 220.64 MiB/614.37 MiB \r\r\r\r\r\r⠙ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 221.28 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 217.22 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 223.23 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 226.00 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 221.80 MiB/614.37 MiB \r\r\r\r\r\r⠙ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 222.33 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 218.24 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 224.22 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 226.95 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 222.89 MiB/614.37 MiB ",,terminal_output +4844,7906920,"TERMINAL",0,0,"\r\r\r\r\r\r⠙ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 223.34 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 219.23 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 225.13 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 227.94 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 223.89 MiB/614.37 MiB \r\r\r\r\r\r⠹ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 224.31 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 220.24 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 226.19 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 228.99 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 224.93 MiB/614.37 MiB ",,terminal_output +4845,7906981,"TERMINAL",0,0,"\r\r\r\r\r\r⠹ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 225.59 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 221.31 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 226.97 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 230.00 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 226.00 MiB/614.37 MiB ",,terminal_output +4846,7907043,"TERMINAL",0,0,"\r\r\r\r\r\r⠹ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 226.68 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 222.62 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 227.89 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 230.93 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 226.96 MiB/614.37 MiB ",,terminal_output +4847,7907100,"TERMINAL",0,0,"\r\r\r\r\r\r⠹ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 227.72 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 223.68 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 229.00 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 231.95 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 228.00 MiB/614.37 MiB ",,terminal_output +4848,7907155,"TERMINAL",0,0,"\r\r\r\r\r\r⠸ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 228.70 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 224.88 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 230.00 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 233.00 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 229.00 MiB/614.37 MiB ",,terminal_output +4849,7907225,"TERMINAL",0,0,"\r\r\r\r\r\r⠸ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 229.80 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 225.84 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 231.02 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 234.03 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 230.03 MiB/614.37 MiB \r\r\r\r\r\r⠸ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 230.84 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 226.62 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 232.03 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 235.32 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 231.08 MiB/614.37 MiB ",,terminal_output +4850,7907337,"TERMINAL",0,0,"\r\r\r\r\r\r⠸ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 231.90 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 227.45 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 233.09 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 236.45 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 232.12 MiB/614.37 MiB \r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 232.94 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 228.45 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 234.14 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 237.56 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 233.11 MiB/614.37 MiB ",,terminal_output +4851,7907435,"TERMINAL",0,0,"\r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 233.94 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 229.47 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 235.17 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 238.65 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 234.11 MiB/614.37 MiB \r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 234.87 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 230.51 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 236.25 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 239.66 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 235.13 MiB/614.37 MiB ",,terminal_output +4852,7907539,"TERMINAL",0,0,"\r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 235.92 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 231.50 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 237.19 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 240.87 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 236.12 MiB/614.37 MiB \r\r\r\r\r\r⠴ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 237.02 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 232.28 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 238.20 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 242.04 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 237.22 MiB/614.37 MiB ",,terminal_output +4853,7907632,"TERMINAL",0,0,"\r\r\r\r\r\r⠴ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 238.06 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 233.34 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 238.99 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 243.29 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 238.28 MiB/614.37 MiB ",,terminal_output +4854,7907809,"TERMINAL",0,0,"\r\r\r\r\r\r⠴ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 239.03 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 234.43 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 240.02 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 244.33 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 239.33 MiB/614.37 MiB \r\r\r\r\r\r⠴ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 240.00 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 235.30 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 241.28 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 245.36 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 240.36 MiB/614.37 MiB \r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 241.04 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 236.30 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 242.35 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 246.34 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 241.34 MiB/614.37 MiB \r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 242.06 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 237.38 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 243.33 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 247.35 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 242.40 MiB/614.37 MiB ",,terminal_output +4855,7907863,"TERMINAL",0,0,"\r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 243.22 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 238.47 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 244.06 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 248.43 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 243.51 MiB/614.37 MiB \r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 244.29 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 239.43 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 245.05 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 249.44 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 244.50 MiB/614.37 MiB ",,terminal_output +4856,7907931,"TERMINAL",0,0,"\r\r\r\r\r\r⠧ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 245.40 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 240.55 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 246.09 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 250.40 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 245.37 MiB/614.37 MiB ",,terminal_output +4857,7907991,"TERMINAL",0,0,"\r\r\r\r\r\r⠧ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 246.41 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 241.43 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 247.29 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 251.45 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 246.39 MiB/614.37 MiB ",,terminal_output +4858,7908078,"TERMINAL",0,0,"\r\r\r\r\r\r⠧ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 247.31 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 242.45 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 248.28 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 252.53 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 247.41 MiB/614.37 MiB \r\r\r\r\r\r⠧ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 248.46 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 243.44 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 249.41 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 253.44 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 248.43 MiB/614.37 MiB ",,terminal_output +4859,7908148,"TERMINAL",0,0,"\r\r\r\r\r\r⠇ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 249.39 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 244.53 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 250.50 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 254.50 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 249.40 MiB/614.37 MiB ",,terminal_output +4860,7908270,"TERMINAL",0,0,"\r\r\r\r\r\r⠇ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 250.39 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 245.51 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 251.38 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 255.51 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 250.28 MiB/614.37 MiB \r\r\r\r\r\r⠇ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 250.61 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 245.76 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 251.60 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 255.70 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 250.53 MiB/614.37 MiB \r\r\r\r\r\r⠇ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 250.86 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 246.04 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 251.84 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 255.97 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 250.75 MiB/614.37 MiB ",,terminal_output +4861,7908324,"TERMINAL",0,0,"\r\r\r\r\r\r⠋ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 251.67 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 246.90 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 252.85 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 256.99 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 251.59 MiB/614.37 MiB ",,terminal_output +4862,7908496,"TERMINAL",0,0,"\r\r\r\r\r\r⠋ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 252.98 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 247.54 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 254.08 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 258.12 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 252.33 MiB/614.37 MiB \r\r\r\r\r\r⠋ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 253.92 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 248.42 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 255.08 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 259.06 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 253.37 MiB/614.37 MiB ",,terminal_output +4863,7908583,"TERMINAL",0,0,"\r\r\r\r\r\r⠋ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 254.93 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 249.42 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 256.09 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 260.03 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 254.35 MiB/614.37 MiB \r\r\r\r\r\r⠙ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 255.81 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 250.42 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 257.05 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 261.05 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 255.33 MiB/614.37 MiB \r\r\r\r\r\r⠙ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 256.70 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 251.38 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 258.00 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 262.00 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 256.38 MiB/614.37 MiB ",,terminal_output +4864,7908653,"TERMINAL",0,0,"\r\r\r\r\r\r⠙ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 257.62 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 252.45 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 258.99 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 262.72 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 257.22 MiB/614.37 MiB ",,terminal_output +4865,7908746,"TERMINAL",0,0,"\r\r\r\r\r\r⠙ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 258.53 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 253.39 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 259.95 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 263.71 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 258.17 MiB/614.37 MiB \r\r\r\r\r\r⠹ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 259.39 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 254.29 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 260.81 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 264.64 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 259.05 MiB/614.37 MiB ",,terminal_output +4866,7908875,"TERMINAL",0,0,"\r\r\r\r\r\r⠹ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 260.36 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 255.31 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 261.62 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 265.65 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 260.00 MiB/614.37 MiB \r\r\r\r\r\r⠹ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 261.29 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 256.28 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 262.55 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 266.61 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 260.95 MiB/614.37 MiB \r\r\r\r\r\r⠹ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 262.17 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 257.20 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 263.50 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 267.55 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 261.90 MiB/614.37 MiB ",,terminal_output +4867,7909005,"TERMINAL",0,0,"\r\r\r\r\r\r⠸ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 263.09 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 258.09 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 264.00 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 268.47 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB \r\r\r\r\r\r⠸ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 263.98 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 259.45 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 265.40 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 269.42 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB ",,terminal_output +4868,7909090,"TERMINAL",0,0,"\r\r\r\r\r\r⠸ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 264.75 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 260.60 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 266.49 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 270.54 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB ",,terminal_output +4869,7909160,"TERMINAL",0,0,"\r\r\r\r\r\r⠸ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 265.72 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 261.63 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 267.68 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 271.28 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB ",,terminal_output +4870,7909221,"TERMINAL",0,0,"\r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 266.50 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 262.19 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 268.51 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 272.21 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB \r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 267.08 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 262.89 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 269.03 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 273.00 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB \r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 267.70 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 263.45 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 269.73 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 273.59 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB ",,terminal_output +4871,7909326,"TERMINAL",0,0,"\r\r\r\r\r\r⠼ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 268.37 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 263.66 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 271.33 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 273.80 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB \r\r\r\r\r\r⠴ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 268.98 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 264.00 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 272.05 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 274.89 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB ",,terminal_output +4872,7909411,"TERMINAL",0,0,"\r\r\r\r\r\r⠴ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 270.02 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 264.50 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 272.74 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 275.45 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB ",,terminal_output +4873,7909473,"TERMINAL",0,0,"\r\r\r\r\r\r⠴ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 270.82 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 265.00 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 273.30 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 276.31 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB \r\r\r\r\r\r⠴ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 271.46 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 265.62 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 274.05 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 277.03 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB ",,terminal_output +4874,7909635,"TERMINAL",0,0,"\r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 272.00 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 266.43 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 274.75 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 277.73 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 262.99 MiB/614.37 MiB \r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 272.75 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 267.23 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 275.25 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 278.23 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 263.25 MiB/614.37 MiB \r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 273.39 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 267.97 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 275.90 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 278.84 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 263.96 MiB/614.37 MiB ",,terminal_output +4875,7909679,"TERMINAL",0,0,"\r\r\r\r\r\r⠦ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 273.98 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 268.60 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 276.56 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 279.00 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 264.55 MiB/614.37 MiB ",,terminal_output +4876,7909850,"TERMINAL",0,0,"\r\r\r\r\r\r⠧ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 274.84 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 269.65 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 277.52 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 279.31 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 265.53 MiB/614.37 MiB \r\r\r\r\r\r⠧ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 275.58 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 270.47 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 278.37 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 280.19 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 266.39 MiB/614.37 MiB \r\r\r\r\r\r⠧ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 276.51 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 271.25 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 279.28 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 281.05 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 267.26 MiB/614.37 MiB ",,terminal_output +4877,7909900,"TERMINAL",0,0,"\r\r\r\r\r\r⠧ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 277.31 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 272.42 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 279.95 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 281.68 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 267.87 MiB/614.37 MiB ",,terminal_output +4878,7909975,"TERMINAL",0,0,"\r\r\r\r\r\r⠇ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 278.17 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 273.62 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 280.63 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 282.88 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 268.45 MiB/614.37 MiB \r\r\r\r\r\r⠇ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 278.75 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 274.93 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 281.33 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 283.48 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 269.43 MiB/614.37 MiB ",,terminal_output +4879,7910033,"TERMINAL",0,0,"\r\r\r\r\r\r⠇ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 279.70 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 275.87 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 282.20 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 284.26 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 270.29 MiB/614.37 MiB ",,terminal_output +4880,7910106,"TERMINAL",0,0,"\r\r\r\r\r\r⠇ Preparing packages... 
(78/83)\r\nnvidia-nccl-cu12  ------------------------------ 280.47 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 276.76 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 282.99 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 285.08 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 271.14 MiB/614.37 MiB ",,terminal_output +4881,7910169,"TERMINAL",0,0,"\r\r\r\r\r\r⠋ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 281.40 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 277.42 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 283.98 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 286.03 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 272.03 MiB/614.37 MiB \r\r\r\r\r\r⠋ Preparing packages... (78/83)\r\nnvidia-nccl-cu12  ------------------------------ 282.19 MiB/282.19 MiB\r\nnvidia-cusolver-cu12  ------------------------------ 278.36 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 285.22 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 286.79 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 272.77 MiB/614.37 MiB ",,terminal_output +4882,7910235,"TERMINAL",0,0,"\r\r\r\r\r\r⠋ Preparing packages... (78/83)\r\nnvidia-cusolver-cu12  ------------------------------ 278.70 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 285.64 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 287.19 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 273.03 MiB/614.37 MiB \r\r\r\r\r⠋ Preparing packages... (78/83)\r\nnvidia-cusolver-cu12  ------------------------------ 279.16 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 286.27 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 288.14 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 274.06 MiB/614.37 MiB ",,terminal_output +4883,7910321,"TERMINAL",0,0,"\r\r\r\r\r⠋ Preparing packages... (78/83)\r\nnvidia-cusolver-cu12  ------------------------------ 280.69 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 287.77 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 289.28 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 275.32 MiB/614.37 MiB \r\r\r\r\r⠙ Preparing packages... (79/83)\r\nnvidia-cusolver-cu12  ------------------------------ 281.67 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 289.08 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 290.65 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 276.66 MiB/614.37 MiB ",,terminal_output +4884,7910557,"TERMINAL",0,0,"\r\r\r\r\r⠙ Preparing packages... (79/83)\r\nnvidia-cusolver-cu12  ------------------------------ 283.19 MiB/322.45 MiB\r\nnvidia-cusparse-cu12  ------------------------------ 290.59 MiB/349.49 MiB\r\nnvidia-cublas-cu12  ------------------------------ 291.68 MiB/554.32 MiB\r\nnvidia-cudnn-cu12  ------------------------------ 277.68 MiB/614.37 MiB \r\r\r\r\r⠙ Preparing packages... 
(79/83) [uv download-progress frames: nvidia-cusolver-cu12 → 322.45 MiB, nvidia-cusparse-cu12 → 349.49 MiB, nvidia-cublas-cu12 → 554.32 MiB, nvidia-cudnn-cu12 → 614.37 MiB, all downloaded to completion]",,terminal_output
+4963,7917299,"jasmine/sample.py",0,0,"",python,tab
+4976,7918536,"TERMINAL",0,0,"\r\r⠴ Preparing packages... (82/83) \rPrepared 83 packages in 35.19s\r\n░░░░░░░░░░░░░░░░░░░░ [0/0] Installing wheels... \r░░░░░░░░░░░░░░░░░░░░ [0/83] Installing wheels... 
\r░░░░░░░░░░░░░░░░░░░░ [0/83] requests==2.32.5 \r░░░░░░░░░░░░░░░░░░░░ [1/83] requests==2.32.5 \r░░░░░░░░░░░░░░░░░░░░ [1/83] pyyaml==6.0.2 \r░░░░░░░░░░░░░░░░░░░░ [2/83] pyyaml==6.0.2 \r░░░░░░░░░░░░░░░░░░░░ [2/83] certifi==2025.8.3 \r░░░░░░░░░░░░░░░░░░░░ [3/83] certifi==2025.8.3 \r░░░░░░░░░░░░░░░░░░░░ [3/83] jax-cuda12-plugin==0.7.2 \r░░░░░░░░░░░░░░░░░░░░ [4/83] jax-cuda12-plugin==0.7.2 \r░░░░░░░░░░░░░░░░░░░░ [4/83] rich==14.1.0 \r█░░░░░░░░░░░░░░░░░░░ [5/83] rich==14.1.0 \r█░░░░░░░░░░░░░░░░░░░ [5/83] nvidia-nvjitlink-cu12==12.9.86 \r█░░░░░░░░░░░░░░░░░░░ [6/83] nvidia-nvjitlink-cu12==12.9.86 \r█░░░░░░░░░░░░░░░░░░░ [6/83] typing-extensions==4.15.0 \r█░░░░░░░░░░░░░░░░░░░ [7/83] typing-extensions==4.15.0 \r█░░░░░░░░░░░░░░░░░░░ [7/83] tyro==0.9.32 \r█░░░░░░░░░░░░░░░░░░░ [8/83] tyro==0.9.32 \r█░░░░░░░░░░░░░░░░░░░ [8/83] typeguard==4.4.4 \r██░░░░░░░░░░░░░░░░░░ [9/83] typeguard==4.4.4 \r███░░░░░░░░░░░░░░░░░ [13/83] opt-einsum==3.4.0 ",,terminal_output +4977,7918587,"TERMINAL",0,0,"\r██████░░░░░░░░░░░░░░ [27/83] absl-py==2.3.1 ",,terminal_output +4978,7918692,"TERMINAL",0,0,"\r██████████░░░░░░░░░░ [44/83] distlib==0.4.0 \r██████████████░░░░░░ [59/83] treescope==0.1.10 ",,terminal_output +4979,7918798,"TERMINAL",0,0,"\r████████████████░░░░ [69/83] markdown-it-py==4.0.0 \r████████████████░░░░ [70/83] flax==0.11.2 ",,terminal_output +4980,7918849,"TERMINAL",0,0,"\r█████████████████░░░ [74/83] ml-dtypes==0.5.3 ",,terminal_output +4981,7918961,"TERMINAL",0,0,"\r██████████████████░░ [78/83] pygments==2.19.2 \r███████████████████░ [79/83] pygments==2.19.2 \r███████████████████░ [80/83] numpy==2.3.3 ",,terminal_output +4982,7919151,"TERMINAL",0,0,"\r███████████████████░ [82/83] scipy==1.16.2 \r████████████████████ [83/83] scipy==1.16.2 \rInstalled 83 packages in 662ms\r\n + absl-py==2.3.1\r\n + aiofiles==24.1.0\r\n + annotated-types==0.7.0\r\n + array-record==0.8.1\r\n + attrs==25.3.0\r\n + certifi==2025.8.3\r\n + cfgv==3.4.0\r\n + charset-normalizer==3.4.3\r\n + chex==0.1.91\r\n + click==8.3.0\r\n + cloudpickle==3.1.1\r\n + distlib==0.4.0\r\n + dm-pix==0.4.4\r\n + dm-tree==0.1.9\r\n + docstring-parser==0.17.0\r\n + einops==0.8.1\r\n + etils==1.13.0\r\n + filelock==3.19.1\r\n + flax==0.11.2\r\n + fsspec==2025.9.0\r\n + gitdb==4.0.12\r\n + gitpython==3.1.45\r\n + grain==0.2.12\r\n + humanize==4.13.0\r\n + identify==2.6.14\r\n + idna==3.10\r\n + importlib-resources==6.5.2\r\n + jasmine==0.1.0 (from file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine)\r\n + jax==0.7.2\r\n + jax-cuda12-pjrt==0.7.2\r\n + jax-cuda12-plugin==0.7.2\r\n + jaxlib==0.7.2\r\n + markdown-it-py==4.0.0\r\n + mdurl==0.1.2\r\n + ml-dtypes==0.5.3\r\n + more-itertools==10.8.0\r\n + msgpack==1.1.1\r\n + nest-asyncio==1.6.0\r\n + nodeenv==1.9.1\r\n + numpy==2.3.3\r\n + nvidia-cublas-cu12==12.9.1.4\r\n + nvidia-cuda-cupti-cu12==12.9.79\r\n + nvidia-cuda-nvcc-cu12==12.9.86\r\n + nvidia-cuda-nvrtc-cu12==12.9.86\r\n + nvidia-cuda-runtime-cu12==12.9.79\r\n + nvidia-cudnn-cu12==9.13.0.50\r\n + nvidia-cufft-cu12==11.4.1.4\r\n + nvidia-cusolver-cu12==11.7.5.82\r\n + nvidia-cusparse-cu12==12.5.10.65\r\n + nvidia-nccl-cu12==2.28.3\r\n + nvidia-nvjitlink-cu12==12.9.86\r\n + nvidia-nvshmem-cu12==3.4.5\r\n + opt-einsum==3.4.0\r\n + optax==0.2.6\r\n + orbax-checkpoint==0.11.25\r\n + packaging==25.0\r\n + pillow==11.3.0\r\n + platformdirs==4.4.0\r\n + pre-commit==4.3.0\r\n + protobuf==6.32.1\r\n + pydantic==2.11.9\r\n + pydantic-core==2.33.2\r\n + pygments==2.19.2\r\n + pyyaml==6.0.2\r\n + requests==2.32.5\r\n + rich==14.1.0\r\n + 
scipy==1.16.2\r\n + sentry-sdk==2.38.0\r\n + shtab==1.7.2\r\n + simplejson==3.20.1\r\n + smmap==5.0.2\r\n + tensorstore==0.1.77\r\n + toolz==1.0.0\r\n + treescope==0.1.10\r\n + typeguard==4.4.4\r\n + typing-extensions==4.15.0\r\n + typing-inspection==0.4.1\r\n + tyro==0.9.32\r\n + urllib3==2.5.0\r\n + virtualenv==20.34.0\r\n + wandb==0.22.0\r\n + wrapt==1.17.3\r\n + zipp==3.23.0\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +4983,7952438,"jasmine/sample.py",0,0,"",python,tab +4984,7952439,"jasmine/sample.py",3150,0,"",python,selection_mouse +4985,7952912,"jasmine/sample.py",3173,0,"",python,selection_mouse +4986,7955799,"TERMINAL",0,0,"g",,terminal_output +4987,7955875,"TERMINAL",0,0,"it",,terminal_output +4988,7955953,"TERMINAL",0,0," ",,terminal_output +4989,7956046,"TERMINAL",0,0,"c",,terminal_output +4990,7956106,"TERMINAL",0,0,"h",,terminal_output +4991,7956328,"TERMINAL",0,0,"e",,terminal_output +4992,7956532,"TERMINAL",0,0,"ck",,terminal_output +4993,7956737,"TERMINAL",0,0,"ou",,terminal_output +4994,7956896,"TERMINAL",0,0,"t ",,terminal_output +4995,7957131,"TERMINAL",0,0,"-",,terminal_output +4996,7957281,"TERMINAL",0,0,"b",,terminal_output +4997,7957555,"TERMINAL",0,0," ",,terminal_output +4998,7957760,"TERMINAL",0,0,"""",,terminal_output +4999,7960152,"TERMINAL",0,0,"s",,terminal_output +5000,7960404,"TERMINAL",0,0,"a",,terminal_output +5001,7960931,"TERMINAL",0,0,"m",,terminal_output +5002,7961138,"TERMINAL",0,0,"p",,terminal_output +5003,7961202,"TERMINAL",0,0,"l",,terminal_output +5004,7961441,"TERMINAL",0,0,"in",,terminal_output +5005,7961581,"TERMINAL",0,0,"g",,terminal_output +5006,7961764,"TERMINAL",0,0,"-",,terminal_output +5007,7961990,"TERMINAL",0,0,"s",,terminal_output +5008,7962346,"TERMINAL",0,0,"c",,terminal_output +5009,7962552,"TERMINAL",0,0,"r",,terminal_output +5010,7962667,"TERMINAL",0,0,"i",,terminal_output +5011,7962809,"TERMINAL",0,0,"p",,terminal_output +5012,7962875,"TERMINAL",0,0,"t",,terminal_output +5013,7963038,"TERMINAL",0,0,"-",,terminal_output +5014,7963248,"TERMINAL",0,0,"a",,terminal_output +5015,7963417,"TERMINAL",0,0,"d",,terminal_output +5016,7963564,"TERMINAL",0,0,"d",,terminal_output +5017,7963677,"TERMINAL",0,0,"-",,terminal_output +5018,7964138,"TERMINAL",0,0,"m",,terminal_output +5019,7964544,"TERMINAL",0,0,"e",,terminal_output +5020,7964732,"TERMINAL",0,0,"t",,terminal_output +5021,7964927,"TERMINAL",0,0,"r",,terminal_output +5022,7964989,"TERMINAL",0,0,"i",,terminal_output +5023,7965133,"TERMINAL",0,0,"c",,terminal_output +5024,7965195,"TERMINAL",0,0,"s",,terminal_output +5025,7965473,"TERMINAL",0,0,"""",,terminal_output +5026,7965707,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +5027,7965745,"",0,0,"Switched from branch 'main' to 'sampling-script-add-metrics'",,git_branch_checkout +5028,7965792,"TERMINAL",0,0,"Switched to a new branch 'sampling-script-add-metrics'\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +5029,7967251,"jasmine/sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n 
image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n del genie.tokenizer.vq.drop\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n 
model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def _autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n gt = (\n gt_video[:, : recon_video_BSHWC.shape[1]]\n .clip(0, 1)\n .reshape(-1, *gt_video.shape[2:])\n )\n recon = recon_video_BSHWC.clip(0, 1).reshape(-1, *recon_video_BSHWC.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame :], recon[:, args.start_frame :])\n ).mean()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B = batch[""videos""].shape[0]\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(\n batch[""actions""][:, :-1], (B, args.seq_len - 1, 1)\n )\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +5030,7967252,"jasmine/sample.py",3182,0,"",python,selection_mouse +5031,7967317,"jasmine/sample.py",3181,0,"",python,selection_command 
+5032,7969343,"jasmine/sample.py",3151,32,"",python,content +5033,7969398,"jasmine/sample.py",3155,0,"",python,selection_command +5034,7970191,"jasmine/sample.py",3211,0,"",python,selection_command +5035,7970694,"jasmine/sample.py",3243,0,"",python,selection_command +5036,7970724,"jasmine/sample.py",3280,0,"",python,selection_command +5037,7970761,"jasmine/sample.py",3306,0,"",python,selection_command +5038,7970806,"jasmine/sample.py",3311,0,"",python,selection_command +5039,7970837,"jasmine/sample.py",3382,0,"",python,selection_command +5040,7970871,"jasmine/sample.py",3408,0,"",python,selection_command +5041,7970901,"jasmine/sample.py",3489,0,"",python,selection_command +5042,7970912,"jasmine/sample.py",3495,0,"",python,selection_command +5043,7970978,"jasmine/sample.py",3521,0,"",python,selection_command +5044,7970978,"jasmine/sample.py",3605,0,"",python,selection_command +5045,7970998,"jasmine/sample.py",3611,0,"",python,selection_command +5046,7971037,"jasmine/sample.py",3666,0,"",python,selection_command +5047,7971059,"jasmine/sample.py",3702,0,"",python,selection_command +5048,7971087,"jasmine/sample.py",3708,0,"",python,selection_command +5049,7971120,"jasmine/sample.py",3756,0,"",python,selection_command +5050,7971148,"jasmine/sample.py",3781,0,"",python,selection_command +5051,7971187,"jasmine/sample.py",3817,0,"",python,selection_command +5052,7971207,"jasmine/sample.py",3860,0,"",python,selection_command +5053,7971274,"jasmine/sample.py",3862,0,"",python,selection_command +5054,7971281,"jasmine/sample.py",3867,0,"",python,selection_command +5055,7971317,"jasmine/sample.py",3895,0,"",python,selection_command +5056,7971348,"jasmine/sample.py",3963,0,"",python,selection_command +5057,7971356,"jasmine/sample.py",3979,0,"",python,selection_command +5058,7971387,"jasmine/sample.py",3995,0,"",python,selection_command +5059,7971424,"jasmine/sample.py",4022,0,"",python,selection_command +5060,7971453,"jasmine/sample.py",4051,0,"",python,selection_command +5061,7971486,"jasmine/sample.py",4057,0,"",python,selection_command +5062,7971538,"jasmine/sample.py",4106,0,"",python,selection_command +5063,7971575,"jasmine/sample.py",4111,0,"",python,selection_command +5064,7971575,"jasmine/sample.py",4176,0,"",python,selection_command +5065,7971633,"jasmine/sample.py",4237,0,"",python,selection_command +5066,7971659,"jasmine/sample.py",4280,0,"",python,selection_command +5067,7971660,"jasmine/sample.py",4322,0,"",python,selection_command +5068,7971718,"jasmine/sample.py",4355,0,"",python,selection_command +5069,7971729,"jasmine/sample.py",4445,0,"",python,selection_command +5070,7971756,"jasmine/sample.py",4456,0,"",python,selection_command +5071,7971776,"jasmine/sample.py",4462,0,"",python,selection_command +5072,7971832,"jasmine/sample.py",4517,0,"",python,selection_command +5073,7971841,"jasmine/sample.py",4571,0,"",python,selection_command +5074,7971877,"jasmine/sample.py",4576,0,"",python,selection_command +5075,7971913,"jasmine/sample.py",4615,0,"",python,selection_command +5076,7971922,"jasmine/sample.py",4677,0,"",python,selection_command +5077,7971960,"jasmine/sample.py",4755,0,"",python,selection_command +5078,7971992,"jasmine/sample.py",4790,0,"",python,selection_command +5079,7972020,"jasmine/sample.py",4813,0,"",python,selection_command +5080,7972076,"jasmine/sample.py",4835,0,"",python,selection_command +5081,7972111,"jasmine/sample.py",4889,0,"",python,selection_command +5082,7972136,"jasmine/sample.py",4923,0,"",python,selection_command 
+5083,7972170,"jasmine/sample.py",4942,0,"",python,selection_command +5084,7972178,"jasmine/sample.py",4968,0,"",python,selection_command +5085,7972204,"jasmine/sample.py",4998,0,"",python,selection_command +5086,7972226,"jasmine/sample.py",5030,0,"",python,selection_command +5087,7972308,"jasmine/sample.py",5062,0,"",python,selection_command +5088,7972309,"jasmine/sample.py",5072,0,"",python,selection_command +5089,7972334,"jasmine/sample.py",5090,0,"",python,selection_command +5090,7972375,"jasmine/sample.py",5095,0,"",python,selection_command +5091,7972383,"jasmine/sample.py",5145,0,"",python,selection_command +5092,7972447,"jasmine/sample.py",5189,0,"",python,selection_command +5093,7972448,"jasmine/sample.py",5254,0,"",python,selection_command +5094,7972483,"jasmine/sample.py",5281,0,"",python,selection_command +5095,7972528,"jasmine/sample.py",5338,0,"",python,selection_command +5096,7972562,"jasmine/sample.py",5369,0,"",python,selection_command +5097,7972599,"jasmine/sample.py",5374,0,"",python,selection_command +5098,7972621,"jasmine/sample.py",5415,0,"",python,selection_command +5099,7972658,"jasmine/sample.py",5442,0,"",python,selection_command +5100,7972719,"jasmine/sample.py",5481,0,"",python,selection_command +5101,7972729,"jasmine/sample.py",5524,0,"",python,selection_command +5102,7972739,"jasmine/sample.py",5563,0,"",python,selection_command +5103,7972778,"jasmine/sample.py",5569,0,"",python,selection_command +5104,7972786,"jasmine/sample.py",5602,0,"",python,selection_command +5105,7972949,"jasmine/sample.py",5630,0,"",python,selection_command +5106,7973518,"jasmine/sample.py",5652,0,"",python,selection_command +5107,7973520,"jasmine/sample.py",5677,0,"",python,selection_command +5108,7973520,"jasmine/sample.py",5704,0,"",python,selection_command +5109,7973521,"jasmine/sample.py",5730,0,"",python,selection_command +5110,7973534,"jasmine/sample.py",5759,0,"",python,selection_command +5111,7973570,"jasmine/sample.py",5875,0,"",python,selection_command +5112,7973700,"jasmine/sample.py",5898,0,"",python,selection_command +5113,7973701,"jasmine/sample.py",5930,0,"",python,selection_command +5114,7973732,"jasmine/sample.py",5954,0,"",python,selection_command +5115,7973746,"jasmine/sample.py",5960,0,"",python,selection_command +5116,7973757,"jasmine/sample.py",5994,0,"",python,selection_command +5117,7973758,"jasmine/sample.py",6023,0,"",python,selection_command +5118,7973829,"jasmine/sample.py",6094,0,"",python,selection_command +5119,7973846,"jasmine/sample.py",6144,0,"",python,selection_command +5120,7973854,"jasmine/sample.py",6197,0,"",python,selection_command +5121,7973885,"jasmine/sample.py",6223,0,"",python,selection_command +5122,7973938,"jasmine/sample.py",6255,0,"",python,selection_command +5123,7973978,"jasmine/sample.py",6319,0,"",python,selection_command +5124,7974006,"jasmine/sample.py",6364,0,"",python,selection_command +5125,7974063,"jasmine/sample.py",6369,0,"",python,selection_command +5126,7974064,"jasmine/sample.py",6407,0,"",python,selection_command +5127,7974083,"jasmine/sample.py",6466,0,"",python,selection_command +5128,7974090,"jasmine/sample.py",6528,0,"",python,selection_command +5129,7974145,"jasmine/sample.py",6539,0,"",python,selection_command +5130,7974183,"jasmine/sample.py",6589,0,"",python,selection_command +5131,7974207,"jasmine/sample.py",6609,0,"",python,selection_command +5132,7974242,"jasmine/sample.py",6651,0,"",python,selection_command +5133,7974253,"jasmine/sample.py",6657,0,"",python,selection_command 
+5134,7974296,"jasmine/sample.py",6740,0,"",python,selection_command +5135,7974304,"jasmine/sample.py",6764,0,"",python,selection_command +5136,7974336,"jasmine/sample.py",6838,0,"",python,selection_command +5137,7974394,"jasmine/sample.py",6851,0,"",python,selection_command +5138,7974426,"jasmine/sample.py",6874,0,"",python,selection_command +5139,7974470,"jasmine/sample.py",6879,0,"",python,selection_command +5140,7974479,"jasmine/sample.py",6909,0,"",python,selection_command +5141,7974947,"jasmine/sample.py",6961,0,"",python,selection_command +5142,7975457,"jasmine/sample.py",7022,0,"",python,selection_command +5143,7975513,"jasmine/sample.py",7101,0,"",python,selection_command +5144,7975537,"jasmine/sample.py",7158,0,"",python,selection_command +5145,7975566,"jasmine/sample.py",7196,0,"",python,selection_command +5146,7975600,"jasmine/sample.py",7274,0,"",python,selection_command +5147,7975663,"jasmine/sample.py",7279,0,"",python,selection_command +5148,7975663,"jasmine/sample.py",7304,0,"",python,selection_command +5149,7975694,"jasmine/sample.py",7356,0,"",python,selection_command +5150,7975726,"jasmine/sample.py",7454,0,"",python,selection_command +5151,7975726,"jasmine/sample.py",7487,0,"",python,selection_command +5152,7975782,"jasmine/sample.py",7522,0,"",python,selection_command +5153,7975844,"jasmine/sample.py",7605,0,"",python,selection_command +5154,7975844,"jasmine/sample.py",7615,0,"",python,selection_command +5155,7975872,"jasmine/sample.py",7657,0,"",python,selection_command +5156,7975931,"jasmine/sample.py",7720,0,"",python,selection_command +5157,7975932,"jasmine/sample.py",7730,0,"",python,selection_command +5158,7975961,"jasmine/sample.py",7769,0,"",python,selection_command +5159,7975992,"jasmine/sample.py",7801,0,"",python,selection_command +5160,7976018,"jasmine/sample.py",7830,0,"",python,selection_command +5161,7976050,"jasmine/sample.py",7881,0,"",python,selection_command +5162,7976082,"jasmine/sample.py",7939,0,"",python,selection_command +5163,7976100,"jasmine/sample.py",7996,0,"",python,selection_command +5164,7976111,"jasmine/sample.py",8014,0,"",python,selection_command +5165,7976147,"jasmine/sample.py",8055,0,"",python,selection_command +5166,7976176,"jasmine/sample.py",8078,0,"",python,selection_command +5167,7976203,"jasmine/sample.py",8110,0,"",python,selection_command +5168,7976233,"jasmine/sample.py",8132,0,"",python,selection_command +5169,7976260,"jasmine/sample.py",8148,0,"",python,selection_command +5170,7976319,"jasmine/sample.py",8150,0,"",python,selection_command +5171,7976861,"jasmine/sample.py",8148,0,"",python,selection_command +5172,7977364,"jasmine/sample.py",8132,0,"",python,selection_command +5173,7977393,"jasmine/sample.py",8110,0,"",python,selection_command +5174,7977426,"jasmine/sample.py",8078,0,"",python,selection_command +5175,7977474,"jasmine/sample.py",8055,0,"",python,selection_command +5176,7977668,"jasmine/sample.py",8014,0,"",python,selection_command +5177,7977669,"jasmine/sample.py",7996,0,"",python,selection_command +5178,7977669,"jasmine/sample.py",7939,0,"",python,selection_command +5179,7977687,"jasmine/sample.py",7881,0,"",python,selection_command +5180,7977688,"jasmine/sample.py",7830,0,"",python,selection_command +5181,7978015,"jasmine/sample.py",7881,0,"",python,selection_command +5182,7978210,"jasmine/sample.py",7939,0,"",python,selection_command +5183,7981431,"jasmine/sample.py",1422,0,"",python,selection_mouse +5184,7981441,"jasmine/sample.py",1421,0,"",python,selection_command 
+5185,7982102,"jasmine/sample.py",1483,0,"",python,selection_mouse +5186,7982722,"jasmine/sample.py",1192,0,"",python,selection_mouse +5187,7982729,"jasmine/sample.py",1191,0,"",python,selection_command +5188,7986745,"jasmine/sample.py",760,0,"",python,selection_mouse +5189,7986747,"jasmine/sample.py",759,0,"",python,selection_command +5190,7987362,"jasmine/sample.py",733,0,"",python,selection_mouse +5191,7987373,"jasmine/sample.py",732,0,"",python,selection_command +5192,7988287,"jasmine/sample.py",512,0,"",python,selection_mouse +5193,7988297,"jasmine/sample.py",511,0,"",python,selection_command +5194,7988778,"jasmine/sample.py",581,0,"",python,selection_mouse +5195,7988792,"jasmine/sample.py",580,0,"",python,selection_command +5196,7989797,"jasmine/sample.py",581,0,"\n ",python,content +5197,7990112,"jasmine/sample.py",586,0,"p",python,content +5198,7990113,"jasmine/sample.py",587,0,"",python,selection_keyboard +5199,7990215,"jasmine/sample.py",587,0,"r",python,content +5200,7990216,"jasmine/sample.py",588,0,"",python,selection_keyboard +5201,7990326,"jasmine/sample.py",588,0,"i",python,content +5202,7990327,"jasmine/sample.py",589,0,"",python,selection_keyboard +5203,7990387,"jasmine/sample.py",589,0,"n",python,content +5204,7990389,"jasmine/sample.py",590,0,"",python,selection_keyboard +5205,7990480,"jasmine/sample.py",590,0,"t",python,content +5206,7990482,"jasmine/sample.py",591,0,"",python,selection_keyboard +5207,7990947,"jasmine/sample.py",591,0,"-",python,content +5208,7990949,"jasmine/sample.py",592,0,"",python,selection_keyboard +5209,7991431,"jasmine/sample.py",591,1,"",python,content +5210,7992056,"jasmine/sample.py",591,0,"_",python,content +5211,7992057,"jasmine/sample.py",592,0,"",python,selection_keyboard +5212,7993278,"jasmine/sample.py",592,0,"a",python,content +5213,7993282,"jasmine/sample.py",593,0,"",python,selection_keyboard +5214,7993473,"jasmine/sample.py",593,0,"c",python,content +5215,7993474,"jasmine/sample.py",594,0,"",python,selection_keyboard +5216,7993600,"jasmine/sample.py",594,0,"t",python,content +5217,7993601,"jasmine/sample.py",595,0,"",python,selection_keyboard +5218,7993761,"jasmine/sample.py",595,0,"i",python,content +5219,7993762,"jasmine/sample.py",596,0,"",python,selection_keyboard +5220,7993811,"jasmine/sample.py",596,0,"o",python,content +5221,7993812,"jasmine/sample.py",597,0,"",python,selection_keyboard +5222,7993917,"jasmine/sample.py",597,0,"n",python,content +5223,7993919,"jasmine/sample.py",598,0,"",python,selection_keyboard +5224,7993987,"jasmine/sample.py",598,0,"s",python,content +5225,7993989,"jasmine/sample.py",599,0,"",python,selection_keyboard +5226,7994910,"jasmine/sample.py",598,1,"",python,content +5227,7995473,"jasmine/sample.py",598,0,"_",python,content +5228,7995473,"jasmine/sample.py",599,0,"",python,selection_keyboard +5229,7995756,"jasmine/sample.py",599,0,"i",python,content +5230,7995757,"jasmine/sample.py",600,0,"",python,selection_keyboard +5231,7995807,"jasmine/sample.py",600,0,"n",python,content +5232,7995808,"jasmine/sample.py",601,0,"",python,selection_keyboard +5233,7996235,"jasmine/sample.py",601,0,"i",python,content +5234,7996236,"jasmine/sample.py",602,0,"",python,selection_keyboard +5235,7996678,"jasmine/sample.py",601,1,"",python,content +5236,7996779,"jasmine/sample.py",601,0,"d",python,content +5237,7996781,"jasmine/sample.py",602,0,"",python,selection_keyboard +5238,7996887,"jasmine/sample.py",602,0,"i",python,content +5239,7996887,"jasmine/sample.py",603,0,"",python,selection_keyboard 
+5240,7996985,"jasmine/sample.py",603,0,"c",python,content +5241,7996986,"jasmine/sample.py",604,0,"",python,selection_keyboard +5242,7997151,"jasmine/sample.py",604,0,"e",python,content +5243,7997152,"jasmine/sample.py",605,0,"",python,selection_keyboard +5244,7997361,"jasmine/sample.py",605,0,"s",python,content +5245,7997362,"jasmine/sample.py",606,0,"",python,selection_keyboard +5246,7997976,"jasmine/sample.py",606,0,":",python,content +5247,7997977,"jasmine/sample.py",607,0,"",python,selection_keyboard +5248,7998210,"jasmine/sample.py",607,0," ",python,content +5249,7998210,"jasmine/sample.py",608,0,"",python,selection_keyboard +5250,7999432,"jasmine/sample.py",608,0,"b",python,content +5251,7999434,"jasmine/sample.py",609,0,"",python,selection_keyboard +5252,7999674,"jasmine/sample.py",609,0,"o",python,content +5253,7999675,"jasmine/sample.py",610,0,"",python,selection_keyboard +5254,7999775,"jasmine/sample.py",610,0,"o",python,content +5255,7999776,"jasmine/sample.py",611,0,"",python,selection_keyboard +5256,7999930,"jasmine/sample.py",611,0,"l",python,content +5257,7999931,"jasmine/sample.py",612,0,"",python,selection_keyboard +5258,8000471,"jasmine/sample.py",612,0," ",python,content +5259,8000472,"jasmine/sample.py",613,0,"",python,selection_keyboard +5260,8000664,"jasmine/sample.py",613,0,"=",python,content +5261,8000665,"jasmine/sample.py",614,0,"",python,selection_keyboard +5262,8000768,"jasmine/sample.py",614,0," ",python,content +5263,8000769,"jasmine/sample.py",615,0,"",python,selection_keyboard +5264,8001528,"jasmine/sample.py",615,0,"T",python,content +5265,8001529,"jasmine/sample.py",616,0,"",python,selection_keyboard +5266,8001710,"jasmine/sample.py",616,0,"r",python,content +5267,8001711,"jasmine/sample.py",617,0,"",python,selection_keyboard +5268,8001839,"jasmine/sample.py",617,0,"u",python,content +5269,8001840,"jasmine/sample.py",618,0,"",python,selection_keyboard +5270,8002427,"jasmine/sample.py",618,0,"e",python,content +5271,8002428,"jasmine/sample.py",619,0,"",python,selection_keyboard +5272,8002623,"jasmine/sample.py",618,0,"",python,selection_command +5273,8006355,"jasmine/sample.py",8044,0,"",python,selection_mouse +5274,8006887,"jasmine/sample.py",7937,0,"",python,selection_mouse +5275,8007416,"jasmine/sample.py",7972,0,"\n ",python,content +5276,8007898,"jasmine/sample.py",7985,0,"i",python,content +5277,8007899,"jasmine/sample.py",7986,0,"",python,selection_keyboard +5278,8007899,"jasmine/sample.py",7986,0,"f",python,content +5279,8007900,"jasmine/sample.py",7987,0,"",python,selection_keyboard +5280,8007964,"jasmine/sample.py",7987,0," ",python,content +5281,8007966,"jasmine/sample.py",7988,0,"",python,selection_keyboard +5282,8009274,"jasmine/sample.py",7988,0,"a",python,content +5283,8009276,"jasmine/sample.py",7989,0,"",python,selection_keyboard +5284,8009497,"jasmine/sample.py",7989,0,"r",python,content +5285,8009498,"jasmine/sample.py",7990,0,"",python,selection_keyboard +5286,8009602,"jasmine/sample.py",7990,0,"g",python,content +5287,8009603,"jasmine/sample.py",7991,0,"",python,selection_keyboard +5288,8009846,"jasmine/sample.py",7991,0,"s",python,content +5289,8009847,"jasmine/sample.py",7992,0,"",python,selection_keyboard +5290,8009949,"jasmine/sample.py",7992,0,".",python,content +5291,8009950,"jasmine/sample.py",7993,0,"",python,selection_keyboard +5292,8010085,"jasmine/sample.py",7993,0,"p",python,content +5293,8010086,"jasmine/sample.py",7994,0,"",python,selection_keyboard +5294,8010246,"jasmine/sample.py",7994,0,"r",python,content 
+5295,8010247,"jasmine/sample.py",7995,0,"",python,selection_keyboard +5296,8010406,"jasmine/sample.py",7995,0,"i",python,content +5297,8010407,"jasmine/sample.py",7996,0,"",python,selection_keyboard +5298,8010445,"jasmine/sample.py",7996,0,"n",python,content +5299,8010445,"jasmine/sample.py",7997,0,"",python,selection_keyboard +5300,8010487,"jasmine/sample.py",7997,0,"t",python,content +5301,8010487,"jasmine/sample.py",7998,0,"",python,selection_keyboard +5302,8010827,"jasmine/sample.py",7998,0,"_",python,content +5303,8010828,"jasmine/sample.py",7999,0,"",python,selection_keyboard +5304,8018265,"jasmine/sample.py",7998,1,"",python,content +5305,8018434,"jasmine/sample.py",7997,1,"",python,content +5306,8018620,"jasmine/sample.py",7993,4,"",python,content +5307,8021739,"jasmine/sample.py",593,0,"",python,selection_mouse +5308,8021890,"jasmine/sample.py",586,20,"print_action_indices",python,selection_mouse +5309,8026997,"jasmine/sample.py",7993,0,"",python,selection_mouse +5310,8027227,"jasmine/sample.py",7993,0,"print_action_indices",python,content +5311,8028335,"jasmine/sample.py",8013,0,":",python,content +5312,8028337,"jasmine/sample.py",8014,0,"",python,selection_keyboard +5313,8028890,"jasmine/sample.py",8013,0,"",python,selection_command +5314,8029247,"jasmine/sample.py",8055,0,"",python,selection_command +5315,8029442,"jasmine/sample.py",8027,0,"",python,selection_command +5316,8029595,"jasmine/sample.py",8027,0," ",python,content +5317,8029843,"jasmine/sample.py",8030,0,"",python,selection_command +5318,8098949,"pyproject.toml",0,0,"[project]\nname = ""jasmine""\nversion = ""0.1.0""\nrequires-python = "">=3.11""\ndependencies = [\n ""dm-pix>=0.4.3"",\n ""einops>=0.8.0"",\n ""flax>=0.10.7"",\n ""jax[cuda12]>=0.6.2"",\n ""optax>=0.2.3"",\n ""tyro>=0.8.5"",\n ""wandb>=0.17.4"",\n ""grain>=0.2.10"",\n ""array-record>=0.7.2"",\n ""pre-commit>=4.2.0"",\n ""pillow>=11.3.0"",\n]\n\n[build-system]\nrequires = [""uv_build>=0.8.22,<0.9.0""]\nbuild-backend = ""uv_build""\n\n[tool.uv.build-backend]\nmodule-root = """"",plaintext,tab +5319,8131753,"data/pyproject.toml",0,0,"[project]\nname = ""jasmine-data""\nversion = ""0.1.0""\nauthors = [\n {name = ""Alfred Nguyen"", email = ""alfrednguyen02@gmail.com""},\n {name = ""Mihir Mahajan"", email = ""mihir@pdoom.org""},\n {name = ""Franz Srambical"", email = ""franz@pdoom.org""},\n]\nrequires-python = ""==3.10.*""\ndependencies = [\n ""procgen>=0.10.7"",\n ""gym3>=0.3.3"",\n ""array-record>=0.7.2"",\n ""numpy"",\n ""hf-transfer==0.1.9"",\n ""huggingface-hub[cli]>=0.34.3"",\n ""ffmpeg-python==0.2.0"",\n ""pillow>=11.3.0"",\n ""tqdm>=4.67.1"",\n ""tyro>=0.8.5"",\n]\n\n[build-system]\nrequires = [""uv_build>=0.8.22,<0.9.0""]\nbuild-backend = ""uv_build""\n\n[tool.uv.build-backend]\nmodule-root = """"",plaintext,tab +5320,8140821,"data/pyproject.toml",624,0,"",plaintext,selection_mouse +5321,8140838,"data/pyproject.toml",623,0,"",plaintext,selection_command +5322,8145946,"TERMINAL",0,0,"salloc: Job 3519264 has exceeded its time limit and its allocation has been revoked.\n",,terminal_output +5323,8146054,"TERMINAL",0,0,"srun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\nslurmstepd: error: *** STEP 3519264.interactive ON hkn0401 CANCELLED AT 2025-09-24T14:59:16 DUE TO TIME LIMIT ***\r\n",,terminal_output +5324,8148246,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +5325,8148811,"TERMINAL",0,0,"[?2004l\r\r\nexit\r\nsrun: error: 
hkn0401: task 0: Exited with exit code 130\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +5326,8150527,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +5327,8150590,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3519396\r\nsalloc: job 3519396 queued and waiting for resources\r\n",,terminal_output +5328,8151417,"TERMINAL",0,0,"bash",,terminal_focus +5329,8152394,"TERMINAL",0,0,"queue",,terminal_command +5330,8152442,"TERMINAL",0,0,"]633;C",,terminal_output +5331,8152545,"TERMINAL",0,0,"[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Wed Sep 24 14:59:22 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3519264 dev_accel interact tum_cte0 CG 1:00:07\t 1 hkn04013519396 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +5332,8153654,"TERMINAL",0,0,"3",,terminal_output +5333,8154622,"TERMINAL",0,0,"4",,terminal_output +5334,8155172,"TERMINAL",0,0,"bash",,terminal_focus +5335,8155619,"TERMINAL",0,0,"5",,terminal_output +5336,8156665,"TERMINAL",0,0,"6",,terminal_output +5337,8157198,"TERMINAL",0,0,"scancel 3519264",,terminal_command +5338,8157229,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +5339,8157694,"TERMINAL",0,0,"7",,terminal_output +5340,8158739,"TERMINAL",0,0,"8",,terminal_output +5341,8159782,"TERMINAL",0,0,"9",,terminal_output +5342,8160909,"TERMINAL",0,0,"30",,terminal_output +5343,8161706,"TERMINAL",0,0,"idling",,terminal_command +5344,8161759,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Wed Sep 24 14:59:31 2025Partition dev_cpuonly:\t 8 nodes idle\rPartition cpuonly: 173 nodes idle\rPartition dev_accelerated:\t 1 nodes idle\rPartition accelerated:\t 2 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 3 nodes idle\rPartition accelerated-h200:\t 2 nodes idle",,terminal_output +5345,8162037,"TERMINAL",0,0,"\r1",,terminal_output +5346,8162884,"TERMINAL",0,0,"2",,terminal_output +5347,8162984,"TERMINAL",0,0,"2",,terminal_output +5348,8163902,"TERMINAL",0,0,"3",,terminal_output +5349,8164035,"TERMINAL",0,0,"4",,terminal_output +5350,8164919,"TERMINAL",0,0,"4",,terminal_output +5351,8165017,"TERMINAL",0,0,"5",,terminal_output +5352,8165935,"TERMINAL",0,0,"5",,terminal_output +5353,8166026,"TERMINAL",0,0,"salloc",,terminal_focus +5354,8166090,"TERMINAL",0,0,"6",,terminal_output +5355,8166599,"TERMINAL",0,0,"^Csalloc: Job allocation 3519396 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +5356,8166927,"TERMINAL",0,0,"6",,terminal_output +5357,8167085,"TERMINAL",0,0,"\r7",,terminal_output +5358,8167992,"TERMINAL",0,0,"8",,terminal_output +5359,8168174,"TERMINAL",0,0,"8",,terminal_output +5360,8169065,"TERMINAL",0,0,"9",,terminal_output +5361,8169202,"TERMINAL",0,0,"9",,terminal_output +5362,8170055,"TERMINAL",0,0,"40",,terminal_output +5363,8170246,"TERMINAL",0,0,"40",,terminal_output +5364,8171108,"TERMINAL",0,0,"1",,terminal_output +5365,8171287,"TERMINAL",0,0,"1",,terminal_output +5366,8171960,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +5367,8172018,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3519398\r\nsalloc: job 3519398 queued and waiting for resources\r\n",,terminal_output +5368,8172144,"TERMINAL",0,0,"2",,terminal_output 
+5369,8172337,"TERMINAL",0,0,"23519398 dev_accel interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +5370,8173158,"TERMINAL",0,0,"3",,terminal_output +5371,8173463,"TERMINAL",0,0,"3",,terminal_output +5372,8174222,"TERMINAL",0,0,"4",,terminal_output +5373,8174418,"TERMINAL",0,0,"4",,terminal_output +5374,8175310,"TERMINAL",0,0,"5",,terminal_output +5375,8175466,"TERMINAL",0,0,"5",,terminal_output +5376,8176331,"TERMINAL",0,0,"6",,terminal_output +5377,8176507,"TERMINAL",0,0,"6",,terminal_output +5378,8177355,"TERMINAL",0,0,"7",,terminal_output +5379,8177560,"TERMINAL",0,0,"7",,terminal_output +5380,8178378,"TERMINAL",0,0,"8",,terminal_output +5381,8178686,"TERMINAL",0,0,"8",,terminal_output +5382,8179402,"TERMINAL",0,0,"9",,terminal_output +5383,8179639,"TERMINAL",0,0,"9",,terminal_output +5384,8180399,"TERMINAL",0,0,"50",,terminal_output +5385,8180678,"TERMINAL",0,0,"50",,terminal_output +5386,8181434,"TERMINAL",0,0,"1",,terminal_output +5387,8181763,"TERMINAL",0,0,"1",,terminal_output +5388,8182475,"TERMINAL",0,0,"2",,terminal_output +5389,8182782,"TERMINAL",0,0,"2",,terminal_output +5390,8183731,"TERMINAL",0,0,"3",,terminal_output +5391,8183838,"TERMINAL",0,0,"3",,terminal_output +5392,8184673,"TERMINAL",0,0,"43",,terminal_output +5393,8184854,"TERMINAL",0,0,"4",,terminal_output +5394,8185751,"TERMINAL",0,0,"5",,terminal_output +5395,8185892,"TERMINAL",0,0,"5",,terminal_output +5396,8186629,"TERMINAL",0,0,"69",,terminal_output +5397,8186933,"TERMINAL",0,0,"6",,terminal_output +5398,8187719,"TERMINAL",0,0,"7",,terminal_output +5399,8188005,"TERMINAL",0,0,"8",,terminal_output +5400,8188693,"TERMINAL",0,0,"8",,terminal_output +5401,8189017,"TERMINAL",0,0,"9",,terminal_output +5402,8189731,"TERMINAL",0,0,"9",,terminal_output +5403,8190154,"TERMINAL",0,0,"5:00:00",,terminal_output +5404,8190772,"TERMINAL",0,0,"5:00:00",,terminal_output +5405,8191179,"TERMINAL",0,0,"1",,terminal_output +5406,8191833,"TERMINAL",0,0,"1",,terminal_output +5407,8192203,"TERMINAL",0,0,"2",,terminal_output +5408,8192921,"TERMINAL",0,0,"2",,terminal_output +5409,8193230,"TERMINAL",0,0,"3",,terminal_output +5410,8193943,"TERMINAL",0,0,"3",,terminal_output +5411,8194251,"TERMINAL",0,0,"4",,terminal_output +5412,8194918,"TERMINAL",0,0,"4",,terminal_output +5413,8195348,"TERMINAL",0,0,"5",,terminal_output +5414,8195979,"TERMINAL",0,0,"6",,terminal_output +5415,8196401,"TERMINAL",0,0,"6",,terminal_output +5416,8197014,"TERMINAL",0,0,"7",,terminal_output +5417,8197373,"TERMINAL",0,0,"7",,terminal_output +5418,8198076,"TERMINAL",0,0,"8",,terminal_output +5419,8198448,"TERMINAL",0,0,"8",,terminal_output +5420,8199093,"TERMINAL",0,0,"90",,terminal_output +5421,8199472,"TERMINAL",0,0,"9Resources)",,terminal_output +5422,8200109,"TERMINAL",0,0,"10",,terminal_output +5423,8200481,"TERMINAL",0,0,"10",,terminal_output +5424,8201213,"TERMINAL",0,0,"1",,terminal_output +5425,8201623,"TERMINAL",0,0,"1",,terminal_output +5426,8202244,"TERMINAL",0,0,"2",,terminal_output +5427,8202706,"TERMINAL",0,0,"2",,terminal_output +5428,8203216,"TERMINAL",0,0,"3",,terminal_output +5429,8203614,"TERMINAL",0,0,"3",,terminal_output +5430,8204253,"TERMINAL",0,0,"4",,terminal_output +5431,8204699,"TERMINAL",0,0,"4",,terminal_output +5432,8205307,"TERMINAL",0,0,"5",,terminal_output +5433,8205675,"TERMINAL",0,0,"5",,terminal_output +5434,8206353,"TERMINAL",0,0,"6",,terminal_output +5435,8206710,"TERMINAL",0,0,"6",,terminal_output +5436,8207358,"TERMINAL",0,0,"7",,terminal_output +5437,8207743,"TERMINAL",0,0,"7",,terminal_output 
+5438,8208483,"TERMINAL",0,0,"8",,terminal_output +5439,8208793,"TERMINAL",0,0,"8",,terminal_output +5440,8209523,"TERMINAL",0,0,"9",,terminal_output +5441,8209917,"TERMINAL",0,0,"9",,terminal_output +5442,8210468,"TERMINAL",0,0,"20",,terminal_output +5443,8210878,"TERMINAL",0,0,"20",,terminal_output +5444,8211505,"TERMINAL",0,0,"1",,terminal_output +5445,8211919,"TERMINAL",0,0,"1",,terminal_output +5446,8212548,"TERMINAL",0,0,"2",,terminal_output +5447,8212990,"TERMINAL",0,0,"2",,terminal_output +5448,8213683,"TERMINAL",0,0,"3",,terminal_output +5449,8214012,"TERMINAL",0,0,"4",,terminal_output +5450,8214627,"TERMINAL",0,0,"4",,terminal_output +5451,8215038,"TERMINAL",0,0,"5",,terminal_output +5452,8215650,"TERMINAL",0,0,"5",,terminal_output +5453,8216050,"TERMINAL",0,0,"6",,terminal_output +5454,8216701,"TERMINAL",0,0,"6",,terminal_output +5455,8217088,"TERMINAL",0,0,"7",,terminal_output +5456,8217865,"TERMINAL",0,0,"7",,terminal_output +5457,8218134,"TERMINAL",0,0,"8",,terminal_output +5458,8218827,"TERMINAL",0,0,"8",,terminal_output +5459,8219237,"TERMINAL",0,0,"9",,terminal_output +5460,8219824,"TERMINAL",0,0,"9",,terminal_output +5461,8220206,"TERMINAL",0,0,"30",,terminal_output +5462,8220915,"TERMINAL",0,0,"30",,terminal_output +5463,8221532,"TERMINAL",0,0,"1",,terminal_output +5464,8221878,"TERMINAL",0,0,"1",,terminal_output +5465,8222325,"TERMINAL",0,0,"2",,terminal_output +5466,8222923,"TERMINAL",0,0,"2",,terminal_output +5467,8223515,"TERMINAL",0,0,"3",,terminal_output +5468,8224050,"TERMINAL",0,0,"4",,terminal_output +5469,8224541,"TERMINAL",0,0,"4",,terminal_output +5470,8224990,"TERMINAL",0,0,"5",,terminal_output +5471,8225463,"TERMINAL",0,0,"5",,terminal_output +5472,8226024,"TERMINAL",0,0,"6",,terminal_output +5473,8226494,"TERMINAL",0,0,"6",,terminal_output +5474,8227124,"TERMINAL",0,0,"7",,terminal_output +5475,8227635,"TERMINAL",0,0,"7",,terminal_output +5476,8228107,"TERMINAL",0,0,"8",,terminal_output +5477,8228554,"TERMINAL",0,0,"8",,terminal_output +5478,8229169,"TERMINAL",0,0,"9",,terminal_output +5479,8229579,"TERMINAL",0,0,"9",,terminal_output +5480,8230195,"TERMINAL",0,0,"40",,terminal_output +5481,8230573,"TERMINAL",0,0,"40",,terminal_output +5482,8231215,"TERMINAL",0,0,"1",,terminal_output +5483,8231673,"TERMINAL",0,0,"1",,terminal_output +5484,8232345,"TERMINAL",0,0,"2",,terminal_output +5485,8232657,"TERMINAL",0,0,"2",,terminal_output +5486,8233268,"TERMINAL",0,0,"3",,terminal_output +5487,8233687,"TERMINAL",0,0,"3",,terminal_output +5488,8234392,"TERMINAL",0,0,"4",,terminal_output +5489,8234829,"TERMINAL",0,0,"4",,terminal_output +5490,8235348,"TERMINAL",0,0,"5",,terminal_output +5491,8235826,"TERMINAL",0,0,"5",,terminal_output +5492,8236373,"TERMINAL",0,0,"6",,terminal_output +5493,8236806,"TERMINAL",0,0,"6",,terminal_output +5494,8237463,"TERMINAL",0,0,"7",,terminal_output +5495,8237834,"TERMINAL",0,0,"7",,terminal_output +5496,8238486,"TERMINAL",0,0,"8",,terminal_output +5497,8238870,"TERMINAL",0,0,"8",,terminal_output +5498,8239501,"TERMINAL",0,0,"9",,terminal_output +5499,8239978,"TERMINAL",0,0,"9",,terminal_output +5500,8240510,"TERMINAL",0,0,"50",,terminal_output +5501,8240967,"TERMINAL",0,0,"51",,terminal_output +5502,8241733,"TERMINAL",0,0,"1",,terminal_output +5503,8242016,"TERMINAL",0,0,"2",,terminal_output +5504,8242692,"TERMINAL",0,0,"2",,terminal_output +5505,8243068,"TERMINAL",0,0,"3",,terminal_output +5506,8243704,"TERMINAL",0,0,"3",,terminal_output +5507,8244055,"TERMINAL",0,0,"4",,terminal_output 
+5508,8244654,"TERMINAL",0,0,"4",,terminal_output +5509,8245143,"TERMINAL",0,0,"5",,terminal_output +5510,8245690,"TERMINAL",0,0,"5",,terminal_output +5511,8246134,"TERMINAL",0,0,"6",,terminal_output +5512,8246736,"TERMINAL",0,0,"6",,terminal_output +5513,8247197,"TERMINAL",0,0,"7",,terminal_output +5514,8247761,"TERMINAL",0,0,"7",,terminal_output +5515,8248318,"TERMINAL",0,0,"8",,terminal_output +5516,8248795,"TERMINAL",0,0,"8",,terminal_output +5517,8249248,"TERMINAL",0,0,"9",,terminal_output +5518,8249855,"TERMINAL",0,0,"9",,terminal_output +5519,8250347,"TERMINAL",0,0,"1:00",,terminal_output +5520,8250919,"TERMINAL",0,0,"1:00",,terminal_output +5521,8251390,"TERMINAL",0,0,"1",,terminal_output +5522,8252005,"TERMINAL",0,0,"1",,terminal_output +5523,8252413,"TERMINAL",0,0,"2",,terminal_output +5524,8253035,"TERMINAL",0,0,"3",,terminal_output +5525,8253437,"TERMINAL",0,0,"3",,terminal_output +5526,8254058,"TERMINAL",0,0,"4",,terminal_output +5527,8254463,"TERMINAL",0,0,"4",,terminal_output +5528,8255012,"TERMINAL",0,0,"5",,terminal_output +5529,8255463,"TERMINAL",0,0,"5",,terminal_output +5530,8256040,"TERMINAL",0,0,"6",,terminal_output +5531,8256510,"TERMINAL",0,0,"6",,terminal_output +5532,8257074,"TERMINAL",0,0,"710",,terminal_output +5533,8257637,"TERMINAL",0,0,"7",,terminal_output +5534,8258210,"TERMINAL",0,0,"8",,terminal_output +5535,8258651,"TERMINAL",0,0,"8",,terminal_output +5536,8259283,"TERMINAL",0,0,"9",,terminal_output +5537,8259647,"TERMINAL",0,0,"9",,terminal_output +5538,8260202,"TERMINAL",0,0,"10",,terminal_output +5539,8260654,"TERMINAL",0,0,"10",,terminal_output +5540,8261233,"TERMINAL",0,0,"1",,terminal_output +5541,8261685,"TERMINAL",0,0,"1",,terminal_output +5542,8262261,"TERMINAL",0,0,"2",,terminal_output +5543,8262722,"TERMINAL",0,0,"2",,terminal_output +5544,8263382,"TERMINAL",0,0,"3",,terminal_output +5545,8263761,"TERMINAL",0,0,"3",,terminal_output +5546,8264493,"TERMINAL",0,0,"4",,terminal_output +5547,8264828,"TERMINAL",0,0,"4",,terminal_output +5548,8265508,"TERMINAL",0,0,"5",,terminal_output +5549,8265878,"TERMINAL",0,0,"5",,terminal_output +5550,8266441,"TERMINAL",0,0,"6",,terminal_output +5551,8266892,"TERMINAL",0,0,"6",,terminal_output +5552,8267496,"TERMINAL",0,0,"7",,terminal_output +5553,8267984,"TERMINAL",0,0,"7",,terminal_output +5554,8268594,"TERMINAL",0,0,"8",,terminal_output +5555,8269034,"TERMINAL",0,0,"9",,terminal_output +5556,8269525,"TERMINAL",0,0,"9",,terminal_output +5557,8270094,"TERMINAL",0,0,"20",,terminal_output +5558,8270562,"TERMINAL",0,0,"20",,terminal_output +5559,8270630,"TERMINAL",0,0,"watch",,terminal_focus +5560,8271058,"TERMINAL",0,0,"1",,terminal_output +5561,8271221,"TERMINAL",0,0,"salloc",,terminal_focus +5562,8271597,"TERMINAL",0,0,"1",,terminal_output +5563,8272096,"TERMINAL",0,0,"2",,terminal_output +5564,8272634,"TERMINAL",0,0,"2",,terminal_output +5565,8273139,"TERMINAL",0,0,"3",,terminal_output +5566,8273724,"TERMINAL",0,0,"3",,terminal_output +5567,8274431,"TERMINAL",0,0,"4",,terminal_output +5568,8274712,"TERMINAL",0,0,"462",,terminal_output +5569,8275463,"TERMINAL",0,0,"5",,terminal_output +5570,8275753,"TERMINAL",0,0,"5",,terminal_output +5571,8276456,"TERMINAL",0,0,"6",,terminal_output +5572,8276789,"TERMINAL",0,0,"6",,terminal_output +5573,8277669,"TERMINAL",0,0,"7",,terminal_output +5574,8277823,"TERMINAL",0,0,"7",,terminal_output +5575,8278647,"TERMINAL",0,0,"8",,terminal_output +5576,8278961,"TERMINAL",0,0,"8",,terminal_output +5577,8280043,"TERMINAL",0,0,"",,terminal_focus 
+5578,8280346,"TERMINAL",0,0,"9",,terminal_output +5579,8280347,"TERMINAL",0,0,"9",,terminal_output +5580,8280754,"TERMINAL",0,0,"30",,terminal_output +5581,8280937,"TERMINAL",0,0,"30",,terminal_output +5582,8281724,"TERMINAL",0,0,"1",,terminal_output +5583,8282011,"TERMINAL",0,0,"2",,terminal_output +5584,8282751,"TERMINAL",0,0,"2",,terminal_output +5585,8283033,"TERMINAL",0,0,"3",,terminal_output +5586,8283755,"TERMINAL",0,0,"3",,terminal_output +5587,8284051,"TERMINAL",0,0,"4",,terminal_output +5588,8284928,"TERMINAL",0,0,"watch",,terminal_focus +5589,8284928,"TERMINAL",0,0,"bash",,terminal_focus +5590,8284928,"TERMINAL",0,0,"watch",,terminal_focus +5591,8285010,"TERMINAL",0,0,"4",,terminal_output +5592,8285212,"TERMINAL",0,0,"5",,terminal_output +5593,8286002,"TERMINAL",0,0,"5",,terminal_output +5594,8286134,"TERMINAL",0,0,"6",,terminal_output +5595,8286993,"TERMINAL",0,0,"6",,terminal_output +5596,8287174,"TERMINAL",0,0,"7",,terminal_output +5597,8287956,"TERMINAL",0,0,"7",,terminal_output +5598,8288267,"TERMINAL",0,0,"8",,terminal_output +5599,8288990,"TERMINAL",0,0,"9",,terminal_output +5600,8289230,"TERMINAL",0,0,"9",,terminal_output +5601,8290027,"TERMINAL",0,0,"40",,terminal_output +5602,8290269,"TERMINAL",0,0,"40",,terminal_output +5603,8291067,"TERMINAL",0,0,"1",,terminal_output +5604,8291328,"TERMINAL",0,0,"1",,terminal_output +5605,8292119,"TERMINAL",0,0,"2",,terminal_output +5606,8292458,"TERMINAL",0,0,"2",,terminal_output +5607,8293135,"TERMINAL",0,0,"3",,terminal_output +5608,8293388,"TERMINAL",0,0,"3",,terminal_output +5609,8294181,"TERMINAL",0,0,"4",,terminal_output +5610,8294423,"TERMINAL",0,0,"4",,terminal_output +5611,8295209,"TERMINAL",0,0,"salloc --time=05:00:00 --partition=accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +5612,8295281,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3519404\r\nsalloc: job 3519404 queued and waiting for resources\r\n",,terminal_output +5613,8295299,"TERMINAL",0,0,"\r53519404 accelerat interact tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +5614,8295461,"TERMINAL",0,0,"5",,terminal_output +5615,8296263,"TERMINAL",0,0,"6",,terminal_output +5616,8296549,"TERMINAL",0,0,"6",,terminal_output +5617,8297370,"TERMINAL",0,0,"7",,terminal_output +5618,8297534,"TERMINAL",0,0,"7",,terminal_output +5619,8298360,"TERMINAL",0,0,"8",,terminal_output +5620,8298577,"TERMINAL",0,0,"8",,terminal_output +5621,8299447,"TERMINAL",0,0,"9",,terminal_output +5622,8299625,"TERMINAL",0,0,"9",,terminal_output +5623,8300460,"TERMINAL",0,0,"50",,terminal_output +5624,8300599,"TERMINAL",0,0,"salloc",,terminal_focus +5625,8300663,"TERMINAL",0,0,"50",,terminal_output +5626,8301568,"TERMINAL",0,0,"1",,terminal_output +5627,8301690,"TERMINAL",0,0,"1",,terminal_output +5628,8302605,"TERMINAL",0,0,"2",,terminal_output +5629,8302740,"TERMINAL",0,0,"2",,terminal_output +5630,8303704,"TERMINAL",0,0,"3",,terminal_output +5631,8303768,"TERMINAL",0,0,"3",,terminal_output +5632,8304638,"TERMINAL",0,0,"4",,terminal_output +5633,8304806,"TERMINAL",0,0,"4",,terminal_output +5634,8305664,"TERMINAL",0,0,"5",,terminal_output +5635,8305843,"TERMINAL",0,0,"5",,terminal_output +5636,8306702,"TERMINAL",0,0,"6",,terminal_output +5637,8306967,"TERMINAL",0,0,"6",,terminal_output +5638,8307743,"TERMINAL",0,0,"7",,terminal_output +5639,8307923,"TERMINAL",0,0,"7",,terminal_output +5640,8308781,"TERMINAL",0,0,"8",,terminal_output +5641,8308954,"TERMINAL",0,0,"9",,terminal_output +5642,8309861,"TERMINAL",0,0,"9",,terminal_output 
+5643,8309994,"TERMINAL",0,0,"2:00",,terminal_output +5644,8310871,"TERMINAL",0,0,"2:00",,terminal_output +5645,8311034,"TERMINAL",0,0,"1",,terminal_output +5646,8311982,"jasmine/sample.py",0,0,"",python,tab +5647,8312254,"TERMINAL",0,0,"1",,terminal_output +5648,8312254,"TERMINAL",0,0,"2",,terminal_output +5649,8313178,"TERMINAL",0,0,"3",,terminal_output +5650,8313292,"TERMINAL",0,0,"3",,terminal_output +5651,8314011,"TERMINAL",0,0,"4",,terminal_output +5652,8314141,"TERMINAL",0,0,"4",,terminal_output +5653,8315082,"TERMINAL",0,0,"5",,terminal_output +5654,8315191,"TERMINAL",0,0,"5",,terminal_output +5655,8315543,"data/pyproject.toml",0,0,"",plaintext,tab +5656,8316175,"TERMINAL",0,0,"6",,terminal_output +5657,8316259,"TERMINAL",0,0,"6",,terminal_output +5658,8316682,"jasmine/sample.py",0,0,"",python,tab +5659,8317148,"TERMINAL",0,0,"7",,terminal_output +5660,8317259,"TERMINAL",0,0,"7",,terminal_output +5661,8318257,"TERMINAL",0,0,"8",,terminal_output +5662,8318309,"TERMINAL",0,0,"8",,terminal_output +5663,8319226,"TERMINAL",0,0,"9",,terminal_output +5664,8319333,"TERMINAL",0,0,"9",,terminal_output +5665,8320482,"TERMINAL",0,0,"10",,terminal_output +5666,8320482,"TERMINAL",0,0,"10",,terminal_output +5667,8321440,"TERMINAL",0,0,"1",,terminal_output +5668,8321488,"TERMINAL",0,0,"1",,terminal_output +5669,8322477,"TERMINAL",0,0,"2",,terminal_output +5670,8322477,"TERMINAL",0,0,"2",,terminal_output +5671,8323558,"TERMINAL",0,0,"3",,terminal_output +5672,8323559,"TERMINAL",0,0,"3",,terminal_output +5673,8324458,"TERMINAL",0,0,"4",,terminal_output +5674,8324521,"TERMINAL",0,0,"4",,terminal_output +5675,8325504,"TERMINAL",0,0,"5",,terminal_output +5676,8325633,"TERMINAL",0,0,"5",,terminal_output +5677,8326591,"TERMINAL",0,0,"6",,terminal_output +5678,8326634,"TERMINAL",0,0,"6",,terminal_output +5679,8327582,"TERMINAL",0,0,"7",,terminal_output +5680,8327652,"TERMINAL",0,0,"7",,terminal_output +5681,8328393,"jasmine/sample.py",6884,0,"",python,selection_mouse +5682,8328408,"jasmine/sample.py",6883,0,"",python,selection_command +5683,8328617,"TERMINAL",0,0,"8",,terminal_output +5684,8328682,"TERMINAL",0,0,"8",,terminal_output +5685,8329734,"TERMINAL",0,0,"9",,terminal_output +5686,8329734,"TERMINAL",0,0,"9",,terminal_output +5687,8330702,"TERMINAL",0,0,"20",,terminal_output +5688,8330762,"TERMINAL",0,0,"20",,terminal_output +5689,8331743,"TERMINAL",0,0,"1",,terminal_output +5690,8331801,"TERMINAL",0,0,"1",,terminal_output +5691,8332786,"TERMINAL",0,0,"2",,terminal_output +5692,8332847,"TERMINAL",0,0,"2",,terminal_output +5693,8333832,"TERMINAL",0,0,"3",,terminal_output +5694,8333897,"TERMINAL",0,0,"3",,terminal_output +5695,8334951,"TERMINAL",0,0,"4",,terminal_output +5696,8334951,"TERMINAL",0,0,"4",,terminal_output +5697,8335917,"TERMINAL",0,0,"5",,terminal_output +5698,8335931,"TERMINAL",0,0,"5",,terminal_output +5699,8336975,"TERMINAL",0,0,"7",,terminal_output +5700,8337003,"TERMINAL",0,0,"7",,terminal_output +5701,8338045,"TERMINAL",0,0,"8",,terminal_output +5702,8338085,"TERMINAL",0,0,"8",,terminal_output +5703,8339040,"TERMINAL",0,0,"9",,terminal_output +5704,8339056,"TERMINAL",0,0,"9",,terminal_output +5705,8340259,"TERMINAL",0,0,"30",,terminal_output +5706,8340259,"TERMINAL",0,0,"30",,terminal_output +5707,8341199,"TERMINAL",0,0,"1",,terminal_output +5708,8341199,"TERMINAL",0,0,"1",,terminal_output +5709,8342324,"TERMINAL",0,0,"2",,terminal_output +5710,8342324,"TERMINAL",0,0,"2",,terminal_output +5711,8343459,"TERMINAL",0,0,"3",,terminal_output 
+5712,8343460,"TERMINAL",0,0,"3",,terminal_output +5713,8344378,"TERMINAL",0,0,"4",,terminal_output +5714,8344379,"TERMINAL",0,0,"4",,terminal_output +5715,8345354,"TERMINAL",0,0,"5",,terminal_output +5716,8345354,"TERMINAL",0,0,"5",,terminal_output +5717,8346385,"TERMINAL",0,0,"6",,terminal_output +5718,8346386,"TERMINAL",0,0,"6",,terminal_output +5719,8347351,"TERMINAL",0,0,"7",,terminal_output +5720,8347409,"TERMINAL",0,0,"7",,terminal_output +5721,8348391,"TERMINAL",0,0,"86",,terminal_output +5722,8348453,"TERMINAL",0,0,"8",,terminal_output +5723,8349491,"TERMINAL",0,0,"9",,terminal_output +5724,8349505,"TERMINAL",0,0,"9",,terminal_output +5725,8350494,"TERMINAL",0,0,"403",,terminal_output +5726,8350554,"TERMINAL",0,0,"40",,terminal_output +5727,8351540,"TERMINAL",0,0,"1",,terminal_output +5728,8351557,"TERMINAL",0,0,"1",,terminal_output +5729,8352567,"TERMINAL",0,0,"2",,terminal_output +5730,8352674,"TERMINAL",0,0,"2",,terminal_output +5731,8353612,"TERMINAL",0,0,"3",,terminal_output +5732,8353668,"TERMINAL",0,0,"3",,terminal_output +5733,8354807,"TERMINAL",0,0,"4",,terminal_output +5734,8354808,"TERMINAL",0,0,"4",,terminal_output +5735,8355657,"TERMINAL",0,0,"5",,terminal_output +5736,8355718,"TERMINAL",0,0,"5",,terminal_output +5737,8356696,"TERMINAL",0,0,"6",,terminal_output +5738,8356793,"TERMINAL",0,0,"6",,terminal_output +5739,8357781,"TERMINAL",0,0,"7",,terminal_output +5740,8357796,"TERMINAL",0,0,"7",,terminal_output +5741,8358830,"TERMINAL",0,0,"8",,terminal_output +5742,8358935,"TERMINAL",0,0,"8",,terminal_output +5743,8359806,"TERMINAL",0,0,"9",,terminal_output +5744,8359912,"TERMINAL",0,0,"9",,terminal_output +5745,8360861,"TERMINAL",0,0,"50",,terminal_output +5746,8360920,"TERMINAL",0,0,"50",,terminal_output +5747,8361985,"TERMINAL",0,0,"1",,terminal_output +5748,8361986,"TERMINAL",0,0,"2",,terminal_output +5749,8362923,"TERMINAL",0,0,"2",,terminal_output +5750,8363020,"TERMINAL",0,0,"3",,terminal_output +5751,8364027,"TERMINAL",0,0,"4",,terminal_output +5752,8364071,"TERMINAL",0,0,"4",,terminal_output +5753,8365041,"TERMINAL",0,0,"5",,terminal_output +5754,8365093,"TERMINAL",0,0,"5",,terminal_output +5755,8366119,"TERMINAL",0,0,"6",,terminal_output +5756,8366133,"TERMINAL",0,0,"6",,terminal_output +5757,8367104,"TERMINAL",0,0,"7",,terminal_output +5758,8367161,"TERMINAL",0,0,"7",,terminal_output +5759,8368168,"TERMINAL",0,0,"8",,terminal_output +5760,8368187,"TERMINAL",0,0,"8",,terminal_output +5761,8369152,"TERMINAL",0,0,"9",,terminal_output +5762,8369260,"TERMINAL",0,0,"9",,terminal_output +5763,8370283,"TERMINAL",0,0,"3:00",,terminal_output +5764,8370292,"TERMINAL",0,0,"3:00",,terminal_output +5765,8371644,"TERMINAL",0,0,"",,terminal_focus +5766,8371771,"TERMINAL",0,0,"1",,terminal_output +5767,8371771,"TERMINAL",0,0,"1",,terminal_output +5768,8372366,"TERMINAL",0,0,"2",,terminal_output +5769,8372372,"TERMINAL",0,0,"2",,terminal_output +5770,8373402,"TERMINAL",0,0,"3",,terminal_output +5771,8373402,"TERMINAL",0,0,"3",,terminal_output +5772,8374663,"TERMINAL",0,0,"watch",,terminal_focus +5773,8374663,"TERMINAL",0,0,"bash",,terminal_focus +5774,8374663,"TERMINAL",0,0,"watch",,terminal_focus +5775,8374665,"TERMINAL",0,0,"4",,terminal_output +5776,8374665,"TERMINAL",0,0,"4",,terminal_output +5777,8375484,"TERMINAL",0,0,"5",,terminal_output +5778,8375484,"TERMINAL",0,0,"5",,terminal_output +5779,8376416,"TERMINAL",0,0,"6",,terminal_output +5780,8376523,"TERMINAL",0,0,"6",,terminal_output +5781,8377709,"TERMINAL",0,0,"7",,terminal_output 
+5782,8377709,"TERMINAL",0,0,"7",,terminal_output +5783,8378496,"TERMINAL",0,0,"8",,terminal_output +5784,8378603,"TERMINAL",0,0,"8",,terminal_output +5785,8379140,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated-h100 --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +5786,8379202,"TERMINAL",0,0,"]633;Csalloc: Pending job allocation 3519405\r\nsalloc: job 3519405 queued and waiting for resources\r\n",,terminal_output +5787,8379591,"TERMINAL",0,0,"9",,terminal_output +5788,8379637,"TERMINAL",0,0,"93519405 dev_accel interact tum_cte0 PD\t0:00\t 1 (Resources)",,terminal_output +5789,8380572,"TERMINAL",0,0,"10",,terminal_output +5790,8380679,"TERMINAL",0,0,"10",,terminal_output +5791,8381681,"TERMINAL",0,0,"1",,terminal_output +5792,8381696,"TERMINAL",0,0,"1",,terminal_output +5793,8382667,"TERMINAL",0,0,"2",,terminal_output +5794,8382776,"TERMINAL",0,0,"2",,terminal_output +5795,8383693,"TERMINAL",0,0,"3",,terminal_output +5796,8383802,"TERMINAL",0,0,"3",,terminal_output +5797,8384791,"TERMINAL",0,0,"4",,terminal_output +5798,8384822,"TERMINAL",0,0,"4",,terminal_output +5799,8385755,"TERMINAL",0,0,"5",,terminal_output +5800,8385863,"TERMINAL",0,0,"5",,terminal_output +5801,8386844,"TERMINAL",0,0,"6",,terminal_output +5802,8386897,"TERMINAL",0,0,"6",,terminal_output +5803,8387893,"TERMINAL",0,0,"7",,terminal_output +5804,8387947,"TERMINAL",0,0,"7",,terminal_output +5805,8388872,"TERMINAL",0,0,"8",,terminal_output +5806,8388980,"TERMINAL",0,0,"9",,terminal_output +5807,8390052,"TERMINAL",0,0,"9",,terminal_output +5808,8390053,"TERMINAL",0,0,"20",,terminal_output +5809,8390944,"TERMINAL",0,0,"21",,terminal_output +5810,8391051,"TERMINAL",0,0,"1",,terminal_output +5811,8392088,"TERMINAL",0,0,"2",,terminal_output +5812,8392097,"TERMINAL",0,0,"2",,terminal_output +5813,8393112,"TERMINAL",0,0,"3",,terminal_output +5814,8393127,"TERMINAL",0,0,"3",,terminal_output +5815,8394138,"TERMINAL",0,0,"4",,terminal_output +5816,8394192,"TERMINAL",0,0,"4",,terminal_output +5817,8394672,"TERMINAL",0,0,"salloc: job 3519404 has been allocated resources\r\nsalloc: Granted job allocation 3519404\r\nsalloc: Waiting for resource configuration\r\n",,terminal_output +5818,8395163,"TERMINAL",0,0,"5",,terminal_output +5819,8395218,"TERMINAL",0,0,"5 R1hkn0408",,terminal_output +5820,8396497,"TERMINAL",0,0,"6",,terminal_output +5821,8396507,"TERMINAL",0,0,"62",,terminal_output +5822,8397312,"TERMINAL",0,0,"7",,terminal_output +5823,8397367,"TERMINAL",0,0,"73",,terminal_output +5824,8398214,"TERMINAL",0,0,"8",,terminal_output +5825,8398336,"TERMINAL",0,0,"84",,terminal_output +5826,8399257,"TERMINAL",0,0,"9",,terminal_output +5827,8399382,"TERMINAL",0,0,"95",,terminal_output +5828,8400362,"TERMINAL",0,0,"30",,terminal_output +5829,8400459,"TERMINAL",0,0,"306",,terminal_output +5830,8401407,"TERMINAL",0,0,"1",,terminal_output +5831,8401462,"TERMINAL",0,0,"17",,terminal_output +5832,8402434,"TERMINAL",0,0,"2",,terminal_output +5833,8402553,"TERMINAL",0,0,"28",,terminal_output +5834,8403396,"TERMINAL",0,0,"3",,terminal_output +5835,8403538,"TERMINAL",0,0,"39",,terminal_output +5836,8404481,"TERMINAL",0,0,"4",,terminal_output +5837,8404589,"TERMINAL",0,0,"410",,terminal_output +5838,8405470,"TERMINAL",0,0,"5",,terminal_output +5839,8405611,"TERMINAL",0,0,"51",,terminal_output +5840,8406511,"TERMINAL",0,0,"6",,terminal_output +5841,8406644,"TERMINAL",0,0,"62",,terminal_output +5842,8407657,"TERMINAL",0,0,"7",,terminal_output +5843,8407789,"TERMINAL",0,0,"73",,terminal_output 
+5844,8408679,"TERMINAL",0,0,"8",,terminal_output +5845,8408785,"TERMINAL",0,0,"84",,terminal_output +5846,8409618,"TERMINAL",0,0,"9",,terminal_output +5847,8409763,"TERMINAL",0,0,"95",,terminal_output +5848,8410655,"TERMINAL",0,0,"40",,terminal_output +5849,8410924,"TERMINAL",0,0,"406",,terminal_output +5850,8411694,"TERMINAL",0,0,"1",,terminal_output +5851,8411934,"TERMINAL",0,0,"17",,terminal_output +5852,8412912,"TERMINAL",0,0,"2",,terminal_output +5853,8413032,"TERMINAL",0,0,"29",,terminal_output +5854,8413775,"TERMINAL",0,0,"3",,terminal_output +5855,8413961,"TERMINAL",0,0,"420",,terminal_output +5856,8414811,"TERMINAL",0,0,"4",,terminal_output +5857,8415183,"TERMINAL",0,0,"51",,terminal_output +5858,8415853,"TERMINAL",0,0,"5",,terminal_output +5859,8416169,"TERMINAL",0,0,"62",,terminal_output +5860,8416961,"TERMINAL",0,0,"6",,terminal_output +5861,8417202,"TERMINAL",0,0,"73",,terminal_output +5862,8417997,"TERMINAL",0,0,"7",,terminal_output +5863,8418152,"TERMINAL",0,0,"84",,terminal_output +5864,8419133,"TERMINAL",0,0,"9",,terminal_output +5865,8419238,"TERMINAL",0,0,"95",,terminal_output +5866,8420155,"TERMINAL",0,0,"50",,terminal_output +5867,8420218,"TERMINAL",0,0,"506",,terminal_output +5868,8421123,"TERMINAL",0,0,"1",,terminal_output +5869,8421328,"TERMINAL",0,0,"17",,terminal_output +5870,8421731,"TERMINAL",0,0,"salloc: Nodes hkn0408 are ready for job\r\n",,terminal_output +5871,8422106,"TERMINAL",0,0,"2",,terminal_output +5872,8422304,"TERMINAL",0,0,"28",,terminal_output +5873,8422625,"TERMINAL",0,0,"]0;tum_cte0515@hkn0408:~/Projects/jasmine[?2004h[tum_cte0515@hkn0408 jasmine]$ ",,terminal_output +5874,8423139,"TERMINAL",0,0,"3",,terminal_output +5875,8423505,"TERMINAL",0,0,"39",,terminal_output +5876,8424246,"TERMINAL",0,0,"4",,terminal_output +5877,8424394,"TERMINAL",0,0,"430",,terminal_output +5878,8425279,"TERMINAL",0,0,"5",,terminal_output +5879,8425455,"TERMINAL",0,0,"51",,terminal_output +5880,8426377,"TERMINAL",0,0,"6",,terminal_output +5881,8426481,"TERMINAL",0,0,"62",,terminal_output +5882,8427515,"TERMINAL",0,0,"7",,terminal_output +5883,8427559,"TERMINAL",0,0,"73",,terminal_output +5884,8428483,"TERMINAL",0,0,"8",,terminal_output +5885,8428567,"TERMINAL",0,0,"84",,terminal_output +5886,8429419,"TERMINAL",0,0,"9",,terminal_output +5887,8429529,"TERMINAL",0,0,"salloc",,terminal_focus +5888,8429670,"TERMINAL",0,0,"95",,terminal_output +5889,8430402,"TERMINAL",0,0,"4:00",,terminal_output +5890,8430665,"TERMINAL",0,0,"4:006",,terminal_output +5891,8431430,"TERMINAL",0,0,"1",,terminal_output +5892,8431882,"TERMINAL",0,0,"17",,terminal_output +5893,8432447,"TERMINAL",0,0,">",,terminal_output +5894,8432465,"TERMINAL",0,0,"2",,terminal_output +5895,8432697,"TERMINAL",0,0,"28",,terminal_output +5896,8432933,"TERMINAL",0,0,"srun",,terminal_focus +5897,8433500,"TERMINAL",0,0,"3",,terminal_output +5898,8433785,"TERMINAL",0,0,"39",,terminal_output +5899,8434589,"TERMINAL",0,0,"4",,terminal_output +5900,8434598,"TERMINAL",0,0,"s",,terminal_output +5901,8434675,"TERMINAL",0,0,"o",,terminal_output +5902,8434808,"TERMINAL",0,0,"u",,terminal_output +5903,8434809,"TERMINAL",0,0,"440",,terminal_output +5904,8434870,"TERMINAL",0,0,"r",,terminal_output +5905,8435052,"TERMINAL",0,0,"c",,terminal_output +5906,8435195,"TERMINAL",0,0,"e",,terminal_output +5907,8435279,"TERMINAL",0,0," ",,terminal_output +5908,8435387,"TERMINAL",0,0,".v",,terminal_output +5909,8435567,"TERMINAL",0,0,"env/",,terminal_output +5910,8435577,"TERMINAL",0,0,"5",,terminal_output 
+5911,8435725,"TERMINAL",0,0,"b",,terminal_output +5912,8435856,"TERMINAL",0,0,"51",,terminal_output +5913,8435856,"TERMINAL",0,0,"in/",,terminal_output +5914,8436212,"TERMINAL",0,0,"ac",,terminal_output +5915,8436796,"TERMINAL",0,0,"6",,terminal_output +5916,8436810,"TERMINAL",0,0,"tivate",,terminal_output +5917,8436905,"TERMINAL",0,0,"62",,terminal_output +5918,8437167,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0408:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0408 jasmine]$ ",,terminal_output +5919,8437815,"TERMINAL",0,0,"7",,terminal_output +5920,8437910,"TERMINAL",0,0,"74",,terminal_output +5921,8438436,"TERMINAL",0,0,"salloc",,terminal_focus +5922,8438830,"TERMINAL",0,0,"8",,terminal_output +5923,8438951,"TERMINAL",0,0,"95",,terminal_output +5924,8439660,"TERMINAL",0,0,"^Csalloc: Job allocation 3519405 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +5925,8439771,"TERMINAL",0,0,"9",,terminal_output +5926,8439979,"TERMINAL",0,0,"\r106",,terminal_output +5927,8440884,"TERMINAL",0,0,"salloc",,terminal_focus +5928,8440885,"TERMINAL",0,0,"10",,terminal_output +5929,8441095,"TERMINAL",0,0,"17",,terminal_output +5930,8441914,"TERMINAL",0,0,"1",,terminal_output +5931,8442155,"TERMINAL",0,0,"28",,terminal_output +5932,8442517,"TERMINAL",0,0,"^Csalloc: Job allocation 3519398 has been revoked.\r\nsalloc: Job aborted due to signal\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +5933,8442838,"TERMINAL",0,0,"2",,terminal_output +5934,8443136,"TERMINAL",0,0,"\r39",,terminal_output +5935,8444004,"TERMINAL",0,0,"3",,terminal_output +5936,8444144,"TERMINAL",0,0,"450",,terminal_output +5937,8444954,"TERMINAL",0,0,"4",,terminal_output +5938,8445236,"TERMINAL",0,0,"51",,terminal_output +5939,8445955,"TERMINAL",0,0,"6",,terminal_output +5940,8446267,"TERMINAL",0,0,"62",,terminal_output +5941,8446980,"TERMINAL",0,0,"7",,terminal_output +5942,8447263,"TERMINAL",0,0,"73",,terminal_output +5943,8447509,"TERMINAL",0,0,"watch",,terminal_focus +5944,8447510,"TERMINAL",0,0,"bash",,terminal_focus +5945,8448070,"TERMINAL",0,0,"8",,terminal_output +5946,8448361,"TERMINAL",0,0,"84",,terminal_output +5947,8449053,"TERMINAL",0,0,"9",,terminal_output +5948,8449435,"TERMINAL",0,0,"95",,terminal_output +5949,8450150,"TERMINAL",0,0,"20",,terminal_output +5950,8450457,"TERMINAL",0,0,"206",,terminal_output +5951,8450617,"TERMINAL",0,0,"watch",,terminal_focus +5952,8450617,"TERMINAL",0,0,"srun",,terminal_focus +5953,8451130,"TERMINAL",0,0,"1",,terminal_output +5954,8451439,"TERMINAL",0,0,"17",,terminal_output +5955,8452199,"TERMINAL",0,0,"2",,terminal_output +5956,8452506,"TERMINAL",0,0,"28",,terminal_output +5957,8453325,"TERMINAL",0,0,"3",,terminal_output +5958,8453524,"TERMINAL",0,0,"39",,terminal_output +5959,8454459,"TERMINAL",0,0,"4",,terminal_output +5960,8454559,"TERMINAL",0,0,"41:00",,terminal_output +5961,8455349,"TERMINAL",0,0,"5",,terminal_output +5962,8455600,"TERMINAL",0,0,"51",,terminal_output +5963,8456406,"TERMINAL",0,0,"6",,terminal_output +5964,8456732,"TERMINAL",0,0,"62",,terminal_output +5965,8457420,"TERMINAL",0,0,"7",,terminal_output +5966,8457734,"TERMINAL",0,0,"73",,terminal_output +5967,8458391,"TERMINAL",0,0,"8",,terminal_output +5968,8458814,"TERMINAL",0,0,"84",,terminal_output +5969,8459471,"TERMINAL",0,0,"9",,terminal_output +5970,8459878,"TERMINAL",0,0,"95",,terminal_output +5971,8460460,"TERMINAL",0,0,"30",,terminal_output +5972,8460907,"TERMINAL",0,0,"306",,terminal_output 
+5973,8461518,"TERMINAL",0,0,"1",,terminal_output +5974,8461949,"TERMINAL",0,0,"17",,terminal_output +5975,8462614,"TERMINAL",0,0,"2",,terminal_output +5976,8462922,"TERMINAL",0,0,"29",,terminal_output +5977,8463569,"TERMINAL",0,0,"3",,terminal_output +5978,8463979,"TERMINAL",0,0,"410",,terminal_output +5979,8464693,"TERMINAL",0,0,"4",,terminal_output +5980,8465010,"TERMINAL",0,0,"51",,terminal_output +5981,8465638,"TERMINAL",0,0,"5",,terminal_output +5982,8466091,"TERMINAL",0,0,"62",,terminal_output +5983,8466753,"TERMINAL",0,0,"6",,terminal_output +5984,8467062,"TERMINAL",0,0,"73",,terminal_output +5985,8467834,"TERMINAL",0,0,"7",,terminal_output +5986,8468190,"TERMINAL",0,0,"84",,terminal_output +5987,8468792,"TERMINAL",0,0,"8",,terminal_output +5988,8469196,"TERMINAL",0,0,"95",,terminal_output +5989,8469808,"TERMINAL",0,0,"9",,terminal_output +5990,8470223,"TERMINAL",0,0,"406",,terminal_output +5991,8470869,"TERMINAL",0,0,"40",,terminal_output +5992,8471200,"TERMINAL",0,0,"17",,terminal_output +5993,8471913,"TERMINAL",0,0,"1",,terminal_output +5994,8472236,"TERMINAL",0,0,"28",,terminal_output +5995,8472919,"TERMINAL",0,0,"2",,terminal_output +5996,8473378,"TERMINAL",0,0,"39",,terminal_output +5997,8473929,"TERMINAL",0,0,"3",,terminal_output +5998,8474423,"TERMINAL",0,0,"420",,terminal_output +5999,8475038,"TERMINAL",0,0,"5",,terminal_output +6000,8475360,"TERMINAL",0,0,"51",,terminal_output +6001,8476012,"TERMINAL",0,0,"6",,terminal_output +6002,8476395,"TERMINAL",0,0,"62",,terminal_output +6003,8477071,"TERMINAL",0,0,"7",,terminal_output +6004,8477493,"TERMINAL",0,0,"73",,terminal_output +6005,8478192,"TERMINAL",0,0,"8",,terminal_output +6006,8478518,"TERMINAL",0,0,"84",,terminal_output +6007,8479255,"TERMINAL",0,0,"9",,terminal_output +6008,8479527,"TERMINAL",0,0,"95",,terminal_output +6009,8480169,"TERMINAL",0,0,"50",,terminal_output +6010,8480572,"TERMINAL",0,0,"506",,terminal_output +6011,8481278,"TERMINAL",0,0,"1",,terminal_output +6012,8481596,"TERMINAL",0,0,"17",,terminal_output +6013,8482260,"TERMINAL",0,0,"2",,terminal_output +6014,8482714,"TERMINAL",0,0,"28",,terminal_output +6015,8483371,"TERMINAL",0,0,"3",,terminal_output +6016,8483684,"TERMINAL",0,0,"39",,terminal_output +6017,8484328,"TERMINAL",0,0,"4",,terminal_output +6018,8484765,"TERMINAL",0,0,"430",,terminal_output +6019,8485403,"TERMINAL",0,0,"5",,terminal_output +6020,8485835,"TERMINAL",0,0,"51",,terminal_output +6021,8486405,"TERMINAL",0,0,"6",,terminal_output +6022,8486818,"TERMINAL",0,0,"62",,terminal_output +6023,8487538,"TERMINAL",0,0,"7",,terminal_output +6024,8487826,"TERMINAL",0,0,"73",,terminal_output +6025,8488560,"TERMINAL",0,0,"8",,terminal_output +6026,8488960,"TERMINAL",0,0,"84",,terminal_output +6027,8489575,"TERMINAL",0,0,"9",,terminal_output +6028,8489983,"TERMINAL",0,0,"96",,terminal_output +6029,8490543,"TERMINAL",0,0,"5:00",,terminal_output +6030,8490943,"TERMINAL",0,0,"5:017",,terminal_output +6031,8491663,"TERMINAL",0,0,"1",,terminal_output +6032,8492210,"TERMINAL",0,0,"28",,terminal_output +6033,8492733,"TERMINAL",0,0,"2",,terminal_output +6034,8493155,"TERMINAL",0,0,"39",,terminal_output +6035,8493726,"TERMINAL",0,0,"3",,terminal_output +6036,8494086,"TERMINAL",0,0,"440",,terminal_output +6037,8494694,"TERMINAL",0,0,"4",,terminal_output +6038,8495116,"TERMINAL",0,0,"51",,terminal_output +6039,8495735,"TERMINAL",0,0,"5",,terminal_output +6040,8496158,"TERMINAL",0,0,"62",,terminal_output +6041,8496807,"TERMINAL",0,0,"6",,terminal_output +6042,8497257,"TERMINAL",0,0,"73",,terminal_output 
+6043,8497817,"TERMINAL",0,0,"7",,terminal_output +6044,8498251,"TERMINAL",0,0,"84",,terminal_output +6045,8498854,"TERMINAL",0,0,"8",,terminal_output +6046,8499394,"TERMINAL",0,0,"95",,terminal_output +6047,8500003,"TERMINAL",0,0,"9",,terminal_output +6048,8500348,"TERMINAL",0,0,"106",,terminal_output +6049,8501079,"TERMINAL",0,0,"10",,terminal_output +6050,8501366,"TERMINAL",0,0,"17",,terminal_output +6051,8502075,"TERMINAL",0,0,"2",,terminal_output +6052,8502477,"TERMINAL",0,0,"28",,terminal_output +6053,8503088,"jasmine/sample.py",0,0,"",python,tab +6054,8503089,"jasmine/sample.py",7229,0,"",python,selection_mouse +6055,8503213,"jasmine/sample.py",7228,0,"",python,selection_command +6056,8503263,"TERMINAL",0,0,"3",,terminal_output +6057,8503448,"TERMINAL",0,0,"39",,terminal_output +6058,8503717,"jasmine/sample.py",6884,0,"",python,selection_mouse +6059,8503726,"jasmine/sample.py",6883,0,"",python,selection_command +6060,8504086,"TERMINAL",0,0,"4",,terminal_output +6061,8504504,"jasmine/sample.py",6884,0,"\n ",python,content +6062,8504525,"TERMINAL",0,0,"450",,terminal_output +6063,8505080,"TERMINAL",0,0,"5",,terminal_output +6064,8505528,"TERMINAL",0,0,"51",,terminal_output +6065,8506329,"TERMINAL",0,0,"6",,terminal_output +6066,8506570,"TERMINAL",0,0,"62",,terminal_output +6067,8506657,"jasmine/sample.py",6885,4,"",python,content +6068,8506794,"jasmine/sample.py",6872,0,"",python,selection_command +6069,8507160,"TERMINAL",0,0,"7",,terminal_output +6070,8507270,"jasmine/sample.py",6885,0,"",python,selection_command +6071,8507768,"TERMINAL",0,0,"73",,terminal_output +6072,8508213,"TERMINAL",0,0,"8",,terminal_output +6073,8508732,"TERMINAL",0,0,"84",,terminal_output +6074,8509214,"jasmine/sample.py",6885,0," ",python,content +6075,8509231,"TERMINAL",0,0,"9",,terminal_output +6076,8509698,"TERMINAL",0,0,"95",,terminal_output +6077,8510374,"jasmine/sample.py",6889,0,"s",python,content +6078,8510374,"jasmine/sample.py",6890,0,"",python,selection_keyboard +6079,8510375,"TERMINAL",0,0,"20",,terminal_output +6080,8510451,"jasmine/sample.py",6890,0,"s",python,content +6081,8510454,"jasmine/sample.py",6891,0,"",python,selection_keyboard +6082,8510513,"jasmine/sample.py",6891,0,"i",python,content +6083,8510514,"jasmine/sample.py",6892,0,"",python,selection_keyboard +6084,8510590,"jasmine/sample.py",6892,0,"m",python,content +6085,8510591,"jasmine/sample.py",6893,0,"",python,selection_keyboard +6086,8510739,"jasmine/sample.py",6893,0," ",python,content +6087,8510740,"jasmine/sample.py",6894,0,"",python,selection_keyboard +6088,8510790,"TERMINAL",0,0,"206",,terminal_output +6089,8511081,"jasmine/sample.py",6894,0,"=",python,content +6090,8511082,"jasmine/sample.py",6895,0,"",python,selection_keyboard +6091,8511085,"jasmine/sample.py",6895,0," ",python,content +6092,8511085,"jasmine/sample.py",6896,0,"",python,selection_keyboard +6093,8511346,"TERMINAL",0,0,"1",,terminal_output +6094,8511355,"jasmine/sample.py",6896,0,"s",python,content +6095,8511356,"jasmine/sample.py",6897,0,"",python,selection_keyboard +6096,8511497,"jasmine/sample.py",6897,0,"s",python,content +6097,8511497,"jasmine/sample.py",6898,0,"",python,selection_keyboard +6098,8511811,"TERMINAL",0,0,"17",,terminal_output +6099,8512361,"TERMINAL",0,0,"2",,terminal_output +6100,8512567,"jasmine/sample.py",6898,0,"i",python,content +6101,8512568,"jasmine/sample.py",6899,0,"",python,selection_keyboard +6102,8512607,"jasmine/sample.py",6899,0,"m",python,content +6103,8512608,"jasmine/sample.py",6900,0,"",python,selection_keyboard 
+6104,8512903,"jasmine/sample.py",6900,0,".",python,content +6105,8512903,"jasmine/sample.py",6901,0,"",python,selection_keyboard +6106,8512905,"TERMINAL",0,0,"28",,terminal_output +6107,8513043,"jasmine/sample.py",6901,0,"m",python,content +6108,8513044,"jasmine/sample.py",6902,0,"",python,selection_keyboard +6109,8513244,"jasmine/sample.py",6902,0,"e",python,content +6110,8513245,"jasmine/sample.py",6903,0,"",python,selection_keyboard +6111,8513400,"jasmine/sample.py",6903,0,"a",python,content +6112,8513401,"jasmine/sample.py",6904,0,"",python,selection_keyboard +6113,8513487,"jasmine/sample.py",6904,0,"n",python,content +6114,8513488,"jasmine/sample.py",6905,0,"",python,selection_keyboard +6115,8513488,"TERMINAL",0,0,"3",,terminal_output +6116,8513895,"jasmine/sample.py",6904,0,"",python,selection_command +6117,8513902,"TERMINAL",0,0,"39",,terminal_output +6118,8514438,"jasmine/sample.py",6905,0,"",python,selection_command +6119,8514516,"TERMINAL",0,0,"4",,terminal_output +6120,8514924,"jasmine/sample.py",6905,0,"()",python,content +6121,8514925,"jasmine/sample.py",6906,0,"",python,selection_keyboard +6122,8515012,"jasmine/sample.py",6906,1,")",python,content +6123,8515013,"jasmine/sample.py",6907,0,"",python,selection_keyboard +6124,8515089,"TERMINAL",0,0,"42:01",,terminal_output +6125,8515432,"jasmine/sample.py",6907,0,"m",python,content +6126,8515433,"jasmine/sample.py",6908,0,"",python,selection_keyboard +6127,8515559,"TERMINAL",0,0,"5",,terminal_output +6128,8516031,"jasmine/sample.py",6907,0,"",python,selection_command +6129,8516101,"TERMINAL",0,0,"62",,terminal_output +6130,8516524,"TERMINAL",0,0,"6",,terminal_output +6131,8516699,"jasmine/sample.py",6907,1,"",python,content +6132,8516700,"jasmine/sample.py",6906,0,"",python,selection_command +6133,8516906,"jasmine/sample.py",6883,0,"",python,selection_command +6134,8517060,"TERMINAL",0,0,"73",,terminal_output +6135,8517205,"jasmine/sample.py",6884,0,"",python,selection_command +6136,8517428,"jasmine/sample.py",6883,1,"",python,content +6137,8517595,"TERMINAL",0,0,"7",,terminal_output +6138,8517641,"jasmine/sample.py",6882,1,"",python,content +6139,8517760,"jasmine/sample.py",6881,1,"",python,content +6140,8517888,"jasmine/sample.py",6880,1,"",python,content +6141,8518035,"jasmine/sample.py",6879,1,"",python,content +6142,8518048,"TERMINAL",0,0,"84",,terminal_output +6143,8518167,"jasmine/sample.py",6878,1,"",python,content +6144,8518297,"jasmine/sample.py",6877,1,"",python,content +6145,8518492,"jasmine/sample.py",6876,0,"",python,selection_command +6146,8518581,"TERMINAL",0,0,"8",,terminal_output +6147,8519153,"TERMINAL",0,0,"95",,terminal_output +6148,8519445,"jasmine/sample.py",6877,0,"",python,selection_mouse +6149,8519456,"jasmine/sample.py",6876,0,"",python,selection_command +6150,8519603,"TERMINAL",0,0,"9",,terminal_output +6151,8520133,"TERMINAL",0,0,"306",,terminal_output +6152,8520374,"jasmine/sample.py",6877,0,"\n ",python,content +6153,8520599,"jasmine/sample.py",6882,0,"j",python,content +6154,8520600,"jasmine/sample.py",6883,0,"",python,selection_keyboard +6155,8520715,"TERMINAL",0,0,"30",,terminal_output +6156,8520805,"jasmine/sample.py",6883,0,"a",python,content +6157,8520806,"jasmine/sample.py",6884,0,"",python,selection_keyboard +6158,8520865,"jasmine/sample.py",6884,0,"x",python,content +6159,8520866,"jasmine/sample.py",6885,0,"",python,selection_keyboard +6160,8520963,"jasmine/sample.py",6885,0,".",python,content +6161,8520964,"jasmine/sample.py",6886,0,"",python,selection_keyboard 
+6162,8521211,"TERMINAL",0,0,"17",,terminal_output +6163,8521372,"jasmine/sample.py",6886,0,"d",python,content +6164,8521373,"jasmine/sample.py",6887,0,"",python,selection_keyboard +6165,8521693,"jasmine/sample.py",6887,0,"e",python,content +6166,8521693,"jasmine/sample.py",6888,0,"",python,selection_keyboard +6167,8521703,"TERMINAL",0,0,"1",,terminal_output +6168,8521805,"jasmine/sample.py",6888,0,"b",python,content +6169,8521806,"jasmine/sample.py",6889,0,"",python,selection_keyboard +6170,8521945,"jasmine/sample.py",6889,0,"u",python,content +6171,8521945,"jasmine/sample.py",6890,0,"",python,selection_keyboard +6172,8522023,"jasmine/sample.py",6890,0,"g",python,content +6173,8522024,"jasmine/sample.py",6891,0,"",python,selection_keyboard +6174,8522176,"jasmine/sample.py",6891,0,".",python,content +6175,8522177,"jasmine/sample.py",6892,0,"",python,selection_keyboard +6176,8522196,"TERMINAL",0,0,"28",,terminal_output +6177,8522448,"jasmine/sample.py",6892,0,"b",python,content +6178,8522449,"jasmine/sample.py",6893,0,"",python,selection_keyboard +6179,8522536,"jasmine/sample.py",6893,0,"r",python,content +6180,8522536,"jasmine/sample.py",6894,0,"",python,selection_keyboard +6181,8522737,"jasmine/sample.py",6894,0,"e",python,content +6182,8522738,"jasmine/sample.py",6895,0,"",python,selection_keyboard +6183,8522739,"TERMINAL",0,0,"2",,terminal_output +6184,8522840,"jasmine/sample.py",6895,0,"a",python,content +6185,8522841,"jasmine/sample.py",6896,0,"",python,selection_keyboard +6186,8522921,"jasmine/sample.py",6896,0,"k",python,content +6187,8522922,"jasmine/sample.py",6897,0,"",python,selection_keyboard +6188,8523128,"jasmine/sample.py",6897,0,"p",python,content +6189,8523128,"jasmine/sample.py",6898,0,"",python,selection_keyboard +6190,8523225,"TERMINAL",0,0,"39",,terminal_output +6191,8523385,"jasmine/sample.py",6898,0,"o",python,content +6192,8523386,"jasmine/sample.py",6899,0,"",python,selection_keyboard +6193,8523602,"jasmine/sample.py",6899,0,"i",python,content +6194,8523603,"jasmine/sample.py",6900,0,"",python,selection_keyboard +6195,8523664,"jasmine/sample.py",6900,0,"n",python,content +6196,8523664,"jasmine/sample.py",6901,0,"",python,selection_keyboard +6197,8523733,"jasmine/sample.py",6901,0,"t",python,content +6198,8523733,"jasmine/sample.py",6902,0,"",python,selection_keyboard +6199,8523769,"TERMINAL",0,0,"3",,terminal_output +6200,8524268,"TERMINAL",0,0,"410",,terminal_output +6201,8524307,"jasmine/sample.py",6902,0,"()",python,content +6202,8524308,"jasmine/sample.py",6903,0,"",python,selection_keyboard +6203,8524395,"jasmine/sample.py",6903,1,")",python,content +6204,8524396,"jasmine/sample.py",6904,0,"",python,selection_keyboard +6205,8524512,"jasmine/sample.py",6903,0,"",python,selection_command +6206,8524772,"TERMINAL",0,0,"4",,terminal_output +6207,8525177,"jasmine/sample.py",6926,0,"",python,selection_command +6208,8525300,"TERMINAL",0,0,"51",,terminal_output +6209,8525383,"jasmine/sample.py",6927,0,"\n jax.debug.breakpoint()",python,content +6210,8525421,"jasmine/sample.py",6932,0,"",python,selection_command +6211,8525834,"TERMINAL",0,0,"5",,terminal_output +6212,8526345,"TERMINAL",0,0,"62",,terminal_output +6213,8526986,"TERMINAL",0,0,"6",,terminal_output +6214,8527438,"TERMINAL",0,0,"73",,terminal_output +6215,8527529,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_output +6216,8528079,"TERMINAL",0,0,"7",,terminal_output +6217,8528242,"TERMINAL",0,0,"git checkout -b ""sampling-script-add-metrics""",,terminal_output +6218,8528289,"TERMINAL",0,0,"\ruv 
sync",,terminal_output +6219,8528449,"TERMINAL",0,0,"cd jasmine",,terminal_output +6220,8528449,"TERMINAL",0,0,"84",,terminal_output +6221,8528608,"TERMINAL",0,0,"..",,terminal_output +6222,8528780,"TERMINAL",0,0,"rm -rf venv_3_11/",,terminal_output +6223,8528972,"TERMINAL",0,0,".venv/bin/activate",,terminal_output +6224,8528972,"TERMINAL",0,0,"8",,terminal_output +6225,8529051,"TERMINAL",0,0,"deactivate",,terminal_output +6226,8529512,"TERMINAL",0,0,"95",,terminal_output +6227,8529642,"TERMINAL",0,0,"git pull",,terminal_output +6228,8529751,"TERMINAL",0,0,"checkout main",,terminal_output +6229,8529985,"TERMINAL",0,0,"ommit -am ""dev branch""",,terminal_output +6230,8530000,"TERMINAL",0,0,"40",,terminal_output +6231,8530106,"TERMINAL",0,0,"diff",,terminal_output +6232,8530485,"TERMINAL",0,0,"queue",,terminal_output +6233,8530539,"TERMINAL",0,0,"406",,terminal_output +6234,8530702,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +6235,8531156,"TERMINAL",0,0,"1",,terminal_output +6236,8531564,"TERMINAL",0,0,"17",,terminal_output +6237,8532145,"TERMINAL",0,0,"2",,terminal_output +6238,8532723,"TERMINAL",0,0,"28",,terminal_output +6239,8533188,"TERMINAL",0,0,"3",,terminal_output +6240,8533655,"TERMINAL",0,0,"39",,terminal_output +6241,8534236,"TERMINAL",0,0,"4",,terminal_output +6242,8534906,"TERMINAL",0,0,"420",,terminal_output +6243,8535264,"TERMINAL",0,0,"5",,terminal_output +6244,8535688,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6245,8535696,"TERMINAL",0,0,"51",,terminal_output +6246,8535830,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264\r\n",,terminal_output +6247,8535966,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +6248,8536291,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/bin/python: can't open file '/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/sample.py': [Errno 2] No such file or directory\r\nsrun: error: hkn0408: task 0: Exited with exit code 2\r\n]0;tum_cte0515@hkn0408:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0408 jasmine]$ ",,terminal_output +6249,8536291,"TERMINAL",0,0,"6",,terminal_output +6250,8536759,"TERMINAL",0,0,"62",,terminal_output +6251,8537332,"TERMINAL",0,0,"7",,terminal_output +6252,8537899,"TERMINAL",0,0,"73",,terminal_output +6253,8538317,"TERMINAL",0,0,"8",,terminal_output +6254,8538815,"TERMINAL",0,0,"84",,terminal_output +6255,8539343,"TERMINAL",0,0,"9",,terminal_output +6256,8539907,"TERMINAL",0,0,"95",,terminal_output +6257,8540072,"TERMINAL",0,0,"gi",,terminal_output +6258,8540135,"TERMINAL",0,0,"t",,terminal_output +6259,8540339,"TERMINAL",0,0," ",,terminal_output +6260,8540349,"TERMINAL",0,0,"50",,terminal_output +6261,8540477,"TERMINAL",0,0,"p",,terminal_output +6262,8540640,"TERMINAL",0,0,"u",,terminal_output +6263,8540893,"TERMINAL",0,0,"506",,terminal_output +6264,8541392,"TERMINAL",0,0,"1",,terminal_output +6265,8541555,"TERMINAL",0,0,"",,terminal_output +6266,8541727,"TERMINAL",0,0,"",,terminal_output +6267,8541835,"TERMINAL",0,0,"",,terminal_output +6268,8541958,"TERMINAL",0,0,"",,terminal_output +6269,8541979,"TERMINAL",0,0,"18",,terminal_output +6270,8542395,"TERMINAL",0,0,"2",,terminal_output +6271,8542405,"TERMINAL",0,0,"",,terminal_output +6272,8542529,"TERMINAL",0,0,"c",,terminal_output +6273,8542659,"TERMINAL",0,0,"d",,terminal_output +6274,8542758,"TERMINAL",0,0," ",,terminal_output 
+6275,8542876,"TERMINAL",0,0,"ls",,terminal_output +6276,8542989,"TERMINAL",0,0,"39",,terminal_output +6277,8543339,"TERMINAL",0,0,"",,terminal_output +6278,8543467,"TERMINAL",0,0,"",,terminal_output +6279,8543490,"TERMINAL",0,0,"3",,terminal_output +6280,8543587,"TERMINAL",0,0,"s",,terminal_output +6281,8543713,"TERMINAL",0,0,"lurm/",,terminal_output +6282,8544031,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0408:~/Projects/jasmine/slurm[?2004h(jasmine) [tum_cte0515@hkn0408 slurm]$ ",,terminal_output +6283,8544031,"TERMINAL",0,0,"430",,terminal_output +6284,8544480,"TERMINAL",0,0,"4",,terminal_output +6285,8544958,"TERMINAL",0,0,"gi",,terminal_output +6286,8545136,"TERMINAL",0,0,"t ",,terminal_output +6287,8545137,"TERMINAL",0,0,"51",,terminal_output +6288,8545309,"TERMINAL",0,0,"p",,terminal_output +6289,8545368,"TERMINAL",0,0,"u",,terminal_output +6290,8545537,"TERMINAL",0,0,"l",,terminal_output +6291,8545613,"TERMINAL",0,0,"5",,terminal_output +6292,8545701,"TERMINAL",0,0,"l\r\n[?2004l\r",,terminal_output +6293,8546146,"TERMINAL",0,0,"62",,terminal_output +6294,8546589,"TERMINAL",0,0,"6",,terminal_output +6295,8547335,"TERMINAL",0,0,"73",,terminal_output +6296,8547701,"TERMINAL",0,0,"remote: Enumerating objects: 992, done.\r\nremote: Counting objects: 0% (1/992)\rremote: Counting objects: 1% (10/992)\rremote: Counting objects: 2% (20/992)\rremote: Counting objects: 3% (30/992)\rremote: Counting objects: 4% (40/992)\rremote: Counting objects: 5% (50/992)\rremote: Counting objects: 6% (60/992)\rremote: Counting objects: 7% (70/992)\rremote: Counting objects: 8% (80/992)\rremote: Counting objects: 9% (90/992)\rremote: Counting objects: 10% (100/992)\rremote: Counting objects: 11% (110/992)\rremote: Counting objects: 12% (120/992)\rremote: Counting objects: 13% (129/992)\rremote: Counting objects: 14% (139/992)\rremote: Counting objects: 15% (149/992)\rremote: Counting objects: 16% (159/992)\rremote: Counting objects: 17% (169/992)\rremote: Counting objects: 18% (179/992)\rremote: Counting objects: 19% (189/992)\rremote: Counting objects: 20% (199/992)\rremote: Counting objects: 21% (209/992)\rremote: Counting objects: 22% (219/992)\rremote: Counting objects: 23% (229/992)\rremote: Counting objects: 24% (239/992)\rremote: Counting objects: 25% (248/992)\rremote: Counting objects: 26% (258/992)\rremote: Counting objects: 27% (268/992)\rremote: Counting objects: 28% (278/992)\rremote: Counting objects: 29% (288/992)\rremote: Counting objects: 30% (298/992)\rremote: Counting objects: 31% (308/992)\rremote: Counting objects: 32% (318/992)\rremote: Counting objects: 33% (328/992)\rremote: Counting objects: 34% (338/992)\rremote: Counting objects: 35% (348/992)\rremote: Counting objects: 36% (358/992)\rremote: Counting objects: 37% (368/992)\rremote: Counting objects: 38% (377/992)\rremote: Counting objects: 39% (387/992)\rremote: Counting objects: 40% (397/992)\rremote: Counting objects: 41% (407/992)\rremote: Counting objects: 42% (417/992)\rremote: Counting objects: 43% (427/992)\rremote: Counting objects: 44% (437/992)\rremote: Counting objects: 45% (447/992)\rremote: Counting objects: 46% (457/992)\rremote: Counting objects: 47% (467/992)\rremote: Counting objects: 48% (477/992)\rremote: Counting objects: 49% (487/992)\rremote: Counting objects: 50% (496/992)\rremote: Counting objects: 51% (506/992)\rremote: Counting objects: 52% (516/992)\rremote: Counting objects: 53% (526/992)\rremote: Counting objects: 54% (536/992)\rremote: Counting objects: 55% (546/992)\rremote: Counting objects: 
56% (556/992)\rremote: Counting objects: 57% (566/992)\rremote: Counting objects: 58% (576/992)\rremote: Counting objects: 59% (586/992)\rremote: Counting objects: 60% (596/992)\rremote: Counting objects: 61% (606/992)\rremote: Counting objects: 62% (616/992)\rremote: Counting objects: 63% (625/992)\rremote: Counting objects: 64% (635/992)\rremote: Counting objects: 65% (645/992)\rremote: Counting objects: 66% (655/992)\rremote: Counting objects: 67% (665/992)\rremote: Counting objects: 68% (675/992)\rremote: Counting objects: 69% (685/992)\rremote: Counting objects: 70% (695/992)\rremote: Counting objects: 71% (705/992)\rremote: Counting objects: 72% (715/992)\rremote: Counting objects: 73% (725/992)\rremote: Counting objects: 74% (735/992)\rremote: Counting objects: 75% (744/992)\rremote: Counting objects: 76% (754/992)\rremote: Counting objects: 77% (764/992)\rremote: Counting objects: 78% (774/992)\rremote: Counting objects: 79% (784/992)\rremote: Counting objects: 80% (794/992)\rremote: Counting objects: 81% (804/992)\r",,terminal_output +6297,8547779,"TERMINAL",0,0,"7",,terminal_output +6298,8548088,"TERMINAL",0,0,"remote: Counting objects: 82% (814/992)\rremote: Counting objects: 83% (824/992)\rremote: Counting objects: 84% (834/992)\rremote: Counting objects: 85% (844/992)\rremote: Counting objects: 86% (854/992)\rremote: Counting objects: 87% (864/992)\rremote: Counting objects: 88% (873/992)\rremote: Counting objects: 89% (883/992)\rremote: Counting objects: 90% (893/992)\rremote: Counting objects: 91% (903/992)\rremote: Counting objects: 92% (913/992)\rremote: Counting objects: 93% (923/992)\rremote: Counting objects: 94% (933/992)\rremote: Counting objects: 95% (943/992)\rremote: Counting objects: 96% (953/992)\rremote: Counting objects: 97% (963/992)\rremote: Counting objects: 98% (973/992)\rremote: Counting objects: 99% (983/992)\rremote: Counting objects: 100% (992/992)\rremote: Counting objects: 100% (992/992), done.\r\nremote: Compressing objects: 0% (1/147)\rremote: Compressing objects: 1% (2/147)\rremote: Compressing objects: 2% (3/147)\rremote: Compressing objects: 3% (5/147)\rremote: Compressing objects: 4% (6/147)\rremote: Compressing objects: 5% (8/147)\rremote: Compressing objects: 6% (9/147)\rremote: Compressing objects: 7% (11/147)\rremote: Compressing objects: 8% (12/147)\rremote: Compressing objects: 9% (14/147)\rremote: Compressing objects: 10% (15/147)\rremote: Compressing objects: 11% (17/147)\rremote: Compressing objects: 12% (18/147)\rremote: Compressing objects: 13% (20/147)\rremote: Compressing objects: 14% (21/147)\rremote: Compressing objects: 15% (23/147)\rremote: Compressing objects: 16% (24/147)\rremote: Compressing objects: 17% (25/147)\rremote: Compressing objects: 18% (27/147)\rremote: Compressing objects: 19% (28/147)\rremote: Compressing objects: 20% (30/147)\rremote: Compressing objects: 21% (31/147)\rremote: Compressing objects: 22% (33/147)\rremote: Compressing objects: 23% (34/147)\rremote: Compressing objects: 24% (36/147)\rremote: Compressing objects: 25% (37/147)\rremote: Compressing objects: 26% (39/147)\rremote: Compressing objects: 27% (40/147)\rremote: Compressing objects: 28% (42/147)\rremote: Compressing objects: 29% (43/147)\rremote: Compressing objects: 30% (45/147)\rremote: Compressing objects: 31% (46/147)\rremote: Compressing objects: 32% (48/147)\rremote: Compressing objects: 33% (49/147)\rremote: Compressing objects: 34% (50/147)\rremote: Compressing objects: 35% (52/147)\rremote: Compressing objects: 36% (53/147)\rremote: 
Compressing objects: 37% (55/147)\rremote: Compressing objects: 38% (56/147)\rremote: Compressing objects: 39% (58/147)\rremote: Compressing objects: 40% (59/147)\rremote: Compressing objects: 41% (61/147)\rremote: Compressing objects: 42% (62/147)\rremote: Compressing objects: 43% (64/147)\rremote: Compressing objects: 44% (65/147)\rremote: Compressing objects: 45% (67/147)\rremote: Compressing objects: 46% (68/147)\rremote: Compressing objects: 47% (70/147)\rremote: Compressing objects: 48% (71/147)\rremote: Compressing objects: 49% (73/147)\rremote: Compressing objects: 50% (74/147)\rremote: Compressing objects: 51% (75/147)\rremote: Compressing objects: 52% (77/147)\rremote: Compressing objects: 53% (78/147)\rremote: Compressing objects: 54% (80/147)\rremote: Compressing objects: 55% (81/147)\rremote: Compressing objects: 56% (83/147)\rremote: Compressing objects: 57% (84/147)\rremote: Compressing objects: 58% (86/147)\rremote: Compressing objects: 59% (87/147)\rremote: Compressing objects: 60% (89/147)\rremote: Compressing objects: 61% (90/147)\rremote: Compressing objects: 62% (92/147)\rremote: Compressing objects: 63% (93/147)\rremote: Compressing objects: 64% (95/147)\rremote: Compressing objects: 65% (96/147)\rremote: Compressing objects: 66% (98/147)\rremote: Compressing objects: 67% (99/147)\rremote: Compressing objects: 68% (100/147)\rremote: Compressing objects: 69% (102/147)\rremote: Compressing objects: 70% (103/147)\rremote: Compressing objects: 71% (105/147)\rremote: Compressing objects: 72% (106/147)\rremote: Compressing objects: 73% (108/147)\rremote: Compressing objects: 74% (109/147)\rremote: Compressing objects: 75% (111/147)\rremote: Compressing objects: 76% (112/147)\rremote: Compressing objects: 77% (114/147)\rremote: Compressing objects: 78% (115/147)\rremote: Compressing objects: 79% (117/147)\rremote: Compressing objects: 80% (118/147)\rremote: Compressing objects: 81% (120/147)\rremote: Compressing objects: 82% (121/147)\rremote: Compressing objects: 83% (123/147)\rremote: Compressing objects: 84% (124/147)\rremote: Compressing objects: 85% (125/147)\rremote: Compressing objects: 86% (127/147)\rremote: Compressing objects: 87% (128/147)\rremote: Compressing objects: 88% (130/147)\rremote: Compressing objects: 89% (131/147)\rremote: Compressing objects: 90% (133/147)\rremote: Compressing objects: 91% (134/147)\rremote: Compressing objects: 92% (136/147)\rremote: Compressing objects: 93% (137/147)\rremote: Compressing objects: 94% (139/147)\rremote: Compressing objects: 95% (140/147)\rremote: Compressing objects: 96% (142/147)\rremote: Compressing objects: 97% (143/147)\rremote: Compressing objects: 98% (145/147)\rremote: Compressing objects: 99% (146/147)\rremote: Compressing objects: 100% (147/147)\rremote: Compressing objects: 100% (147/147), done.\r\nReceiving objects: 0% (1/640)\rReceiving objects: 1% (7/640)\rReceiving objects: 2% (13/640)\rReceiving objects: 3% (20/640)\rReceiving objects: 4% (26/640)\rReceiving objects: 5% (32/640)\rReceiving objects: 6% (39/640)\rReceiving objects: 7% (45/640)\rReceiving objects: 8% (52/640)\rReceiving objects: 9% (58/640)\rReceiving objects: 10% (64/640)\rReceiving objects: 11% (71/640)\rReceiving objects: 12% (77/640)\rReceiving objects: 13% (84/640)\rReceiving objects: 14% (90/640)\rReceiving objects: 15% (96/640)\rReceiving objects: 16% (103/640)\rReceiving objects: 17% (109/640)\rReceiving objects: 18% (116/640)\rReceiving objects: 19% (122/640)\rReceiving objects: 20% (128/640)\rReceiving objects: 21% 
(135/640)\rReceiving objects: 22% (141/640)\rReceiving objects: 23% (148/640)\rReceiving objects: 24% (154/640)\rReceiving objects: 25% (160/640)\rReceiving objects: 26% (167/640)\rReceiving objects: 27% (173/640)\rReceiving objects: 28% (180/640)\rReceiving objects: 29% (186/640)\rReceiving objects: 30% (192/640)\rReceiving objects: 31% (199/640)\rReceiving objects: 32% (205/640)\rReceiving objects: 33% (212/640)\rReceiving objects: 34% (218/640)\rReceiving objects: 35% (224/640)\rReceiving objects: 36% (231/640)\rReceiving objects: 37% (237/640)\rReceiving objects: 38% (244/640)\rReceiving objects: 39% (250/640)\rReceiving objects: 40% (256/640)\rReceiving objects: 41% (263/640)\rReceiving objects: 42% (269/640)\rReceiving objects: 43% (276/640)\rReceiving objects: 44% (282/640)\rReceiving objects: 45% (288/640)\rReceiving objects: 46% (295/640)\rReceiving objects: 47% (301/640)\rReceiving objects: 48% (308/640)\rReceiving objects: 49% (314/640)\rReceiving objects: 50% (320/640)\rReceiving objects: 51% (327/640)\rReceiving objects: 52% (333/640)\rReceiving objects: 53% (340/640)\rReceiving objects: 54% (346/640)\rReceiving objects: 55% (352/640)\rReceiving objects: 56% (359/640)\rReceiving objects: 57% (365/640)\rReceiving objects: 58% (372/640)\rReceiving objects: 59% (378/640)\rReceiving objects: 60% (384/640)\rReceiving objects: 61% (391/640)\rReceiving objects: 62% (397/640)\rReceiving objects: 63% (404/640)\rReceiving objects: 64% (410/640)\rremote: Total 640 (delta 496), reused 622 (delta 480), pack-reused 0 (from 0)\r\nReceiving objects: 65% (416/640)\rReceiving objects: 66% (423/640)\rReceiving objects: 67% (429/640)\rReceiving objects: 68% (436/640)\rReceiving objects: 69% (442/640)\rReceiving objects: 70% (448/640)\rReceiving objects: 71% (455/640)\rReceiving objects: 72% (461/640)\rReceiving objects: 73% (468/640)\rReceiving objects: 74% (474/640)\rReceiving objects: 75% (480/640)\rReceiving objects: 76% (487/640)\rReceiving objects: 77% (493/640)\rReceiving objects: 78% (500/640)\rReceiving objects: 79% (506/640)\rReceiving objects: 80% (512/640)\rReceiving objects: 81% (519/640)\rReceiving objects: 82% (525/640)\rReceiving objects: 83% (532/640)\rReceiving objects: 84% (538/640)\rReceiving objects: 85% (544/640)\rReceiving objects: 86% (551/640)\rReceiving objects: 87% (557/640)\rReceiving objects: 88% (564/640)\rReceiving objects: 89% (570/640)\rReceiving objects: 90% (576/640)\rReceiving objects: 91% (583/640)\rReceiving objects: 92% (589/640)\rReceiving objects: 93% (596/640)\rReceiving objects: 94% (602/640)\rReceiving objects: 95% (608/640)\rReceiving objects: 96% (615/640)\rReceiving objects: 97% (621/640)\rReceiving objects: 98% (628/640)\rReceiving objects: 99% (634/640)\rReceiving objects: 100% (640/640)\rReceiving objects: 100% (640/640), 85.94 KiB | 556.00 KiB/s, done.\r\nResolving deltas: 0% (0/496)\rResolving deltas: 1% (5/496)\rResolving deltas: 2% (10/496)\rResolving deltas: 3% (15/496)\rResolving deltas: 4% (20/496)\rResolving deltas: 5% (25/496)\rResolving deltas: 6% (30/496)\rResolving deltas: 7% (35/496)\rResolving deltas: 8% (40/496)\rResolving deltas: 9% (45/496)\rResolving deltas: 10% (50/496)\rResolving deltas: 11% (55/496)\rResolving deltas: 12% (60/496)\rResolving deltas: 13% (65/496)\rResolving deltas: 14% (70/496)\rResolving deltas: 15% (75/496)\rResolving deltas: 16% (80/496)\rResolving deltas: 17% (85/496)\rResolving deltas: 18% (90/496)\rResolving deltas: 19% (95/496)\rResolving deltas: 20% (100/496)\rResolving deltas: 21% (105/496)\rResolving 
deltas: 22% (110/496)\rResolving deltas: 23% (115/496)\rResolving deltas: 24% (120/496)\rResolving deltas: 25% (124/496)\rResolving deltas: 26% (129/496)\rResolving deltas: 27% (134/496)\rResolving deltas: 28% (139/496)\rResolving deltas: 29% (144/496)\rResolving deltas: 30% (149/496)\rResolving deltas: 31% (154/496)\rResolving deltas: 32% (159/496)\rResolving deltas: 33% (164/496)\rResolving deltas: 34% (169/496)\rResolving deltas: 35% (174/496)\rResolving deltas: 36% (179/496)\rResolving deltas: 37% (184/496)\rResolving deltas: 38% (189/496)\rResolving deltas: 39% (194/496)\rResolving deltas: 40% (199/496)\rResolving deltas: 41% (204/496)\rResolving deltas: 42% (209/496)\rResolving deltas: 43% (214/496)\rResolving deltas: 44% (219/496)\rResolving deltas: 45% (224/496)\rResolving deltas: 46% (229/496)\rResolving deltas: 47% (234/496)\rResolving deltas: 48% (239/496)\r",,terminal_output +6299,8548257,"TERMINAL",0,0,"Resolving deltas: 49% (244/496)\r",,terminal_output +6300,8548532,"TERMINAL",0,0,"84",,terminal_output +6301,8548533,"TERMINAL",0,0,"Resolving deltas: 50% (248/496)\rResolving deltas: 51% (253/496)\rResolving deltas: 52% (258/496)\rResolving deltas: 53% (263/496)\rResolving deltas: 54% (268/496)\rResolving deltas: 55% (273/496)\rResolving deltas: 56% (278/496)\rResolving deltas: 57% (283/496)\rResolving deltas: 58% (288/496)\rResolving deltas: 59% (293/496)\rResolving deltas: 60% (298/496)\rResolving deltas: 61% (303/496)\rResolving deltas: 62% (308/496)\rResolving deltas: 63% (313/496)\rResolving deltas: 64% (318/496)\rResolving deltas: 65% (323/496)\rResolving deltas: 66% (328/496)\rResolving deltas: 67% (333/496)\rResolving deltas: 68% (338/496)\rResolving deltas: 69% (343/496)\rResolving deltas: 70% (348/496)\rResolving deltas: 71% (353/496)\rResolving deltas: 72% (358/496)\rResolving deltas: 73% (363/496)\rResolving deltas: 74% (368/496)\rResolving deltas: 75% (372/496)\rResolving deltas: 76% (377/496)\rResolving deltas: 77% (382/496)\rResolving deltas: 78% (387/496)\rResolving deltas: 79% (392/496)\rResolving deltas: 80% (397/496)\rResolving deltas: 81% (402/496)\rResolving deltas: 82% (407/496)\rResolving deltas: 83% (412/496)\rResolving deltas: 84% (417/496)\rResolving deltas: 85% (422/496)\rResolving deltas: 86% (427/496)\rResolving deltas: 87% (432/496)\rResolving deltas: 88% (437/496)\rResolving deltas: 89% (442/496)\rResolving deltas: 90% (447/496)\rResolving deltas: 91% (452/496)\rResolving deltas: 92% (457/496)\rResolving deltas: 93% (462/496)\rResolving deltas: 94% (467/496)\rResolving deltas: 95% (472/496)\rResolving deltas: 96% (477/496)\rResolving deltas: 97% (482/496)\rResolving deltas: 98% (487/496)\rResolving deltas: 99% (492/496)\rResolving deltas: 100% (496/496)\rResolving deltas: 100% (496/496), completed with 263 local objects.\r\n",,terminal_output +6302,8548675,"TERMINAL",0,0,"8",,terminal_output +6303,8548676,"TERMINAL",0,0,"From github.com:p-doom/slurm\r\n 6676551..2c2f0eb main -> origin/main\r\n",,terminal_output +6304,8548766,"TERMINAL",0,0," * [new branch] change-path -> origin/change-path\r\nUpdating 6676551..2c2f0eb\r\n",,terminal_output +6305,8549373,"TERMINAL",0,0,"95",,terminal_output +6306,8549697,"TERMINAL",0,0,"9",,terminal_output +6307,8549822,"TERMINAL",0,0,"error: Your local changes to the following files would be overwritten by 
merge:\r\n\tjobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh\r\n\tjobs/mihir/horeka/breakout/default_runs/train_lam_default.sh\r\n\tjobs/mihir/horeka/breakout/default_runs/train_tokenizer_default.sh\r\n\tjobs/mihir/horeka/coinrun/sample_maskgit.sbatch\r\n\tjobs/mihir/horeka/coinrun/train_dyn_single_gpu_gt_actions.sh\r\nPlease commit your changes or stash them before you merge.\r\nAborting\r\n]0;tum_cte0515@hkn0408:~/Projects/jasmine/slurm[?2004h(jasmine) [tum_cte0515@hkn0408 slurm]$ ",,terminal_output +6308,8550431,"TERMINAL",0,0,"6:006",,terminal_output +6309,8550717,"TERMINAL",0,0,"6:00",,terminal_output +6310,8551380,"TERMINAL",0,0,"17",,terminal_output +6311,8551763,"TERMINAL",0,0,"1",,terminal_output +6312,8552417,"TERMINAL",0,0,"28",,terminal_output +6313,8552417,"TERMINAL",0,0,"gi",,terminal_output +6314,8552451,"TERMINAL",0,0,"t",,terminal_output +6315,8552562,"TERMINAL",0,0," ",,terminal_output +6316,8552632,"TERMINAL",0,0,"s",,terminal_output +6317,8552757,"TERMINAL",0,0,"t",,terminal_output +6318,8552780,"TERMINAL",0,0,"2",,terminal_output +6319,8552810,"TERMINAL",0,0,"a",,terminal_output +6320,8552944,"TERMINAL",0,0,"sh",,terminal_output +6321,8553453,"TERMINAL",0,0,"39",,terminal_output +6322,8553827,"TERMINAL",0,0,"3",,terminal_output +6323,8554499,"TERMINAL",0,0,"440",,terminal_output +6324,8554907,"TERMINAL",0,0,"4",,terminal_output +6325,8555473,"TERMINAL",0,0,"51",,terminal_output +6326,8555891,"TERMINAL",0,0,"5",,terminal_output +6327,8556545,"TERMINAL",0,0,"62",,terminal_output +6328,8556933,"TERMINAL",0,0,"6",,terminal_output +6329,8556949,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6330,8557338,"TERMINAL",0,0,"Saved working directory and index state WIP on main: 6676551 added scripts\r\n",,terminal_output +6331,8557628,"TERMINAL",0,0,"]0;tum_cte0515@hkn0408:~/Projects/jasmine/slurm[?2004h(jasmine) [tum_cte0515@hkn0408 slurm]$ ",,terminal_output +6332,8557754,"TERMINAL",0,0,"73",,terminal_output +6333,8558090,"TERMINAL",0,0,"8",,terminal_output +6334,8558315,"TERMINAL",0,0,"git stash",,terminal_output +6335,8558553,"TERMINAL",0,0,"pull",,terminal_output +6336,8558641,"TERMINAL",0,0,"84",,terminal_output +6337,8559003,"TERMINAL",0,0,"9",,terminal_output +6338,8559258,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6339,8559726,"TERMINAL",0,0,"95",,terminal_output +6340,8560130,"TERMINAL",0,0,"10",,terminal_output +6341,8560864,"TERMINAL",0,0,"Updating 6676551..2c2f0eb\r\n",,terminal_output +6342,8560864,"TERMINAL",0,0,"106",,terminal_output +6343,8561154,"TERMINAL",0,0,"1",,terminal_output +6344,8561996,"TERMINAL",0,0,"17",,terminal_output +6345,8562006,"TERMINAL",0,0,"Updating files: 25% (120/478)\r",,terminal_output +6346,8562083,"TERMINAL",0,0,"Updating files: 26% (125/478)\r",,terminal_output +6347,8562222,"TERMINAL",0,0,"Updating files: 27% (130/478)\r",,terminal_output +6348,8562222,"TERMINAL",0,0,"2",,terminal_output +6349,8562402,"TERMINAL",0,0,"Updating files: 34% (163/478)\r",,terminal_output
+6350,8562544,"TERMINAL",0,0,"Updating files: 35% (168/478)\r",,terminal_output +6351,8562674,"TERMINAL",0,0,"Updating files: 39% (187/478)\r",,terminal_output +6352,8562892,"TERMINAL",0,0,"Updating files: 40% (192/478)\r",,terminal_output +6353,8562942,"TERMINAL",0,0,"28",,terminal_output +6354,8563080,"TERMINAL",0,0,"Updating files: 44% (211/478)\r",,terminal_output +6355,8563157,"TERMINAL",0,0,"Updating files: 48% (230/478)\r",,terminal_output +6356,8563386,"TERMINAL",0,0,"3",,terminal_output +6357,8563386,"TERMINAL",0,0,"Updating files: 54% (259/478)\r",,terminal_output +6358,8563471,"TERMINAL",0,0,"Updating files: 56% (268/478)\r",,terminal_output +6359,8563659,"TERMINAL",0,0,"Updating files: 63% (302/478)\r",,terminal_output +6360,8563772,"TERMINAL",0,0,"Updating files: 65% (311/478)\r",,terminal_output +6361,8563791,"TERMINAL",0,0,"39",,terminal_output +6362,8563830,"TERMINAL",0,0,"Updating files: 66% (316/478)\r",,terminal_output +6363,8563932,"TERMINAL",0,0,"Updating files: 67% (321/478)\r",,terminal_output +6364,8563987,"TERMINAL",0,0,"Updating files: 69% (330/478)\r",,terminal_output +6365,8564093,"TERMINAL",0,0,"Updating files: 72% (345/478)\r",,terminal_output +6366,8564151,"TERMINAL",0,0,"Updating files: 73% (349/478)\r",,terminal_output +6367,8564209,"TERMINAL",0,0,"4",,terminal_output +6368,8564221,"TERMINAL",0,0,"Updating files: 75% (359/478)\r",,terminal_output +6369,8564364,"TERMINAL",0,0,"Updating files: 76% (364/478)\r",,terminal_output +6370,8564466,"TERMINAL",0,0,"Updating files: 77% (369/478)\r",,terminal_output +6371,8564698,"TERMINAL",0,0,"Updating files: 83% (397/478)\r",,terminal_output +6372,8564825,"TERMINAL",0,0,"Updating files: 86% (412/478)\r",,terminal_output +6373,8564825,"TERMINAL",0,0,"450",,terminal_output +6374,8564909,"TERMINAL",0,0,"Updating files: 87% (416/478)\r",,terminal_output +6375,8565230,"TERMINAL",0,0,"Updating files: 89% (426/478)\r",,terminal_output +6376,8565309,"TERMINAL",0,0,"5",,terminal_output +6377,8565310,"TERMINAL",0,0,"Updating files: 91% (435/478)\r",,terminal_output +6378,8565362,"TERMINAL",0,0,"Updating files: 92% (440/478)\r",,terminal_output +6379,8565448,"TERMINAL",0,0,"Updating files: 93% (445/478)\r",,terminal_output +6380,8565552,"TERMINAL",0,0,"Updating files: 94% (450/478)\r",,terminal_output +6381,8565741,"TERMINAL",0,0,"Updating 
files: 96% (459/478)\r",,terminal_output +6382,8565812,"TERMINAL",0,0,"Updating files: 97% (464/478)\rUpdating files: 98% (469/478)\r",,terminal_output +6383,8565877,"TERMINAL",0,0,"51",,terminal_output +6384,8565902,"TERMINAL",0,0,"Updating files: 99% (474/478)\rUpdating files: 100% (478/478)\rUpdating files: 100% (478/478), done.\r\n",,terminal_output +6385,8566047,"TERMINAL",0,0,"Fast-forward\r\n",,terminal_output +6386,8566341,"TERMINAL",0,0,"6",,terminal_output +6387,8566685,"TERMINAL",0,0," dev/alfred/berlin/gt_actions/sample_causal_32gpus.sbatch | 2 +-\r\n dev/alfred/berlin/gt_actions/sample_darkness_filter.sbatch | 2 +-\r\n dev/alfred/berlin/gt_actions/train_dynacmis_on_3nodes_2gpu.sbatch | 2 +-\r\n dev/alfred/berlin/gt_actions/train_dynacmis_on_8gpu.sbatch | 2 +-\r\n dev/alfred/berlin/gt_actions/train_dynacmis_on_more_than4.sbatch | 2 +-\r\n dev/alfred/berlin/gt_actions/train_dynacmis_overfit.sbatch | 2 +-\r\n dev/alfred/berlin/gt_actions/train_dynacmis_overfit_4gpu.sbatch | 2 +-\r\n dev/alfred/berlin/gt_actions/train_dynacmis_overfit_4gpu_to_8gpu.sbatch | 2 +-\r\n dev/alfred/berlin/gt_actions/train_tok_topology_one_gpu copy.sbatch | 4 +-\r\n dev/alfred/berlin/gt_actions/train_tok_topology_one_gpu.sbatch | 4 +-\r\n dev/alfred/berlin/gt_actions/train_tok_topology_restore_to_on_gpu.sbatch | 2 +-\r\n dev/alfred/berlin/gt_actions/train_tok_topology_two_gpus.sbatch | 4 +-\r\n dev/alfred/berlin/job_requeueing/cpu_requeue_dev.sbatch | 2 +-\r\n .../berlin/job_requeueing/dynamic_lr_tuning/train_dynacmis_lr_general.sbatch | 2 +-\r\n .../berlin/job_requeueing/dynamic_validation/train_dynacmis_lr_general.sbatch | 2 +-\r\n dev/alfred/berlin/job_requeueing/lam_lr_tuning/train_lam_lr_general.sbatch | 2 +-\r\n .../berlin/job_requeueing/tokenizer_lr_tuning/train_tokenizer_lr_general.sbatch | 2 +-\r\n dev/alfred/berlin/job_requeueing/train_lam_chain_dev.sbatch | 2 +-\r\n dev/alfred/berlin/job_requeueing/train_lam_requeue_dev.sbatch | 2 +-\r\n dev/alfred/berlin/job_requeueing/train_lam_requeue_dev_gemini.sbatch | 2 +-\r\n dev/alfred/berlin/restore_ckpt/restore_ckpt_single.sbatch | 2 +-\r\n dev/alfred/berlin/restore_ckpt/restore_multi.sbatch | 2 +-\r\n .../coinrun_dynamics_batch_size_144_3e-5_invest_nan_restore_40k.sbatch | 2 +-\r\n dev/alfred/berlin/test_franz_pr/train_dynacmis_overfit.sbatch | 2 +-\r\n dev/alfred/berlin/test_franz_pr/train_lam_overfit.sbatch | 2 +-\r\n dev/alfred/berlin/test_franz_pr/train_tokenizer_overfit.sbatch | 2 +-\r\n dev/alfred/berlin/topology/nnx/train_tokenizer_overfit_1.sbatch | 4 +-\r\n dev/alfred/berlin/topology/nnx/train_tokenizer_overfit_2_gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology/nnx/train_tokenizer_restore_2gpu_to_1gpu.sbatch | 2 +-\r\n dev/alfred/berlin/topology/nnx/train_tokenizer_restore_2gpu_to_2gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology/prennx/train_tokenizer_overfit_2_gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology/prennx/train_tokenizer_restore_2gpu_to_1gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology/prennx/train_tokenizer_restore_2gpu_to_2gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology/sample_overfit_single_gpu.sbatch | 2 +-\r\n dev/alfred/berlin/topology/train_dynacmis_overfit_1.sbatch | 2 +-\r\n dev/alfred/berlin/topology/train_dynacmis_overfit_1_gt_actions.sbatch | 2 +-\r\n dev/alfred/berlin/topology/train_dynacmis_overfit_1_gt_actions_noise.sbatch | 2 +-\r\n dev/alfred/berlin/topology/train_dynacmis_overfit_1_noise.sbatch | 2 +-\r\n dev/alfred/berlin/topology/train_dynacmis_overfit_2_nodes_2_gpu.sbatch | 2 +-\r\n 
dev/alfred/berlin/topology/train_dynacmis_overfit_2_nodes_4_gpu.sbatch | 2 +-\r\n dev/alfred/berlin/topology/train_dynacmis_overfit_4gpu.sbatch | 2 +-\r\n dev/alfred/berlin/topology/train_lam_overfit.sbatch | 2 +-\r\n dev/alfred/berlin/topology/train_tokenizer_overfit_1gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology/train_tokenizer_overfit_2_gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology/train_tokenizer_overfit_4_gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology/train_tokenizer_overfit_single_record.sbatch | 4 +-\r\n dev/alfred/berlin/topology/train_tokenizer_overfit_single_record_requeue.sbatch | 4 +-\r\n dev/alfred/berlin/topology/train_tokenizer_restore_1gpu_to_1gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology/train_tokenizer_restore_1gpu_to_2gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology/train_tokenizer_restore_2gpu_to_1gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology/train_tokenizer_restore_2gpu_to_2gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology_restore_fix/train_tokenizer_overfit_4_gpu.sbatch | 4 +-\r\n dev/alfred/berlin/topology_restore_fix/train_tokenizer_overfit_4_to_1_gpu.sbatch | 2 +-\r\n dev/alfred/berlin/train_dyn_dev/train_dynacmis_overfit.sbatch | 2 +-\r\n dev/alfred/berlin/train_lam_dev/train_lam.sbatch | 2 +-\r\n dev/alfred/berlin/train_tok_dev/train_tok.sbatch | 2 +-\r\n dev/alfred/berlin/train_tok_dev/train_tok_overfit.sbatch | 2 +-\r\n dev/alfred/berlin/validation/tokenizer_lr_tuning/train_tokenizer_lr_general.sbatch | 4 +-\r\n .../validation/tokenizer_without_optimizer/train_tokenizer_lr_general.sbatch | 4 +-\r\n dev/alfred/helmholtz_cluster/train_tok_dev/train_tok.sbatch | 2 +-\r\n dev/alfred/helmholtz_cluster/train_tok_dev/train_tok_overfit.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_cur/atari/sample_causal.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_cur/atari/sample_maskgit.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_cur/atari/train_dynamics_causal.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_cur/atari/train_dynamics_maskgit.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_cur/atari/train_tokenizer_lr_1e-4.sbatch | 2 +-\r\n .../horeka/jobs_cur/dyn_gt_actions_ablation_prepend/dev_dyn_gt_actions.sbatch | 2 +-\r\n .../jobs_cur/dyn_gt_actions_ablation_prepend/dev_dyn_gt_actions_dev_single.sbatch | 2 +-\r\n .../horeka/jobs_cur/dyn_gt_actions_ablation_prepend/dyn_gt_actions_ablation.sbatch | 2 +-\r\n .../dyn_gt_actions_ablation_prepend/dyn_gt_actions_ablation_baseline.sbatch | 2 +-\r\n .../dyn_gt_actions_ablation_prepend/dyn_gt_actions_ablation_one_node.sbatch | 2 +-\r\n .../dyn_gt_actions_ablation_prepend/dyn_gt_actions_ablation_single_gpu copy.sbatch | 4 +-\r\n .../dyn_gt_actions_ablation_prepend/dyn_gt_actions_ablation_single_gpu.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_cur/tokenizer/train_tokenizer_37M_single_gpu.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_cur/tokenizer/train_tokenizer_37M_single_node.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/adjusted_lr/train_tokenizer_16_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/adjusted_lr/train_tokenizer_1_nodes.sh | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/adjusted_lr/train_tokenizer_2_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/adjusted_lr/train_tokenizer_32_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes.sh | 2 +-\r\n 
.../batchsize_scaling/adjusted_lr/train_tokenizer_4_nodes_frequent_chkpt.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/adjusted_lr/train_tokenizer_64_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/adjusted_lr/train_tokenizer_8_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/const_lr/train_tokenizer_16_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/const_lr/train_tokenizer_1_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/const_lr/train_tokenizer_2_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/const_lr/train_tokenizer_32_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/const_lr/train_tokenizer_4_nodes.sbatch | 2 +-\r\n .../horeka/jobs_old/batchsize_scaling/const_lr/train_tokenizer_8_nodes.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/batchsize_scaling/oai_subset/train_tokenizer_2_nodes.sh | 2 +-\r\n .../batchsize_scaling/oai_subset/train_tokenizer_2_nodes_samples_500.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/checkpoint_fix/train_tokenizer.sh | 2 +-\r\n dev/alfred/horeka/jobs_old/coinrun/base/train_dynamics_coinrun.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/coinrun/base/train_lam_coinrun.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/coinrun/base/train_tokenizer_coinrun.sbatch | 2 +-\r\n .../horeka/jobs_old/coinrun/latent_action_ablation/train_dynamics_coinrun.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/coinrun/latent_action_ablation/train_lam_12.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/coinrun/latent_action_ablation/train_lam_24.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/coinrun/latent_action_ablation/train_lam_48.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/coinrun/latent_action_ablation/train_lam_6.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/coinrun/latent_action_ablation/train_lam_6.sh | 2 +-\r\n .../horeka/jobs_old/coinrun/latent_action_ablation/train_tokenizer_coinrun.sbatch | 2 +-\r\n .../horeka/jobs_old/coinrun/latent_action_ablation/train_tokenizer_coinrun.sh | 2 +-\r\n dev/alfred/horeka/jobs_old/coinrun/train_tokenizer_coinrun.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/dyn_gt_actions_ablation/dyn_gt_actions_ablation.sbatch | 2 +-\r\n .../jobs_old/dyn_gt_actions_ablation/dyn_gt_actions_ablation_baseline.sbatch | 2 +-\r\n .../horeka/jobs_old/dyn_gt_actions_ablation/dyn_gt_actions_ablation_dev.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/generate_single_samples/generate_samples_50k.sh | 2 +-\r\n .../job_requeueing/example_tokenizer_lr_tuning/train_tokenizer_lr_general.sbatch | 2 +-\r\n .../jobs_old/job_requeueing/lr_tuning/tokenizer/train_tokenizer_lr_general.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/job_requeueing/train_lam_chain_dev.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/job_requeueing/train_lam_requeue_dev.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/masked_lim/masked_lim_dev.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/masked_lim/masked_lim_yolo.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/masked_lim_noise/masked_lim_dev.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/masked_lim_noise/masked_lim_yolo.sbatch | 2 +-\r\n .../jobs_old/overfit_minecraft_single_sample/train_dynamics_overfit_sample.sbatch | 2 +-\r\n .../jobs_old/overfit_minecraft_single_sample/train_dynamics_overfit_sample.sh | 2 +-\r\n .../horeka/jobs_old/overfit_run_ds_oai/lam/from_ckpt/train_lam_samples_12.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/overfit_run_ds_oai/lam/train_lam_init.sbatch | 4 +-\r\n dev/alfred/horeka/jobs_old/overfit_run_ds_oai/lam/train_lam_samples_12.sbatch | 4 +-\r\n 
dev/alfred/horeka/jobs_old/overfit_run_ds_oai/lam/train_lam_samples_12288.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/overfit_run_ds_oai/lam/train_lam_samples_1536.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/overfit_run_ds_oai/lam/train_lam_samples_24576.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/overfit_run_ds_oai/lam/train_lam_samples_3072.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/overfit_run_ds_oai/lam/train_lam_samples_384.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/overfit_run_ds_oai/lam/train_lam_samples_49152.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/overfit_run_ds_oai/lam/train_lam_samples_6144.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/overfit_run_ds_oai/lam/train_lam_samples_96.sbatch | 2 +-\r\n .../overfit_run_ds_oai/tokenizer/from_ckpt/train_tokenizer_samples_12.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/overfit_run_ds_oai/tokenizer/train_tokenizer_dev.sh | 2 +-\r\n .../horeka/jobs_old/overfit_run_ds_oai/tokenizer/train_tokenizer_samples_12.sbatch | 2 +-\r\n .../jobs_old/overfit_run_ds_oai/tokenizer/train_tokenizer_samples_12288.sbatch | 2 +-\r\n .../jobs_old/overfit_run_ds_oai/tokenizer/train_tokenizer_samples_1536.sbatch | 2 +-\r\n .../jobs_old/overfit_run_ds_oai/tokenizer/train_tokenizer_samples_24576.sbatch | 2 +-\r\n .../jobs_old/overfit_run_ds_oai/tokenizer/train_tokenizer_samples_3072.sbatch | 2 +-\r\n .../jobs_old/overfit_run_ds_oai/tokenizer/train_tokenizer_samples_384.sbatch | 2 +-\r\n .../jobs_old/overfit_run_ds_oai/tokenizer/train_tokenizer_samples_49152.sbatch | 2 +-\r\n .../jobs_old/overfit_run_ds_oai/tokenizer/train_tokenizer_samples_6144.sbatch | 2 +-\r\n .../horeka/jobs_old/overfit_run_ds_oai/tokenizer/train_tokenizer_samples_96.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/overfit_sample/train_lam_overfit_sample.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/overfit_sample/train_tokenizer_overfit_sample.sbatch | 2 +-\r\n .../jobs_old/overfit_sample/train_tokenizer_overfit_sample_size_0.6_mio.sbatch | 2 +-\r\n .../jobs_old/overfit_sample/train_tokenizer_overfit_sample_size_21_mio.sbatch | 2 +-\r\n .../jobs_old/overfit_sample/train_tokenizer_overfit_sample_size_2_mio.sbatch | 2 +-\r\n .../jobs_old/overfit_sample/train_tokenizer_overfit_sample_size_9_mio.sbatch | 2 +-\r\n .../jobs_old/overfit_sample/train_tokenizer_overfit_sample_size_small_mio.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/procgen/data_gen_gym_acrobot.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/procgen/data_gen_gym_carracing.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/procgen/data_gen_gym_mountaincar copy.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/procgen/data_gen_gym_mountaincar.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/procgen/data_gen_gym_pendulum.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/sampling/sample_coinrun.sh | 2 +-\r\n dev/alfred/horeka/jobs_old/train_dyn/train_dyn_knoms_full.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/train_dyn_dev/train_dyn.sh | 4 +-\r\n dev/alfred/horeka/jobs_old/train_dyn_dev/train_dyn_checkpt_loading_test_dev.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/train_dyn_dev/train_dyn_dev.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/train_dyn_dev/train_dyn_single_batch.sh | 4 +-\r\n dev/alfred/horeka/jobs_old/train_lam/train_lam_full.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/train_lam_dev/train_lam.sh | 2 +-\r\n dev/alfred/horeka/jobs_old/train_lam_dev/train_lam_dev.sbatch | 4 +-\r\n dev/alfred/horeka/jobs_old/train_lam_dev/train_lam_full_dev.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/train_lam_dev/train_lam_single_batch.sh | 2 +-\r\n 
dev/alfred/horeka/jobs_old/train_tokenizer/train_lam_oai_dev copy.sbatch | 4 +-\r\n dev/alfred/horeka/jobs_old/train_tokenizer/train_lam_oai_dev.sbatch | 4 +-\r\n .../jobs_old/train_tokenizer/train_tokenizer_knoms_overfit_single_batch.sbatch | 2 +-\r\n .../jobs_old/train_tokenizer/train_tokenizer_knoms_overfit_single_sample.sbatch | 2 +-\r\n .../jobs_old/train_tokenizer/train_tokenizer_knoms_overfit_tfrecord_10.sbatch | 2 +-\r\n .../jobs_old/train_tokenizer/train_tokenizer_knoms_overfit_tfrecord_full.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/train_tokenizer_dev/train_tokenizer.sbatch | 4 +-\r\n dev/alfred/horeka/jobs_old/train_tokenizer_dev/train_tokenizer.sh | 2 +-\r\n dev/alfred/horeka/jobs_old/train_tokenizer_dev/train_tokenizer_copy.sbatch | 2 +-\r\n dev/alfred/horeka/jobs_old/train_tokenizer_dev/train_tokenizer_h100.sbatch | 2 +-\r\n .../horeka/jobs_old/train_tokenizer_dev/train_tokenizer_overfit_tfrecord_10.sh | 2 +-\r\n dev/alfred/horeka/jobs_old/train_tokenizer_dev/train_tokenizer_single_batch.sh | 2 +-\r\n .../jobs_old/validation/tokenizer_lr_tuning/train_tokenizer_lr_general.sbatch | 4 +-\r\n .../validation/tokenizer_without_optimizer/train_tokenizer_lr_general.sbatch | 4 +-\r\n dev/alfred/parallel_lam_dynamics_training/train_lam_init_params.sbatch | 4 +-\r\n dev/alfred/parallel_lam_dynamics_training/train_parallel_lam_dynamics.sbatch | 2 +-\r\n .../parallel_lam_dynamics_training/train_parallel_lam_dynamics_lr_1e-4.sbatch | 2 +-\r\n dev/alfred/parallel_lam_dynamics_training/train_parallel_lam_dynamics_mock.sbatch | 2 +-\r\n dev/mihir/cremers/coinrun_train_tokenizer.sbatch | 2 +-\r\n dev/mihir/cremers/train_tokenizer.sbatch | 2 +-\r\n dev/mihir/cremers/train_tokenizer_overfit_batch_coinrun.sbatch | 2 +-\r\n dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_180M.sbatch | 2 +-\r\n dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_255M.sbatch | 2 +-\r\n dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_356M.sbatch | 2 +-\r\n dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_500M.sbatch | 2 +-\r\n dev/mihir/horeka/causal_fit_modelsizes/train_dynamics_new_arch_80M.sbatch | 2 +-\r\n dev/mihir/horeka/chunked_ablation/train_dynamics_chunked_dataset.sbatch | 2 +-\r\n dev/mihir/horeka/chunked_ablation/train_dynamics_unchunked_dataset.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_batch/train_lam_overfit_batch.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_batch/train_tokenizer_overfit_batch.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_batch_tiny/train_dynamics_overfit_batch_big_lr.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_batch_tiny/train_dynamics_overfit_batch_mid_lr.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_batch_tiny/train_dynamics_overfit_batch_smol_lr.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_batch_tiny/train_lam_overfit_batch.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_batch_tiny/train_tokenizer_overfit_batch.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_sample/causal/dynamics_overfit_sample.sbatch | 2 +-\r\n .../horeka/overfit_sample/causal/dynamics_overfit_sample_gaussian_noise.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_sample/maskgit/train_lam_overfit_sample.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_sample/maskgit/train_tokenizer_overfit_sample.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr-2.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_sample_tiny/train_dynamics_overfit_sample_big_lr.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_sample_tiny/train_dynamics_overfit_sample_mid_lr.sbatch | 2 +-\r\n 
dev/mihir/horeka/overfit_sample_tiny/train_dynamics_overfit_sample_smol_lr.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_sample_tiny/train_lam_overfit_sample.sbatch | 2 +-\r\n dev/mihir/horeka/overfit_sample_tiny/train_tokenizer_overfit_sample.sbatch | 2 +-\r\n dev/mihir/horeka/train_tokenizer_coinrun.sbatch | 2 +-\r\n dev/mihir/horeka/yolo-runs/sampling.sh | 4 +-\r\n dev/mihir/horeka/yolo-runs/sampling_dev.sh | 4 +-\r\n dev/mihir/horeka/yolo-runs/tester.sh | 2 +-\r\n dev/mihir/horeka/yolo-runs/train_dynamics_maskgit_speedrun.sbatch | 2 +-\r\n dev/mihir/horeka/yolo-runs/train_dynamics_new_arch.sbatch | 2 +-\r\n dev/mihir/horeka/yolo-runs/train_dynamics_new_arch_speedrun.sbatch | 2 +-\r\n dev/mihir/horeka/yolo-runs/train_dynamics_yolorun.sbatch | 2 +-\r\n dev/mihir/horeka/yolo-runs/train_tokenizer_maskgit_speedrun.sbatch | 2 +-\r\n jobs/alfred/berlin/atari/atari_dynamics/atari_dynamics.sbatch | 2 +-\r\n jobs/alfred/berlin/atari/atari_sampling/sample_causal.sbatch | 2 +-\r\n jobs/alfred/berlin/atari/atari_sampling/sample_maskgit.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_baseline.sbatch | 2 +-\r\n .../alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_ffn_2048_num_block_4.sbatch | 2 +-\r\n .../coinrun/coinrun_ablation/coinrun_lam_mix_precision_ffn_2048_num_block_4.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_mixed_prec.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_wsd.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_wsd_3e-5_3e-6.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_wsd_3e-6.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_ablation/coinrun_lam_wsd_8e-6.sbatch | 4 +-\r\n jobs/alfred/berlin/coinrun/coinrun_ablation/wsd/coinrun_lam_wsd_3e-5_3e-6.sbatch | 79 ++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/coinrun_ablation/wsd/coinrun_lam_wsd_3e-6.sbatch | 79 ++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/coinrun_ablation/wsd/coinrun_lam_wsd_8e-6.sbatch | 79 ++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/coinrun_bigrun/coinrun_dynamics_reproduction.sbatch | 2 +-\r\n .../berlin/coinrun/coinrun_bigrun/coinrun_dynamics_reproduction_cotrain.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_bigrun/coinrun_lam_big_run.sbatch | 2 +-\r\n .../coinrun_tokenizer_repoduction_ffn_512_n_blocks_8_full_prec.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_bigrun/sample.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_bigrun/sample_cotrain.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_data/generate_data_500mio.sbatch | 36 +++++++++++\r\n jobs/alfred/berlin/coinrun/coinrun_dynamics/coinrun_dynamics.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_dynamics/coinrun_dynamics_batch_size_144.sbatch | 2 +-\r\n .../berlin/coinrun/coinrun_dynamics/coinrun_dynamics_batch_size_144_3e-5.sbatch | 2 +-\r\n .../coinrun_dynamics/coinrun_dynamics_batch_size_144_3e-5_invest_nan.sbatch | 2 +-\r\n .../coinrun_dynamics_batch_size_144_3e-5_invest_nan_restore_40k.sbatch | 2 +-\r\n .../coinrun/coinrun_dynamics/coinrun_dynamics_batch_size_144_cotraining.sbatch | 2 +-\r\n .../coinrun_dynamics/coinrun_dynamics_batch_size_144_cotraining_3e-5.sbatch | 2 +-\r\n .../alfred/berlin/coinrun/coinrun_dynamics/coinrun_dynamics_maskgit_overfit.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_lam/lam_coinrun.sbatch | 2 +-\r\n .../berlin/coinrun/coinrun_reproduction/coinrun_dynamics_reproduction.sbatch | 2 +-\r\n 
.../coinrun/coinrun_reproduction/coinrun_dynamics_reproduction_cotrain.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_reproduction/coinrun_lam_reproduction.sbatch | 2 +-\r\n .../berlin/coinrun/coinrun_reproduction/coinrun_lam_reproduction_full_prec.sbatch | 2 +-\r\n .../coinrun/coinrun_reproduction/coinrun_lam_reproduction_full_prec_cosine.sbatch | 2 +-\r\n .../coinrun_reproduction/coinrun_lam_reproduction_full_prec_w_restore.sbatch | 2 +-\r\n .../coinrun/coinrun_reproduction/coinrun_lam_reproduction_mix_prec_cosine.sbatch | 2 +-\r\n .../coinrun_lam_reproduction_mix_prec_cosine_min_init_lt_3e-6.sbatch | 2 +-\r\n .../berlin/coinrun/coinrun_reproduction/coinrun_tokenizer_repoduction.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_reproduction/sample.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_reproduction/sample_cotrain.sbatch | 2 +-\r\n .../berlin/coinrun/coinrun_reproduction_10k/coinrun_dynamics_reproduction.sbatch | 2 +-\r\n .../coinrun/coinrun_reproduction_10k/coinrun_dynamics_reproduction_cotrain.sbatch | 2 +-\r\n .../alfred/berlin/coinrun/coinrun_reproduction_10k/coinrun_lam_reproduction.sbatch | 2 +-\r\n .../berlin/coinrun/coinrun_reproduction_10k/coinrun_lam_reproduction_dc_0.sbatch | 2 +-\r\n .../coinrun/coinrun_reproduction_10k/coinrun_lam_reproduction_ffn_512.sbatch | 2 +-\r\n .../coinrun_reproduction_10k/coinrun_lam_reproduction_ffn_512_num_blocks_8.sbatch | 2 +-\r\n .../coinrun_lam_reproduction_ffn_512_num_blocks_8_full_prec.sbatch | 2 +-\r\n .../coinrun/coinrun_reproduction_10k/coinrun_lam_reproduction_lower_lr.sbatch | 2 +-\r\n .../coinrun/coinrun_reproduction_10k/coinrun_lam_reproduction_lower_lr_0.5x.sbatch | 2 +-\r\n .../coinrun_reproduction_10k/coinrun_lam_reproduction_lower_lr_3e6_1e5_0.sbatch | 2 +-\r\n .../coinrun_reproduction_10k/coinrun_lam_reproduction_lower_lr_3e6_3e5_0.sbatch | 2 +-\r\n .../berlin/coinrun/coinrun_reproduction_10k/coinrun_tokenizer_repoduction.sbatch | 2 +-\r\n .../coinrun_tokenizer_repoduction_ffn_512_n_blocks_8.sbatch | 2 +-\r\n .../coinrun_tokenizer_repoduction_ffn_512_n_blocks_8_full_prec.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_reproduction_10k/sample.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_reproduction_10k/sample_cotrain.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_sampling/sample_causal.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_sampling/sample_maskgit.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_sampling/sample_maskgit_cotrain_3e-5.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_sampling/sample_maskgit_non_cotrain_3e-5.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/coinrun_sampling/sample_maskgit_overfit.sbatch | 2 +-\r\n jobs/alfred/berlin/coinrun/w_val/lam_baseline.sbatch | 81 +++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/w_val/lam_baseline_no_print.sbatch | 81 +++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/w_val/lam_batch_100.sbatch | 81 +++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/w_val/lam_batch_200.sbatch | 81 +++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/w_val/lam_jasmine_default.sbatch | 79 ++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/w_val/tokenizer_baseline.sbatch | 80 +++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/w_val/tokenizer_baseline_debug.sbatch | 129 ++++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/w_val/tokenizer_baseline_debug_500m.sbatch | 129 ++++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/w_val/tokenizer_batch_100.sbatch | 80 
+++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/w_val/tokenizer_batch_200.sbatch | 80 +++++++++++++++++++++++++\r\n jobs/alfred/berlin/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch | 2 +-\r\n .../berlin/jafar_og_reproduction/og_coinrun_dynamics_reproduction_requeue.sbatch | 2 +-\r\n .../jafar_og_reproduction/og_coinrun_dynamics_reproduction_requeue_2.0.sbatch | 2 +-\r\n jobs/alfred/berlin/jafar_og_reproduction/og_coinrun_lam_reproduction.sbatch | 2 +-\r\n jobs/alfred/berlin/jafar_og_reproduction/og_coinrun_tokenizer_repoduction.sbatch | 2 +-\r\n .../berlin/jafar_og_reproduction/og_coinrun_tokenizer_repoduction_requeue.sbatch | 2 +-\r\n .../jafar_og_reproduction/og_coinrun_tokenizer_repoduction_requeue_2.0.sbatch | 2 +-\r\n .../berlin/jafar_og_reproduction/requeue/og_coinrun_dynamics_reproduction.sbatch | 2 +-\r\n .../alfred/berlin/jafar_og_reproduction/requeue/og_coinrun_lam_reproduction.sbatch | 2 +-\r\n .../berlin/jafar_og_reproduction/requeue/og_coinrun_tokenizer_repoduction.sbatch | 2 +-\r\n jobs/alfred/berlin/jafar_og_reproduction/sample.sbatch | 2 +-\r\n jobs/alfred/berlin/jafar_og_reproduction/sample_og_150k.sbatch | 2 +-\r\n jobs/alfred/berlin/jafar_og_reproduction/sample_og_175k.sbatch | 2 +-\r\n jobs/alfred/berlin/jafar_og_reproduction/sample_og_200k.sbatch | 2 +-\r\n jobs/alfred/berlin/minecraft/gt_act_sampling/sample_gt_act_maskgit.sbatch | 2 +-\r\n jobs/alfred/berlin/minecraft/minecraft_sampling/sample_causal.sbatch | 2 +-\r\n .../sample_dynamics-maskgit-8-node-darkness-filter-3423250.sbatch | 2 +-\r\n .../jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch | 2 +-\r\n .../helmholtz_cluster/jafar_og_reproduction/og_coinrun_lam_reproduction.sbatch | 2 +-\r\n .../jafar_og_reproduction/og_coinrun_tokenizer_repoduction.sbatch | 2 +-\r\n jobs/alfred/horeka/atari_dynamics/atari_dynamics.sbatch | 2 +-\r\n jobs/alfred/horeka/dyn_gt_actions_ablation_prepend/dyn_gt_actions_ablation.sbatch | 2 +-\r\n jobs/alfred/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch | 2 +-\r\n jobs/alfred/horeka/lr_tuning/tokenizer/train_tokenizer_lr_5e-5.sbatch | 2 +-\r\n jobs/alfred/horeka/lr_tuning/tokenizer/train_tokenizer_lr_general.sbatch | 2 +-\r\n jobs/alfred/horeka/misc/train_dyn_knoms.sbatch | 2 +-\r\n jobs/alfred/horeka/misc/train_lam_knoms.sbatch | 2 +-\r\n jobs/alfred/horeka/misc/train_tokenizer_knoms.sbatch | 2 +-\r\n jobs/franz/berlin/atari/atari_data_gen.sh | 12 ++++\r\n jobs/franz/berlin/coinrun/coinrun_dynamics/coinrun_dynamics_fp32_adam_moments.sh | 2 +-\r\n jobs/franz/berlin/coinrun/coinrun_dynamics/coinrun_dynamics_fp32_layernorm.sh | 2 +-\r\n .../berlin/coinrun/coinrun_lam/lam_coinrun_nan_investigation_100k_to_107k.sbatch | 2 +-\r\n jobs/franz/berlin/coinrun/coinrun_lam/lam_coinrun_nan_investigation_z_loss.sbatch | 46 ++++++++++++++\r\n jobs/franz/berlin/coinrun/coinrun_tokenizer/coinrun_tokenizer_repoduction.sbatch | 51 ++++++++++++++++\r\n .../horeka/batchsize_scaling/dynamics_cotraining_new_arch/linear_lr/tester.sh | 2 +-\r\n .../dynamics_cotraining_new_arch/linear_lr/train_dynamics_16_nodes.sbatch | 2 +-\r\n .../dynamics_cotraining_new_arch/linear_lr/train_dynamics_1_nodes.sbatch | 2 +-\r\n .../dynamics_cotraining_new_arch/linear_lr/train_dynamics_2_nodes.sbatch | 2 +-\r\n .../dynamics_cotraining_new_arch/linear_lr/train_dynamics_4_nodes.sbatch | 2 +-\r\n .../dynamics_cotraining_new_arch/linear_lr/train_dynamics_8_nodes.sbatch | 2 +-\r\n jobs/franz/horeka/batchsize_scaling/dynamics_cotraining_new_arch/sqrt_lr/tester.sh | 2 +-\r\n 
.../dynamics_cotraining_new_arch/sqrt_lr/train_dynamics_16_nodes.sbatch | 2 +-\r\n .../dynamics_cotraining_new_arch/sqrt_lr/train_dynamics_1_nodes.sbatch | 2 +-\r\n .../dynamics_cotraining_new_arch/sqrt_lr/train_dynamics_2_nodes.sbatch | 2 +-\r\n .../dynamics_cotraining_new_arch/sqrt_lr/train_dynamics_4_nodes.sbatch | 2 +-\r\n .../dynamics_cotraining_new_arch/sqrt_lr/train_dynamics_8_nodes.sbatch | 2 +-\r\n .../action_space_scaling/co_training/train_dynamics_cotraining_12_actions.sbatch | 2 +-\r\n .../action_space_scaling/co_training/train_dynamics_cotraining_20_actions.sbatch | 2 +-\r\n .../action_space_scaling/co_training/train_dynamics_cotraining_50_actions.sbatch | 2 +-\r\n .../action_space_scaling/co_training/train_dynamics_cotraining_6_actions.sbatch | 2 +-\r\n .../action_space_scaling/co_training/train_dynamics_cotraining_8_actions.sbatch | 2 +-\r\n jobs/mihir/horeka/action_space_scaling/solo_lam/train_lam_actionspace_10.sbatch | 2 +-\r\n jobs/mihir/horeka/action_space_scaling/solo_lam/train_lam_actionspace_12.sbatch | 2 +-\r\n jobs/mihir/horeka/action_space_scaling/solo_lam/train_lam_actionspace_20.sbatch | 2 +-\r\n jobs/mihir/horeka/action_space_scaling/solo_lam/train_lam_actionspace_50.sbatch | 2 +-\r\n jobs/mihir/horeka/action_space_scaling/solo_lam/train_lam_actionspace_6.sbatch | 2 +-\r\n jobs/mihir/horeka/action_space_scaling/solo_lam/train_lam_actionspace_8.sbatch | 2 +-\r\n jobs/mihir/horeka/atari/sample_causal.sbatch | 2 +-\r\n jobs/mihir/horeka/atari/sample_maskgit.sbatch | 2 +-\r\n jobs/mihir/horeka/atari/train_dynamics_causal.sbatch | 2 +-\r\n jobs/mihir/horeka/atari/train_dynamics_maskgit.sbatch | 2 +-\r\n jobs/mihir/horeka/atari/train_tokenizer_lr_3e-5.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/tester.sh | 2 +-\r\n .../mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_16_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_1_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_2_nodes.sbatch | 2 +-\r\n .../mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_32_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_4_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/dynamics/sqrt_lr/train_dynamics_8_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/dynamics_cotraining/sqrt_lr/tester.sh | 2 +-\r\n .../batchsize_scaling/dynamics_cotraining/sqrt_lr/train_dynamics_16_nodes.sbatch | 2 +-\r\n .../batchsize_scaling/dynamics_cotraining/sqrt_lr/train_dynamics_1_nodes.sbatch | 2 +-\r\n .../batchsize_scaling/dynamics_cotraining/sqrt_lr/train_dynamics_2_nodes.sbatch | 2 +-\r\n .../batchsize_scaling/dynamics_cotraining/sqrt_lr/train_dynamics_4_nodes.sbatch | 2 +-\r\n .../batchsize_scaling/dynamics_cotraining/sqrt_lr/train_dynamics_8_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/lam/linear_lr/train_lam_16_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/lam/linear_lr/train_lam_1_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/lam/linear_lr/train_lam_2_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/lam/linear_lr/train_lam_4_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/lam/linear_lr/train_lam_8_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/lam/sqrt_lr/train_lam_16_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/lam/sqrt_lr/train_lam_1_nodes.sbatch | 2 +-\r\n 
jobs/mihir/horeka/batchsize_scaling/lam/sqrt_lr/train_lam_2_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/lam/sqrt_lr/train_lam_4_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/lam/sqrt_lr/train_lam_8_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/const_lr/train_tokenizer_16_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/const_lr/train_tokenizer_1_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/const_lr/train_tokenizer_2_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/const_lr/train_tokenizer_32_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/const_lr/train_tokenizer_4_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/const_lr/train_tokenizer_8_nodes.sbatch | 2 +-\r\n .../logs/logs_training/train_tokenizer_batch_size_scaling_2_node_3292213.log | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/tokenizer/linear_lr/tester.sh | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/linear_lr/train_tokenizer_16_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/linear_lr/train_tokenizer_1_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/linear_lr/train_tokenizer_2_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/linear_lr/train_tokenizer_32_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/linear_lr/train_tokenizer_4_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/linear_lr/train_tokenizer_8_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/batchsize_scaling/tokenizer/sqrt_lr/tester.sh | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/sqrt_lr/train_tokenizer_16_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/sqrt_lr/train_tokenizer_1_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/sqrt_lr/train_tokenizer_2_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/sqrt_lr/train_tokenizer_32_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/sqrt_lr/train_tokenizer_4_nodes.sbatch | 2 +-\r\n .../horeka/batchsize_scaling/tokenizer/sqrt_lr/train_tokenizer_8_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh | 2 +-\r\n jobs/mihir/horeka/breakout/default_runs/train_lam_as_15.sh | 2 +-\r\n jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh | 2 +-\r\n jobs/mihir/horeka/breakout/default_runs/train_tokenizer_default.sh | 2 +-\r\n jobs/mihir/horeka/causal_big_runs/train_dynamics_2_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_dev.sh | 2 +-\r\n jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_filter_dark.sbatch | 2 +-\r\n jobs/mihir/horeka/causal_big_runs/train_dynamics_8_nodes_filter_dark_req.sbatch | 2 +-\r\n jobs/mihir/horeka/causal_big_runs/train_dynamics_dev.sbatch | 2 +-\r\n jobs/mihir/horeka/coinrun/default_runs/train_lam_as_15.sh | 2 +-\r\n jobs/mihir/horeka/coinrun/default_runs/train_lam_default.sh | 2 +-\r\n jobs/mihir/horeka/coinrun/default_runs/train_tokenizer_default.sh | 2 +-\r\n jobs/mihir/horeka/coinrun/sample_causal.sbatch | 2 +-\r\n jobs/mihir/horeka/coinrun/sample_maskgit.sbatch | 2 +-\r\n jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh | 2 +-\r\n jobs/mihir/horeka/coinrun/train_dyn_single_gpu_gt_actions.sh | 2 +-\r\n jobs/mihir/horeka/coinrun/train_dynamics_causal.sbatch | 2 +-\r\n jobs/mihir/horeka/coinrun/train_dynamics_maskgit.sbatch | 2 +-\r\n jobs/mihir/horeka/coinrun/train_lam_single_gpu.sh | 2 +-\r\n 
jobs/mihir/horeka/coinrun/train_tokenizer_lr_1e-4.sbatch | 2 +-\r\n jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh | 2 +-\r\n jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_dynamics_reproduction.sbatch | 2 +-\r\n jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_lam_reproduction.sbatch | 2 +-\r\n jobs/mihir/horeka/jafar_og_reproduction/og_coinrun_tokenizer_repoduction.sbatch | 2 +-\r\n jobs/mihir/horeka/lam/coinrun/coinrun_lam_133M.sbatch | 2 +-\r\n jobs/mihir/horeka/lam/coinrun/coinrun_lam_311.sbatch | 2 +-\r\n jobs/mihir/horeka/lam/coinrun/coinrun_lam_400M.sbatch | 2 +-\r\n jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_128.sbatch | 2 +-\r\n jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_16.sbatch | 2 +-\r\n jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_32.sbatch | 2 +-\r\n jobs/mihir/horeka/lam/coinrun/coinrun_lam_as_64.sbatch | 2 +-\r\n jobs/mihir/horeka/lam/coinrun/coinrun_lam_base.sbatch | 2 +-\r\n jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_1node_dev.sbatch | 2 +-\r\n .../horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-133M.sbatch | 2 +-\r\n .../horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-311M.sbatch | 2 +-\r\n .../horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-400M.sbatch | 2 +-\r\n .../horeka/lam/minecraft/train_lam_minecraft_8node-darkness-filter-base.sbatch | 2 +-\r\n jobs/mihir/horeka/lam/minecraft/train_lam_minecraft_8node.sbatch | 2 +-\r\n jobs/mihir/horeka/lamap/coinrun_lam_base.sbatch | 2 +-\r\n jobs/mihir/horeka/lamap/coinrun_lam_base_dev.sh | 2 +-\r\n jobs/mihir/horeka/lr_tuning/dynamics/train_dyn_default_const.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/dynamics/train_dyn_default_cos.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/dynamics/train_dyn_default_wsd.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4_2nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4_2nodes_req.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4_4nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4_4nodes_req.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4_8nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4_8nodes_dev.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_1e-4_dev.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_5e-5.sbatch | 2 +-\r\n jobs/mihir/horeka/lr_tuning/tokenizer/train_tokenizer_lr_5e-6_8nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/mask_prob_fix/train_dynamics_2_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/mask_prob_fix/train_dynamics_2_nodes_small_model.sbatch | 2 +-\r\n jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev.sbatch | 2 +-\r\n jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_dev_requeue.sbatch | 2 +-\r\n jobs/mihir/horeka/mask_prob_fix/train_dynamics_8_nodes_small_model.sbatch | 2 +-\r\n jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes.sbatch | 2 +-\r\n jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes_filter_dark.sbatch | 2 +-\r\n jobs/mihir/horeka/maskgit_big_runs/train_dynamics_8_nodes_filter_dark_req.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/dynamics/1_train_dyn_36M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/dynamics/2_train_dyn_110M.sbatch | 2 +-\r\n 
jobs/mihir/horeka/modelsize_scaling/dynamics/3_train_dyn_180M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/dynamics/4_train_dyn_270M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/dynamics/5_train_dyn_500M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/dynamics/A_train_dyn_1.5M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/dynamics/B_train_dyn_3.5.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/dynamics/C_train_dyn_6M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/dynamics/D_train_dyn_12M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/dynamics/E_train_dyn_18M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/lam/tester.sh | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/lam/train_lam_38M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/runner.sh | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/tokenizer/tester.sh | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/tokenizer/train_tokenizer_127M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/tokenizer/train_tokenizer_227M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/tokenizer/train_tokenizer_37M.sbatch | 2 +-\r\n jobs/mihir/horeka/modelsize_scaling/tokenizer/train_tokenizer_74M.sbatch | 2 +-\r\n 478 files changed, 1780 insertions(+), 497 deletions(-)\r\n create mode 100644 jobs/alfred/berlin/coinrun/coinrun_ablation/wsd/coinrun_lam_wsd_3e-5_3e-6.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/coinrun_ablation/wsd/coinrun_lam_wsd_3e-6.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/coinrun_ablation/wsd/coinrun_lam_wsd_8e-6.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/coinrun_data/generate_data_500mio.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/w_val/lam_baseline.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/w_val/lam_baseline_no_print.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/w_val/lam_batch_100.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/w_val/lam_batch_200.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/w_val/lam_jasmine_default.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/w_val/tokenizer_baseline.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/w_val/tokenizer_baseline_debug.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/w_val/tokenizer_baseline_debug_500m.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/w_val/tokenizer_batch_100.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/w_val/tokenizer_batch_200.sbatch\r\n create mode 100644 jobs/franz/berlin/atari/atari_data_gen.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/coinrun_lam/lam_coinrun_nan_investigation_z_loss.sbatch\r\n create mode 100644 jobs/franz/berlin/coinrun/coinrun_tokenizer/coinrun_tokenizer_repoduction.sbatch\r\n]0;tum_cte0515@hkn0408:~/Projects/jasmine/slurm[?2004h(jasmine) [tum_cte0515@hkn0408 slurm]$ ",,terminal_output +6388,8566895,"TERMINAL",0,0,"62",,terminal_output +6389,8567401,"TERMINAL",0,0,"7",,terminal_output +6390,8567794,"TERMINAL",0,0,"g",,terminal_output +6391,8568005,"TERMINAL",0,0,"74",,terminal_output +6392,8568172,"TERMINAL",0,0,"i",,terminal_output +6393,8568327,"TERMINAL",0,0,"t",,terminal_output +6394,8568393,"TERMINAL",0,0,"8",,terminal_output +6395,8568448,"TERMINAL",0,0," s",,terminal_output +6396,8568579,"TERMINAL",0,0,"t",,terminal_output +6397,8568649,"TERMINAL",0,0,"a",,terminal_output +6398,8568779,"TERMINAL",0,0,"s",,terminal_output +6399,8568897,"TERMINAL",0,0,"h",,terminal_output +6400,8569023,"TERMINAL",0,0," ",,terminal_output 
+6401,8569023,"TERMINAL",0,0,"95",,terminal_output +6402,8569089,"TERMINAL",0,0,"p",,terminal_output +6403,8569234,"TERMINAL",0,0,"o",,terminal_output +6404,8569352,"TERMINAL",0,0,"p",,terminal_output +6405,8569373,"TERMINAL",0,0,"9",,terminal_output +6406,8569502,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6407,8569623,"TERMINAL",0,0,"Auto-merging jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh\r\nAuto-merging jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh\r\nAuto-merging jobs/mihir/horeka/breakout/default_runs/train_tokenizer_default.sh\r\nAuto-merging jobs/mihir/horeka/coinrun/sample_maskgit.sbatch\r\nAuto-merging jobs/mihir/horeka/coinrun/train_dyn_single_gpu_gt_actions.sh\r\n",,terminal_output +6408,8569694,"TERMINAL",0,0,"On branch main\r\nYour branch is up to date with 'origin/main'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh\r\n\tmodified: jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh\r\n\tmodified: jobs/mihir/horeka/breakout/default_runs/train_tokenizer_default.sh\r\n\tmodified: jobs/mihir/horeka/coinrun/sample_maskgit.sbatch\r\n\tmodified: jobs/mihir/horeka/coinrun/train_dyn_single_gpu_gt_actions.sh\r\n\tmodified: jobs/mihir/horeka/preprocessing/breakout_chunked.sh\r\n\tmodified: jobs/mihir/horeka/preprocessing/coinrun_chunked.sh\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tjobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh\r\n\tjobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch\r\n\tjobs/mihir/horeka/breakout/sample_maskgit.sbatch\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\nDropped refs/stash@{0} (810421448336982e0294075ed03124a1cc24d5f8)\r\n]0;tum_cte0515@hkn0408:~/Projects/jasmine/slurm[?2004h(jasmine) [tum_cte0515@hkn0408 slurm]$ ",,terminal_output +6409,8570099,"TERMINAL",0,0,"206",,terminal_output +6410,8570468,"TERMINAL",0,0,"20",,terminal_output +6411,8571087,"TERMINAL",0,0,"17",,terminal_output +6412,8571511,"TERMINAL",0,0,"1",,terminal_output +6413,8572125,"TERMINAL",0,0,"28",,terminal_output +6414,8572648,"TERMINAL",0,0,"2",,terminal_output +6415,8573193,"TERMINAL",0,0,"39",,terminal_output +6416,8573702,"TERMINAL",0,0,"3",,terminal_output +6417,8574258,"TERMINAL",0,0,"43:00",,terminal_output +6418,8574656,"TERMINAL",0,0,"4",,terminal_output +6419,8575269,"TERMINAL",0,0,"51",,terminal_output +6420,8575322,"TERMINAL",0,0,"git stash pop",,terminal_output +6421,8575493,"TERMINAL",0,0,"pull",,terminal_output +6422,8575604,"TERMINAL",0,0,"5",,terminal_output +6423,8575885,"TERMINAL",0,0,"stash",,terminal_output +6424,8576114,"TERMINAL",0,0,"pull",,terminal_output +6425,8576338,"TERMINAL",0,0,"62",,terminal_output +6426,8576594,"TERMINAL",0,0,"cd slurm/",,terminal_output +6427,8576641,"TERMINAL",0,0,"6",,terminal_output +6428,8576910,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +6429,8578200,"TERMINAL",0,0,"74",,terminal_output +6430,8578211,"TERMINAL",0,0,"77",,terminal_output +6431,8579280,"TERMINAL",0,0,"95",,terminal_output +6432,8579289,"TERMINAL",0,0,"9",,terminal_output +6433,8580335,"TERMINAL",0,0,"30",,terminal_output +6434,8580336,"TERMINAL",0,0,"306",,terminal_output +6435,8581359,"TERMINAL",0,0,"1",,terminal_output 
+6436,8581360,"TERMINAL",0,0,"17",,terminal_output +6437,8582404,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\n\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264\n\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=12 \\n --start_frame=4 \\n --image_height=10 \\n --image_width=10 \\n --dyna_type=maskgit \\n --lam_patch_size=4 \",shellscript,tab +6438,8582424,"TERMINAL",0,0,"2",,terminal_output +6439,8582464,"TERMINAL",0,0,"28",,terminal_output +6440,8583365,"TERMINAL",0,0,"3",,terminal_output +6441,8583462,"TERMINAL",0,0,"39",,terminal_output +6442,8584469,"TERMINAL",0,0,"4",,terminal_output +6443,8584469,"TERMINAL",0,0,"410",,terminal_output +6444,8585458,"TERMINAL",0,0,"5",,terminal_output +6445,8585473,"TERMINAL",0,0,"51",,terminal_output +6446,8585949,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",936,0,"",shellscript,selection_mouse +6447,8586551,"TERMINAL",0,0,"6",,terminal_output +6448,8586551,"TERMINAL",0,0,"62",,terminal_output +6449,8587584,"TERMINAL",0,0,"7",,terminal_output +6450,8587585,"TERMINAL",0,0,"73",,terminal_output +6451,8587614,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",936,0,"s",shellscript,content +6452,8587615,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",937,0,"",shellscript,selection_keyboard +6453,8588108,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",936,1,"",shellscript,content +6454,8588218,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",936,0,"j",shellscript,content +6455,8588219,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",937,0,"",shellscript,selection_keyboard +6456,8588285,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",937,0,"a",shellscript,content +6457,8588286,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",938,0,"",shellscript,selection_keyboard +6458,8588387,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",938,0,"s",shellscript,content +6459,8588388,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",939,0,"",shellscript,selection_keyboard +6460,8588458,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",939,0,"m",shellscript,content +6461,8588459,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",940,0,"",shellscript,selection_keyboard +6462,8588582,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",940,0,"i",shellscript,content +6463,8588582,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",941,0,"",shellscript,selection_keyboard +6464,8588583,"TERMINAL",0,0,"8",,terminal_output +6465,8588598,"TERMINAL",0,0,"84",,terminal_output 
+6466,8588656,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",941,0,"n",shellscript,content +6467,8588657,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",942,0,"",shellscript,selection_keyboard +6468,8588732,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",942,0,"e",shellscript,content +6469,8588733,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",943,0,"",shellscript,selection_keyboard +6470,8589258,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",943,0,"/",shellscript,content +6471,8589259,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",944,0,"",shellscript,selection_keyboard +6472,8589597,"slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",943,0,"",shellscript,selection_command +6473,8589643,"TERMINAL",0,0,"9",,terminal_output +6474,8589644,"TERMINAL",0,0,"95",,terminal_output +6475,8590676,"TERMINAL",0,0,"40",,terminal_output +6476,8590676,"TERMINAL",0,0,"406",,terminal_output +6477,8591680,"TERMINAL",0,0,"1",,terminal_output +6478,8591691,"TERMINAL",0,0,"17",,terminal_output +6479,8592711,"TERMINAL",0,0,"2",,terminal_output +6480,8592723,"TERMINAL",0,0,"28",,terminal_output +6481,8593742,"TERMINAL",0,0,"3",,terminal_output +6482,8593770,"TERMINAL",0,0,"39",,terminal_output +6483,8595003,"TERMINAL",0,0,"4",,terminal_output +6484,8595003,"TERMINAL",0,0,"420",,terminal_output +6485,8595821,"TERMINAL",0,0,"5",,terminal_output +6486,8595875,"TERMINAL",0,0,"51",,terminal_output +6487,8596889,"TERMINAL",0,0,"6",,terminal_output +6488,8596955,"TERMINAL",0,0,"62",,terminal_output +6489,8597714,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=01:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:4\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/sampling/maskgit/%x_%j.log\n#SBATCH --job-name=coinrun_sample_maskgit\n\n# Unload modules that may interfere\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n\n# Activate virtual environment\n# source .venv/bin/activate\n\narray_records_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\nCHECKPOINT_PATH=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/interactive/3519095\n\necho ""Sampling from checkpoint: $CHECKPOINT_PATH""\n\nsrun python sample.py \\n --checkpoint ""$CHECKPOINT_PATH"" \\n --data_dir=$array_records_dir \\n --seq_len=16 \\n --batch_size=12 \\n --start_frame=4 \\n --image_height=10 \\n --image_width=10 \\n --dyna_type=maskgit \\n --use_gt_actions \",shellscript,tab +6490,8597910,"TERMINAL",0,0,"7",,terminal_output +6491,8597919,"TERMINAL",0,0,"74",,terminal_output +6492,8598922,"TERMINAL",0,0,"8",,terminal_output +6493,8598987,"TERMINAL",0,0,"95",,terminal_output +6494,8599468,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",938,0,"",shellscript,selection_command +6495,8599990,"TERMINAL",0,0,"50",,terminal_output +6496,8600131,"TERMINAL",0,0,"506",,terminal_output +6497,8600954,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",937,0,"",shellscript,selection_mouse +6498,8601005,"TERMINAL",0,0,"1",,terminal_output +6499,8601059,"TERMINAL",0,0,"17",,terminal_output 
+6500,8601847,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",938,0,"",shellscript,selection_command +6501,8602073,"TERMINAL",0,0,"2",,terminal_output +6502,8602124,"TERMINAL",0,0,"28",,terminal_output +6503,8602659,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",938,0,"j",shellscript,content +6504,8602660,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",939,0,"",shellscript,selection_keyboard +6505,8602726,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",939,0,"a",shellscript,content +6506,8602726,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",940,0,"",shellscript,selection_keyboard +6507,8602838,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",940,0,"s",shellscript,content +6508,8602841,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",941,0,"",shellscript,selection_keyboard +6509,8602970,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",941,0,"m",shellscript,content +6510,8602971,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",942,0,"",shellscript,selection_keyboard +6511,8603101,"TERMINAL",0,0,"3",,terminal_output +6512,8603138,"TERMINAL",0,0,"39",,terminal_output +6513,8603165,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",942,0,"i",shellscript,content +6514,8603166,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",943,0,"",shellscript,selection_keyboard +6515,8603248,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",943,0,"n",shellscript,content +6516,8603249,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",944,0,"",shellscript,selection_keyboard +6517,8603411,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",944,0,"e",shellscript,content +6518,8603412,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",945,0,"",shellscript,selection_keyboard +6519,8603949,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",945,0,"/",shellscript,content +6520,8603949,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",946,0,"",shellscript,selection_keyboard +6521,8604195,"slurm/jobs/mihir/horeka/breakout/sample_maskgit-gt-actions.sbatch",945,0,"",shellscript,selection_command +6522,8604196,"TERMINAL",0,0,"4",,terminal_output +6523,8604204,"TERMINAL",0,0,"430",,terminal_output +6524,8605365,"TERMINAL",0,0,"5",,terminal_output +6525,8605438,"TERMINAL",0,0,"51",,terminal_output +6526,8606210,"TERMINAL",0,0,"6",,terminal_output +6527,8606367,"TERMINAL",0,0,"62",,terminal_output +6528,8606846,"TERMINAL",0,0,"\r\n[?2004l\rsh: slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch: No such file or directory\r\n]0;tum_cte0515@hkn0408:~/Projects/jasmine/slurm[?2004h(jasmine) [tum_cte0515@hkn0408 slurm]$ ",,terminal_output +6529,8607287,"TERMINAL",0,0,"7",,terminal_output +6530,8607346,"TERMINAL",0,0,"73",,terminal_output +6531,8607845,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +6532,8608361,"TERMINAL",0,0,"8",,terminal_output +6533,8608362,"TERMINAL",0,0,"84",,terminal_output +6534,8609388,"TERMINAL",0,0,"9",,terminal_output +6535,8609389,"TERMINAL",0,0,"95",,terminal_output +6536,8610355,"TERMINAL",0,0,"7:00",,terminal_output +6537,8610463,"TERMINAL",0,0,"7:006",,terminal_output +6538,8611406,"TERMINAL",0,0,"1",,terminal_output +6539,8611462,"TERMINAL",0,0,"17",,terminal_output +6540,8612427,"TERMINAL",0,0,"2",,terminal_output 
+6541,8612515,"TERMINAL",0,0,"28",,terminal_output +6542,8613251,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]0;tum_cte0515@hkn0408:~/Projects/jasmine/slurm[?2004h(jasmine) [tum_cte0515@hkn0408 slurm]$ ",,terminal_output +6543,8613713,"TERMINAL",0,0,"3",,terminal_output +6544,8613713,"TERMINAL",0,0,"39",,terminal_output +6545,8613789,"TERMINAL",0,0,"cd",,terminal_output +6546,8614042,"TERMINAL",0,0," ",,terminal_output +6547,8614161,"TERMINAL",0,0,".",,terminal_output +6548,8614269,"TERMINAL",0,0,".",,terminal_output +6549,8614600,"TERMINAL",0,0,"4",,terminal_output +6550,8614639,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0408:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0408 jasmine]$ ",,terminal_output +6551,8614695,"TERMINAL",0,0,"440",,terminal_output +6552,8614872,"TERMINAL",0,0,"cd ..",,terminal_output +6553,8615083,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +6554,8615535,"TERMINAL",0,0,"5",,terminal_output +6555,8615545,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6556,8615607,"TERMINAL",0,0,"51",,terminal_output +6557,8615719,"TERMINAL",0,0,"Sampling from checkpoint: /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264\r\n",,terminal_output +6558,8615805,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +6559,8616769,"TERMINAL",0,0,"6",,terminal_output +6560,8616780,"TERMINAL",0,0,"62",,terminal_output +6561,8617679,"TERMINAL",0,0,"7",,terminal_output +6562,8617680,"TERMINAL",0,0,"73",,terminal_output +6563,8618702,"TERMINAL",0,0,"82",,terminal_output +6564,8618761,"TERMINAL",0,0,"84",,terminal_output +6565,8619688,"TERMINAL",0,0,"9",,terminal_output +6566,8619765,"TERMINAL",0,0,"95",,terminal_output +6567,8621027,"TERMINAL",0,0,"10",,terminal_output +6568,8621070,"TERMINAL",0,0,"106",,terminal_output +6569,8621784,"TERMINAL",0,0,"1",,terminal_output +6570,8621829,"TERMINAL",0,0,"17",,terminal_output +6571,8622791,"TERMINAL",0,0,"2",,terminal_output +6572,8622903,"TERMINAL",0,0,"28",,terminal_output +6573,8623601,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --job-name=train_dyn_default_breakout_longer\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect_big/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn-gt-actions/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python train_dynamics.py \\n --save_ckpt \\n --image_height=10 
\\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=3e-5 \\n --log_image_interval=250 \\n --log_checkpoint_interval=250 \\n --dyna_type=maskgit \\n --log \\n --name=breakout-dyn-default-gt-actions-$slurm_job_id \\n --tags dyn breakout default \\n --entity instant-uv \\n --project jafar \\n --patch_size 4 \\n --lam_patch_size 4 \\n --warmup_steps 100 \\n --wsd_decay_steps 1000 \\n --num_steps 5000 \\n --use_gt_actions \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --val_interval 750 \\n --eval_full_frame \\n",shellscript,tab +6574,8623839,"TERMINAL",0,0,"3",,terminal_output +6575,8623906,"TERMINAL",0,0,"350",,terminal_output +6576,8624903,"TERMINAL",0,0,"4",,terminal_output +6577,8624947,"TERMINAL",0,0,"51",,terminal_output +6578,8625209,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1291,0,"",shellscript,selection_command +6579,8625893,"TERMINAL",0,0,"5",,terminal_output +6580,8626000,"TERMINAL",0,0,"62",,terminal_output +6581,8626997,"TERMINAL",0,0,"6",,terminal_output +6582,8627011,"TERMINAL",0,0,"73",,terminal_output +6583,8627102,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1290,0,"",shellscript,selection_mouse +6584,8628023,"TERMINAL",0,0,"8",,terminal_output +6585,8628079,"TERMINAL",0,0,"84",,terminal_output +6586,8628267,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1290,0,"j",shellscript,content +6587,8628268,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1291,0,"",shellscript,selection_keyboard +6588,8628355,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1291,0,"a",shellscript,content +6589,8628356,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1292,0,"",shellscript,selection_keyboard +6590,8628541,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1292,0,"s",shellscript,content +6591,8628542,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1293,0,"",shellscript,selection_keyboard +6592,8628560,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1293,0,"m",shellscript,content +6593,8628561,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1294,0,"",shellscript,selection_keyboard +6594,8628664,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1294,0,"i",shellscript,content +6595,8628665,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1295,0,"",shellscript,selection_keyboard +6596,8628739,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1295,0,"n",shellscript,content +6597,8628740,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1296,0,"",shellscript,selection_keyboard +6598,8628818,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1296,0,"e",shellscript,content +6599,8628818,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1297,0,"",shellscript,selection_keyboard +6600,8629020,"TERMINAL",0,0,"9",,terminal_output +6601,8629111,"TERMINAL",0,0,"95",,terminal_output +6602,8629476,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1297,0,"/",shellscript,content 
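Annotation: in train_dyn_single_gpu_gt_actions.sh above, the ground-truth-action variant passes --use_gt_actions and restores only tokenizer_checkpoint; no LAM checkpoint is needed because actions are read from the dataset instead of being inferred by a latent action model. The sibling script train_dyn_single_gpu.sh, recorded next, additionally restores lam_checkpoint and omits the flag.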
+6603,8629477,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu_gt_actions.sh",1298,0,"",shellscript,selection_keyboard +6604,8630237,"TERMINAL",0,0,"20",,terminal_output +6605,8630250,"TERMINAL",0,0,"206",,terminal_output +6606,8631202,"slurm/jobs/mihir/horeka/breakout/default_runs/train_dyn_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/dyn/%x_%j.log\n#SBATCH --job-name=train_dyn_default_breakout_longer\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_perfect/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\n# lam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3512576\n# tokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3512502\n\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/interactive/3518963\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/interactive/3518963\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --max_lr=3e-5 \\n --log_image_interval=250 \\n --log_checkpoint_interval=250 \\n --dyna_type=maskgit \\n --log \\n --name=breakout-dyn-default-$slurm_job_id \\n --tags dyn breakout default \\n --entity instant-uv \\n --project jafar \\n --patch_size 4 \\n --lam_patch_size 4 \\n --warmup_steps 100 \\n --wsd_decay_steps 1000 \\n --num_steps 5000 \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --lam_checkpoint $lam_checkpoint \\n --val_interval 750 \\n --eval_full_frame \\n",shellscript,tab +6607,8631206,"TERMINAL",0,0,"1",,terminal_output +6608,8631270,"TERMINAL",0,0,"17",,terminal_output +6609,8632109,"TERMINAL",0,0,"2",,terminal_output +6610,8632229,"TERMINAL",0,0,"28",,terminal_output +6611,8633265,"TERMINAL",0,0,"3",,terminal_output +6612,8633265,"TERMINAL",0,0,"39",,terminal_output +6613,8633488,"slurm/jobs/mihir/horeka/breakout/default_runs/train_lam_as_15.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/lam/%x_%j.log\n#SBATCH 
--job-name=train_lam_actionspace_15_breakout\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python jasmine/train_lam.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --patch_size=1 \\n --num_latents=15 \\n --batch_size=120 \\n --log \\n --name=breakout-lam-default-$slurm_job_id \\n --tags lam breakout default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \n",shellscript,tab +6614,8634180,"TERMINAL",0,0,"4",,terminal_output +6615,8634290,"TERMINAL",0,0,"44:00",,terminal_output +6616,8634951,"slurm/jobs/mihir/horeka/breakout/default_runs/train_lam_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/lam/%x_%j.log\n#SBATCH --job-name=train_lam_default_breakout_long\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/lam/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python jasmine/train_lam.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --patch_size=4 \\n --log \\n --name=breakout-lam-default-$slurm_job_id \\n --tags lam breakout default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --num_steps 5000 \\n --max_lr 3e-5 \\n --warmup_steps 500 \\n --wsd_decay_steps 1000 \\n --log_image_interval 500 \\n --log_checkpoint_interval 500 \\n --log_checkpoint_keep_period 500 \\n --val_interval 500 \",shellscript,tab +6617,8635218,"TERMINAL",0,0,"56",,terminal_output +6618,8635348,"TERMINAL",0,0,"51",,terminal_output +6619,8636260,"TERMINAL",0,0,"6",,terminal_output +6620,8636366,"TERMINAL",0,0,"62",,terminal_output +6621,8636808,"slurm/jobs/mihir/horeka/breakout/default_runs/train_tokenizer_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH 
--ntasks-per-node=4\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/breakout/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_default_breakout_big_TS_long\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\n# source .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_breakout/breakout_episodes_10m_gt_actions_split/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n --image_height=10 \\n --image_width=10 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --patch_size=4 \\n --log \\n --name=breakout-tokenizer-default-$slurm_job_id \\n --tags tokenizer breakout default \\n --entity instant-uv \\n --project jafar \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --num_steps 5000 \\n --max_lr 3e-5 \\n --warmup_steps 500 \\n --wsd_decay_steps 1000 \\n --log_image_interval 500 \\n --log_checkpoint_interval 500 \\n --log_checkpoint_keep_period 500 \\n --val_interval 500 \\n",shellscript,tab +6622,8637309,"TERMINAL",0,0,"7",,terminal_output +6623,8637449,"TERMINAL",0,0,"73",,terminal_output +6624,8638350,"TERMINAL",0,0,"8",,terminal_output +6625,8638455,"TERMINAL",0,0,"84",,terminal_output +6626,8639388,"TERMINAL",0,0,"9",,terminal_output +6627,8639455,"TERMINAL",0,0,"95",,terminal_output +6628,8640641,"TERMINAL",0,0,"30",,terminal_output +6629,8640695,"TERMINAL",0,0,"306",,terminal_output +6630,8641457,"TERMINAL",0,0,"1",,terminal_output +6631,8641564,"TERMINAL",0,0,"17",,terminal_output +6632,8642259,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py"", line 147, in \r\n dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/flax/nnx/pytreelib.py"", line 289, in __call__\r\n return _graph_node_meta_call(cls, *args, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/flax/nnx/pytreelib.py"", line 301, in _graph_node_meta_call\r\n cls._pytree_meta_construct(node, *args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/flax/nnx/pytreelib.py"", line 292, in _pytree_meta_construct\r\n self.__init__(*args, **kwargs)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/flax/nnx/training/optimizer.py"", line 88, in _check_wrt_wrapper\r\n raise TypeError(\r\nTypeError: Missing required argument `wrt`. 
As of Flax 0.11.0 the `wrt` argument is required, if you want to keep the previous use nnx.ModelAndOptimizer instead of nnx.Optimizer.\r\n",,terminal_output +6633,8642528,"TERMINAL",0,0,"2",,terminal_output +6634,8642582,"TERMINAL",0,0,"28",,terminal_output +6635,8643587,"TERMINAL",0,0,"3",,terminal_output +6636,8643602,"TERMINAL",0,0,"39",,terminal_output +6637,8644614,"TERMINAL",0,0,"4",,terminal_output +6638,8644634,"TERMINAL",0,0,"410",,terminal_output +6639,8645629,"TERMINAL",0,0,"5",,terminal_output +6640,8645768,"TERMINAL",0,0,"51",,terminal_output +6641,8646712,"TERMINAL",0,0,"6",,terminal_output +6642,8646767,"TERMINAL",0,0,"62",,terminal_output +6643,8647286,"TERMINAL",0,0,"W0924 15:07:37.331027 2440099 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugonly job_name: ""jax_worker"": UNAVAILABLE: Cancelling all calls\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""Cancelling all calls""}\r\n",,terminal_output +6644,8647788,"TERMINAL",0,0,"7",,terminal_output +6645,8647792,"TERMINAL",0,0,"73",,terminal_output +6646,8647869,"TERMINAL",0,0,"srun: error: hkn0408: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0408:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0408 jasmine]$ ",,terminal_output +6647,8648687,"TERMINAL",0,0,"8",,terminal_output +6648,8648878,"TERMINAL",0,0,"84",,terminal_output +6649,8649721,"TERMINAL",0,0,"9",,terminal_output +6650,8649815,"TERMINAL",0,0,"95",,terminal_output +6651,8650857,"TERMINAL",0,0,"40",,terminal_output +6652,8650877,"TERMINAL",0,0,"406",,terminal_output +6653,8651797,"TERMINAL",0,0,"1",,terminal_output +6654,8651911,"TERMINAL",0,0,"18",,terminal_output +6655,8652832,"TERMINAL",0,0,"2",,terminal_output +6656,8652953,"TERMINAL",0,0,"39",,terminal_output +6657,8653859,"TERMINAL",0,0,"3",,terminal_output +6658,8653985,"TERMINAL",0,0,"420",,terminal_output +6659,8654893,"TERMINAL",0,0,"4",,terminal_output +6660,8655022,"TERMINAL",0,0,"51",,terminal_output +6661,8655931,"TERMINAL",0,0,"5",,terminal_output +6662,8656059,"TERMINAL",0,0,"62",,terminal_output +6663,8657087,"TERMINAL",0,0,"7",,terminal_output +6664,8657098,"TERMINAL",0,0,"73",,terminal_output +6665,8657897,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\nimport optax\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nimport orbax.checkpoint as ocp\nfrom PIL import Image, ImageDraw\nimport tyro\nfrom flax import nnx\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n print_action_indices: bool = True\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 1\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n 
lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n use_gt_actions: bool = False\n # Dynamics checkpoint\n dyna_type: str = ""maskgit""\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n\n\nargs = tyro.cli(Args)\n\nif __name__ == ""__main__"":\n """"""\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n jax.distributed.initialize()\n\n rng = jax.random.key(args.seed)\n\n # --- Load Genie checkpoint ---\n rngs = nnx.Rngs(rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=False,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n # FIXME (f.srambical): implement spatiotemporal KV caching and set decode=True\n decode=False,\n rngs=rngs,\n )\n\n # Need to delete lam decoder for checkpoint loading\n if not args.use_gt_actions:\n assert genie.lam is not None\n del genie.lam.decoder\n\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n\n dummy_tx = optax.adamw(\n learning_rate=optax.linear_schedule(0.0001, 0.0001, 10000),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.dtype,\n )\n dummy_optimizer = nnx.Optimizer(genie, dummy_tx)\n\n abstract_optimizer = nnx.eval_shape(lambda: dummy_optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n ),\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(dummy_optimizer, restored_optimizer_state)\n\n # --- Define sampling function ---\n def _sampling_fn(model: Genie, batch: dict) -> jax.Array:\n """"""Runs Genie.sample with pre-defined generation hyper-parameters.""""""\n assert args.dyna_type in [\n ""maskgit"",\n ""causal"",\n ], f""Invalid dynamics type: {args.dyna_type}""\n frames, _ = model.sample(\n batch,\n args.seq_len,\n args.temperature,\n args.sample_argmax,\n args.maskgit_steps,\n )\n return frames\n\n # --- Define autoregressive sampling loop ---\n def 
_autoreg_sample(genie, rng, batch):\n batch[""videos""] = batch[""videos""][:, : args.start_frame]\n batch[""rng""] = rng\n generated_vid_BSHWC = _sampling_fn(genie, batch)\n return generated_vid_BSHWC\n\n # --- Get video + latent actions ---\n array_record_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".array_record"")\n ]\n dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n # We don't use workers in order to avoid grain shutdown issues (https://github.com/google/grain/issues/398)\n num_workers=0,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n dataloader = iter(dataloader)\n batch = next(dataloader)\n gt_video = jnp.asarray(batch[""videos""], dtype=jnp.float32) / 255.0\n batch[""videos""] = gt_video.astype(args.dtype)\n # Get latent actions for all videos in the batch\n action_batch_E = None\n if not args.use_gt_actions:\n action_batch_E = genie.vq_encode(batch, training=False)\n batch[""latent_actions""] = action_batch_E\n\n # --- Sample + evaluate video ---\n recon_video_BSHWC = _autoreg_sample(genie, rng, batch)\n recon_video_BSHWC = recon_video_BSHWC.astype(jnp.float32)\n gt = (\n gt_video[:, : recon_video_BSHWC.shape[1]]\n .clip(0, 1)\n .reshape(-1, *gt_video.shape[2:])\n )\n recon = recon_video_BSHWC.clip(0, 1).reshape(-1, *recon_video_BSHWC.shape[2:])\n ssim = jnp.asarray(\n pix.ssim(gt[:, args.start_frame :], recon[:, args.start_frame :])\n )\n jax.debug.breakpoint()\n ssim = ssim.mean()\n jax.debug.breakpoint()\n print(f""SSIM: {ssim}"")\n\n # --- Construct video ---\n true_videos = (gt_video * 255).astype(np.uint8)\n pred_videos = (recon_video_BSHWC * 255).astype(np.uint8)\n video_comparison = np.zeros((2, *recon_video_BSHWC.shape), dtype=np.uint8)\n video_comparison[0] = true_videos[:, : args.seq_len]\n video_comparison[1] = pred_videos\n frames = einops.rearrange(video_comparison, ""n b t h w c -> t (b h) (n w) c"")\n\n # --- Save video ---\n imgs = [Image.fromarray(img) for img in frames]\n # Write actions on each frame, on each row (i.e., for each video in the batch, on the GT row)\n B = batch[""videos""].shape[0]\n if action_batch_E is not None:\n action_batch_BSm11 = jnp.reshape(action_batch_E, (B, args.seq_len - 1, 1))\n else:\n action_batch_BSm11 = jnp.reshape(\n batch[""actions""][:, :-1], (B, args.seq_len - 1, 1)\n )\n for t, img in enumerate(imgs[1:]):\n d = ImageDraw.Draw(img)\n for row in range(B):\n action = action_batch_BSm11[row, t, 0]\n y_offset = row * batch[""videos""].shape[2] + 2\n if args.print_action_indices:\n d.text((2, y_offset), f""{action}"", fill=255)\n imgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n )\n",python,tab +6666,8657898,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4091,0,"",python,selection_command +6667,8658028,"TERMINAL",0,0,"8",,terminal_output +6668,8658138,"TERMINAL",0,0,"84",,terminal_output +6669,8659039,"TERMINAL",0,0,"9",,terminal_output +6670,8659296,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4118,0,"",python,selection_command +6671,8659367,"TERMINAL",0,0,"95",,terminal_output +6672,8660077,"TERMINAL",0,0,"50",,terminal_output +6673,8660213,"TERMINAL",0,0,"506",,terminal_output +6674,8661024,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4117,0,"",python,selection_mouse 
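Annotation: in the sample.py listing above, gt and recon are flattened with reshape(-1, *shape[2:]) before the [:, args.start_frame :] slice, so that slice appears to land on the height axis rather than the time axis, which may be what the freshly inserted jax.debug.breakpoint() calls are probing. A minimal sketch of the presumably intended computation follows (the mean_ssim helper is hypothetical; shapes assumed to be (B, T, H, W, C) floats in [0, 1]):

import dm_pix as pix

def mean_ssim(gt_BTHWC, recon_BTHWC, start_frame):
    # Drop the conditioning frames along the time axis *before* flattening.
    gt = gt_BTHWC[:, start_frame:].clip(0, 1)
    recon = recon_BTHWC[:, start_frame:].clip(0, 1)
    gt = gt.reshape(-1, *gt.shape[2:])            # (B * (T - start_frame), H, W, C)
    recon = recon.reshape(-1, *recon.shape[2:])
    return pix.ssim(gt, recon).mean()             # dm_pix.ssim scores each image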
+6675,8661111,"TERMINAL",0,0,"1",,terminal_output +6676,8661267,"TERMINAL",0,0,"17",,terminal_output +6677,8662105,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4117,0,"M",python,content +6678,8662106,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4118,0,"",python,selection_keyboard +6679,8662158,"TERMINAL",0,0,"2",,terminal_output +6680,8662235,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4118,0,"o",python,content +6681,8662236,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4119,0,"",python,selection_keyboard +6682,8662320,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4119,0,"d",python,content +6683,8662321,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4120,0,"",python,selection_keyboard +6684,8662336,"TERMINAL",0,0,"28",,terminal_output +6685,8662431,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4120,0,"e",python,content +6686,8662432,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4121,0,"",python,selection_keyboard +6687,8662494,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4121,0,"l",python,content +6688,8662495,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4122,0,"",python,selection_keyboard +6689,8662851,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4122,0,"A",python,content +6690,8662852,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4123,0,"",python,selection_keyboard +6691,8662959,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4123,0,"n",python,content +6692,8662960,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4124,0,"",python,selection_keyboard +6693,8663083,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4124,0,"d",python,content +6694,8663084,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4125,0,"",python,selection_keyboard +6695,8663201,"TERMINAL",0,0,"3",,terminal_output +6696,8663328,"TERMINAL",0,0,"39",,terminal_output +6697,8664242,"TERMINAL",0,0,"4",,terminal_output +6698,8664421,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4131,0,"",python,selection_mouse +6699,8664423,"TERMINAL",0,0,"430",,terminal_output +6700,8664583,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4117,17,"ModelAndOptimizer",python,selection_mouse +6701,8665261,"TERMINAL",0,0,"5",,terminal_output +6702,8665408,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",4133,0,"",python,selection_command +6703,8665422,"TERMINAL",0,0,"51",,terminal_output +6704,8666318,"TERMINAL",0,0,"6",,terminal_output +6705,8666472,"TERMINAL",0,0,"62",,terminal_output +6706,8667042,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/breakout/sample_maskgit.sbatch",,terminal_output +6707,8667333,"TERMINAL",0,0,"7",,terminal_output +6708,8667492,"TERMINAL",0,0,"73",,terminal_output +6709,8667791,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +6710,8667901,"TERMINAL",0,0,"Sampling from checkpoint: 
/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/breakout/dyn/interactive/3519264\r\n",,terminal_output +6711,8668019,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +6712,8668437,"TERMINAL",0,0,"8",,terminal_output +6713,8668544,"TERMINAL",0,0,"84",,terminal_output +6714,8669497,"TERMINAL",0,0,"9",,terminal_output +6715,8669607,"TERMINAL",0,0,"95",,terminal_output +6716,8670459,"TERMINAL",0,0,"8:00",,terminal_output +6717,8670616,"TERMINAL",0,0,"8:006",,terminal_output +6718,8671585,"TERMINAL",0,0,"1",,terminal_output +6719,8671648,"TERMINAL",0,0,"17",,terminal_output +6720,8672567,"TERMINAL",0,0,"2",,terminal_output +6721,8672686,"TERMINAL",0,0,"28",,terminal_output +6722,8673591,"TERMINAL",0,0,"3",,terminal_output +6723,8673722,"TERMINAL",0,0,"39",,terminal_output +6724,8674614,"TERMINAL",0,0,"4",,terminal_output +6725,8674765,"TERMINAL",0,0,"440",,terminal_output +6726,8675641,"TERMINAL",0,0,"5",,terminal_output +6727,8675794,"TERMINAL",0,0,"51",,terminal_output +6728,8676775,"TERMINAL",0,0,"6",,terminal_output +6729,8676835,"TERMINAL",0,0,"62",,terminal_output +6730,8677173,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +6731,8677811,"TERMINAL",0,0,"7",,terminal_output +6732,8677871,"TERMINAL",0,0,"73",,terminal_output +6733,8678815,"TERMINAL",0,0,"8",,terminal_output +6734,8678919,"TERMINAL",0,0,"85",,terminal_output +6735,8679839,"TERMINAL",0,0,"9",,terminal_output +6736,8679955,"TERMINAL",0,0,"106",,terminal_output +6737,8680870,"TERMINAL",0,0,"10",,terminal_output +6738,8681006,"TERMINAL",0,0,"17",,terminal_output +6739,8681889,"TERMINAL",0,0,"1",,terminal_output +6740,8682039,"TERMINAL",0,0,"28",,terminal_output +6741,8682882,"TERMINAL",0,0,"2",,terminal_output +6742,8683083,"TERMINAL",0,0,"39",,terminal_output +6743,8683919,"TERMINAL",0,0,"3",,terminal_output +6744,8684110,"TERMINAL",0,0,"450",,terminal_output +6745,8685060,"TERMINAL",0,0,"5",,terminal_output +6746,8685170,"TERMINAL",0,0,"51",,terminal_output +6747,8685989,"TERMINAL",0,0,"6",,terminal_output +6748,8686174,"TERMINAL",0,0,"62",,terminal_output +6749,8687108,"TERMINAL",0,0,"7",,terminal_output +6750,8687215,"TERMINAL",0,0,"73",,terminal_output +6751,8688165,"TERMINAL",0,0,"8",,terminal_output +6752,8688297,"TERMINAL",0,0,"84",,terminal_output +6753,8689156,"TERMINAL",0,0,"9",,terminal_output +6754,8689341,"TERMINAL",0,0,"95",,terminal_output +6755,8690188,"TERMINAL",0,0,"20",,terminal_output +6756,8690473,"TERMINAL",0,0,"206",,terminal_output +6757,8691212,"TERMINAL",0,0,"1",,terminal_output +6758,8691410,"TERMINAL",0,0,"17",,terminal_output +6759,8692221,"TERMINAL",0,0,"2",,terminal_output +6760,8692406,"TERMINAL",0,0,"28",,terminal_output +6761,8693355,"TERMINAL",0,0,"3",,terminal_output +6762,8693549,"TERMINAL",0,0,"39",,terminal_output +6763,8694397,"TERMINAL",0,0,"4",,terminal_output +6764,8694557,"TERMINAL",0,0,"45:00",,terminal_output +6765,8695357,"TERMINAL",0,0,"5",,terminal_output +6766,8695612,"TERMINAL",0,0,"51",,terminal_output +6767,8696362,"TERMINAL",0,0,"6",,terminal_output 
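Annotation: the TypeError "Missing required argument `wrt`" traceback earlier in this session is the Flax 0.11.0 API change the message itself describes, and the character-level edits at offset 4117 of sample.py show the fix being applied: typing "ModelAnd" in front of "Optimizer" turns nnx.Optimizer(genie, dummy_tx) into nnx.ModelAndOptimizer(genie, dummy_tx), after which the rerun proceeds to checkpoint restoration. A sketch of the two options (the wrt=nnx.Param form is inferred from the error message, not from this recording):

import flax.nnx as nnx
import optax

tx = optax.adamw(learning_rate=1e-4)
# Pre-0.11 behavior, matching the recorded fix:
optimizer = nnx.ModelAndOptimizer(genie, tx)
# Flax >= 0.11 API: optimizer state explicitly scoped to the variables in wrt:
optimizer = nnx.Optimizer(genie, tx, wrt=nnx.Param)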
+6768,8696586,"TERMINAL",0,0,"62",,terminal_output +6769,8697465,"TERMINAL",0,0,"7",,terminal_output +6770,8697639,"TERMINAL",0,0,"73",,terminal_output +6771,8698473,"TERMINAL",0,0,"8",,terminal_output +6772,8698635,"TERMINAL",0,0,"84",,terminal_output +6773,8699653,"TERMINAL",0,0,"9",,terminal_output +6774,8699697,"TERMINAL",0,0,"95",,terminal_output +6775,8700525,"TERMINAL",0,0,"30",,terminal_output +6776,8700729,"TERMINAL",0,0,"306",,terminal_output +6777,8701609,"TERMINAL",0,0,"1",,terminal_output +6778,8701773,"TERMINAL",0,0,"17",,terminal_output +6779,8702674,"TERMINAL",0,0,"2",,terminal_output +6780,8702796,"TERMINAL",0,0,"28",,terminal_output +6781,8703714,"TERMINAL",0,0,"3",,terminal_output +6782,8703836,"TERMINAL",0,0,"39",,terminal_output +6783,8704647,"TERMINAL",0,0,"4",,terminal_output +6784,8704865,"TERMINAL",0,0,"410",,terminal_output +6785,8705846,"TERMINAL",0,0,"5",,terminal_output +6786,8705906,"TERMINAL",0,0,"52",,terminal_output +6787,8706781,"TERMINAL",0,0,"63",,terminal_output +6788,8706940,"TERMINAL",0,0,"73",,terminal_output +6789,8707849,"TERMINAL",0,0,"7",,terminal_output +6790,8708032,"TERMINAL",0,0,"84",,terminal_output +6791,8708800,"TERMINAL",0,0,"8",,terminal_output +6792,8709159,"TERMINAL",0,0,"95",,terminal_output +6793,8709830,"TERMINAL",0,0,"9",,terminal_output +6794,8710149,"TERMINAL",0,0,"406",,terminal_output +6795,8710887,"TERMINAL",0,0,"40",,terminal_output +6796,8711394,"TERMINAL",0,0,"17",,terminal_output +6797,8711984,"TERMINAL",0,0,"1",,terminal_output +6798,8712130,"TERMINAL",0,0,"28",,terminal_output +6799,8712947,"TERMINAL",0,0,"3",,terminal_output +6800,8713166,"TERMINAL",0,0,"39",,terminal_output +6801,8714054,"TERMINAL",0,0,"4",,terminal_output +6802,8714248,"TERMINAL",0,0,"420",,terminal_output +6803,8714799,"TERMINAL",0,0,"Entering jdb:\r\n",,terminal_output +6804,8715120,"TERMINAL",0,0,"5",,terminal_output +6805,8715273,"TERMINAL",0,0,"51",,terminal_output +6806,8716047,"TERMINAL",0,0,"6",,terminal_output +6807,8716293,"TERMINAL",0,0,"62",,terminal_output +6808,8717111,"TERMINAL",0,0,"7",,terminal_output +6809,8717422,"TERMINAL",0,0,"73",,terminal_output +6810,8718240,"TERMINAL",0,0,"8",,terminal_output +6811,8718394,"TERMINAL",0,0,"84",,terminal_output +6812,8719300,"TERMINAL",0,0,"9",,terminal_output +6813,8719415,"TERMINAL",0,0,"95",,terminal_output +6814,8720268,"TERMINAL",0,0,"50",,terminal_output +6815,8720461,"TERMINAL",0,0,"506",,terminal_output +6816,8721313,"TERMINAL",0,0,"1",,terminal_output +6817,8721489,"TERMINAL",0,0,"17",,terminal_output +6818,8722335,"TERMINAL",0,0,"2",,terminal_output +6819,8722539,"TERMINAL",0,0,"28",,terminal_output +6820,8723356,"TERMINAL",0,0,"3",,terminal_output +6821,8723626,"TERMINAL",0,0,"39",,terminal_output +6822,8724560,"TERMINAL",0,0,"4",,terminal_output +6823,8724639,"TERMINAL",0,0,"430",,terminal_output +6824,8725503,"TERMINAL",0,0,"5",,terminal_output +6825,8725739,"TERMINAL",0,0,"51",,terminal_output +6826,8726367,"TERMINAL",0,0,"l",,terminal_output +6827,8726430,"TERMINAL",0,0,"6",,terminal_output +6828,8726552,"TERMINAL",0,0,"\r\n(jdb) > /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py(223)\r\n .reshape(-1, *gt_video.shape[2:])\r\n )\r\n recon = recon_video_BSHWC.clip(0, 1).reshape(-1, *recon_video_BSHWC.shape[2:])\r\n ssim = jnp.asarray(\r\n pix.ssim(gt[:, args.start_frame :], recon[:, args.start_frame :])\r\n )\r\n-> jax.debug.breakpoint()\r\n ssim = ssim.mean()\r\n jax.debug.breakpoint()\r\n print(f""SSIM: {ssim}"")\r\n \r\n # --- Construct video 
---\r\n",,terminal_output +6829,8726709,"TERMINAL",0,0,"62",,terminal_output +6830,8727600,"TERMINAL",0,0,"7",,terminal_output +6831,8727722,"TERMINAL",0,0,"73",,terminal_output +6832,8728577,"TERMINAL",0,0,"8",,terminal_output +6833,8728585,"TERMINAL",0,0,"s",,terminal_output +6834,8728722,"TERMINAL",0,0,"s",,terminal_output +6835,8728797,"TERMINAL",0,0,"84",,terminal_output +6836,8728805,"TERMINAL",0,0,"im",,terminal_output +6837,8729466,"TERMINAL",0,0,"\r\n(jdb) Array([0.8719168 , 0.979592 , 0.9806774 , 0.9994842 , 0.9663542 ,\r\n 0.62541455, 0.8337695 , 0.7704017 , 0.72312605, 0.9633823 ,\r\n 0.95175153, 0.8701259 , 0.69967306, 0.56205654, 0.47414473,\r\n 0.51209915, 0.8719168 , 0.979592 , 0.9806774 , 0.9994842 ,\r\n 0.9663542 , 0.62541455, 0.8337695 , 0.7704017 , 0.74423504,\r\n 0.8925881 , 0.94404775, 0.8537997 , 0.6535391 , 0.51282483,\r\n 0.39086622, 0.8247448 , 0.8719168 , 0.97992545, 0.82860285,\r\n 0.9894424 , 0.95523643, 0.6901768 , 0.96347576, 0.9865565 ,\r\n 0.9069136 , 0.9082226 , 0.94293064, 0.98609596, 0.7584913 ,\r\n 0.61409044, 0.53887254, 0.82599723, 0.8719168 , 0.979592 ,\r\n 0.9806774 , 0.9994842 , 0.99948674, 0.95591885, 0.95974797,\r\n 0.5447833 , 0.50476223, 0.6101557 , 0.69963354, 0.9072893 ,\r\n 0.8782214 , 0.93108964, 0.88094884, 0.8361077 , 0.8719168 ,\r\n 0.979592 , 0.9675891 , 0.9984601 , 0.96181786, 0.58896685,\r\n 0.8180211 , 0.7406737 , 0.71000284, 0.8720492 , 0.9367464 ,\r\n 0.98492527, 0.7036349 , 0.56123155, 0.47157612, 0.3077804 ,\r\n 0.8786594 , 0.9360249 , 0.9688838 , 0.99835 , 0.996313 ,\r\n 0.98949486, 0.977293 , 0.6910594 , 0.83740443, 0.7546411 ,\r\n 0.510805 , 0.8269499 , 0.99748063, 0.99427384, 0.9972628 ,\r\n 0.82972175, 0.8786594 , 0.9360249 , 0.9688838 , 0.9989767 ,\r\n 0.99957067, 0.9499344 , 0.756049 , 0.91639215, 0.99858826,\r\n 0.99863607, 0.99760354, 0.9987988 , 0.99714386, 0.99647456,\r\n 0.45191002, 0.84263116, 0.8719168 , 0.97992545, 0.9794672 ,\r\n 0.9850322 , 0.96488124, 0.8654149 , 0.8984966 , 0.78111583,\r\n 0.7082204 , 0.94970006, 0.8444691 , 0.8607148 , 0.73337317,\r\n 0.5445521 , 0.43409023, 0.47210136, 0.8719168 , 0.97992545,\r\n 0.97871846, 0.9991432 , 0.9651877 , 0.62604827, 0.8156875 ,\r\n 0.74444294, 0.706319 , 0.89853054, 0.934495 , 0.86370957,\r\n 0.7341988 , 0.5997701 , 0.5239616 , 0.83312696, 0.8719168 ,\r\n 0.97992545, 0.9794672 , 0.9850322 , 0.96488124, 0.8654149 ,\r\n 0.8984966 , 0.78111583, 0.75983757, 0.8996647 , 0.9456452 ,\r\n 0.8705645 , 0.73891336, 0.617242 , 0.532819 , 0.82143146,\r\n 0.8786594 , 0.9360249 , 0.98705184, 0.99924576, 0.9996433 ,\r\n 0.78112984, 0.73971 , 0.909455 , 0.9973958 , 0.99876064,\r\n 0.9904164 , 0.99829113, 0.9981816 , 0.9994455 , 0.53903925,\r\n 0.8512075 , 0.8719168 , 0.979592 , 0.9806774 , 0.99735916,\r\n 0.96215695, 0.89594936, 0.9515029 , 0.54335266, 0.5061937 ,\r\n 0.58414257, 0.7006652 , 0.9087106 , 0.8660244 , 0.9312876 ,\r\n 0.80990124, 0.83163494], dtype=float32)\r\n",,terminal_output +6838,8729529,"TERMINAL",0,0,"9",,terminal_output +6839,8729841,"TERMINAL",0,0,"95",,terminal_output +6840,8730639,"TERMINAL",0,0,"9:00",,terminal_output +6841,8730865,"TERMINAL",0,0,"9:006",,terminal_output +6842,8731749,"TERMINAL",0,0,"1",,terminal_output +6843,8731916,"TERMINAL",0,0,"17",,terminal_output +6844,8732636,"TERMINAL",0,0,"2",,terminal_output +6845,8732907,"TERMINAL",0,0,"29",,terminal_output +6846,8733704,"TERMINAL",0,0,"3",,terminal_output +6847,8733970,"TERMINAL",0,0,"440",,terminal_output +6848,8734728,"TERMINAL",0,0,"4",,terminal_output 
+6849,8735072,"TERMINAL",0,0,"51",,terminal_output +6850,8735869,"TERMINAL",0,0,"5",,terminal_output +6851,8736041,"TERMINAL",0,0,"62",,terminal_output +6852,8736328,"TERMINAL",0,0,"s",,terminal_output +6853,8736447,"TERMINAL",0,0,"s",,terminal_output +6854,8736673,"TERMINAL",0,0,"s",,terminal_output +6855,8736860,"TERMINAL",0,0,"i",,terminal_output +6856,8736860,"TERMINAL",0,0,"6",,terminal_output +6857,8737054,"TERMINAL",0,0,"73",,terminal_output +6858,8737172,"TERMINAL",0,0," ",,terminal_output +6859,8737376,"TERMINAL",0,0," ",,terminal_output +6860,8737957,"TERMINAL",0,0,"75",,terminal_output +6861,8738101,"TERMINAL",0,0,"84",,terminal_output +6862,8738257,"TERMINAL",0,0,"i",,terminal_output +6863,8738427,"TERMINAL",0,0,"m",,terminal_output +6864,8738662,"TERMINAL",0,0,".",,terminal_output +6865,8738868,"TERMINAL",0,0,"8 6",,terminal_output +6866,8738868,"TERMINAL",0,0,"s",,terminal_output +6867,8739065,"TERMINAL",0,0,"h",,terminal_output +6868,8739069,"TERMINAL",0,0,"a",,terminal_output +6869,8739149,"TERMINAL",0,0,"95",,terminal_output +6870,8739201,"TERMINAL",0,0,"p",,terminal_output +6871,8739300,"TERMINAL",0,0,"e",,terminal_output +6872,8739394,"TERMINAL",0,0,"\r\n(jdb) (192,)\r\n",,terminal_output +6873,8739891,"TERMINAL",0,0,"9",,terminal_output +6874,8740257,"TERMINAL",0,0,"106",,terminal_output +6875,8740945,"TERMINAL",0,0,"10",,terminal_output +6876,8741370,"TERMINAL",0,0,"17",,terminal_output +6877,8741978,"TERMINAL",0,0,"2",,terminal_output +6878,8742249,"TERMINAL",0,0,"28",,terminal_output +6879,8743008,"TERMINAL",0,0,"3",,terminal_output +6880,8743326,"TERMINAL",0,0,"39",,terminal_output +6881,8744028,"TERMINAL",0,0,"4",,terminal_output +6882,8744353,"TERMINAL",0,0,"450",,terminal_output +6883,8745167,"TERMINAL",0,0,"5",,terminal_output +6884,8745392,"TERMINAL",0,0,"51",,terminal_output +6885,8746136,"TERMINAL",0,0,"6",,terminal_output +6886,8746615,"TERMINAL",0,0,"62",,terminal_output +6887,8747218,"TERMINAL",0,0,"7",,terminal_output +6888,8747456,"TERMINAL",0,0,"73",,terminal_output +6889,8748241,"TERMINAL",0,0,"8",,terminal_output +6890,8748861,"TERMINAL",0,0,"84",,terminal_output +6891,8749267,"TERMINAL",0,0,"9",,terminal_output +6892,8749572,"TERMINAL",0,0,"95",,terminal_output +6893,8749991,"TERMINAL",0,0,"1",,terminal_output +6894,8750086,"TERMINAL",0,0,"2",,terminal_output +6895,8750329,"TERMINAL",0,0,"20",,terminal_output +6896,8750704,"TERMINAL",0,0,"206",,terminal_output +6897,8750704,"TERMINAL",0,0," ",,terminal_output +6898,8750858,"TERMINAL",0,0,"*",,terminal_output +6899,8751127,"TERMINAL",0,0," ",,terminal_output +6900,8751327,"TERMINAL",0,0,"1",,terminal_output +6901,8751624,"TERMINAL",0,0,"17",,terminal_output +6902,8751839,"TERMINAL",0,0,"10",,terminal_output +6903,8752101,"TERMINAL",0,0," ",,terminal_output +6904,8752264,"TERMINAL",0,0,"*",,terminal_output +6905,8752326,"TERMINAL",0,0,"2",,terminal_output +6906,8752675,"TERMINAL",0,0,"28",,terminal_output +6907,8752715,"TERMINAL",0,0," ",,terminal_output +6908,8752835,"TERMINAL",0,0,"1",,terminal_output +6909,8752940,"TERMINAL",0,0,"0",,terminal_output +6910,8753132,"TERMINAL",0,0,"\r\n(jdb) 1200\r\n",,terminal_output +6911,8753469,"TERMINAL",0,0,"3",,terminal_output +6912,8753725,"TERMINAL",0,0,"39",,terminal_output +6913,8754473,"TERMINAL",0,0,"4",,terminal_output +6914,8754703,"TERMINAL",0,0,"46:00",,terminal_output +6915,8755519,"TERMINAL",0,0,"52",,terminal_output +6916,8755807,"TERMINAL",0,0,"51",,terminal_output +6917,8756592,"TERMINAL",0,0,"6",,terminal_output 
+6918,8756760,"TERMINAL",0,0,"62",,terminal_output +6919,8757585,"TERMINAL",0,0,"7",,terminal_output +6920,8757976,"TERMINAL",0,0,"73",,terminal_output +6921,8758686,"TERMINAL",0,0,"8",,terminal_output +6922,8758842,"TERMINAL",0,0,"84",,terminal_output +6923,8759758,"TERMINAL",0,0,"9",,terminal_output +6924,8759880,"TERMINAL",0,0,"95",,terminal_output +6925,8760823,"TERMINAL",0,0,"30",,terminal_output +6926,8761047,"TERMINAL",0,0,"307",,terminal_output +6927,8761759,"TERMINAL",0,0,"1",,terminal_output +6928,8761960,"TERMINAL",0,0,"28",,terminal_output +6929,8762794,"TERMINAL",0,0,"2",,terminal_output +6930,8763055,"TERMINAL",0,0,"39",,terminal_output +6931,8763970,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",0,0,"",python,tab +6932,8763971,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6833,0,"",python,selection_mouse +6933,8764005,"TERMINAL",0,0,"3",,terminal_output +6934,8764096,"TERMINAL",0,0,"410",,terminal_output +6935,8764739,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6824,0,"",python,selection_mouse +6936,8764778,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6823,2,"gt",python,selection_mouse +6937,8764861,"TERMINAL",0,0,"4",,terminal_output +6938,8765063,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6823,4,"gt[:",python,selection_mouse +6939,8765064,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6823,6,"gt[:, ",python,selection_mouse +6940,8765064,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6823,10,"gt[:, args",python,selection_mouse +6941,8765065,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6823,11,"gt[:, args.",python,selection_mouse +6942,8765065,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6823,22,"gt[:, args.start_frame",python,selection_mouse +6943,8765196,"TERMINAL",0,0,"51",,terminal_output +6944,8765253,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6823,23,"gt[:, args.start_frame ",python,selection_mouse +6945,8765343,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6823,24,"gt[:, args.start_frame :",python,selection_mouse +6946,8765907,"TERMINAL",0,0,"5",,terminal_output +6947,8765974,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6823,25,"gt[:, args.start_frame :]",python,selection_mouse +6948,8766223,"TERMINAL",0,0,"62",,terminal_output +6949,8767212,"TERMINAL",0,0,"6",,terminal_output +6950,8767256,"TERMINAL",0,0,"73",,terminal_output +6951,8767995,"TERMINAL",0,0,"8",,terminal_output +6952,8768397,"TERMINAL",0,0,"84",,terminal_output +6953,8769071,"TERMINAL",0,0,"9",,terminal_output +6954,8769302,"TERMINAL",0,0,"95",,terminal_output +6955,8770030,"TERMINAL",0,0,"40",,terminal_output +6956,8770345,"TERMINAL",0,0,"406",,terminal_output +6957,8770468,"TERMINAL",0,0,"gt[:, args.start_frame :]",,terminal_output +6958,8770825,"TERMINAL",0,0,".",,terminal_output +6959,8771021,"TERMINAL",0,0,"s",,terminal_output +6960,8771084,"TERMINAL",0,0,"1",,terminal_output +6961,8771150,"TERMINAL",0,0,"h",,terminal_output +6962,8771266,"TERMINAL",0,0,"a",,terminal_output +6963,8771337,"TERMINAL",0,0,"17",,terminal_output +6964,8771356,"TERMINAL",0,0,"p",,terminal_output 
+6965,8771514,"TERMINAL",0,0,"e",,terminal_output +6966,8771635,"TERMINAL",0,0,"\r\n",,terminal_output +6967,8771699,"TERMINAL",0,0,"(jdb) (192, 6, 10, 3)\r\n",,terminal_output +6968,8772202,"TERMINAL",0,0,"2",,terminal_output +6969,8772372,"TERMINAL",0,0,"28",,terminal_output +6970,8773228,"TERMINAL",0,0,"3",,terminal_output +6971,8773407,"TERMINAL",0,0,"39",,terminal_output +6972,8774207,"TERMINAL",0,0,"4",,terminal_output +6973,8774458,"TERMINAL",0,0,"420",,terminal_output +6974,8775275,"TERMINAL",0,0,"5",,terminal_output +6975,8775478,"TERMINAL",0,0,"51",,terminal_output +6976,8776243,"TERMINAL",0,0,"6",,terminal_output +6977,8776615,"TERMINAL",0,0,"62",,terminal_output +6978,8777322,"TERMINAL",0,0,"7",,terminal_output +6979,8777631,"TERMINAL",0,0,"73",,terminal_output +6980,8778348,"TERMINAL",0,0,"8",,terminal_output +6981,8778668,"TERMINAL",0,0,"84",,terminal_output +6982,8779371,"TERMINAL",0,0,"9",,terminal_output +6983,8779678,"TERMINAL",0,0,"95",,terminal_output +6984,8780394,"TERMINAL",0,0,"50",,terminal_output +6985,8780804,"TERMINAL",0,0,"506",,terminal_output +6986,8781438,"TERMINAL",0,0,"1",,terminal_output +6987,8781827,"TERMINAL",0,0,"17",,terminal_output +6988,8782546,"TERMINAL",0,0,"2",,terminal_output +6989,8782852,"TERMINAL",0,0,"28",,terminal_output +6990,8783581,"TERMINAL",0,0,"3",,terminal_output +6991,8783915,"TERMINAL",0,0,"39",,terminal_output +6992,8784547,"TERMINAL",0,0,"4",,terminal_output +6993,8784908,"TERMINAL",0,0,"430",,terminal_output +6994,8785667,"TERMINAL",0,0,"5",,terminal_output +6995,8785900,"TERMINAL",0,0,"52",,terminal_output +6996,8786614,"TERMINAL",0,0,"6",,terminal_output +6997,8786949,"TERMINAL",0,0,"73",,terminal_output +6998,8787725,"TERMINAL",0,0,"7",,terminal_output +6999,8787973,"TERMINAL",0,0,"84",,terminal_output +7000,8788848,"TERMINAL",0,0,"8",,terminal_output +7001,8789042,"TERMINAL",0,0,"95",,terminal_output +7002,8789817,"TERMINAL",0,0,"9",,terminal_output +7003,8790124,"TERMINAL",0,0,"10:006",,terminal_output +7004,8790484,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",0,0,"",python,tab +7005,8790485,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6805,0,"",python,selection_mouse +7006,8790629,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6804,0,"",python,selection_command +7007,8790825,"TERMINAL",0,0,"10:00",,terminal_output +7008,8791106,"TERMINAL",0,0,"17",,terminal_output +7009,8791629,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6740,0,"",python,selection_mouse +7010,8791843,"TERMINAL",0,0,"1",,terminal_output +7011,8792141,"TERMINAL",0,0,"28",,terminal_output +7012,8792430,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6761,0,"",python,selection_mouse +7013,8792602,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6753,17,"recon_video_BSHWC",python,selection_mouse +7014,8792786,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6753,18,"recon_video_BSHWC.",python,selection_mouse +7015,8792817,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6753,23,"recon_video_BSHWC.shape",python,selection_mouse +7016,8792891,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6753,24,"recon_video_BSHWC.shape[",python,selection_mouse +7017,8792892,"TERMINAL",0,0,"2",,terminal_output 
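The shape probes above (ssim.shape -> (192,) and gt[:, args.start_frame :].shape -> (192, 6, 10, 3)) show that gt has already been flattened into a stack of frames, so the [:, start_frame:] slice cuts along the height axis rather than the time axis. A tiny illustration with assumed sizes:

import jax.numpy as jnp

video = jnp.zeros((2, 16, 6, 10, 3))          # (B, T, H, W, C); sizes assumed
frames = video.reshape(-1, *video.shape[2:])  # (32, 6, 10, 3): T folded into the batch axis
print(frames[:, 1:].shape)                    # (32, 5, 10, 3): height is sliced, not time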
+7018,8792924,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6753,25,"recon_video_BSHWC.shape[2",python,selection_mouse +7019,8793009,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6753,26,"recon_video_BSHWC.shape[2:",python,selection_mouse +7020,8793198,"TERMINAL",0,0,"39",,terminal_output +7021,8793851,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6779,0,"",python,selection_mouse +7022,8793948,"TERMINAL",0,0,"3",,terminal_output +7023,8794232,"TERMINAL",0,0,"440",,terminal_output +7024,8794941,"TERMINAL",0,0,"4",,terminal_output +7025,8794997,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6722,0,"",python,selection_mouse +7026,8795162,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6711,17,"recon_video_BSHWC",python,selection_mouse +7027,8795291,"TERMINAL",0,0,"51",,terminal_output +7028,8795949,"TERMINAL",0,0,"6",,terminal_output +7029,8796324,"TERMINAL",0,0,"62",,terminal_output +7030,8796545,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6722,0,"",python,selection_mouse +7031,8797086,"TERMINAL",0,0,"7",,terminal_output +7032,8797394,"TERMINAL",0,0,"73",,terminal_output +7033,8798025,"TERMINAL",0,0,"8",,terminal_output +7034,8798519,"TERMINAL",0,0,"84",,terminal_output +7035,8799133,"TERMINAL",0,0,"9",,terminal_output +7036,8799545,"TERMINAL",0,0,"95",,terminal_output +7037,8800111,"TERMINAL",0,0,"10",,terminal_output +7038,8800501,"TERMINAL",0,0,"106",,terminal_output +7039,8801182,"TERMINAL",0,0,"1",,terminal_output +7040,8801593,"TERMINAL",0,0,"17",,terminal_output +7041,8802210,"TERMINAL",0,0,"2",,terminal_output +7042,8802613,"TERMINAL",0,0,"28",,terminal_output +7043,8803231,"TERMINAL",0,0,"3",,terminal_output +7044,8803643,"TERMINAL",0,0,"39",,terminal_output +7045,8804309,"TERMINAL",0,0,"4",,terminal_output +7046,8804743,"TERMINAL",0,0,"450",,terminal_output +7047,8805306,"TERMINAL",0,0,"5",,terminal_output +7048,8805846,"TERMINAL",0,0,"51",,terminal_output +7049,8806325,"TERMINAL",0,0,"67",,terminal_output +7050,8806877,"TERMINAL",0,0,"62",,terminal_output +7051,8807368,"TERMINAL",0,0,"7",,terminal_output +7052,8807955,"TERMINAL",0,0,"73",,terminal_output +7053,8808410,"TERMINAL",0,0,"8",,terminal_output +7054,8809048,"TERMINAL",0,0,"84",,terminal_output +7055,8809485,"TERMINAL",0,0,"9",,terminal_output +7056,8809915,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,0,"",python,selection_mouse +7057,8809926,"TERMINAL",0,0,"95",,terminal_output +7058,8810216,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,1,"*",python,selection_mouse +7059,8810216,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,2,"*r",python,selection_mouse +7060,8810217,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,4,"*rec",python,selection_mouse +7061,8810310,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,5,"*reco",python,selection_mouse +7062,8810311,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,7,"*recon_",python,selection_mouse +7063,8810311,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,8,"*recon_v",python,selection_mouse 
+7064,8810311,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,9,"*recon_vi",python,selection_mouse +7065,8810352,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,10,"*recon_vid",python,selection_mouse +7066,8810391,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,11,"*recon_vide",python,selection_mouse +7067,8810417,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,12,"*recon_video",python,selection_mouse +7068,8810501,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,13,"*recon_video_",python,selection_mouse +7069,8810539,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,14,"*recon_video_B",python,selection_mouse +7070,8810563,"TERMINAL",0,0,"20",,terminal_output +7071,8811102,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,15,"*recon_video_BS",python,selection_mouse +7072,8811103,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,16,"*recon_video_BSH",python,selection_mouse +7073,8811200,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,17,"*recon_video_BSHW",python,selection_mouse +7074,8811201,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,18,"*recon_video_BSHWC",python,selection_mouse +7075,8811201,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,19,"*recon_video_BSHWC.",python,selection_mouse +7076,8811201,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,21,"*recon_video_BSHWC.sh",python,selection_mouse +7077,8811201,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,22,"*recon_video_BSHWC.sha",python,selection_mouse +7078,8811202,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,23,"*recon_video_BSHWC.shap",python,selection_mouse +7079,8811244,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,24,"*recon_video_BSHWC.shape",python,selection_mouse +7080,8811271,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,25,"*recon_video_BSHWC.shape[",python,selection_mouse +7081,8811293,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,26,"*recon_video_BSHWC.shape[2",python,selection_mouse +7082,8811293,"TERMINAL",0,0,"207",,terminal_output +7083,8811522,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",6752,27,"*recon_video_BSHWC.shape[2:",python,selection_mouse +7084,11301010,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",0,0,"Switched from branch 'sampling-script-add-metrics' to 'main'",python,git_branch_checkout +7085,11371011,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",0,0,"Switched from branch 'main' to 'sampling-script-add-metrics'",python,git_branch_checkout +7086,11586042,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",0,0,"Switched from branch 'sampling-script-add-metrics' to 'main'",python,git_branch_checkout 
+7087,14356274,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",0,0,"Switched from branch 'main' to 'add-noise-to-combat-exposure-bias'",python,git_branch_checkout +7088,14386253,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",0,0,"Switched from branch 'add-noise-to-combat-exposure-bias' to 'main'",python,git_branch_checkout +7089,14406257,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/sample.py",0,0,"Switched from branch 'main' to 'add-noise-to-combat-exposure-bias'",python,git_branch_checkout diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-92ab1593-f937-4cc4-a174-544581a6ac991751909174142-2025_07_07-19.26.40.736/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-92ab1593-f937-4cc4-a174-544581a6ac991751909174142-2025_07_07-19.26.40.736/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..94bed8cb86d29a2fae7cad0c0240f8178ee28d26 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-92ab1593-f937-4cc4-a174-544581a6ac991751909174142-2025_07_07-19.26.40.736/source.csv @@ -0,0 +1,1034 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,149,"extension-output-ms-vscode-remote.remote-ssh-#1-Remote - SSH",0,0,"[19:26:17.504] Log Level: 2\n[19:26:17.567] SSH Resolver called for ""ssh-remote+horeka"", attempt 1\n[19:26:17.569] ""remote.SSH.useLocalServer"": true\n[19:26:17.569] ""remote.SSH.useExecServer"": true\n[19:26:17.570] ""remote.SSH.path"": undefined\n[19:26:17.570] ""remote.SSH.configFile"": undefined\n[19:26:17.570] ""remote.SSH.useFlock"": true\n[19:26:17.571] ""remote.SSH.lockfilesInTmp"": false\n[19:26:17.571] ""remote.SSH.localServerDownload"": auto\n[19:26:17.571] ""remote.SSH.remoteServerListenOnSocket"": false\n[19:26:17.571] ""remote.SSH.showLoginTerminal"": false\n[19:26:17.572] ""remote.SSH.defaultExtensions"": []\n[19:26:17.572] ""remote.SSH.loglevel"": 2\n[19:26:17.572] ""remote.SSH.enableDynamicForwarding"": true\n[19:26:17.572] ""remote.SSH.enableRemoteCommand"": false\n[19:26:17.573] ""remote.SSH.serverPickPortsFromRange"": {}\n[19:26:17.573] ""remote.SSH.serverInstallPath"": {}\n[19:26:17.573] ""remote.SSH.permitPtyAllocation"": false\n[19:26:17.574] ""remote.SSH.preferredLocalPortRange: undefined\n[19:26:17.574] ""remote.SSH.useCurlAndWgetConfigurationFiles: false\n[19:26:17.601] VS Code version: 0.48.9\n[19:26:17.601] Remote-SSH version: remote-ssh@0.113.1\n[19:26:17.601] linux x64\n[19:26:17.610] SSH Resolver called for host: horeka\n[19:26:17.610] Setting up SSH remote ""horeka""\n[19:26:17.619] Acquiring local install lock: /tmp/vscode-remote-ssh-09f2063d-install.lock\n[19:26:17.626] Looking for existing server data file at /home/maharajamihir/.config/Cursor/User/globalStorage/ms-vscode-remote.remote-ssh/vscode-ssh-host-09f2063d-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0-0.113.1-es/data.json\n[19:26:17.628] Using commit id ""61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0"" and quality ""stable"" for server\n[19:26:17.636] Install and start server if needed\n[19:26:17.645] PATH: 
/home/maharajamihir/.local/share/pnpm:/tmp/.mount_cursorm3mfCw/usr/bin/:/tmp/.mount_cursorm3mfCw/usr/sbin/:/tmp/.mount_cursorm3mfCw/usr/games/:/tmp/.mount_cursorm3mfCw/bin/:/tmp/.mount_cursorm3mfCw/sbin/:/home/maharajamihir/.local/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/opt/cuda/bin:/opt/cuda/nsight_compute:/opt/cuda/nsight_systems/bin:/usr/bin/site_perl:/usr/bin/vendor_perl:/usr/bin/core_perl\n[19:26:17.646] Checking ssh with ""ssh -V""\n[19:26:17.673] > OpenSSH_9.9p2, OpenSSL 3.4.1 11 Feb 2025\n\n[19:26:17.687] askpass server listening on /run/user/1000/vscode-ssh-askpass-b69f73c17c272a2832e51fb86385e3f882becd10.sock\n[19:26:17.688] Spawning local server with {""serverId"":1,""ipcHandlePath"":""/run/user/1000/vscode-ssh-askpass-6de42f510bc5e95e172f610e99b88c5bcd175212.sock"",""sshCommand"":""ssh"",""sshArgs"":[""-v"",""-T"",""-D"",""46417"",""-o"",""ConnectTimeout=15"",""horeka""],""serverDataFolderName"":"".cursor-server"",""dataFilePath"":""/home/maharajamihir/.config/Cursor/User/globalStorage/ms-vscode-remote.remote-ssh/vscode-ssh-host-09f2063d-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0-0.113.1-es/data.json""}\n[19:26:17.688] Local server env: {""SSH_AUTH_SOCK"":""/run/user/1000/gcr/ssh"",""SHELL"":""/bin/bash"",""DISPLAY"":"":0"",""ELECTRON_RUN_AS_NODE"":""1"",""SSH_ASKPASS"":""/home/maharajamihir/.cursor/extensions/ms-vscode-remote.remote-ssh-0.113.1/out/local-server/askpass.sh"",""VSCODE_SSH_ASKPASS_NODE"":""/tmp/.mount_cursorm3mfCw/usr/share/cursor/cursor"",""VSCODE_SSH_ASKPASS_EXTRA_ARGS"":"""",""VSCODE_SSH_ASKPASS_MAIN"":""/home/maharajamihir/.cursor/extensions/ms-vscode-remote.remote-ssh-0.113.1/out/askpass-main.js"",""VSCODE_SSH_ASKPASS_HANDLE"":""/run/user/1000/vscode-ssh-askpass-b69f73c17c272a2832e51fb86385e3f882becd10.sock""}\n[19:26:17.705] Spawned 187581\n[19:26:18.035] > local-server-1> Running ssh connection command: ssh -v -T -D 46417 -o ConnectTimeout=15 horeka\n[19:26:18.047] > local-server-1> Spawned ssh, pid=187590\n[19:26:18.058] stderr> OpenSSH_9.9p2, OpenSSL 3.4.1 11 Feb 2025\n[19:26:18.352] stderr> debug1: Server host key: ssh-ed25519 SHA256:yEe5nJ5hZZ1YbgieWr+phqRZKYbrV7zRe8OR3X03cn0\n[19:26:19.887] stderr> Authenticated to horeka.scc.kit.edu ([2a00:1398:4:180c::8d34:2b13]:22) using ""publickey"".\n[19:26:20.676] > ********************************************************************************\n> * _ _ _ __ *\n> * | | | | | |/ / *\n> * | |__| | ___ _ __ ___| ' / __ _ *\n> * ----| __ |/ _ \| '__/ _ \ < / _` |----> *\n> * ----| |--| | (_) | |-| __/ . \ (_| |----> *\n> * ----|_|--|_|\___/|_|--\___|_|\_\__,_|----> *\n> * *\n> * *\n> * Documentation: https://nhr.kit.edu/userdocs/horeka/ *\n> * Support: https://support.nhr.kit.edu/ *\n> * *\n> ********************************************************************************\n> Last failed login: Wed Jul 2 12:42:09 CEST 2025 from 2001:4ca0:2fff:2:5:2:0:4c on ssh:notty\n> There was 1 failed login attempt since the last successful login.\n[19:26:24.495] > ready: 9956fb6bda10\n[19:26:24.566] > Linux 5.14.0-427.68.1.el9_4.x86_64 #1 SMP PREEMPT_DYNAMIC Fri May 2 10:44:30 EDT 2025\n[19:26:24.567] Platform: linux\n[19:26:24.630] > /bin/bash\n[19:26:24.631] Shell: bash\n[19:26:24.631] Creating bash subshell inside ""bash""\n[19:26:24.699] > bash version: 5.1.8(1)-release\n[19:26:24.730] > bash version: 5.1.8(1)-release\n[19:26:24.772] > 9956fb6bda10: running\n[19:26:24.845] > Found existing installation at /home/hk-project-p0023960/tum_cte0515/.cursor-server...\n> Starting VS Code CLI... 
""/home/hk-project-p0023960/tum_cte0515/.cursor-server/cursor-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0"" command-shell --cli-data-dir ""/home/hk-project-p0023960/tum_cte0515/.cursor-server/cli"" --on-port --on-host=127.0.0.1 --parent-process-id 3929128 &> ""/home/hk-project-p0023960/tum_cte0515/.cursor-server/.cli.61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0.log"" < /dev/null\n[19:26:24.846] > printenv:\n[19:26:24.902] > SHELL=/bin/bash\n> LMOD_SITE_NAME=KIT\n> INTEL_MAN_DIR=/software/all/toolkit/Intel_OneAPI/compiler/2025.1/share/man\n> KIT_FAMILY_COMPILER=compiler/intel\n> GDD_INCLUDE_DIR=/software/all/toolkit/Intel_OneAPI/debugger/2025.0/opt/debugger/include\n> MXM_LOG_LEVEL=ERROR\n> PKG_CONFIG_PATH=/software/all/devel/cuda/12.4/pkgconfig:/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/lib/pkgconfig:/software/all/toolkit/Intel_OneAPI/mkl/2025.1/lib/pkgconfig\n> HOSTNAME=hkn1991.localdomain\n> HISTSIZE=1000\n> FPATH=/usr/share/lmod/lmod/init/ksh_funcs\n> MPICC=/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/bin/mpicc\n> _ModuleTable002_=aWxlci9pbnRlbC8yMDI1LjFfbGx2bSIsCmxvYWRPcmRlciA9IDIsCnByb3BUID0ge30sCnN0YWNrRGVwdGggPSAwLApzdGF0dXMgPSAiYWN0aXZlIiwKdXNlck5hbWUgPSAiY29tcGlsZXIvaW50ZWwiLAp3ViA9ICIwMDAwMDIwMjUuMDAwMDAwMDAxLipfLipsbHZtLip6ZmluYWwiLAp9LApbImRldmVsL2N1ZGEiXSA9IHsKZm4gPSAiL3NvZnR3YXJlL2FsbC9sbW9kL21vZHVsZWZpbGVzL0NvcmUvZGV2ZWwvY3VkYS8xMi40Lmx1YSIsCmZ1bGxOYW1lID0gImRldmVsL2N1ZGEvMTIuNCIsCmxvYWRPcmRlciA9IDQsCnByb3BUID0gewpzdGF0ZSA9IHsKZXhwZXJpbWVudGFsID0gMSwKfSwKfSwKcmVmX2NvdW50ID0gMSwKc3RhY2tEZXB0aCA9IDEsCnN0YXR1cyA9ICJhY3RpdmUi\n> __LMOD_REF_COUNT_MODULEPATH=/software/all/lmod/modulefiles/MPI/intel/2025.1_llvm/openmpi/5.0:1;/software/all/lmod/modulefiles/Compiler/intel/2025.1_llvm:1;/software/all/lmod/modulefiles/Core:1;/software/community/ICON/modulefiles:1\n> INTEL_DOC_DIR=/software/all/toolkit/Intel_OneAPI/compiler/2025.1/share/man\n> HISTTIMEFORMAT=%F %T \n> _ModuleTable005_=RGVwdGggPSAwLApzdGF0dXMgPSAiYWN0aXZlIiwKdXNlck5hbWUgPSAibnVtbGliL21rbCIsCndWID0gIjAwMDAwMjAyNS4wMDAwMDAwMDEuKnpmaW5hbCIsCn0sCn0sCm1wYXRoQSA9IHsKIi9zb2Z0d2FyZS9hbGwvbG1vZC9tb2R1bGVmaWxlcy9NUEkvaW50ZWwvMjAyNS4xX2xsdm0vb3Blbm1waS81LjAiCiwgIi9zb2Z0d2FyZS9hbGwvbG1vZC9tb2R1bGVmaWxlcy9Db21waWxlci9pbnRlbC8yMDI1LjFfbGx2bSIKLCAiL3NvZnR3YXJlL2FsbC9sbW9kL21vZHVsZWZpbGVzL0NvcmUiLCAiL3NvZnR3YXJlL2NvbW11bml0eS9JQ09OL21vZHVsZWZpbGVzIiwKfSwKc3lzdGVtQmFzZU1QQVRIID0gIi9zb2Z0d2FyZS9hbGwvbG1vZC9tb2R1bGVmaWxlcy9Db3JlOi9zb2Z0d2Fy\n> PROJECT_GROUP=hk-project-p0023960\n> MKLINCLUDE=/software/all/toolkit/Intel_OneAPI/mkl/2025.1/include\n> MPICXX=/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/bin/mpic++\n> MKL_LIB_DIR=/software/all/toolkit/Intel_OneAPI/mkl/2025.1/lib/intel64\n> LMOD_DIR=/usr/share/lmod/lmod/libexec\n> FFLAGS=-O2 -xCORE-AVX2\n> MPIFC=/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/bin/mpifort\n> CUDA_LIBRARY_DIR=/software/all/devel/cuda/12.4/lib64\n> MKL_BIN_DIR=/software/all/toolkit/Intel_OneAPI/mkl/2025.1/bin\n> PWD=/hkfs/home/project/hk-project-p0023960/tum_cte0515\n> LOGNAME=tum_cte0515\n> XDG_SESSION_TYPE=tty\n> MODULESHOME=/usr/share/lmod/lmod\n> MANPATH=/software/all/devel/cuda/12.4/doc/man:/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/share/man:/software/all/toolkit/Intel_OneAPI/compiler/2025.1/share/man:/usr/share/lmod/lmod/share/man:/usr/share/man::\n> CXX=/software/all/toolkit/Intel_OneAPI/compiler/2025.1/bin/icpx\n> CXXFLAGS=-O2 -xCORE-AVX2\n> MKL_DOC_DIR=/software/all/toolkit/Intel_OneAPI/mkl/2025.1/documentation\n> 
__LMOD_REF_COUNT_INCLUDE=/software/all/devel/cuda/12.4/include:1;/software/all/toolkit/Intel_OneAPI/mkl/2025.1/include:1\n> MKL_HOME=/software/all/toolkit/Intel_OneAPI/mkl/2025.1\n> __LMOD_REF_COUNT_CXX_INCLUDE_DIR=/software/all/devel/cuda/12.4/include:1\n> KIT_FAMILY_MPI_VERSION=5.0\n> MOTD_SHOWN=pam\n> __LMOD_REF_COUNT_PATH=/software/all/devel/cuda/12.4/bin:1;/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/bin:1;/software/all/toolkit/Intel_OneAPI/compiler/2025.1/bin:1;/usr/share/Modules/bin:1;/usr/local/bin:1;/usr/bin:1;/usr/local/sbin:1;/usr/sbin:1;.:1\n> __MODULES_SHARE_MANPATH=:1\n> HOME=/home/hk-project-p0023960/tum_cte0515\n> _ModuleTable_Sz_=6\n> LANG=en_US.UTF-8\n> __LMOD_REF_COUNT_LIBRARY_PATH=/software/all/devel/cuda/12.4/lib64:1;/software/all/toolkit/Intel_OneAPI/mkl/2025.1/lib/intel64:1;/software/all/toolkit/Intel_OneAPI/compiler/2025.1/lib:1\n> __LMOD_REF_COUNT_PKG_CONFIG_PATH=/software/all/devel/cuda/12.4/pkgconfig:1;/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/lib/pkgconfig:1;/software/all/toolkit/Intel_OneAPI/mkl/2025.1/lib/pkgconfig:1\n> CUDA_MAN_DIR=/software/all/devel/cuda/12.4/doc/man\n> CXX_INCLUDE_DIR=/software/all/devel/cuda/12.4/include\n> SCRATCH=/scratch\n> CUDA_VERSION=12.4\n> LSDF=/lsdf\n> __LMOD_REF_COUNT_CPATH=/software/all/devel/cuda/12.4/include:1;/software/all/toolkit/Intel_OneAPI/mkl/2025.1/include:1;/software/all/toolkit/Intel_OneAPI/compiler/2025.1/include:1\n> LMOD_SETTARG_FULL_SUPPORT=no\n> TMPDIR=/scratch\n> GDB_HOME=/software/all/toolkit/Intel_OneAPI/debugger/2025.0\n> VSCODE_AGENT_FOLDER=/home/hk-project-p0023960/tum_cte0515/.cursor-server\n> MKL_INC_DIR=/software/all/toolkit/Intel_OneAPI/mkl/2025.1/include\n> DATASETS=/hkfs/home/dataset/datasets\n> LMOD_VERSION=8.7.55\n> SSH_CONNECTION=2001:4ca0:2fff:2:5:2:0:4c 39812 2a00:1398:4:180c::8d34:2b13 22\n> CUDA_PKGCONFIG_DIR=/software/all/devel/cuda/12.4/pkgconfig\n> LMOD_PACKAGE_PATH=/etc/lmod/?.lua;;\n> _ModuleTable003_=LAp1c2VyTmFtZSA9ICJkZXZlbC9jdWRhLzEyLjQiLAp3ViA9ICJeMDAwMDAwMTIuMDAwMDAwMDA0Lip6ZmluYWwiLAp9LApkb3QgPSB7CmZuID0gIi9zb2Z0d2FyZS9hbGwvbG1vZC9tb2R1bGVmaWxlcy9Db3JlL2RvdC5sdWEiLApmdWxsTmFtZSA9ICJkb3QiLApsb2FkT3JkZXIgPSAxLApwcm9wVCA9IHt9LApzdGFja0RlcHRoID0gMCwKc3RhdHVzID0gImFjdGl2ZSIsCnVzZXJOYW1lID0gImRvdCIsCndWID0gIk0uKnpmaW5hbCIsCn0sClsibXBpL29wZW5tcGkiXSA9IHsKYWN0aW9uQSA9IHsKInByZXBlbmRfcGF0aChcIk1PRFVMRVBBVEhcIixcIi9zb2Z0d2FyZS9hbGwvbG1vZC9tb2R1bGVmaWxlcy9NUEkvaW50ZWwvMjAyNS4xX2xsdm0vb3Blbm1waS81LjBcIikiLAp9\n> MODULEPATH_ROOT=/usr/share/modulefiles\n> MKL_EXA_DIR=/software/all/toolkit/Intel_OneAPI/mkl/2025.1/share/doc/mkl/examples\n> FCFLAGS=-O2 -xCORE-AVX2\n> XDG_SESSION_CLASS=user\n> LMOD_PKG=/usr/share/lmod/lmod\n> MKL_PKGCONFIG_DIR=/software/all/toolkit/Intel_OneAPI/mkl/2025.1/lib/pkgconfig\n> KIT_FAMILY_MPI=mpi/openmpi\n> F77=/software/all/toolkit/Intel_OneAPI/compiler/2025.1/bin/ifx\n> LESSOPEN=||/usr/bin/lesspipe.sh %s\n> CUDA_INCLUDE_DIR=/software/all/devel/cuda/12.4/include\n> USER=tum_cte0515\n> LIBRARY_PATH=/software/all/devel/cuda/12.4/lib64:/software/all/toolkit/Intel_OneAPI/mkl/2025.1/lib/intel64:/software/all/toolkit/Intel_OneAPI/compiler/2025.1/lib\n> CUDA_PATH=/software/all/devel/cuda/12.4\n> MPI_ROOT=/software/all/mpi/openmpi/5.0-intel-2025.1_llvm\n> CUDA_ROOT=/software/all/devel/cuda/12.4\n> INTEL_BIN_DIR=/software/all/toolkit/Intel_OneAPI/compiler/2025.1/bin\n> GDB_VERSION=2025.0\n> MODULES_RUN_QUARANTINE=LD_LIBRARY_PATH LD_PRELOAD\n> LOADEDMODULES=dot:compiler/intel/2025.1_llvm:numlib/mkl/2025.1:devel/cuda/12.4:mpi/openmpi/5.0\n> TEMP=/scratch\n> 
GDB_INC_DIR=/software/all/toolkit/Intel_OneAPI/debugger/2025.0/opt/debugger/include\n> INCLUDE=/software/all/devel/cuda/12.4/include:/software/all/toolkit/Intel_OneAPI/mkl/2025.1/include\n> LMOD_ROOT=/usr/share/lmod\n> SHLVL=2\n> _ModuleTable006_=ZS9jb21tdW5pdHkvSUNPTi9tb2R1bGVmaWxlcyIsCn0K\n> LMOD_sys=Linux\n> GDB_BIN_DIR=/software/all/toolkit/Intel_OneAPI/debugger/2025.0/opt/debugger/bin\n> LMOD_PAGER=less\n> __LMOD_REF_COUNT_MANPATH=/software/all/devel/cuda/12.4/doc/man:1;/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/share/man:1;/software/all/toolkit/Intel_OneAPI/compiler/2025.1/share/man:1;/usr/share/lmod/lmod/share/man:1;/usr/share/man:1;:1\n> XDG_SESSION_ID=10617\n> KIT_FAMILY_COMPILER_VERSION=2025.1_llvm\n> _ModuleTable001_=X01vZHVsZVRhYmxlXyA9IHsKTVR2ZXJzaW9uID0gMywKY19yZWJ1aWxkVGltZSA9IGZhbHNlLApjX3Nob3J0VGltZSA9IGZhbHNlLApkZXB0aFQgPSB7fSwKZmFtaWx5ID0gewpjb21waWxlciA9ICJjb21waWxlci9pbnRlbCIsCm1waSA9ICJtcGkvb3Blbm1waSIsCn0sCm1UID0gewpbImNvbXBpbGVyL2ludGVsIl0gPSB7CmFjdGlvbkEgPSB7CiJwcmVwZW5kX3BhdGgoXCJNT0RVTEVQQVRIXCIsXCIvc29mdHdhcmUvYWxsL2xtb2QvbW9kdWxlZmlsZXMvQ29tcGlsZXIvaW50ZWwvMjAyNS4xX2xsdm1cIikiLAp9LApmbiA9ICIvc29mdHdhcmUvYWxsL2xtb2QvbW9kdWxlZmlsZXMvQ29yZS9jb21waWxlci9pbnRlbC8yMDI1LjFfbGx2bS5sdWEiLApmdWxsTmFtZSA9ICJjb21w\n> MPIRUN=/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/bin/mpirun\n> LD_LIBRARY_PATH=/software/all/devel/cuda/12.4/lib64:/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/lib:/software/all/toolkit/Intel_OneAPI/mkl/2025.1/lib/intel64:/software/all/toolkit/Intel_OneAPI/compiler/2025.1/lib\n> LMOD_FAMILY_COMPILER=compiler/intel\n> XDG_RUNTIME_DIR=/run/user/999226\n> CUDA_INC_DIR=/software/all/devel/cuda/12.4/include\n> MKL_VERSION=2025.1\n> GDB_LIB_DIR=/software/all/toolkit/Intel_OneAPI/debugger/2025.0/opt/debugger/lib\n> _ModuleTable004_=LApmbiA9ICIvc29mdHdhcmUvYWxsL2xtb2QvbW9kdWxlZmlsZXMvQ29tcGlsZXIvaW50ZWwvMjAyNS4xX2xsdm0vbXBpL29wZW5tcGkvNS4wLmx1YSIsCmZ1bGxOYW1lID0gIm1waS9vcGVubXBpLzUuMCIsCmxvYWRPcmRlciA9IDUsCnByb3BUID0ge30sCnN0YWNrRGVwdGggPSAwLApzdGF0dXMgPSAiYWN0aXZlIiwKdXNlck5hbWUgPSAibXBpL29wZW5tcGkiLAp3ViA9ICIwMDAwMDAwMDUuKnpmaW5hbCIsCn0sClsibnVtbGliL21rbCJdID0gewpmbiA9ICIvc29mdHdhcmUvYWxsL2xtb2QvbW9kdWxlZmlsZXMvQ29yZS9udW1saWIvbWtsLzIwMjUuMS5sdWEiLApmdWxsTmFtZSA9ICJudW1saWIvbWtsLzIwMjUuMSIsCmxvYWRPcmRlciA9IDMsCnByb3BUID0ge30sCnN0YWNr\n> LMOD_FAMILY_MPI_VERSION=5.0\n> OMPI_VERSION=5.0\n> CUDA_DOC_DIR=/software/all/devel/cuda/12.4/doc\n> SSH_CLIENT=2001:4ca0:2fff:2:5:2:0:4c 39812 22\n> __MODULES_LMINIT=module use --append /usr/share/Modules/modulefiles:module use --append /etc/modulefiles:module use --append /usr/share/modulefiles\n> MKLROOT=/software/all/toolkit/Intel_OneAPI/mkl/2025.1\n> INTEL_INC_DIR=/software/all/toolkit/Intel_OneAPI/compiler/2025.1/include\n> OMP_NUM_THREADS=1\n> LMOD_FAMILY_MPI=mpi/openmpi\n> FC=/software/all/toolkit/Intel_OneAPI/compiler/2025.1/bin/ifx\n> which_declare=declare -f\n> CUDA_BIN_DIR=/software/all/devel/cuda/12.4/bin\n> LMOD_FAMILY_COMPILER_VERSION=2025.1_llvm\n> CUDA_HOME=/software/all/devel/cuda/12.4\n> INTEL_INCLUDE_DIR=/software/all/toolkit/Intel_OneAPI/compiler/2025.1/include\n> XDG_DATA_DIRS=/home/hk-project-p0023960/tum_cte0515/.local/share/flatpak/exports/share:/var/lib/flatpak/exports/share:/usr/local/share:/usr/share\n> TMP=/scratch\n> 
PATH=/home/hk-project-p0023960/tum_cte0515/.local/bin:/home/hk-project-p0023960/tum_cte0515/bin:/software/all/bin:/software/all/devel/cuda/12.4/bin:/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/bin:/software/all/toolkit/Intel_OneAPI/compiler/2025.1/bin:/usr/share/Modules/bin:/usr/local/bin:/usr/bin:/usr/local/sbin:/usr/sbin:.\n> HKPROJECT=/hkproject/hk-project-p0023960/tum_cte0515\n> MODULEPATH=/software/all/lmod/modulefiles/MPI/intel/2025.1_llvm/openmpi/5.0:/software/all/lmod/modulefiles/Compiler/intel/2025.1_llvm:/software/all/lmod/modulefiles/Core:/software/community/ICON/modulefiles\n> CC=/software/all/toolkit/Intel_OneAPI/compiler/2025.1/bin/icx\n> CFLAGS=-O2 -xCORE-AVX2\n> _LMFILES_=/software/all/lmod/modulefiles/Core/dot.lua:/software/all/lmod/modulefiles/Core/compiler/intel/2025.1_llvm.lua:/software/all/lmod/modulefiles/Core/numlib/mkl/2025.1.lua:/software/all/lmod/modulefiles/Core/devel/cuda/12.4.lua:/software/all/lmod/modulefiles/Compiler/intel/2025.1_llvm/mpi/openmpi/5.0.lua\n> CUDA_LIB_DIR=/software/all/devel/cuda/12.4/lib64\n> DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/999226/bus\n> LMOD_CMD=/usr/share/lmod/lmod/libexec/lmod\n> LMOD_SYSTEM_NAME=hk\n> MKL_NUM_THREADS=1\n> MAIL=/var/spool/mail/tum_cte0515\n> __LMOD_REF_COUNT_LD_LIBRARY_PATH=/software/all/devel/cuda/12.4/lib64:1;/software/all/mpi/openmpi/5.0-intel-2025.1_llvm/lib:1;/software/all/toolkit/Intel_OneAPI/mkl/2025.1/lib/intel64:1;/software/all/toolkit/Intel_OneAPI/compiler/2025.1/lib:1\n> INTEL_HOME=/software/all/toolkit/Intel_OneAPI/compiler/2025.1\n> INTEL_LICENSE_FILE=28518@scclic1.scc.kit.edu\n> LMOD_SYSTEM_DEFAULT_MODULES=dot:compiler/intel:numlib/mkl:mpi/openmpi\n> INTEL_LIB_DIR=/software/all/toolkit/Intel_OneAPI/compiler/2025.1/lib\n> ws_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data\n> MKLPATH=/software/all/toolkit/Intel_OneAPI/mkl/2025.1/lib/intel64\n> CPATH=/software/all/devel/cuda/12.4/include:/software/all/toolkit/Intel_OneAPI/mkl/2025.1/include:/software/all/toolkit/Intel_OneAPI/compiler/2025.1/include\n> PROJECT=/home/hk-project-p0023960/tum_cte0515\n> INTEL_VERSION=2025.1\n> GDB_MAN_DIR=/software/all/toolkit/Intel_OneAPI/debugger/2025.0/share/man\n> CLUSTER=hk\n> GDB_INF_DIR=/software/all/toolkit/Intel_OneAPI/debugger/2025.0/share/info\n> MODULES_CMD=/usr/share/Modules/libexec/modulecmd.tcl\n> BASH_FUNC_ml%%=() { eval ""$($LMOD_DIR/ml_cmd ""$@"")""\n> }\n> BASH_FUNC_which%%=() { ( alias;\n> eval ${which_declare} ) | /usr/bin/which --tty-only --read-alias --read-functions --show-tilde --show-dot $@\n> }\n> BASH_FUNC_module%%=() { eval $(/software/all/admin/modules/module-wrapper/modulecmd bash $*)\n> }\n> BASH_FUNC_scl%%=() { if [ ""$1"" = ""load"" -o ""$1"" = ""unload"" ]; then\n> eval ""module $@"";\n> else\n> /usr/bin/scl ""$@"";\n> fi\n> }\n> BASH_FUNC__module_raw%%=() { eval ""$(/usr/bin/tclsh '/usr/share/Modules/libexec/modulecmd.tcl' bash ""$@"")"";\n> _mlstatus=$?;\n> return $_mlstatus\n> }\n> _=/usr/bin/printenv\n[19:26:24.941] > Removing old logfile at /home/hk-project-p0023960/tum_cte0515/.cursor-server/.cli.61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0.log\n[19:26:24.957] > Spawned remote CLI: 3929153\n[19:26:25.107] > Waiting for server log...\n[19:26:25.174] > 9956fb6bda10: start\n> SSH_AUTH_SOCK====\n> DISPLAY====\n> listeningOn==127.0.0.1:44211==\n> osReleaseId==rhel==\n> arch==x86_64==\n> vscodeArch==x64==\n> bitness==64==\n> tmpDir==/run/user/999226==\n> platform==linux==\n> unpackResult====\n> didLocalDownload==0==\n> downloadTime====\n> installTime====\n> 
serverStartTime==327==\n> execServerToken==a11a1111-11a1-11a1-a111-11aaa11a1a11==\n> 9956fb6bda10: end\n[19:26:25.174] Received install output: \nSSH_AUTH_SOCK====\nDISPLAY====\nlisteningOn==127.0.0.1:44211==\nosReleaseId==rhel==\narch==x86_64==\nvscodeArch==x64==\nbitness==64==\ntmpDir==/run/user/999226==\nplatform==linux==\nunpackResult====\ndidLocalDownload==0==\ndownloadTime====\ninstallTime====\nserverStartTime==327==\nexecServerToken==a11a1111-11a1-11a1-a111-11aaa11a1a11==\n\n[19:26:25.176] Remote server is listening on port 44211\n[19:26:25.176] Parsed server configuration: {""serverConfiguration"":{""remoteListeningOn"":{""port"":44211},""osReleaseId"":""rhel"",""arch"":""x86_64"",""sshAuthSock"":"""",""display"":"""",""tmpDir"":""/run/user/999226"",""platform"":""linux"",""execServerToken"":""a11a1111-11a1-11a1-a111-11aaa11a1a11""},""serverStartTime"":327,""installUnpackCode"":""""}\n[19:26:25.178] Persisting server connection details to /home/maharajamihir/.config/Cursor/User/globalStorage/ms-vscode-remote.remote-ssh/vscode-ssh-host-09f2063d-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0-0.113.1-es/data.json\n[19:26:25.186] Starting forwarding server. local port 41951 -> socksPort 46417 -> remotePort 44211\n[19:26:25.187] Forwarding server listening on port 41951\n[19:26:25.188] Waiting for ssh tunnel to be ready\n[19:26:25.191] [Forwarding server port 41951] Got connection 0\n[19:26:25.193] Tunneled port 44211 to local port 41951\n[19:26:25.194] Resolved ""ssh-remote+horeka"" to ""port 41951""\n[19:26:25.218] Initizing new exec server for ssh-remote+horeka\n[19:26:25.218] Resolving exec server at port 41951\n[19:26:25.263] [Forwarding server port 41951] Got connection 1\n[19:26:25.498] Exec server for ssh-remote+horeka created and cached\n[19:26:25.523] ------\n\n\n\n\n[19:26:25.872] [server] Checking /home/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/log.txt and /home/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/pid.txt for a running server...\n[19:26:25.901] [server] Installing and setting up Cursor Server...\n[19:26:25.940] [server] Server setup complete\n[19:26:25.941] [server] Starting server...\n[19:26:25.941] [server] Starting server with command... 
Command { std: ""/home/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/bin/cursor-server"" ""--connection-token=remotessh"" ""--accept-server-license-terms"" ""--start-server"" ""--enable-remote-auto-shutdown"" ""--socket-path=/scratch/cursor-8a3635c8-7398-477f-8d98-3fae27dee760"", kill_on_drop: false }\n[19:26:26.631] [server] Server bound to /scratch/cursor-8a3635c8-7398-477f-8d98-3fae27dee760\n[19:26:26.632] [server] Extension host agent listening on /scratch/cursor-8a3635c8-7398-477f-8d98-3fae27dee760\n[19:26:26.634] [server] parsed location: ""/scratch/cursor-8a3635c8-7398-477f-8d98-3fae27dee760""\n[19:26:26.634] [server] \n[19:26:26.635] [server] Server started\n[19:26:26.792] [server] [19:26:26] \n[19:26:26.792] [server] \n[19:26:26.792] [server] \n[19:26:26.793] [server] \n[19:26:27.371] [server] [19:26:27] Extension host agent started.\n[19:26:27.633] [server] [19:26:27] [][b344b228][ManagementConnection] New connection established.\n[19:26:27.635] [server] [19:26:27] [][491e413e][ExtensionHostConnection] New connection established.\n[19:26:29.503] [server] [19:26:29] [][491e413e][ExtensionHostConnection] <3929556> Launched Extension Host Process.\n[19:26:32.398] [server] rejected promise not handled within 1 second: CodeExpectedError: Could not find pty 4 on pty host\n[19:26:32.399] [server] stack trace: CodeExpectedError: Could not find pty 4 on pty host\n[19:26:32.400] [server] at A.W (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:48:12235)\n[19:26:32.402] [server] at A.resize (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:48:8775)\n[19:26:32.402] [server] at N.s. 
(file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:48:2962)\n[19:26:32.403] [server] at el.q (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:29:78824)\n[19:26:32.403] [server] at ds.value (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:29:78226)\n[19:26:32.403] [server] at _.B (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:29:746)\n[19:26:32.404] [server] at _.fire (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:29:964)\n[19:26:32.404] [server] at process.w (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:24:29139)\n[19:26:32.405] [server] at process.emit (node:events:518:28)\n[19:26:32.405] [server] at emit (node:internal/child_process:950:14)\n[19:26:32.405] [server] at process.processTicksAndRejections (node:internal/process/task_queues:83:21)\n[19:26:32.406] [server] [19:26:32] Error [CodeExpectedError]: Could not find pty 4 on pty host\n[19:26:32.406] [server] at A.W (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:48:12235)\n[19:26:32.407] [server] at A.resize (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:48:8775)\n[19:26:32.407] [server] at N.s. 
(file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:48:2962)\n[19:26:32.407] [server] at Object.call (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:31:4203)\n[19:26:32.408] [server] at el.s (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:29:79301)\n[19:26:32.408] [server] at el.q (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:29:78824)\n[19:26:32.408] [server] at ds.value (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:29:78226)\n[19:26:32.409] [server] at _.B (file:///hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/cli/servers/Stable-61e99179e4080fecf9d8b92c6e2e3e00fbfb53f0/server/out/vs/platform/terminal/node/ptyHostMain.js:29:746)\n[19:26:36.948] Opening exec server for ssh-remote+horeka\n[19:26:37.647] Verified and reusing cached exec server for ssh-remote+horeka\n[19:26:37.886] Opening exec server for ssh-remote+horeka\n[19:26:38.070] Verified and reusing cached exec server for ssh-remote+horeka\n",Log,tab +3,2124,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"7:26:40 PM [info] Activating crowd-code\n7:26:40 PM [info] Recording started\n7:26:40 PM [info] Initializing git provider using file system watchers...\n7:26:40 PM [info] Git repository found\n7:26:40 PM [info] Git provider initialized successfully\n7:26:40 PM [info] Initial git state: [object Object]\n",Log,tab +4,4583,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +5,4592,"TERMINAL",0,0,"]633;E;2025-07-07 19:26:45 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;6b90a4fa-6ae4-4de2-a698-148e6f7d33e3]633;C",,terminal_output +6,4630,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +7,18458,"train_tokenizer.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom models.tokenizer import 
TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.parameter_utils import count_parameters_by_component\nfrom utils.logger import CompositeLogger\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data_tfrecords/coinrun""\n checkpoint: str = """"\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n min_lr: float = 0.0\n max_lr: float = 3e-4\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 8\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n # Logging\n log_dir: str = ""logs/"" \n loggers: list[str] = field(default_factory=lambda: [""console""])\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_gradients: bool = False\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n 
in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if jax.process_index() == 0:\n cfg = vars(args).copy()\n cfg[""model_param_count""] = param_counts\n logger = CompositeLogger(args.loggers, cfg)\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n restore_target = {""model"": train_state}\n restore_args = orbax_utils.restore_args_from_target(restore_target)\n train_state.params[""params""].update(\n PyTreeCheckpointer()\n .restore(args.checkpoint, item=restore_target, restore_args=restore_args)[\n ""model""\n ]\n .params[""params""]\n )\n # Assume checkpoint is of the form tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- TRAIN LOOP ---\n # tfrecord_files = [\n # os.path.join(args.data_dir, x)\n # for x in os.listdir(args.data_dir)\n # if x.endswith("".tfrecord"")\n # ]\n # dataloader = get_dataloader(\n # # NOTE: We deliberately pass the global batch size\n # # The dataloader shards the dataset across all processes\n # tfrecord_files,\n # args.seq_len,\n # args.batch_size,\n # *image_shape,\n # seed=args.seed,\n # )\n # dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in dataloader) # type: ignore\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n # for videos in dataloader:\n videos = np.load(""overfit_dir/single_sample_corner.npy"")\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n while True:\n # --- Train step ---\n rng, _rng, _rng_dropout = jax.random.split(rng, 3)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n step += 1\n\n # --- Logging ---\n if step % args.log_interval == 0 and jax.process_index() == 0:\n logger.log_metrics(\n {\n ""loss"": loss,\n **metrics,\n },\n step\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after 
indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=np.asarray(gt_seq[0]).astype(np.uint8),\n recon=np.asarray(recon_seq[0]).astype(np.uint8),\n true_vs_recon=np.asarray(comparison_seq.astype(np.uint8)\n ),\n )\n logger.log_images(log_images, step)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +8,18462,"train_tokenizer.py",4901,0,"",python,selection_mouse +9,35690,"TERMINAL",0,0,"",,terminal_focus +10,64526,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +11,64568,"TERMINAL",0,0,"]633;E;2025-07-07 19:27:45 cd $ws_dir;24571f6f-7d62-4371-8977-e42b5a81d394]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data]633;D;0",,terminal_output +12,66510,"TERMINAL",0,0,"cd ..",,terminal_command +13,66576,"TERMINAL",0,0,"]633;E;2025-07-07 19:27:47 cd ..;24571f6f-7d62-4371-8977-e42b5a81d394]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared]633;D;0",,terminal_output +14,67868,"TERMINAL",0,0,"cd checkpoints/",,terminal_command +15,68124,"TERMINAL",0,0,"ls",,terminal_command +16,68168,"TERMINAL",0,0,"]633;E;2025-07-07 19:27:48 ls;24571f6f-7d62-4371-8977-e42b5a81d394]633;C",,terminal_output +17,68352,"TERMINAL",0,0,"0000 3290391 3292258 3292334 3294601 3296574 3297582 3299016 3299258 3300663 3301031 3310436 3313565 dynamics_ckpt_dir train_lam_minecraft_overfit_sample\r\n3290283 3290392 3292328 3292335 3294602 3296575 3297586 3299062 3299259 3300672 3306801 3310437 3313570 lam train_tokenizer_batch_size_scaling_16_node\r\n3290284 3290439 3292329 3292336 3294603 3297569 3297606 3299063 3299272 3301025 3307618 3311671 3313571 lam-1-action train_tokenizer_minecraft_overfit_sample\r\n3290295 3290440 3292330 3292337 3296502 3297575 3297671 3299065 3299579 3301026 3307619 3311672 3313572 lam_ckpt_dir\r\n3290296 3291405 3292331 3292338 3296540 3297576 3297693 3299066 3300233 3301027 3309662 3313562 3316022 lam_main_test\r\n3290366 3292213 3292332 3292339 3296571 3297577 3297706 3299068 3300290 3301029 3309663 3313563 debug tokenizer\r\n3290367 3292221 3292333 3294600 3296573 3297578 3297727 3299069 3300658 3301030 3309699 3313564 dyn tokenizer_ckpt_dir\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints]633;D;0",,terminal_output +18,83021,"TERMINAL",0,0,"cd 3318550",,terminal_command +19,85061,"TERMINAL",0,0,"cd ..",,terminal_command +20,86396,"TERMINAL",0,0,"cd data/",,terminal_command +21,88327,"TERMINAL",0,0,"cd 3318550",,terminal_command +22,89595,"TERMINAL",0,0,"ls",,terminal_command +23,89647,"TERMINAL",0,0,"]633;E;2025-07-07 19:28:10 ls;24571f6f-7d62-4371-8977-e42b5a81d394]633;Ccheckpoints knoms_arrayrecords_500_shards knoms_tfrecords_200_shards open_ai_minecraft_first_try overfit_dir_openai_npy\r\ncoinrun knoms_arrayrecords_500_shards_optimized_layout knoms_tfrecords_2_shards_overfit open_ai_minecraft_first_try_npy overfit_dir_openai_tfrecord\r\ndata_knoms knoms_mp4 knoms_tfrecords_500_shards open_ai_minecraft_first_try_tfrecord procgen_env_16_episodes_20000\r\ndummy knoms_mp4_clips knoms_tfrecords_500_shards_overfit_1 open_ai_minecraft_npy\r\ndummy_arrayrecords 
knoms_npy knoms_tfrecords_500_shards_overfit_10 open_ai_minecraft_tfrecord\r\ndummy_arrayrecords_500_shards knoms_tfrecords open_ai_minecraft overfit_dir\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data]633;D;0",,terminal_output +24,99744,"TERMINAL",0,0,"cd /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_lam_action_space_scaling_8/3318550",,terminal_command +25,99813,"TERMINAL",0,0,"]633;E;2025-07-07 19:28:20 cd /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_lam_action_space_scaling_8/3318550;24571f6f-7d62-4371-8977-e42b5a81d394]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_lam_action_space_scaling_8/3318550]633;D;0",,terminal_output +26,101937,"TERMINAL",0,0,"ls",,terminal_command +27,101979,"TERMINAL",0,0,"]633;E;2025-07-07 19:28:22 ls;24571f6f-7d62-4371-8977-e42b5a81d394]633;C",,terminal_output +28,102306,"TERMINAL",0,0,"lam_1751657975_1000 lam_1751657975_121000 lam_1751657975_144000 lam_1751657975_167000 lam_1751657975_19000 lam_1751657975_31000 lam_1751657975_54000 lam_1751657975_77000\r\nlam_1751657975_10000 lam_1751657975_122000 lam_1751657975_145000 lam_1751657975_168000 lam_1751657975_190000 lam_1751657975_32000 lam_1751657975_55000 lam_1751657975_78000\r\nlam_1751657975_100000 lam_1751657975_123000 lam_1751657975_146000 lam_1751657975_169000 lam_1751657975_191000 lam_1751657975_33000 lam_1751657975_56000 lam_1751657975_79000\r\nlam_1751657975_101000 lam_1751657975_124000 lam_1751657975_147000 lam_1751657975_17000 lam_1751657975_192000 lam_1751657975_34000 lam_1751657975_57000 lam_1751657975_8000\r\nlam_1751657975_102000 lam_1751657975_125000 lam_1751657975_148000 lam_1751657975_170000 lam_1751657975_193000 lam_1751657975_35000 lam_1751657975_58000 lam_1751657975_80000\r\nlam_1751657975_103000 lam_1751657975_126000 lam_1751657975_149000 lam_1751657975_171000 lam_1751657975_194000 lam_1751657975_36000 lam_1751657975_59000 lam_1751657975_81000\r\nlam_1751657975_104000 lam_1751657975_127000 lam_1751657975_15000 lam_1751657975_172000 lam_1751657975_195000 lam_1751657975_37000 lam_1751657975_6000 lam_1751657975_82000\r\nlam_1751657975_105000 lam_1751657975_128000 lam_1751657975_150000 lam_1751657975_173000 lam_1751657975_196000 lam_1751657975_38000 lam_1751657975_60000 lam_1751657975_83000\r\nlam_1751657975_106000 lam_1751657975_129000 lam_1751657975_151000 lam_1751657975_174000 lam_1751657975_197000 lam_1751657975_39000 lam_1751657975_61000 lam_1751657975_84000\r\nlam_1751657975_107000 lam_1751657975_13000 lam_1751657975_152000 lam_1751657975_175000 lam_1751657975_198000 lam_1751657975_4000 lam_1751657975_62000 lam_1751657975_85000\r\nlam_1751657975_108000 lam_1751657975_130000 lam_1751657975_153000 lam_1751657975_176000 lam_1751657975_199000 lam_1751657975_40000 lam_1751657975_63000 lam_1751657975_86000\r\nlam_1751657975_109000 lam_1751657975_131000 lam_1751657975_154000 lam_1751657975_177000 lam_1751657975_2000 lam_1751657975_41000 lam_1751657975_64000 lam_1751657975_87000\r\nlam_1751657975_11000 lam_1751657975_132000 lam_1751657975_155000 lam_1751657975_178000 lam_1751657975_20000 lam_1751657975_42000 lam_1751657975_65000 lam_1751657975_88000\r\nlam_1751657975_110000 lam_1751657975_133000 lam_1751657975_156000 lam_1751657975_179000 lam_1751657975_200000 lam_1751657975_43000 lam_1751657975_66000 lam_1751657975_89000\r\nlam_1751657975_111000 lam_1751657975_134000 lam_1751657975_157000 lam_1751657975_18000 
lam_1751657975_21000 lam_1751657975_44000 lam_1751657975_67000 lam_1751657975_9000\r\nlam_1751657975_112000 lam_1751657975_135000 lam_1751657975_158000 lam_1751657975_180000 lam_1751657975_22000 lam_1751657975_45000 lam_1751657975_68000 lam_1751657975_90000\r\nlam_1751657975_113000 lam_1751657975_136000 lam_1751657975_159000 lam_1751657975_181000 lam_1751657975_23000 lam_1751657975_46000 lam_1751657975_69000 lam_1751657975_91000\r\nlam_1751657975_114000 lam_1751657975_137000 lam_1751657975_16000 lam_1751657975_182000 lam_1751657975_24000 lam_1751657975_47000 lam_1751657975_7000 lam_1751657975_92000\r\nlam_1751657975_115000 lam_1751657975_138000 lam_1751657975_160000 lam_1751657975_183000 lam_1751657975_25000 lam_1751657975_48000 lam_1751657975_70000 lam_1751657975_93000\r\nlam_1751657975_116000 lam_1751657975_139000 lam_1751657975_161000 lam_1751657975_184000 lam_1751657975_26000 lam_1751657975_49000 lam_1751657975_71000 lam_1751657975_94000\r\nlam_1751657975_117000 lam_1751657975_14000 lam_1751657975_162000 lam_1751657975_185000 lam_1751657975_27000 lam_1751657975_5000 lam_1751657975_72000 lam_1751657975_95000\r\nlam_1751657975_118000 lam_1751657975_140000 lam_1751657975_163000 lam_1751657975_186000 lam_1751657975_28000 lam_1751657975_50000 lam_1751657975_73000 lam_1751657975_96000\r\nlam_1751657975_119000 lam_1751657975_141000 lam_1751657975_164000 lam_1751657975_187000 lam_1751657975_29000 lam_1751657975_51000 lam_1751657975_74000 lam_1751657975_97000\r\nlam_1751657975_12000 lam_1751657975_142000 lam_1751657975_165000 lam_1751657975_188000 lam_1751657975_3000 lam_1751657975_52000 lam_1751657975_75000 lam_1751657975_98000\r\nlam_1751657975_120000 lam_1751657975_143000 lam_1751657975_166000 lam_1751657975_189000 lam_1751657975_30000 lam_1751657975_53000 lam_1751657975_76000 lam_1751657975_99000\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_lam_action_space_scaling_8/3318550]633;D;0",,terminal_output +29,105255,"TERMINAL",0,0,"du -h .",,terminal_command +30,105305,"TERMINAL",0,0,"]633;E;2025-07-07 19:28:25 du -h .;24571f6f-7d62-4371-8977-e42b5a81d394]633;C52M\t./lam_1751657975_25000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_25000/ocdbt.process_4\r\n",,terminal_output +31,105555,"TERMINAL",0,0,"1.8M\t./lam_1751657975_25000/array_metadatas\r\n52M\t./lam_1751657975_25000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_25000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_25000/d\r\n52M\t./lam_1751657975_25000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_25000/ocdbt.process_5\r\n",,terminal_output +32,105656,"TERMINAL",0,0,"52M\t./lam_1751657975_25000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_25000/ocdbt.process_0\r\n",,terminal_output +33,105780,"TERMINAL",0,0,"53M\t./lam_1751657975_25000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_25000/ocdbt.process_7\r\n52M\t./lam_1751657975_25000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_25000/ocdbt.process_6\r\n52M\t./lam_1751657975_25000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_25000/ocdbt.process_1\r\n52M\t./lam_1751657975_25000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_25000/ocdbt.process_2\r\n420M\t./lam_1751657975_25000\r\n52M\t./lam_1751657975_181000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_181000/ocdbt.process_4\r\n",,terminal_output 
+34,105909,"TERMINAL",0,0,"1.8M\t./lam_1751657975_181000/array_metadatas\r\n52M\t./lam_1751657975_181000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_181000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_181000/d\r\n52M\t./lam_1751657975_181000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_181000/ocdbt.process_5\r\n",,terminal_output +35,106195,"TERMINAL",0,0,"52M\t./lam_1751657975_181000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_181000/ocdbt.process_0\r\n52M\t./lam_1751657975_181000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_181000/ocdbt.process_7\r\n52M\t./lam_1751657975_181000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_181000/ocdbt.process_6\r\n52M\t./lam_1751657975_181000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_181000/ocdbt.process_1\r\n52M\t./lam_1751657975_181000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_181000/ocdbt.process_2\r\n419M\t./lam_1751657975_181000\r\n52M\t./lam_1751657975_116000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_116000/ocdbt.process_4\r\n",,terminal_output +36,106457,"TERMINAL",0,0,"1.8M\t./lam_1751657975_116000/array_metadatas\r\n52M\t./lam_1751657975_116000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_116000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_116000/d\r\n52M\t./lam_1751657975_116000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_116000/ocdbt.process_5\r\n",,terminal_output +37,106561,"TERMINAL",0,0,"52M\t./lam_1751657975_116000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_116000/ocdbt.process_0\r\n52M\t./lam_1751657975_116000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_116000/ocdbt.process_7\r\n52M\t./lam_1751657975_116000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_116000/ocdbt.process_6\r\n52M\t./lam_1751657975_116000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_116000/ocdbt.process_1\r\n52M\t./lam_1751657975_116000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_116000/ocdbt.process_2\r\n419M\t./lam_1751657975_116000\r\n52M\t./lam_1751657975_196000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_196000/ocdbt.process_4\r\n",,terminal_output +38,106807,"TERMINAL",0,0,"1.8M\t./lam_1751657975_196000/array_metadatas\r\n52M\t./lam_1751657975_196000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_196000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_196000/d\r\n",,terminal_output +39,107382,"TERMINAL",0,0,"52M\t./lam_1751657975_196000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_196000/ocdbt.process_5\r\n",,terminal_output +40,107466,"TERMINAL",0,0,"52M\t./lam_1751657975_196000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_196000/ocdbt.process_0\r\n52M\t./lam_1751657975_196000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_196000/ocdbt.process_7\r\n52M\t./lam_1751657975_196000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_196000/ocdbt.process_6\r\n52M\t./lam_1751657975_196000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_196000/ocdbt.process_1\r\n52M\t./lam_1751657975_196000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_196000/ocdbt.process_2\r\n419M\t./lam_1751657975_196000\r\n53M\t./lam_1751657975_15000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_15000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_15000/array_metadatas\r\n53M\t./lam_1751657975_15000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_15000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_15000/d\r\n53M\t./lam_1751657975_15000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_15000/ocdbt.process_5\r\n",,terminal_output +41,107542,"TERMINAL",0,0,"53M\t./lam_1751657975_15000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_15000/ocdbt.process_0\r\n53M\t./lam_1751657975_15000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_15000/ocdbt.process_7\r\n",,terminal_output 
+42,107645,"TERMINAL",0,0,"52M\t./lam_1751657975_15000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_15000/ocdbt.process_6\r\n52M\t./lam_1751657975_15000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_15000/ocdbt.process_1\r\n",,terminal_output +43,107784,"TERMINAL",0,0,"52M\t./lam_1751657975_15000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_15000/ocdbt.process_2\r\n421M\t./lam_1751657975_15000\r\n52M\t./lam_1751657975_48000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_48000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_48000/array_metadatas\r\n53M\t./lam_1751657975_48000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_48000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_48000/d\r\n",,terminal_output +44,107832,"TERMINAL",0,0,"52M\t./lam_1751657975_48000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_48000/ocdbt.process_5\r\n",,terminal_output +45,108257,"TERMINAL",0,0,"52M\t./lam_1751657975_48000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_48000/ocdbt.process_0\r\n52M\t./lam_1751657975_48000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_48000/ocdbt.process_7\r\n52M\t./lam_1751657975_48000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_48000/ocdbt.process_6\r\n52M\t./lam_1751657975_48000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_48000/ocdbt.process_1\r\n52M\t./lam_1751657975_48000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_48000/ocdbt.process_2\r\n420M\t./lam_1751657975_48000\r\n53M\t./lam_1751657975_76000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_76000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_76000/array_metadatas\r\n53M\t./lam_1751657975_76000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_76000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_76000/d\r\n",,terminal_output +46,108376,"TERMINAL",0,0,"52M\t./lam_1751657975_76000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_76000/ocdbt.process_5\r\n",,terminal_output +47,108756,"TERMINAL",0,0,"53M\t./lam_1751657975_76000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_76000/ocdbt.process_0\r\n52M\t./lam_1751657975_76000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_76000/ocdbt.process_7\r\n52M\t./lam_1751657975_76000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_76000/ocdbt.process_6\r\n53M\t./lam_1751657975_76000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_76000/ocdbt.process_1\r\n52M\t./lam_1751657975_76000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_76000/ocdbt.process_2\r\n421M\t./lam_1751657975_76000\r\n53M\t./lam_1751657975_39000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_39000/ocdbt.process_4\r\n",,terminal_output +48,108959,"TERMINAL",0,0,"1.8M\t./lam_1751657975_39000/array_metadatas\r\n53M\t./lam_1751657975_39000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_39000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_39000/d\r\n",,terminal_output +49,109082,"TERMINAL",0,0,"52M\t./lam_1751657975_39000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_39000/ocdbt.process_5\r\n",,terminal_output +50,109506,"TERMINAL",0,0,"53M\t./lam_1751657975_39000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_39000/ocdbt.process_0\r\n53M\t./lam_1751657975_39000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_39000/ocdbt.process_7\r\n52M\t./lam_1751657975_39000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_39000/ocdbt.process_6\r\n53M\t./lam_1751657975_39000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_39000/ocdbt.process_1\r\n53M\t./lam_1751657975_39000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_39000/ocdbt.process_2\r\n421M\t./lam_1751657975_39000\r\n52M\t./lam_1751657975_137000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_137000/ocdbt.process_4\r\n",,terminal_output 
+51,109631,"TERMINAL",0,0,"1.8M\t./lam_1751657975_137000/array_metadatas\r\n53M\t./lam_1751657975_137000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_137000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_137000/d\r\n52M\t./lam_1751657975_137000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_137000/ocdbt.process_5\r\n",,terminal_output +52,109807,"TERMINAL",0,0,"52M\t./lam_1751657975_137000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_137000/ocdbt.process_0\r\n52M\t./lam_1751657975_137000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_137000/ocdbt.process_7\r\n52M\t./lam_1751657975_137000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_137000/ocdbt.process_6\r\n52M\t./lam_1751657975_137000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_137000/ocdbt.process_1\r\n52M\t./lam_1751657975_137000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_137000/ocdbt.process_2\r\n420M\t./lam_1751657975_137000\r\n52M\t./lam_1751657975_127000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_127000/ocdbt.process_4\r\n",,terminal_output +53,109996,"TERMINAL",0,0,"1.8M\t./lam_1751657975_127000/array_metadatas\r\n52M\t./lam_1751657975_127000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_127000/ocdbt.process_3\r\n",,terminal_output +54,110106,"TERMINAL",0,0,"1.5M\t./lam_1751657975_127000/d\r\n52M\t./lam_1751657975_127000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_127000/ocdbt.process_5\r\n",,terminal_output +55,110254,"TERMINAL",0,0,"52M\t./lam_1751657975_127000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_127000/ocdbt.process_0\r\n52M\t./lam_1751657975_127000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_127000/ocdbt.process_7\r\n52M\t./lam_1751657975_127000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_127000/ocdbt.process_6\r\n52M\t./lam_1751657975_127000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_127000/ocdbt.process_1\r\n52M\t./lam_1751657975_127000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_127000/ocdbt.process_2\r\n420M\t./lam_1751657975_127000\r\n53M\t./lam_1751657975_18000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_18000/ocdbt.process_4\r\n",,terminal_output +56,110373,"TERMINAL",0,0,"1.8M\t./lam_1751657975_18000/array_metadatas\r\n53M\t./lam_1751657975_18000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_18000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_18000/d\r\n",,terminal_output +57,110473,"TERMINAL",0,0,"53M\t./lam_1751657975_18000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_18000/ocdbt.process_5\r\n52M\t./lam_1751657975_18000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_18000/ocdbt.process_0\r\n53M\t./lam_1751657975_18000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_18000/ocdbt.process_7\r\n52M\t./lam_1751657975_18000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_18000/ocdbt.process_6\r\n52M\t./lam_1751657975_18000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_18000/ocdbt.process_1\r\n52M\t./lam_1751657975_18000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_18000/ocdbt.process_2\r\n421M\t./lam_1751657975_18000\r\n52M\t./lam_1751657975_129000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_129000/ocdbt.process_4\r\n",,terminal_output +58,110588,"TERMINAL",0,0,"1.8M\t./lam_1751657975_129000/array_metadatas\r\n52M\t./lam_1751657975_129000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_129000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_129000/d\r\n",,terminal_output +59,110649,"TERMINAL",0,0,"52M\t./lam_1751657975_129000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_129000/ocdbt.process_5\r\n",,terminal_output 
+60,110959,"TERMINAL",0,0,"52M\t./lam_1751657975_129000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_129000/ocdbt.process_0\r\n52M\t./lam_1751657975_129000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_129000/ocdbt.process_7\r\n52M\t./lam_1751657975_129000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_129000/ocdbt.process_6\r\n52M\t./lam_1751657975_129000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_129000/ocdbt.process_1\r\n52M\t./lam_1751657975_129000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_129000/ocdbt.process_2\r\n420M\t./lam_1751657975_129000\r\n52M\t./lam_1751657975_155000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_155000/ocdbt.process_4\r\n",,terminal_output +61,111152,"TERMINAL",0,0,"1.8M\t./lam_1751657975_155000/array_metadatas\r\n53M\t./lam_1751657975_155000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_155000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_155000/d\r\n",,terminal_output +62,111315,"TERMINAL",0,0,"52M\t./lam_1751657975_155000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_155000/ocdbt.process_5\r\n52M\t./lam_1751657975_155000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_155000/ocdbt.process_0\r\n52M\t./lam_1751657975_155000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_155000/ocdbt.process_7\r\n52M\t./lam_1751657975_155000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_155000/ocdbt.process_6\r\n52M\t./lam_1751657975_155000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_155000/ocdbt.process_1\r\n52M\t./lam_1751657975_155000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_155000/ocdbt.process_2\r\n420M\t./lam_1751657975_155000\r\n53M\t./lam_1751657975_51000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_51000/ocdbt.process_4\r\n",,terminal_output +63,111441,"TERMINAL",0,0,"1.8M\t./lam_1751657975_51000/array_metadatas\r\n53M\t./lam_1751657975_51000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_51000/ocdbt.process_3\r\n",,terminal_output +64,111588,"TERMINAL",0,0,"1.5M\t./lam_1751657975_51000/d\r\n",,terminal_output +65,111641,"TERMINAL",0,0,"52M\t./lam_1751657975_51000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_51000/ocdbt.process_5\r\n",,terminal_output +66,111773,"TERMINAL",0,0,"53M\t./lam_1751657975_51000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_51000/ocdbt.process_0\r\n53M\t./lam_1751657975_51000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_51000/ocdbt.process_7\r\n52M\t./lam_1751657975_51000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_51000/ocdbt.process_6\r\n52M\t./lam_1751657975_51000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_51000/ocdbt.process_1\r\n53M\t./lam_1751657975_51000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_51000/ocdbt.process_2\r\n421M\t./lam_1751657975_51000\r\n52M\t./lam_1751657975_158000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_158000/ocdbt.process_4\r\n",,terminal_output +67,112047,"TERMINAL",0,0,"1.8M\t./lam_1751657975_158000/array_metadatas\r\n53M\t./lam_1751657975_158000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_158000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_158000/d\r\n53M\t./lam_1751657975_158000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_158000/ocdbt.process_5\r\n",,terminal_output 
+68,112302,"TERMINAL",0,0,"52M\t./lam_1751657975_158000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_158000/ocdbt.process_0\r\n52M\t./lam_1751657975_158000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_158000/ocdbt.process_7\r\n52M\t./lam_1751657975_158000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_158000/ocdbt.process_6\r\n52M\t./lam_1751657975_158000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_158000/ocdbt.process_1\r\n52M\t./lam_1751657975_158000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_158000/ocdbt.process_2\r\n420M\t./lam_1751657975_158000\r\n52M\t./lam_1751657975_134000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_134000/ocdbt.process_4\r\n",,terminal_output +69,112387,"TERMINAL",0,0,"1.8M\t./lam_1751657975_134000/array_metadatas\r\n52M\t./lam_1751657975_134000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_134000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_134000/d\r\n",,terminal_output +70,112530,"TERMINAL",0,0,"52M\t./lam_1751657975_134000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_134000/ocdbt.process_5\r\n52M\t./lam_1751657975_134000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_134000/ocdbt.process_0\r\n52M\t./lam_1751657975_134000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_134000/ocdbt.process_7\r\n52M\t./lam_1751657975_134000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_134000/ocdbt.process_6\r\n52M\t./lam_1751657975_134000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_134000/ocdbt.process_1\r\n52M\t./lam_1751657975_134000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_134000/ocdbt.process_2\r\n418M\t./lam_1751657975_134000\r\n52M\t./lam_1751657975_169000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_169000/ocdbt.process_4\r\n",,terminal_output +71,112597,"TERMINAL",0,0,"1.8M\t./lam_1751657975_169000/array_metadatas\r\n52M\t./lam_1751657975_169000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_169000/ocdbt.process_3\r\n",,terminal_output +72,112704,"TERMINAL",0,0,"1.5M\t./lam_1751657975_169000/d\r\n52M\t./lam_1751657975_169000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_169000/ocdbt.process_5\r\n",,terminal_output +73,112954,"TERMINAL",0,0,"53M\t./lam_1751657975_169000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_169000/ocdbt.process_0\r\n52M\t./lam_1751657975_169000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_169000/ocdbt.process_7\r\n53M\t./lam_1751657975_169000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_169000/ocdbt.process_6\r\n52M\t./lam_1751657975_169000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_169000/ocdbt.process_1\r\n53M\t./lam_1751657975_169000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_169000/ocdbt.process_2\r\n420M\t./lam_1751657975_169000\r\n52M\t./lam_1751657975_144000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_144000/ocdbt.process_4\r\n",,terminal_output +74,113182,"TERMINAL",0,0,"1.8M\t./lam_1751657975_144000/array_metadatas\r\n52M\t./lam_1751657975_144000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_144000/ocdbt.process_3\r\n",,terminal_output +75,113279,"TERMINAL",0,0,"1.5M\t./lam_1751657975_144000/d\r\n",,terminal_output 
+76,113463,"TERMINAL",0,0,"52M\t./lam_1751657975_144000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_144000/ocdbt.process_5\r\n52M\t./lam_1751657975_144000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_144000/ocdbt.process_0\r\n52M\t./lam_1751657975_144000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_144000/ocdbt.process_7\r\n52M\t./lam_1751657975_144000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_144000/ocdbt.process_6\r\n52M\t./lam_1751657975_144000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_144000/ocdbt.process_1\r\n52M\t./lam_1751657975_144000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_144000/ocdbt.process_2\r\n419M\t./lam_1751657975_144000\r\n52M\t./lam_1751657975_135000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_135000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_135000/array_metadatas\r\n52M\t./lam_1751657975_135000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_135000/ocdbt.process_3\r\n",,terminal_output +77,113533,"TERMINAL",0,0,"1.5M\t./lam_1751657975_135000/d\r\n",,terminal_output +78,113649,"TERMINAL",0,0,"52M\t./lam_1751657975_135000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_135000/ocdbt.process_5\r\n",,terminal_output +79,113974,"TERMINAL",0,0,"52M\t./lam_1751657975_135000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_135000/ocdbt.process_0\r\n52M\t./lam_1751657975_135000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_135000/ocdbt.process_7\r\n52M\t./lam_1751657975_135000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_135000/ocdbt.process_6\r\n",,terminal_output +80,114084,"TERMINAL",0,0,"52M\t./lam_1751657975_135000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_135000/ocdbt.process_1\r\n52M\t./lam_1751657975_135000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_135000/ocdbt.process_2\r\n419M\t./lam_1751657975_135000\r\n52M\t./lam_1751657975_145000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_145000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_145000/array_metadatas\r\n52M\t./lam_1751657975_145000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_145000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_145000/d\r\n52M\t./lam_1751657975_145000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_145000/ocdbt.process_5\r\n",,terminal_output +81,114383,"TERMINAL",0,0,"53M\t./lam_1751657975_145000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_145000/ocdbt.process_0\r\n52M\t./lam_1751657975_145000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_145000/ocdbt.process_7\r\n52M\t./lam_1751657975_145000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_145000/ocdbt.process_6\r\n52M\t./lam_1751657975_145000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_145000/ocdbt.process_1\r\n52M\t./lam_1751657975_145000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_145000/ocdbt.process_2\r\n420M\t./lam_1751657975_145000\r\n53M\t./lam_1751657975_57000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_57000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_57000/array_metadatas\r\n",,terminal_output +82,114500,"TERMINAL",0,0,"53M\t./lam_1751657975_57000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_57000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_57000/d\r\n52M\t./lam_1751657975_57000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_57000/ocdbt.process_5\r\n",,terminal_output +83,114585,"TERMINAL",0,0,"52M\t./lam_1751657975_57000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_57000/ocdbt.process_0\r\n",,terminal_output 
+84,114666,"TERMINAL",0,0,"53M\t./lam_1751657975_57000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_57000/ocdbt.process_7\r\n52M\t./lam_1751657975_57000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_57000/ocdbt.process_6\r\n52M\t./lam_1751657975_57000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_57000/ocdbt.process_1\r\n52M\t./lam_1751657975_57000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_57000/ocdbt.process_2\r\n421M\t./lam_1751657975_57000\r\n52M\t./lam_1751657975_78000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_78000/ocdbt.process_4\r\n",,terminal_output +85,114765,"TERMINAL",0,0,"1.8M\t./lam_1751657975_78000/array_metadatas\r\n53M\t./lam_1751657975_78000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_78000/ocdbt.process_3\r\n",,terminal_output +86,114818,"TERMINAL",0,0,"1.5M\t./lam_1751657975_78000/d\r\n52M\t./lam_1751657975_78000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_78000/ocdbt.process_5\r\n",,terminal_output +87,115038,"TERMINAL",0,0,"52M\t./lam_1751657975_78000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_78000/ocdbt.process_0\r\n52M\t./lam_1751657975_78000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_78000/ocdbt.process_7\r\n52M\t./lam_1751657975_78000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_78000/ocdbt.process_6\r\n52M\t./lam_1751657975_78000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_78000/ocdbt.process_1\r\n52M\t./lam_1751657975_78000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_78000/ocdbt.process_2\r\n420M\t./lam_1751657975_78000\r\n52M\t./lam_1751657975_87000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_87000/ocdbt.process_4\r\n",,terminal_output +88,115216,"TERMINAL",0,0,"1.8M\t./lam_1751657975_87000/array_metadatas\r\n53M\t./lam_1751657975_87000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_87000/ocdbt.process_3\r\n",,terminal_output +89,115310,"TERMINAL",0,0,"1.5M\t./lam_1751657975_87000/d\r\n52M\t./lam_1751657975_87000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_87000/ocdbt.process_5\r\n",,terminal_output +90,115534,"TERMINAL",0,0,"52M\t./lam_1751657975_87000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_87000/ocdbt.process_0\r\n52M\t./lam_1751657975_87000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_87000/ocdbt.process_7\r\n52M\t./lam_1751657975_87000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_87000/ocdbt.process_6\r\n52M\t./lam_1751657975_87000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_87000/ocdbt.process_1\r\n52M\t./lam_1751657975_87000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_87000/ocdbt.process_2\r\n420M\t./lam_1751657975_87000\r\n52M\t./lam_1751657975_85000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_85000/ocdbt.process_4\r\n",,terminal_output +91,115749,"TERMINAL",0,0,"1.8M\t./lam_1751657975_85000/array_metadatas\r\n53M\t./lam_1751657975_85000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_85000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_85000/d\r\n",,terminal_output +92,115827,"TERMINAL",0,0,"52M\t./lam_1751657975_85000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_85000/ocdbt.process_5\r\n",,terminal_output 
+93,116102,"TERMINAL",0,0,"52M\t./lam_1751657975_85000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_85000/ocdbt.process_0\r\n52M\t./lam_1751657975_85000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_85000/ocdbt.process_7\r\n52M\t./lam_1751657975_85000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_85000/ocdbt.process_6\r\n52M\t./lam_1751657975_85000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_85000/ocdbt.process_1\r\n52M\t./lam_1751657975_85000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_85000/ocdbt.process_2\r\n420M\t./lam_1751657975_85000\r\n53M\t./lam_1751657975_44000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_44000/ocdbt.process_4\r\n",,terminal_output +94,116336,"TERMINAL",0,0,"1.8M\t./lam_1751657975_44000/array_metadatas\r\n",,terminal_output +95,116427,"TERMINAL",0,0,"53M\t./lam_1751657975_44000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_44000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_44000/d\r\n",,terminal_output +96,116505,"TERMINAL",0,0,"52M\t./lam_1751657975_44000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_44000/ocdbt.process_5\r\n",,terminal_output +97,116689,"TERMINAL",0,0,"52M\t./lam_1751657975_44000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_44000/ocdbt.process_0\r\n",,terminal_output +98,116808,"TERMINAL",0,0,"53M\t./lam_1751657975_44000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_44000/ocdbt.process_7\r\n52M\t./lam_1751657975_44000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_44000/ocdbt.process_6\r\n52M\t./lam_1751657975_44000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_44000/ocdbt.process_1\r\n53M\t./lam_1751657975_44000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_44000/ocdbt.process_2\r\n421M\t./lam_1751657975_44000\r\n",,terminal_output +99,116860,"TERMINAL",0,0,"53M\t./lam_1751657975_160000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_160000/ocdbt.process_4\r\n",,terminal_output +100,117160,"TERMINAL",0,0,"1.8M\t./lam_1751657975_160000/array_metadatas\r\n52M\t./lam_1751657975_160000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_160000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_160000/d\r\n52M\t./lam_1751657975_160000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_160000/ocdbt.process_5\r\n",,terminal_output +101,117409,"TERMINAL",0,0,"52M\t./lam_1751657975_160000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_160000/ocdbt.process_0\r\n52M\t./lam_1751657975_160000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_160000/ocdbt.process_7\r\n52M\t./lam_1751657975_160000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_160000/ocdbt.process_6\r\n52M\t./lam_1751657975_160000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_160000/ocdbt.process_1\r\n52M\t./lam_1751657975_160000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_160000/ocdbt.process_2\r\n419M\t./lam_1751657975_160000\r\n52M\t./lam_1751657975_118000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_118000/ocdbt.process_4\r\n",,terminal_output +102,117549,"TERMINAL",0,0,"1.8M\t./lam_1751657975_118000/array_metadatas\r\n52M\t./lam_1751657975_118000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_118000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_118000/d\r\n",,terminal_output 
+103,117703,"TERMINAL",0,0,"52M\t./lam_1751657975_118000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_118000/ocdbt.process_5\r\n52M\t./lam_1751657975_118000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_118000/ocdbt.process_0\r\n52M\t./lam_1751657975_118000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_118000/ocdbt.process_7\r\n52M\t./lam_1751657975_118000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_118000/ocdbt.process_6\r\n52M\t./lam_1751657975_118000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_118000/ocdbt.process_1\r\n52M\t./lam_1751657975_118000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_118000/ocdbt.process_2\r\n419M\t./lam_1751657975_118000\r\n52M\t./lam_1751657975_180000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_180000/ocdbt.process_4\r\n",,terminal_output +104,117888,"TERMINAL",0,0,"1.8M\t./lam_1751657975_180000/array_metadatas\r\n53M\t./lam_1751657975_180000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_180000/ocdbt.process_3\r\n",,terminal_output +105,117948,"TERMINAL",0,0,"1.5M\t./lam_1751657975_180000/d\r\n",,terminal_output +106,118067,"TERMINAL",0,0,"52M\t./lam_1751657975_180000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_180000/ocdbt.process_5\r\n",,terminal_output +107,118342,"TERMINAL",0,0,"52M\t./lam_1751657975_180000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_180000/ocdbt.process_0\r\n52M\t./lam_1751657975_180000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_180000/ocdbt.process_7\r\n52M\t./lam_1751657975_180000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_180000/ocdbt.process_6\r\n53M\t./lam_1751657975_180000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_180000/ocdbt.process_1\r\n52M\t./lam_1751657975_180000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_180000/ocdbt.process_2\r\n420M\t./lam_1751657975_180000\r\n52M\t./lam_1751657975_73000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_73000/ocdbt.process_4\r\n",,terminal_output +108,118576,"TERMINAL",0,0,"1.8M\t./lam_1751657975_73000/array_metadatas\r\n",,terminal_output +109,118713,"TERMINAL",0,0,"53M\t./lam_1751657975_73000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_73000/ocdbt.process_3\r\n",,terminal_output +110,118792,"TERMINAL",0,0,"1.5M\t./lam_1751657975_73000/d\r\n52M\t./lam_1751657975_73000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_73000/ocdbt.process_5\r\n",,terminal_output +111,119196,"TERMINAL",0,0,"52M\t./lam_1751657975_73000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_73000/ocdbt.process_0\r\n52M\t./lam_1751657975_73000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_73000/ocdbt.process_7\r\n52M\t./lam_1751657975_73000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_73000/ocdbt.process_6\r\n52M\t./lam_1751657975_73000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_73000/ocdbt.process_1\r\n52M\t./lam_1751657975_73000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_73000/ocdbt.process_2\r\n420M\t./lam_1751657975_73000\r\n52M\t./lam_1751657975_136000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_136000/ocdbt.process_4\r\n",,terminal_output +112,119273,"TERMINAL",0,0,"1.8M\t./lam_1751657975_136000/array_metadatas\r\n52M\t./lam_1751657975_136000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_136000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_136000/d\r\n52M\t./lam_1751657975_136000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_136000/ocdbt.process_5\r\n",,terminal_output 
+113,119481,"TERMINAL",0,0,"52M\t./lam_1751657975_136000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_136000/ocdbt.process_0\r\n52M\t./lam_1751657975_136000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_136000/ocdbt.process_7\r\n52M\t./lam_1751657975_136000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_136000/ocdbt.process_6\r\n52M\t./lam_1751657975_136000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_136000/ocdbt.process_1\r\n52M\t./lam_1751657975_136000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_136000/ocdbt.process_2\r\n418M\t./lam_1751657975_136000\r\n53M\t./lam_1751657975_148000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_148000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_148000/array_metadatas\r\n52M\t./lam_1751657975_148000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_148000/ocdbt.process_3\r\n",,terminal_output +114,119559,"TERMINAL",0,0,"1.5M\t./lam_1751657975_148000/d\r\n",,terminal_output +115,119669,"TERMINAL",0,0,"52M\t./lam_1751657975_148000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_148000/ocdbt.process_5\r\n",,terminal_output +116,119816,"TERMINAL",0,0,"52M\t./lam_1751657975_148000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_148000/ocdbt.process_0\r\n52M\t./lam_1751657975_148000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_148000/ocdbt.process_7\r\n52M\t./lam_1751657975_148000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_148000/ocdbt.process_6\r\n52M\t./lam_1751657975_148000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_148000/ocdbt.process_1\r\n52M\t./lam_1751657975_148000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_148000/ocdbt.process_2\r\n420M\t./lam_1751657975_148000\r\n53M\t./lam_1751657975_12000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_12000/ocdbt.process_4\r\n",,terminal_output +117,119937,"TERMINAL",0,0,"1.8M\t./lam_1751657975_12000/array_metadatas\r\n53M\t./lam_1751657975_12000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_12000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_12000/d\r\n",,terminal_output +118,120032,"TERMINAL",0,0,"53M\t./lam_1751657975_12000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_12000/ocdbt.process_5\r\n",,terminal_output +119,120183,"TERMINAL",0,0,"52M\t./lam_1751657975_12000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_12000/ocdbt.process_0\r\n53M\t./lam_1751657975_12000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_12000/ocdbt.process_7\r\n52M\t./lam_1751657975_12000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_12000/ocdbt.process_6\r\n53M\t./lam_1751657975_12000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_12000/ocdbt.process_1\r\n53M\t./lam_1751657975_12000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_12000/ocdbt.process_2\r\n422M\t./lam_1751657975_12000\r\n52M\t./lam_1751657975_28000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_28000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_28000/array_metadatas\r\n",,terminal_output +120,120292,"TERMINAL",0,0,"53M\t./lam_1751657975_28000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_28000/ocdbt.process_3\r\n",,terminal_output +121,120409,"TERMINAL",0,0,"1.5M\t./lam_1751657975_28000/d\r\n52M\t./lam_1751657975_28000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_28000/ocdbt.process_5\r\n",,terminal_output 
+122,120579,"TERMINAL",0,0,"52M\t./lam_1751657975_28000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_28000/ocdbt.process_0\r\n52M\t./lam_1751657975_28000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_28000/ocdbt.process_7\r\n52M\t./lam_1751657975_28000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_28000/ocdbt.process_6\r\n52M\t./lam_1751657975_28000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_28000/ocdbt.process_1\r\n52M\t./lam_1751657975_28000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_28000/ocdbt.process_2\r\n420M\t./lam_1751657975_28000\r\n53M\t./lam_1751657975_31000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_31000/ocdbt.process_4\r\n",,terminal_output +123,120639,"TERMINAL",0,0,"1.8M\t./lam_1751657975_31000/array_metadatas\r\n",,terminal_output +124,120794,"TERMINAL",0,0,"53M\t./lam_1751657975_31000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_31000/ocdbt.process_3\r\n",,terminal_output +125,120908,"TERMINAL",0,0,"1.5M\t./lam_1751657975_31000/d\r\n53M\t./lam_1751657975_31000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_31000/ocdbt.process_5\r\n",,terminal_output +126,121018,"TERMINAL",0,0,"52M\t./lam_1751657975_31000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_31000/ocdbt.process_0\r\n52M\t./lam_1751657975_31000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_31000/ocdbt.process_7\r\n52M\t./lam_1751657975_31000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_31000/ocdbt.process_6\r\n52M\t./lam_1751657975_31000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_31000/ocdbt.process_1\r\n52M\t./lam_1751657975_31000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_31000/ocdbt.process_2\r\n421M\t./lam_1751657975_31000\r\n52M\t./lam_1751657975_159000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_159000/ocdbt.process_4\r\n",,terminal_output +127,121127,"TERMINAL",0,0,"1.8M\t./lam_1751657975_159000/array_metadatas\r\n52M\t./lam_1751657975_159000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_159000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_159000/d\r\n",,terminal_output +128,121248,"TERMINAL",0,0,"52M\t./lam_1751657975_159000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_159000/ocdbt.process_5\r\n",,terminal_output +129,121409,"TERMINAL",0,0,"52M\t./lam_1751657975_159000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_159000/ocdbt.process_0\r\n",,terminal_output +130,121689,"TERMINAL",0,0,"53M\t./lam_1751657975_159000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_159000/ocdbt.process_7\r\n52M\t./lam_1751657975_159000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_159000/ocdbt.process_6\r\n52M\t./lam_1751657975_159000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_159000/ocdbt.process_1\r\n52M\t./lam_1751657975_159000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_159000/ocdbt.process_2\r\n419M\t./lam_1751657975_159000\r\n52M\t./lam_1751657975_111000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_111000/ocdbt.process_4\r\n",,terminal_output +131,121912,"TERMINAL",0,0,"1.8M\t./lam_1751657975_111000/array_metadatas\r\n52M\t./lam_1751657975_111000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_111000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_111000/d\r\n",,terminal_output 
+132,122057,"TERMINAL",0,0,"52M\t./lam_1751657975_111000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_111000/ocdbt.process_5\r\n52M\t./lam_1751657975_111000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_111000/ocdbt.process_0\r\n53M\t./lam_1751657975_111000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_111000/ocdbt.process_7\r\n52M\t./lam_1751657975_111000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_111000/ocdbt.process_6\r\n52M\t./lam_1751657975_111000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_111000/ocdbt.process_1\r\n52M\t./lam_1751657975_111000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_111000/ocdbt.process_2\r\n420M\t./lam_1751657975_111000\r\n52M\t./lam_1751657975_33000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_33000/ocdbt.process_4\r\n",,terminal_output +133,122170,"TERMINAL",0,0,"1.8M\t./lam_1751657975_33000/array_metadatas\r\n52M\t./lam_1751657975_33000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_33000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_33000/d\r\n",,terminal_output +134,122303,"TERMINAL",0,0,"52M\t./lam_1751657975_33000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_33000/ocdbt.process_5\r\n",,terminal_output +135,122360,"TERMINAL",0,0,"53M\t./lam_1751657975_33000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_33000/ocdbt.process_0\r\n",,terminal_output +136,122644,"TERMINAL",0,0,"54M\t./lam_1751657975_33000/ocdbt.process_7/d\r\n54M\t./lam_1751657975_33000/ocdbt.process_7\r\n53M\t./lam_1751657975_33000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_33000/ocdbt.process_6\r\n52M\t./lam_1751657975_33000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_33000/ocdbt.process_1\r\n52M\t./lam_1751657975_33000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_33000/ocdbt.process_2\r\n421M\t./lam_1751657975_33000\r\n52M\t./lam_1751657975_58000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_58000/ocdbt.process_4\r\n",,terminal_output +137,122740,"TERMINAL",0,0,"1.8M\t./lam_1751657975_58000/array_metadatas\r\n53M\t./lam_1751657975_58000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_58000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_58000/d\r\n",,terminal_output +138,122812,"TERMINAL",0,0,"52M\t./lam_1751657975_58000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_58000/ocdbt.process_5\r\n",,terminal_output +139,122916,"TERMINAL",0,0,"52M\t./lam_1751657975_58000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_58000/ocdbt.process_0\r\n52M\t./lam_1751657975_58000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_58000/ocdbt.process_7\r\n52M\t./lam_1751657975_58000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_58000/ocdbt.process_6\r\n52M\t./lam_1751657975_58000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_58000/ocdbt.process_1\r\n52M\t./lam_1751657975_58000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_58000/ocdbt.process_2\r\n420M\t./lam_1751657975_58000\r\n52M\t./lam_1751657975_184000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_184000/ocdbt.process_4\r\n",,terminal_output +140,123080,"TERMINAL",0,0,"1.8M\t./lam_1751657975_184000/array_metadatas\r\n52M\t./lam_1751657975_184000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_184000/ocdbt.process_3\r\n",,terminal_output +141,123149,"TERMINAL",0,0,"1.5M\t./lam_1751657975_184000/d\r\n",,terminal_output +142,123278,"TERMINAL",0,0,"52M\t./lam_1751657975_184000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_184000/ocdbt.process_5\r\n",,terminal_output 
+143,123402,"TERMINAL",0,0,"52M\t./lam_1751657975_184000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_184000/ocdbt.process_0\r\n52M\t./lam_1751657975_184000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_184000/ocdbt.process_7\r\n52M\t./lam_1751657975_184000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_184000/ocdbt.process_6\r\n52M\t./lam_1751657975_184000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_184000/ocdbt.process_1\r\n52M\t./lam_1751657975_184000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_184000/ocdbt.process_2\r\n419M\t./lam_1751657975_184000\r\n52M\t./lam_1751657975_183000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_183000/ocdbt.process_4\r\n",,terminal_output +144,123677,"TERMINAL",0,0,"1.8M\t./lam_1751657975_183000/array_metadatas\r\n52M\t./lam_1751657975_183000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_183000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_183000/d\r\n52M\t./lam_1751657975_183000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_183000/ocdbt.process_5\r\n",,terminal_output +145,123792,"TERMINAL",0,0,"52M\t./lam_1751657975_183000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_183000/ocdbt.process_0\r\n52M\t./lam_1751657975_183000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_183000/ocdbt.process_7\r\n52M\t./lam_1751657975_183000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_183000/ocdbt.process_6\r\n52M\t./lam_1751657975_183000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_183000/ocdbt.process_1\r\n52M\t./lam_1751657975_183000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_183000/ocdbt.process_2\r\n419M\t./lam_1751657975_183000\r\n52M\t./lam_1751657975_63000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_63000/ocdbt.process_4\r\n",,terminal_output +146,124106,"TERMINAL",0,0,"1.8M\t./lam_1751657975_63000/array_metadatas\r\n53M\t./lam_1751657975_63000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_63000/ocdbt.process_3\r\n",,terminal_output +147,124301,"TERMINAL",0,0,"1.5M\t./lam_1751657975_63000/d\r\n",,terminal_output +148,124378,"TERMINAL",0,0,"52M\t./lam_1751657975_63000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_63000/ocdbt.process_5\r\n",,terminal_output +149,124522,"TERMINAL",0,0,"53M\t./lam_1751657975_63000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_63000/ocdbt.process_0\r\n52M\t./lam_1751657975_63000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_63000/ocdbt.process_7\r\n52M\t./lam_1751657975_63000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_63000/ocdbt.process_6\r\n53M\t./lam_1751657975_63000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_63000/ocdbt.process_1\r\n52M\t./lam_1751657975_63000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_63000/ocdbt.process_2\r\n420M\t./lam_1751657975_63000\r\n53M\t./lam_1751657975_139000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_139000/ocdbt.process_4\r\n",,terminal_output +150,124707,"TERMINAL",0,0,"1.8M\t./lam_1751657975_139000/array_metadatas\r\n52M\t./lam_1751657975_139000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_139000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_139000/d\r\n52M\t./lam_1751657975_139000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_139000/ocdbt.process_5\r\n",,terminal_output 
+151,124936,"TERMINAL",0,0,"52M\t./lam_1751657975_139000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_139000/ocdbt.process_0\r\n52M\t./lam_1751657975_139000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_139000/ocdbt.process_7\r\n52M\t./lam_1751657975_139000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_139000/ocdbt.process_6\r\n52M\t./lam_1751657975_139000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_139000/ocdbt.process_1\r\n52M\t./lam_1751657975_139000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_139000/ocdbt.process_2\r\n419M\t./lam_1751657975_139000\r\n52M\t./lam_1751657975_56000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_56000/ocdbt.process_4\r\n",,terminal_output +152,125123,"TERMINAL",0,0,"1.8M\t./lam_1751657975_56000/array_metadatas\r\n",,terminal_output +153,125265,"TERMINAL",0,0,"53M\t./lam_1751657975_56000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_56000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_56000/d\r\n52M\t./lam_1751657975_56000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_56000/ocdbt.process_5\r\n",,terminal_output +154,125455,"TERMINAL",0,0,"52M\t./lam_1751657975_56000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_56000/ocdbt.process_0\r\n52M\t./lam_1751657975_56000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_56000/ocdbt.process_7\r\n52M\t./lam_1751657975_56000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_56000/ocdbt.process_6\r\n52M\t./lam_1751657975_56000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_56000/ocdbt.process_1\r\n52M\t./lam_1751657975_56000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_56000/ocdbt.process_2\r\n420M\t./lam_1751657975_56000\r\n52M\t./lam_1751657975_59000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_59000/ocdbt.process_4\r\n",,terminal_output +155,125648,"TERMINAL",0,0,"1.8M\t./lam_1751657975_59000/array_metadatas\r\n",,terminal_output +156,125791,"TERMINAL",0,0,"53M\t./lam_1751657975_59000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_59000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_59000/d\r\n52M\t./lam_1751657975_59000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_59000/ocdbt.process_5\r\n",,terminal_output +157,126086,"TERMINAL",0,0,"52M\t./lam_1751657975_59000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_59000/ocdbt.process_0\r\n52M\t./lam_1751657975_59000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_59000/ocdbt.process_7\r\n52M\t./lam_1751657975_59000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_59000/ocdbt.process_6\r\n52M\t./lam_1751657975_59000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_59000/ocdbt.process_1\r\n52M\t./lam_1751657975_59000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_59000/ocdbt.process_2\r\n420M\t./lam_1751657975_59000\r\n53M\t./lam_1751657975_124000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_124000/ocdbt.process_4\r\n",,terminal_output +158,126233,"TERMINAL",0,0,"1.8M\t./lam_1751657975_124000/array_metadatas\r\n52M\t./lam_1751657975_124000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_124000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_124000/d\r\n",,terminal_output +159,126330,"TERMINAL",0,0,"52M\t./lam_1751657975_124000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_124000/ocdbt.process_5\r\n",,terminal_output 
+160,126434,"TERMINAL",0,0,"52M\t./lam_1751657975_124000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_124000/ocdbt.process_0\r\n53M\t./lam_1751657975_124000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_124000/ocdbt.process_7\r\n52M\t./lam_1751657975_124000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_124000/ocdbt.process_6\r\n52M\t./lam_1751657975_124000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_124000/ocdbt.process_1\r\n52M\t./lam_1751657975_124000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_124000/ocdbt.process_2\r\n420M\t./lam_1751657975_124000\r\n",,terminal_output +161,126490,"TERMINAL",0,0,"52M\t./lam_1751657975_81000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_81000/ocdbt.process_4\r\n",,terminal_output +162,126673,"TERMINAL",0,0,"1.8M\t./lam_1751657975_81000/array_metadatas\r\n",,terminal_output +163,126757,"TERMINAL",0,0,"53M\t./lam_1751657975_81000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_81000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_81000/d\r\n52M\t./lam_1751657975_81000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_81000/ocdbt.process_5\r\n",,terminal_output +164,127002,"TERMINAL",0,0,"52M\t./lam_1751657975_81000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_81000/ocdbt.process_0\r\n53M\t./lam_1751657975_81000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_81000/ocdbt.process_7\r\n52M\t./lam_1751657975_81000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_81000/ocdbt.process_6\r\n52M\t./lam_1751657975_81000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_81000/ocdbt.process_1\r\n52M\t./lam_1751657975_81000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_81000/ocdbt.process_2\r\n420M\t./lam_1751657975_81000\r\n",,terminal_output +165,127380,"TERMINAL",0,0,"53M\t./lam_1751657975_195000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_195000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_195000/array_metadatas\r\n52M\t./lam_1751657975_195000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_195000/ocdbt.process_3\r\n",,terminal_output +166,127437,"TERMINAL",0,0,"1.5M\t./lam_1751657975_195000/d\r\n52M\t./lam_1751657975_195000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_195000/ocdbt.process_5\r\n",,terminal_output +167,127643,"TERMINAL",0,0,"53M\t./lam_1751657975_195000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_195000/ocdbt.process_0\r\n",,terminal_output +168,127804,"TERMINAL",0,0,"53M\t./lam_1751657975_195000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_195000/ocdbt.process_7\r\n52M\t./lam_1751657975_195000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_195000/ocdbt.process_6\r\n52M\t./lam_1751657975_195000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_195000/ocdbt.process_1\r\n52M\t./lam_1751657975_195000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_195000/ocdbt.process_2\r\n421M\t./lam_1751657975_195000\r\n52M\t./lam_1751657975_24000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_24000/ocdbt.process_4\r\n",,terminal_output +169,127946,"TERMINAL",0,0,"1.8M\t./lam_1751657975_24000/array_metadatas\r\n52M\t./lam_1751657975_24000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_24000/ocdbt.process_3\r\n",,terminal_output +170,128004,"TERMINAL",0,0,"1.5M\t./lam_1751657975_24000/d\r\n52M\t./lam_1751657975_24000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_24000/ocdbt.process_5\r\n",,terminal_output +171,128098,"TERMINAL",0,0,"52M\t./lam_1751657975_24000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_24000/ocdbt.process_0\r\n",,terminal_output 
+172,128281,"TERMINAL",0,0,"53M\t./lam_1751657975_24000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_24000/ocdbt.process_7\r\n52M\t./lam_1751657975_24000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_24000/ocdbt.process_6\r\n52M\t./lam_1751657975_24000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_24000/ocdbt.process_1\r\n52M\t./lam_1751657975_24000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_24000/ocdbt.process_2\r\n420M\t./lam_1751657975_24000\r\n53M\t./lam_1751657975_79000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_79000/ocdbt.process_4\r\n",,terminal_output +173,128350,"TERMINAL",0,0,"1.8M\t./lam_1751657975_79000/array_metadatas\r\n52M\t./lam_1751657975_79000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_79000/ocdbt.process_3\r\n",,terminal_output +174,128535,"TERMINAL",0,0,"1.5M\t./lam_1751657975_79000/d\r\n52M\t./lam_1751657975_79000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_79000/ocdbt.process_5\r\n",,terminal_output +175,128637,"TERMINAL",0,0,"52M\t./lam_1751657975_79000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_79000/ocdbt.process_0\r\n52M\t./lam_1751657975_79000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_79000/ocdbt.process_7\r\n53M\t./lam_1751657975_79000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_79000/ocdbt.process_6\r\n52M\t./lam_1751657975_79000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_79000/ocdbt.process_1\r\n52M\t./lam_1751657975_79000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_79000/ocdbt.process_2\r\n420M\t./lam_1751657975_79000\r\n52M\t./lam_1751657975_152000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_152000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_152000/array_metadatas\r\n52M\t./lam_1751657975_152000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_152000/ocdbt.process_3\r\n",,terminal_output +176,128744,"TERMINAL",0,0,"1.5M\t./lam_1751657975_152000/d\r\n",,terminal_output +177,128804,"TERMINAL",0,0,"52M\t./lam_1751657975_152000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_152000/ocdbt.process_5\r\n",,terminal_output +178,129034,"TERMINAL",0,0,"52M\t./lam_1751657975_152000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_152000/ocdbt.process_0\r\n",,terminal_output +179,129130,"TERMINAL",0,0,"53M\t./lam_1751657975_152000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_152000/ocdbt.process_7\r\n52M\t./lam_1751657975_152000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_152000/ocdbt.process_6\r\n52M\t./lam_1751657975_152000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_152000/ocdbt.process_1\r\n52M\t./lam_1751657975_152000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_152000/ocdbt.process_2\r\n419M\t./lam_1751657975_152000\r\n52M\t./lam_1751657975_98000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_98000/ocdbt.process_4\r\n",,terminal_output +180,129227,"TERMINAL",0,0,"1.8M\t./lam_1751657975_98000/array_metadatas\r\n53M\t./lam_1751657975_98000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_98000/ocdbt.process_3\r\n",,terminal_output +181,129286,"TERMINAL",0,0,"1.5M\t./lam_1751657975_98000/d\r\n",,terminal_output +182,129394,"TERMINAL",0,0,"52M\t./lam_1751657975_98000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_98000/ocdbt.process_5\r\n",,terminal_output +183,129470,"TERMINAL",0,0,"52M\t./lam_1751657975_98000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_98000/ocdbt.process_0\r\n",,terminal_output 
+184,129581,"TERMINAL",0,0,"52M\t./lam_1751657975_98000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_98000/ocdbt.process_7\r\n52M\t./lam_1751657975_98000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_98000/ocdbt.process_6\r\n52M\t./lam_1751657975_98000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_98000/ocdbt.process_1\r\n52M\t./lam_1751657975_98000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_98000/ocdbt.process_2\r\n420M\t./lam_1751657975_98000\r\n52M\t./lam_1751657975_43000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_43000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_43000/array_metadatas\r\n52M\t./lam_1751657975_43000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_43000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_43000/d\r\n",,terminal_output +185,129698,"TERMINAL",0,0,"53M\t./lam_1751657975_43000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_43000/ocdbt.process_5\r\n",,terminal_output +186,129924,"TERMINAL",0,0,"52M\t./lam_1751657975_43000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_43000/ocdbt.process_0\r\n52M\t./lam_1751657975_43000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_43000/ocdbt.process_7\r\n52M\t./lam_1751657975_43000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_43000/ocdbt.process_6\r\n52M\t./lam_1751657975_43000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_43000/ocdbt.process_1\r\n53M\t./lam_1751657975_43000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_43000/ocdbt.process_2\r\n420M\t./lam_1751657975_43000\r\n53M\t./lam_1751657975_49000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_49000/ocdbt.process_4\r\n",,terminal_output +187,130336,"TERMINAL",0,0,"1.8M\t./lam_1751657975_49000/array_metadatas\r\n53M\t./lam_1751657975_49000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_49000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_49000/d\r\n52M\t./lam_1751657975_49000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_49000/ocdbt.process_5\r\n53M\t./lam_1751657975_49000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_49000/ocdbt.process_0\r\n52M\t./lam_1751657975_49000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_49000/ocdbt.process_7\r\n52M\t./lam_1751657975_49000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_49000/ocdbt.process_6\r\n53M\t./lam_1751657975_49000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_49000/ocdbt.process_1\r\n53M\t./lam_1751657975_49000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_49000/ocdbt.process_2\r\n421M\t./lam_1751657975_49000\r\n52M\t./lam_1751657975_104000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_104000/ocdbt.process_4\r\n",,terminal_output +188,130474,"TERMINAL",0,0,"1.8M\t./lam_1751657975_104000/array_metadatas\r\n52M\t./lam_1751657975_104000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_104000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_104000/d\r\n52M\t./lam_1751657975_104000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_104000/ocdbt.process_5\r\n52M\t./lam_1751657975_104000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_104000/ocdbt.process_0\r\n",,terminal_output +189,130581,"TERMINAL",0,0,"53M\t./lam_1751657975_104000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_104000/ocdbt.process_7\r\n52M\t./lam_1751657975_104000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_104000/ocdbt.process_6\r\n52M\t./lam_1751657975_104000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_104000/ocdbt.process_1\r\n52M\t./lam_1751657975_104000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_104000/ocdbt.process_2\r\n419M\t./lam_1751657975_104000\r\n52M\t./lam_1751657975_38000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_38000/ocdbt.process_4\r\n",,terminal_output +190,130662,"TERMINAL",0,0,"1.8M\t./lam_1751657975_38000/array_metadatas\r\n",,terminal_output 
+191,130751,"TERMINAL",0,0,"53M\t./lam_1751657975_38000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_38000/ocdbt.process_3\r\n",,terminal_output +192,130889,"TERMINAL",0,0,"1.5M\t./lam_1751657975_38000/d\r\n52M\t./lam_1751657975_38000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_38000/ocdbt.process_5\r\n52M\t./lam_1751657975_38000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_38000/ocdbt.process_0\r\n52M\t./lam_1751657975_38000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_38000/ocdbt.process_7\r\n52M\t./lam_1751657975_38000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_38000/ocdbt.process_6\r\n",,terminal_output +193,130978,"TERMINAL",0,0,"52M\t./lam_1751657975_38000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_38000/ocdbt.process_1\r\n52M\t./lam_1751657975_38000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_38000/ocdbt.process_2\r\n420M\t./lam_1751657975_38000\r\n52M\t./lam_1751657975_191000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_191000/ocdbt.process_4\r\n",,terminal_output +194,131073,"TERMINAL",0,0,"1.8M\t./lam_1751657975_191000/array_metadatas\r\n52M\t./lam_1751657975_191000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_191000/ocdbt.process_3\r\n",,terminal_output +195,131177,"TERMINAL",0,0,"1.5M\t./lam_1751657975_191000/d\r\n",,terminal_output +196,131259,"TERMINAL",0,0,"52M\t./lam_1751657975_191000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_191000/ocdbt.process_5\r\n",,terminal_output +197,131380,"TERMINAL",0,0,"52M\t./lam_1751657975_191000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_191000/ocdbt.process_0\r\n52M\t./lam_1751657975_191000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_191000/ocdbt.process_7\r\n52M\t./lam_1751657975_191000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_191000/ocdbt.process_6\r\n52M\t./lam_1751657975_191000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_191000/ocdbt.process_1\r\n52M\t./lam_1751657975_191000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_191000/ocdbt.process_2\r\n419M\t./lam_1751657975_191000\r\n52M\t./lam_1751657975_119000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_119000/ocdbt.process_4\r\n",,terminal_output +198,131732,"TERMINAL",0,0,"1.8M\t./lam_1751657975_119000/array_metadatas\r\n52M\t./lam_1751657975_119000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_119000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_119000/d\r\n52M\t./lam_1751657975_119000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_119000/ocdbt.process_5\r\n52M\t./lam_1751657975_119000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_119000/ocdbt.process_0\r\n52M\t./lam_1751657975_119000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_119000/ocdbt.process_7\r\n52M\t./lam_1751657975_119000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_119000/ocdbt.process_6\r\n52M\t./lam_1751657975_119000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_119000/ocdbt.process_1\r\n52M\t./lam_1751657975_119000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_119000/ocdbt.process_2\r\n419M\t./lam_1751657975_119000\r\n52M\t./lam_1751657975_67000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_67000/ocdbt.process_4\r\n",,terminal_output +199,131994,"TERMINAL",0,0,"1.8M\t./lam_1751657975_67000/array_metadatas\r\n52M\t./lam_1751657975_67000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_67000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_67000/d\r\n52M\t./lam_1751657975_67000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_67000/ocdbt.process_5\r\n",,terminal_output 
+200,132291,"TERMINAL",0,0,"52M\t./lam_1751657975_67000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_67000/ocdbt.process_0\r\n52M\t./lam_1751657975_67000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_67000/ocdbt.process_7\r\n52M\t./lam_1751657975_67000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_67000/ocdbt.process_6\r\n52M\t./lam_1751657975_67000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_67000/ocdbt.process_1\r\n52M\t./lam_1751657975_67000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_67000/ocdbt.process_2\r\n420M\t./lam_1751657975_67000\r\n52M\t./lam_1751657975_17000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_17000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_17000/array_metadatas\r\n",,terminal_output +201,132416,"TERMINAL",0,0,"52M\t./lam_1751657975_17000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_17000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_17000/d\r\n52M\t./lam_1751657975_17000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_17000/ocdbt.process_5\r\n52M\t./lam_1751657975_17000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_17000/ocdbt.process_0\r\n53M\t./lam_1751657975_17000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_17000/ocdbt.process_7\r\n53M\t./lam_1751657975_17000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_17000/ocdbt.process_6\r\n52M\t./lam_1751657975_17000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_17000/ocdbt.process_1\r\n52M\t./lam_1751657975_17000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_17000/ocdbt.process_2\r\n420M\t./lam_1751657975_17000\r\n52M\t./lam_1751657975_80000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_80000/ocdbt.process_4\r\n",,terminal_output +202,132740,"TERMINAL",0,0,"1.8M\t./lam_1751657975_80000/array_metadatas\r\n52M\t./lam_1751657975_80000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_80000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_80000/d\r\n52M\t./lam_1751657975_80000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_80000/ocdbt.process_5\r\n52M\t./lam_1751657975_80000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_80000/ocdbt.process_0\r\n52M\t./lam_1751657975_80000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_80000/ocdbt.process_7\r\n52M\t./lam_1751657975_80000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_80000/ocdbt.process_6\r\n52M\t./lam_1751657975_80000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_80000/ocdbt.process_1\r\n52M\t./lam_1751657975_80000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_80000/ocdbt.process_2\r\n420M\t./lam_1751657975_80000\r\n52M\t./lam_1751657975_108000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_108000/ocdbt.process_4\r\n",,terminal_output +203,132848,"TERMINAL",0,0,"1.8M\t./lam_1751657975_108000/array_metadatas\r\n52M\t./lam_1751657975_108000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_108000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_108000/d\r\n",,terminal_output +204,132902,"TERMINAL",0,0,"52M\t./lam_1751657975_108000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_108000/ocdbt.process_5\r\n",,terminal_output +205,132996,"TERMINAL",0,0,"52M\t./lam_1751657975_108000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_108000/ocdbt.process_0\r\n52M\t./lam_1751657975_108000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_108000/ocdbt.process_7\r\n52M\t./lam_1751657975_108000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_108000/ocdbt.process_6\r\n52M\t./lam_1751657975_108000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_108000/ocdbt.process_1\r\n52M\t./lam_1751657975_108000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_108000/ocdbt.process_2\r\n419M\t./lam_1751657975_108000\r\n53M\t./lam_1751657975_3000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_3000/ocdbt.process_4\r\n",,terminal_output 
+206,133197,"TERMINAL",0,0,"1.8M\t./lam_1751657975_3000/array_metadatas\r\n53M\t./lam_1751657975_3000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_3000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_3000/d\r\n",,terminal_output +207,133267,"TERMINAL",0,0,"53M\t./lam_1751657975_3000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_3000/ocdbt.process_5\r\n",,terminal_output +208,133313,"TERMINAL",0,0,"53M\t./lam_1751657975_3000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_3000/ocdbt.process_0\r\n",,terminal_output +209,133457,"TERMINAL",0,0,"54M\t./lam_1751657975_3000/ocdbt.process_7/d\r\n54M\t./lam_1751657975_3000/ocdbt.process_7\r\n53M\t./lam_1751657975_3000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_3000/ocdbt.process_6\r\n53M\t./lam_1751657975_3000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_3000/ocdbt.process_1\r\n53M\t./lam_1751657975_3000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_3000/ocdbt.process_2\r\n423M\t./lam_1751657975_3000\r\n52M\t./lam_1751657975_22000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_22000/ocdbt.process_4\r\n",,terminal_output +210,133647,"TERMINAL",0,0,"1.8M\t./lam_1751657975_22000/array_metadatas\r\n",,terminal_output +211,133717,"TERMINAL",0,0,"53M\t./lam_1751657975_22000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_22000/ocdbt.process_3\r\n",,terminal_output +212,133838,"TERMINAL",0,0,"1.5M\t./lam_1751657975_22000/d\r\n52M\t./lam_1751657975_22000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_22000/ocdbt.process_5\r\n",,terminal_output +213,134291,"TERMINAL",0,0,"52M\t./lam_1751657975_22000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_22000/ocdbt.process_0\r\n53M\t./lam_1751657975_22000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_22000/ocdbt.process_7\r\n52M\t./lam_1751657975_22000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_22000/ocdbt.process_6\r\n52M\t./lam_1751657975_22000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_22000/ocdbt.process_1\r\n53M\t./lam_1751657975_22000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_22000/ocdbt.process_2\r\n421M\t./lam_1751657975_22000\r\n52M\t./lam_1751657975_193000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_193000/ocdbt.process_4\r\n",,terminal_output +214,134506,"TERMINAL",0,0,"1.8M\t./lam_1751657975_193000/array_metadatas\r\n",,terminal_output +215,134600,"TERMINAL",0,0,"53M\t./lam_1751657975_193000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_193000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_193000/d\r\n",,terminal_output +216,134810,"TERMINAL",0,0,"52M\t./lam_1751657975_193000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_193000/ocdbt.process_5\r\n52M\t./lam_1751657975_193000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_193000/ocdbt.process_0\r\n52M\t./lam_1751657975_193000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_193000/ocdbt.process_7\r\n52M\t./lam_1751657975_193000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_193000/ocdbt.process_6\r\n52M\t./lam_1751657975_193000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_193000/ocdbt.process_1\r\n52M\t./lam_1751657975_193000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_193000/ocdbt.process_2\r\n419M\t./lam_1751657975_193000\r\n52M\t./lam_1751657975_125000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_125000/ocdbt.process_4\r\n",,terminal_output +217,134915,"TERMINAL",0,0,"1.8M\t./lam_1751657975_125000/array_metadatas\r\n53M\t./lam_1751657975_125000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_125000/ocdbt.process_3\r\n",,terminal_output +218,135012,"TERMINAL",0,0,"1.5M\t./lam_1751657975_125000/d\r\n",,terminal_output 
+219,135086,"TERMINAL",0,0,"52M\t./lam_1751657975_125000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_125000/ocdbt.process_5\r\n",,terminal_output +220,135315,"TERMINAL",0,0,"53M\t./lam_1751657975_125000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_125000/ocdbt.process_0\r\n52M\t./lam_1751657975_125000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_125000/ocdbt.process_7\r\n52M\t./lam_1751657975_125000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_125000/ocdbt.process_6\r\n52M\t./lam_1751657975_125000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_125000/ocdbt.process_1\r\n53M\t./lam_1751657975_125000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_125000/ocdbt.process_2\r\n420M\t./lam_1751657975_125000\r\n52M\t./lam_1751657975_54000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_54000/ocdbt.process_4\r\n",,terminal_output +221,135447,"TERMINAL",0,0,"1.8M\t./lam_1751657975_54000/array_metadatas\r\n",,terminal_output +222,135506,"TERMINAL",0,0,"53M\t./lam_1751657975_54000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_54000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_54000/d\r\n",,terminal_output +223,135557,"TERMINAL",0,0,"52M\t./lam_1751657975_54000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_54000/ocdbt.process_5\r\n",,terminal_output +224,135843,"TERMINAL",0,0,"52M\t./lam_1751657975_54000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_54000/ocdbt.process_0\r\n53M\t./lam_1751657975_54000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_54000/ocdbt.process_7\r\n52M\t./lam_1751657975_54000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_54000/ocdbt.process_6\r\n52M\t./lam_1751657975_54000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_54000/ocdbt.process_1\r\n52M\t./lam_1751657975_54000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_54000/ocdbt.process_2\r\n420M\t./lam_1751657975_54000\r\n53M\t./lam_1751657975_8000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_8000/ocdbt.process_4\r\n",,terminal_output +225,136132,"TERMINAL",0,0,"1.8M\t./lam_1751657975_8000/array_metadatas\r\n53M\t./lam_1751657975_8000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_8000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_8000/d\r\n53M\t./lam_1751657975_8000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_8000/ocdbt.process_5\r\n",,terminal_output +226,136314,"TERMINAL",0,0,"53M\t./lam_1751657975_8000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_8000/ocdbt.process_0\r\n",,terminal_output +227,136599,"TERMINAL",0,0,"53M\t./lam_1751657975_8000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_8000/ocdbt.process_7\r\n53M\t./lam_1751657975_8000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_8000/ocdbt.process_6\r\n53M\t./lam_1751657975_8000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_8000/ocdbt.process_1\r\n53M\t./lam_1751657975_8000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_8000/ocdbt.process_2\r\n423M\t./lam_1751657975_8000\r\n53M\t./lam_1751657975_166000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_166000/ocdbt.process_4\r\n",,terminal_output +228,136749,"TERMINAL",0,0,"1.8M\t./lam_1751657975_166000/array_metadatas\r\n52M\t./lam_1751657975_166000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_166000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_166000/d\r\n52M\t./lam_1751657975_166000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_166000/ocdbt.process_5\r\n",,terminal_output 
+229,136923,"TERMINAL",0,0,"53M\t./lam_1751657975_166000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_166000/ocdbt.process_0\r\n52M\t./lam_1751657975_166000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_166000/ocdbt.process_7\r\n52M\t./lam_1751657975_166000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_166000/ocdbt.process_6\r\n52M\t./lam_1751657975_166000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_166000/ocdbt.process_1\r\n52M\t./lam_1751657975_166000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_166000/ocdbt.process_2\r\n420M\t./lam_1751657975_166000\r\n52M\t./lam_1751657975_99000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_99000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_99000/array_metadatas\r\n",,terminal_output +230,137037,"TERMINAL",0,0,"53M\t./lam_1751657975_99000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_99000/ocdbt.process_3\r\n",,terminal_output +231,137113,"TERMINAL",0,0,"1.5M\t./lam_1751657975_99000/d\r\n",,terminal_output +232,137281,"TERMINAL",0,0,"52M\t./lam_1751657975_99000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_99000/ocdbt.process_5\r\n",,terminal_output +233,137543,"TERMINAL",0,0,"52M\t./lam_1751657975_99000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_99000/ocdbt.process_0\r\n52M\t./lam_1751657975_99000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_99000/ocdbt.process_7\r\n52M\t./lam_1751657975_99000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_99000/ocdbt.process_6\r\n52M\t./lam_1751657975_99000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_99000/ocdbt.process_1\r\n52M\t./lam_1751657975_99000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_99000/ocdbt.process_2\r\n420M\t./lam_1751657975_99000\r\n53M\t./lam_1751657975_176000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_176000/ocdbt.process_4\r\n",,terminal_output +234,137766,"TERMINAL",0,0,"1.8M\t./lam_1751657975_176000/array_metadatas\r\n52M\t./lam_1751657975_176000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_176000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_176000/d\r\n52M\t./lam_1751657975_176000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_176000/ocdbt.process_5\r\n",,terminal_output +235,137941,"TERMINAL",0,0,"52M\t./lam_1751657975_176000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_176000/ocdbt.process_0\r\n53M\t./lam_1751657975_176000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_176000/ocdbt.process_7\r\n52M\t./lam_1751657975_176000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_176000/ocdbt.process_6\r\n52M\t./lam_1751657975_176000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_176000/ocdbt.process_1\r\n52M\t./lam_1751657975_176000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_176000/ocdbt.process_2\r\n419M\t./lam_1751657975_176000\r\n52M\t./lam_1751657975_142000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_142000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_142000/array_metadatas\r\n52M\t./lam_1751657975_142000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_142000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_142000/d\r\n",,terminal_output +236,138049,"TERMINAL",0,0,"52M\t./lam_1751657975_142000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_142000/ocdbt.process_5\r\n",,terminal_output +237,138246,"TERMINAL",0,0,"53M\t./lam_1751657975_142000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_142000/ocdbt.process_0\r\n",,terminal_output +238,138399,"TERMINAL",0,0,"52M\t./lam_1751657975_142000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_142000/ocdbt.process_7\r\n",,terminal_output 
+239,138666,"TERMINAL",0,0,"52M\t./lam_1751657975_142000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_142000/ocdbt.process_6\r\n52M\t./lam_1751657975_142000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_142000/ocdbt.process_1\r\n52M\t./lam_1751657975_142000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_142000/ocdbt.process_2\r\n419M\t./lam_1751657975_142000\r\n53M\t./lam_1751657975_7000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_7000/ocdbt.process_4\r\n",,terminal_output +240,138750,"TERMINAL",0,0,"1.8M\t./lam_1751657975_7000/array_metadatas\r\n53M\t./lam_1751657975_7000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_7000/ocdbt.process_3\r\n",,terminal_output +241,138801,"TERMINAL",0,0,"1.5M\t./lam_1751657975_7000/d\r\n",,terminal_output +242,138931,"TERMINAL",0,0,"53M\t./lam_1751657975_7000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_7000/ocdbt.process_5\r\n",,terminal_output +243,138988,"TERMINAL",0,0,"53M\t./lam_1751657975_7000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_7000/ocdbt.process_0\r\n",,terminal_output +244,139046,"TERMINAL",0,0,"53M\t./lam_1751657975_7000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_7000/ocdbt.process_7\r\n53M\t./lam_1751657975_7000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_7000/ocdbt.process_6\r\n",,terminal_output +245,139163,"TERMINAL",0,0,"53M\t./lam_1751657975_7000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_7000/ocdbt.process_1\r\n53M\t./lam_1751657975_7000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_7000/ocdbt.process_2\r\n422M\t./lam_1751657975_7000\r\n52M\t./lam_1751657975_117000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_117000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_117000/array_metadatas\r\n",,terminal_output +246,139238,"TERMINAL",0,0,"53M\t./lam_1751657975_117000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_117000/ocdbt.process_3\r\n",,terminal_output +247,139336,"TERMINAL",0,0,"1.5M\t./lam_1751657975_117000/d\r\n",,terminal_output +248,139456,"TERMINAL",0,0,"52M\t./lam_1751657975_117000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_117000/ocdbt.process_5\r\n",,terminal_output +249,139530,"TERMINAL",0,0,"52M\t./lam_1751657975_117000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_117000/ocdbt.process_0\r\n53M\t./lam_1751657975_117000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_117000/ocdbt.process_7\r\n",,terminal_output +250,139634,"TERMINAL",0,0,"52M\t./lam_1751657975_117000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_117000/ocdbt.process_6\r\n52M\t./lam_1751657975_117000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_117000/ocdbt.process_1\r\n52M\t./lam_1751657975_117000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_117000/ocdbt.process_2\r\n420M\t./lam_1751657975_117000\r\n53M\t./lam_1751657975_10000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_10000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_10000/array_metadatas\r\n",,terminal_output +251,139687,"TERMINAL",0,0,"53M\t./lam_1751657975_10000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_10000/ocdbt.process_3\r\n",,terminal_output +252,139799,"TERMINAL",0,0,"1.5M\t./lam_1751657975_10000/d\r\n53M\t./lam_1751657975_10000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_10000/ocdbt.process_5\r\n",,terminal_output 
+253,140029,"TERMINAL",0,0,"53M\t./lam_1751657975_10000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_10000/ocdbt.process_0\r\n53M\t./lam_1751657975_10000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_10000/ocdbt.process_7\r\n53M\t./lam_1751657975_10000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_10000/ocdbt.process_6\r\n53M\t./lam_1751657975_10000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_10000/ocdbt.process_1\r\n53M\t./lam_1751657975_10000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_10000/ocdbt.process_2\r\n422M\t./lam_1751657975_10000\r\n52M\t./lam_1751657975_140000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_140000/ocdbt.process_4\r\n",,terminal_output +254,140145,"TERMINAL",0,0,"1.8M\t./lam_1751657975_140000/array_metadatas\r\n52M\t./lam_1751657975_140000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_140000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_140000/d\r\n",,terminal_output +255,140211,"TERMINAL",0,0,"52M\t./lam_1751657975_140000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_140000/ocdbt.process_5\r\n",,terminal_output +256,140467,"TERMINAL",0,0,"52M\t./lam_1751657975_140000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_140000/ocdbt.process_0\r\n52M\t./lam_1751657975_140000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_140000/ocdbt.process_7\r\n52M\t./lam_1751657975_140000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_140000/ocdbt.process_6\r\n",,terminal_output +257,140579,"TERMINAL",0,0,"52M\t./lam_1751657975_140000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_140000/ocdbt.process_1\r\n52M\t./lam_1751657975_140000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_140000/ocdbt.process_2\r\n419M\t./lam_1751657975_140000\r\n52M\t./lam_1751657975_187000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_187000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_187000/array_metadatas\r\n",,terminal_output +258,140736,"TERMINAL",0,0,"53M\t./lam_1751657975_187000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_187000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_187000/d\r\n52M\t./lam_1751657975_187000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_187000/ocdbt.process_5\r\n",,terminal_output +259,140827,"TERMINAL",0,0,"52M\t./lam_1751657975_187000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_187000/ocdbt.process_0\r\n",,terminal_output +260,140972,"TERMINAL",0,0,"53M\t./lam_1751657975_187000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_187000/ocdbt.process_7\r\n52M\t./lam_1751657975_187000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_187000/ocdbt.process_6\r\n52M\t./lam_1751657975_187000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_187000/ocdbt.process_1\r\n52M\t./lam_1751657975_187000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_187000/ocdbt.process_2\r\n420M\t./lam_1751657975_187000\r\n52M\t./lam_1751657975_60000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_60000/ocdbt.process_4\r\n",,terminal_output +261,141030,"TERMINAL",0,0,"1.8M\t./lam_1751657975_60000/array_metadatas\r\n52M\t./lam_1751657975_60000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_60000/ocdbt.process_3\r\n",,terminal_output +262,141265,"TERMINAL",0,0,"1.5M\t./lam_1751657975_60000/d\r\n52M\t./lam_1751657975_60000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_60000/ocdbt.process_5\r\n",,terminal_output 
+263,141494,"TERMINAL",0,0,"52M\t./lam_1751657975_60000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_60000/ocdbt.process_0\r\n52M\t./lam_1751657975_60000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_60000/ocdbt.process_7\r\n52M\t./lam_1751657975_60000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_60000/ocdbt.process_6\r\n52M\t./lam_1751657975_60000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_60000/ocdbt.process_1\r\n52M\t./lam_1751657975_60000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_60000/ocdbt.process_2\r\n419M\t./lam_1751657975_60000\r\n52M\t./lam_1751657975_171000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_171000/ocdbt.process_4\r\n",,terminal_output +264,141730,"TERMINAL",0,0,"1.8M\t./lam_1751657975_171000/array_metadatas\r\n52M\t./lam_1751657975_171000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_171000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_171000/d\r\n52M\t./lam_1751657975_171000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_171000/ocdbt.process_5\r\n",,terminal_output +265,142069,"TERMINAL",0,0,"52M\t./lam_1751657975_171000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_171000/ocdbt.process_0\r\n52M\t./lam_1751657975_171000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_171000/ocdbt.process_7\r\n52M\t./lam_1751657975_171000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_171000/ocdbt.process_6\r\n52M\t./lam_1751657975_171000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_171000/ocdbt.process_1\r\n52M\t./lam_1751657975_171000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_171000/ocdbt.process_2\r\n419M\t./lam_1751657975_171000\r\n52M\t./lam_1751657975_143000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_143000/ocdbt.process_4\r\n",,terminal_output +266,142340,"TERMINAL",0,0,"1.8M\t./lam_1751657975_143000/array_metadatas\r\n52M\t./lam_1751657975_143000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_143000/ocdbt.process_3\r\n",,terminal_output +267,142513,"TERMINAL",0,0,"1.5M\t./lam_1751657975_143000/d\r\n52M\t./lam_1751657975_143000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_143000/ocdbt.process_5\r\n",,terminal_output +268,142835,"TERMINAL",0,0,"52M\t./lam_1751657975_143000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_143000/ocdbt.process_0\r\n52M\t./lam_1751657975_143000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_143000/ocdbt.process_7\r\n52M\t./lam_1751657975_143000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_143000/ocdbt.process_6\r\n52M\t./lam_1751657975_143000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_143000/ocdbt.process_1\r\n52M\t./lam_1751657975_143000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_143000/ocdbt.process_2\r\n420M\t./lam_1751657975_143000\r\n52M\t./lam_1751657975_165000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_165000/ocdbt.process_4\r\n",,terminal_output +269,143025,"TERMINAL",0,0,"1.8M\t./lam_1751657975_165000/array_metadatas\r\n52M\t./lam_1751657975_165000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_165000/ocdbt.process_3\r\n",,terminal_output +270,143149,"TERMINAL",0,0,"1.5M\t./lam_1751657975_165000/d\r\n52M\t./lam_1751657975_165000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_165000/ocdbt.process_5\r\n",,terminal_output 
+271,143315,"TERMINAL",0,0,"52M\t./lam_1751657975_165000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_165000/ocdbt.process_0\r\n53M\t./lam_1751657975_165000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_165000/ocdbt.process_7\r\n52M\t./lam_1751657975_165000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_165000/ocdbt.process_6\r\n52M\t./lam_1751657975_165000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_165000/ocdbt.process_1\r\n52M\t./lam_1751657975_165000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_165000/ocdbt.process_2\r\n419M\t./lam_1751657975_165000\r\n53M\t./lam_1751657975_62000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_62000/ocdbt.process_4\r\n",,terminal_output +272,143428,"TERMINAL",0,0,"1.8M\t./lam_1751657975_62000/array_metadatas\r\n53M\t./lam_1751657975_62000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_62000/ocdbt.process_3\r\n",,terminal_output +273,143538,"TERMINAL",0,0,"1.5M\t./lam_1751657975_62000/d\r\n52M\t./lam_1751657975_62000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_62000/ocdbt.process_5\r\n",,terminal_output +274,143712,"TERMINAL",0,0,"52M\t./lam_1751657975_62000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_62000/ocdbt.process_0\r\n52M\t./lam_1751657975_62000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_62000/ocdbt.process_7\r\n52M\t./lam_1751657975_62000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_62000/ocdbt.process_6\r\n52M\t./lam_1751657975_62000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_62000/ocdbt.process_1\r\n52M\t./lam_1751657975_62000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_62000/ocdbt.process_2\r\n420M\t./lam_1751657975_62000\r\n52M\t./lam_1751657975_164000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_164000/ocdbt.process_4\r\n",,terminal_output +275,143829,"TERMINAL",0,0,"1.8M\t./lam_1751657975_164000/array_metadatas\r\n52M\t./lam_1751657975_164000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_164000/ocdbt.process_3\r\n",,terminal_output +276,143941,"TERMINAL",0,0,"1.5M\t./lam_1751657975_164000/d\r\n",,terminal_output +277,144018,"TERMINAL",0,0,"52M\t./lam_1751657975_164000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_164000/ocdbt.process_5\r\n",,terminal_output +278,144411,"TERMINAL",0,0,"52M\t./lam_1751657975_164000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_164000/ocdbt.process_0\r\n53M\t./lam_1751657975_164000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_164000/ocdbt.process_7\r\n52M\t./lam_1751657975_164000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_164000/ocdbt.process_6\r\n",,terminal_output +279,144531,"TERMINAL",0,0,"52M\t./lam_1751657975_164000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_164000/ocdbt.process_1\r\n52M\t./lam_1751657975_164000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_164000/ocdbt.process_2\r\n419M\t./lam_1751657975_164000\r\n52M\t./lam_1751657975_110000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_110000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_110000/array_metadatas\r\n",,terminal_output +280,144656,"TERMINAL",0,0,"53M\t./lam_1751657975_110000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_110000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_110000/d\r\n52M\t./lam_1751657975_110000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_110000/ocdbt.process_5\r\n",,terminal_output 
+281,144868,"TERMINAL",0,0,"53M\t./lam_1751657975_110000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_110000/ocdbt.process_0\r\n53M\t./lam_1751657975_110000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_110000/ocdbt.process_7\r\n52M\t./lam_1751657975_110000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_110000/ocdbt.process_6\r\n52M\t./lam_1751657975_110000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_110000/ocdbt.process_1\r\n52M\t./lam_1751657975_110000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_110000/ocdbt.process_2\r\n420M\t./lam_1751657975_110000\r\n52M\t./lam_1751657975_154000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_154000/ocdbt.process_4\r\n",,terminal_output +282,145034,"TERMINAL",0,0,"1.8M\t./lam_1751657975_154000/array_metadatas\r\n52M\t./lam_1751657975_154000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_154000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_154000/d\r\n52M\t./lam_1751657975_154000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_154000/ocdbt.process_5\r\n",,terminal_output +283,145209,"TERMINAL",0,0,"53M\t./lam_1751657975_154000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_154000/ocdbt.process_0\r\n52M\t./lam_1751657975_154000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_154000/ocdbt.process_7\r\n52M\t./lam_1751657975_154000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_154000/ocdbt.process_6\r\n52M\t./lam_1751657975_154000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_154000/ocdbt.process_1\r\n52M\t./lam_1751657975_154000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_154000/ocdbt.process_2\r\n419M\t./lam_1751657975_154000\r\n53M\t./lam_1751657975_120000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_120000/ocdbt.process_4\r\n",,terminal_output +284,145424,"TERMINAL",0,0,"1.8M\t./lam_1751657975_120000/array_metadatas\r\n53M\t./lam_1751657975_120000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_120000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_120000/d\r\n",,terminal_output +285,145497,"TERMINAL",0,0,"52M\t./lam_1751657975_120000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_120000/ocdbt.process_5\r\n",,terminal_output +286,145650,"TERMINAL",0,0,"52M\t./lam_1751657975_120000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_120000/ocdbt.process_0\r\n53M\t./lam_1751657975_120000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_120000/ocdbt.process_7\r\n52M\t./lam_1751657975_120000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_120000/ocdbt.process_6\r\n52M\t./lam_1751657975_120000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_120000/ocdbt.process_1\r\n52M\t./lam_1751657975_120000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_120000/ocdbt.process_2\r\n420M\t./lam_1751657975_120000\r\n52M\t./lam_1751657975_72000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_72000/ocdbt.process_4\r\n",,terminal_output +287,145838,"TERMINAL",0,0,"1.8M\t./lam_1751657975_72000/array_metadatas\r\n52M\t./lam_1751657975_72000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_72000/ocdbt.process_3\r\n",,terminal_output +288,146002,"TERMINAL",0,0,"1.5M\t./lam_1751657975_72000/d\r\n52M\t./lam_1751657975_72000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_72000/ocdbt.process_5\r\n",,terminal_output +289,146240,"TERMINAL",0,0,"52M\t./lam_1751657975_72000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_72000/ocdbt.process_0\r\n52M\t./lam_1751657975_72000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_72000/ocdbt.process_7\r\n52M\t./lam_1751657975_72000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_72000/ocdbt.process_6\r\n52M\t./lam_1751657975_72000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_72000/ocdbt.process_1\r\n",,terminal_output 
+290,146355,"TERMINAL",0,0,"52M\t./lam_1751657975_72000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_72000/ocdbt.process_2\r\n419M\t./lam_1751657975_72000\r\n52M\t./lam_1751657975_133000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_133000/ocdbt.process_4\r\n",,terminal_output +291,146429,"TERMINAL",0,0,"1.8M\t./lam_1751657975_133000/array_metadatas\r\n",,terminal_output +292,146483,"TERMINAL",0,0,"52M\t./lam_1751657975_133000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_133000/ocdbt.process_3\r\n",,terminal_output +293,146535,"TERMINAL",0,0,"1.5M\t./lam_1751657975_133000/d\r\n",,terminal_output +294,146656,"TERMINAL",0,0,"52M\t./lam_1751657975_133000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_133000/ocdbt.process_5\r\n",,terminal_output +295,146781,"TERMINAL",0,0,"53M\t./lam_1751657975_133000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_133000/ocdbt.process_0\r\n52M\t./lam_1751657975_133000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_133000/ocdbt.process_7\r\n52M\t./lam_1751657975_133000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_133000/ocdbt.process_6\r\n52M\t./lam_1751657975_133000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_133000/ocdbt.process_1\r\n",,terminal_output +296,146988,"TERMINAL",0,0,"52M\t./lam_1751657975_133000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_133000/ocdbt.process_2\r\n420M\t./lam_1751657975_133000\r\n52M\t./lam_1751657975_35000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_35000/ocdbt.process_4\r\n",,terminal_output +297,147118,"TERMINAL",0,0,"1.8M\t./lam_1751657975_35000/array_metadatas\r\n52M\t./lam_1751657975_35000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_35000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_35000/d\r\n",,terminal_output +298,147244,"TERMINAL",0,0,"52M\t./lam_1751657975_35000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_35000/ocdbt.process_5\r\n",,terminal_output +299,147297,"TERMINAL",0,0,"52M\t./lam_1751657975_35000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_35000/ocdbt.process_0\r\n",,terminal_output +300,147355,"TERMINAL",0,0,"52M\t./lam_1751657975_35000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_35000/ocdbt.process_7\r\n52M\t./lam_1751657975_35000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_35000/ocdbt.process_6\r\n",,terminal_output +301,147435,"TERMINAL",0,0,"52M\t./lam_1751657975_35000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_35000/ocdbt.process_1\r\n52M\t./lam_1751657975_35000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_35000/ocdbt.process_2\r\n420M\t./lam_1751657975_35000\r\n52M\t./lam_1751657975_177000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_177000/ocdbt.process_4\r\n",,terminal_output +302,147486,"TERMINAL",0,0,"1.8M\t./lam_1751657975_177000/array_metadatas\r\n52M\t./lam_1751657975_177000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_177000/ocdbt.process_3\r\n",,terminal_output +303,147582,"TERMINAL",0,0,"1.5M\t./lam_1751657975_177000/d\r\n52M\t./lam_1751657975_177000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_177000/ocdbt.process_5\r\n",,terminal_output 
+304,147838,"TERMINAL",0,0,"52M\t./lam_1751657975_177000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_177000/ocdbt.process_0\r\n52M\t./lam_1751657975_177000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_177000/ocdbt.process_7\r\n52M\t./lam_1751657975_177000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_177000/ocdbt.process_6\r\n52M\t./lam_1751657975_177000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_177000/ocdbt.process_1\r\n52M\t./lam_1751657975_177000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_177000/ocdbt.process_2\r\n419M\t./lam_1751657975_177000\r\n52M\t./lam_1751657975_173000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_173000/ocdbt.process_4\r\n",,terminal_output +305,147980,"TERMINAL",0,0,"1.8M\t./lam_1751657975_173000/array_metadatas\r\n52M\t./lam_1751657975_173000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_173000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_173000/d\r\n",,terminal_output +306,148124,"TERMINAL",0,0,"52M\t./lam_1751657975_173000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_173000/ocdbt.process_5\r\n",,terminal_output +307,148424,"TERMINAL",0,0,"52M\t./lam_1751657975_173000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_173000/ocdbt.process_0\r\n",,terminal_output +308,148486,"TERMINAL",0,0,"52M\t./lam_1751657975_173000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_173000/ocdbt.process_7\r\n52M\t./lam_1751657975_173000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_173000/ocdbt.process_6\r\n52M\t./lam_1751657975_173000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_173000/ocdbt.process_1\r\n52M\t./lam_1751657975_173000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_173000/ocdbt.process_2\r\n419M\t./lam_1751657975_173000\r\n53M\t./lam_1751657975_2000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_2000/ocdbt.process_4\r\n",,terminal_output +309,148691,"TERMINAL",0,0,"1.8M\t./lam_1751657975_2000/array_metadatas\r\n53M\t./lam_1751657975_2000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_2000/ocdbt.process_3\r\n",,terminal_output +310,148766,"TERMINAL",0,0,"1.5M\t./lam_1751657975_2000/d\r\n53M\t./lam_1751657975_2000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_2000/ocdbt.process_5\r\n",,terminal_output +311,148833,"TERMINAL",0,0,"53M\t./lam_1751657975_2000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_2000/ocdbt.process_0\r\n",,terminal_output +312,148962,"TERMINAL",0,0,"54M\t./lam_1751657975_2000/ocdbt.process_7/d\r\n54M\t./lam_1751657975_2000/ocdbt.process_7\r\n53M\t./lam_1751657975_2000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_2000/ocdbt.process_6\r\n53M\t./lam_1751657975_2000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_2000/ocdbt.process_1\r\n53M\t./lam_1751657975_2000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_2000/ocdbt.process_2\r\n424M\t./lam_1751657975_2000\r\n52M\t./lam_1751657975_84000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_84000/ocdbt.process_4\r\n",,terminal_output +313,149130,"TERMINAL",0,0,"1.8M\t./lam_1751657975_84000/array_metadatas\r\n52M\t./lam_1751657975_84000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_84000/ocdbt.process_3\r\n",,terminal_output +314,149265,"TERMINAL",0,0,"1.5M\t./lam_1751657975_84000/d\r\n52M\t./lam_1751657975_84000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_84000/ocdbt.process_5\r\n",,terminal_output 
+315,149423,"TERMINAL",0,0,"52M\t./lam_1751657975_84000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_84000/ocdbt.process_0\r\n52M\t./lam_1751657975_84000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_84000/ocdbt.process_7\r\n52M\t./lam_1751657975_84000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_84000/ocdbt.process_6\r\n52M\t./lam_1751657975_84000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_84000/ocdbt.process_1\r\n52M\t./lam_1751657975_84000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_84000/ocdbt.process_2\r\n419M\t./lam_1751657975_84000\r\n52M\t./lam_1751657975_83000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_83000/ocdbt.process_4\r\n",,terminal_output +316,149582,"TERMINAL",0,0,"1.8M\t./lam_1751657975_83000/array_metadatas\r\n",,terminal_output +317,149830,"TERMINAL",0,0,"53M\t./lam_1751657975_83000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_83000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_83000/d\r\n52M\t./lam_1751657975_83000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_83000/ocdbt.process_5\r\n52M\t./lam_1751657975_83000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_83000/ocdbt.process_0\r\n52M\t./lam_1751657975_83000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_83000/ocdbt.process_7\r\n52M\t./lam_1751657975_83000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_83000/ocdbt.process_6\r\n52M\t./lam_1751657975_83000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_83000/ocdbt.process_1\r\n52M\t./lam_1751657975_83000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_83000/ocdbt.process_2\r\n420M\t./lam_1751657975_83000\r\n52M\t./lam_1751657975_157000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_157000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_157000/array_metadatas\r\n52M\t./lam_1751657975_157000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_157000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_157000/d\r\n",,terminal_output +318,149955,"TERMINAL",0,0,"52M\t./lam_1751657975_157000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_157000/ocdbt.process_5\r\n",,terminal_output +319,150372,"TERMINAL",0,0,"52M\t./lam_1751657975_157000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_157000/ocdbt.process_0\r\n53M\t./lam_1751657975_157000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_157000/ocdbt.process_7\r\n52M\t./lam_1751657975_157000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_157000/ocdbt.process_6\r\n52M\t./lam_1751657975_157000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_157000/ocdbt.process_1\r\n52M\t./lam_1751657975_157000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_157000/ocdbt.process_2\r\n420M\t./lam_1751657975_157000\r\n53M\t./lam_1751657975_27000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_27000/ocdbt.process_4\r\n",,terminal_output +320,150571,"TERMINAL",0,0,"1.8M\t./lam_1751657975_27000/array_metadatas\r\n53M\t./lam_1751657975_27000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_27000/ocdbt.process_3\r\n",,terminal_output +321,150637,"TERMINAL",0,0,"1.5M\t./lam_1751657975_27000/d\r\n",,terminal_output +322,150737,"TERMINAL",0,0,"52M\t./lam_1751657975_27000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_27000/ocdbt.process_5\r\n",,terminal_output +323,150821,"TERMINAL",0,0,"52M\t./lam_1751657975_27000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_27000/ocdbt.process_0\r\n",,terminal_output +324,150938,"TERMINAL",0,0,"53M\t./lam_1751657975_27000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_27000/ocdbt.process_7\r\n52M\t./lam_1751657975_27000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_27000/ocdbt.process_6\r\n",,terminal_output 
+325,151135,"TERMINAL",0,0,"52M\t./lam_1751657975_27000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_27000/ocdbt.process_1\r\n",,terminal_output +326,151270,"TERMINAL",0,0,"52M\t./lam_1751657975_27000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_27000/ocdbt.process_2\r\n420M\t./lam_1751657975_27000\r\n52M\t./lam_1751657975_52000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_52000/ocdbt.process_4\r\n",,terminal_output +327,151443,"TERMINAL",0,0,"1.8M\t./lam_1751657975_52000/array_metadatas\r\n52M\t./lam_1751657975_52000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_52000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_52000/d\r\n",,terminal_output +328,151605,"TERMINAL",0,0,"53M\t./lam_1751657975_52000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_52000/ocdbt.process_5\r\n",,terminal_output +329,151702,"TERMINAL",0,0,"52M\t./lam_1751657975_52000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_52000/ocdbt.process_0\r\n52M\t./lam_1751657975_52000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_52000/ocdbt.process_7\r\n52M\t./lam_1751657975_52000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_52000/ocdbt.process_6\r\n",,terminal_output +330,151816,"TERMINAL",0,0,"52M\t./lam_1751657975_52000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_52000/ocdbt.process_1\r\n",,terminal_output +331,151994,"TERMINAL",0,0,"52M\t./lam_1751657975_52000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_52000/ocdbt.process_2\r\n420M\t./lam_1751657975_52000\r\n53M\t./lam_1751657975_36000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_36000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_36000/array_metadatas\r\n53M\t./lam_1751657975_36000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_36000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_36000/d\r\n52M\t./lam_1751657975_36000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_36000/ocdbt.process_5\r\n52M\t./lam_1751657975_36000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_36000/ocdbt.process_0\r\n53M\t./lam_1751657975_36000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_36000/ocdbt.process_7\r\n52M\t./lam_1751657975_36000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_36000/ocdbt.process_6\r\n52M\t./lam_1751657975_36000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_36000/ocdbt.process_1\r\n",,terminal_output +332,152049,"TERMINAL",0,0,"52M\t./lam_1751657975_36000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_36000/ocdbt.process_2\r\n420M\t./lam_1751657975_36000\r\n53M\t./lam_1751657975_199000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_199000/ocdbt.process_4\r\n",,terminal_output +333,152242,"TERMINAL",0,0,"1.8M\t./lam_1751657975_199000/array_metadatas\r\n53M\t./lam_1751657975_199000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_199000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_199000/d\r\n",,terminal_output +334,152341,"TERMINAL",0,0,"52M\t./lam_1751657975_199000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_199000/ocdbt.process_5\r\n",,terminal_output +335,152493,"TERMINAL",0,0,"52M\t./lam_1751657975_199000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_199000/ocdbt.process_0\r\n53M\t./lam_1751657975_199000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_199000/ocdbt.process_7\r\n52M\t./lam_1751657975_199000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_199000/ocdbt.process_6\r\n52M\t./lam_1751657975_199000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_199000/ocdbt.process_1\r\n52M\t./lam_1751657975_199000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_199000/ocdbt.process_2\r\n420M\t./lam_1751657975_199000\r\n",,terminal_output 
+336,152605,"TERMINAL",0,0,"53M\t./lam_1751657975_146000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_146000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_146000/array_metadatas\r\n52M\t./lam_1751657975_146000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_146000/ocdbt.process_3\r\n",,terminal_output +337,152718,"TERMINAL",0,0,"1.5M\t./lam_1751657975_146000/d\r\n52M\t./lam_1751657975_146000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_146000/ocdbt.process_5\r\n52M\t./lam_1751657975_146000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_146000/ocdbt.process_0\r\n52M\t./lam_1751657975_146000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_146000/ocdbt.process_7\r\n52M\t./lam_1751657975_146000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_146000/ocdbt.process_6\r\n52M\t./lam_1751657975_146000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_146000/ocdbt.process_1\r\n52M\t./lam_1751657975_146000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_146000/ocdbt.process_2\r\n420M\t./lam_1751657975_146000\r\n52M\t./lam_1751657975_188000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_188000/ocdbt.process_4\r\n",,terminal_output +338,152834,"TERMINAL",0,0,"1.8M\t./lam_1751657975_188000/array_metadatas\r\n52M\t./lam_1751657975_188000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_188000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_188000/d\r\n",,terminal_output +339,152936,"TERMINAL",0,0,"52M\t./lam_1751657975_188000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_188000/ocdbt.process_5\r\n",,terminal_output +340,153101,"TERMINAL",0,0,"52M\t./lam_1751657975_188000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_188000/ocdbt.process_0\r\n52M\t./lam_1751657975_188000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_188000/ocdbt.process_7\r\n52M\t./lam_1751657975_188000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_188000/ocdbt.process_6\r\n52M\t./lam_1751657975_188000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_188000/ocdbt.process_1\r\n",,terminal_output +341,153214,"TERMINAL",0,0,"52M\t./lam_1751657975_188000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_188000/ocdbt.process_2\r\n419M\t./lam_1751657975_188000\r\n52M\t./lam_1751657975_179000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_179000/ocdbt.process_4\r\n",,terminal_output +342,153385,"TERMINAL",0,0,"1.8M\t./lam_1751657975_179000/array_metadatas\r\n52M\t./lam_1751657975_179000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_179000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_179000/d\r\n",,terminal_output +343,153658,"TERMINAL",0,0,"52M\t./lam_1751657975_179000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_179000/ocdbt.process_5\r\n52M\t./lam_1751657975_179000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_179000/ocdbt.process_0\r\n53M\t./lam_1751657975_179000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_179000/ocdbt.process_7\r\n53M\t./lam_1751657975_179000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_179000/ocdbt.process_6\r\n52M\t./lam_1751657975_179000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_179000/ocdbt.process_1\r\n52M\t./lam_1751657975_179000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_179000/ocdbt.process_2\r\n420M\t./lam_1751657975_179000\r\n52M\t./lam_1751657975_53000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_53000/ocdbt.process_4\r\n",,terminal_output +344,153886,"TERMINAL",0,0,"1.8M\t./lam_1751657975_53000/array_metadatas\r\n53M\t./lam_1751657975_53000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_53000/ocdbt.process_3\r\n",,terminal_output +345,154022,"TERMINAL",0,0,"1.5M\t./lam_1751657975_53000/d\r\n52M\t./lam_1751657975_53000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_53000/ocdbt.process_5\r\n",,terminal_output 
+346,154331,"TERMINAL",0,0,"52M\t./lam_1751657975_53000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_53000/ocdbt.process_0\r\n53M\t./lam_1751657975_53000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_53000/ocdbt.process_7\r\n53M\t./lam_1751657975_53000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_53000/ocdbt.process_6\r\n52M\t./lam_1751657975_53000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_53000/ocdbt.process_1\r\n52M\t./lam_1751657975_53000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_53000/ocdbt.process_2\r\n420M\t./lam_1751657975_53000\r\n52M\t./lam_1751657975_162000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_162000/ocdbt.process_4\r\n",,terminal_output +347,154410,"TERMINAL",0,0,"1.8M\t./lam_1751657975_162000/array_metadatas\r\n52M\t./lam_1751657975_162000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_162000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_162000/d\r\n",,terminal_output +348,154541,"TERMINAL",0,0,"52M\t./lam_1751657975_162000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_162000/ocdbt.process_5\r\n",,terminal_output +349,154755,"TERMINAL",0,0,"52M\t./lam_1751657975_162000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_162000/ocdbt.process_0\r\n53M\t./lam_1751657975_162000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_162000/ocdbt.process_7\r\n52M\t./lam_1751657975_162000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_162000/ocdbt.process_6\r\n52M\t./lam_1751657975_162000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_162000/ocdbt.process_1\r\n52M\t./lam_1751657975_162000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_162000/ocdbt.process_2\r\n420M\t./lam_1751657975_162000\r\n",,terminal_output +350,154807,"TERMINAL",0,0,"52M\t./lam_1751657975_132000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_132000/ocdbt.process_4\r\n",,terminal_output +351,154881,"TERMINAL",0,0,"1.8M\t./lam_1751657975_132000/array_metadatas\r\n53M\t./lam_1751657975_132000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_132000/ocdbt.process_3\r\n",,terminal_output +352,154934,"TERMINAL",0,0,"1.5M\t./lam_1751657975_132000/d\r\n52M\t./lam_1751657975_132000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_132000/ocdbt.process_5\r\n",,terminal_output +353,155019,"TERMINAL",0,0,"52M\t./lam_1751657975_132000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_132000/ocdbt.process_0\r\n52M\t./lam_1751657975_132000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_132000/ocdbt.process_7\r\n52M\t./lam_1751657975_132000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_132000/ocdbt.process_6\r\n52M\t./lam_1751657975_132000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_132000/ocdbt.process_1\r\n52M\t./lam_1751657975_132000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_132000/ocdbt.process_2\r\n419M\t./lam_1751657975_132000\r\n52M\t./lam_1751657975_19000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_19000/ocdbt.process_4\r\n",,terminal_output +354,155199,"TERMINAL",0,0,"1.8M\t./lam_1751657975_19000/array_metadatas\r\n52M\t./lam_1751657975_19000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_19000/ocdbt.process_3\r\n",,terminal_output +355,155315,"TERMINAL",0,0,"1.5M\t./lam_1751657975_19000/d\r\n52M\t./lam_1751657975_19000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_19000/ocdbt.process_5\r\n",,terminal_output +356,155387,"TERMINAL",0,0,"52M\t./lam_1751657975_19000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_19000/ocdbt.process_0\r\n53M\t./lam_1751657975_19000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_19000/ocdbt.process_7\r\n52M\t./lam_1751657975_19000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_19000/ocdbt.process_6\r\n",,terminal_output 
+357,155444,"TERMINAL",0,0,"52M\t./lam_1751657975_19000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_19000/ocdbt.process_1\r\n52M\t./lam_1751657975_19000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_19000/ocdbt.process_2\r\n420M\t./lam_1751657975_19000\r\n52M\t./lam_1751657975_131000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_131000/ocdbt.process_4\r\n",,terminal_output +358,155666,"TERMINAL",0,0,"1.8M\t./lam_1751657975_131000/array_metadatas\r\n52M\t./lam_1751657975_131000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_131000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_131000/d\r\n52M\t./lam_1751657975_131000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_131000/ocdbt.process_5\r\n",,terminal_output +359,155784,"TERMINAL",0,0,"52M\t./lam_1751657975_131000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_131000/ocdbt.process_0\r\n52M\t./lam_1751657975_131000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_131000/ocdbt.process_7\r\n52M\t./lam_1751657975_131000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_131000/ocdbt.process_6\r\n52M\t./lam_1751657975_131000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_131000/ocdbt.process_1\r\n52M\t./lam_1751657975_131000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_131000/ocdbt.process_2\r\n419M\t./lam_1751657975_131000\r\n52M\t./lam_1751657975_167000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_167000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_167000/array_metadatas\r\n52M\t./lam_1751657975_167000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_167000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_167000/d\r\n",,terminal_output +360,155874,"TERMINAL",0,0,"52M\t./lam_1751657975_167000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_167000/ocdbt.process_5\r\n52M\t./lam_1751657975_167000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_167000/ocdbt.process_0\r\n52M\t./lam_1751657975_167000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_167000/ocdbt.process_7\r\n52M\t./lam_1751657975_167000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_167000/ocdbt.process_6\r\n52M\t./lam_1751657975_167000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_167000/ocdbt.process_1\r\n52M\t./lam_1751657975_167000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_167000/ocdbt.process_2\r\n419M\t./lam_1751657975_167000\r\n",,terminal_output +361,155927,"TERMINAL",0,0,"52M\t./lam_1751657975_64000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_64000/ocdbt.process_4\r\n",,terminal_output +362,156028,"TERMINAL",0,0,"1.8M\t./lam_1751657975_64000/array_metadatas\r\n",,terminal_output +363,156137,"TERMINAL",0,0,"53M\t./lam_1751657975_64000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_64000/ocdbt.process_3\r\n",,terminal_output +364,156279,"TERMINAL",0,0,"1.5M\t./lam_1751657975_64000/d\r\n52M\t./lam_1751657975_64000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_64000/ocdbt.process_5\r\n",,terminal_output 
+365,156409,"TERMINAL",0,0,"52M\t./lam_1751657975_64000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_64000/ocdbt.process_0\r\n52M\t./lam_1751657975_64000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_64000/ocdbt.process_7\r\n52M\t./lam_1751657975_64000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_64000/ocdbt.process_6\r\n52M\t./lam_1751657975_64000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_64000/ocdbt.process_1\r\n52M\t./lam_1751657975_64000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_64000/ocdbt.process_2\r\n420M\t./lam_1751657975_64000\r\n52M\t./lam_1751657975_151000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_151000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_151000/array_metadatas\r\n52M\t./lam_1751657975_151000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_151000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_151000/d\r\n52M\t./lam_1751657975_151000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_151000/ocdbt.process_5\r\n",,terminal_output +366,156707,"TERMINAL",0,0,"52M\t./lam_1751657975_151000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_151000/ocdbt.process_0\r\n52M\t./lam_1751657975_151000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_151000/ocdbt.process_7\r\n52M\t./lam_1751657975_151000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_151000/ocdbt.process_6\r\n52M\t./lam_1751657975_151000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_151000/ocdbt.process_1\r\n52M\t./lam_1751657975_151000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_151000/ocdbt.process_2\r\n419M\t./lam_1751657975_151000\r\n52M\t./lam_1751657975_197000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_197000/ocdbt.process_4\r\n",,terminal_output +367,156896,"TERMINAL",0,0,"1.8M\t./lam_1751657975_197000/array_metadatas\r\n53M\t./lam_1751657975_197000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_197000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_197000/d\r\n52M\t./lam_1751657975_197000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_197000/ocdbt.process_5\r\n",,terminal_output +368,157023,"TERMINAL",0,0,"52M\t./lam_1751657975_197000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_197000/ocdbt.process_0\r\n52M\t./lam_1751657975_197000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_197000/ocdbt.process_7\r\n52M\t./lam_1751657975_197000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_197000/ocdbt.process_6\r\n52M\t./lam_1751657975_197000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_197000/ocdbt.process_1\r\n",,terminal_output +369,157099,"TERMINAL",0,0,"52M\t./lam_1751657975_197000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_197000/ocdbt.process_2\r\n419M\t./lam_1751657975_197000\r\n52M\t./lam_1751657975_37000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_37000/ocdbt.process_4\r\n",,terminal_output +370,157313,"TERMINAL",0,0,"1.8M\t./lam_1751657975_37000/array_metadatas\r\n53M\t./lam_1751657975_37000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_37000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_37000/d\r\n",,terminal_output +371,157444,"TERMINAL",0,0,"52M\t./lam_1751657975_37000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_37000/ocdbt.process_5\r\n",,terminal_output 
+372,157593,"TERMINAL",0,0,"52M\t./lam_1751657975_37000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_37000/ocdbt.process_0\r\n52M\t./lam_1751657975_37000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_37000/ocdbt.process_7\r\n52M\t./lam_1751657975_37000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_37000/ocdbt.process_6\r\n52M\t./lam_1751657975_37000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_37000/ocdbt.process_1\r\n52M\t./lam_1751657975_37000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_37000/ocdbt.process_2\r\n420M\t./lam_1751657975_37000\r\n52M\t./lam_1751657975_114000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_114000/ocdbt.process_4\r\n",,terminal_output +373,157768,"TERMINAL",0,0,"1.8M\t./lam_1751657975_114000/array_metadatas\r\n53M\t./lam_1751657975_114000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_114000/ocdbt.process_3\r\n",,terminal_output +374,157878,"TERMINAL",0,0,"1.5M\t./lam_1751657975_114000/d\r\n",,terminal_output +375,158033,"TERMINAL",0,0,"52M\t./lam_1751657975_114000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_114000/ocdbt.process_5\r\n",,terminal_output +376,158193,"TERMINAL",0,0,"52M\t./lam_1751657975_114000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_114000/ocdbt.process_0\r\n52M\t./lam_1751657975_114000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_114000/ocdbt.process_7\r\n52M\t./lam_1751657975_114000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_114000/ocdbt.process_6\r\n52M\t./lam_1751657975_114000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_114000/ocdbt.process_1\r\n52M\t./lam_1751657975_114000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_114000/ocdbt.process_2\r\n419M\t./lam_1751657975_114000\r\n52M\t./lam_1751657975_90000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_90000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_90000/array_metadatas\r\n53M\t./lam_1751657975_90000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_90000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_90000/d\r\n52M\t./lam_1751657975_90000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_90000/ocdbt.process_5\r\n",,terminal_output +377,158369,"TERMINAL",0,0,"52M\t./lam_1751657975_90000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_90000/ocdbt.process_0\r\n52M\t./lam_1751657975_90000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_90000/ocdbt.process_7\r\n52M\t./lam_1751657975_90000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_90000/ocdbt.process_6\r\n52M\t./lam_1751657975_90000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_90000/ocdbt.process_1\r\n52M\t./lam_1751657975_90000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_90000/ocdbt.process_2\r\n420M\t./lam_1751657975_90000\r\n52M\t./lam_1751657975_100000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_100000/ocdbt.process_4\r\n",,terminal_output +378,158651,"TERMINAL",0,0,"1.8M\t./lam_1751657975_100000/array_metadatas\r\n53M\t./lam_1751657975_100000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_100000/ocdbt.process_3\r\n",,terminal_output +379,158742,"TERMINAL",0,0,"1.5M\t./lam_1751657975_100000/d\r\n",,terminal_output +380,158822,"TERMINAL",0,0,"52M\t./lam_1751657975_100000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_100000/ocdbt.process_5\r\n",,terminal_output +381,158983,"TERMINAL",0,0,"52M\t./lam_1751657975_100000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_100000/ocdbt.process_0\r\n52M\t./lam_1751657975_100000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_100000/ocdbt.process_7\r\n52M\t./lam_1751657975_100000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_100000/ocdbt.process_6\r\n",,terminal_output 
+382,159134,"TERMINAL",0,0,"52M\t./lam_1751657975_100000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_100000/ocdbt.process_1\r\n52M\t./lam_1751657975_100000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_100000/ocdbt.process_2\r\n420M\t./lam_1751657975_100000\r\n52M\t./lam_1751657975_141000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_141000/ocdbt.process_4\r\n",,terminal_output +383,159340,"TERMINAL",0,0,"1.8M\t./lam_1751657975_141000/array_metadatas\r\n52M\t./lam_1751657975_141000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_141000/ocdbt.process_3\r\n",,terminal_output +384,159447,"TERMINAL",0,0,"1.5M\t./lam_1751657975_141000/d\r\n",,terminal_output +385,159560,"TERMINAL",0,0,"52M\t./lam_1751657975_141000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_141000/ocdbt.process_5\r\n",,terminal_output +386,159928,"TERMINAL",0,0,"52M\t./lam_1751657975_141000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_141000/ocdbt.process_0\r\n53M\t./lam_1751657975_141000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_141000/ocdbt.process_7\r\n52M\t./lam_1751657975_141000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_141000/ocdbt.process_6\r\n52M\t./lam_1751657975_141000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_141000/ocdbt.process_1\r\n52M\t./lam_1751657975_141000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_141000/ocdbt.process_2\r\n419M\t./lam_1751657975_141000\r\n52M\t./lam_1751657975_96000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_96000/ocdbt.process_4\r\n",,terminal_output +387,160147,"TERMINAL",0,0,"1.8M\t./lam_1751657975_96000/array_metadatas\r\n53M\t./lam_1751657975_96000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_96000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_96000/d\r\n52M\t./lam_1751657975_96000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_96000/ocdbt.process_5\r\n",,terminal_output +388,160296,"TERMINAL",0,0,"52M\t./lam_1751657975_96000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_96000/ocdbt.process_0\r\n52M\t./lam_1751657975_96000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_96000/ocdbt.process_7\r\n52M\t./lam_1751657975_96000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_96000/ocdbt.process_6\r\n52M\t./lam_1751657975_96000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_96000/ocdbt.process_1\r\n52M\t./lam_1751657975_96000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_96000/ocdbt.process_2\r\n420M\t./lam_1751657975_96000\r\n",,terminal_output +389,160484,"TERMINAL",0,0,"53M\t./lam_1751657975_175000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_175000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_175000/array_metadatas\r\n52M\t./lam_1751657975_175000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_175000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_175000/d\r\n52M\t./lam_1751657975_175000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_175000/ocdbt.process_5\r\n",,terminal_output +390,160603,"TERMINAL",0,0,"52M\t./lam_1751657975_175000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_175000/ocdbt.process_0\r\n",,terminal_output +391,160698,"TERMINAL",0,0,"53M\t./lam_1751657975_175000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_175000/ocdbt.process_7\r\n",,terminal_output +392,160818,"TERMINAL",0,0,"52M\t./lam_1751657975_175000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_175000/ocdbt.process_6\r\n52M\t./lam_1751657975_175000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_175000/ocdbt.process_1\r\n52M\t./lam_1751657975_175000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_175000/ocdbt.process_2\r\n420M\t./lam_1751657975_175000\r\n52M\t./lam_1751657975_103000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_103000/ocdbt.process_4\r\n",,terminal_output 
+393,160881,"TERMINAL",0,0,"1.8M\t./lam_1751657975_103000/array_metadatas\r\n52M\t./lam_1751657975_103000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_103000/ocdbt.process_3\r\n",,terminal_output +394,161007,"TERMINAL",0,0,"1.5M\t./lam_1751657975_103000/d\r\n52M\t./lam_1751657975_103000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_103000/ocdbt.process_5\r\n",,terminal_output +395,161122,"TERMINAL",0,0,"52M\t./lam_1751657975_103000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_103000/ocdbt.process_0\r\n52M\t./lam_1751657975_103000/ocdbt.process_7/d\r\n",,terminal_output +396,161214,"TERMINAL",0,0,"52M\t./lam_1751657975_103000/ocdbt.process_7\r\n52M\t./lam_1751657975_103000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_103000/ocdbt.process_6\r\n52M\t./lam_1751657975_103000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_103000/ocdbt.process_1\r\n52M\t./lam_1751657975_103000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_103000/ocdbt.process_2\r\n419M\t./lam_1751657975_103000\r\n52M\t./lam_1751657975_189000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_189000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_189000/array_metadatas\r\n52M\t./lam_1751657975_189000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_189000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_189000/d\r\n52M\t./lam_1751657975_189000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_189000/ocdbt.process_5\r\n",,terminal_output +397,161330,"TERMINAL",0,0,"53M\t./lam_1751657975_189000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_189000/ocdbt.process_0\r\n",,terminal_output +398,161500,"TERMINAL",0,0,"53M\t./lam_1751657975_189000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_189000/ocdbt.process_7\r\n52M\t./lam_1751657975_189000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_189000/ocdbt.process_6\r\n52M\t./lam_1751657975_189000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_189000/ocdbt.process_1\r\n52M\t./lam_1751657975_189000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_189000/ocdbt.process_2\r\n419M\t./lam_1751657975_189000\r\n52M\t./lam_1751657975_198000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_198000/ocdbt.process_4\r\n",,terminal_output +399,161775,"TERMINAL",0,0,"1.8M\t./lam_1751657975_198000/array_metadatas\r\n52M\t./lam_1751657975_198000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_198000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_198000/d\r\n52M\t./lam_1751657975_198000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_198000/ocdbt.process_5\r\n52M\t./lam_1751657975_198000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_198000/ocdbt.process_0\r\n53M\t./lam_1751657975_198000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_198000/ocdbt.process_7\r\n52M\t./lam_1751657975_198000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_198000/ocdbt.process_6\r\n52M\t./lam_1751657975_198000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_198000/ocdbt.process_1\r\n52M\t./lam_1751657975_198000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_198000/ocdbt.process_2\r\n419M\t./lam_1751657975_198000\r\n52M\t./lam_1751657975_97000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_97000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_97000/array_metadatas\r\n52M\t./lam_1751657975_97000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_97000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_97000/d\r\n",,terminal_output +400,161883,"TERMINAL",0,0,"52M\t./lam_1751657975_97000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_97000/ocdbt.process_5\r\n",,terminal_output 
+401,162082,"TERMINAL",0,0,"52M\t./lam_1751657975_97000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_97000/ocdbt.process_0\r\n53M\t./lam_1751657975_97000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_97000/ocdbt.process_7\r\n52M\t./lam_1751657975_97000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_97000/ocdbt.process_6\r\n",,terminal_output +402,162251,"TERMINAL",0,0,"52M\t./lam_1751657975_97000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_97000/ocdbt.process_1\r\n52M\t./lam_1751657975_97000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_97000/ocdbt.process_2\r\n419M\t./lam_1751657975_97000\r\n52M\t./lam_1751657975_161000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_161000/ocdbt.process_4\r\n",,terminal_output +403,162403,"TERMINAL",0,0,"1.8M\t./lam_1751657975_161000/array_metadatas\r\n52M\t./lam_1751657975_161000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_161000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_161000/d\r\n52M\t./lam_1751657975_161000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_161000/ocdbt.process_5\r\n",,terminal_output +404,162631,"TERMINAL",0,0,"52M\t./lam_1751657975_161000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_161000/ocdbt.process_0\r\n52M\t./lam_1751657975_161000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_161000/ocdbt.process_7\r\n52M\t./lam_1751657975_161000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_161000/ocdbt.process_6\r\n52M\t./lam_1751657975_161000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_161000/ocdbt.process_1\r\n52M\t./lam_1751657975_161000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_161000/ocdbt.process_2\r\n419M\t./lam_1751657975_161000\r\n52M\t./lam_1751657975_40000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_40000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_40000/array_metadatas\r\n52M\t./lam_1751657975_40000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_40000/ocdbt.process_3\r\n",,terminal_output +405,162774,"TERMINAL",0,0,"1.5M\t./lam_1751657975_40000/d\r\n",,terminal_output +406,162871,"TERMINAL",0,0,"52M\t./lam_1751657975_40000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_40000/ocdbt.process_5\r\n",,terminal_output +407,163007,"TERMINAL",0,0,"52M\t./lam_1751657975_40000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_40000/ocdbt.process_0\r\n52M\t./lam_1751657975_40000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_40000/ocdbt.process_7\r\n52M\t./lam_1751657975_40000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_40000/ocdbt.process_6\r\n52M\t./lam_1751657975_40000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_40000/ocdbt.process_1\r\n",,terminal_output +408,163082,"TERMINAL",0,0,"52M\t./lam_1751657975_40000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_40000/ocdbt.process_2\r\n420M\t./lam_1751657975_40000\r\n53M\t./lam_1751657975_61000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_61000/ocdbt.process_4\r\n",,terminal_output +409,163207,"TERMINAL",0,0,"1.8M\t./lam_1751657975_61000/array_metadatas\r\n53M\t./lam_1751657975_61000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_61000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_61000/d\r\n",,terminal_output 
+410,163367,"TERMINAL",0,0,"52M\t./lam_1751657975_61000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_61000/ocdbt.process_5\r\n53M\t./lam_1751657975_61000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_61000/ocdbt.process_0\r\n53M\t./lam_1751657975_61000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_61000/ocdbt.process_7\r\n52M\t./lam_1751657975_61000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_61000/ocdbt.process_6\r\n53M\t./lam_1751657975_61000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_61000/ocdbt.process_1\r\n53M\t./lam_1751657975_61000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_61000/ocdbt.process_2\r\n421M\t./lam_1751657975_61000\r\n52M\t./lam_1751657975_130000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_130000/ocdbt.process_4\r\n",,terminal_output +411,163417,"TERMINAL",0,0,"1.8M\t./lam_1751657975_130000/array_metadatas\r\n",,terminal_output +412,163478,"TERMINAL",0,0,"53M\t./lam_1751657975_130000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_130000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_130000/d\r\n52M\t./lam_1751657975_130000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_130000/ocdbt.process_5\r\n",,terminal_output +413,163540,"TERMINAL",0,0,"52M\t./lam_1751657975_130000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_130000/ocdbt.process_0\r\n52M\t./lam_1751657975_130000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_130000/ocdbt.process_7\r\n52M\t./lam_1751657975_130000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_130000/ocdbt.process_6\r\n52M\t./lam_1751657975_130000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_130000/ocdbt.process_1\r\n52M\t./lam_1751657975_130000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_130000/ocdbt.process_2\r\n419M\t./lam_1751657975_130000\r\n52M\t./lam_1751657975_46000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_46000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_46000/array_metadatas\r\n",,terminal_output +414,163639,"TERMINAL",0,0,"53M\t./lam_1751657975_46000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_46000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_46000/d\r\n",,terminal_output +415,163849,"TERMINAL",0,0,"52M\t./lam_1751657975_46000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_46000/ocdbt.process_5\r\n52M\t./lam_1751657975_46000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_46000/ocdbt.process_0\r\n52M\t./lam_1751657975_46000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_46000/ocdbt.process_7\r\n52M\t./lam_1751657975_46000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_46000/ocdbt.process_6\r\n52M\t./lam_1751657975_46000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_46000/ocdbt.process_1\r\n52M\t./lam_1751657975_46000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_46000/ocdbt.process_2\r\n420M\t./lam_1751657975_46000\r\n53M\t./lam_1751657975_14000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_14000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_14000/array_metadatas\r\n53M\t./lam_1751657975_14000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_14000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_14000/d\r\n52M\t./lam_1751657975_14000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_14000/ocdbt.process_5\r\n",,terminal_output 
+416,164060,"TERMINAL",0,0,"52M\t./lam_1751657975_14000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_14000/ocdbt.process_0\r\n53M\t./lam_1751657975_14000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_14000/ocdbt.process_7\r\n52M\t./lam_1751657975_14000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_14000/ocdbt.process_6\r\n52M\t./lam_1751657975_14000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_14000/ocdbt.process_1\r\n52M\t./lam_1751657975_14000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_14000/ocdbt.process_2\r\n421M\t./lam_1751657975_14000\r\n53M\t./lam_1751657975_1000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_1000/ocdbt.process_4\r\n",,terminal_output +417,164361,"TERMINAL",0,0,"1.8M\t./lam_1751657975_1000/array_metadatas\r\n53M\t./lam_1751657975_1000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_1000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_1000/d\r\n",,terminal_output +418,164499,"TERMINAL",0,0,"53M\t./lam_1751657975_1000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_1000/ocdbt.process_5\r\n",,terminal_output +419,164882,"TERMINAL",0,0,"53M\t./lam_1751657975_1000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_1000/ocdbt.process_0\r\n53M\t./lam_1751657975_1000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_1000/ocdbt.process_7\r\n53M\t./lam_1751657975_1000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_1000/ocdbt.process_6\r\n53M\t./lam_1751657975_1000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_1000/ocdbt.process_1\r\n53M\t./lam_1751657975_1000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_1000/ocdbt.process_2\r\n422M\t./lam_1751657975_1000\r\n52M\t./lam_1751657975_32000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_32000/ocdbt.process_4\r\n",,terminal_output +420,165071,"TERMINAL",0,0,"1.8M\t./lam_1751657975_32000/array_metadatas\r\n52M\t./lam_1751657975_32000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_32000/ocdbt.process_3\r\n",,terminal_output +421,165124,"TERMINAL",0,0,"1.5M\t./lam_1751657975_32000/d\r\n52M\t./lam_1751657975_32000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_32000/ocdbt.process_5\r\n",,terminal_output +422,165290,"TERMINAL",0,0,"52M\t./lam_1751657975_32000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_32000/ocdbt.process_0\r\n52M\t./lam_1751657975_32000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_32000/ocdbt.process_7\r\n52M\t./lam_1751657975_32000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_32000/ocdbt.process_6\r\n52M\t./lam_1751657975_32000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_32000/ocdbt.process_1\r\n52M\t./lam_1751657975_32000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_32000/ocdbt.process_2\r\n419M\t./lam_1751657975_32000\r\n52M\t./lam_1751657975_163000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_163000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_163000/array_metadatas\r\n52M\t./lam_1751657975_163000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_163000/ocdbt.process_3\r\n",,terminal_output +423,165400,"TERMINAL",0,0,"1.5M\t./lam_1751657975_163000/d\r\n",,terminal_output +424,165497,"TERMINAL",0,0,"52M\t./lam_1751657975_163000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_163000/ocdbt.process_5\r\n",,terminal_output 
+425,165776,"TERMINAL",0,0,"52M\t./lam_1751657975_163000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_163000/ocdbt.process_0\r\n53M\t./lam_1751657975_163000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_163000/ocdbt.process_7\r\n52M\t./lam_1751657975_163000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_163000/ocdbt.process_6\r\n52M\t./lam_1751657975_163000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_163000/ocdbt.process_1\r\n52M\t./lam_1751657975_163000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_163000/ocdbt.process_2\r\n419M\t./lam_1751657975_163000\r\n53M\t./lam_1751657975_42000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_42000/ocdbt.process_4\r\n",,terminal_output +426,165903,"TERMINAL",0,0,"1.8M\t./lam_1751657975_42000/array_metadatas\r\n",,terminal_output +427,165970,"TERMINAL",0,0,"53M\t./lam_1751657975_42000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_42000/ocdbt.process_3\r\n",,terminal_output +428,166080,"TERMINAL",0,0,"1.5M\t./lam_1751657975_42000/d\r\n52M\t./lam_1751657975_42000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_42000/ocdbt.process_5\r\n",,terminal_output +429,166390,"TERMINAL",0,0,"52M\t./lam_1751657975_42000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_42000/ocdbt.process_0\r\n52M\t./lam_1751657975_42000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_42000/ocdbt.process_7\r\n52M\t./lam_1751657975_42000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_42000/ocdbt.process_6\r\n52M\t./lam_1751657975_42000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_42000/ocdbt.process_1\r\n52M\t./lam_1751657975_42000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_42000/ocdbt.process_2\r\n421M\t./lam_1751657975_42000\r\n52M\t./lam_1751657975_190000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_190000/ocdbt.process_4\r\n",,terminal_output +430,166542,"TERMINAL",0,0,"1.8M\t./lam_1751657975_190000/array_metadatas\r\n",,terminal_output +431,166650,"TERMINAL",0,0,"53M\t./lam_1751657975_190000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_190000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_190000/d\r\n",,terminal_output +432,166707,"TERMINAL",0,0,"52M\t./lam_1751657975_190000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_190000/ocdbt.process_5\r\n",,terminal_output +433,167083,"TERMINAL",0,0,"53M\t./lam_1751657975_190000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_190000/ocdbt.process_0\r\n52M\t./lam_1751657975_190000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_190000/ocdbt.process_7\r\n52M\t./lam_1751657975_190000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_190000/ocdbt.process_6\r\n52M\t./lam_1751657975_190000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_190000/ocdbt.process_1\r\n52M\t./lam_1751657975_190000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_190000/ocdbt.process_2\r\n420M\t./lam_1751657975_190000\r\n53M\t./lam_1751657975_11000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_11000/ocdbt.process_4\r\n",,terminal_output +434,167183,"TERMINAL",0,0,"1.8M\t./lam_1751657975_11000/array_metadatas\r\n53M\t./lam_1751657975_11000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_11000/ocdbt.process_3\r\n",,terminal_output +435,167236,"TERMINAL",0,0,"1.5M\t./lam_1751657975_11000/d\r\n",,terminal_output 
+436,167347,"TERMINAL",0,0,"53M\t./lam_1751657975_11000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_11000/ocdbt.process_5\r\n53M\t./lam_1751657975_11000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_11000/ocdbt.process_0\r\n53M\t./lam_1751657975_11000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_11000/ocdbt.process_7\r\n53M\t./lam_1751657975_11000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_11000/ocdbt.process_6\r\n52M\t./lam_1751657975_11000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_11000/ocdbt.process_1\r\n53M\t./lam_1751657975_11000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_11000/ocdbt.process_2\r\n421M\t./lam_1751657975_11000\r\n52M\t./lam_1751657975_126000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_126000/ocdbt.process_4\r\n",,terminal_output +437,167540,"TERMINAL",0,0,"1.8M\t./lam_1751657975_126000/array_metadatas\r\n52M\t./lam_1751657975_126000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_126000/ocdbt.process_3\r\n",,terminal_output +438,167657,"TERMINAL",0,0,"1.5M\t./lam_1751657975_126000/d\r\n",,terminal_output +439,167722,"TERMINAL",0,0,"52M\t./lam_1751657975_126000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_126000/ocdbt.process_5\r\n",,terminal_output +440,167831,"TERMINAL",0,0,"52M\t./lam_1751657975_126000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_126000/ocdbt.process_0\r\n52M\t./lam_1751657975_126000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_126000/ocdbt.process_7\r\n",,terminal_output +441,167904,"TERMINAL",0,0,"52M\t./lam_1751657975_126000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_126000/ocdbt.process_6\r\n52M\t./lam_1751657975_126000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_126000/ocdbt.process_1\r\n",,terminal_output +442,167980,"TERMINAL",0,0,"52M\t./lam_1751657975_126000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_126000/ocdbt.process_2\r\n419M\t./lam_1751657975_126000\r\n52M\t./lam_1751657975_101000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_101000/ocdbt.process_4\r\n",,terminal_output +443,168135,"TERMINAL",0,0,"1.8M\t./lam_1751657975_101000/array_metadatas\r\n",,terminal_output +444,168395,"TERMINAL",0,0,"53M\t./lam_1751657975_101000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_101000/ocdbt.process_3\r\n",,terminal_output +445,168507,"TERMINAL",0,0,"1.5M\t./lam_1751657975_101000/d\r\n52M\t./lam_1751657975_101000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_101000/ocdbt.process_5\r\n",,terminal_output +446,168611,"TERMINAL",0,0,"52M\t./lam_1751657975_101000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_101000/ocdbt.process_0\r\n52M\t./lam_1751657975_101000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_101000/ocdbt.process_7\r\n52M\t./lam_1751657975_101000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_101000/ocdbt.process_6\r\n52M\t./lam_1751657975_101000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_101000/ocdbt.process_1\r\n52M\t./lam_1751657975_101000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_101000/ocdbt.process_2\r\n420M\t./lam_1751657975_101000\r\n52M\t./lam_1751657975_50000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_50000/ocdbt.process_4\r\n",,terminal_output +447,168708,"TERMINAL",0,0,"1.8M\t./lam_1751657975_50000/array_metadatas\r\n52M\t./lam_1751657975_50000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_50000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_50000/d\r\n",,terminal_output +448,168805,"TERMINAL",0,0,"52M\t./lam_1751657975_50000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_50000/ocdbt.process_5\r\n52M\t./lam_1751657975_50000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_50000/ocdbt.process_0\r\n",,terminal_output 
+449,168865,"TERMINAL",0,0,"52M\t./lam_1751657975_50000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_50000/ocdbt.process_7\r\n52M\t./lam_1751657975_50000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_50000/ocdbt.process_6\r\n",,terminal_output +450,168962,"TERMINAL",0,0,"52M\t./lam_1751657975_50000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_50000/ocdbt.process_1\r\n52M\t./lam_1751657975_50000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_50000/ocdbt.process_2\r\n419M\t./lam_1751657975_50000\r\n52M\t./lam_1751657975_138000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_138000/ocdbt.process_4\r\n",,terminal_output +451,169148,"TERMINAL",0,0,"1.8M\t./lam_1751657975_138000/array_metadatas\r\n53M\t./lam_1751657975_138000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_138000/ocdbt.process_3\r\n",,terminal_output +452,169257,"TERMINAL",0,0,"1.5M\t./lam_1751657975_138000/d\r\n52M\t./lam_1751657975_138000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_138000/ocdbt.process_5\r\n",,terminal_output +453,169424,"TERMINAL",0,0,"52M\t./lam_1751657975_138000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_138000/ocdbt.process_0\r\n52M\t./lam_1751657975_138000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_138000/ocdbt.process_7\r\n52M\t./lam_1751657975_138000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_138000/ocdbt.process_6\r\n52M\t./lam_1751657975_138000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_138000/ocdbt.process_1\r\n52M\t./lam_1751657975_138000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_138000/ocdbt.process_2\r\n419M\t./lam_1751657975_138000\r\n52M\t./lam_1751657975_91000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_91000/ocdbt.process_4\r\n",,terminal_output +454,169648,"TERMINAL",0,0,"1.8M\t./lam_1751657975_91000/array_metadatas\r\n52M\t./lam_1751657975_91000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_91000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_91000/d\r\n52M\t./lam_1751657975_91000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_91000/ocdbt.process_5\r\n52M\t./lam_1751657975_91000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_91000/ocdbt.process_0\r\n52M\t./lam_1751657975_91000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_91000/ocdbt.process_7\r\n52M\t./lam_1751657975_91000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_91000/ocdbt.process_6\r\n52M\t./lam_1751657975_91000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_91000/ocdbt.process_1\r\n52M\t./lam_1751657975_91000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_91000/ocdbt.process_2\r\n420M\t./lam_1751657975_91000\r\n53M\t./lam_1751657975_20000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_20000/ocdbt.process_4\r\n",,terminal_output +455,169748,"TERMINAL",0,0,"1.8M\t./lam_1751657975_20000/array_metadatas\r\n53M\t./lam_1751657975_20000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_20000/ocdbt.process_3\r\n",,terminal_output +456,169854,"TERMINAL",0,0,"1.5M\t./lam_1751657975_20000/d\r\n",,terminal_output +457,169922,"TERMINAL",0,0,"52M\t./lam_1751657975_20000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_20000/ocdbt.process_5\r\n",,terminal_output +458,169975,"TERMINAL",0,0,"52M\t./lam_1751657975_20000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_20000/ocdbt.process_0\r\n52M\t./lam_1751657975_20000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_20000/ocdbt.process_7\r\n52M\t./lam_1751657975_20000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_20000/ocdbt.process_6\r\n",,terminal_output 
+459,170045,"TERMINAL",0,0,"52M\t./lam_1751657975_20000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_20000/ocdbt.process_1\r\n52M\t./lam_1751657975_20000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_20000/ocdbt.process_2\r\n421M\t./lam_1751657975_20000\r\n52M\t./lam_1751657975_89000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_89000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_89000/array_metadatas\r\n52M\t./lam_1751657975_89000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_89000/ocdbt.process_3\r\n",,terminal_output +460,170147,"TERMINAL",0,0,"1.5M\t./lam_1751657975_89000/d\r\n52M\t./lam_1751657975_89000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_89000/ocdbt.process_5\r\n",,terminal_output +461,170238,"TERMINAL",0,0,"52M\t./lam_1751657975_89000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_89000/ocdbt.process_0\r\n52M\t./lam_1751657975_89000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_89000/ocdbt.process_7\r\n52M\t./lam_1751657975_89000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_89000/ocdbt.process_6\r\n52M\t./lam_1751657975_89000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_89000/ocdbt.process_1\r\n52M\t./lam_1751657975_89000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_89000/ocdbt.process_2\r\n419M\t./lam_1751657975_89000\r\n52M\t./lam_1751657975_69000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_69000/ocdbt.process_4\r\n",,terminal_output +462,170340,"TERMINAL",0,0,"1.8M\t./lam_1751657975_69000/array_metadatas\r\n53M\t./lam_1751657975_69000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_69000/ocdbt.process_3\r\n",,terminal_output +463,170469,"TERMINAL",0,0,"1.5M\t./lam_1751657975_69000/d\r\n52M\t./lam_1751657975_69000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_69000/ocdbt.process_5\r\n",,terminal_output +464,170681,"TERMINAL",0,0,"52M\t./lam_1751657975_69000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_69000/ocdbt.process_0\r\n52M\t./lam_1751657975_69000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_69000/ocdbt.process_7\r\n52M\t./lam_1751657975_69000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_69000/ocdbt.process_6\r\n52M\t./lam_1751657975_69000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_69000/ocdbt.process_1\r\n52M\t./lam_1751657975_69000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_69000/ocdbt.process_2\r\n420M\t./lam_1751657975_69000\r\n53M\t./lam_1751657975_4000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_4000/ocdbt.process_4\r\n",,terminal_output +465,170777,"TERMINAL",0,0,"1.8M\t./lam_1751657975_4000/array_metadatas\r\n",,terminal_output +466,170850,"TERMINAL",0,0,"53M\t./lam_1751657975_4000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_4000/ocdbt.process_3\r\n",,terminal_output +467,170920,"TERMINAL",0,0,"1.5M\t./lam_1751657975_4000/d\r\n",,terminal_output +468,171045,"TERMINAL",0,0,"53M\t./lam_1751657975_4000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_4000/ocdbt.process_5\r\n",,terminal_output +469,171235,"TERMINAL",0,0,"53M\t./lam_1751657975_4000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_4000/ocdbt.process_0\r\n53M\t./lam_1751657975_4000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_4000/ocdbt.process_7\r\n53M\t./lam_1751657975_4000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_4000/ocdbt.process_6\r\n53M\t./lam_1751657975_4000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_4000/ocdbt.process_1\r\n53M\t./lam_1751657975_4000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_4000/ocdbt.process_2\r\n422M\t./lam_1751657975_4000\r\n52M\t./lam_1751657975_185000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_185000/ocdbt.process_4\r\n",,terminal_output 
+470,171720,"TERMINAL",0,0,"1.8M\t./lam_1751657975_185000/array_metadatas\r\n52M\t./lam_1751657975_185000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_185000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_185000/d\r\n52M\t./lam_1751657975_185000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_185000/ocdbt.process_5\r\n",,terminal_output +471,172066,"TERMINAL",0,0,"52M\t./lam_1751657975_185000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_185000/ocdbt.process_0\r\n52M\t./lam_1751657975_185000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_185000/ocdbt.process_7\r\n52M\t./lam_1751657975_185000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_185000/ocdbt.process_6\r\n52M\t./lam_1751657975_185000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_185000/ocdbt.process_1\r\n52M\t./lam_1751657975_185000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_185000/ocdbt.process_2\r\n419M\t./lam_1751657975_185000\r\n52M\t./lam_1751657975_34000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_34000/ocdbt.process_4\r\n",,terminal_output +472,172231,"TERMINAL",0,0,"1.8M\t./lam_1751657975_34000/array_metadatas\r\n53M\t./lam_1751657975_34000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_34000/ocdbt.process_3\r\n",,terminal_output +473,172303,"TERMINAL",0,0,"1.5M\t./lam_1751657975_34000/d\r\n52M\t./lam_1751657975_34000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_34000/ocdbt.process_5\r\n",,terminal_output +474,172688,"TERMINAL",0,0,"52M\t./lam_1751657975_34000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_34000/ocdbt.process_0\r\n52M\t./lam_1751657975_34000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_34000/ocdbt.process_7\r\n52M\t./lam_1751657975_34000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_34000/ocdbt.process_6\r\n52M\t./lam_1751657975_34000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_34000/ocdbt.process_1\r\n52M\t./lam_1751657975_34000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_34000/ocdbt.process_2\r\n420M\t./lam_1751657975_34000\r\n52M\t./lam_1751657975_128000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_128000/ocdbt.process_4\r\n",,terminal_output +475,172844,"TERMINAL",0,0,"1.8M\t./lam_1751657975_128000/array_metadatas\r\n52M\t./lam_1751657975_128000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_128000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_128000/d\r\n",,terminal_output +476,172914,"TERMINAL",0,0,"52M\t./lam_1751657975_128000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_128000/ocdbt.process_5\r\n",,terminal_output +477,173176,"TERMINAL",0,0,"52M\t./lam_1751657975_128000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_128000/ocdbt.process_0\r\n52M\t./lam_1751657975_128000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_128000/ocdbt.process_7\r\n52M\t./lam_1751657975_128000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_128000/ocdbt.process_6\r\n",,terminal_output +478,173413,"TERMINAL",0,0,"52M\t./lam_1751657975_128000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_128000/ocdbt.process_1\r\n52M\t./lam_1751657975_128000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_128000/ocdbt.process_2\r\n419M\t./lam_1751657975_128000\r\n52M\t./lam_1751657975_113000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_113000/ocdbt.process_4\r\n",,terminal_output +479,173654,"TERMINAL",0,0,"1.8M\t./lam_1751657975_113000/array_metadatas\r\n",,terminal_output +480,173751,"TERMINAL",0,0,"53M\t./lam_1751657975_113000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_113000/ocdbt.process_3\r\n",,terminal_output 
+481,173915,"TERMINAL",0,0,"1.5M\t./lam_1751657975_113000/d\r\n52M\t./lam_1751657975_113000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_113000/ocdbt.process_5\r\n52M\t./lam_1751657975_113000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_113000/ocdbt.process_0\r\n52M\t./lam_1751657975_113000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_113000/ocdbt.process_7\r\n52M\t./lam_1751657975_113000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_113000/ocdbt.process_6\r\n52M\t./lam_1751657975_113000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_113000/ocdbt.process_1\r\n52M\t./lam_1751657975_113000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_113000/ocdbt.process_2\r\n420M\t./lam_1751657975_113000\r\n53M\t./lam_1751657975_13000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_13000/ocdbt.process_4\r\n",,terminal_output +482,174023,"TERMINAL",0,0,"1.8M\t./lam_1751657975_13000/array_metadatas\r\n53M\t./lam_1751657975_13000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_13000/ocdbt.process_3\r\n",,terminal_output +483,174157,"TERMINAL",0,0,"1.5M\t./lam_1751657975_13000/d\r\n52M\t./lam_1751657975_13000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_13000/ocdbt.process_5\r\n",,terminal_output +484,174239,"TERMINAL",0,0,"53M\t./lam_1751657975_13000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_13000/ocdbt.process_0\r\n53M\t./lam_1751657975_13000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_13000/ocdbt.process_7\r\n52M\t./lam_1751657975_13000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_13000/ocdbt.process_6\r\n52M\t./lam_1751657975_13000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_13000/ocdbt.process_1\r\n52M\t./lam_1751657975_13000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_13000/ocdbt.process_2\r\n422M\t./lam_1751657975_13000\r\n52M\t./lam_1751657975_55000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_55000/ocdbt.process_4\r\n",,terminal_output +485,174372,"TERMINAL",0,0,"1.8M\t./lam_1751657975_55000/array_metadatas\r\n52M\t./lam_1751657975_55000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_55000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_55000/d\r\n",,terminal_output +486,174425,"TERMINAL",0,0,"52M\t./lam_1751657975_55000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_55000/ocdbt.process_5\r\n",,terminal_output +487,174523,"TERMINAL",0,0,"53M\t./lam_1751657975_55000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_55000/ocdbt.process_0\r\n52M\t./lam_1751657975_55000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_55000/ocdbt.process_7\r\n52M\t./lam_1751657975_55000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_55000/ocdbt.process_6\r\n52M\t./lam_1751657975_55000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_55000/ocdbt.process_1\r\n52M\t./lam_1751657975_55000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_55000/ocdbt.process_2\r\n420M\t./lam_1751657975_55000\r\n52M\t./lam_1751657975_121000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_121000/ocdbt.process_4\r\n",,terminal_output +488,174655,"TERMINAL",0,0,"1.8M\t./lam_1751657975_121000/array_metadatas\r\n52M\t./lam_1751657975_121000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_121000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_121000/d\r\n52M\t./lam_1751657975_121000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_121000/ocdbt.process_5\r\n",,terminal_output +489,174766,"TERMINAL",0,0,"52M\t./lam_1751657975_121000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_121000/ocdbt.process_0\r\n52M\t./lam_1751657975_121000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_121000/ocdbt.process_7\r\n52M\t./lam_1751657975_121000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_121000/ocdbt.process_6\r\n",,terminal_output 
+490,174867,"TERMINAL",0,0,"52M\t./lam_1751657975_121000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_121000/ocdbt.process_1\r\n52M\t./lam_1751657975_121000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_121000/ocdbt.process_2\r\n419M\t./lam_1751657975_121000\r\n52M\t./lam_1751657975_93000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_93000/ocdbt.process_4\r\n",,terminal_output +491,175093,"TERMINAL",0,0,"1.8M\t./lam_1751657975_93000/array_metadatas\r\n52M\t./lam_1751657975_93000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_93000/ocdbt.process_3\r\n",,terminal_output +492,175211,"TERMINAL",0,0,"1.5M\t./lam_1751657975_93000/d\r\n",,terminal_output +493,175267,"TERMINAL",0,0,"52M\t./lam_1751657975_93000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_93000/ocdbt.process_5\r\n",,terminal_output +494,175484,"TERMINAL",0,0,"52M\t./lam_1751657975_93000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_93000/ocdbt.process_0\r\n",,terminal_output +495,175672,"TERMINAL",0,0,"52M\t./lam_1751657975_93000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_93000/ocdbt.process_7\r\n",,terminal_output +496,175883,"TERMINAL",0,0,"52M\t./lam_1751657975_93000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_93000/ocdbt.process_6\r\n52M\t./lam_1751657975_93000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_93000/ocdbt.process_1\r\n52M\t./lam_1751657975_93000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_93000/ocdbt.process_2\r\n420M\t./lam_1751657975_93000\r\n52M\t./lam_1751657975_168000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_168000/ocdbt.process_4\r\n",,terminal_output +497,176042,"TERMINAL",0,0,"1.8M\t./lam_1751657975_168000/array_metadatas\r\n52M\t./lam_1751657975_168000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_168000/ocdbt.process_3\r\n",,terminal_output +498,176168,"TERMINAL",0,0,"1.5M\t./lam_1751657975_168000/d\r\n52M\t./lam_1751657975_168000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_168000/ocdbt.process_5\r\n52M\t./lam_1751657975_168000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_168000/ocdbt.process_0\r\n52M\t./lam_1751657975_168000/ocdbt.process_7/d\r\n",,terminal_output +499,176219,"TERMINAL",0,0,"52M\t./lam_1751657975_168000/ocdbt.process_7\r\n52M\t./lam_1751657975_168000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_168000/ocdbt.process_6\r\n52M\t./lam_1751657975_168000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_168000/ocdbt.process_1\r\n52M\t./lam_1751657975_168000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_168000/ocdbt.process_2\r\n419M\t./lam_1751657975_168000\r\n52M\t./lam_1751657975_45000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_45000/ocdbt.process_4\r\n",,terminal_output +500,176397,"TERMINAL",0,0,"1.8M\t./lam_1751657975_45000/array_metadatas\r\n53M\t./lam_1751657975_45000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_45000/ocdbt.process_3\r\n",,terminal_output +501,176479,"TERMINAL",0,0,"1.5M\t./lam_1751657975_45000/d\r\n",,terminal_output +502,176617,"TERMINAL",0,0,"53M\t./lam_1751657975_45000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_45000/ocdbt.process_5\r\n",,terminal_output +503,176805,"TERMINAL",0,0,"52M\t./lam_1751657975_45000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_45000/ocdbt.process_0\r\n53M\t./lam_1751657975_45000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_45000/ocdbt.process_7\r\n52M\t./lam_1751657975_45000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_45000/ocdbt.process_6\r\n52M\t./lam_1751657975_45000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_45000/ocdbt.process_1\r\n52M\t./lam_1751657975_45000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_45000/ocdbt.process_2\r\n421M\t./lam_1751657975_45000\r\n",,terminal_output 
+504,177016,"TERMINAL",0,0,"53M\t./lam_1751657975_174000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_174000/ocdbt.process_4\r\n",,terminal_output +505,177124,"TERMINAL",0,0,"1.8M\t./lam_1751657975_174000/array_metadatas\r\n53M\t./lam_1751657975_174000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_174000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_174000/d\r\n",,terminal_output +506,177223,"TERMINAL",0,0,"52M\t./lam_1751657975_174000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_174000/ocdbt.process_5\r\n",,terminal_output +507,177367,"TERMINAL",0,0,"52M\t./lam_1751657975_174000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_174000/ocdbt.process_0\r\n52M\t./lam_1751657975_174000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_174000/ocdbt.process_7\r\n52M\t./lam_1751657975_174000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_174000/ocdbt.process_6\r\n52M\t./lam_1751657975_174000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_174000/ocdbt.process_1\r\n53M\t./lam_1751657975_174000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_174000/ocdbt.process_2\r\n420M\t./lam_1751657975_174000\r\n52M\t./lam_1751657975_115000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_115000/ocdbt.process_4\r\n",,terminal_output +508,177482,"TERMINAL",0,0,"1.8M\t./lam_1751657975_115000/array_metadatas\r\n52M\t./lam_1751657975_115000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_115000/ocdbt.process_3\r\n",,terminal_output +509,177546,"TERMINAL",0,0,"1.5M\t./lam_1751657975_115000/d\r\n",,terminal_output +510,177657,"TERMINAL",0,0,"52M\t./lam_1751657975_115000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_115000/ocdbt.process_5\r\n",,terminal_output +511,177949,"TERMINAL",0,0,"52M\t./lam_1751657975_115000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_115000/ocdbt.process_0\r\n52M\t./lam_1751657975_115000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_115000/ocdbt.process_7\r\n52M\t./lam_1751657975_115000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_115000/ocdbt.process_6\r\n52M\t./lam_1751657975_115000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_115000/ocdbt.process_1\r\n52M\t./lam_1751657975_115000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_115000/ocdbt.process_2\r\n419M\t./lam_1751657975_115000\r\n53M\t./lam_1751657975_41000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_41000/ocdbt.process_4\r\n",,terminal_output +512,178173,"TERMINAL",0,0,"1.8M\t./lam_1751657975_41000/array_metadatas\r\n52M\t./lam_1751657975_41000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_41000/ocdbt.process_3\r\n",,terminal_output +513,178361,"TERMINAL",0,0,"1.5M\t./lam_1751657975_41000/d\r\n52M\t./lam_1751657975_41000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_41000/ocdbt.process_5\r\n52M\t./lam_1751657975_41000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_41000/ocdbt.process_0\r\n53M\t./lam_1751657975_41000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_41000/ocdbt.process_7\r\n52M\t./lam_1751657975_41000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_41000/ocdbt.process_6\r\n52M\t./lam_1751657975_41000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_41000/ocdbt.process_1\r\n52M\t./lam_1751657975_41000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_41000/ocdbt.process_2\r\n420M\t./lam_1751657975_41000\r\n52M\t./lam_1751657975_194000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_194000/ocdbt.process_4\r\n",,terminal_output 
+514,178425,"TERMINAL",0,0,"1.8M\t./lam_1751657975_194000/array_metadatas\r\n52M\t./lam_1751657975_194000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_194000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_194000/d\r\n52M\t./lam_1751657975_194000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_194000/ocdbt.process_5\r\n",,terminal_output +515,178558,"TERMINAL",0,0,"53M\t./lam_1751657975_194000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_194000/ocdbt.process_0\r\n52M\t./lam_1751657975_194000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_194000/ocdbt.process_7\r\n52M\t./lam_1751657975_194000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_194000/ocdbt.process_6\r\n52M\t./lam_1751657975_194000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_194000/ocdbt.process_1\r\n",,terminal_output +516,178611,"TERMINAL",0,0,"52M\t./lam_1751657975_194000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_194000/ocdbt.process_2\r\n419M\t./lam_1751657975_194000\r\n52M\t./lam_1751657975_105000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_105000/ocdbt.process_4\r\n",,terminal_output +517,178815,"TERMINAL",0,0,"1.8M\t./lam_1751657975_105000/array_metadatas\r\n52M\t./lam_1751657975_105000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_105000/ocdbt.process_3\r\n",,terminal_output +518,178924,"TERMINAL",0,0,"1.5M\t./lam_1751657975_105000/d\r\n",,terminal_output +519,179023,"TERMINAL",0,0,"52M\t./lam_1751657975_105000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_105000/ocdbt.process_5\r\n",,terminal_output +520,179141,"TERMINAL",0,0,"52M\t./lam_1751657975_105000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_105000/ocdbt.process_0\r\n",,terminal_output +521,179208,"TERMINAL",0,0,"52M\t./lam_1751657975_105000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_105000/ocdbt.process_7\r\n52M\t./lam_1751657975_105000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_105000/ocdbt.process_6\r\n52M\t./lam_1751657975_105000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_105000/ocdbt.process_1\r\n52M\t./lam_1751657975_105000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_105000/ocdbt.process_2\r\n419M\t./lam_1751657975_105000\r\n53M\t./lam_1751657975_9000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_9000/ocdbt.process_4\r\n",,terminal_output +522,179422,"TERMINAL",0,0,"1.8M\t./lam_1751657975_9000/array_metadatas\r\n53M\t./lam_1751657975_9000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_9000/ocdbt.process_3\r\n",,terminal_output +523,179480,"TERMINAL",0,0,"1.5M\t./lam_1751657975_9000/d\r\n53M\t./lam_1751657975_9000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_9000/ocdbt.process_5\r\n",,terminal_output +524,179644,"TERMINAL",0,0,"53M\t./lam_1751657975_9000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_9000/ocdbt.process_0\r\n53M\t./lam_1751657975_9000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_9000/ocdbt.process_7\r\n53M\t./lam_1751657975_9000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_9000/ocdbt.process_6\r\n53M\t./lam_1751657975_9000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_9000/ocdbt.process_1\r\n52M\t./lam_1751657975_9000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_9000/ocdbt.process_2\r\n422M\t./lam_1751657975_9000\r\n52M\t./lam_1751657975_172000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_172000/ocdbt.process_4\r\n",,terminal_output +525,179895,"TERMINAL",0,0,"1.8M\t./lam_1751657975_172000/array_metadatas\r\n52M\t./lam_1751657975_172000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_172000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_172000/d\r\n",,terminal_output 
+526,179996,"TERMINAL",0,0,"52M\t./lam_1751657975_172000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_172000/ocdbt.process_5\r\n",,terminal_output +527,180210,"TERMINAL",0,0,"52M\t./lam_1751657975_172000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_172000/ocdbt.process_0\r\n52M\t./lam_1751657975_172000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_172000/ocdbt.process_7\r\n52M\t./lam_1751657975_172000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_172000/ocdbt.process_6\r\n52M\t./lam_1751657975_172000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_172000/ocdbt.process_1\r\n52M\t./lam_1751657975_172000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_172000/ocdbt.process_2\r\n419M\t./lam_1751657975_172000\r\n52M\t./lam_1751657975_92000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_92000/ocdbt.process_4\r\n",,terminal_output +528,180566,"TERMINAL",0,0,"1.8M\t./lam_1751657975_92000/array_metadatas\r\n53M\t./lam_1751657975_92000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_92000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_92000/d\r\n",,terminal_output +529,180662,"TERMINAL",0,0,"52M\t./lam_1751657975_92000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_92000/ocdbt.process_5\r\n",,terminal_output +530,180804,"TERMINAL",0,0,"52M\t./lam_1751657975_92000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_92000/ocdbt.process_0\r\n52M\t./lam_1751657975_92000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_92000/ocdbt.process_7\r\n52M\t./lam_1751657975_92000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_92000/ocdbt.process_6\r\n52M\t./lam_1751657975_92000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_92000/ocdbt.process_1\r\n52M\t./lam_1751657975_92000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_92000/ocdbt.process_2\r\n419M\t./lam_1751657975_92000\r\n53M\t./lam_1751657975_82000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_82000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_82000/array_metadatas\r\n",,terminal_output +531,180910,"TERMINAL",0,0,"53M\t./lam_1751657975_82000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_82000/ocdbt.process_3\r\n",,terminal_output +532,181033,"TERMINAL",0,0,"1.5M\t./lam_1751657975_82000/d\r\n52M\t./lam_1751657975_82000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_82000/ocdbt.process_5\r\n",,terminal_output +533,181168,"TERMINAL",0,0,"52M\t./lam_1751657975_82000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_82000/ocdbt.process_0\r\n52M\t./lam_1751657975_82000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_82000/ocdbt.process_7\r\n52M\t./lam_1751657975_82000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_82000/ocdbt.process_6\r\n52M\t./lam_1751657975_82000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_82000/ocdbt.process_1\r\n52M\t./lam_1751657975_82000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_82000/ocdbt.process_2\r\n420M\t./lam_1751657975_82000\r\n52M\t./lam_1751657975_112000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_112000/ocdbt.process_4\r\n",,terminal_output +534,181235,"TERMINAL",0,0,"1.8M\t./lam_1751657975_112000/array_metadatas\r\n52M\t./lam_1751657975_112000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_112000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_112000/d\r\n",,terminal_output 
+535,181406,"TERMINAL",0,0,"52M\t./lam_1751657975_112000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_112000/ocdbt.process_5\r\n52M\t./lam_1751657975_112000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_112000/ocdbt.process_0\r\n52M\t./lam_1751657975_112000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_112000/ocdbt.process_7\r\n52M\t./lam_1751657975_112000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_112000/ocdbt.process_6\r\n52M\t./lam_1751657975_112000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_112000/ocdbt.process_1\r\n52M\t./lam_1751657975_112000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_112000/ocdbt.process_2\r\n419M\t./lam_1751657975_112000\r\n52M\t./lam_1751657975_21000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_21000/ocdbt.process_4\r\n",,terminal_output +536,181551,"TERMINAL",0,0,"1.8M\t./lam_1751657975_21000/array_metadatas\r\n53M\t./lam_1751657975_21000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_21000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_21000/d\r\n",,terminal_output +537,181689,"TERMINAL",0,0,"52M\t./lam_1751657975_21000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_21000/ocdbt.process_5\r\n",,terminal_output +538,181824,"TERMINAL",0,0,"53M\t./lam_1751657975_21000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_21000/ocdbt.process_0\r\n52M\t./lam_1751657975_21000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_21000/ocdbt.process_7\r\n52M\t./lam_1751657975_21000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_21000/ocdbt.process_6\r\n",,terminal_output +539,182042,"TERMINAL",0,0,"52M\t./lam_1751657975_21000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_21000/ocdbt.process_1\r\n52M\t./lam_1751657975_21000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_21000/ocdbt.process_2\r\n421M\t./lam_1751657975_21000\r\n52M\t./lam_1751657975_23000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_23000/ocdbt.process_4\r\n",,terminal_output +540,182235,"TERMINAL",0,0,"1.8M\t./lam_1751657975_23000/array_metadatas\r\n53M\t./lam_1751657975_23000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_23000/ocdbt.process_3\r\n",,terminal_output +541,182341,"TERMINAL",0,0,"1.5M\t./lam_1751657975_23000/d\r\n",,terminal_output +542,182394,"TERMINAL",0,0,"52M\t./lam_1751657975_23000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_23000/ocdbt.process_5\r\n",,terminal_output +543,182670,"TERMINAL",0,0,"52M\t./lam_1751657975_23000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_23000/ocdbt.process_0\r\n53M\t./lam_1751657975_23000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_23000/ocdbt.process_7\r\n52M\t./lam_1751657975_23000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_23000/ocdbt.process_6\r\n52M\t./lam_1751657975_23000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_23000/ocdbt.process_1\r\n52M\t./lam_1751657975_23000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_23000/ocdbt.process_2\r\n420M\t./lam_1751657975_23000\r\n52M\t./lam_1751657975_200000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_200000/ocdbt.process_4\r\n",,terminal_output +544,182858,"TERMINAL",0,0,"1.8M\t./lam_1751657975_200000/array_metadatas\r\n52M\t./lam_1751657975_200000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_200000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_200000/d\r\n52M\t./lam_1751657975_200000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_200000/ocdbt.process_5\r\n",,terminal_output +545,183087,"TERMINAL",0,0,"52M\t./lam_1751657975_200000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_200000/ocdbt.process_0\r\n52M\t./lam_1751657975_200000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_200000/ocdbt.process_7\r\n",,terminal_output 
+546,183220,"TERMINAL",0,0,"52M\t./lam_1751657975_200000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_200000/ocdbt.process_6\r\n52M\t./lam_1751657975_200000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_200000/ocdbt.process_1\r\n52M\t./lam_1751657975_200000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_200000/ocdbt.process_2\r\n419M\t./lam_1751657975_200000\r\n52M\t./lam_1751657975_182000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_182000/ocdbt.process_4\r\n",,terminal_output +547,183374,"TERMINAL",0,0,"1.8M\t./lam_1751657975_182000/array_metadatas\r\n53M\t./lam_1751657975_182000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_182000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_182000/d\r\n52M\t./lam_1751657975_182000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_182000/ocdbt.process_5\r\n",,terminal_output +548,183440,"TERMINAL",0,0,"52M\t./lam_1751657975_182000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_182000/ocdbt.process_0\r\n52M\t./lam_1751657975_182000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_182000/ocdbt.process_7\r\n52M\t./lam_1751657975_182000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_182000/ocdbt.process_6\r\n52M\t./lam_1751657975_182000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_182000/ocdbt.process_1\r\n52M\t./lam_1751657975_182000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_182000/ocdbt.process_2\r\n419M\t./lam_1751657975_182000\r\n52M\t./lam_1751657975_156000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_156000/ocdbt.process_4\r\n",,terminal_output +549,183660,"TERMINAL",0,0,"1.8M\t./lam_1751657975_156000/array_metadatas\r\n52M\t./lam_1751657975_156000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_156000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_156000/d\r\n52M\t./lam_1751657975_156000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_156000/ocdbt.process_5\r\n",,terminal_output +550,183914,"TERMINAL",0,0,"53M\t./lam_1751657975_156000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_156000/ocdbt.process_0\r\n",,terminal_output +551,184101,"TERMINAL",0,0,"53M\t./lam_1751657975_156000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_156000/ocdbt.process_7\r\n52M\t./lam_1751657975_156000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_156000/ocdbt.process_6\r\n52M\t./lam_1751657975_156000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_156000/ocdbt.process_1\r\n52M\t./lam_1751657975_156000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_156000/ocdbt.process_2\r\n420M\t./lam_1751657975_156000\r\n52M\t./lam_1751657975_95000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_95000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_95000/array_metadatas\r\n53M\t./lam_1751657975_95000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_95000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_95000/d\r\n52M\t./lam_1751657975_95000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_95000/ocdbt.process_5\r\n52M\t./lam_1751657975_95000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_95000/ocdbt.process_0\r\n52M\t./lam_1751657975_95000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_95000/ocdbt.process_7\r\n52M\t./lam_1751657975_95000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_95000/ocdbt.process_6\r\n52M\t./lam_1751657975_95000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_95000/ocdbt.process_1\r\n52M\t./lam_1751657975_95000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_95000/ocdbt.process_2\r\n419M\t./lam_1751657975_95000\r\n53M\t./lam_1751657975_192000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_192000/ocdbt.process_4\r\n",,terminal_output 
+552,184198,"TERMINAL",0,0,"1.8M\t./lam_1751657975_192000/array_metadatas\r\n53M\t./lam_1751657975_192000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_192000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_192000/d\r\n",,terminal_output +553,184310,"TERMINAL",0,0,"52M\t./lam_1751657975_192000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_192000/ocdbt.process_5\r\n",,terminal_output +554,184473,"TERMINAL",0,0,"52M\t./lam_1751657975_192000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_192000/ocdbt.process_0\r\n",,terminal_output +555,184624,"TERMINAL",0,0,"53M\t./lam_1751657975_192000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_192000/ocdbt.process_7\r\n52M\t./lam_1751657975_192000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_192000/ocdbt.process_6\r\n52M\t./lam_1751657975_192000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_192000/ocdbt.process_1\r\n52M\t./lam_1751657975_192000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_192000/ocdbt.process_2\r\n420M\t./lam_1751657975_192000\r\n53M\t./lam_1751657975_65000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_65000/ocdbt.process_4\r\n",,terminal_output +556,184749,"TERMINAL",0,0,"1.8M\t./lam_1751657975_65000/array_metadatas\r\n53M\t./lam_1751657975_65000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_65000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_65000/d\r\n52M\t./lam_1751657975_65000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_65000/ocdbt.process_5\r\n",,terminal_output +557,184863,"TERMINAL",0,0,"52M\t./lam_1751657975_65000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_65000/ocdbt.process_0\r\n53M\t./lam_1751657975_65000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_65000/ocdbt.process_7\r\n52M\t./lam_1751657975_65000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_65000/ocdbt.process_6\r\n52M\t./lam_1751657975_65000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_65000/ocdbt.process_1\r\n52M\t./lam_1751657975_65000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_65000/ocdbt.process_2\r\n420M\t./lam_1751657975_65000\r\n53M\t./lam_1751657975_5000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_5000/ocdbt.process_4\r\n",,terminal_output +558,184935,"TERMINAL",0,0,"1.8M\t./lam_1751657975_5000/array_metadatas\r\n53M\t./lam_1751657975_5000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_5000/ocdbt.process_3\r\n",,terminal_output +559,185079,"TERMINAL",0,0,"1.5M\t./lam_1751657975_5000/d\r\n53M\t./lam_1751657975_5000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_5000/ocdbt.process_5\r\n53M\t./lam_1751657975_5000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_5000/ocdbt.process_0\r\n",,terminal_output +560,185233,"TERMINAL",0,0,"53M\t./lam_1751657975_5000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_5000/ocdbt.process_7\r\n53M\t./lam_1751657975_5000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_5000/ocdbt.process_6\r\n53M\t./lam_1751657975_5000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_5000/ocdbt.process_1\r\n53M\t./lam_1751657975_5000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_5000/ocdbt.process_2\r\n423M\t./lam_1751657975_5000\r\n53M\t./lam_1751657975_66000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_66000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_66000/array_metadatas\r\n",,terminal_output 
+561,185453,"TERMINAL",0,0,"53M\t./lam_1751657975_66000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_66000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_66000/d\r\n53M\t./lam_1751657975_66000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_66000/ocdbt.process_5\r\n53M\t./lam_1751657975_66000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_66000/ocdbt.process_0\r\n53M\t./lam_1751657975_66000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_66000/ocdbt.process_7\r\n",,terminal_output +562,185513,"TERMINAL",0,0,"52M\t./lam_1751657975_66000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_66000/ocdbt.process_6\r\n52M\t./lam_1751657975_66000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_66000/ocdbt.process_1\r\n52M\t./lam_1751657975_66000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_66000/ocdbt.process_2\r\n421M\t./lam_1751657975_66000\r\n52M\t./lam_1751657975_26000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_26000/ocdbt.process_4\r\n",,terminal_output +563,185611,"TERMINAL",0,0,"1.8M\t./lam_1751657975_26000/array_metadatas\r\n52M\t./lam_1751657975_26000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_26000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_26000/d\r\n52M\t./lam_1751657975_26000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_26000/ocdbt.process_5\r\n",,terminal_output +564,185933,"TERMINAL",0,0,"52M\t./lam_1751657975_26000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_26000/ocdbt.process_0\r\n52M\t./lam_1751657975_26000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_26000/ocdbt.process_7\r\n53M\t./lam_1751657975_26000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_26000/ocdbt.process_6\r\n",,terminal_output +565,186056,"TERMINAL",0,0,"52M\t./lam_1751657975_26000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_26000/ocdbt.process_1\r\n52M\t./lam_1751657975_26000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_26000/ocdbt.process_2\r\n420M\t./lam_1751657975_26000\r\n53M\t./lam_1751657975_47000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_47000/ocdbt.process_4\r\n",,terminal_output +566,186126,"TERMINAL",0,0,"1.8M\t./lam_1751657975_47000/array_metadatas\r\n52M\t./lam_1751657975_47000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_47000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_47000/d\r\n",,terminal_output +567,186242,"TERMINAL",0,0,"52M\t./lam_1751657975_47000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_47000/ocdbt.process_5\r\n53M\t./lam_1751657975_47000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_47000/ocdbt.process_0\r\n53M\t./lam_1751657975_47000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_47000/ocdbt.process_7\r\n52M\t./lam_1751657975_47000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_47000/ocdbt.process_6\r\n52M\t./lam_1751657975_47000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_47000/ocdbt.process_1\r\n52M\t./lam_1751657975_47000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_47000/ocdbt.process_2\r\n421M\t./lam_1751657975_47000\r\n52M\t./lam_1751657975_68000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_68000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_68000/array_metadatas\r\n52M\t./lam_1751657975_68000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_68000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_68000/d\r\n52M\t./lam_1751657975_68000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_68000/ocdbt.process_5\r\n",,terminal_output 
+568,186343,"TERMINAL",0,0,"52M\t./lam_1751657975_68000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_68000/ocdbt.process_0\r\n52M\t./lam_1751657975_68000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_68000/ocdbt.process_7\r\n52M\t./lam_1751657975_68000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_68000/ocdbt.process_6\r\n52M\t./lam_1751657975_68000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_68000/ocdbt.process_1\r\n52M\t./lam_1751657975_68000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_68000/ocdbt.process_2\r\n420M\t./lam_1751657975_68000\r\n53M\t./lam_1751657975_178000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_178000/ocdbt.process_4\r\n",,terminal_output +569,186518,"TERMINAL",0,0,"1.8M\t./lam_1751657975_178000/array_metadatas\r\n52M\t./lam_1751657975_178000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_178000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_178000/d\r\n52M\t./lam_1751657975_178000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_178000/ocdbt.process_5\r\n52M\t./lam_1751657975_178000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_178000/ocdbt.process_0\r\n52M\t./lam_1751657975_178000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_178000/ocdbt.process_7\r\n52M\t./lam_1751657975_178000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_178000/ocdbt.process_6\r\n52M\t./lam_1751657975_178000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_178000/ocdbt.process_1\r\n52M\t./lam_1751657975_178000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_178000/ocdbt.process_2\r\n419M\t./lam_1751657975_178000\r\n52M\t./lam_1751657975_71000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_71000/ocdbt.process_4\r\n",,terminal_output +570,186678,"TERMINAL",0,0,"1.8M\t./lam_1751657975_71000/array_metadatas\r\n53M\t./lam_1751657975_71000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_71000/ocdbt.process_3\r\n",,terminal_output +571,186766,"TERMINAL",0,0,"1.5M\t./lam_1751657975_71000/d\r\n",,terminal_output +572,186860,"TERMINAL",0,0,"52M\t./lam_1751657975_71000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_71000/ocdbt.process_5\r\n",,terminal_output +573,187072,"TERMINAL",0,0,"53M\t./lam_1751657975_71000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_71000/ocdbt.process_0\r\n54M\t./lam_1751657975_71000/ocdbt.process_7/d\r\n54M\t./lam_1751657975_71000/ocdbt.process_7\r\n52M\t./lam_1751657975_71000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_71000/ocdbt.process_6\r\n52M\t./lam_1751657975_71000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_71000/ocdbt.process_1\r\n52M\t./lam_1751657975_71000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_71000/ocdbt.process_2\r\n421M\t./lam_1751657975_71000\r\n52M\t./lam_1751657975_153000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_153000/ocdbt.process_4\r\n",,terminal_output +574,187208,"TERMINAL",0,0,"1.8M\t./lam_1751657975_153000/array_metadatas\r\n52M\t./lam_1751657975_153000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_153000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_153000/d\r\n52M\t./lam_1751657975_153000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_153000/ocdbt.process_5\r\n",,terminal_output 
+575,187394,"TERMINAL",0,0,"52M\t./lam_1751657975_153000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_153000/ocdbt.process_0\r\n52M\t./lam_1751657975_153000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_153000/ocdbt.process_7\r\n53M\t./lam_1751657975_153000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_153000/ocdbt.process_6\r\n52M\t./lam_1751657975_153000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_153000/ocdbt.process_1\r\n52M\t./lam_1751657975_153000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_153000/ocdbt.process_2\r\n419M\t./lam_1751657975_153000\r\n53M\t./lam_1751657975_6000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_6000/ocdbt.process_4\r\n",,terminal_output +576,187573,"TERMINAL",0,0,"1.8M\t./lam_1751657975_6000/array_metadatas\r\n53M\t./lam_1751657975_6000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_6000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_6000/d\r\n",,terminal_output +577,187701,"TERMINAL",0,0,"53M\t./lam_1751657975_6000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_6000/ocdbt.process_5\r\n",,terminal_output +578,187861,"TERMINAL",0,0,"53M\t./lam_1751657975_6000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_6000/ocdbt.process_0\r\n",,terminal_output +579,187988,"TERMINAL",0,0,"53M\t./lam_1751657975_6000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_6000/ocdbt.process_7\r\n53M\t./lam_1751657975_6000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_6000/ocdbt.process_6\r\n53M\t./lam_1751657975_6000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_6000/ocdbt.process_1\r\n53M\t./lam_1751657975_6000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_6000/ocdbt.process_2\r\n423M\t./lam_1751657975_6000\r\n52M\t./lam_1751657975_186000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_186000/ocdbt.process_4\r\n",,terminal_output +580,188104,"TERMINAL",0,0,"1.8M\t./lam_1751657975_186000/array_metadatas\r\n52M\t./lam_1751657975_186000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_186000/ocdbt.process_3\r\n",,terminal_output +581,188174,"TERMINAL",0,0,"1.5M\t./lam_1751657975_186000/d\r\n",,terminal_output +582,188439,"TERMINAL",0,0,"52M\t./lam_1751657975_186000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_186000/ocdbt.process_5\r\n52M\t./lam_1751657975_186000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_186000/ocdbt.process_0\r\n52M\t./lam_1751657975_186000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_186000/ocdbt.process_7\r\n52M\t./lam_1751657975_186000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_186000/ocdbt.process_6\r\n52M\t./lam_1751657975_186000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_186000/ocdbt.process_1\r\n52M\t./lam_1751657975_186000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_186000/ocdbt.process_2\r\n419M\t./lam_1751657975_186000\r\n52M\t./lam_1751657975_149000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_149000/ocdbt.process_4\r\n",,terminal_output +583,188494,"TERMINAL",0,0,"1.8M\t./lam_1751657975_149000/array_metadatas\r\n",,terminal_output 
+584,188614,"TERMINAL",0,0,"52M\t./lam_1751657975_149000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_149000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_149000/d\r\n52M\t./lam_1751657975_149000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_149000/ocdbt.process_5\r\n52M\t./lam_1751657975_149000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_149000/ocdbt.process_0\r\n52M\t./lam_1751657975_149000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_149000/ocdbt.process_7\r\n52M\t./lam_1751657975_149000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_149000/ocdbt.process_6\r\n52M\t./lam_1751657975_149000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_149000/ocdbt.process_1\r\n52M\t./lam_1751657975_149000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_149000/ocdbt.process_2\r\n419M\t./lam_1751657975_149000\r\n52M\t./lam_1751657975_88000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_88000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_88000/array_metadatas\r\n52M\t./lam_1751657975_88000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_88000/ocdbt.process_3\r\n",,terminal_output +585,188822,"TERMINAL",0,0,"1.5M\t./lam_1751657975_88000/d\r\n52M\t./lam_1751657975_88000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_88000/ocdbt.process_5\r\n52M\t./lam_1751657975_88000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_88000/ocdbt.process_0\r\n53M\t./lam_1751657975_88000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_88000/ocdbt.process_7\r\n52M\t./lam_1751657975_88000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_88000/ocdbt.process_6\r\n52M\t./lam_1751657975_88000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_88000/ocdbt.process_1\r\n52M\t./lam_1751657975_88000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_88000/ocdbt.process_2\r\n420M\t./lam_1751657975_88000\r\n53M\t./lam_1751657975_150000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_150000/ocdbt.process_4\r\n",,terminal_output +586,188964,"TERMINAL",0,0,"1.8M\t./lam_1751657975_150000/array_metadatas\r\n52M\t./lam_1751657975_150000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_150000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_150000/d\r\n52M\t./lam_1751657975_150000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_150000/ocdbt.process_5\r\n",,terminal_output +587,189073,"TERMINAL",0,0,"52M\t./lam_1751657975_150000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_150000/ocdbt.process_0\r\n",,terminal_output +588,189147,"TERMINAL",0,0,"53M\t./lam_1751657975_150000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_150000/ocdbt.process_7\r\n52M\t./lam_1751657975_150000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_150000/ocdbt.process_6\r\n52M\t./lam_1751657975_150000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_150000/ocdbt.process_1\r\n52M\t./lam_1751657975_150000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_150000/ocdbt.process_2\r\n420M\t./lam_1751657975_150000\r\n52M\t./lam_1751657975_74000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_74000/ocdbt.process_4\r\n",,terminal_output +589,189348,"TERMINAL",0,0,"1.8M\t./lam_1751657975_74000/array_metadatas\r\n53M\t./lam_1751657975_74000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_74000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_74000/d\r\n52M\t./lam_1751657975_74000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_74000/ocdbt.process_5\r\n52M\t./lam_1751657975_74000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_74000/ocdbt.process_0\r\n53M\t./lam_1751657975_74000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_74000/ocdbt.process_7\r\n",,terminal_output 
+590,189516,"TERMINAL",0,0,"53M\t./lam_1751657975_74000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_74000/ocdbt.process_6\r\n52M\t./lam_1751657975_74000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_74000/ocdbt.process_1\r\n52M\t./lam_1751657975_74000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_74000/ocdbt.process_2\r\n420M\t./lam_1751657975_74000\r\n52M\t./lam_1751657975_86000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_86000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_86000/array_metadatas\r\n52M\t./lam_1751657975_86000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_86000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_86000/d\r\n52M\t./lam_1751657975_86000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_86000/ocdbt.process_5\r\n52M\t./lam_1751657975_86000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_86000/ocdbt.process_0\r\n52M\t./lam_1751657975_86000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_86000/ocdbt.process_7\r\n52M\t./lam_1751657975_86000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_86000/ocdbt.process_6\r\n52M\t./lam_1751657975_86000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_86000/ocdbt.process_1\r\n52M\t./lam_1751657975_86000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_86000/ocdbt.process_2\r\n420M\t./lam_1751657975_86000\r\n52M\t./lam_1751657975_77000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_77000/ocdbt.process_4\r\n",,terminal_output +591,189569,"TERMINAL",0,0,"1.8M\t./lam_1751657975_77000/array_metadatas\r\n",,terminal_output +592,189626,"TERMINAL",0,0,"53M\t./lam_1751657975_77000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_77000/ocdbt.process_3\r\n",,terminal_output +593,189740,"TERMINAL",0,0,"1.5M\t./lam_1751657975_77000/d\r\n",,terminal_output +594,189845,"TERMINAL",0,0,"52M\t./lam_1751657975_77000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_77000/ocdbt.process_5\r\n",,terminal_output +595,190090,"TERMINAL",0,0,"52M\t./lam_1751657975_77000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_77000/ocdbt.process_0\r\n52M\t./lam_1751657975_77000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_77000/ocdbt.process_7\r\n52M\t./lam_1751657975_77000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_77000/ocdbt.process_6\r\n52M\t./lam_1751657975_77000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_77000/ocdbt.process_1\r\n52M\t./lam_1751657975_77000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_77000/ocdbt.process_2\r\n420M\t./lam_1751657975_77000\r\n52M\t./lam_1751657975_70000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_70000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_70000/array_metadatas\r\n53M\t./lam_1751657975_70000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_70000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_70000/d\r\n52M\t./lam_1751657975_70000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_70000/ocdbt.process_5\r\n52M\t./lam_1751657975_70000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_70000/ocdbt.process_0\r\n52M\t./lam_1751657975_70000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_70000/ocdbt.process_7\r\n53M\t./lam_1751657975_70000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_70000/ocdbt.process_6\r\n52M\t./lam_1751657975_70000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_70000/ocdbt.process_1\r\n52M\t./lam_1751657975_70000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_70000/ocdbt.process_2\r\n421M\t./lam_1751657975_70000\r\n52M\t./lam_1751657975_123000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_123000/ocdbt.process_4\r\n",,terminal_output 
+596,190200,"TERMINAL",0,0,"1.8M\t./lam_1751657975_123000/array_metadatas\r\n52M\t./lam_1751657975_123000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_123000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_123000/d\r\n52M\t./lam_1751657975_123000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_123000/ocdbt.process_5\r\n",,terminal_output +597,190298,"TERMINAL",0,0,"52M\t./lam_1751657975_123000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_123000/ocdbt.process_0\r\n",,terminal_output +598,190574,"TERMINAL",0,0,"53M\t./lam_1751657975_123000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_123000/ocdbt.process_7\r\n52M\t./lam_1751657975_123000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_123000/ocdbt.process_6\r\n52M\t./lam_1751657975_123000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_123000/ocdbt.process_1\r\n52M\t./lam_1751657975_123000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_123000/ocdbt.process_2\r\n420M\t./lam_1751657975_123000\r\n52M\t./lam_1751657975_102000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_102000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_102000/array_metadatas\r\n52M\t./lam_1751657975_102000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_102000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_102000/d\r\n52M\t./lam_1751657975_102000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_102000/ocdbt.process_5\r\n",,terminal_output +599,190726,"TERMINAL",0,0,"52M\t./lam_1751657975_102000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_102000/ocdbt.process_0\r\n53M\t./lam_1751657975_102000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_102000/ocdbt.process_7\r\n52M\t./lam_1751657975_102000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_102000/ocdbt.process_6\r\n52M\t./lam_1751657975_102000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_102000/ocdbt.process_1\r\n52M\t./lam_1751657975_102000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_102000/ocdbt.process_2\r\n419M\t./lam_1751657975_102000\r\n52M\t./lam_1751657975_109000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_109000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_109000/array_metadatas\r\n52M\t./lam_1751657975_109000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_109000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_109000/d\r\n52M\t./lam_1751657975_109000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_109000/ocdbt.process_5\r\n",,terminal_output +600,190831,"TERMINAL",0,0,"52M\t./lam_1751657975_109000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_109000/ocdbt.process_0\r\n52M\t./lam_1751657975_109000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_109000/ocdbt.process_7\r\n52M\t./lam_1751657975_109000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_109000/ocdbt.process_6\r\n52M\t./lam_1751657975_109000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_109000/ocdbt.process_1\r\n52M\t./lam_1751657975_109000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_109000/ocdbt.process_2\r\n419M\t./lam_1751657975_109000\r\n52M\t./lam_1751657975_94000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_94000/ocdbt.process_4\r\n",,terminal_output +601,190931,"TERMINAL",0,0,"1.8M\t./lam_1751657975_94000/array_metadatas\r\n",,terminal_output +602,191137,"TERMINAL",0,0,"52M\t./lam_1751657975_94000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_94000/ocdbt.process_3\r\n",,terminal_output +603,191320,"TERMINAL",0,0,"1.5M\t./lam_1751657975_94000/d\r\n52M\t./lam_1751657975_94000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_94000/ocdbt.process_5\r\n",,terminal_output 
+604,191494,"TERMINAL",0,0,"52M\t./lam_1751657975_94000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_94000/ocdbt.process_0\r\n52M\t./lam_1751657975_94000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_94000/ocdbt.process_7\r\n52M\t./lam_1751657975_94000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_94000/ocdbt.process_6\r\n52M\t./lam_1751657975_94000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_94000/ocdbt.process_1\r\n52M\t./lam_1751657975_94000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_94000/ocdbt.process_2\r\n419M\t./lam_1751657975_94000\r\n53M\t./lam_1751657975_16000/ocdbt.process_4/d\r\n53M\t./lam_1751657975_16000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_16000/array_metadatas\r\n",,terminal_output +605,191547,"TERMINAL",0,0,"53M\t./lam_1751657975_16000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_16000/ocdbt.process_3\r\n",,terminal_output +606,191603,"TERMINAL",0,0,"1.5M\t./lam_1751657975_16000/d\r\n53M\t./lam_1751657975_16000/ocdbt.process_5/d\r\n53M\t./lam_1751657975_16000/ocdbt.process_5\r\n",,terminal_output +607,191674,"TERMINAL",0,0,"52M\t./lam_1751657975_16000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_16000/ocdbt.process_0\r\n53M\t./lam_1751657975_16000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_16000/ocdbt.process_7\r\n52M\t./lam_1751657975_16000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_16000/ocdbt.process_6\r\n52M\t./lam_1751657975_16000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_16000/ocdbt.process_1\r\n52M\t./lam_1751657975_16000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_16000/ocdbt.process_2\r\n421M\t./lam_1751657975_16000\r\n52M\t./lam_1751657975_147000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_147000/ocdbt.process_4\r\n",,terminal_output +608,191783,"TERMINAL",0,0,"1.8M\t./lam_1751657975_147000/array_metadatas\r\n",,terminal_output +609,191913,"TERMINAL",0,0,"53M\t./lam_1751657975_147000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_147000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_147000/d\r\n52M\t./lam_1751657975_147000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_147000/ocdbt.process_5\r\n",,terminal_output +610,192063,"TERMINAL",0,0,"52M\t./lam_1751657975_147000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_147000/ocdbt.process_0\r\n52M\t./lam_1751657975_147000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_147000/ocdbt.process_7\r\n52M\t./lam_1751657975_147000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_147000/ocdbt.process_6\r\n52M\t./lam_1751657975_147000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_147000/ocdbt.process_1\r\n52M\t./lam_1751657975_147000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_147000/ocdbt.process_2\r\n420M\t./lam_1751657975_147000\r\n52M\t./lam_1751657975_107000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_107000/ocdbt.process_4\r\n",,terminal_output +611,192141,"TERMINAL",0,0,"1.8M\t./lam_1751657975_107000/array_metadatas\r\n52M\t./lam_1751657975_107000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_107000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_107000/d\r\n",,terminal_output +612,192250,"TERMINAL",0,0,"52M\t./lam_1751657975_107000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_107000/ocdbt.process_5\r\n",,terminal_output 
+613,192438,"TERMINAL",0,0,"52M\t./lam_1751657975_107000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_107000/ocdbt.process_0\r\n52M\t./lam_1751657975_107000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_107000/ocdbt.process_7\r\n52M\t./lam_1751657975_107000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_107000/ocdbt.process_6\r\n52M\t./lam_1751657975_107000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_107000/ocdbt.process_1\r\n52M\t./lam_1751657975_107000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_107000/ocdbt.process_2\r\n419M\t./lam_1751657975_107000\r\n52M\t./lam_1751657975_106000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_106000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_106000/array_metadatas\r\n52M\t./lam_1751657975_106000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_106000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_106000/d\r\n52M\t./lam_1751657975_106000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_106000/ocdbt.process_5\r\n52M\t./lam_1751657975_106000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_106000/ocdbt.process_0\r\n52M\t./lam_1751657975_106000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_106000/ocdbt.process_7\r\n52M\t./lam_1751657975_106000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_106000/ocdbt.process_6\r\n52M\t./lam_1751657975_106000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_106000/ocdbt.process_1\r\n52M\t./lam_1751657975_106000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_106000/ocdbt.process_2\r\n419M\t./lam_1751657975_106000\r\n52M\t./lam_1751657975_75000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_75000/ocdbt.process_4\r\n",,terminal_output +614,192573,"TERMINAL",0,0,"1.8M\t./lam_1751657975_75000/array_metadatas\r\n52M\t./lam_1751657975_75000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_75000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_75000/d\r\n52M\t./lam_1751657975_75000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_75000/ocdbt.process_5\r\n52M\t./lam_1751657975_75000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_75000/ocdbt.process_0\r\n",,terminal_output +615,192709,"TERMINAL",0,0,"53M\t./lam_1751657975_75000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_75000/ocdbt.process_7\r\n53M\t./lam_1751657975_75000/ocdbt.process_6/d\r\n53M\t./lam_1751657975_75000/ocdbt.process_6\r\n53M\t./lam_1751657975_75000/ocdbt.process_1/d\r\n53M\t./lam_1751657975_75000/ocdbt.process_1\r\n53M\t./lam_1751657975_75000/ocdbt.process_2/d\r\n53M\t./lam_1751657975_75000/ocdbt.process_2\r\n421M\t./lam_1751657975_75000\r\n52M\t./lam_1751657975_122000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_122000/ocdbt.process_4\r\n",,terminal_output +616,192934,"TERMINAL",0,0,"1.8M\t./lam_1751657975_122000/array_metadatas\r\n53M\t./lam_1751657975_122000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_122000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_122000/d\r\n52M\t./lam_1751657975_122000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_122000/ocdbt.process_5\r\n",,terminal_output +617,193284,"TERMINAL",0,0,"52M\t./lam_1751657975_122000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_122000/ocdbt.process_0\r\n52M\t./lam_1751657975_122000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_122000/ocdbt.process_7\r\n52M\t./lam_1751657975_122000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_122000/ocdbt.process_6\r\n52M\t./lam_1751657975_122000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_122000/ocdbt.process_1\r\n52M\t./lam_1751657975_122000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_122000/ocdbt.process_2\r\n419M\t./lam_1751657975_122000\r\n52M\t./lam_1751657975_30000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_30000/ocdbt.process_4\r\n",,terminal_output 
+618,193396,"TERMINAL",0,0,"1.8M\t./lam_1751657975_30000/array_metadatas\r\n",,terminal_output +619,193487,"TERMINAL",0,0,"53M\t./lam_1751657975_30000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_30000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_30000/d\r\n52M\t./lam_1751657975_30000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_30000/ocdbt.process_5\r\n",,terminal_output +620,193752,"TERMINAL",0,0,"53M\t./lam_1751657975_30000/ocdbt.process_0/d\r\n53M\t./lam_1751657975_30000/ocdbt.process_0\r\n52M\t./lam_1751657975_30000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_30000/ocdbt.process_7\r\n52M\t./lam_1751657975_30000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_30000/ocdbt.process_6\r\n52M\t./lam_1751657975_30000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_30000/ocdbt.process_1\r\n52M\t./lam_1751657975_30000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_30000/ocdbt.process_2\r\n421M\t./lam_1751657975_30000\r\n52M\t./lam_1751657975_29000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_29000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_29000/array_metadatas\r\n53M\t./lam_1751657975_29000/ocdbt.process_3/d\r\n53M\t./lam_1751657975_29000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_29000/d\r\n52M\t./lam_1751657975_29000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_29000/ocdbt.process_5\r\n52M\t./lam_1751657975_29000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_29000/ocdbt.process_0\r\n53M\t./lam_1751657975_29000/ocdbt.process_7/d\r\n53M\t./lam_1751657975_29000/ocdbt.process_7\r\n52M\t./lam_1751657975_29000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_29000/ocdbt.process_6\r\n52M\t./lam_1751657975_29000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_29000/ocdbt.process_1\r\n52M\t./lam_1751657975_29000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_29000/ocdbt.process_2\r\n420M\t./lam_1751657975_29000\r\n52M\t./lam_1751657975_170000/ocdbt.process_4/d\r\n52M\t./lam_1751657975_170000/ocdbt.process_4\r\n1.8M\t./lam_1751657975_170000/array_metadatas\r\n52M\t./lam_1751657975_170000/ocdbt.process_3/d\r\n52M\t./lam_1751657975_170000/ocdbt.process_3\r\n1.5M\t./lam_1751657975_170000/d\r\n52M\t./lam_1751657975_170000/ocdbt.process_5/d\r\n52M\t./lam_1751657975_170000/ocdbt.process_5\r\n52M\t./lam_1751657975_170000/ocdbt.process_0/d\r\n52M\t./lam_1751657975_170000/ocdbt.process_0\r\n52M\t./lam_1751657975_170000/ocdbt.process_7/d\r\n52M\t./lam_1751657975_170000/ocdbt.process_7\r\n52M\t./lam_1751657975_170000/ocdbt.process_6/d\r\n52M\t./lam_1751657975_170000/ocdbt.process_6\r\n52M\t./lam_1751657975_170000/ocdbt.process_1/d\r\n52M\t./lam_1751657975_170000/ocdbt.process_1\r\n52M\t./lam_1751657975_170000/ocdbt.process_2/d\r\n52M\t./lam_1751657975_170000/ocdbt.process_2\r\n419M\t./lam_1751657975_170000\r\n82G\t.\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_lam_action_space_scaling_8/3318550]633;D;0",,terminal_output +621,306271,"TERMINAL",0,0,"cd ..",,terminal_command +622,306316,"TERMINAL",0,0,"]633;E;2025-07-07 19:31:46 cd ..;24571f6f-7d62-4371-8977-e42b5a81d394]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_lam_action_space_scaling_8]633;D;0",,terminal_output +623,306998,"TERMINAL",0,0,"ls",,terminal_command +624,307043,"TERMINAL",0,0,"]633;E;2025-07-07 19:31:47 ls;24571f6f-7d62-4371-8977-e42b5a81d394]633;C3318550\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints/train_lam_action_space_scaling_8]633;D;0",,terminal_output +625,308420,"TERMINAL",0,0,"cd ..",,terminal_command 
+626,308433,"TERMINAL",0,0,"]633;E;2025-07-07 19:31:49 cd ..;24571f6f-7d62-4371-8977-e42b5a81d394]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints]633;D;0",,terminal_output +627,308769,"TERMINAL",0,0,"ls",,terminal_command +628,308831,"TERMINAL",0,0,"]633;E;2025-07-07 19:31:49 ls;24571f6f-7d62-4371-8977-e42b5a81d394]633;C",,terminal_output +629,309046,"TERMINAL",0,0,"3285360 3292207 3296719 tokenizer train_tokenizer_batch_size_scaling_2_node train_tokenizer_model_size_scaling_227M\r\n3285369 3292255 3297342 train_lam_action_space_scaling_10 train_tokenizer_batch_size_scaling_4_node train_tokenizer_model_size_scaling_37M\r\n3285642 3292306 3297546 train_lam_action_space_scaling_12 train_tokenizer_batch_size_scaling_8_node train_tokenizer_model_size_scaling_74M\r\n3292019 3292307 3297579 train_lam_action_space_scaling_20 train_tokenizer_batch_size_scaling_linear_lr_scaling_16_nodes train_tokenizer_no_warmup_ablation\r\n3292119 3292310 3299271 train_lam_action_space_scaling_50 train_tokenizer_batch_size_scaling_linear_lr_scaling_2_nodes\r\n3292139 3292313 3306801 train_lam_action_space_scaling_6 train_tokenizer_batch_size_scaling_linear_lr_scaling_4_nodes\r\n3292156 3292314 3306965 train_lam_action_space_scaling_8 train_tokenizer_batch_size_scaling_linear_lr_scaling_8_nodes\r\n3292206 3296462 lam train_tokenizer_batch_size_scaling_1_node train_tokenizer_model_size_scaling_127M\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/checkpoints]633;D;0",,terminal_output +630,311176,"TERMINAL",0,0,"cd ..",,terminal_command +631,312078,"TERMINAL",0,0,"ls",,terminal_command +632,312123,"TERMINAL",0,0,"]633;E;2025-07-07 19:31:52 ls;24571f6f-7d62-4371-8977-e42b5a81d394]633;C",,terminal_output +633,312260,"TERMINAL",0,0,"checkpoints knoms_arrayrecords_500_shards knoms_tfrecords_200_shards open_ai_minecraft_first_try overfit_dir_openai_npy\r\ncoinrun knoms_arrayrecords_500_shards_optimized_layout knoms_tfrecords_2_shards_overfit open_ai_minecraft_first_try_npy overfit_dir_openai_tfrecord\r\ndata_knoms knoms_mp4 knoms_tfrecords_500_shards open_ai_minecraft_first_try_tfrecord procgen_env_16_episodes_20000\r\ndummy knoms_mp4_clips knoms_tfrecords_500_shards_overfit_1 open_ai_minecraft_npy\r\ndummy_arrayrecords knoms_npy knoms_tfrecords_500_shards_overfit_10 open_ai_minecraft_tfrecord\r\ndummy_arrayrecords_500_shards knoms_tfrecords open_ai_minecraft overfit_dir\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data]633;D;0",,terminal_output +634,322886,"TERMINAL",0,0,"cd open_ai_minecraft_tfrecord/",,terminal_command +635,323488,"TERMINAL",0,0,"ls",,terminal_command +636,323536,"TERMINAL",0,0,"]633;E;2025-07-07 19:32:04 ls;24571f6f-7d62-4371-8977-e42b5a81d394]633;C",,terminal_output +637,323865,"TERMINAL",0,0,"shard-00000-of-00500.tfrecord shard-00084-of-00500.tfrecord shard-00168-of-00500.tfrecord shard-00252-of-00500.tfrecord shard-00336-of-00500.tfrecord shard-00420-of-00500.tfrecord\r\nshard-00001-of-00500.tfrecord shard-00085-of-00500.tfrecord shard-00169-of-00500.tfrecord shard-00253-of-00500.tfrecord shard-00337-of-00500.tfrecord shard-00421-of-00500.tfrecord\r\nshard-00002-of-00500.tfrecord shard-00086-of-00500.tfrecord shard-00170-of-00500.tfrecord shard-00254-of-00500.tfrecord shard-00338-of-00500.tfrecord shard-00422-of-00500.tfrecord\r\nshard-00003-of-00500.tfrecord shard-00087-of-00500.tfrecord shard-00171-of-00500.tfrecord shard-00255-of-00500.tfrecord 
shard-00339-of-00500.tfrecord shard-00423-of-00500.tfrecord\r\nshard-00004-of-00500.tfrecord shard-00088-of-00500.tfrecord shard-00172-of-00500.tfrecord shard-00256-of-00500.tfrecord shard-00340-of-00500.tfrecord shard-00424-of-00500.tfrecord\r\nshard-00005-of-00500.tfrecord shard-00089-of-00500.tfrecord shard-00173-of-00500.tfrecord shard-00257-of-00500.tfrecord shard-00341-of-00500.tfrecord shard-00425-of-00500.tfrecord\r\nshard-00006-of-00500.tfrecord shard-00090-of-00500.tfrecord shard-00174-of-00500.tfrecord shard-00258-of-00500.tfrecord shard-00342-of-00500.tfrecord shard-00426-of-00500.tfrecord\r\nshard-00007-of-00500.tfrecord shard-00091-of-00500.tfrecord shard-00175-of-00500.tfrecord shard-00259-of-00500.tfrecord shard-00343-of-00500.tfrecord shard-00427-of-00500.tfrecord\r\nshard-00008-of-00500.tfrecord shard-00092-of-00500.tfrecord shard-00176-of-00500.tfrecord shard-00260-of-00500.tfrecord shard-00344-of-00500.tfrecord shard-00428-of-00500.tfrecord\r\nshard-00009-of-00500.tfrecord shard-00093-of-00500.tfrecord shard-00177-of-00500.tfrecord shard-00261-of-00500.tfrecord shard-00345-of-00500.tfrecord shard-00429-of-00500.tfrecord\r\nshard-00010-of-00500.tfrecord shard-00094-of-00500.tfrecord shard-00178-of-00500.tfrecord shard-00262-of-00500.tfrecord shard-00346-of-00500.tfrecord shard-00430-of-00500.tfrecord\r\nshard-00011-of-00500.tfrecord shard-00095-of-00500.tfrecord shard-00179-of-00500.tfrecord shard-00263-of-00500.tfrecord shard-00347-of-00500.tfrecord shard-00431-of-00500.tfrecord\r\nshard-00012-of-00500.tfrecord shard-00096-of-00500.tfrecord shard-00180-of-00500.tfrecord shard-00264-of-00500.tfrecord shard-00348-of-00500.tfrecord shard-00432-of-00500.tfrecord\r\nshard-00013-of-00500.tfrecord shard-00097-of-00500.tfrecord shard-00181-of-00500.tfrecord shard-00265-of-00500.tfrecord shard-00349-of-00500.tfrecord shard-00433-of-00500.tfrecord\r\nshard-00014-of-00500.tfrecord shard-00098-of-00500.tfrecord shard-00182-of-00500.tfrecord shard-00266-of-00500.tfrecord shard-00350-of-00500.tfrecord shard-00434-of-00500.tfrecord\r\nshard-00015-of-00500.tfrecord shard-00099-of-00500.tfrecord shard-00183-of-00500.tfrecord shard-00267-of-00500.tfrecord shard-00351-of-00500.tfrecord shard-00435-of-00500.tfrecord\r\nshard-00016-of-00500.tfrecord shard-00100-of-00500.tfrecord shard-00184-of-00500.tfrecord shard-00268-of-00500.tfrecord shard-00352-of-00500.tfrecord shard-00436-of-00500.tfrecord\r\nshard-00017-of-00500.tfrecord shard-00101-of-00500.tfrecord shard-00185-of-00500.tfrecord shard-00269-of-00500.tfrecord shard-00353-of-00500.tfrecord shard-00437-of-00500.tfrecord\r\nshard-00018-of-00500.tfrecord shard-00102-of-00500.tfrecord shard-00186-of-00500.tfrecord shard-00270-of-00500.tfrecord shard-00354-of-00500.tfrecord shard-00438-of-00500.tfrecord\r\nshard-00019-of-00500.tfrecord shard-00103-of-00500.tfrecord shard-00187-of-00500.tfrecord shard-00271-of-00500.tfrecord shard-00355-of-00500.tfrecord shard-00439-of-00500.tfrecord\r\nshard-00020-of-00500.tfrecord shard-00104-of-00500.tfrecord shard-00188-of-00500.tfrecord shard-00272-of-00500.tfrecord shard-00356-of-00500.tfrecord shard-00440-of-00500.tfrecord\r\nshard-00021-of-00500.tfrecord shard-00105-of-00500.tfrecord shard-00189-of-00500.tfrecord shard-00273-of-00500.tfrecord shard-00357-of-00500.tfrecord shard-00441-of-00500.tfrecord\r\nshard-00022-of-00500.tfrecord shard-00106-of-00500.tfrecord shard-00190-of-00500.tfrecord shard-00274-of-00500.tfrecord shard-00358-of-00500.tfrecord 
shard-00442-of-00500.tfrecord\r\nshard-00023-of-00500.tfrecord shard-00107-of-00500.tfrecord shard-00191-of-00500.tfrecord shard-00275-of-00500.tfrecord shard-00359-of-00500.tfrecord shard-00443-of-00500.tfrecord\r\nshard-00024-of-00500.tfrecord shard-00108-of-00500.tfrecord shard-00192-of-00500.tfrecord shard-00276-of-00500.tfrecord shard-00360-of-00500.tfrecord shard-00444-of-00500.tfrecord\r\nshard-00025-of-00500.tfrecord shard-00109-of-00500.tfrecord shard-00193-of-00500.tfrecord shard-00277-of-00500.tfrecord shard-00361-of-00500.tfrecord shard-00445-of-00500.tfrecord\r\nshard-00026-of-00500.tfrecord shard-00110-of-00500.tfrecord shard-00194-of-00500.tfrecord shard-00278-of-00500.tfrecord shard-00362-of-00500.tfrecord shard-00446-of-00500.tfrecord\r\nshard-00027-of-00500.tfrecord shard-00111-of-00500.tfrecord shard-00195-of-00500.tfrecord shard-00279-of-00500.tfrecord shard-00363-of-00500.tfrecord shard-00447-of-00500.tfrecord\r\nshard-00028-of-00500.tfrecord shard-00112-of-00500.tfrecord shard-00196-of-00500.tfrecord shard-00280-of-00500.tfrecord shard-00364-of-00500.tfrecord shard-00448-of-00500.tfrecord\r\nshard-00029-of-00500.tfrecord shard-00113-of-00500.tfrecord shard-00197-of-00500.tfrecord shard-00281-of-00500.tfrecord shard-00365-of-00500.tfrecord shard-00449-of-00500.tfrecord\r\nshard-00030-of-00500.tfrecord shard-00114-of-00500.tfrecord shard-00198-of-00500.tfrecord shard-00282-of-00500.tfrecord shard-00366-of-00500.tfrecord shard-00450-of-00500.tfrecord\r\nshard-00031-of-00500.tfrecord shard-00115-of-00500.tfrecord shard-00199-of-00500.tfrecord shard-00283-of-00500.tfrecord shard-00367-of-00500.tfrecord shard-00451-of-00500.tfrecord\r\nshard-00032-of-00500.tfrecord shard-00116-of-00500.tfrecord shard-00200-of-00500.tfrecord shard-00284-of-00500.tfrecord shard-00368-of-00500.tfrecord shard-00452-of-00500.tfrecord\r\nshard-00033-of-00500.tfrecord shard-00117-of-00500.tfrecord shard-00201-of-00500.tfrecord shard-00285-of-00500.tfrecord shard-00369-of-00500.tfrecord shard-00453-of-00500.tfrecord\r\nshard-00034-of-00500.tfrecord shard-00118-of-00500.tfrecord shard-00202-of-00500.tfrecord shard-00286-of-00500.tfrecord shard-00370-of-00500.tfrecord shard-00454-of-00500.tfrecord\r\nshard-00035-of-00500.tfrecord shard-00119-of-00500.tfrecord shard-00203-of-00500.tfrecord shard-00287-of-00500.tfrecord shard-00371-of-00500.tfrecord shard-00455-of-00500.tfrecord\r\nshard-00036-of-00500.tfrecord shard-00120-of-00500.tfrecord shard-00204-of-00500.tfrecord shard-00288-of-00500.tfrecord shard-00372-of-00500.tfrecord shard-00456-of-00500.tfrecord\r\nshard-00037-of-00500.tfrecord shard-00121-of-00500.tfrecord shard-00205-of-00500.tfrecord shard-00289-of-00500.tfrecord shard-00373-of-00500.tfrecord shard-00457-of-00500.tfrecord\r\nshard-00038-of-00500.tfrecord shard-00122-of-00500.tfrecord shard-00206-of-00500.tfrecord shard-00290-of-00500.tfrecord shard-00374-of-00500.tfrecord shard-00458-of-00500.tfrecord\r\nshard-00039-of-00500.tfrecord shard-00123-of-00500.tfrecord shard-00207-of-00500.tfrecord shard-00291-of-00500.tfrecord shard-00375-of-00500.tfrecord shard-00459-of-00500.tfrecord\r\nshard-00040-of-00500.tfrecord shard-00124-of-00500.tfrecord shard-00208-of-00500.tfrecord shard-00292-of-00500.tfrecord shard-00376-of-00500.tfrecord shard-00460-of-00500.tfrecord\r\nshard-00041-of-00500.tfrecord shard-00125-of-00500.tfrecord shard-00209-of-00500.tfrecord shard-00293-of-00500.tfrecord shard-00377-of-00500.tfrecord shard-00461-of-00500.tfrecord\r\nshard-00042-of-00500.tfrecord 
shard-00126-of-00500.tfrecord shard-00210-of-00500.tfrecord shard-00294-of-00500.tfrecord shard-00378-of-00500.tfrecord shard-00462-of-00500.tfrecord\r\nshard-00043-of-00500.tfrecord shard-00127-of-00500.tfrecord shard-00211-of-00500.tfrecord shard-00295-of-00500.tfrecord shard-00379-of-00500.tfrecord shard-00463-of-00500.tfrecord\r\nshard-00044-of-00500.tfrecord shard-00128-of-00500.tfrecord shard-00212-of-00500.tfrecord shard-00296-of-00500.tfrecord shard-00380-of-00500.tfrecord shard-00464-of-00500.tfrecord\r\nshard-00045-of-00500.tfrecord shard-00129-of-00500.tfrecord shard-00213-of-00500.tfrecord shard-00297-of-00500.tfrecord shard-00381-of-00500.tfrecord shard-00465-of-00500.tfrecord\r\nshard-00046-of-00500.tfrecord shard-00130-of-00500.tfrecord shard-00214-of-00500.tfrecord shard-00298-of-00500.tfrecord shard-00382-of-00500.tfrecord shard-00466-of-00500.tfrecord\r\nshard-00047-of-00500.tfrecord shard-00131-of-00500.tfrecord shard-00215-of-00500.tfrecord shard-00299-of-00500.tfrecord shard-00383-of-00500.tfrecord shard-00467-of-00500.tfrecord\r\nshard-00048-of-00500.tfrecord shard-00132-of-00500.tfrecord shard-00216-of-00500.tfrecord shard-00300-of-00500.tfrecord shard-00384-of-00500.tfrecord shard-00468-of-00500.tfrecord\r\nshard-00049-of-00500.tfrecord shard-00133-of-00500.tfrecord shard-00217-of-00500.tfrecord shard-00301-of-00500.tfrecord shard-00385-of-00500.tfrecord shard-00469-of-00500.tfrecord\r\nshard-00050-of-00500.tfrecord shard-00134-of-00500.tfrecord shard-00218-of-00500.tfrecord shard-00302-of-00500.tfrecord shard-00386-of-00500.tfrecord shard-00470-of-00500.tfrecord\r\nshard-00051-of-00500.tfrecord shard-00135-of-00500.tfrecord shard-00219-of-00500.tfrecord shard-00303-of-00500.tfrecord shard-00387-of-00500.tfrecord shard-00471-of-00500.tfrecord\r\nshard-00052-of-00500.tfrecord shard-00136-of-00500.tfrecord shard-00220-of-00500.tfrecord shard-00304-of-00500.tfrecord shard-00388-of-00500.tfrecord shard-00472-of-00500.tfrecord\r\nshard-00053-of-00500.tfrecord shard-00137-of-00500.tfrecord shard-00221-of-00500.tfrecord shard-00305-of-00500.tfrecord shard-00389-of-00500.tfrecord shard-00473-of-00500.tfrecord\r\nshard-00054-of-00500.tfrecord shard-00138-of-00500.tfrecord shard-00222-of-00500.tfrecord shard-00306-of-00500.tfrecord shard-00390-of-00500.tfrecord shard-00474-of-00500.tfrecord\r\nshard-00055-of-00500.tfrecord shard-00139-of-00500.tfrecord shard-00223-of-00500.tfrecord shard-00307-of-00500.tfrecord shard-00391-of-00500.tfrecord shard-00475-of-00500.tfrecord\r\nshard-00056-of-00500.tfrecord shard-00140-of-00500.tfrecord shard-00224-of-00500.tfrecord shard-00308-of-00500.tfrecord shard-00392-of-00500.tfrecord shard-00476-of-00500.tfrecord\r\nshard-00057-of-00500.tfrecord shard-00141-of-00500.tfrecord shard-00225-of-00500.tfrecord shard-00309-of-00500.tfrecord shard-00393-of-00500.tfrecord shard-00477-of-00500.tfrecord\r\nshard-00058-of-00500.tfrecord shard-00142-of-00500.tfrecord shard-00226-of-00500.tfrecord shard-00310-of-00500.tfrecord shard-00394-of-00500.tfrecord shard-00478-of-00500.tfrecord\r\nshard-00059-of-00500.tfrecord shard-00143-of-00500.tfrecord shard-00227-of-00500.tfrecord shard-00311-of-00500.tfrecord shard-00395-of-00500.tfrecord shard-00479-of-00500.tfrecord\r\nshard-00060-of-00500.tfrecord shard-00144-of-00500.tfrecord shard-00228-of-00500.tfrecord shard-00312-of-00500.tfrecord shard-00396-of-00500.tfrecord shard-00480-of-00500.tfrecord\r\nshard-00061-of-00500.tfrecord shard-00145-of-00500.tfrecord shard-00229-of-00500.tfrecord 
shard-00313-of-00500.tfrecord shard-00397-of-00500.tfrecord shard-00481-of-00500.tfrecord\r\nshard-00062-of-00500.tfrecord shard-00146-of-00500.tfrecord shard-00230-of-00500.tfrecord shard-00314-of-00500.tfrecord shard-00398-of-00500.tfrecord shard-00482-of-00500.tfrecord\r\nshard-00063-of-00500.tfrecord shard-00147-of-00500.tfrecord shard-00231-of-00500.tfrecord shard-00315-of-00500.tfrecord shard-00399-of-00500.tfrecord shard-00483-of-00500.tfrecord\r\nshard-00064-of-00500.tfrecord shard-00148-of-00500.tfrecord shard-00232-of-00500.tfrecord shard-00316-of-00500.tfrecord shard-00400-of-00500.tfrecord shard-00484-of-00500.tfrecord\r\nshard-00065-of-00500.tfrecord shard-00149-of-00500.tfrecord shard-00233-of-00500.tfrecord shard-00317-of-00500.tfrecord shard-00401-of-00500.tfrecord shard-00485-of-00500.tfrecord\r\nshard-00066-of-00500.tfrecord shard-00150-of-00500.tfrecord shard-00234-of-00500.tfrecord shard-00318-of-00500.tfrecord shard-00402-of-00500.tfrecord shard-00486-of-00500.tfrecord\r\nshard-00067-of-00500.tfrecord shard-00151-of-00500.tfrecord shard-00235-of-00500.tfrecord shard-00319-of-00500.tfrecord shard-00403-of-00500.tfrecord shard-00487-of-00500.tfrecord\r\nshard-00068-of-00500.tfrecord shard-00152-of-00500.tfrecord shard-00236-of-00500.tfrecord shard-00320-of-00500.tfrecord shard-00404-of-00500.tfrecord shard-00488-of-00500.tfrecord\r\nshard-00069-of-00500.tfrecord shard-00153-of-00500.tfrecord shard-00237-of-00500.tfrecord shard-00321-of-00500.tfrecord shard-00405-of-00500.tfrecord shard-00489-of-00500.tfrecord\r\nshard-00070-of-00500.tfrecord shard-00154-of-00500.tfrecord shard-00238-of-00500.tfrecord shard-00322-of-00500.tfrecord shard-00406-of-00500.tfrecord shard-00490-of-00500.tfrecord\r\nshard-00071-of-00500.tfrecord shard-00155-of-00500.tfrecord shard-00239-of-00500.tfrecord shard-00323-of-00500.tfrecord shard-00407-of-00500.tfrecord shard-00491-of-00500.tfrecord\r\nshard-00072-of-00500.tfrecord shard-00156-of-00500.tfrecord shard-00240-of-00500.tfrecord shard-00324-of-00500.tfrecord shard-00408-of-00500.tfrecord shard-00492-of-00500.tfrecord\r\nshard-00073-of-00500.tfrecord shard-00157-of-00500.tfrecord shard-00241-of-00500.tfrecord shard-00325-of-00500.tfrecord shard-00409-of-00500.tfrecord shard-00493-of-00500.tfrecord\r\nshard-00074-of-00500.tfrecord shard-00158-of-00500.tfrecord shard-00242-of-00500.tfrecord shard-00326-of-00500.tfrecord shard-00410-of-00500.tfrecord shard-00494-of-00500.tfrecord\r\nshard-00075-of-00500.tfrecord shard-00159-of-00500.tfrecord shard-00243-of-00500.tfrecord shard-00327-of-00500.tfrecord shard-00411-of-00500.tfrecord shard-00495-of-00500.tfrecord\r\nshard-00076-of-00500.tfrecord shard-00160-of-00500.tfrecord shard-00244-of-00500.tfrecord shard-00328-of-00500.tfrecord shard-00412-of-00500.tfrecord shard-00496-of-00500.tfrecord\r\nshard-00077-of-00500.tfrecord shard-00161-of-00500.tfrecord shard-00245-of-00500.tfrecord shard-00329-of-00500.tfrecord shard-00413-of-00500.tfrecord shard-00497-of-00500.tfrecord\r\nshard-00078-of-00500.tfrecord shard-00162-of-00500.tfrecord shard-00246-of-00500.tfrecord shard-00330-of-00500.tfrecord shard-00414-of-00500.tfrecord shard-00498-of-00500.tfrecord\r\nshard-00079-of-00500.tfrecord shard-00163-of-00500.tfrecord shard-00247-of-00500.tfrecord shard-00331-of-00500.tfrecord shard-00415-of-00500.tfrecord shard-00499-of-00500.tfrecord\r\nshard-00080-of-00500.tfrecord shard-00164-of-00500.tfrecord shard-00248-of-00500.tfrecord shard-00332-of-00500.tfrecord 
shard-00416-of-00500.tfrecord\r\nshard-00081-of-00500.tfrecord shard-00165-of-00500.tfrecord shard-00249-of-00500.tfrecord shard-00333-of-00500.tfrecord shard-00417-of-00500.tfrecord\r\nshard-00082-of-00500.tfrecord shard-00166-of-00500.tfrecord shard-00250-of-00500.tfrecord shard-00334-of-00500.tfrecord shard-00418-of-00500.tfrecord\r\nshard-00083-of-00500.tfrecord shard-00167-of-00500.tfrecord shard-00251-of-00500.tfrecord shard-00335-of-00500.tfrecord shard-00419-of-00500.tfrecord\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord]633;D;0",,terminal_output +638,326097,"TERMINAL",0,0,"du -h .",,terminal_command +639,326140,"TERMINAL",0,0,"]633;E;2025-07-07 19:32:06 du -h .;24571f6f-7d62-4371-8977-e42b5a81d394]633;C4.2T\t.\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord]633;D;0",,terminal_output +640,352868,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/logger.py",0,0,"# utils/logger.py\n\nimport os\nimport json\nfrom abc import ABC, abstractmethod\nfrom typing import Dict, Any\nfrom pprint import pprint\n\nclass BaseLogger(ABC):\n @abstractmethod\n def log_metrics(self, metrics: Dict[str, Any], step: int):\n pass\n\n @abstractmethod\n def log_images(self, images: Dict[str, Any], step: int):\n pass\n\nclass WandbLogger(BaseLogger):\n def __init__(self, config):\n import wandb\n self.wandb = wandb\n self.wandb.init(\n entity=config[""entity""],\n project=config[""project""],\n name=config[""name""],\n tags=config[""tags""],\n group=""debug"",\n config=config,\n )\n\n def log_metrics(self, metrics, step):\n self.wandb.log({**metrics, ""step"": step})\n\n def log_images(self, images, step):\n log_images = {k: self.wandb.Image(v) for k, v in images.items()}\n self.wandb.log({**log_images, ""step"": step})\n\nclass TensorboardLogger(BaseLogger):\n def __init__(self, config):\n from torch.utils.tensorboard import SummaryWriter\n log_dir = os.path.join(config[""log_dir""], ""tb_logger"")\n self.log_dir = log_dir\n self.writer = SummaryWriter(log_dir=log_dir)\n\n def log_metrics(self, metrics, step):\n for k, v in metrics.items():\n self.writer.add_scalar(k, v, step)\n\n def log_images(self, images, step):\n for k, v in images.items():\n self.writer.add_image(k, v, step, dataformats='HWC')\n\nclass LocalLogger(BaseLogger):\n def __init__(self, config):\n log_dir = os.path.join(config[""log_dir""], ""local_logger"")\n self.log_dir = log_dir\n os.makedirs(log_dir, exist_ok=True)\n self.metrics_file = os.path.join(log_dir, ""metrics.jsonl"")\n self.images_dir = os.path.join(log_dir, ""images"")\n os.makedirs(self.images_dir, exist_ok=True)\n\n def log_metrics(self, metrics, step):\n with open(self.metrics_file, ""a"") as f:\n metrics = {k: str(v) for k, v in metrics.items()}\n f.write(json.dumps({""step"": step, **metrics}) + ""\n"")\n\n def log_images(self, images, step):\n for k, v in images.items():\n # v is expected to be a numpy array (HWC, uint8)\n from PIL import Image\n img = Image.fromarray(v)\n img.save(os.path.join(self.images_dir, f""{k}_step{step}.png""))\n\nclass ConsoleLogger(BaseLogger):\n def __init__(self, cfg):\n pprint(cfg, compact=True)\n\n def log_metrics(self, metrics, step):\n print(f""[Step {step}] Metrics: "" + "", "".join(f""{k}: {v}"" for k, v in metrics.items()))\n\n def log_images(self, images, step):\n print(f""[Step {step}] Images logged: {', '.join(images.keys())}"")\n\n\nclass CompositeLogger(BaseLogger):\n def 
__init__(self, loggers, cfg):\n available_loggers = {""wandb"": WandbLogger,\n ""tb"": TensorboardLogger,\n ""json"": TensorboardLogger,\n ""local"": LocalLogger,\n ""console"": ConsoleLogger}\n self.loggers = []\n for logger in loggers:\n assert logger in available_loggers.keys(), f""Logger \""{logger}\"" not known. Available loggers are: {available_loggers.keys()}"" \n logger_class = available_loggers[logger]\n self.loggers.append(logger_class(cfg))\n\n\n def log_metrics(self, metrics, step):\n for logger in self.loggers:\n logger.log_metrics(metrics, step)\n\n def log_images(self, images, step):\n for logger in self.loggers:\n logger.log_images(images, step)\n\n def log_checkpoint(self, checkpoint, step):\n for logger in self.loggers:\n logger.log_checkpoint(checkpoint, step)",python,tab +641,505631,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/logger.py",1493,0,"",python,selection_mouse +642,505636,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/logger.py",1492,0,"",python,selection_command +643,711957,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/train_tokenizer.py",0,0,"from dataclasses import dataclass, field\nimport os\n\nimport einops\nfrom flax.training import orbax_utils\nfrom flax.training.train_state import TrainState\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\n\nfrom models.tokenizer import TokenizerVQVAE\nfrom utils.dataloader import get_dataloader\nfrom utils.parameter_utils import count_parameters_by_component\nfrom utils.logger import CompositeLogger\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 300_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data_tfrecords/coinrun""\n checkpoint: str = """"\n # Optimization\n vq_beta: float = 0.25\n batch_size: int = 48\n min_lr: float = 0.0\n max_lr: float = 3e-4\n warmup_steps: int = 10000\n # Tokenizer\n model_dim: int = 512\n latent_dim: int = 32\n num_latents: int = 1024\n patch_size: int = 4\n num_blocks: int = 8\n num_heads: int = 8\n dropout: float = 0.0\n codebook_dropout: float = 0.01\n # Logging\n log_dir: str = ""logs/"" \n loggers: list[str] = field(default_factory=lambda: [""console""])\n entity: str = """"\n project: str = """"\n name: str = ""train_tokenizer""\n tags: list[str] = field(default_factory=lambda: [""tokenizer""])\n log_interval: int = 5\n log_image_interval: int = 250\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 10000\n log_gradients: bool = False\n\n\nargs = tyro.cli(Args)\n\n\ndef tokenizer_loss_fn(params, state, inputs):\n # --- Compute loss ---\n outputs = state.apply_fn(\n params,\n inputs,\n training=True,\n rngs={""params"": inputs[""rng""], ""dropout"": inputs[""dropout_rng""]},\n )\n mse = jnp.square(inputs[""videos""] - outputs[""recon""]).mean()\n q_loss = jnp.square(jax.lax.stop_gradient(outputs[""emb""]) - outputs[""z""]).mean()\n commitment_loss = jnp.square(\n outputs[""emb""] - jax.lax.stop_gradient(outputs[""z""])\n ).mean()\n loss = mse + q_loss + args.vq_beta * commitment_loss\n\n # --- Compute validation metrics ---\n gt = inputs[""videos""].clip(0, 1).reshape(-1, *inputs[""videos""].shape[2:])\n recon = outputs[""recon""].clip(0, 
1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = pix.psnr(gt, recon).mean()\n ssim = pix.ssim(gt, recon).mean()\n _, index_counts = jnp.unique_counts(\n jnp.ravel(outputs[""indices""]), size=args.num_latents, fill_value=0\n )\n codebook_usage = (index_counts != 0).mean()\n metrics = dict(\n loss=loss,\n mse=mse,\n q_loss=q_loss,\n commitment_loss=commitment_loss,\n psnr=psnr,\n ssim=ssim,\n codebook_usage=codebook_usage,\n )\n return loss, (outputs[""recon""], metrics)\n\n\n@jax.jit\ndef train_step(state, inputs):\n grad_fn = jax.value_and_grad(tokenizer_loss_fn, has_aux=True, allow_int=True)\n (loss, (recon, metrics)), grads = grad_fn(state.params, state, inputs)\n state = state.apply_gradients(grads=grads)\n if args.log_gradients:\n metrics[""encoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""encoder""]\n )\n metrics[""vq_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""vq""]\n )\n metrics[""decoder_gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""decoder""]\n )\n return state, loss, recon, metrics\n\n\nif __name__ == ""__main__"":\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n per_device_batch_size_for_init = args.batch_size // num_devices\n\n rng = jax.random.PRNGKey(args.seed)\n\n # --- Initialize model ---\n tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.model_dim,\n latent_dim=args.latent_dim,\n num_latents=args.num_latents,\n patch_size=args.patch_size,\n num_blocks=args.num_blocks,\n num_heads=args.num_heads,\n dropout=args.dropout,\n codebook_dropout=args.codebook_dropout,\n )\n rng, _rng = jax.random.split(rng)\n image_shape = (args.image_height, args.image_width, args.image_channels)\n inputs = dict(\n videos=jnp.zeros(\n (per_device_batch_size_for_init, args.seq_len, *image_shape),\n dtype=jnp.float32,\n ),\n )\n init_params = tokenizer.init(_rng, inputs)\n\n param_counts = count_parameters_by_component(init_params)\n\n if jax.process_index() == 0:\n cfg = vars(args).copy()\n cfg[""model_param_count""] = param_counts\n logger = CompositeLogger(args.loggers, cfg)\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n lr_schedule = optax.warmup_cosine_decay_schedule(\n args.min_lr, args.max_lr, args.warmup_steps, args.num_steps\n )\n tx = optax.adamw(learning_rate=lr_schedule, b1=0.9, b2=0.9, weight_decay=1e-4)\n train_state = TrainState.create(apply_fn=tokenizer.apply, params=init_params, tx=tx)\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(\n mesh, PartitionSpec(""data"", None, None, None, None)\n )\n train_state = jax.device_put(train_state, replicated_sharding)\n\n # --- Load checkpoint ---\n step = 0\n if args.checkpoint:\n restore_target = {""model"": train_state}\n restore_args = orbax_utils.restore_args_from_target(restore_target)\n train_state.params[""params""].update(\n PyTreeCheckpointer()\n .restore(args.checkpoint, item=restore_target, 
restore_args=restore_args)[\n ""model""\n ]\n .params[""params""]\n )\n # Assume checkpoint is of the form tokenizer__\n step += int(args.checkpoint.split(""_"")[-1])\n\n # --- TRAIN LOOP ---\n # tfrecord_files = [\n # os.path.join(args.data_dir, x)\n # for x in os.listdir(args.data_dir)\n # if x.endswith("".tfrecord"")\n # ]\n # dataloader = get_dataloader(\n # # NOTE: We deliberately pass the global batch size\n # # The dataloader shards the dataset across all processes\n # tfrecord_files,\n # args.seq_len,\n # args.batch_size,\n # *image_shape,\n # seed=args.seed,\n # )\n # dataloader = (jax.make_array_from_process_local_data(videos_sharding, elem) for elem in dataloader) # type: ignore\n print(f""Starting training from step {step}..."")\n while step < args.num_steps:\n # for videos in dataloader:\n videos = np.load(""overfit_dir/single_sample_corner.npy"")\n videos = jax.make_array_from_process_local_data(videos_sharding, videos)\n while True:\n # --- Train step ---\n rng, _rng, _rng_dropout = jax.random.split(rng, 3)\n\n inputs = dict(videos=videos, rng=_rng, dropout_rng=_rng_dropout)\n train_state, loss, recon, metrics = train_step(train_state, inputs)\n step += 1\n\n # --- Logging ---\n if step % args.log_interval == 0 and jax.process_index() == 0:\n logger.log_metrics(\n {\n ""loss"": loss,\n **metrics,\n },\n step\n )\n if step % args.log_image_interval == 0:\n gt_seq = inputs[""videos""][0]\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=np.asarray(gt_seq[0]).astype(np.uint8),\n recon=np.asarray(recon_seq[0]).astype(np.uint8),\n true_vs_recon=np.asarray(comparison_seq.astype(np.uint8)\n ),\n )\n logger.log_images(log_images, step)\n if step % args.log_checkpoint_interval == 0:\n ckpt = {""model"": train_state}\n orbax_checkpointer = orbax.checkpoint.PyTreeCheckpointer()\n save_args = orbax_utils.save_args_from_target(ckpt)\n orbax_checkpointer.save(\n os.path.join(os.getcwd(), args.ckpt_dir, f""tokenizer_{step}""),\n ckpt,\n save_args=save_args,\n )\n if step >= args.num_steps:\n break\n",python,tab +644,719323,"TERMINAL",0,0,"jobmon",,terminal_command +645,719377,"TERMINAL",0,0,"]633;E;2025-07-07 19:38:39 jobmon;24571f6f-7d62-4371-8977-e42b5a81d394]633;Cbash: jobmon: command not found...\r\n",,terminal_output +646,720552,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord]633;D;127",,terminal_output +647,720807,"TERMINAL",0,0,"^C",,terminal_command +648,720849,"TERMINAL",0,0,"^C[?2004l\r[?2004h[?2004l\r\r\n]633;E;;24571f6f-7d62-4371-8977-e42b5a81d394]633;C]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord]633;D",,terminal_output +649,722494,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jafar/utils/logger.py",0,0,"",python,tab +650,723702,"utils/logger.py",0,0,"# utils/logger.py\n\nimport os\nimport json\nfrom abc import ABC, abstractmethod\nfrom typing import Dict, Any\nfrom pprint import pprint\n\nclass BaseLogger(ABC):\n @abstractmethod\n def log_metrics(self, metrics: Dict[str, Any], step: int):\n pass\n\n @abstractmethod\n def log_images(self, images: 
Dict[str, Any], step: int):\n pass\n\nclass WandbLogger(BaseLogger):\n def __init__(self, config):\n import wandb\n self.wandb = wandb\n self.wandb.init(\n entity=config[""entity""],\n project=config[""project""],\n name=config[""name""],\n tags=config[""tags""],\n group=""debug"",\n config=config,\n )\n\n def log_metrics(self, metrics, step):\n self.wandb.log({**metrics, ""step"": step})\n\n def log_images(self, images, step):\n log_images = {k: self.wandb.Image(v) for k, v in images.items()}\n self.wandb.log({**log_images, ""step"": step})\n\nclass TensorboardLogger(BaseLogger):\n def __init__(self, config):\n from torch.utils.tensorboard import SummaryWriter\n log_dir = os.path.join(config[""log_dir""], ""tb_logger"")\n self.log_dir = log_dir\n self.writer = SummaryWriter(log_dir=log_dir)\n\n def log_metrics(self, metrics, step):\n for k, v in metrics.items():\n self.writer.add_scalar(k, v, step)\n\n def log_images(self, images, step):\n for k, v in images.items():\n self.writer.add_image(k, v, step, dataformats='HWC')\n\nclass LocalLogger(BaseLogger):\n def __init__(self, config):\n log_dir = os.path.join(config[""log_dir""], ""local_logger"")\n self.log_dir = log_dir\n os.makedirs(log_dir, exist_ok=True)\n self.metrics_file = os.path.join(log_dir, ""metrics.jsonl"")\n self.images_dir = os.path.join(log_dir, ""images"")\n os.makedirs(self.images_dir, exist_ok=True)\n\n def log_metrics(self, metrics, step):\n with open(self.metrics_file, ""a"") as f:\n metrics = {k: str(v) for k, v in metrics.items()}\n f.write(json.dumps({""step"": step, **metrics}) + ""\n"")\n\n def log_images(self, images, step):\n for k, v in images.items():\n # v is expected to be a numpy array (HWC, uint8)\n from PIL import Image\n img = Image.fromarray(v)\n img.save(os.path.join(self.images_dir, f""{k}_step{step}.png""))\n\nclass ConsoleLogger(BaseLogger):\n def __init__(self, cfg):\n pprint(cfg, compact=True)\n\n def log_metrics(self, metrics, step):\n print(f""[Step {step}] Metrics: "" + "", "".join(f""{k}: {v}"" for k, v in metrics.items()))\n\n def log_images(self, images, step):\n print(f""[Step {step}] Images logged: {', '.join(images.keys())}"")\n\n\nclass CompositeLogger(BaseLogger):\n def __init__(self, loggers, cfg):\n available_loggers = {""wandb"": WandbLogger,\n ""tb"": TensorboardLogger,\n ""json"": TensorboardLogger,\n ""local"": LocalLogger,\n ""console"": ConsoleLogger}\n self.loggers = []\n for logger in loggers:\n assert logger in available_loggers.keys(), f""Logger \""{logger}\"" not known. 
Available loggers are: {available_loggers.keys()}"" \n logger_class = available_loggers[logger]\n self.loggers.append(logger_class(cfg))\n\n\n def log_metrics(self, metrics, step):\n for logger in self.loggers:\n logger.log_metrics(metrics, step)\n\n def log_images(self, images, step):\n for logger in self.loggers:\n logger.log_images(images, step)\n\n def log_checkpoint(self, checkpoint, step):\n for logger in self.loggers:\n logger.log_checkpoint(checkpoint, step)",python,tab +651,1388571,"utils/logger.py",1522,0,"",python,selection_mouse +652,1428081,"utils/dataloader.py",0,0,"import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntf.config.experimental.set_visible_devices([], ""GPU"")\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c, seed):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n seed: The seed for the random number generator.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32, seed=seed\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef get_dataloader(\n tfrecord_paths: list[str], # List of TFRecord file paths\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 1000,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n 
global_batch_size % num_processes == 0\n ), ""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n dataset = tf.data.TFRecordDataset(\n tfrecord_paths, num_parallel_reads=tf.data.AUTOTUNE\n )\n\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n\n # (f.srambical) NOTE: For TFRecords, it's often good to have a large shuffle buffer.\n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n seed=seed,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n",python,tab +653,1433345,"utils/dataloader.py",3439,0,"",python,selection_mouse +654,1433473,"utils/dataloader.py",3439,18,"num_parallel_reads",python,selection_mouse +655,1434055,"utils/dataloader.py",3404,0,"",python,selection_mouse +656,1434212,"utils/dataloader.py",3398,15,"TFRecordDataset",python,selection_mouse +657,1435908,"utils/dataloader.py",3448,0,"",python,selection_mouse +658,1436045,"utils/dataloader.py",3439,18,"num_parallel_reads",python,selection_mouse +659,1436389,"utils/dataloader.py",3439,21,"num_parallel_reads=tf",python,selection_mouse +660,1436390,"utils/dataloader.py",3439,22,"num_parallel_reads=tf.",python,selection_mouse +661,1436391,"utils/dataloader.py",3414,43,"\n tfrecord_paths, num_parallel_reads",python,selection_mouse +662,1436977,"utils/dataloader.py",3439,35,"num_parallel_reads=tf.data.AUTOTUNE",python,selection_mouse +663,1437473,"utils/dataloader.py",3474,0,"",python,selection_mouse +664,1437505,"utils/dataloader.py",3473,0,"",python,selection_command +665,1437633,"utils/dataloader.py",3466,8,"AUTOTUNE",python,selection_mouse +666,1437676,"utils/dataloader.py",3467,7,"UTOTUNE",python,selection_command +667,1437908,"utils/dataloader.py",3467,0,"",python,selection_mouse +668,1437908,"utils/dataloader.py",3461,6,"data.A",python,selection_mouse +669,1437909,"utils/dataloader.py",3439,28,"num_parallel_reads=tf.data.A",python,selection_mouse +670,1438035,"utils/dataloader.py",3467,13,"UTOTUNE\n )",python,selection_mouse +671,1438848,"utils/dataloader.py",3408,0,"",python,selection_mouse +672,1438991,"utils/dataloader.py",3398,15,"TFRecordDataset",python,selection_mouse +673,1439174,"utils/dataloader.py",3398,59,"TFRecordDataset(\n tfrecord_paths, num_parallel_reads",python,selection_mouse +674,1439252,"utils/dataloader.py",3398,60,"TFRecordDataset(\n tfrecord_paths, num_parallel_reads=",python,selection_mouse +675,1439295,"utils/dataloader.py",3398,82,"TFRecordDataset(\n tfrecord_paths, num_parallel_reads=tf.data.AUTOTUNE\n )",python,selection_mouse +676,1439622,"utils/dataloader.py",3480,0,"",python,selection_mouse +677,1439638,"utils/dataloader.py",3479,0,"",python,selection_command +678,1439882,"utils/dataloader.py",3480,0,"",python,selection_mouse +679,1439886,"utils/dataloader.py",3479,0,"",python,selection_command 
+680,1440151,"utils/dataloader.py",3479,1,")",python,selection_mouse +681,1440152,"utils/dataloader.py",3461,18,"data.AUTOTUNE\n ",python,selection_mouse +682,1440152,"utils/dataloader.py",3458,21,"tf.data.AUTOTUNE\n ",python,selection_mouse +683,1440153,"utils/dataloader.py",3439,40,"num_parallel_reads=tf.data.AUTOTUNE\n ",python,selection_mouse +684,1440154,"utils/dataloader.py",3480,0,"",python,selection_command +685,1440207,"utils/dataloader.py",3439,41,"num_parallel_reads=tf.data.AUTOTUNE\n )",python,selection_mouse +686,1440589,"utils/dataloader.py",3453,0,"",python,selection_mouse +687,1513922,"utils/dataloader.py",3554,0,"",python,selection_mouse +688,1514532,"utils/dataloader.py",3120,0,"",python,selection_mouse +689,1514537,"utils/dataloader.py",3119,0,"",python,selection_command +690,1525710,"utils/dataloader.py",3806,0,"",python,selection_mouse +691,1525728,"utils/dataloader.py",3805,0,"",python,selection_command +692,1534908,"utils/logger.py",0,0,"",python,tab +693,1536100,"utils/dataloader.py",0,0,"",python,tab +694,1540627,"TERMINAL",0,0,"",,terminal_focus +695,1549651,"TERMINAL",0,0,"",,terminal_command +696,1549752,"TERMINAL",0,0,"",,terminal_command +697,1549752,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jafar[?2004h]633;Ajafar[tum_cte0515@hkn1991 jafar]$ ]633;B\r\n[?2004l\r]633;E;;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D",,terminal_output +698,1554137,"TERMINAL",0,0,"cd ..",,terminal_command +699,1554153,"TERMINAL",0,0,"]633;E;2025-07-07 19:52:34 cd ..;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects]633;D;0",,terminal_output +700,1556682,"TERMINAL",0,0,"mkdir tmp",,terminal_command +701,1556738,"TERMINAL",0,0,"]633;E;2025-07-07 19:52:37 mkdir tmp;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects]633;D;0",,terminal_output +702,1558351,"TERMINAL",0,0,"cd tmp/",,terminal_command +703,1558401,"TERMINAL",0,0,"]633;E;2025-07-07 19:52:38 cd tmp/;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects/tmp]633;D;0",,terminal_output +704,1558692,"TERMINAL",0,0,"ls",,terminal_command +705,1558699,"TERMINAL",0,0,"]633;E;2025-07-07 19:52:39 ls;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects/tmp]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/tmp",,terminal_output +706,1595052,"TERMINAL",0,0,"git clone git@github.com:FLAIROx/jafar.git",,terminal_command +707,1595092,"TERMINAL",0,0,"]633;E;2025-07-07 19:53:15 git clone git@github.com:FLAIROx/jafar.git;7d45707f-8bba-4662-a344-aba14e13e9ae]633;CCloning into 'jafar'...\r\n",,terminal_output +708,1596638,"TERMINAL",0,0,"remote: Enumerating objects: 71, done.\r\nremote: Counting objects: 2% (1/41)\rremote: Counting objects: 4% (2/41)\rremote: Counting objects: 7% (3/41)\rremote: Counting objects: 9% (4/41)\rremote: Counting objects: 12% (5/41)\rremote: Counting objects: 14% (6/41)\rremote: Counting objects: 17% (7/41)\rremote: Counting objects: 19% (8/41)\rremote: Counting objects: 21% (9/41)\rremote: Counting objects: 24% (10/41)\rremote: Counting objects: 26% (11/41)\rremote: Counting objects: 29% (12/41)\rremote: Counting objects: 31% (13/41)\rremote: Counting objects: 34% (14/41)\rremote: Counting objects: 36% (15/41)\rremote: Counting objects: 39% (16/41)\rremote: Counting objects: 41% (17/41)\rremote: Counting objects: 43% (18/41)\rremote: Counting objects: 46% (19/41)\rremote: Counting objects: 48% (20/41)\rremote: Counting objects: 51% (21/41)\rremote: Counting 
objects: 100% (41/41), done.\r\nremote: Compressing objects: 36% (7/19)\r",,terminal_output +709,1596757,"TERMINAL",0,0,"remote: Compressing objects: 100% (19/19), done.\r\nReceiving objects: 19% (14/71)\r",,terminal_output +710,1596815,"TERMINAL",0,0,"Receiving objects: 76% (54/71)\r",,terminal_output +711,1596938,"TERMINAL",0,0,"remote: Total 71 (delta 32), reused 22 
(delta 22), pack-reused 30 (from 1)\r\nReceiving objects: 77% (55/71)\rReceiving objects: 78% (56/71)\rReceiving objects: 80% (57/71)\rReceiving objects: 81% (58/71)\rReceiving objects: 83% (59/71)\rReceiving objects: 84% (60/71)\rReceiving objects: 85% (61/71)\rReceiving objects: 87% (62/71)\rReceiving objects: 88% (63/71)\rReceiving objects: 90% (64/71)\rReceiving objects: 91% (65/71)\rReceiving objects: 92% (66/71)\rReceiving objects: 94% (67/71)\rReceiving objects: 95% (68/71)\rReceiving objects: 97% (69/71)\rReceiving objects: 98% (70/71)\rReceiving objects: 100% (71/71)\rReceiving objects: 100% (71/71), 83.28 KiB | 446.00 KiB/s, done.\r\nResolving deltas: 0% (0/33)\rResolving deltas: 3% (1/33)\rResolving deltas: 6% (2/33)\rResolving deltas: 9% (3/33)\rResolving deltas: 15% (5/33)\rResolving deltas: 21% (7/33)\rResolving deltas: 24% (8/33)\rResolving deltas: 27% (9/33)\rResolving deltas: 33% (11/33)\rResolving deltas: 36% (12/33)\rResolving deltas: 39% (13/33)\rResolving deltas: 42% (14/33)\rResolving deltas: 45% (15/33)\rResolving deltas: 48% (16/33)\rResolving deltas: 51% (17/33)\rResolving deltas: 54% (18/33)\rResolving deltas: 57% (19/33)\rResolving deltas: 60% (20/33)\rResolving deltas: 63% (21/33)\rResolving deltas: 66% (22/33)\rResolving deltas: 69% (23/33)\rResolving deltas: 81% (27/33)\rResolving deltas: 84% (28/33)\rResolving deltas: 87% (29/33)\rResolving deltas: 90% (30/33)\rResolving deltas: 93% (31/33)\rResolving deltas: 96% (32/33)\rResolving deltas: 100% (33/33)\rResolving deltas: 100% (33/33), done.\r\n",,terminal_output +712,1597009,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/tmp]633;D;0",,terminal_output +713,1602099,"TERMINAL",0,0,"cd jafar/",,terminal_command +714,1602139,"TERMINAL",0,0,"]633;E;2025-07-07 19:53:22 cd jafar/;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output +715,1602435,"TERMINAL",0,0,"ls",,terminal_command +716,1602489,"TERMINAL",0,0,"]633;E;2025-07-07 19:53:23 ls;7d45707f-8bba-4662-a344-aba14e13e9ae]633;Cgenerate_dataset.py genie.py LICENSE models README.md requirements.txt sample.py train_dynamics.py train_lam.py train_tokenizer.py utils\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output +717,1621571,"TERMINAL",0,0,"echo $(pwd)/sample.py",,terminal_command +718,1621611,"TERMINAL",0,0,"]633;E;2025-07-07 19:53:42 echo $(pwd)/sample.py;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output +719,1629459,"TERMINAL",0,0,"echo $(pwd)/genie.py",,terminal_command +720,1629465,"TERMINAL",0,0,"]633;E;2025-07-07 19:53:50 echo $(pwd)/genie.py;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output +721,1630926,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",0,0,"from dataclasses import dataclass\nimport time\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 
1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_resolution, args.image_resolution, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid\n\n\n# --- Get video + latent actions ---\ndataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\nvideo_batch = next(iter(dataloader))\n# Get latent actions from first video only\nfirst_video = video_batch[:1]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For 
other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +722,1645438,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",0,0,"from typing import Dict, Any\n\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n 
init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(params: Dict[str, Any], tokenizer: str, lam: str):\n """"""Restore pre-trained Genie components""""""\n params[""params""][""tokenizer""].update(\n PyTreeCheckpointer().restore(tokenizer)[""model""][""params""][""params""]\n )\n params[""params""][""lam""].update(\n PyTreeCheckpointer().restore(lam)[""model""][""params""][""params""]\n )\n return params\n",python,tab +723,1647250,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",0,0,"",python,tab +724,1648242,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",0,0,"",python,tab 
+725,1649040,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",0,0,"",python,tab +726,1650007,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",0,0,"",python,tab +727,1660758,"TERMINAL",0,0,"bash",,terminal_focus +728,1667647,"TERMINAL",0,0,"git checkout fix-sampling",,terminal_command +729,1667663,"TERMINAL",0,0,"]633;E;2025-07-07 19:54:28 git checkout fix-sampling;24571f6f-7d62-4371-8977-e42b5a81d394]633;Cfatal: not a git repository (or any parent up to mount point /hkfs)\r\nStopping at filesystem boundary (GIT_DISCOVERY_ACROSS_FILESYSTEM not set).\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data/open_ai_minecraft_tfrecord]633;D;128",,terminal_output +730,1669936,"TERMINAL",0,0,"bash",,terminal_focus +731,1673047,"TERMINAL",0,0,"cd ..",,terminal_command +732,1673084,"TERMINAL",0,0,"]633;E;2025-07-07 19:54:33 cd ..;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects/tmp]633;D;0",,terminal_output +733,1673311,"TERMINAL",0,0,"ls",,terminal_command +734,1673359,"TERMINAL",0,0,"]633;E;2025-07-07 19:54:33 ls;7d45707f-8bba-4662-a344-aba14e13e9ae]633;Cjafar\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp]633;D;0",,terminal_output +735,1674638,"TERMINAL",0,0,"cd ..",,terminal_command +736,1674685,"TERMINAL",0,0,"]633;E;2025-07-07 19:54:35 cd ..;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects",,terminal_output +737,1676195,"TERMINAL",0,0,"cd jafar",,terminal_command +738,1676226,"TERMINAL",0,0,"]633;E;2025-07-07 19:54:36 cd jafar;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +739,1681211,"TERMINAL",0,0,"git checkout fix-sampling",,terminal_command +740,1681220,"TERMINAL",0,0,"]633;E;2025-07-07 19:54:41 git checkout fix-sampling;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C",,terminal_output +741,1681414,"TERMINAL",0,0,"error: Your local changes to the following files would be overwritten by checkout:\r\n\ttrain_tokenizer.py\r\nPlease commit your changes or stash them before you switch branches.\r\nAborting\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;1",,terminal_output +742,1684464,"train_tokenizer.py",0,0,"",python,tab +743,1689766,"TERMINAL",0,0,"git stash",,terminal_command +744,1689811,"TERMINAL",0,0,"]633;E;2025-07-07 19:54:50 git stash;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C",,terminal_output +745,1690102,"TERMINAL",0,0,"Saved working directory and index state WIP on logging-variants: f613a97 init other typed of loggers\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +746,1691573,"TERMINAL",0,0,"git checkout fix-sampling",,terminal_command +747,1691622,"TERMINAL",0,0,"]633;E;2025-07-07 19:54:52 git checkout fix-sampling;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C",,terminal_output +748,1691759,"TERMINAL",0,0,"Switched to branch 'fix-sampling'\r\nYour branch is up to date with 'origin/fix-sampling'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +749,1695311,"",0,0,"Switched from branch 'logging-variants' to 'fix-sampling'",,git_branch_checkout +750,1695670,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import 
get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\ndef _sampling_wrapper(module, batch):\n return module.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid\n\n# --- Get video + latent actions ---\ntfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n]\ndataloader = get_dataloader(\n tfrecord_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n seed=args.seed,\n)\nvideo_batch = next(iter(dataloader))\n# Get latent actions from first video only; clip them down to the specified seq_len\nfirst_video = video_batch[:1, :args.seq_len]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, 
*video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +751,1700544,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom jax import NamedSharding\nfrom flax.training.train_state import TrainState\nfrom flax.training import orbax_utils\nfrom orbax.checkpoint import PyTreeCheckpointer\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n 
mle_indices, batch[""videos""].shape[2:4]\n )\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # 
(1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n lam_init_params = dummy_lam.init(_rng, inputs)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n\n def create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n\n abstract_sharded_tokenizer_state = create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n 
abstract_sharded_lam_state = create_abstract_sharded_pytree(\n        dummy_lam_train_state, sharding\n    )\n\n    tokenizer_restore_target = {""model"": abstract_sharded_tokenizer_state}\n    lam_restore_target = {""model"": abstract_sharded_lam_state}\n\n    tokenizer_restore_args = orbax_utils.restore_args_from_target(\n        tokenizer_restore_target\n    )\n    lam_restore_args = orbax_utils.restore_args_from_target(lam_restore_target)\n\n    restored_tokenizer_params = (\n        PyTreeCheckpointer()\n        .restore(\n            args.tokenizer_checkpoint,\n            item=tokenizer_restore_target,\n            restore_args=tokenizer_restore_args,\n        )[""model""]\n        .params[""params""]\n    )\n    restored_lam_params = (\n        PyTreeCheckpointer()\n        .restore(\n            args.lam_checkpoint, item=lam_restore_target, restore_args=lam_restore_args\n        )[""model""]\n        .params[""params""]\n    )\n    # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n    # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately discarded.\n    # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n    restored_lam_params = {\n        k: v\n        for k, v in restored_lam_params.items()\n        if k in train_state.params[""params""][""lam""]\n    }\n\n    train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n    train_state.params[""params""][""lam""].update(restored_lam_params)\n\n    return train_state\n",python,tab
+752,1766618,"TERMINAL",0,0,"bash",,terminal_focus
+753,1767434,"genie.py",6747,0,"",python,selection_mouse
+754,1770468,"TERMINAL",0,0,"bash",,terminal_focus
+755,1774584,"TERMINAL",0,0,"cd ../tmp/",,terminal_command
+756,1774634,"TERMINAL",0,0,"]633;E;2025-07-07 19:56:15 cd ../tmp/;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects/tmp]633;D;0",,terminal_output
+757,1776575,"TERMINAL",0,0,"rm -rf jafar/",,terminal_command
+758,1776632,"TERMINAL",0,0,"]633;E;2025-07-07 19:56:17 rm -rf jafar/;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C",,terminal_output
+759,1777569,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/tmp]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/tmp",,terminal_output
+760,1781311,"TERMINAL",0,0,"git clone git@github.com:maharajamihir/jafar.git",,terminal_command
+761,1781359,"TERMINAL",0,0,"]633;E;2025-07-07 19:56:21 git clone git@github.com:maharajamihir/jafar.git;7d45707f-8bba-4662-a344-aba14e13e9ae]633;CCloning into 'jafar'...\r\n",,terminal_output
+762,1782972,"TERMINAL",0,0,"remote: Enumerating objects: 61, done.\r\nremote: 
Counting objects: 100% (38/38), done.\r\nremote: Compressing objects: 100% (20/20), done.\r",,terminal_output
+763,1783108,"TERMINAL",0,0,"Receiving objects: 62% (38/61)\r",,terminal_output
+764,1783178,"TERMINAL",0,0,"remote: Total 61 (delta 28), reused 18 (delta 18), pack-reused 23 (from 1)\r\nReceiving 
objects: 100% (61/61), 29.60 KiB | 312.00 KiB/s, done.\r\nResolving deltas: 100% (28/28), done.\r\n",,terminal_output
+765,1783274,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/tmp]633;D;0",,terminal_output
+766,1786438,"TERMINAL",0,0,"cd jafar/",,terminal_command
+767,1786450,"TERMINAL",0,0,"]633;E;2025-07-07 19:56:27 cd jafar/;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output
+768,1920161,"TERMINAL",0,0,"git checkout -b ""faster-genie--sampling""",,terminal_command
+769,1920211,"TERMINAL",0,0,"]633;E;2025-07-07 19:58:40 git checkout -b ""faster-genie--sampling"";7d45707f-8bba-4662-a344-aba14e13e9ae]633;C",,terminal_output
+770,1920347,"TERMINAL",0,0,"Switched to a new branch 'faster-genie--sampling'\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output
+771,1925798,"TERMINAL",0,0,"git checkout main",,terminal_command
+772,1925896,"TERMINAL",0,0,"]633;E;2025-07-07 19:58:46 git checkout main;7d45707f-8bba-4662-a344-aba14e13e9ae]633;CSwitched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output
+773,1932822,"TERMINAL",0,0,"git branch -d faster-genie--sampling",,terminal_command
+774,1932872,"TERMINAL",0,0,"]633;E;2025-07-07 19:58:53 git branch -d faster-genie--sampling;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C",,terminal_output
+775,1933028,"TERMINAL",0,0,"Deleted branch faster-genie--sampling (was 5ff9fc7).\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output
+776,1939625,"TERMINAL",0,0,"git checkout -b ""genie-sampling""",,terminal_command
+777,1939668,"TERMINAL",0,0,"]633;E;2025-07-07 19:59:00 git checkout -b ""genie-sampling"";7d45707f-8bba-4662-a344-aba14e13e9ae]633;CSwitched to a new branch 'genie-sampling'\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output
+778,1945266,"TERMINAL",0,0,"echo $(pwd)/genie.py",,terminal_command
+779,1945308,"TERMINAL",0,0,"]633;E;2025-07-07 19:59:05 echo $(pwd)/genie.py;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output
+780,1948965,"TERMINAL",0,0,"echo $(pwd)/sample.py",,terminal_command
+781,1949006,"TERMINAL",0,0,"]633;E;2025-07-07 19:59:09 echo $(pwd)/sample.py;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output
+782,1950514,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",0,0,"from typing import Dict, Any\n\nfrom orbax.checkpoint import PyTreeCheckpointer\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as 
nn\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], 
training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n\ndef restore_genie_components(params: Dict[str, Any], tokenizer: str, lam: str):\n """"""Restore pre-trained Genie components""""""\n params[""params""][""tokenizer""].update(\n PyTreeCheckpointer().restore(tokenizer)[""model""][""params""][""params""]\n )\n params[""params""][""lam""].update(\n PyTreeCheckpointer().restore(lam)[""model""][""params""][""params""]\n )\n return params\n",python,tab +783,1953879,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",0,0,"from dataclasses import dataclass\nimport time\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_resolution: int = 64\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: 
int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_resolution, args.image_resolution, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid\n\n\n# --- Get video + latent actions ---\ndataloader = get_dataloader(args.data_dir, args.seq_len, args.batch_size)\nvideo_batch = next(iter(dataloader))\n# Get latent actions from first video only\nfirst_video = video_batch[:1]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n 
d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +784,1955694,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",0,0,"",python,tab +785,1957365,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",0,0,"",python,tab +786,1958460,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",0,0,"",python,tab +787,1959274,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",0,0,"",python,tab +788,1960926,"sample.py",0,0,"",python,tab +789,1965095,"sample.py",150,0,"",python,selection_mouse +790,1965097,"sample.py",149,0,"",python,selection_command +791,1967862,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",0,0,"",python,tab +792,1967863,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",124,0,"",python,selection_mouse +793,1971389,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",100,0,"",python,selection_command +794,1972554,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",116,0,"\nimport flax.linen as nn",python,content +795,1972586,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",117,0,"",python,selection_command +796,2002476,"sample.py",0,0,"",python,tab +797,2002477,"sample.py",3000,0,"",python,selection_mouse +798,2002485,"sample.py",2999,0,"",python,selection_command +799,2002670,"sample.py",2999,1,"d",python,selection_mouse +800,2002671,"sample.py",2975,24,"\n return generated_vi",python,selection_mouse +801,2002671,"sample.py",2954,45,",\n batch\n )\n return generated_vi",python,selection_mouse +802,2002672,"sample.py",2848,151," = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vi",python,selection_mouse +803,2002672,"sample.py",2742,257,"sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vi",python,selection_mouse +804,2002716,"sample.py",3000,0,"",python,selection_command +805,2002716,"sample.py",2637,363,"ef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid",python,selection_mouse +806,2002746,"sample.py",2590,410,"# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid",python,selection_mouse +807,2002786,"sample.py",2589,411,"\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return 
generated_vid",python,selection_mouse +808,2002901,"sample.py",2485,515," return module.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid",python,selection_mouse +809,2002954,"sample.py",2447,553,"def _sampling_wrapper(module, batch):\n return module.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid",python,selection_mouse +810,2003033,"sample.py",2446,554,"\ndef _sampling_wrapper(module, batch):\n return module.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid",python,selection_mouse +811,2003557,"sample.py",2447,553,"def _sampling_wrapper(module, batch):\n return module.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid",python,selection_mouse +812,2007864,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",0,0,"",python,tab +813,2007865,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",3112,0,"",python,selection_mouse +814,2007875,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",3111,0,"",python,selection_command +815,2008077,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",3111,1,"d",python,selection_mouse +816,2008077,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",3061,50,"oncatenate([vid, new_frame], axis=1)\n return vi",python,selection_mouse +817,2008078,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",3041,70,"\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vi",python,selection_mouse +818,2008078,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2912,199," args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vi",python,selection_mouse +819,2008078,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2868,243," params,\n batch,\n args.maskgit_steps,\n 
args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vi",python,selection_mouse +820,2008118,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",3112,0,"",python,selection_command +821,2008119,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2746,366," batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid",python,selection_mouse +822,2008172,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2704,408," rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid",python,selection_mouse +823,2008204,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2670,442," print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid",python,selection_mouse +824,2008331,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2634,478," # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid",python,selection_mouse +825,2008653,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2570,542," for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid",python,selection_mouse +826,2009461,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2521,591," vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid",python,selection_mouse +827,2009527,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2468,644,"def _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = 
jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid",python,selection_mouse +828,2009621,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2422,690,"# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n for frame_idx in range(args.start_frame + 1, args.seq_len):\n # --- Sample next frame ---\n print(""Frame"", frame_idx)\n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch[:, :frame_idx], rng=_rng)\n new_frame = genie.apply(\n params,\n batch,\n args.maskgit_steps,\n args.temperature,\n args.sample_argmax,\n method=Genie.sample,\n )\n vid = jnp.concatenate([vid, new_frame], axis=1)\n return vid",python,selection_mouse +829,2010645,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2422,690,"",python,content +830,2011130,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2422,0,"def _sampling_wrapper(module, batch):\n return module.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid",python,content +831,2013275,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2583,0,"",python,selection_mouse +832,2014493,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2582,0,"",python,selection_command +833,2015518,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2421,0,"",python,selection_mouse +834,2016566,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",2421,0,"\n",python,content +835,2020617,"sample.py",0,0,"",python,tab +836,2020618,"sample.py",3000,0,"",python,selection_mouse +837,2020685,"sample.py",2999,0,"",python,selection_command +838,2028272,"sample.py",3533,0,"",python,selection_mouse +839,2028459,"sample.py",3531,11,"first_video",python,selection_mouse +840,2029423,"sample.py",3469,0,"",python,selection_mouse +841,2029519,"sample.py",3466,11,"first_video",python,selection_mouse +842,2030767,"sample.py",3551,0,"",python,selection_mouse +843,2030896,"sample.py",3544,12,"action_batch",python,selection_mouse +844,2034722,"sample.py",3849,0,"",python,selection_mouse +845,2036375,"sample.py",3874,0,"",python,selection_mouse +846,2037372,"sample.py",3856,0,"",python,selection_mouse +847,2037971,"sample.py",3852,0,"",python,selection_mouse +848,2038103,"sample.py",3850,3,"rng",python,selection_mouse +849,2081293,"genie.py",0,0,"",python,tab +850,2082786,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",0,0,"",python,tab +851,2089846,"genie.py",0,0,"",python,tab +852,2089848,"genie.py",2864,0,"",python,selection_mouse +853,2091440,"genie.py",2856,15," @nn.compact",python,selection_command +854,2092071,"genie.py",2856,31," @nn.compact\n def sample(",python,selection_command +855,2092371,"genie.py",2856,45," @nn.compact\n def sample(\n 
self,",python,selection_command +856,2092850,"genie.py",2856,76," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],",python,selection_command +857,2092899,"genie.py",2856,98," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,",python,selection_command +858,2092944,"genie.py",2856,123," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,",python,selection_command +859,2092955,"genie.py",2856,155," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,",python,selection_command +860,2093003,"genie.py",2856,192," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,",python,selection_command +861,2093044,"genie.py",2856,206," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:",python,selection_command +862,2093099,"genie.py",2856,218," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""",python,selection_command +863,2093100,"genie.py",2856,315," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.",python,selection_command +864,2093183,"genie.py",2856,316," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n",python,selection_command +865,2093184,"genie.py",2856,359," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.",python,selection_command +866,2093240,"genie.py",2856,430," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.",python,selection_command +867,2093241,"genie.py",2856,485," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.",python,selection_command +868,2093277,"genie.py",2856,486," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - 
Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n",python,selection_command +869,2093277,"genie.py",2856,500," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:",python,selection_command +870,2093367,"genie.py",2856,596," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.",python,selection_command +871,2093368,"genie.py",2856,718," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.",python,selection_command +872,2093457,"genie.py",2856,771," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by ",python,selection_command +873,2093458,"genie.py",2856,833," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.",python,selection_command +874,2093546,"genie.py",2856,916," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 
25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.",python,selection_command +875,2093546,"genie.py",2856,917," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n",python,selection_command +876,2093547,"genie.py",2856,941," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:",python,selection_command +877,2093638,"genie.py",2856,969," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size ",python,selection_command +878,2093639,"genie.py",2856,1024," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n 
sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames ",python,selection_command +879,2093639,"genie.py",2856,1059," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame ",python,selection_command +880,2093762,"genie.py",2856,1092," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length ",python,selection_command +881,2093763,"genie.py",2856,1122," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is 
applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space ",python,selection_command +882,2093763,"genie.py",2856,1160," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension",python,selection_command +883,2093946,"genie.py",2856,1172," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""",python,selection_command +884,2093947,"genie.py",2856,1216," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---",python,selection_command +885,2093947,"genie.py",2856,1298," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n 
sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)",python,selection_command +886,2093948,"genie.py",2856,1356," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)",python,selection_command +887,2094293,"genie.py",2856,1391," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape",python,selection_command +888,2094294,"genie.py",2856,1431," @nn.compact\n def sample(\n self,\n batch: 
Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)",python,selection_command +889,2094294,"genie.py",2856,1490," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)",python,selection_command +890,2094295,"genie.py",2856,1566," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n 
""""""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)",python,selection_command +891,2094295,"genie.py",2856,1637," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])",python,selection_command +892,2094296,"genie.py",2856,1638," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n",python,selection_command +893,2094296,"genie.py",2856,1669," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n 
Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(",python,selection_command +894,2094470,"genie.py",2856,1694," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,",python,selection_command +895,2094470,"genie.py",2856,1735," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each 
decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",",python,selection_command +896,2094471,"genie.py",2856,1777," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},",python,selection_command +897,2094471,"genie.py",2856,1800," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = 
tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,",python,selection_command +898,2094472,"genie.py",2856,1824," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,",python,selection_command +899,2094472,"genie.py",2856,1850," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n 
variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,",python,selection_command +900,2094472,"genie.py",2856,1860," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )",python,selection_command +901,2094473,"genie.py",2856,1865," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n ",python,selection_command +902,2094533,"genie.py",2856,1896," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n 
sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(",python,selection_command +903,2094576,"genie.py",2856,1932," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,",python,selection_command +904,2094672,"genie.py",2856,1970," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated 
autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,",python,selection_command +905,2094825,"genie.py",2856,2007," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,",python,selection_command +906,2095159,"genie.py",2856,2048," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For 
interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,",python,selection_command +907,2095325,"genie.py",2856,2073," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,",python,selection_command +908,2095509,"genie.py",2856,2083," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For 
interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )",python,selection_command +909,2095595,"genie.py",2856,2084," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n",python,selection_command +910,2095796,"genie.py",2856,2131," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single 
pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):",python,selection_command +911,2096529,"genie.py",2856,2175," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry",python,selection_command +912,2097027,"genie.py",2856,2225," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - 
Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)",python,selection_command +913,2097166,"genie.py",2856,2226," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n",python,selection_command +914,2097216,"genie.py",2856,2291," @nn.compact\n 
def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)",python,selection_command +915,2097217,"genie.py",2856,2347," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n 
dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)",python,selection_command +916,2097305,"genie.py",2856,2433," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)",python,selection_command +917,2097305,"genie.py",2856,2470," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = 
self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)",python,selection_command +918,2097306,"genie.py",2856,2529," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask",python,selection_command +919,2097448,"genie.py",2856,2530," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are 
generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n",python,selection_command +920,2097449,"genie.py",2856,2584," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n 
steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---",python,selection_command +921,2097449,"genie.py",2856,2619," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (",python,selection_command +922,2097668,"genie.py",2856,2645," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n 
B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,",python,selection_command +923,2097669,"genie.py",2856,2680," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop 
---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,",python,selection_command +924,2097670,"genie.py",2856,2702," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,",python,selection_command +925,2097670,"genie.py",2856,2733," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n 
B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,",python,selection_command +926,2097671,"genie.py",2856,2747," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )",python,selection_command +927,2097773,"genie.py",2856,2831," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n 
sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))",python,selection_command +928,2097775,"genie.py",2856,2887," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = 
jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]",python,selection_command +929,2097775,"genie.py",2856,2937," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)",python,selection_command 
+930,2097776,"genie.py",2856,2972," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None",python,selection_command +931,2097776,"genie.py",2856,2973," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out 
= self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n",python,selection_command +932,2097776,"genie.py",2856,3036," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- 
Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---",python,selection_command +933,2097777,"genie.py",2856,3087," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)",python,selection_command +934,2097857,"genie.py",2856,3138," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent 
tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)",python,selection_command +935,2097858,"genie.py",2856,3177," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n 
variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(",python,selection_command +936,2097868,"genie.py",2856,3209," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = 
final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,",python,selection_command +937,2098012,"genie.py",2856,3236," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,",python,selection_command +938,2098201,"genie.py",2856,3266," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur 
after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan",python,selection_command +939,2098317,"genie.py",2856,3276," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) 
# (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )",python,selection_command +940,2098520,"genie.py",2856,3318," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n 
init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]",python,selection_command +941,2098625,"genie.py",2856,3319," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n",python,selection_command +942,2098774,"genie.py",2856,3374," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n 
Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---",python,selection_command +943,2098927,"genie.py",2856,3420," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: 
sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(",python,selection_command +944,2099085,"genie.py",2856,3450," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n 
dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,",python,selection_command +945,2099269,"genie.py",2856,3499," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = 
loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],",python,selection_command +946,2099386,"genie.py",2856,3509," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )",python,selection_command 
+947,2099527,"genie.py",2856,3537," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames",python,selection_command +948,2099973,"genie.py",2856,3538," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all 
current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames\n",python,selection_command +949,2100364,"genie.py",2856,3537," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n 
pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n \n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n\n def generation_step_fn(carry, step_t):\n rng, current_token_idxs = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n masked_token_idxs = current_token_idxs * ~mask\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs,\n mask,\n action_tokens,\n )\n final_carry_maskgit, _ = loop_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs = final_carry_maskgit[1]\n new_carry = (rng, updated_token_idxs)\n return new_carry, None\n\n # --- Run the autoregressive generation using scan ---\n initial_carry = (batch[""rng""], token_idxs)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry, _ = jax.lax.scan(\n generation_step_fn,\n initial_carry,\n timesteps_to_scan\n )\n final_token_idxs = final_carry[1]\n\n # --- Decode all tokens at once at the end ---\n final_frames = self.tokenizer.decode(\n final_token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames",python,selection_command +950,2101330,"genie.py",2856,0,"",python,selection_command +951,2105454,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",0,0,"",python,tab +952,2105455,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4242,0,"",python,selection_mouse +953,2105603,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2743,1499," def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n",python,selection_mouse +954,2105604,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2742,1500," def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n 
tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n",python,selection_mouse +955,2105606,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2743,1499," def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n",python,selection_command +956,2105657,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2742,1500," def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 
1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n",python,selection_mouse +957,2105733,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2726,1516," @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n",python,selection_mouse +958,2105901,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2725,1517,"\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n",python,selection_mouse +959,2107117,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2725,1518,"",python,content +960,2111493,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2724,0,"\n ",python,content +961,2111546,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2729,4,"",python,content +962,2111962,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2729,0,"\n ",python,content +963,2112133,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2734,0,"\n ",python,content +964,2112133,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2730,4,"",python,content +965,2112400,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2730,0,"",python,selection_command 
+966,2113154,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2731,4,"",python,content +967,2113155,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2730,0,"def _sampling_wrapper(module, batch):\n return module.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef _autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid",python,content +968,2115028,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",3284,0," ",python,content +969,2115028,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2730,553,"",python,content +970,2116504,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2730,0,"\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n steps: int = 25,\n temperature: int = 1,\n sample_argmax: bool = False,\n ) -> Any:\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""]\n new_frame_idxs = jnp.zeros_like(token_idxs)[:, 0]\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n # --- Initialize MaskGIT ---\n init_mask = jnp.ones_like(token_idxs, dtype=bool)[:, 0]\n init_carry = (\n batch[""rng""],\n new_frame_idxs,\n init_mask,\n token_idxs,\n action_tokens,\n )\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n new_frame_idxs = final_carry[1]\n new_frame_pixels = self.tokenizer.decode(\n jnp.expand_dims(new_frame_idxs, 1),\n video_hw=batch[""videos""].shape[2:4],\n )\n return new_frame_pixels\n\n",python,content +971,2116572,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",2730,0,"",python,selection_command +972,2119162,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4248,0,"",python,selection_mouse +973,2119966,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4248,6,"",python,content +974,2120029,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4252,0,"",python,selection_command +975,2120272,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4247,0,"",python,selection_command +976,2124081,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",6841,0,"",python,selection_mouse +977,2124280,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",6840,1,"\n",python,selection_mouse +978,2124322,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4459,2382,"class MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, 
jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n",python,selection_mouse +979,2124507,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4458,2383,"\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > 
num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n\n",python,selection_mouse +980,2130129,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",6840,0,"",python,selection_mouse +981,2130724,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4460,2380,"lass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n",python,selection_mouse +982,2130757,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4459,2381,"class MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = 
jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n",python,selection_mouse +983,2130968,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4458,2382,"\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, final_token_idxs, mask, token_idxs, action_tokens = carry\n step = x\n B, T, N = token_idxs.shape[:3]\n\n # --- Construct + encode video ---\n vid_token_idxs = jnp.concatenate(\n (token_idxs, jnp.expand_dims(final_token_idxs, 1)), axis=1\n )\n vid_embed = self.dynamics.patch_embed(vid_token_idxs)\n curr_masked_frame = jnp.where(\n jnp.expand_dims(mask, -1),\n self.dynamics.mask_token[0],\n vid_embed[:, -1],\n )\n vid_embed = vid_embed.at[:, -1].set(curr_masked_frame)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed)[:, -1] / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(lambda x, y: x[y]))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n new_token_idxs = jnp.where(mask, sampled_token_idxs, final_token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, new_token_idxs, new_mask, token_idxs, action_tokens)\n return new_carry, None\n",python,selection_mouse +984,2132178,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4458,2383,"",python,content 
+985,2138178,"genie.py",0,0,"",python,tab +986,2138181,"genie.py",6604,0,"",python,selection_mouse +987,2138346,"genie.py",6604,1,"\n",python,selection_mouse +988,2138346,"genie.py",6604,2,"\n\n",python,selection_mouse +989,2138347,"genie.py",6604,2188,"\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n",python,selection_mouse +990,2138349,"genie.py",6604,1,"\n",python,selection_command +991,2138413,"genie.py",6604,2188,"\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: 
x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n",python,selection_mouse +992,2141988,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",0,0,"",python,tab +993,2143322,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4458,0,"\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n",python,content +994,2145531,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4458,0,"",python,selection_mouse +995,2146442,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4457,1,"",python,content +996,2146936,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4456,1,"",python,content +997,2147440,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4455,0,"",python,selection_command +998,2150391,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/genie.py",4307,0,"",python,selection_command +999,2202864,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",0,0,"",python,tab 
+1000,2206365,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",317,0,"",python,selection_mouse +1001,2206499,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",307,14,"get_dataloader",python,selection_mouse +1002,2248363,"TERMINAL",0,0,"bash",,terminal_focus +1003,2249658,"TERMINAL",0,0,"bash",,terminal_focus +1004,2252486,"TERMINAL",0,0,"pwd",,terminal_command +1005,2252527,"TERMINAL",0,0,"]633;E;2025-07-07 20:04:13 pwd;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output +1006,2256704,"TERMINAL",0,0,"cursor .",,terminal_command +1007,2256753,"TERMINAL",0,0,"]633;E;2025-07-07 20:04:17 cursor .;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C",,terminal_output +1008,2257110,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output +1009,3862593,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",0,0,"",python,tab +1010,3862595,"/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar/sample.py",639,0,"",python,selection_mouse +1011,3867774,"TERMINAL",0,0,"bash",,terminal_focus +1012,3869106,"TERMINAL",0,0,"bash",,terminal_focus +1013,3872678,"TERMINAL",0,0,"pwd",,terminal_command +1014,3872722,"TERMINAL",0,0,"]633;E;2025-07-07 20:31:13 pwd;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C/home/hk-project-p0023960/tum_cte0515/Projects/tmp/jafar\r\n]0;tum_cte0515@hkn1991:~/Projects/tmp/jafar]633;D;0",,terminal_output +1015,3874253,"TERMINAL",0,0,"cd ..",,terminal_command +1016,3874298,"TERMINAL",0,0,"]633;E;2025-07-07 20:31:14 cd ..;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects/tmp]633;D;0",,terminal_output +1017,3875778,"TERMINAL",0,0,"cd ..",,terminal_command +1018,3875788,"TERMINAL",0,0,"]633;E;2025-07-07 20:31:16 cd ..;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects]633;D;0",,terminal_output +1019,3877035,"TERMINAL",0,0,"cd jafar",,terminal_command +1020,3878418,"TERMINAL",0,0,"pwd",,terminal_command +1021,3878470,"TERMINAL",0,0,"]633;E;2025-07-07 20:31:18 pwd;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C/home/hk-project-p0023960/tum_cte0515/Projects/jafar\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +1022,3885992,"TERMINAL",0,0,"mv *.gif gifs",,terminal_command +1023,3886066,"TERMINAL",0,0,"]633;E;2025-07-07 20:31:26 mv *.gif gifs;7d45707f-8bba-4662-a344-aba14e13e9ae]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515/Projects/jafar",,terminal_output +1024,3961875,"TERMINAL",0,0,"bash",,terminal_focus +1025,3964498,"TERMINAL",0,0,"bash",,terminal_focus +1026,3968405,"sample.py",0,0,"",python,tab +1027,3970244,"sample.py",3231,0,"",python,selection_mouse +1028,3970262,"sample.py",3230,0,"",python,selection_command +1029,3971010,"sample.py",2890,0,"",python,selection_mouse +1030,3973975,"sample.py",4682,0,"",python,selection_mouse +1031,3973978,"sample.py",4681,0,"",python,selection_command +1032,3974615,"sample.py",4647,0,"",python,selection_mouse +1033,3975847,"sample.py",4661,0,"",python,selection_mouse +1034,4051369,"sample.py",5020,0,"",python,selection_mouse diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9ab67804-e8b6-44ba-9ee4-ddec1e42461f1757968391960-2025_09_15-22.33.57.229/source.csv 
b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9ab67804-e8b6-44ba-9ee4-ddec1e42461f1757968391960-2025_09_15-22.33.57.229/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..8ceb506cf27c1af93fdff629376cf332fec1bac7 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9ab67804-e8b6-44ba-9ee4-ddec1e42461f1757968391960-2025_09_15-22.33.57.229/source.csv @@ -0,0 +1,37 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,1209,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:33:57 PM [info] Activating crowd-code\n10:33:57 PM [info] Recording started\n10:33:57 PM [info] Initializing git provider using file system watchers...\n10:33:57 PM [info] Git repository found\n10:33:57 PM [info] Git provider initialized successfully\n10:33:57 PM [info] Initial git state: [object Object]\n",Log,tab +3,2243,"TERMINAL",0,0,"bash",,terminal_focus +4,10241,"TERMINAL",0,0,"git status",,terminal_command +5,10311,"TERMINAL",0,0,"]633;COn branch val-loss\r\nYour branch is up to date with 'origin/val-loss'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: models/dynamics.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdata/\r\n\tdiff.diff\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\tlogs/\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\tutils/visualizer.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +6,14147,"TERMINAL",0,0,"git diff",,terminal_command +7,14189,"TERMINAL",0,0,"]633;C[?1h=\rdiff --git a/models/dynamics.py b/models/dynamics.py\r\nindex 74fde10..1ad90cf 100644\r\n--- a/models/dynamics.py\r\n+++ b/models/dynamics.py\r\n@@ -72,9 +72,14 @@ class DynamicsMaskGIT(nnx.Module):\r\n )\r\n \r\n def __call__(\r\n- self, batch: Dict[str, jax.Array], training: bool = True, pred_full_frame: bool = False,\r\n+ self,\r\n+ batch: Dict[str, jax.Array],\r\n+ training: bool = True,\r\n+ pred_full_frame: bool = False,\r\n ) -> tuple[jax.Array, jax.Array | None]:\r\n- assert not (training and pred_full_frame), ""Cannot evaluate full frame prediction during training.""\r\n+ assert not (\r\n+ training and pred_full_frame\r\n+ ), ""Cannot evaluate full frame prediction during training.""\r\n # --- Mask videos ---\r\n video_tokens_BTN = batch[""video_tokens""]\r\n latent_actions_BTm11L = batch[""latent_actions""]\r\n@@ -170,9 +175,14 @@ class DynamicsCausal(nnx.Module):\r\n )\r\n \r\n def __call__(\r\n- self, batch: Dict[str, jax.Array], training: bool = True, pred_full_frame: bool = False,\r\n+ self,\r\n+ batch: Dict[str, jax.Array],\r\n+ training: bool = True,\r\n+ pred_full_frame: bool = False,\r\n ) -> tuple[jax.Array, jax.Array | None]:\r\n:",,terminal_output +8,15345,"TERMINAL",0,0,"",,terminal_command +9,16216,"TERMINAL",0,0,"\r- assert not (training and pred_full_frame), ""Cannot evaluate full frame prediction during training.""\r\n:",,terminal_output +10,17045,"TERMINAL",0,0,"\r+ assert not (\r\n:\r+ training and pred_full_frame\r\n:\r+ ), ""Cannot evaluate full frame prediction during training.""\r\n:\r video_tokens_BTN = batch[""video_tokens""]\r\n:\r latent_actions_BTm11L = batch[""latent_actions""]\r\n:\r if pred_full_frame:\r\n:\r@@ -184,16 +194,31 @@ 
class DynamicsCausal(nnx.Module):\r\n:\r def _pred_full_frame(carry, step_n):\r\n:\r video_tokens_BTN, final_logits_BTNV = carry\r\n:\r # We need to reconstruct submodules inside scan body to prevent trace context mismatches\r\n:\r- patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=nnx.Rngs(0))\r\n:\r+ patch_embed = nnx.Embed(\r\n:\r+ self.num_latents, self.model_dim, rngs=nnx.Rngs(0)\r\n:",,terminal_output +11,17400,"TERMINAL",0,0,"\r+ )\r\n:\r nnx.update(patch_embed, patch_embed_state)\r\n:\r action_up = nnx.Linear(\r\n:\r- self.latent_action_dim, self.model_dim, param_dtype=self.param_dtype, dtype=self.dtype, rngs=nnx.Rn :\rgs(0)\r\n:\r+ self.latent_action_dim,\r\n:\r+ self.model_dim,\r\n:\r+ param_dtype=self.param_dtype,\r\n:\r+ dtype=self.dtype,\r\n:\r+ rngs=nnx.Rngs(0),\r\n:\r )\r\n:\r nnx.update(action_up, action_up_state)\r\n:",,terminal_output +12,17643,"TERMINAL",0,0,"\r transformer = Transformer(\r\n:\r- self.model_dim, self.model_dim, self.ffn_dim, self.num_latents, self.num_blocks, self.num_heads,\r\n:\r- self.dropout, self.param_dtype, self.dtype, use_flash_attention=self.use_flash_attention,\r\n:\r- decode=self.decode, rngs=nnx.Rngs(0)\r\n:\r+ self.model_dim,\r\n:\r+ self.model_dim,\r\n:\r+ self.ffn_dim,\r\n:\r+ self.num_latents,\r\n:",,terminal_output +13,17762,"TERMINAL",0,0,"\r+ self.num_blocks,\r\n:\r+ self.num_heads,\r\n:",,terminal_output +14,17883,"TERMINAL",0,0,"\r+ self.dropout,\r\n:\r+ self.param_dtype,\r\n:",,terminal_output +15,18897,"TERMINAL",0,0,"\r+ self.dtype,\r\n:",,terminal_output +16,19279,"TERMINAL",0,0,"\r+ use_flash_attention=self.use_flash_attention,\r\n:",,terminal_output +17,19756,"TERMINAL",0,0,"\r+ decode=self.decode,\r\n:\r+ rngs=nnx.Rngs(0),\r\n:",,terminal_output +18,19862,"TERMINAL",0,0,"\r )\r\n:\r nnx.update(transformer, transformer_state)\r\n:\r \r\n:",,terminal_output +19,19966,"TERMINAL",0,0,"\r@@ -207,7 +232,9 @@ class DynamicsCausal(nnx.Module):\r\n:\r )\r\n:\r step_logits_BTNp1V = transformer(vid_embed_BTNp1M)\r\n:",,terminal_output +20,20114,"TERMINAL",0,0,"\r step_logits_BV = step_logits_BTNp1V[:, -1, step_n, :]\r\n:\r- final_logits_BTNV = final_logits_BTNV.at[:, -1, step_n].set(step_logits_BV)\r\n:\r+ final_logits_BTNV = final_logits_BTNV.at[:, -1, step_n].set(\r\n:\r+ step_logits_BV\r\n:\r+ )\r\n:\r sampled_token_idxs_B = jnp.argmax(step_logits_BV, axis=-1)\r\n:",,terminal_output +21,20303,"TERMINAL",0,0,"\r video_tokens_BTN = video_tokens_BTN.at[:, -1, step_n].set(\r\n:\r sampled_token_idxs_B\r\n:\r@@ -216,10 +243,11 @@ class DynamicsCausal(nnx.Module):\r\n:\r \r\n:\r (_, final_logits_BTNV), _ = jax.lax.scan(\r\n:",,terminal_output +22,20421,"TERMINAL",0,0,"\r _pred_full_frame,\r\n:\r- (video_tokens_BTN, jnp.zeros((\r\n:",,terminal_output +23,20492,"TERMINAL",0,0,"\r- **video_tokens_BTN.shape,\r\n:\r- self.num_latents))),\r\n:\r- jnp.arange(video_tokens_BTN.shape[2])\r\n:\r+ (\r\n:",,terminal_output +24,20800,"TERMINAL",0,0,"\r+ video_tokens_BTN,\r\n:\r+ jnp.zeros((*video_tokens_BTN.shape, self.num_latents)),\r\n:\r+ ),\r\n:\r+ jnp.arange(video_tokens_BTN.shape[2]),\r\n:\r )\r\n:\r mask_out = jnp.zeros_like(video_tokens_BTN)\r\n:\r mask_out = mask_out.at[:, -1].set(True)\r\n:\r\r(END)\r\r(END)\r\r(END)\r\r(END)\r\r(END)",,terminal_output +25,21746,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +26,22679,"TERMINAL",0,0,"",,terminal_command +27,31747,"TERMINAL",0,0,"git commit -am ""run pre-commit""",,terminal_command +28,31829,"TERMINAL",0,0,"]633;C",,terminal_output 
+29,34352,"TERMINAL",0,0,"[INFO] Installing environment for https://github.com/psf/black.\r\n[INFO] Once installed this environment will be reused.\r\n[INFO] This may take a few minutes...\r\n",,terminal_output +30,50819,"TERMINAL",0,0,"black....................................................................",,terminal_output +31,50994,"TERMINAL",0,0,"Passed\r\n",,terminal_output +32,51145,"TERMINAL",0,0,"[val-loss 263a0c0] run pre-commit\r\n 1 file changed, 42 insertions(+), 14 deletions(-)\r\n]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output +33,60713,"TERMINAL",0,0,"git push",,terminal_command +34,60808,"TERMINAL",0,0,"]633;C",,terminal_output +35,62167,"TERMINAL",0,0,"Enumerating objects: 7, done.\r\nCounting objects: 14% (1/7)\rCounting objects: 28% (2/7)\rCounting objects: 42% (3/7)\rCounting objects: 57% (4/7)\rCounting objects: 71% (5/7)\rCounting objects: 85% (6/7)\rCounting objects: 100% (7/7)\rCounting objects: 100% (7/7), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 25% (1/4)\rCompressing objects: 50% (2/4)\rCompressing objects: 75% (3/4)\rCompressing objects: 100% (4/4)\rCompressing objects: 100% (4/4), done.\r\nWriting objects: 25% (1/4)\rWriting objects: 50% (2/4)\rWriting objects: 75% (3/4)\rWriting objects: 100% (4/4)\rWriting objects: 100% (4/4), 714 bytes | 714.00 KiB/s, done.\r\nTotal 4 (delta 2), reused 0 (delta 0), pack-reused 0\r\nremote: Resolving deltas: 0% (0/2)\rremote: Resolving deltas: 50% (1/2)\rremote: Resolving deltas: 100% (2/2)\rremote: Resolving deltas: 100% (2/2), completed with 2 local objects.\r\n",,terminal_output +36,62453,"TERMINAL",0,0,"To github.com:p-doom/jasmine.git\r\n a9f9ec1..263a0c0 val-loss -> val-loss\r\n",,terminal_output +37,62482,"TERMINAL",0,0,"]0;tum_cte0515@hkn1993:~/Projects/jasmine",,terminal_output diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9eb2e164-5989-4db7-8f31-6e8db1a38df41757236520211-2025_09_07-11.16.00.62/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9eb2e164-5989-4db7-8f31-6e8db1a38df41757236520211-2025_09_07-11.16.00.62/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..abae4e0af0e585a75ee9fcb41db3c3b4a292c098 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-9eb2e164-5989-4db7-8f31-6e8db1a38df41757236520211-2025_09_07-11.16.00.62/source.csv @@ -0,0 +1,5 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,2178,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"11:16:00 AM [info] Activating crowd-code\n11:16:00 AM [info] Recording started\n11:16:00 AM [info] Initializing git provider using file system watchers...\n11:16:00 AM [info] Git repository found\n11:16:00 AM [info] Git provider initialized successfully\n11:16:01 AM [info] Initial git state: [object Object]\n",Log,tab +3,108194,"TERMINAL",0,0,"bash",,terminal_focus +4,215106,"TERMINAL",0,0,"bash",,terminal_focus +5,218728,"TERMINAL",0,0,"bash",,terminal_focus diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-b150d533-89a6-42d8-b7b7-d5a004d568971759420118221-2025_10_02-17.49.22.758/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-b150d533-89a6-42d8-b7b7-d5a004d568971759420118221-2025_10_02-17.49.22.758/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..bedb92e8dc997fc9a96ea7e37007edf5ca37d97d --- /dev/null +++ 
b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-b150d533-89a6-42d8-b7b7-d5a004d568971759420118221-2025_10_02-17.49.22.758/source.csv @@ -0,0 +1,5355 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,1993,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"5:49:22 PM [info] Activating crowd-code\n5:49:22 PM [info] Recording started\n5:49:22 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,2361,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"5:49:23 PM [info] Git repository found\n5:49:23 PM [info] Git provider initialized successfully\n5:49:23 PM [info] Initial git state: [object Object]\n",Log,content +4,6249,"TERMINAL",0,0,"git status",,terminal_command +5,6309,"TERMINAL",0,0,"]633;COn branch ablation/use-pytorch-dataloader\r\nYour branch is ahead of 'origin/ablation/use-pytorch-dataloader' by 6 commits.\r\n (use ""git push"" to publish your local commits)\r\n\r\nLast commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tali-old-branch.diff\r\n\tdata/_vizdoom.ini\r\n\tdata/data/\r\n\tdata/jasmine_data/vizdoom/\r\n\tdata/uv.lock\r\n\tdiff.diff\r\n\tdiff2.diff\r\n\tinput_pipeline/\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\tmessage.md\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/\r\n\tuv.lock\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +6,28381,"TERMINAL",0,0,"git push",,terminal_command +7,28414,"TERMINAL",0,0,"]633;CEnumerating objects: 14, done.\r\nCounting objects: 7% (1/14)\rCounting objects: 14% (2/14)\rCounting objects: 21% (3/14)\rCounting objects: 28% (4/14)\rCounting objects: 35% (5/14)\rCounting objects: 42% (6/14)\rCounting objects: 50% (7/14)\rCounting objects: 57% (8/14)\rCounting objects: 64% (9/14)\rCounting objects: 71% (10/14)\rCounting objects: 78% (11/14)\rCounting objects: 85% (12/14)\rCounting objects: 92% (13/14)\rCounting objects: 100% (14/14)\rCounting objects: 100% (14/14), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 12% (1/8)\rCompressing objects: 25% (2/8)\rCompressing objects: 37% (3/8)\rCompressing objects: 50% (4/8)\rCompressing objects: 62% (5/8)\rCompressing objects: 75% (6/8)\rCompressing objects: 87% (7/8)\rCompressing objects: 100% (8/8)\rCompressing objects: 100% (8/8), done.\r\nWriting objects: 12% (1/8)\rWriting objects: 25% (2/8)\rWriting objects: 37% (3/8)\rWriting objects: 50% (4/8)\rWriting objects: 75% (6/8)\rWriting objects: 87% (7/8)\rWriting objects: 100% (8/8)\rWriting objects: 100% (8/8), 861 bytes | 430.00 KiB/s, done.\r\nTotal 8 (delta 6), reused 0 (delta 0), pack-reused 0\r\nremote: Resolving deltas: 0% (0/6)\rremote: Resolving deltas: 16% (1/6)\rremote: Resolving deltas: 33% (2/6)\rremote: Resolving 
deltas: 50% (3/6)\rremote: Resolving deltas: 66% (4/6)\rremote: Resolving deltas: 83% (5/6)\rremote: Resolving deltas: 100% (6/6)\rremote: Resolving deltas: 100% (6/6), completed with 3 local objects.\r\nTo github.com:p-doom/jasmine.git\r\n 66d0535..982415a ablation/use-pytorch-dataloader -> ablation/use-pytorch-dataloader\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +8,39461,"TERMINAL",0,0,"bash",,terminal_focus +9,40438,"TERMINAL",0,0,"bash",,terminal_focus +10,200386,"slurm/jobs/mihir/horeka/mila-submission/speed-ablations/train_dynamics_base.sh",0,0,"",shellscript,tab +11,231360,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +12,235007,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1992,0,"",shellscript,selection_mouse +13,235954,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1990,0,"",shellscript,selection_mouse 
+14,236131,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1988,3,"110",shellscript,selection_mouse +15,237049,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2010,0,"",shellscript,selection_mouse +16,237563,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1988,0,"",shellscript,selection_mouse +17,237990,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1984,0,"",shellscript,selection_mouse +18,238131,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1977,10,"batch_size",shellscript,selection_mouse +19,238576,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1988,0,"",shellscript,selection_mouse +20,238751,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1988,3,"110",shellscript,selection_mouse +21,239087,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1986,0,"",shellscript,selection_mouse +22,239255,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1977,10,"batch_size",shellscript,selection_mouse +23,239679,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1989,0,"",shellscript,selection_mouse +24,239830,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1988,3,"110",shellscript,selection_mouse +25,240213,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1986,0,"",shellscript,selection_mouse +26,240386,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1977,10,"batch_size",shellscript,selection_mouse +27,240766,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1989,0,"",shellscript,selection_mouse +28,240912,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1988,3,"110",shellscript,selection_mouse +29,241394,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1986,0,"",shellscript,selection_mouse +30,241546,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1977,10,"batch_size",shellscript,selection_mouse +31,242001,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1989,0,"",shellscript,selection_mouse +32,242128,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1988,3,"110",shellscript,selection_mouse +33,252186,"TERMINAL",0,0,"bash",,terminal_focus +34,254716,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2491,0,"",shellscript,selection_mouse +35,255346,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2489,2,"&\n",shellscript,selection_mouse +36,255410,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2486,5,"al &\n",shellscript,selection_mouse +37,255411,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2484,7,"_val &\n",shellscript,selection_mouse +38,255412,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2481,10,"dir_val &\n",shellscript,selection_mouse +39,255412,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2479,12,"s_dir_val &\n",shellscript,selection_mouse +40,255469,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2477,14,"rds_dir_val &\n",shellscript,selection_mouse 
+41,255470,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2475,16,"cords_dir_val &\n",shellscript,selection_mouse +42,255470,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2430,61,"ords_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +43,255502,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2428,63,"ecords_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +44,255533,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2427,64,"records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +45,255565,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2401,90,"e \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +46,255566,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2400,91,"me \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +47,255622,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2350,141,"eckpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +48,255653,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2327,164,"ar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +49,255656,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2300,191,"tant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +50,255684,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2246,245,"run dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +51,255706,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2140,351,"ckpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +52,255764,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2126,365,"\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +53,255794,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2094,397,"mage_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n 
--name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +54,255831,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2027,464,"ge_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +55,255864,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2002,489,"age_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +56,255894,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1978,513,"atch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +57,255956,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1944,547,"ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +58,255989,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1912,579,"-wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse 
+59,256030,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1886,605,"$restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +60,256138,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1866,625," --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +61,256253,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1826,665,"un python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +62,256289,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1825,666,"run python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +63,256727,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1825,0,"",shellscript,selection_mouse +64,256809,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,4,"srun",shellscript,selection_mouse +65,256969,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,44,"srun python jasmine/train_dynamics.py \\n ",shellscript,selection_mouse +66,257086,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,97,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n 
--wandb_id",shellscript,selection_mouse +67,257087,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,128,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir",shellscript,selection_mouse +68,257088,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,188,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height",shellscript,selection_mouse +69,257088,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,211,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width",shellscript,selection_mouse +70,257089,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,234,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=",shellscript,selection_mouse +71,257089,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,258,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \",shellscript,selection_mouse +72,257090,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,283,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval",shellscript,selection_mouse +73,257122,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,302,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \",shellscript,selection_mouse +74,257155,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,332,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval",shellscript,selection_mouse +75,257192,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,368,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-",shellscript,selection_mouse +76,257193,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,442,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n 
--wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit",shellscript,selection_mouse +77,257224,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,485,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \",shellscript,selection_mouse +78,257257,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,507,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \",shellscript,selection_mouse +79,257258,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,554,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir",shellscript,selection_mouse +80,257291,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,580,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \",shellscript,selection_mouse +81,257353,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,620,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir 
$array_records_dir_train",shellscript,selection_mouse +82,257354,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,621,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train ",shellscript,selection_mouse +83,257392,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,622,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \",shellscript,selection_mouse +84,257439,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,664,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val",shellscript,selection_mouse +85,257440,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,665,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val ",shellscript,selection_mouse +86,257569,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,666,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n 
--project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +87,257692,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,667,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +88,258168,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2491,0,"",shellscript,selection_mouse +89,258476,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2489,2,"&\n",shellscript,selection_mouse +90,258485,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2467,24,"array_records_dir_val &\n",shellscript,selection_mouse +91,258535,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2421,70,"array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +92,258536,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2404,87,"\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +93,258665,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2358,133,"=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +94,258666,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2331,160,"\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +95,258667,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2305,186,"uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +96,258667,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2297,194,"instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +97,258667,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2242,249,"coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +98,258698,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2175,316,"coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv 
\\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +99,258699,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2133,358,"log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +100,258700,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2125,366,"\\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +101,258732,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2089,402,"log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +102,258765,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2069,422,"max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +103,258797,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2047,444,"patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +104,258799,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2046,445,"-patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +105,258833,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2023,468,"-image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n 
--name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +106,258868,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1999,492,"-image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +107,258869,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1975,516,"--batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +108,258899,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1942,549,"--ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +109,258935,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1910,581," --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +110,258986,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1884,607," $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir 
$array_records_dir_val &\n",shellscript,selection_mouse +111,258988,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1866,625," --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +112,259060,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,667,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +113,259164,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1823,668,"\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +114,259717,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1826,0,"",shellscript,selection_mouse +115,259753,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,4,"srun",shellscript,selection_mouse +116,259955,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,45,"srun python jasmine/train_dynamics.py \\n -",shellscript,selection_mouse +117,259955,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,55,"srun python jasmine/train_dynamics.py \\n --save_ckpt",shellscript,selection_mouse +118,259991,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,80,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag",shellscript,selection_mouse +119,259991,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,97,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id",shellscript,selection_mouse +120,260072,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,128,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id 
$SLURM_JOB_ID \\n --ckpt_dir",shellscript,selection_mouse +121,260073,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,163,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size",shellscript,selection_mouse +122,260081,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,212,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=",shellscript,selection_mouse +123,260082,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,237,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 ",shellscript,selection_mouse +124,260082,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,258,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \",shellscript,selection_mouse +125,260107,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,284,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=",shellscript,selection_mouse +126,260146,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,375,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit",shellscript,selection_mouse +127,260164,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,442,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit",shellscript,selection_mouse +128,260184,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,507,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \",shellscript,selection_mouse +129,260208,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,554,"srun python 
jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir",shellscript,selection_mouse +130,260233,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,621,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train ",shellscript,selection_mouse +131,260256,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,664,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val",shellscript,selection_mouse +132,260279,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,667,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +133,260317,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,680,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!",shellscript,selection_mouse 
+134,260672,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2504,0,"",shellscript,selection_mouse +135,260690,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2503,0,"",shellscript,selection_command +136,261136,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2521,0,"",shellscript,selection_mouse +137,261140,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2520,0,"",shellscript,selection_command +138,261306,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2512,9,"child_pid",shellscript,selection_mouse +139,261307,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2513,8,"hild_pid",shellscript,selection_command +140,261423,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2505,8,"\nwait $c",shellscript,selection_mouse +141,261469,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2504,9,"\n\nwait $c",shellscript,selection_mouse +142,261470,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2421,92,"array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +143,261582,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2338,175,"tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +144,261583,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2331,182,"\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +145,261583,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2250,263,"dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +146,261616,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2133,380,"log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +147,261617,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2089,424,"log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +148,261617,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2080,433," \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n 
--name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +149,261618,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2024,489,"image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +150,261618,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1977,536,"batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +151,261652,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1913,600,"wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +152,261684,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1887,626,"restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +153,261685,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1870,643,"save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n 
--project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +154,261752,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1829,684,"python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +155,261861,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1823,690,"\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +156,262093,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1809,704," | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +157,262179,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,707,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +158,262210,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1805,708,"\nenv | grep SLURM\n\nsrun python 
jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $c",shellscript,selection_mouse +159,262858,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1805,0,"",shellscript,selection_mouse +160,263314,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1807,0,"",shellscript,selection_mouse +161,263464,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,3,"env",shellscript,selection_mouse +162,263655,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,22,"env | grep SLURM\n\nsrun",shellscript,selection_mouse +163,263656,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,63,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n -",shellscript,selection_mouse +164,263723,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,115,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id",shellscript,selection_mouse +165,263724,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,181,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size",shellscript,selection_mouse +166,263725,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,206,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height",shellscript,selection_mouse +167,263725,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,229,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width",shellscript,selection_mouse +168,263795,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,251,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size",shellscript,selection_mouse +169,263796,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,272,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e",shellscript,selection_mouse +170,263796,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,301,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py 
\\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval",shellscript,selection_mouse +171,263797,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,320,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \",shellscript,selection_mouse +172,263828,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,385,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics",shellscript,selection_mouse +173,263858,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,501,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv",shellscript,selection_mouse +174,263891,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,525,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \",shellscript,selection_mouse +175,263921,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,552,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint",shellscript,selection_mouse +176,263951,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,598,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n 
--name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \",shellscript,selection_mouse +177,264011,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,638,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train",shellscript,selection_mouse +178,264081,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,682,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val",shellscript,selection_mouse +179,264326,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1806,685,"env | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +180,265165,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2491,0,"",shellscript,selection_mouse +181,265882,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2490,1,"\n",shellscript,selection_mouse +182,265989,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2446,45,"\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +183,265990,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2445,46,"\\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +184,266017,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2404,87,"\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +185,266085,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2360,131,"tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir 
$array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +186,266158,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2331,160,"\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +187,266159,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2309,182,"\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +188,266159,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2250,241,"dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +189,266264,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2183,308,"dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +190,266265,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2133,358,"log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +191,266265,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2089,402,"log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +192,266266,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2075,416,"=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +193,266266,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2047,444,"patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse 
+194,266268,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2024,467,"image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +195,266308,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2000,491,"image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +196,266309,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1999,492,"-image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +197,266342,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1975,516,"--batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +198,266380,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1941,550," --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +199,266442,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1909,582," --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n 
--tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +200,266474,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1883,608," $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +201,266507,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1864,627," --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +202,266582,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,667,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +203,267525,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,4,"srun",shellscript,selection_mouse +204,267665,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,44,"srun python jasmine/train_dynamics.py \\n ",shellscript,selection_mouse +205,267702,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,63,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $",shellscript,selection_mouse +206,267703,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,97,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id",shellscript,selection_mouse +207,267806,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,128,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir",shellscript,selection_mouse +208,267806,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,163,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n 
--ckpt_dir $CHECKPOINT_DIR \\n --batch_size",shellscript,selection_mouse +209,267807,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,188,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height",shellscript,selection_mouse +210,267807,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,211,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width",shellscript,selection_mouse +211,267809,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,258,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \",shellscript,selection_mouse +212,267836,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,283,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval",shellscript,selection_mouse +213,267861,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,302,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \",shellscript,selection_mouse +214,267941,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,332,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval",shellscript,selection_mouse +215,267942,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,333,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=",shellscript,selection_mouse +216,267942,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,375,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit",shellscript,selection_mouse +217,267957,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,442,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 
\\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit",shellscript,selection_mouse +218,267978,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,448,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain",shellscript,selection_mouse +219,268000,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,485,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \",shellscript,selection_mouse +220,268039,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,507,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \",shellscript,selection_mouse +221,268179,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,556,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \",shellscript,selection_mouse +222,268183,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,580,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \",shellscript,selection_mouse +223,268250,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,622,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir 
$CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \",shellscript,selection_mouse +224,268390,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,666,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +225,268774,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2490,0,"",shellscript,selection_mouse +226,268806,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2489,0,"",shellscript,selection_command +227,269100,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2490,0,"",shellscript,selection_mouse +228,269104,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2489,0,"",shellscript,selection_command +229,269314,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2489,1,"&",shellscript,selection_mouse +230,269340,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2490,0,"",shellscript,selection_command +231,269391,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2446,44,"\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +232,269472,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2404,86,"\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +233,269494,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2360,130,"tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +234,269530,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2331,159,"\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +235,269531,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2309,181,"\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +236,269564,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2307,183," \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val 
&",shellscript,selection_mouse +237,269596,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2250,240,"dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +238,269597,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2249,241," dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +239,269630,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2182,308,"-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +240,269631,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2175,315,"coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +241,269706,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2133,357,"log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +242,269729,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2126,364,"\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +243,269761,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2089,401,"log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +244,269784,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2075,415,"=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir 
$array_records_dir_val &",shellscript,selection_mouse +245,269808,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2047,443,"patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +246,269838,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2024,466,"image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +247,269869,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2000,490,"image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +248,269951,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1977,513,"batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +249,269984,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1944,546,"ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +250,270061,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1913,577,"wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n 
--tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +251,270138,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1887,603,"restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +252,270214,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1869,621,"-save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +253,270244,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1868,622,"--save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +254,270463,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,666,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +255,270874,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1826,0,"",shellscript,selection_mouse +256,270927,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,4,"srun",shellscript,selection_mouse +257,271061,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,42,"srun python jasmine/train_dynamics.py \\n ",shellscript,selection_mouse 
+258,271070,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,43,"srun python jasmine/train_dynamics.py \\n ",shellscript,selection_mouse +259,271071,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,62,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n ",shellscript,selection_mouse +260,271082,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,88,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n -",shellscript,selection_mouse +261,271110,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,128,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir",shellscript,selection_mouse +262,271238,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,163,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size",shellscript,selection_mouse +263,271239,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,211,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width",shellscript,selection_mouse +264,271239,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,256,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5",shellscript,selection_mouse +265,271240,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,302,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \",shellscript,selection_mouse +266,271240,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,332,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval",shellscript,selection_mouse +267,271240,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,442,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit",shellscript,selection_mouse +268,271241,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,485,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n 
--log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \",shellscript,selection_mouse +269,271241,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,507,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \",shellscript,selection_mouse +270,271276,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,580,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \",shellscript,selection_mouse +271,271314,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,622,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \",shellscript,selection_mouse +272,271348,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,666,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &",shellscript,selection_mouse +273,271389,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,667,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv 
\\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n",shellscript,selection_mouse +274,271481,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1824,680,"srun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!",shellscript,selection_mouse +275,271826,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2504,0,"",shellscript,selection_mouse +276,271859,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2503,0,"",shellscript,selection_command +277,292591,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2040,0,"",shellscript,selection_mouse +278,292603,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2039,0,"",shellscript,selection_command +279,292647,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2039,1,"\",shellscript,selection_mouse +280,292692,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2040,0,"",shellscript,selection_command +281,293449,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2039,0,"",shellscript,selection_command +282,293732,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2016,0,"",shellscript,selection_command +283,293901,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1992,0,"",shellscript,selection_command +284,294065,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1960,0,"",shellscript,selection_command +285,294243,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1992,0,"",shellscript,selection_command +286,294742,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2016,0,"",shellscript,selection_command +287,294809,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2039,0,"",shellscript,selection_command +288,294839,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2061,0,"",shellscript,selection_command +289,294871,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2081,0,"",shellscript,selection_command +290,294951,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2105,0,"",shellscript,selection_command +291,299996,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2106,0,"",shellscript,selection_command +292,300191,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2107,0,"",shellscript,selection_command +293,300341,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2108,0,"",shellscript,selection_command 
+294,300485,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2109,0,"",shellscript,selection_command +295,301008,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2108,0,"",shellscript,selection_command +296,301225,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2109,0,"",shellscript,selection_command +297,303489,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2083,32,"",shellscript,content +298,303564,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2087,0,"",shellscript,selection_command +299,304172,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2083,12,"",shellscript,content +300,304224,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2087,0,"",shellscript,selection_command +301,309297,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2092,0," \\n --log",shellscript,content +302,309667,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2092,0,"_image_interval=1000 \\n --log",shellscript,content +303,309693,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2109,0,"",shellscript,selection_command +304,310790,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2108,0,"",shellscript,selection_command +305,315017,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2109,0,"",shellscript,selection_command +306,315400,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2108,0,"",shellscript,selection_command +307,318123,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2108,1,"5",shellscript,content +308,320378,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2158,0,"5",shellscript,content +309,320378,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2157,1,"",shellscript,content +310,322148,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2125,0,"",shellscript,selection_command +311,322297,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2152,0,"",shellscript,selection_command +312,322783,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2153,0,"",shellscript,selection_command +313,322968,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2154,0,"",shellscript,selection_command +314,323127,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2155,0,"",shellscript,selection_command +315,323310,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2156,0,"",shellscript,selection_command +316,323432,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2157,0,"",shellscript,selection_command +317,323592,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2158,0,"",shellscript,selection_command +318,324156,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2157,1,"",shellscript,content +319,324255,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2157,0,"1",shellscript,content +320,324256,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2158,0,"",shellscript,selection_keyboard 
+321,324888,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2158,0,"0",shellscript,content +322,324889,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2159,0,"",shellscript,selection_keyboard +323,325262,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2126,0,"",shellscript,selection_command +324,325693,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2094,0,"",shellscript,selection_command +325,326624,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2111,0,"",shellscript,selection_command +326,326779,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2110,0,"",shellscript,selection_command +327,326991,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2109,0,"",shellscript,selection_command +328,327210,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2108,1,"",shellscript,content +329,327505,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2108,0,"1",shellscript,content +330,327507,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2109,0,"",shellscript,selection_keyboard +331,327594,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2109,0,"0",shellscript,content +332,327595,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2110,0,"",shellscript,selection_keyboard +333,327960,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2109,0,"",shellscript,selection_command +334,328251,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2126,0,"",shellscript,selection_command +335,328419,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2154,0,"",shellscript,selection_command +336,328774,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2192,0,"",shellscript,selection_command +337,329140,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2154,0,"",shellscript,selection_command +338,329360,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2192,0,"",shellscript,selection_command +339,329530,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2259,0,"",shellscript,selection_command +340,330004,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2192,0,"",shellscript,selection_command +341,330462,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2259,0,"",shellscript,selection_command +342,330871,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2310,0,"",shellscript,selection_command +343,331178,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2332,0,"",shellscript,selection_command +344,331539,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2360,0,"",shellscript,selection_command +345,331912,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2405,0,"",shellscript,selection_command +346,332323,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2360,0,"",shellscript,selection_command +347,332563,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2332,0,"",shellscript,selection_command 
+348,332810,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2360,0,"",shellscript,selection_command +349,333013,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2405,0,"",shellscript,selection_command +350,335602,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2433,0,"",shellscript,selection_command +351,335735,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2475,0,"",shellscript,selection_command +352,336931,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2449,44,"",shellscript,content +353,337371,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2407,0,"",shellscript,selection_command +354,337706,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2448,0,"",shellscript,selection_command +355,337896,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2447,1,"",shellscript,content +356,339473,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2447,0,"&",shellscript,content +357,339474,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2448,0,"",shellscript,selection_keyboard +358,340051,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2447,0,"",shellscript,selection_command +359,340639,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2405,0,"",shellscript,selection_command +360,340807,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2374,0,"",shellscript,selection_command +361,341162,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2332,0,"",shellscript,selection_command +362,341583,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2310,0,"",shellscript,selection_command +363,341771,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2332,0,"",shellscript,selection_command +364,341938,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2374,0,"",shellscript,selection_command +365,342071,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2405,0,"",shellscript,selection_command +366,342406,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2383,24,"",shellscript,content +367,342450,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2387,0,"",shellscript,selection_command +368,342536,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2338,0,"",shellscript,selection_command +369,342718,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2316,0,"",shellscript,selection_command +370,342910,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2290,0,"",shellscript,selection_command +371,343095,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2316,0,"",shellscript,selection_command +372,343269,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2338,0,"",shellscript,selection_command +373,344225,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2316,0,"",shellscript,selection_command +374,344560,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2290,0,"",shellscript,selection_command 
+375,344757,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2237,0,"",shellscript,selection_command
+376,345110,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2170,0,"",shellscript,selection_command
+377,345280,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2132,0,"",shellscript,selection_command
+378,345434,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2120,0,"",shellscript,selection_command
+379,345622,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2087,0,"",shellscript,selection_command
+380,345772,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2067,0,"",shellscript,selection_command
+381,345917,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2045,0,"",shellscript,selection_command
+382,346287,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",2022,0,"",shellscript,selection_command
+383,346535,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1998,0,"",shellscript,selection_command
+384,346687,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",1975,0,"",shellscript,selection_command
+385,359162,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh",0,0,"",shellscript,tab
+386,363566,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab
+387,366640,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2062,0,"",shellscript,selection_mouse
+388,366641,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2061,0,"",shellscript,selection_command
+389,367002,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2062,0,"",shellscript,selection_mouse
+390,367037,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2061,0,"",shellscript,selection_command
+391,367363,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2040,0,"",shellscript,selection_mouse
+392,367399,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2039,0,"",shellscript,selection_command
+393,367400,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2017,22,"\n    --image_width=64 ",shellscript,selection_mouse
+394,367431,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2017,23,"\n    --image_width=64 \",shellscript,selection_command
+395,367645,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2017,0,"",shellscript,selection_mouse
+396,367687,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2016,0,"",shellscript,selection_command
+397,368263,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2017,0,"",shellscript,selection_mouse
+398,368264,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2016,0,"",shellscript,selection_command
+399,368807,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2115,0,"",shellscript,selection_mouse
+400,368809,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2114,0,"",shellscript,selection_command
+401,369145,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2127,0,"",shellscript,selection_mouse
+402,369154,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2126,0,"",shellscript,selection_command
+403,369541,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2111,0,"",shellscript,selection_mouse
+404,369675,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2108,5,"10000",shellscript,selection_mouse
+405,370463,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2127,0,"",shellscript,selection_mouse
+406,370501,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2126,0,"",shellscript,selection_command
+407,370502,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2126,1,"\",shellscript,selection_mouse +408,370546,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2127,0,"",shellscript,selection_command +409,371654,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2135,0,"",shellscript,selection_mouse +410,372821,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2124,0,"",shellscript,selection_mouse +411,373733,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2122,0,"",shellscript,selection_mouse +412,374200,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2127,0,"",shellscript,selection_mouse +413,374234,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2126,0,"",shellscript,selection_command +414,374637,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2122,0,"",shellscript,selection_mouse +415,374915,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2124,0,"",shellscript,selection_mouse +416,375361,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2122,0,"",shellscript,selection_mouse +417,375958,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2123,0,"",shellscript,selection_mouse +418,376803,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2127,0,"",shellscript,selection_mouse +419,376840,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2126,0,"",shellscript,selection_command +420,376854,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2126,1,"\",shellscript,selection_mouse +421,376919,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2127,0,"",shellscript,selection_command +422,378223,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2399,0,"",shellscript,selection_mouse +423,378306,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2398,1,"$",shellscript,selection_mouse +424,378882,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2102,0,"",shellscript,selection_mouse +425,379207,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2127,0,"",shellscript,selection_mouse +426,379241,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2126,0,"",shellscript,selection_command +427,380261,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2104,0,"",shellscript,selection_mouse +428,380993,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2076,0,"",shellscript,selection_mouse +429,381462,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2082,0,"",shellscript,selection_mouse +430,381468,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2081,0,"",shellscript,selection_command 
+431,383298,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2082,0,"\n ",shellscript,content +432,383653,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2087,0,".",shellscript,content +433,383654,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2088,0,"",shellscript,selection_keyboard +434,383761,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2088,0,".",shellscript,content +435,383762,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2089,0,"",shellscript,selection_keyboard +436,384451,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2088,1,"",shellscript,content +437,384629,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2087,1,"",shellscript,content +438,385013,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2087,0,"-",shellscript,content +439,385014,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2088,0,"",shellscript,selection_keyboard +440,385145,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2088,0,"-",shellscript,content +441,385147,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2089,0,"",shellscript,selection_keyboard +442,385273,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2089,0,"w",shellscript,content +443,385275,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2090,0,"",shellscript,selection_keyboard +444,385510,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2090,0,"a",shellscript,content +445,385511,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2091,0,"",shellscript,selection_keyboard +446,385730,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2091,0,"r",shellscript,content +447,385731,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2092,0,"",shellscript,selection_keyboard +448,385832,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2092,0,"m",shellscript,content +449,385833,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2093,0,"",shellscript,selection_keyboard +450,386054,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2093,0,"u",shellscript,content +451,386055,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2094,0,"",shellscript,selection_keyboard +452,386723,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2094,0,"p_steps=10000 \",shellscript,content +453,387028,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2108,0,"",shellscript,selection_command +454,387204,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2107,0,"",shellscript,selection_command 
+455,387491,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2106,0,"",shellscript,selection_command +456,387972,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2102,4,"",shellscript,content +457,388999,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2103,0,"",shellscript,selection_command +458,389419,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2104,0,"",shellscript,selection_command +459,389420,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2105,0,"",shellscript,selection_command +460,389734,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2105,0,"\n ",shellscript,content +461,390551,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2110,0,"-",shellscript,content +462,390553,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2111,0,"",shellscript,selection_keyboard +463,390688,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2111,0,"-",shellscript,content +464,390690,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2112,0,"",shellscript,selection_keyboard +465,391703,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2112,0,"wsd_decay_steps=0 \",shellscript,content +466,394837,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2131,0,"\n ",shellscript,content +467,395654,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2136,0,".",shellscript,content +468,395656,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2137,0,"",shellscript,selection_keyboard +469,396366,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2136,1,"",shellscript,content +470,396621,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2136,0,"-",shellscript,content +471,396622,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2137,0,"",shellscript,selection_keyboard +472,396814,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2137,0,"-",shellscript,content +473,396815,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2138,0,"",shellscript,selection_keyboard +474,397059,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2138,0,"n",shellscript,content +475,397061,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2139,0,"",shellscript,selection_keyboard +476,397690,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2139,0,"um_steps=10 \",shellscript,content +477,398067,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2151,0,"",shellscript,selection_command +478,398215,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2150,0,"",shellscript,selection_command 
+479,398900,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2150,0,"0",shellscript,content +480,398902,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2151,0,"",shellscript,selection_keyboard +481,399050,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2151,0,"0",shellscript,content +482,399051,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2152,0,"",shellscript,selection_keyboard +483,402851,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2264,0,"",shellscript,selection_mouse +484,406997,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2060,0,"",shellscript,selection_mouse +485,409216,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2059,0,"",shellscript,selection_command +486,409466,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2036,0,"",shellscript,selection_command +487,409987,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1994,47,"",shellscript,content +488,410036,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1998,0,"",shellscript,selection_command +489,410580,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2034,0,"",shellscript,selection_command +490,410800,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2054,0,"",shellscript,selection_command +491,411288,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2034,0,"",shellscript,selection_command +492,417179,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2016,20,"",shellscript,content +493,417275,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2020,0,"",shellscript,selection_command +494,418154,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1998,0,"",shellscript,selection_command +495,418368,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1975,0,"",shellscript,selection_command +496,419340,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1998,0,"",shellscript,selection_command +497,419494,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2020,0,"",shellscript,selection_command +498,419625,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2043,0,"",shellscript,selection_command +499,421408,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2125,0,"",shellscript,selection_command +500,423180,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2092,0,"",shellscript,selection_command +501,423445,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2069,0,"",shellscript,selection_command +502,424227,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2092,0,"",shellscript,selection_command 
+503,424310,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2125,0,"",shellscript,selection_command +504,424536,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2137,0,"",shellscript,selection_command +505,426078,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2242,0,"",shellscript,selection_command +506,426516,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2295,0,"",shellscript,selection_command +507,426712,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2321,0,"",shellscript,selection_command +508,426852,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2343,0,"",shellscript,selection_command +509,427080,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2392,0,"",shellscript,selection_command +510,427312,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2430,0,"",shellscript,selection_command +511,427503,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",2435,0,"",shellscript,selection_command +512,433175,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1937,0,"",shellscript,selection_mouse +513,433186,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1936,0,"",shellscript,selection_command +514,436796,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1805,0,"",shellscript,selection_mouse +515,438242,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1805,0,"\n",shellscript,content +516,438430,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1806,0,"\n",shellscript,content +517,438576,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1807,0,"\n",shellscript,content +518,438929,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1807,0,"",shellscript,selection_command +519,439374,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1806,0,"",shellscript,selection_command +520,440391,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1805,1,"",shellscript,content +521,440676,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1806,0,"",shellscript,selection_command +522,440841,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1805,1,"",shellscript,content +523,440991,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1806,0,"",shellscript,selection_command +524,441892,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1805,1,"",shellscript,content +525,444470,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1799,0,"",shellscript,selection_mouse +526,444610,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,7,"3528955",shellscript,selection_mouse 
+527,444762,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,8,"3528955\n",shellscript,selection_mouse +528,444843,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,25,"3528955\n\nenv | grep SLURM",shellscript,selection_mouse +529,445035,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,8,"3528955\n",shellscript,selection_mouse +530,445120,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1773,31,"train_tokenizer_default/3528955",shellscript,selection_mouse +531,445363,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1772,32,"/train_tokenizer_default/3528955",shellscript,selection_mouse +532,445377,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,8,"3528955\n",shellscript,selection_mouse +533,445812,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1715,89,"tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse +534,445888,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1707,97,"scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse +535,445968,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1706,98,"/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse +536,445970,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,8,"3528955\n",shellscript,selection_mouse +537,446063,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,25,"3528955\n\nenv | grep SLURM",shellscript,selection_mouse +538,447673,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,0,"",shellscript,selection_mouse +539,447912,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,1,"/",shellscript,selection_mouse +540,447914,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,2,"/h",shellscript,selection_mouse +541,447914,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,4,"/hkf",shellscript,selection_mouse +542,447914,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,6,"/hkfs/",shellscript,selection_mouse +543,447933,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,8,"/hkfs/wo",shellscript,selection_mouse +544,447934,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1666,20,"\ntokenizer_ckpt_dir=",shellscript,selection_mouse +545,447980,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1665,21,"\n\ntokenizer_ckpt_dir=",shellscript,selection_mouse +546,448128,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1666,20,"\ntokenizer_ckpt_dir=",shellscript,selection_mouse 
+547,448215,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,61,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/check",shellscript,selection_mouse +548,448246,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,67,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints",shellscript,selection_mouse +549,448274,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,119,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n",shellscript,selection_mouse +550,448482,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,88,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/t",shellscript,selection_mouse +551,448498,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,90,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/tra",shellscript,selection_mouse +552,448514,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,91,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/trai",shellscript,selection_mouse +553,448552,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,92,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train",shellscript,selection_mouse +554,448585,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,94,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_t",shellscript,selection_mouse +555,448586,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,95,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_to",shellscript,selection_mouse +556,448587,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,96,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tok",shellscript,selection_mouse +557,448619,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,97,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_toke",shellscript,selection_mouse +558,448620,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,98,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_token",shellscript,selection_mouse +559,448648,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,99,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokeni",shellscript,selection_mouse +560,448649,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,100,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokeniz",shellscript,selection_mouse 
+561,448681,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,102,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer",shellscript,selection_mouse +562,448682,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1666,20,"\ntokenizer_ckpt_dir=",shellscript,selection_mouse +563,449141,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,118,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse +564,449790,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,119,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n",shellscript,selection_mouse +565,450453,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1805,0,"",shellscript,selection_mouse +566,450743,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,8,"3528955\n",shellscript,selection_mouse +567,451208,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1800,0,"",shellscript,selection_mouse +568,451250,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,7,"3528955",shellscript,selection_mouse +569,451400,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,8,"3528955\n",shellscript,selection_mouse +570,451439,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,25,"3528955\n\nenv | grep SLURM",shellscript,selection_mouse +571,451517,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,8,"3528955\n",shellscript,selection_mouse +572,451578,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,25,"3528955\n\nenv | grep SLURM",shellscript,selection_mouse +573,451579,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1797,8,"3528955\n",shellscript,selection_mouse +574,451623,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1706,98,"/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse +575,451660,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1697,107,"workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse +576,451690,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1692,112,"work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse +577,451724,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1687,117,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse 
+578,451758,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,118,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse +579,451759,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,119,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse +580,451790,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1667,137,"tokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse +581,452312,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1687,0,"",shellscript,selection_mouse +582,452410,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1687,4,"hkfs",shellscript,selection_mouse +583,452634,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1687,5,"hkfs/",shellscript,selection_mouse +584,452650,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1687,9,"hkfs/work",shellscript,selection_mouse +585,452667,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1687,10,"hkfs/work/",shellscript,selection_mouse +586,452707,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1687,19,"hkfs/work/workspace",shellscript,selection_mouse +587,453608,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1686,0,"",shellscript,selection_mouse +588,453785,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,2,"=/",shellscript,selection_mouse +589,453825,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,2,"=/",shellscript,selection_mouse +590,453915,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,6,"=/hkfs",shellscript,selection_mouse +591,453917,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,11,"=/hkfs/work",shellscript,selection_mouse +592,453942,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,21,"=/hkfs/work/workspace",shellscript,selection_mouse +593,454053,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,120,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n",shellscript,selection_mouse +594,454064,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,29,"=/hkfs/work/workspace/scratch",shellscript,selection_mouse +595,454083,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,41,"=/hkfs/work/workspace/scratch/tum_ind3695",shellscript,selection_mouse +596,454143,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,42,"=/hkfs/work/workspace/scratch/tum_ind3695-",shellscript,selection_mouse 
+597,454202,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,56,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared",shellscript,selection_mouse
+598,454249,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,57,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/",shellscript,selection_mouse
+599,454293,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,68,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints",shellscript,selection_mouse
+600,454403,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,72,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big",shellscript,selection_mouse
+601,454503,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,77,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs",shellscript,selection_mouse
+602,454504,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,87,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer",shellscript,selection_mouse
+603,454534,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,111,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default",shellscript,selection_mouse
+604,454636,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,112,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/",shellscript,selection_mouse
+605,454703,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1685,119,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,selection_mouse
+606,457893,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=00:40:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=train_dyn_single_gpu\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3536670\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=4 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=50 \\n --log_checkpoint_interval=50 \\n --dyna_type=maskgit \\n --log \\n --name=coinrun-dyn-dev-$slurm_job_id \\n --tags dyn coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 10 \\n --data_dir $array_records_dir_train \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --val_data_dir $array_records_dir_val \\n --log_interval 1 \\n --val_interval 50 \\n --eval_full_frame \\n --val_steps 5\n",shellscript,tab
+607,462128,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",0,0,"",shellscript,tab
+608,463510,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",1666,0,"",shellscript,selection_mouse
+609,471589,"TERMINAL",0,0,"bash",,terminal_focus
+610,472811,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default-grain-ablation.sh",0,0,"",shellscript,tab
+611,484580,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain-ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab
+612,491903,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab
+613,493949,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation copy.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab
+614,501078,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab
+615,504228,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab
+616,506759,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",458,0,"",shellscript,selection_mouse
+617,506791,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",457,0,"",shellscript,selection_command
+618,507229,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",458,0,"",shellscript,selection_command
+619,507520,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",458,0,"_",shellscript,content
+620,507521,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",459,0,"",shellscript,selection_keyboard
+621,508500,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",459,0,"g",shellscript,content
+622,508501,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",460,0,"",shellscript,selection_keyboard
+623,508579,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",460,0,"r",shellscript,content
+624,508580,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",461,0,"",shellscript,selection_keyboard
+625,508747,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",461,0,"a",shellscript,content
+626,508749,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",462,0,"",shellscript,selection_keyboard
+627,508780,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",462,0,"i",shellscript,content
+628,508781,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",463,0,"",shellscript,selection_keyboard
+629,508980,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",463,0,"n",shellscript,content
+630,508981,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",464,0,"",shellscript,selection_keyboard
+631,509222,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",464,0,"_",shellscript,content
+632,509223,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",465,0,"",shellscript,selection_keyboard
+633,509453,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",465,0,"a",shellscript,content
+634,509454,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",466,0,"",shellscript,selection_keyboard
+635,509632,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",466,0,"b",shellscript,content +636,509636,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",467,0,"",shellscript,selection_keyboard +637,509830,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",467,0,"l",shellscript,content +638,509831,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",468,0,"",shellscript,selection_keyboard +639,509948,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",468,0,"a",shellscript,content +640,509949,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",469,0,"",shellscript,selection_keyboard +641,510092,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",469,0,"t",shellscript,content +642,510093,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",470,0,"",shellscript,selection_keyboard +643,510169,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",470,0,"i",shellscript,content +644,510174,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",471,0,"",shellscript,selection_keyboard +645,510255,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",471,0,"o",shellscript,content +646,510256,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",472,0,"",shellscript,selection_keyboard +647,510381,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",472,0,"n",shellscript,content +648,510382,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",473,0,"",shellscript,selection_keyboard +649,510799,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",472,0,"",shellscript,selection_command +650,511002,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",490,0,"",shellscript,selection_command +651,511516,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",541,0,"",shellscript,selection_command +652,511553,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",543,0,"",shellscript,selection_command +653,511586,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",594,0,"",shellscript,selection_command +654,511626,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",610,0,"",shellscript,selection_command +655,511663,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",667,0,"",shellscript,selection_command +656,511700,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",750,0,"",shellscript,selection_command +657,511701,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",800,0,"",shellscript,selection_command +658,511733,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",835,0,"",shellscript,selection_command +659,511772,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",846,0,"",shellscript,selection_command +660,511900,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",848,0,"",shellscript,selection_command 
+661,512342,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",850,0,"",shellscript,selection_command +662,512383,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",874,0,"",shellscript,selection_command +663,512424,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",876,0,"",shellscript,selection_command +664,512456,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",920,0,"",shellscript,selection_command +665,512494,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",977,0,"",shellscript,selection_command +666,512527,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1015,0,"",shellscript,selection_command +667,512527,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1048,0,"",shellscript,selection_command +668,512604,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1090,0,"",shellscript,selection_command +669,512605,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1095,0,"",shellscript,selection_command +670,512651,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1134,0,"",shellscript,selection_command +671,512687,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1137,0,"",shellscript,selection_command +672,512688,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1139,0,"",shellscript,selection_command +673,512766,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1140,0,"",shellscript,selection_command +674,512775,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1141,0,"",shellscript,selection_command +675,512900,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1164,0,"",shellscript,selection_command +676,513392,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1171,0,"",shellscript,selection_command +677,513423,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1173,0,"",shellscript,selection_command +678,513483,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1202,0,"",shellscript,selection_command +679,513518,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1232,0,"",shellscript,selection_command +680,513520,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1258,0,"",shellscript,selection_command +681,513668,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1260,0,"",shellscript,selection_command +682,513670,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1316,0,"",shellscript,selection_command +683,513783,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1426,0,"",shellscript,selection_command +684,513784,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1479,0,"",shellscript,selection_command +685,513845,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1503,0,"",shellscript,selection_command 
+686,513846,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1530,0,"",shellscript,selection_command +687,513847,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1532,0,"",shellscript,selection_command +688,513848,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1588,0,"",shellscript,selection_command +689,513848,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1679,0,"",shellscript,selection_command +690,513878,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1681,0,"",shellscript,selection_command +691,513879,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1737,0,"",shellscript,selection_command +692,513972,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1820,0,"",shellscript,selection_command +693,513973,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1836,0,"",shellscript,selection_command +694,513995,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1838,0,"",shellscript,selection_command +695,513996,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1877,0,"",shellscript,selection_command +696,514044,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1895,0,"",shellscript,selection_command +697,514045,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1920,0,"",shellscript,selection_command +698,514066,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1951,0,"",shellscript,selection_command +699,514094,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1984,0,"",shellscript,selection_command +700,514133,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2007,0,"",shellscript,selection_command +701,514167,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2029,0,"",shellscript,selection_command +702,519746,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2007,0,"",shellscript,selection_command +703,519908,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1984,0,"",shellscript,selection_command +704,520407,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1951,0,"",shellscript,selection_command +705,520440,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1920,0,"",shellscript,selection_command +706,520480,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1895,0,"",shellscript,selection_command +707,520514,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1877,0,"",shellscript,selection_command +708,520549,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1838,0,"",shellscript,selection_command +709,520583,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1836,0,"",shellscript,selection_command +710,521057,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1820,0,"",shellscript,selection_command 
+711,521224,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1737,0,"",shellscript,selection_command +712,521372,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1681,0,"",shellscript,selection_command +713,521520,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1679,0,"",shellscript,selection_command +714,521708,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1588,0,"",shellscript,selection_command +715,523614,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +716,524631,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",831,0,"",shellscript,selection_mouse +717,524665,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",830,0,"",shellscript,selection_command +718,524731,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",830,1,"t",shellscript,selection_mouse +719,524765,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",831,0,"",shellscript,selection_command +720,524830,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",828,3,"est",shellscript,selection_mouse +721,524830,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",831,1,"\n",shellscript,selection_mouse +722,524861,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",831,26,"\n\njob_name=$SLURM_JOB_NAME",shellscript,selection_mouse +723,524932,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",831,1,"\n",shellscript,selection_mouse +724,525009,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",759,72,"/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +725,525010,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",754,77,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +726,525044,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",746,85,"dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +727,525045,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",743,88,"ds_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +728,525045,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",740,91,"cords_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +729,525111,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",738,93,"records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +730,525111,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",737,94,"_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +731,525184,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",738,93,"records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +732,525243,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",831,1,"\n",shellscript,selection_mouse +733,525919,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",750,81,"val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse 
+734,525920,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",751,80,"al=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +735,525920,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",752,79,"l=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +736,526079,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",753,78,"=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +737,526155,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",754,77,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,selection_mouse +738,529851,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +739,531057,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1478,0,"",shellscript,selection_mouse +740,531058,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1477,0,"",shellscript,selection_command +741,531074,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1477,1,"s",shellscript,selection_mouse +742,531081,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1478,0,"",shellscript,selection_command +743,531154,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1478,1,"\n",shellscript,selection_mouse +744,531226,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1478,26,"\n\njob_name=$SLURM_JOB_NAME",shellscript,selection_mouse +745,531265,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1478,53,"\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID",shellscript,selection_mouse +746,531503,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1478,26,"\n\njob_name=$SLURM_JOB_NAME",shellscript,selection_mouse +747,531513,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1478,1,"\n",shellscript,selection_mouse +748,531932,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1398,80,"/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes",shellscript,selection_mouse +749,531963,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1397,81,"s/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes",shellscript,selection_mouse +750,532043,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1396,82,"fs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes",shellscript,selection_mouse +751,532045,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1395,83,"kfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes",shellscript,selection_mouse +752,532115,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1394,84,"hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes",shellscript,selection_mouse +753,532367,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1393,85,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes",shellscript,selection_mouse 
+754,533250,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1393,85,"",shellscript,content +755,533292,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1392,0,"",shellscript,selection_command +756,533885,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1393,0,"",shellscript,selection_command +757,534886,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1393,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,content +758,537696,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1285,0,"",shellscript,selection_mouse +759,537696,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1285,1,"/",shellscript,selection_mouse +760,537697,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1285,2,"/h",shellscript,selection_mouse +761,537697,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1285,5,"/hkfs",shellscript,selection_mouse +762,537697,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1285,6,"/hkfs/",shellscript,selection_mouse +763,537698,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1285,9,"/hkfs/wor",shellscript,selection_mouse +764,537698,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1285,11,"/hkfs/work/",shellscript,selection_mouse +765,537698,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1260,25,"\narray_records_dir_train=",shellscript,selection_mouse +766,537699,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1259,26,"\n\narray_records_dir_train=",shellscript,selection_mouse +767,537699,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1260,25,"\narray_records_dir_train=",shellscript,selection_mouse +768,537699,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1259,26,"\n\narray_records_dir_train=",shellscript,selection_mouse +769,537699,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1260,25,"\narray_records_dir_train=",shellscript,selection_mouse +770,537700,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1285,85,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes",shellscript,selection_mouse +771,537723,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1285,85,"",shellscript,content +772,537873,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1285,0,"/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test",shellscript,content +773,539021,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1463,0,"",shellscript,selection_mouse +774,539475,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1488,0,"",shellscript,selection_mouse +775,539763,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1515,0,"",shellscript,selection_mouse +776,541022,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1488,0,"",shellscript,selection_mouse 
+777,541313,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1515,0,"",shellscript,selection_mouse +778,541869,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1880,0,"",shellscript,selection_mouse +779,542592,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2014,0,"",shellscript,selection_mouse +780,543027,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2063,0,"",shellscript,selection_mouse +781,568117,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +782,572718,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2087,0,"",shellscript,selection_mouse +783,572740,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2086,0,"",shellscript,selection_command +784,573112,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2037,0,"",shellscript,selection_mouse +785,573550,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2306,0,"",shellscript,selection_mouse +786,573999,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2257,0,"",shellscript,selection_mouse +787,575437,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2207,0,"",shellscript,selection_mouse +788,576444,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2207,1,"g",shellscript,selection_command +789,576870,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2207,1,"g",shellscript,selection_command +790,577587,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2207,0,"",shellscript,selection_command +791,578805,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2274,5,"",shellscript,content +792,578806,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2207,5,"",shellscript,content +793,579456,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2269,1,"",shellscript,content +794,579457,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2207,1,"",shellscript,content +795,579806,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2268,8,"",shellscript,content +796,579807,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2207,8,"",shellscript,content +797,580657,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2260,0,"d",shellscript,content +798,580658,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2207,0,"d",shellscript,content +799,580659,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2208,0,"",shellscript,selection_keyboard +800,580866,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2262,0,"e",shellscript,content +801,580866,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2208,0,"e",shellscript,content +802,580867,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2209,0,"",shellscript,selection_keyboard +803,580983,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2264,0,"f",shellscript,content +804,580984,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2209,0,"f",shellscript,content 
+805,580984,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2210,0,"",shellscript,selection_keyboard +806,581124,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2266,0,"a",shellscript,content +807,581124,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2210,0,"a",shellscript,content +808,581125,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2211,0,"",shellscript,selection_keyboard +809,581210,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2268,0,"u",shellscript,content +810,581211,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2211,0,"u",shellscript,content +811,581212,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2212,0,"",shellscript,selection_keyboard +812,581400,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2270,0,"l",shellscript,content +813,581400,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2212,0,"l",shellscript,content +814,581401,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2213,0,"",shellscript,selection_keyboard +815,581486,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2272,0,"t",shellscript,content +816,581487,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2213,0,"t",shellscript,content +817,581487,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2214,0,"",shellscript,selection_keyboard +818,581809,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2213,0,"",shellscript,selection_command +819,587482,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2169,0,"",shellscript,selection_command +820,587648,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2131,0,"",shellscript,selection_command +821,587798,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2119,0,"",shellscript,selection_command +822,587948,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2086,0,"",shellscript,selection_command +823,588071,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2063,0,"",shellscript,selection_command +824,588208,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2037,0,"",shellscript,selection_command +825,588395,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2014,0,"",shellscript,selection_command +826,588580,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1992,0,"",shellscript,selection_command +827,589080,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1969,0,"",shellscript,selection_command +828,589182,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1936,0,"",shellscript,selection_command +829,589184,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1905,0,"",shellscript,selection_command +830,589190,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1880,0,"",shellscript,selection_command +831,589364,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1862,0,"",shellscript,selection_command 
+832,589394,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1880,0,"",shellscript,selection_command +833,589903,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1905,0,"",shellscript,selection_command +834,589915,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1936,0,"",shellscript,selection_command +835,589988,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1969,0,"",shellscript,selection_command +836,590008,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1992,0,"",shellscript,selection_command +837,590022,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2014,0,"",shellscript,selection_command +838,590055,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2037,0,"",shellscript,selection_command +839,590092,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2063,0,"",shellscript,selection_command +840,590228,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2086,0,"",shellscript,selection_command +841,590229,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2119,0,"",shellscript,selection_command +842,590300,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2131,0,"",shellscript,selection_command +843,590301,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2169,0,"",shellscript,selection_command +844,590301,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2213,0,"",shellscript,selection_command +845,590339,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2273,0,"",shellscript,selection_command +846,590339,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2301,0,"",shellscript,selection_command +847,590373,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2323,0,"",shellscript,selection_command +848,590482,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2367,0,"",shellscript,selection_command +849,590640,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2414,0,"",shellscript,selection_command +850,592569,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2367,0,"",shellscript,selection_command +851,593112,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2323,0,"",shellscript,selection_command +852,596884,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise-main.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 
0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-no-noise-main-$slurm_job_id \\n --tags coinrun dynamics maskgit no-noise-main \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +853,598405,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise-main.sh",2523,0,"",shellscript,selection_mouse +854,600295,"slurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise-main.sh",1275,0,"",shellscript,selection_mouse +855,603962,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +856,605906,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +857,606788,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1273,0,"",shellscript,selection_mouse +858,607779,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1355,0,"\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train",shellscript,content +859,607823,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1356,0,"",shellscript,selection_command +860,608300,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1246,0,"",shellscript,selection_command +861,608742,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1246,110,"",shellscript,content +862,608847,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1383,0,"",shellscript,selection_command +863,610198,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1383,108,"",shellscript,content +864,612168,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1384,0,"",shellscript,selection_command +865,612650,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1409,0,"",shellscript,selection_command 
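[annotation] The scripts above pass a single --batch_size=110. Per the comment recorded later in this log inside build_dataloader ("We deliberately pass the global batch size; the dataloader shards the dataset across all processes"), that value is global, not per device. A minimal sketch of the implied arithmetic, assuming the process count divides the batch evenly (variable names hypothetical):

    import jax

    # Illustration of the global-batch convention noted in build_dataloader:
    # each process receives global_batch / process_count samples per step.
    global_batch = 110  # value passed via --batch_size
    num_processes = jax.process_count()
    assert global_batch % num_processes == 0, "global batch must shard evenly"
    per_process_batch = global_batch // num_processes
    print(per_process_batch)  # 110 on a single-process, single-GPU run like these jobs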
+866,612676,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1436,0,"",shellscript,selection_command +867,612716,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1437,0,"",shellscript,selection_command +868,612740,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1560,0,"",shellscript,selection_command +869,612768,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1585,0,"",shellscript,selection_command +870,612801,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1586,0,"",shellscript,selection_command +871,612851,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1724,0,"",shellscript,selection_command +872,612883,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1725,0,"",shellscript,selection_command +873,613628,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1724,0,"",shellscript,selection_command +874,613784,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1586,0,"",shellscript,selection_command +875,613956,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1585,0,"",shellscript,selection_command +876,614385,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1560,0,"",shellscript,selection_command +877,614472,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",1437,0,"",shellscript,selection_command +878,618591,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",476,0,"",shellscript,selection_mouse +879,618593,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",475,0,"",shellscript,selection_command +880,619215,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",476,0,"",shellscript,selection_mouse +881,619254,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",475,0,"",shellscript,selection_command +882,619828,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",455,0,"",shellscript,selection_mouse +883,620227,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",458,0,"",shellscript,selection_command +884,620579,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",458,0,"_",shellscript,content +885,620580,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",459,0,"",shellscript,selection_keyboard +886,620873,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",459,0,"d",shellscript,content +887,620874,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",460,0,"",shellscript,selection_keyboard +888,620980,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",460,0,"e",shellscript,content +889,620981,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",461,0,"",shellscript,selection_keyboard +890,621089,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",461,0,"f",shellscript,content +891,621090,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",462,0,"",shellscript,selection_keyboard +892,621239,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",462,0,"a",shellscript,content 
+893,621240,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",463,0,"",shellscript,selection_keyboard +894,621316,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",463,0,"u",shellscript,content +895,621317,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",464,0,"",shellscript,selection_keyboard +896,621443,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",464,0,"l",shellscript,content +897,621444,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",465,0,"",shellscript,selection_keyboard +898,621520,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",465,0,"t",shellscript,content +899,621521,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",466,0,"",shellscript,selection_keyboard +900,621757,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",465,0,"",shellscript,selection_command +901,672105,"TERMINAL",0,0,"git push --set-upstream origin ablation/use-pytorch-dataloader^C",,terminal_command +902,672153,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +903,674566,"TERMINAL",0,0,"git branch",,terminal_command +904,674618,"TERMINAL",0,0,"]633;C[?1h=\r",,terminal_output +905,674753,"TERMINAL",0,0,"* ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n:",,terminal_output +906,675344,"TERMINAL",0,0,"\r/",,terminal_output +907,675431,"TERMINAL",0,0,"pp",,terminal_output +908,675703,"TERMINAL",0,0,"rr",,terminal_output +909,675870,"TERMINAL",0,0,"ee",,terminal_output +910,675952,"TERMINAL",0,0,"pp",,terminal_output +911,676140,"TERMINAL",0,0,"ee",,terminal_output +912,676252,"TERMINAL",0,0,"nn",,terminal_output +913,676326,"TERMINAL",0,0,"dd",,terminal_output +914,676806,"TERMINAL",0,0,"\r* ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n 
feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n* ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n maskgit-sampling-iterative-unmasking-fix\r\n...skipping...\r\n prepend-action-maskgit\r\n preprocess_video\r\n refactor-full-frame-val-loss\r\n refactor-tmp\r\n remove-restore-branching\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-ali-branch\r\n sample-from-different-topologies\r\n sampling-script-add-metrics\r\n sampling-startframe-indexing-fix\r\n seeding-data-generation\r\n speedup-tfrecord-preprocessing\r\n train_lam_coinrun_ablation_wsd_3e-6_28747\r\n val-loss\r\n vizdoom-dataset\r\n z-loss\r\n zloss-runs\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n(END)",,terminal_output +915,680188,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +916,683440,"TERMINAL",0,0,"git checkout prepend-action-maskgit",,terminal_command +917,683487,"TERMINAL",0,0,"]633;C",,terminal_output +918,683641,"TERMINAL",0,0,"Switched to branch 'prepend-action-maskgit'\r\nYour branch is up to date with 'origin/prepend-action-maskgit'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +919,686182,"",0,0,"Switched from branch 'ablation/use-pytorch-dataloader' to 'prepend-action-maskgit'",,git_branch_checkout +920,698370,"TERMINAL",0,0,"git checkout -b ""ablation/full-precision-training""",,terminal_command +921,698431,"TERMINAL",0,0,"]633;CSwitched to a new branch 'ablation/full-precision-training'\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +922,701182,"",0,0,"Switched from branch 'prepend-action-maskgit' to 'ablation/full-precision-training'",,git_branch_checkout +923,763075,"TERMINAL",0,0,"bash",,terminal_focus +924,763908,"TERMINAL",0,0,"bash",,terminal_focus 
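[annotation] The newly created ablation/full-precision-training branch is not diffed in this recording; judging only from its name, it would presumably flip the mixed-precision defaults visible in the recorded Args (param_dtype = jnp.float32, dtype = jnp.bfloat16). A sketch of the change the branch name suggests, an assumption rather than the actual commit:

    import jax.numpy as jnp

    # Assumption based solely on the branch name: a full-precision run
    # computes in float32 instead of bfloat16, with params already float32.
    param_dtype = jnp.float32  # unchanged: parameters were full precision
    dtype = jnp.float32        # changed from jnp.bfloat16 for the ablation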
+925,765595,"TERMINAL",0,0,"python",,terminal_command +926,765644,"TERMINAL",0,0,"]633;CPython 3.9.18 (main, Sep 4 2025, 00:00:00) \r\n[GCC 11.4.1 20231218 (Red Hat 11.4.1-3)] on linux\r\nType ""help"", ""copyright"", ""credits"" or ""license"" for more information.\r\n>>> ",,terminal_output +927,769014,"TERMINAL",0,0,"i",,terminal_output +928,769126,"TERMINAL",0,0,"m",,terminal_output +929,769279,"TERMINAL",0,0,"p",,terminal_output +930,769363,"TERMINAL",0,0,"o",,terminal_output +931,769488,"TERMINAL",0,0,"r",,terminal_output +932,769681,"TERMINAL",0,0,"t ",,terminal_output +933,769894,"TERMINAL",0,0,"n",,terminal_output +934,770022,"TERMINAL",0,0,"u",,terminal_output +935,770163,"TERMINAL",0,0,"m",,terminal_output +936,770330,"TERMINAL",0,0,"p",,terminal_output +937,770478,"TERMINAL",0,0,"y",,terminal_output +938,770542,"TERMINAL",0,0," ",,terminal_output +939,770997,"TERMINAL",0,0,"a",,terminal_output +940,771047,"TERMINAL",0,0,"s",,terminal_output +941,771175,"TERMINAL",0,0," ",,terminal_output +942,771307,"TERMINAL",0,0,"n",,terminal_output +943,771439,"TERMINAL",0,0,"p",,terminal_output +944,771792,"TERMINAL",0,0,"\r\n",,terminal_output +945,771897,"TERMINAL",0,0,">>> ",,terminal_output +946,773733,"TERMINAL",0,0,"a",,terminal_output +947,775017,"TERMINAL",0,0," ",,terminal_output +948,775190,"TERMINAL",0,0,"= ",,terminal_output +949,775905,"TERMINAL",0,0,"n",,terminal_output +950,776410,"TERMINAL",0,0,"p",,terminal_output +951,776677,"TERMINAL",0,0,".",,terminal_output +952,777058,"TERMINAL",0,0,"z",,terminal_output +953,777130,"TERMINAL",0,0,"e",,terminal_output +954,777179,"TERMINAL",0,0,"r",,terminal_output +955,777296,"TERMINAL",0,0,"o",,terminal_output +956,777386,"TERMINAL",0,0,"s",,terminal_output +957,777954,"TERMINAL",0,0,"(",,terminal_output +958,784636,"TERMINAL",0,0,"4",,terminal_output +959,784701,"TERMINAL",0,0,"8",,terminal_output +960,784936,"TERMINAL",0,0,",",,terminal_output +961,785084,"TERMINAL",0,0," ",,terminal_output +962,786702,"TERMINAL",0,0,"1",,terminal_output +963,787072,"TERMINAL",0,0,"6",,terminal_output +964,787130,"TERMINAL",0,0,",",,terminal_output +965,787240,"TERMINAL",0,0," ",,terminal_output +966,792009,"TERMINAL",0,0,"3",,terminal_output +967,792067,"TERMINAL",0,0,"2",,terminal_output +968,793334,"TERMINAL",0,0,")",,terminal_output +969,794684,"TERMINAL",0,0,"\r\nTraceback (most recent call last):\r\n File """", line 1, in \r\nTypeError: Cannot interpret '16' as a data type\r\n>>> ",,terminal_output +970,795868,"TERMINAL",0,0,"a = np.zeros(48, 16, 32)",,terminal_output +971,796526,"TERMINAL",0,0,"",,terminal_output +972,797221,"TERMINAL",0,0,")",,terminal_output +973,797544,"TERMINAL",0,0,"",,terminal_output +974,797919,"TERMINAL",0,0,"",,terminal_output +975,798295,"TERMINAL",0,0,"",,terminal_output +976,799227,"TERMINAL",0,0,"[1@(",,terminal_output +977,799419,"TERMINAL",0,0,"\r\n>>> ",,terminal_output +978,800619,"TERMINAL",0,0,"s",,terminal_output +979,800721,"TERMINAL",0,0,".",,terminal_output +980,801280,"TERMINAL",0,0,"sh",,terminal_output +981,801325,"TERMINAL",0,0,"a",,terminal_output +982,801447,"TERMINAL",0,0,"p",,terminal_output +983,801795,"TERMINAL",0,0,"",,terminal_output +984,801921,"TERMINAL",0,0,"",,terminal_output +985,802021,"TERMINAL",0,0,"",,terminal_output +986,802146,"TERMINAL",0,0,"",,terminal_output +987,802340,"TERMINAL",0,0,"",,terminal_output +988,802426,"TERMINAL",0,0,"",,terminal_output +989,802614,"TERMINAL",0,0,"a",,terminal_output +990,802814,"TERMINAL",0,0,".",,terminal_output 
+991,802990,"TERMINAL",0,0,"s",,terminal_output +992,803115,"TERMINAL",0,0,"h",,terminal_output +993,803198,"TERMINAL",0,0,"a",,terminal_output +994,803301,"TERMINAL",0,0,"p",,terminal_output +995,803364,"TERMINAL",0,0,"e",,terminal_output +996,803482,"TERMINAL",0,0,"\r\n(48, 16, 32)\r\n>>> ",,terminal_output +997,875512,"TERMINAL",0,0,"\r\nKeyboardInterrupt\r\n>>> ",,terminal_output +998,875741,"TERMINAL",0,0,"\r\n",,terminal_output +999,875883,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1000,880624,"TERMINAL",0,0,"git branch",,terminal_command +1001,880672,"TERMINAL",0,0,"]633;C[?1h=\r* ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n:",,terminal_output +1002,881237,"TERMINAL",0,0,"* ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n:",,terminal_output +1003,881805,"TERMINAL",0,0,"\r\r:\r...skipping...\r\n* ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n 
convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n:\r:",,terminal_output +1004,881880,"TERMINAL",0,0,"\r/\r* ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n...skipping...\r\n prepend-action-maskgit\r\n preprocess_video\r\n refactor-full-frame-val-loss\r\n refactor-tmp\r\n remove-restore-branching\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-ali-branch\r\n sample-from-different-topologies\r\n sampling-script-add-metrics\r\n sampling-startframe-indexing-fix\r\n seeding-data-generation\r\n speedup-tfrecord-preprocessing\r\n train_lam_coinrun_ablation_wsd_3e-6_28747\r\n val-loss\r\n vizdoom-dataset\r\n z-loss\r\n zloss-runs\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n(END)",,terminal_output +1005,882071,"TERMINAL",0,0,"\r(END)",,terminal_output +1006,882139,"TERMINAL",0,0,"\r...skipping...\r\n\r\n SUMMARY OF LESS COMMANDS\r\n\r\n Commands marked with * may be preceded by a number, N.\r\n Notes in parentheses indicate the behavior if N is given.\r\n A key preceded by a caret indicates the Ctrl key; thus ^K is ctrl-K.\r\n\r\n h H Display this help.\r\n q :q Q :Q ZZ Exit.\r\n ---------------------------------------------------------------------------\r\n\r\n MOVING\r\n\r\n e ^E j ^N CR * Forward one line (or N lines).\r\n y ^Y k ^K ^P * Backward one line (or N lines).\r\n f ^F ^V SPACE * Forward one window (or N lines).\r\n b ^B ESC-v * Backward one window (or N lines).\r\n z * Forward one window (and set window to N).\r\n w * Backward one window (and set window to N).\r\n ESC-SPACE * Forward one window, but don't stop at end-of-file.\r\n d ^D * Forward one half-window (and set half-window to N).\r\n u 
^U * Backward one half-window (and set half-window to N).\r\n ESC-) RightArrow * Right one half screen width (or N positions).\r\n ESC-( LeftArrow * Left one half screen width (or N positions).\r\n ESC-} ^RightArrow Right to last column displayed.\r\n ESC-{ ^LeftArrow Left to first column.\r\n F Forward forever; like ""tail -f"".\r\n ESC-F Like F but stop when search pattern is found.\r\n r ^R ^L Repaint screen.\r\n R Repaint screen, discarding buffered input.\r\n ---------------------------------------------------\r\n Default ""window"" is the screen height.\r\n Default ""half-window"" is half of the screen height.\r\n ---------------------------------------------------------------------------\r\n\r\n SEARCHING\r\n\r\n /pattern * Search forward for (N-th) matching line.\r\n ?pattern * Search backward for (N-th) matching line.\r\nHELP -- Press RETURN for more, or q when done",,terminal_output +1007,882289,"TERMINAL",0,0,"\r n * Repeat previous search (for N-th occurrence).\r\nHELP -- Press RETURN for more, or q when done",,terminal_output +1008,883190,"TERMINAL",0,0,"...skipping...\r\n prepend-action-maskgit\r\n preprocess_video\r\n refactor-full-frame-val-loss\r\n refactor-tmp\r\n remove-restore-branching\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-ali-branch\r\n sample-from-different-topologies\r\n sampling-script-add-metrics\r\n sampling-startframe-indexing-fix\r\n seeding-data-generation\r\n speedup-tfrecord-preprocessing\r\n train_lam_coinrun_ablation_wsd_3e-6_28747\r\n val-loss\r\n vizdoom-dataset\r\n z-loss\r\n zloss-runs\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n(END)",,terminal_output +1009,883690,"TERMINAL",0,0,"\r[?1l>\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1010,884692,"TERMINAL",0,0,"branch",,terminal_command +1011,884710,"TERMINAL",0,0,"]633;Cablation/full-precision-training\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1012,889634,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: 
int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, 
PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = 
nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n 
select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n 
metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = 
einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +1013,892718,"jasmine/train_dynamics.py",397,0,"",python,selection_mouse +1014,892719,"jasmine/train_dynamics.py",396,0,"",python,selection_command +1015,893115,"jasmine/train_dynamics.py",686,0,"",python,selection_mouse +1016,893134,"jasmine/train_dynamics.py",685,0,"",python,selection_command +1017,893397,"jasmine/train_dynamics.py",757,0,"",python,selection_mouse +1018,893730,"jasmine/train_dynamics.py",750,0,"",python,selection_mouse +1019,894150,"jasmine/train_dynamics.py",718,0,"",python,selection_mouse +1020,894566,"jasmine/train_dynamics.py",755,0,"",python,selection_mouse +1021,894568,"jasmine/train_dynamics.py",754,0,"",python,selection_command +1022,902260,"jasmine/train_dynamics.py",0,0,"",python,tab +1023,906242,"jasmine/train_dynamics.py",2193,0,"",python,selection_mouse 
+1024,906242,"jasmine/train_dynamics.py",2192,0,"",python,selection_command +1025,906995,"jasmine/train_dynamics.py",2193,0,"",python,selection_command +1026,907193,"jasmine/train_dynamics.py",2192,1,"",python,content +1027,907677,"jasmine/train_dynamics.py",2185,7,"",python,content +1028,907978,"jasmine/train_dynamics.py",2185,0,"f",python,content +1029,907979,"jasmine/train_dynamics.py",2186,0,"",python,selection_keyboard +1030,908215,"jasmine/train_dynamics.py",2186,0,"l",python,content +1031,908216,"jasmine/train_dynamics.py",2187,0,"",python,selection_keyboard +1032,908243,"jasmine/train_dynamics.py",2187,0,"o",python,content +1033,908245,"jasmine/train_dynamics.py",2188,0,"",python,selection_keyboard +1034,908478,"jasmine/train_dynamics.py",2188,0,"a",python,content +1035,908480,"jasmine/train_dynamics.py",2189,0,"",python,selection_keyboard +1036,908947,"jasmine/train_dynamics.py",2189,0,"t",python,content +1037,908949,"jasmine/train_dynamics.py",2190,0,"",python,selection_keyboard +1038,909510,"jasmine/train_dynamics.py",2190,0,"3",python,content +1039,909513,"jasmine/train_dynamics.py",2191,0,"",python,selection_keyboard +1040,909604,"jasmine/train_dynamics.py",2191,0,"2",python,content +1041,909606,"jasmine/train_dynamics.py",2192,0,"",python,selection_keyboard +1042,910029,"jasmine/train_dynamics.py",2191,0,"",python,selection_command +1043,919753,"jasmine/train_dynamics.py",2192,0,"",python,selection_mouse +1044,919760,"jasmine/train_dynamics.py",2191,0,"",python,selection_command +1045,920653,"jasmine/train_dynamics.py",2142,0,"",python,selection_mouse +1046,920804,"jasmine/train_dynamics.py",2139,4," ",python,selection_mouse +1047,920923,"jasmine/train_dynamics.py",2139,15," param_dtype",python,selection_mouse +1048,921004,"jasmine/train_dynamics.py",2139,41," param_dtype = jnp.float32\n dtype =",python,selection_mouse +1049,921005,"jasmine/train_dynamics.py",2139,42," param_dtype = jnp.float32\n dtype = ",python,selection_mouse +1050,921030,"jasmine/train_dynamics.py",2139,45," param_dtype = jnp.float32\n dtype = jnp",python,selection_mouse +1051,921074,"jasmine/train_dynamics.py",2139,46," param_dtype = jnp.float32\n dtype = jnp.",python,selection_mouse +1052,921075,"jasmine/train_dynamics.py",2139,53," param_dtype = jnp.float32\n dtype = jnp.float32",python,selection_mouse +1053,921588,"jasmine/train_dynamics.py",2192,0,"",python,selection_mouse +1054,921606,"jasmine/train_dynamics.py",2191,0,"",python,selection_command +1055,921761,"jasmine/train_dynamics.py",2185,7,"float32",python,selection_mouse +1056,921762,"jasmine/train_dynamics.py",2186,6,"loat32",python,selection_command +1057,921950,"jasmine/train_dynamics.py",2186,30,"loat32\n use_flash_attention",python,selection_mouse +1058,922023,"jasmine/train_dynamics.py",2186,11,"loat32\n ",python,selection_mouse +1059,922037,"jasmine/train_dynamics.py",2186,10,"loat32\n ",python,selection_mouse +1060,922070,"jasmine/train_dynamics.py",2186,9,"loat32\n ",python,selection_mouse +1061,922256,"jasmine/train_dynamics.py",2171,15," dtype = jnp.f",python,selection_mouse +1062,922256,"jasmine/train_dynamics.py",2172,14," dtype = jnp.f",python,selection_mouse +1063,922406,"jasmine/train_dynamics.py",2142,44," param_dtype = jnp.float32\n dtype = jnp.f",python,selection_mouse +1064,922465,"jasmine/train_dynamics.py",2141,45," param_dtype = jnp.float32\n dtype = jnp.f",python,selection_mouse +1065,923090,"jasmine/train_dynamics.py",2141,0,"",python,selection_mouse +1066,923208,"jasmine/train_dynamics.py",2139,4," 
",python,selection_mouse +1067,923357,"jasmine/train_dynamics.py",2139,39," param_dtype = jnp.float32\n dtype",python,selection_mouse +1068,923451,"jasmine/train_dynamics.py",2139,42," param_dtype = jnp.float32\n dtype = ",python,selection_mouse +1069,923461,"jasmine/train_dynamics.py",2139,45," param_dtype = jnp.float32\n dtype = jnp",python,selection_mouse +1070,923462,"jasmine/train_dynamics.py",2139,53," param_dtype = jnp.float32\n dtype = jnp.float32",python,selection_mouse +1071,923874,"jasmine/train_dynamics.py",2192,0,"",python,selection_mouse +1072,923884,"jasmine/train_dynamics.py",2191,0,"",python,selection_command +1073,924024,"jasmine/train_dynamics.py",2185,7,"float32",python,selection_mouse +1074,924034,"jasmine/train_dynamics.py",2186,6,"loat32",python,selection_command +1075,924241,"jasmine/train_dynamics.py",2181,5,"jnp.f",python,selection_mouse +1076,924282,"jasmine/train_dynamics.py",2179,7,"= jnp.f",python,selection_mouse +1077,924283,"jasmine/train_dynamics.py",2173,13,"dtype = jnp.f",python,selection_mouse +1078,924407,"jasmine/train_dynamics.py",2172,14," dtype = jnp.f",python,selection_mouse +1079,924474,"jasmine/train_dynamics.py",2171,15," dtype = jnp.f",python,selection_mouse +1080,924509,"jasmine/train_dynamics.py",2140,46," param_dtype = jnp.float32\n dtype = jnp.f",python,selection_mouse +1081,924543,"jasmine/train_dynamics.py",2139,47," param_dtype = jnp.float32\n dtype = jnp.f",python,selection_mouse +1082,925610,"jasmine/train_dynamics.py",2141,0,"",python,selection_mouse +1083,925621,"jasmine/train_dynamics.py",2139,4," ",python,selection_mouse +1084,925759,"jasmine/train_dynamics.py",2139,33," param_dtype = jnp.float32\n ",python,selection_mouse +1085,925832,"jasmine/train_dynamics.py",2139,34," param_dtype = jnp.float32\n ",python,selection_mouse +1086,925832,"jasmine/train_dynamics.py",2139,39," param_dtype = jnp.float32\n dtype",python,selection_mouse +1087,925859,"jasmine/train_dynamics.py",2139,40," param_dtype = jnp.float32\n dtype ",python,selection_mouse +1088,925860,"jasmine/train_dynamics.py",2139,42," param_dtype = jnp.float32\n dtype = ",python,selection_mouse +1089,925896,"jasmine/train_dynamics.py",2139,45," param_dtype = jnp.float32\n dtype = jnp",python,selection_mouse +1090,925897,"jasmine/train_dynamics.py",2139,46," param_dtype = jnp.float32\n dtype = jnp.",python,selection_mouse +1091,925933,"jasmine/train_dynamics.py",2139,53," param_dtype = jnp.float32\n dtype = jnp.float32",python,selection_mouse +1092,926367,"jasmine/train_dynamics.py",2192,0,"",python,selection_mouse +1093,926376,"jasmine/train_dynamics.py",2191,0,"",python,selection_command +1094,926510,"jasmine/train_dynamics.py",2185,7,"float32",python,selection_mouse +1095,926520,"jasmine/train_dynamics.py",2186,6,"loat32",python,selection_command +1096,926697,"jasmine/train_dynamics.py",2181,5,"jnp.f",python,selection_mouse +1097,926742,"jasmine/train_dynamics.py",2180,6," jnp.f",python,selection_mouse +1098,926743,"jasmine/train_dynamics.py",2173,13,"dtype = jnp.f",python,selection_mouse +1099,926908,"jasmine/train_dynamics.py",2172,14," dtype = jnp.f",python,selection_mouse +1100,926909,"jasmine/train_dynamics.py",2171,15," dtype = jnp.f",python,selection_mouse +1101,927393,"jasmine/train_dynamics.py",2140,0,"",python,selection_mouse +1102,927563,"jasmine/train_dynamics.py",2139,4," ",python,selection_mouse +1103,927712,"jasmine/train_dynamics.py",2139,32," param_dtype = jnp.float32\n ",python,selection_mouse +1104,927729,"jasmine/train_dynamics.py",2139,33," param_dtype = 
jnp.float32\n ",python,selection_mouse +1105,927780,"jasmine/train_dynamics.py",2139,34," param_dtype = jnp.float32\n ",python,selection_mouse +1106,927781,"jasmine/train_dynamics.py",2139,39," param_dtype = jnp.float32\n dtype",python,selection_mouse +1107,927861,"jasmine/train_dynamics.py",2139,41," param_dtype = jnp.float32\n dtype =",python,selection_mouse +1108,927893,"jasmine/train_dynamics.py",2139,45," param_dtype = jnp.float32\n dtype = jnp",python,selection_mouse +1109,927893,"jasmine/train_dynamics.py",2139,53," param_dtype = jnp.float32\n dtype = jnp.float32",python,selection_mouse +1110,928242,"jasmine/train_dynamics.py",2192,0,"",python,selection_mouse +1111,928273,"jasmine/train_dynamics.py",2191,0,"",python,selection_command +1112,934333,"TERMINAL",0,0,"git diff",,terminal_command +1113,934398,"TERMINAL",0,0,"]633;C[?1h=\rdiff --git a/jasmine/train_dynamics.py b/jasmine/train_dynamics.py\r\nindex 06cd966..62d84cd 100644\r\n--- a/jasmine/train_dynamics.py\r\n+++ b/jasmine/train_dynamics.py\r\n@@ -82,7 +82,7 @@ class Args:\r\n mask_limit: float = 0.5\r\n z_loss_weight: float = 0.0\r\n param_dtype = jnp.float32\r\n- dtype = jnp.bfloat16\r\n+ dtype = jnp.float32\r\n use_flash_attention: bool = True\r\n use_gt_actions: bool = False\r\n # Logging\r\n\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1114,943235,"TERMINAL",0,0,"git status",,terminal_command +1115,943277,"TERMINAL",0,0,"]633;COn branch ablation/full-precision-training\r\nLast commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: jasmine/train_dynamics.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tali-old-branch.diff\r\n\tdata/_vizdoom.ini\r\n\tdata/data/\r\n\tdata/jasmine_data/vizdoom/\r\n\tdata/uv.lock\r\n\tdiff.diff\r\n\tdiff2.diff\r\n\tinput_pipeline/\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\tmessage.md\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/\r\n\tuv.lock\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1116,947543,"TERMINAL",0,0,"git branch",,terminal_command +1117,947593,"TERMINAL",0,0,"]633;C[?1h=\r* ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n 
feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n:",,terminal_output +1118,949245,"TERMINAL",0,0,"\r maskgit-sampling-iterative-unmasking-fix\r\n:\r metrics-logging-for-dynamics-model\r\n:",,terminal_output +1119,949645,"TERMINAL",0,0,"\rM ablation/use-pytorch-dataloader\r\n\r:",,terminal_output +1120,949778,"TERMINAL",0,0,"\rM* ablation/full-precision-training\r\n\r:",,terminal_output +1121,949867,"TERMINAL",0,0,"\r\r:",,terminal_output +1122,950641,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1123,985719,"TERMINAL",0,0,"git commit -am ""switched to full precision for ablation""",,terminal_command +1124,985789,"TERMINAL",0,0,"]633;C",,terminal_output +1125,985958,"TERMINAL",0,0,"g",,terminal_output +1126,986042,"TERMINAL",0,0,"i",,terminal_output +1127,986858,"TERMINAL",0,0,"black....................................................................",,terminal_output +1128,987476,"TERMINAL",0,0,"t",,terminal_output +1129,988109,"TERMINAL",0,0,"Passed\r\n",,terminal_output +1130,988211,"TERMINAL",0,0,"[ablation/full-precision-training 7535ecf] switched to full precision for ablation\r\n 1 file changed, 1 insertion(+), 1 deletion(-)\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1131,989926,"TERMINAL",0,0,"git push",,terminal_command +1132,989962,"TERMINAL",0,0,"]633;Cfatal: The current branch ablation/full-precision-training has no upstream branch.\r\nTo push the current branch and set the remote as upstream, use\r\n\r\n git push --set-upstream origin ablation/full-precision-training\r\n\r\nTo have this happen automatically for branches without a tracking\r\nupstream, see 'push.autoSetupRemote' in 'git help config'.\r\n\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1133,994224,"TERMINAL",0,0,"git push --set-upstream origin ablation/full-precision-training",,terminal_command +1134,994274,"TERMINAL",0,0,"]633;C",,terminal_output +1135,995691,"TERMINAL",0,0,"Enumerating objects: 7, done.\r\nCounting objects: 14% (1/7)\rCounting objects: 28% (2/7)\rCounting objects: 42% (3/7)\rCounting objects: 57% (4/7)\rCounting objects: 71% (5/7)\rCounting objects: 85% (6/7)\rCounting objects: 100% (7/7)\rCounting objects: 100% (7/7), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 25% (1/4)\rCompressing objects: 50% (2/4)\rCompressing objects: 75% (3/4)\rCompressing objects: 100% (4/4)\rCompressing objects: 100% (4/4), done.\r\nWriting objects: 25% (1/4)\rWriting objects: 50% (2/4)\rWriting objects: 75% (3/4)\rWriting objects: 100% (4/4)\rWriting objects: 100% (4/4), 388 bytes | 388.00 KiB/s, done.\r\nTotal 4 (delta 3), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +1136,995816,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/3)\rremote: Resolving deltas: 33% (1/3)\rremote: Resolving deltas: 66% (2/3)\rremote: Resolving deltas: 100% (3/3)\rremote: Resolving deltas: 100% (3/3), completed with 3 local 
objects.\r\n",,terminal_output +1137,996067,"TERMINAL",0,0,"remote: \r\nremote: Create a pull request for 'ablation/full-precision-training' on GitHub by visiting:\r\nremote: https://github.com/p-doom/jasmine/pull/new/ablation/full-precision-training\r\nremote: \r\nTo github.com:p-doom/jasmine.git\r\n * [new branch] ablation/full-precision-training -> ablation/full-precision-training\r\nbranch 'ablation/full-precision-training' set up to track 'origin/ablation/full-precision-training'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1138,1264166,"TERMINAL",0,0,"git diff",,terminal_command +1139,1264306,"TERMINAL",0,0,"]633;C[?1h=\r\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1140,1267712,"TERMINAL",0,0,"git checkout change-default-parameters",,terminal_command +1141,1267773,"TERMINAL",0,0,"]633;C",,terminal_output +1142,1267871,"TERMINAL",0,0,"Switched to branch 'change-default-parameters'\r\nYour branch is ahead of 'origin/change-default-parameters' by 1 commit.\r\n (use ""git push"" to publish your local commits)\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1143,1271297,"",0,0,"Switched from branch 'ablation/full-precision-training' to 'change-default-parameters'",,git_branch_checkout +1144,1271645,"TERMINAL",0,0,"git diff main",,terminal_command +1145,1271729,"TERMINAL",0,0,"]633;C[?1h=\r\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1146,1282821,"TERMINAL",0,0,"git diff main",,terminal_command +1147,1282862,"TERMINAL",0,0,"]633;C[?1h=\r\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1148,1286075,"TERMINAL",0,0,"git log",,terminal_command +1149,1286123,"TERMINAL",0,0,"]633;C",,terminal_output +1150,1286289,"TERMINAL",0,0,"[?1h=\rcommit 3963262c3ce1b2fbdbfee3ff2144c7d4fd01c707 (HEAD -> change-default-parameters)\r\nAuthor: Mihir Mahajan \r\nDate: Tue Sep 30 18:36:39 2025 +0200\r\n\r\n switch back to wsd\r\n\r\ncommit cdc26555715620427d76de57d02d9b986af3f83f (origin/change-default-parameters)\r\nAuthor: Mihir Mahajan \r\nDate: Tue Sep 30 18:18:38 2025 +0200\r\n\r\n set log to true for dynamics\r\n\r\ncommit a0abe756d614b1192ff517843979e88895cec0dc\r\nAuthor: Franz Srambical <79149449+emergenz@users.noreply.github.com>\r\nDate: Tue Sep 30 18:17:40 2025 +0200\r\n\r\n Update jasmine/train_tokenizer.py\r\n \r\n Co-authored-by: mihir <78321484+maharajamihir@users.noreply.github.com>\r\n\r\ncommit 0f6c660643c9e8cd7f9e80e9c50c34237320bc46\r\nAuthor: Franz Srambical <79149449+emergenz@users.noreply.github.com>\r\nDate: Tue Sep 30 18:17:34 2025 +0200\r\n\r\n Update jasmine/train_lam.py\r\n \r\n Co-authored-by: mihir <78321484+maharajamihir@users.noreply.github.com>\r\n\r\ncommit 07b6d60317c0ee29c747054dea8efdb0e8c8df87\r\nAuthor: Mihir Mahajan \r\nDate: Tue Sep 30 18:06:37 2025 +0200\r\n\r\n change patch size to 16\r\n\r\ncommit 8d2614d187a831b39eaebf7857763e4a87a708e8\r\nAuthor: Mihir Mahajan \r\nDate: Tue Sep 30 17:57:46 2025 +0200\r\n\r\n changed default hyperparameters\r\n:",,terminal_output +1151,1292998,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1152,1298845,"TERMINAL",0,0,"git log",,terminal_command +1153,1298916,"TERMINAL",0,0,"]633;C",,terminal_output +1154,1299029,"TERMINAL",0,0,"[?1h=\rcommit 3963262c3ce1b2fbdbfee3ff2144c7d4fd01c707 (HEAD -> change-default-parameters)\r\nAuthor: Mihir Mahajan \r\nDate: Tue Sep 30 18:36:39 2025 +0200\r\n\r\n switch back to wsd\r\n\r\ncommit cdc26555715620427d76de57d02d9b986af3f83f 
(origin/change-default-parameters)\r\nAuthor: Mihir Mahajan \r\nDate: Tue Sep 30 18:18:38 2025 +0200\r\n\r\n set log to true for dynamics\r\n\r\ncommit a0abe756d614b1192ff517843979e88895cec0dc\r\nAuthor: Franz Srambical <79149449+emergenz@users.noreply.github.com>\r\nDate: Tue Sep 30 18:17:40 2025 +0200\r\n\r\n Update jasmine/train_tokenizer.py\r\n \r\n Co-authored-by: mihir <78321484+maharajamihir@users.noreply.github.com>\r\n\r\ncommit 0f6c660643c9e8cd7f9e80e9c50c34237320bc46\r\nAuthor: Franz Srambical <79149449+emergenz@users.noreply.github.com>\r\nDate: Tue Sep 30 18:17:34 2025 +0200\r\n\r\n Update jasmine/train_lam.py\r\n \r\n Co-authored-by: mihir <78321484+maharajamihir@users.noreply.github.com>\r\n\r\ncommit 07b6d60317c0ee29c747054dea8efdb0e8c8df87\r\nAuthor: Mihir Mahajan \r\nDate: Tue Sep 30 18:06:37 2025 +0200\r\n\r\n change patch size to 16\r\n\r\ncommit 8d2614d187a831b39eaebf7857763e4a87a708e8\r\nAuthor: Mihir Mahajan \r\nDate: Tue Sep 30 17:57:46 2025 +0200\r\n\r\n changed default hyperparameters\r\n:",,terminal_output +1155,1305853,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=train_tokenizer_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test/train\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test/val\n\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=120 \\n --init_lr=0 \\n --patch_size=4 \\n --max_lr=1e-4 \\n --log_image_interval=50 \\n --log_checkpoint_interval=2 \\n --log \\n --name=coinrun-tokenizer-dataset-test-$slurm_job_id \\n --tags tokenizer coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 10 \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val \\n --val_interval 3 \\n --val_steps 5\n",shellscript,tab +1156,1322787,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1157,1629324,"TERMINAL",0,0,"clear",,terminal_command +1158,1629440,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1159,1662768,"slurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh",0,0,"",shellscript,tab 
+1160,1666324,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1161,1666901,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_grain_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart 
count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1162,1667475,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +1163,1667766,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +1164,1668376,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +1165,1668810,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +1166,1669871,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +1167,1671797,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default copy.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1168,1695781,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \\n --entity instant-uv \\n --project jafar \\n 
--tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1169,1709449,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2134,0,"",shellscript,selection_mouse +1170,1710381,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2098,59," --name=coinrun-dynamics-maskgit-default-$slurm_job_id \",shellscript,selection_command +1171,1710508,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2098,105," --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \",shellscript,selection_command +1172,1711448,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2194,0,"",shellscript,selection_command +1173,1712569,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2134,0,"",shellscript,selection_command +1174,1713443,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2134,1,"d",shellscript,selection_command +1175,1713652,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2134,1,"d",shellscript,selection_command +1176,1713965,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2134,0,"",shellscript,selection_command +1177,1718261,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2194,7,"",shellscript,content +1178,1718261,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2134,7,"",shellscript,content +1179,1719364,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2187,0,"m",shellscript,content +1180,1719364,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2134,0,"m",shellscript,content +1181,1719365,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2135,0,"",shellscript,selection_keyboard +1182,1719624,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2189,0,"i",shellscript,content +1183,1719625,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2135,0,"i",shellscript,content +1184,1719625,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2136,0,"",shellscript,selection_keyboard +1185,1719975,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2191,0,"x",shellscript,content +1186,1719976,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2136,0,"x",shellscript,content +1187,1719976,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2137,0,"",shellscript,selection_keyboard +1188,1720182,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2193,0,"e",shellscript,content +1189,1720183,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2137,0,"e",shellscript,content 
+1190,1720183,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2138,0,"",shellscript,selection_keyboard +1191,1720242,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2195,0,"d",shellscript,content +1192,1720243,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2138,0,"d",shellscript,content +1193,1720243,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2139,0,"",shellscript,selection_keyboard +1194,1720416,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2197,0,"_",shellscript,content +1195,1720416,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2139,0,"_",shellscript,content +1196,1720417,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2140,0,"",shellscript,selection_keyboard +1197,1720950,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2198,1,"",shellscript,content +1198,1720950,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2139,1,"",shellscript,content +1199,1722034,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2197,0,"-",shellscript,content +1200,1722035,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2139,0,"-",shellscript,content +1201,1722035,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2140,0,"",shellscript,selection_keyboard +1202,1722297,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2199,0,"p",shellscript,content +1203,1722298,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2140,0,"p",shellscript,content +1204,1722298,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2141,0,"",shellscript,selection_keyboard +1205,1722361,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2201,0,"r",shellscript,content +1206,1722361,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2141,0,"r",shellscript,content +1207,1722362,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2142,0,"",shellscript,selection_keyboard +1208,1722479,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2203,0,"e",shellscript,content +1209,1722480,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2142,0,"e",shellscript,content +1210,1722481,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2143,0,"",shellscript,selection_keyboard +1211,1722653,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2205,0,"c",shellscript,content +1212,1722654,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2143,0,"c",shellscript,content 
+1213,1722655,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2144,0,"",shellscript,selection_keyboard +1214,1722784,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2207,0,"i",shellscript,content +1215,1722785,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2144,0,"i",shellscript,content +1216,1722785,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2145,0,"",shellscript,selection_keyboard +1217,1722870,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2209,0,"s",shellscript,content +1218,1722871,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2145,0,"s",shellscript,content +1219,1722871,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2146,0,"",shellscript,selection_keyboard +1220,1722984,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2211,0,"i",shellscript,content +1221,1722984,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2146,0,"i",shellscript,content +1222,1722985,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2147,0,"",shellscript,selection_keyboard +1223,1723069,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2213,0,"o",shellscript,content +1224,1723069,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2147,0,"o",shellscript,content +1225,1723070,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2148,0,"",shellscript,selection_keyboard +1226,1723225,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2215,0,"n",shellscript,content +1227,1723225,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2148,0,"n",shellscript,content +1228,1723226,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2149,0,"",shellscript,selection_keyboard +1229,1723463,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2217,0,"-",shellscript,content +1230,1723464,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2149,0,"-",shellscript,content +1231,1723464,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2150,0,"",shellscript,selection_keyboard +1232,1723836,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2219,0,"a",shellscript,content +1233,1723836,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2150,0,"a",shellscript,content +1234,1723837,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2151,0,"",shellscript,selection_keyboard +1235,1723904,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2221,0,"b",shellscript,content 
+1236,1723904,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2151,0,"b",shellscript,content +1237,1723905,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2152,0,"",shellscript,selection_keyboard +1238,1724103,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2223,0,"l",shellscript,content +1239,1724104,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2152,0,"l",shellscript,content +1240,1724106,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2153,0,"",shellscript,selection_keyboard +1241,1724218,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2225,0,"a",shellscript,content +1242,1724219,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2153,0,"a",shellscript,content +1243,1724219,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2154,0,"",shellscript,selection_keyboard +1244,1724422,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2227,0,"t",shellscript,content +1245,1724423,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2154,0,"t",shellscript,content +1246,1724423,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2155,0,"",shellscript,selection_keyboard +1247,1724511,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2229,0,"i",shellscript,content +1248,1724511,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2155,0,"i",shellscript,content +1249,1724512,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2156,0,"",shellscript,selection_keyboard +1250,1724546,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2231,0,"o",shellscript,content +1251,1724546,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2156,0,"o",shellscript,content +1252,1724547,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2157,0,"",shellscript,selection_keyboard +1253,1724796,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2233,0,"n",shellscript,content +1254,1724796,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2157,0,"n",shellscript,content +1255,1724797,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2158,0,"",shellscript,selection_keyboard +1256,1725105,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2157,0,"",shellscript,selection_command +1257,1728114,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2227,0,"",shellscript,selection_mouse +1258,1729646,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab 
+1259,1730594,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +1260,1731719,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +1261,1732331,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +1262,1738926,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation copy.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-mixed-precision-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit mixed-precision-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1263,1753017,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default\n#SBATCH 
--requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-mixed-precision-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit mixed-precision-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1264,1776873,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1265,1776913,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1266,1776922,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,1,"\",shellscript,selection_mouse +1267,1776950,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_command +1268,1777633,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1269,1777640,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command +1270,1778227,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1271,1778242,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1272,1778684,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1273,1778690,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command 
+1274,1778947,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1275,1778966,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1276,1806678,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1277,1806710,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1278,1806711,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,1,"\",shellscript,selection_mouse +1279,1806711,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_command +1280,1807505,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1281,1807523,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command +1282,1807905,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2334,0,"",shellscript,selection_mouse +1283,1807906,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2333,0,"",shellscript,selection_command +1284,1808371,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1285,1808372,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1286,1809400,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1287,1809433,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1288,1810174,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1289,1810188,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command +1290,1810479,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1291,1810487,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1292,1810972,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2237,0,"",shellscript,selection_mouse +1293,1810974,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2236,0,"",shellscript,selection_command +1294,1811299,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1295,1811307,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command +1296,1811384,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,1,"\",shellscript,selection_mouse 
+1297,1811385,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_command +1298,1811640,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2237,0,"",shellscript,selection_mouse +1299,1811658,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2236,0,"",shellscript,selection_command +1300,1811983,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1301,1811998,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command +1302,1812581,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1303,1812590,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1304,1812875,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1305,1812876,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command +1306,1813168,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1307,1813174,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1308,1813459,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1309,1813470,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command +1310,1813749,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1311,1813760,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1312,1814053,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1313,1814059,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command +1314,1814337,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1315,1814344,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1316,1814446,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,1,"\",shellscript,selection_mouse +1317,1814449,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_command +1318,1814613,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1319,1814644,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command 
+1320,1814742,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,1,"\",shellscript,selection_mouse +1321,1814743,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_command +1322,1814910,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1323,1814949,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1324,1815226,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1325,1815228,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command +1326,1815566,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1327,1815569,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1328,1815949,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1329,1815988,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command +1330,1816244,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2285,0,"",shellscript,selection_mouse +1331,1816245,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2284,0,"",shellscript,selection_command +1332,1816596,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2263,0,"",shellscript,selection_mouse +1333,1816600,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2262,0,"",shellscript,selection_command +1334,1826365,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2097,0,"",shellscript,selection_mouse +1335,1826376,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2096,0,"",shellscript,selection_command +1336,1826416,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2096,1,"\",shellscript,selection_mouse +1337,1826427,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2097,0,"",shellscript,selection_command +1338,1827026,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2142,0,"",shellscript,selection_mouse +1339,1828202,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2134,0,"",shellscript,selection_mouse +1340,1829239,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2134,1,"m",shellscript,selection_command +1341,1829477,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2134,1,"m",shellscript,selection_command +1342,1830013,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2134,0,"",shellscript,selection_command 
+1343,1830581,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2211,5,"",shellscript,content +1344,1830582,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2134,5,"",shellscript,content +1345,1830866,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2206,1,"",shellscript,content +1346,1830867,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2134,1,"",shellscript,content +1347,1831184,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2205,9,"",shellscript,content +1348,1831184,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2134,9,"",shellscript,content +1349,1831967,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2196,0,"f",shellscript,content +1350,1831967,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2134,0,"f",shellscript,content +1351,1831968,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2135,0,"",shellscript,selection_keyboard +1352,1832059,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2198,0,"l",shellscript,content +1353,1832060,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2135,0,"l",shellscript,content +1354,1832060,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2136,0,"",shellscript,selection_keyboard +1355,1832169,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2200,0,"a",shellscript,content +1356,1832169,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2136,0,"a",shellscript,content +1357,1832170,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2137,0,"",shellscript,selection_keyboard +1358,1832300,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2202,0,"s",shellscript,content +1359,1832300,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2137,0,"s",shellscript,content +1360,1832301,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2138,0,"",shellscript,selection_keyboard +1361,1832516,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2204,0,"h",shellscript,content +1362,1832516,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2138,0,"h",shellscript,content +1363,1832517,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2139,0,"",shellscript,selection_keyboard +1364,1832768,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2206,0,"-",shellscript,content +1365,1832769,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2139,0,"-",shellscript,content +1366,1832769,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2140,0,"",shellscript,selection_keyboard 
+1367,1833032,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2208,0,"a",shellscript,content +1368,1833032,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2140,0,"a",shellscript,content +1369,1833033,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2141,0,"",shellscript,selection_keyboard +1370,1833263,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2210,0,"t",shellscript,content +1371,1833263,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2141,0,"t",shellscript,content +1372,1833264,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2142,0,"",shellscript,selection_keyboard +1373,1833425,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2212,0,"t",shellscript,content +1374,1833426,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2142,0,"t",shellscript,content +1375,1833426,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2143,0,"",shellscript,selection_keyboard +1376,1833514,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2214,0,"n",shellscript,content +1377,1833514,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2143,0,"n",shellscript,content +1378,1833515,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2144,0,"",shellscript,selection_keyboard +1379,1847720,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2097,0,"",shellscript,selection_mouse +1380,1848363,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2059,0,"",shellscript,selection_mouse +1381,1849290,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2059,0,"\n ",shellscript,content +1382,1849538,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2064,0,"-",shellscript,content +1383,1849539,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2065,0,"",shellscript,selection_keyboard +1384,1849672,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2065,0,"-",shellscript,content +1385,1849673,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2066,0,"",shellscript,selection_keyboard +1386,1849936,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2066,0,"n",shellscript,content +1387,1849939,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2067,0,"",shellscript,selection_keyboard +1388,1850123,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2067,0,"p",shellscript,content +1389,1850124,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2068,0,"",shellscript,selection_keyboard +1390,1850873,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2067,1,"",shellscript,content 
+1391,1851450,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2067,0,"o",shellscript,content +1392,1851451,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2068,0,"",shellscript,selection_keyboard +1393,1851757,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2068,0,"-",shellscript,content +1394,1851758,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2069,0,"",shellscript,selection_keyboard +1395,1852034,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2069,0,"u",shellscript,content +1396,1852035,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2070,0,"",shellscript,selection_keyboard +1397,1852127,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2070,0,"s",shellscript,content +1398,1852128,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2071,0,"",shellscript,selection_keyboard +1399,1852257,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2071,0,"e",shellscript,content +1400,1852258,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2072,0,"",shellscript,selection_keyboard +1401,1852385,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2072,0,"-",shellscript,content +1402,1852385,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2073,0,"",shellscript,selection_keyboard +1403,1852708,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2073,0,"l",shellscript,content +1404,1852709,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2074,0,"",shellscript,selection_keyboard +1405,1853043,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2073,1,"",shellscript,content +1406,1853468,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2073,0,"f",shellscript,content +1407,1853468,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2074,0,"",shellscript,selection_keyboard +1408,1853558,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2074,0,"l",shellscript,content +1409,1853558,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2075,0,"",shellscript,selection_keyboard +1410,1853659,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2075,0,"a",shellscript,content +1411,1853660,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2076,0,"",shellscript,selection_keyboard +1412,1853742,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2076,0,"s",shellscript,content +1413,1853743,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2077,0,"",shellscript,selection_keyboard 
+1414,1853859,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2077,0,"h",shellscript,content +1415,1853860,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2078,0,"",shellscript,selection_keyboard +1416,1854392,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2078,0,"-",shellscript,content +1417,1854393,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2079,0,"",shellscript,selection_keyboard +1418,1854731,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2079,0,"a",shellscript,content +1419,1854732,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2080,0,"",shellscript,selection_keyboard +1420,1854827,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2080,0,"t",shellscript,content +1421,1854827,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2081,0,"",shellscript,selection_keyboard +1422,1854948,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2081,0,"t",shellscript,content +1423,1854949,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2082,0,"",shellscript,selection_keyboard +1424,1855126,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2082,0,"e",shellscript,content +1425,1855127,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2083,0,"",shellscript,selection_keyboard +1426,1855293,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2083,0,"n",shellscript,content +1427,1855294,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2084,0,"",shellscript,selection_keyboard +1428,1855537,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2084,0,"t",shellscript,content +1429,1855538,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2085,0,"",shellscript,selection_keyboard +1430,1855609,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2085,0,"i",shellscript,content +1431,1855610,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2086,0,"",shellscript,selection_keyboard +1432,1855691,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2086,0,"o",shellscript,content +1433,1855692,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2087,0,"",shellscript,selection_keyboard +1434,1855844,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2087,0,"n",shellscript,content +1435,1855845,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2088,0,"",shellscript,selection_keyboard +1436,1856232,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2088,0," ",shellscript,content 
+1437,1856233,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2089,0,"",shellscript,selection_keyboard +1438,1856711,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2089,0,"\",shellscript,content +1439,1856712,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2090,0,"",shellscript,selection_keyboard +1440,1856995,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2089,0,"",shellscript,selection_command +1441,1858333,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2058,0,"",shellscript,selection_command +1442,1875381,"TERMINAL",0,0,"bash",,terminal_focus +1443,1876305,"TERMINAL",0,0,"bash",,terminal_focus +1444,1878444,"TERMINAL",0,0,"branch",,terminal_command +1445,1878479,"TERMINAL",0,0,"]633;Cchange-default-parameters\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1446,1882167,"TERMINAL",0,0,"git checkout main",,terminal_command +1447,1882207,"TERMINAL",0,0,"]633;C",,terminal_output +1448,1882351,"TERMINAL",0,0,"Switched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1449,1886062,"TERMINAL",0,0,"git pull",,terminal_command +1450,1886117,"TERMINAL",0,0,"]633;C",,terminal_output +1451,1886426,"",0,0,"Switched from branch 'change-default-parameters' to 'main'",,git_branch_checkout +1452,1887270,"TERMINAL",0,0,"Already up to date.\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1453,1892307,"TERMINAL",0,0,"source .venv/bin/activate",,terminal_command +1454,1914293,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p 
$CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1455,1918196,"TERMINAL",0,0,"idling",,terminal_command +1456,1918313,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Thu Oct 2 18:21:20 2025Partition dev_cpuonly: 12 nodes idle\rPartition cpuonly: 158 nodes idle\rPartition dev_accelerated:\t 2 nodes idle\rPartition accelerated:\t 1 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 0 nodes idle\rPartition large:\t 1 nodes idle\rPartition accelerated-h200:\t 0 nodes idle",,terminal_output +1457,1919319,"TERMINAL",0,0,"1",,terminal_output +1458,1920414,"TERMINAL",0,0,"2",,terminal_output +1459,1921466,"TERMINAL",0,0,"4",,terminal_output +1460,1922597,"TERMINAL",0,0,"5",,terminal_output +1461,1923519,"TERMINAL",0,0,"6",,terminal_output +1462,1924496,"TERMINAL",0,0,"7",,terminal_output +1463,1925134,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +1464,1926540,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_grain_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p 
$CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1465,1930747,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +1466,1933411,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +1467,1934226,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1455,0,"",shellscript,selection_mouse +1468,1934853,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1455,0,"d",shellscript,content +1469,1934855,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1456,0,"",shellscript,selection_keyboard +1470,1935001,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1456,0,"d",shellscript,content +1471,1935002,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1457,0,"",shellscript,selection_keyboard +1472,1935864,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1456,0,"",shellscript,selection_command +1473,1936651,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1363,102,"",shellscript,content +1474,1938202,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2008,0,"",shellscript,selection_command +1475,1938647,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2058,0,"",shellscript,selection_command +1476,1938997,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2008,0,"",shellscript,selection_command +1477,1944683,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +1478,1950931,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint 
flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-mixed-precision-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit mixed-precision-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1479,1953279,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",466,0,"",shellscript,selection_mouse +1480,1953312,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",465,0,"",shellscript,selection_command +1481,1954462,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",466,0,"",shellscript,selection_command +1482,1954947,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",465,1,"",shellscript,content +1483,1955060,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",464,1,"",shellscript,content +1484,1955228,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",463,1,"",shellscript,content +1485,1955368,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",462,1,"",shellscript,content +1486,1955516,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",461,1,"",shellscript,content +1487,1955642,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",460,1,"",shellscript,content +1488,1955780,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",459,1,"",shellscript,content +1489,1956008,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",459,0,"m",shellscript,content +1490,1956009,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",460,0,"",shellscript,selection_keyboard +1491,1956194,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",460,0,"i",shellscript,content 
+1492,1956195,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",461,0,"",shellscript,selection_keyboard +1493,1956340,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",461,0,"x",shellscript,content +1494,1956341,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",462,0,"",shellscript,selection_keyboard +1495,1956554,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",462,0,"e",shellscript,content +1496,1956555,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",463,0,"",shellscript,selection_keyboard +1497,1956651,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",463,0,"d",shellscript,content +1498,1956652,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",464,0,"",shellscript,selection_keyboard +1499,1956838,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",464,0,"_",shellscript,content +1500,1956839,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",465,0,"",shellscript,selection_keyboard +1501,1957014,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",465,0,"p",shellscript,content +1502,1957015,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",466,0,"",shellscript,selection_keyboard +1503,1957127,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",466,0,"r",shellscript,content +1504,1957128,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",467,0,"",shellscript,selection_keyboard +1505,1957279,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",467,0,"e",shellscript,content +1506,1957281,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",468,0,"",shellscript,selection_keyboard +1507,1957366,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",468,0,"c",shellscript,content +1508,1957367,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",469,0,"",shellscript,selection_keyboard +1509,1957586,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",469,0,"_",shellscript,content +1510,1957587,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",470,0,"",shellscript,selection_keyboard +1511,1958170,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",470,0,"a",shellscript,content +1512,1958171,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",471,0,"",shellscript,selection_keyboard +1513,1958278,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",471,0,"b",shellscript,content +1514,1958279,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",472,0,"",shellscript,selection_keyboard 
+1515,1958523,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",472,0,"a",shellscript,content +1516,1958524,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",473,0,"",shellscript,selection_keyboard +1517,1958966,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",472,1,"",shellscript,content +1518,1959151,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",472,0,"l",shellscript,content +1519,1959152,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",473,0,"",shellscript,selection_keyboard +1520,1959196,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",473,0,"a",shellscript,content +1521,1959198,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",474,0,"",shellscript,selection_keyboard +1522,1959413,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",474,0,"t",shellscript,content +1523,1959414,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",475,0,"",shellscript,selection_keyboard +1524,1959536,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",475,0,"i",shellscript,content +1525,1959537,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",476,0,"",shellscript,selection_keyboard +1526,1959590,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",476,0,"o",shellscript,content +1527,1959591,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",477,0,"",shellscript,selection_keyboard +1528,1959671,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",477,0,"n",shellscript,content +1529,1959672,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",478,0,"",shellscript,selection_keyboard +1530,1961258,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n 
restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --no-use-flash-attention \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-flash-attn-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit flash-attn-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1531,1963428,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",466,0,"",shellscript,selection_mouse +1532,1963469,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",465,0,"",shellscript,selection_command +1533,1963872,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",466,0,"",shellscript,selection_command +1534,1964318,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",465,1,"",shellscript,content +1535,1964468,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",464,1,"",shellscript,content +1536,1964554,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",463,1,"",shellscript,content +1537,1964696,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",462,1,"",shellscript,content +1538,1964825,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",461,1,"",shellscript,content +1539,1964956,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",460,1,"",shellscript,content +1540,1965104,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",459,1,"",shellscript,content +1541,1965199,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",459,0,"f",shellscript,content +1542,1965200,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",460,0,"",shellscript,selection_keyboard +1543,1965335,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",460,0,"l",shellscript,content +1544,1965336,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",461,0,"",shellscript,selection_keyboard +1545,1965475,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",461,0,"a",shellscript,content 
+1546,1965476,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",462,0,"",shellscript,selection_keyboard +1547,1965562,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",462,0,"s",shellscript,content +1548,1965563,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",463,0,"",shellscript,selection_keyboard +1549,1965701,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",463,0,"g",shellscript,content +1550,1965702,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",464,0,"",shellscript,selection_keyboard +1551,1966045,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",464,0,"_",shellscript,content +1552,1966046,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",465,0,"",shellscript,selection_keyboard +1553,1966343,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",464,1,"",shellscript,content +1554,1966454,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",463,1,"",shellscript,content +1555,1966940,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",463,0,"h",shellscript,content +1556,1966941,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",464,0,"",shellscript,selection_keyboard +1557,1967236,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",464,0,"_",shellscript,content +1558,1967237,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",465,0,"",shellscript,selection_keyboard +1559,1967450,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",465,0,"a",shellscript,content +1560,1967451,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",466,0,"",shellscript,selection_keyboard +1561,1967687,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",466,0,"t",shellscript,content +1562,1967688,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",467,0,"",shellscript,selection_keyboard +1563,1967848,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",467,0,"t",shellscript,content +1564,1967849,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",468,0,"",shellscript,selection_keyboard +1565,1967924,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",468,0,"n",shellscript,content +1566,1967925,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",469,0,"",shellscript,selection_keyboard +1567,1968211,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",469,0,"_",shellscript,content +1568,1968213,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",470,0,"",shellscript,selection_keyboard +1569,1968511,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",470,0,"a",shellscript,content 
+1570,1968512,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",471,0,"",shellscript,selection_keyboard +1571,1968608,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",471,0,"b",shellscript,content +1572,1968609,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",472,0,"",shellscript,selection_keyboard +1573,1968807,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",472,0,"l",shellscript,content +1574,1968808,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",473,0,"",shellscript,selection_keyboard +1575,1968899,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",473,0,"a",shellscript,content +1576,1968900,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",474,0,"",shellscript,selection_keyboard +1577,1969085,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",474,0,"t",shellscript,content +1578,1969086,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",475,0,"",shellscript,selection_keyboard +1579,1969139,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",475,0,"i",shellscript,content +1580,1969140,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",476,0,"",shellscript,selection_keyboard +1581,1969255,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",476,0,"o",shellscript,content +1582,1969256,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",477,0,"",shellscript,selection_keyboard +1583,1969372,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",477,0,"n",shellscript,content +1584,1969373,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",478,0,"",shellscript,selection_keyboard +1585,1969496,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",478,0,"\n",shellscript,content +1586,1969964,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",478,1,"",shellscript,content +1587,1971295,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",670,0,"",shellscript,selection_mouse +1588,1971690,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",616,0,"",shellscript,selection_mouse +1589,1972024,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",548,0,"",shellscript,selection_mouse +1590,1972520,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",600,0,"",shellscript,selection_mouse +1591,1973286,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",599,0,"",shellscript,selection_command +1592,1989485,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +1593,1989988,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab 
+1594,1997705,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1890,0,"",shellscript,selection_mouse +1595,1998751,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1887,3,"",shellscript,content +1596,2000098,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1887,0,"3",shellscript,content +1597,2000099,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1888,0,"",shellscript,selection_keyboard +1598,2000100,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1888,0,"6",shellscript,content +1599,2000100,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",1889,0,"",shellscript,selection_keyboard +1600,2001582,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_mixed_prec_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-mixed-precision-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit mixed-precision-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1601,2004075,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",1930,0,"",shellscript,selection_mouse 
+1602,2005140,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",1927,3,"",shellscript,content +1603,2006422,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",1927,0,"3",shellscript,content +1604,2006424,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",1928,0,"",shellscript,selection_keyboard +1605,2006502,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",1928,0,"6",shellscript,content +1606,2006503,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",1929,0,"",shellscript,selection_keyboard +1607,2007996,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1608,2010584,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1918,0,"",shellscript,selection_mouse +1609,2011537,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1915,3,"",shellscript,content 
+1610,2012674,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1915,0,"3",shellscript,content +1611,2012675,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1916,0,"",shellscript,selection_keyboard +1612,2012798,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1916,0,"6",shellscript,content +1613,2012799,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1917,0,"",shellscript,selection_keyboard +1614,2014352,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_grain_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1615,2016305,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",1890,0,"",shellscript,selection_mouse +1616,2017300,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",1887,3,"",shellscript,content +1617,2018395,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",1887,0,"3",shellscript,content +1618,2018396,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",1888,0,"",shellscript,selection_keyboard 
+1619,2018481,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",1888,0,"6",shellscript,content +1620,2018482,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",1889,0,"",shellscript,selection_keyboard +1621,2033298,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_flash_attn_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --no-use-flash-attention \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-flash-attn-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit flash-attn-ablation \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +1622,2034874,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",496,0,"",shellscript,selection_mouse +1623,2034875,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",495,0,"",shellscript,selection_command +1624,2034896,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",495,1,"e",shellscript,selection_mouse +1625,2034927,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",496,0,"",shellscript,selection_command 
+1626,2035094,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",478,0,"",shellscript,selection_mouse +1627,2035095,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",477,0,"",shellscript,selection_command +1628,2036052,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",478,0,"",shellscript,selection_command +1629,2036348,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",478,0,"_",shellscript,content +1630,2036350,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",479,0,"",shellscript,selection_keyboard +1631,2036650,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",479,0,"b",shellscript,content +1632,2036651,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",480,0,"",shellscript,selection_keyboard +1633,2036736,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",480,0,"s",shellscript,content +1634,2036737,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",481,0,"",shellscript,selection_keyboard +1635,2038616,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",481,0,"3",shellscript,content +1636,2038618,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",482,0,"",shellscript,selection_keyboard +1637,2038695,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",482,0,"6",shellscript,content +1638,2038696,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",483,0,"",shellscript,selection_keyboard +1639,2041426,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2201,0,"",shellscript,selection_mouse +1640,2042405,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2200,0,"",shellscript,selection_command +1641,2042685,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2201,0,"",shellscript,selection_command +1642,2043254,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2201,1,"-",shellscript,selection_command +1643,2043499,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2201,1,"-",shellscript,selection_command +1644,2043744,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2201,0,"",shellscript,selection_command +1645,2045085,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2273,0,"-",shellscript,content +1646,2045086,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2201,0,"-",shellscript,content +1647,2045087,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2202,0,"",shellscript,selection_keyboard +1648,2045401,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2275,0,"b",shellscript,content 
+1649,2045402,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2202,0,"b",shellscript,content +1650,2045403,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2203,0,"",shellscript,selection_keyboard +1651,2045505,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2277,0,"a",shellscript,content +1652,2045506,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2203,0,"a",shellscript,content +1653,2045506,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2204,0,"",shellscript,selection_keyboard +1654,2045520,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2279,0,"s",shellscript,content +1655,2045520,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2204,0,"s",shellscript,content +1656,2045521,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2205,0,"",shellscript,selection_keyboard +1657,2045910,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2280,1,"",shellscript,content +1658,2045910,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2204,1,"",shellscript,content +1659,2046047,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2278,1,"",shellscript,content +1660,2046048,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2203,1,"",shellscript,content +1661,2046178,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2277,0,"s",shellscript,content +1662,2046178,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2203,0,"s",shellscript,content +1663,2046179,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2204,0,"",shellscript,selection_keyboard +1664,2046935,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2278,1,"",shellscript,content +1665,2046936,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2203,1,"",shellscript,content +1666,2047220,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2277,0,"a",shellscript,content +1667,2047221,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2203,0,"a",shellscript,content +1668,2047222,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2204,0,"",shellscript,selection_keyboard +1669,2047351,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2279,0,"t",shellscript,content +1670,2047352,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2204,0,"t",shellscript,content +1671,2047352,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2205,0,"",shellscript,selection_keyboard +1672,2047711,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2280,1,"",shellscript,content 
+1673,2047711,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2204,1,"",shellscript,content +1674,2047833,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2278,1,"",shellscript,content +1675,2047833,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2203,1,"",shellscript,content +1676,2047964,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2277,0,"s",shellscript,content +1677,2047965,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2203,0,"s",shellscript,content +1678,2047965,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2204,0,"",shellscript,selection_keyboard +1679,2050389,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2279,0,"3",shellscript,content +1680,2050389,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2204,0,"3",shellscript,content +1681,2050390,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2205,0,"",shellscript,selection_keyboard +1682,2050405,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2281,0,"6",shellscript,content +1683,2050405,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2205,0,"6",shellscript,content +1684,2050406,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2206,0,"",shellscript,selection_keyboard +1685,2051355,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2205,0,"",shellscript,selection_command +1686,2052401,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2204,0,"",shellscript,selection_command +1687,2052683,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2203,0,"",shellscript,selection_command +1688,2053082,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2280,0,"",shellscript,selection_command +1689,2053345,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2279,0,"",shellscript,selection_command +1690,2053565,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2278,0,"",shellscript,selection_command +1691,2055084,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2278,1," ",shellscript,content +1692,2056891,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +1693,2059510,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2169,0,"",shellscript,selection_mouse +1694,2060881,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2169,0,"-",shellscript,content +1695,2060883,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2170,0,"",shellscript,selection_keyboard 
+1696,2061156,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2170,0,"b",shellscript,content +1697,2061157,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2171,0,"",shellscript,selection_keyboard +1698,2061282,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2171,0,"s",shellscript,content +1699,2061283,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2172,0,"",shellscript,selection_keyboard +1700,2063316,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2172,0,"3",shellscript,content +1701,2063317,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2173,0,"",shellscript,selection_keyboard +1702,2063372,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2173,0,"6",shellscript,content +1703,2063373,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2174,0,"",shellscript,selection_keyboard +1704,2064654,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2252,0,"",shellscript,selection_mouse +1705,2065749,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2252,0,"b",shellscript,content +1706,2065750,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2253,0,"",shellscript,selection_keyboard +1707,2065836,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2253,0,"s",shellscript,content +1708,2065837,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2254,0,"",shellscript,selection_keyboard +1709,2067255,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2254,0,"3",shellscript,content +1710,2067257,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2255,0,"",shellscript,selection_keyboard +1711,2067273,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2255,0,"6",shellscript,content +1712,2067273,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2256,0,"",shellscript,selection_keyboard +1713,2069363,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2256,0," ",shellscript,content +1714,2069364,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2257,0,"",shellscript,selection_keyboard +1715,2071244,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"",shellscript,tab +1716,2073634,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2141,0,"",shellscript,selection_mouse +1717,2074476,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2141,0,"b",shellscript,content +1718,2074478,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2142,0,"",shellscript,selection_keyboard 
+1719,2074559,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2142,0,"s",shellscript,content +1720,2074560,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2143,0,"",shellscript,selection_keyboard +1721,2075751,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2143,0,"3",shellscript,content +1722,2075753,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2144,0,"",shellscript,selection_keyboard +1723,2075796,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2144,0,"6",shellscript,content +1724,2075797,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2145,0,"",shellscript,selection_keyboard +1725,2076379,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2206,0,"",shellscript,selection_command +1726,2076596,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2232,0,"",shellscript,selection_command +1727,2076892,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2186,0,"",shellscript,selection_command +1728,2078411,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2204,0," ",shellscript,content +1729,2078413,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2205,0,"",shellscript,selection_keyboard +1730,2079012,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2205,0,"b",shellscript,content +1731,2079014,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2206,0,"",shellscript,selection_keyboard +1732,2079092,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2206,0,"s",shellscript,content +1733,2079093,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2207,0,"",shellscript,selection_keyboard +1734,2079881,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2207,0,"3",shellscript,content +1735,2079882,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2208,0,"",shellscript,selection_keyboard +1736,2080005,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2208,0,"6",shellscript,content +1737,2080006,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2209,0,"",shellscript,selection_keyboard +1738,2081499,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +1739,2083152,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2118,0,"",shellscript,selection_mouse +1740,2083485,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2120,0,"",shellscript,selection_mouse +1741,2084953,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2120,0,"b",shellscript,content +1742,2084954,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2121,0,"",shellscript,selection_keyboard +1743,2085004,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2121,0,"s",shellscript,content +1744,2085005,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2122,0,"",shellscript,selection_keyboard +1745,2086681,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2122,0,"3",shellscript,content 
+1746,2086682,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2123,0,"",shellscript,selection_keyboard +1747,2086758,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2123,0,"6",shellscript,content +1748,2086759,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2124,0,"",shellscript,selection_keyboard +1749,2087367,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2124,0," ",shellscript,content +1750,2087368,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2125,0,"",shellscript,selection_keyboard +1751,2087838,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2124,1,"",shellscript,content +1752,2088120,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2124,0,"-",shellscript,content +1753,2088121,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2125,0,"",shellscript,selection_keyboard +1754,2088486,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2193,0,"",shellscript,selection_command +1755,2088735,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2192,0,"",shellscript,selection_command +1756,2090041,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2192,0,"b",shellscript,content +1757,2090043,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2193,0,"",shellscript,selection_keyboard +1758,2090088,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2193,0,"s",shellscript,content +1759,2090089,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2194,0,"",shellscript,selection_keyboard +1760,2091084,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2194,0,"3",shellscript,content +1761,2091085,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2195,0,"",shellscript,selection_keyboard +1762,2091135,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2195,0,"6",shellscript,content +1763,2091136,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2196,0,"",shellscript,selection_keyboard +1764,2092334,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2196,0," ",shellscript,content +1765,2092335,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2197,0,"",shellscript,selection_keyboard +1766,2092883,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2224,0,"",shellscript,selection_mouse +1767,2093187,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",2246,0,"",shellscript,selection_mouse +1768,2094978,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",0,0,"",shellscript,tab +1769,2097842,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2197,0,"",shellscript,selection_mouse +1770,2099065,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2197,0,"b",shellscript,content 
+1771,2099066,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2198,0,"",shellscript,selection_keyboard +1772,2099141,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2198,0,"s",shellscript,content +1773,2099142,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2199,0,"",shellscript,selection_keyboard +1774,2099515,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2199,0,".",shellscript,content +1775,2099516,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2200,0,"",shellscript,selection_keyboard +1776,2100082,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2199,1,"",shellscript,content +1777,2100801,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2199,0,"-",shellscript,content +1778,2100802,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2200,0,"",shellscript,selection_keyboard +1779,2101094,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2199,1,"",shellscript,content +1780,2101463,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2199,0,"1",shellscript,content +1781,2101464,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2200,0,"",shellscript,selection_keyboard +1782,2101641,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2200,0,"1",shellscript,content +1783,2101642,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2201,0,"",shellscript,selection_keyboard +1784,2101827,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2201,0,"0",shellscript,content +1785,2101828,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2202,0,"",shellscript,selection_keyboard +1786,2102824,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2202,0,"-",shellscript,content +1787,2102825,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2203,0,"",shellscript,selection_keyboard +1788,2103191,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2276,0,"",shellscript,selection_command +1789,2103495,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2275,0,"",shellscript,selection_command +1790,2105218,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2275,0,"b",shellscript,content +1791,2105219,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2276,0,"",shellscript,selection_keyboard +1792,2105300,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2276,0,"s",shellscript,content +1793,2105301,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2277,0,"",shellscript,selection_keyboard 
+1794,2105888,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2277,0,"1",shellscript,content +1795,2105889,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2278,0,"",shellscript,selection_keyboard +1796,2106037,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2278,0,"1",shellscript,content +1797,2106038,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2279,0,"",shellscript,selection_keyboard +1798,2106070,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2279,0,"0",shellscript,content +1799,2106070,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2280,0,"",shellscript,selection_keyboard +1800,2106958,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2280,0," ",shellscript,content +1801,2106959,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",2281,0,"",shellscript,selection_keyboard +1802,2108502,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +1803,2110578,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2171,0,"",shellscript,selection_mouse +1804,2111459,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2170,0,"",shellscript,selection_command +1805,2111729,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2171,0,"",shellscript,selection_command +1806,2112490,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2171,1,"$",shellscript,selection_command +1807,2112723,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2171,1,"$",shellscript,selection_command +1808,2112974,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2171,0,"",shellscript,selection_command +1809,2114293,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2248,0,"b",shellscript,content +1810,2114294,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2171,0,"b",shellscript,content +1811,2114295,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2172,0,"",shellscript,selection_keyboard +1812,2114344,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2250,0,"s",shellscript,content +1813,2114344,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2172,0,"s",shellscript,content +1814,2114345,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2173,0,"",shellscript,selection_keyboard +1815,2115313,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2252,0,"1",shellscript,content +1816,2115314,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2173,0,"1",shellscript,content 
+1817,2115314,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2174,0,"",shellscript,selection_keyboard +1818,2115440,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2254,0,"1",shellscript,content +1819,2115440,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2174,0,"1",shellscript,content +1820,2115441,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2175,0,"",shellscript,selection_keyboard +1821,2115592,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2256,0,"0",shellscript,content +1822,2115592,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2175,0,"0",shellscript,content +1823,2115593,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2176,0,"",shellscript,selection_keyboard +1824,2116321,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2258,0,"-",shellscript,content +1825,2116321,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2176,0,"-",shellscript,content +1826,2116322,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2177,0,"",shellscript,selection_keyboard +1827,2117082,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2176,0,"",shellscript,selection_command +1828,2117397,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2259,0,"",shellscript,selection_command +1829,2118488,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2259,1," ",shellscript,content +1830,2120917,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +1831,2123649,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2143,0,"",shellscript,selection_mouse +1832,2124000,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2141,0,"",shellscript,selection_mouse +1833,2124958,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2142,0,"",shellscript,selection_command +1834,2126377,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2098,59," --name=coinrun-dynamics-maskgit-default-$slurm_job_id \",shellscript,selection_command +1835,2126608,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2098,105," --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \",shellscript,selection_command +1836,2126929,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2102,0,"",shellscript,selection_command +1837,2127783,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2101,0,"",shellscript,selection_command +1838,2129677,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2142,0,"",shellscript,selection_mouse +1839,2130813,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2142,1,"$",shellscript,selection_command 
+1840,2130980,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2142,1,"$",shellscript,selection_command +1841,2131250,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2142,0,"",shellscript,selection_command +1842,2132132,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2202,0,"b",shellscript,content +1843,2132133,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2142,0,"b",shellscript,content +1844,2132134,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2143,0,"",shellscript,selection_keyboard +1845,2132219,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2204,0,"s",shellscript,content +1846,2132220,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2143,0,"s",shellscript,content +1847,2132220,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2144,0,"",shellscript,selection_keyboard +1848,2133024,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2206,0,"1",shellscript,content +1849,2133024,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2144,0,"1",shellscript,content +1850,2133025,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2145,0,"",shellscript,selection_keyboard +1851,2133157,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2208,0,"1",shellscript,content +1852,2133158,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2145,0,"1",shellscript,content +1853,2133159,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2146,0,"",shellscript,selection_keyboard +1854,2133232,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2210,0,"0",shellscript,content +1855,2133233,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2146,0,"0",shellscript,content +1856,2133233,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2147,0,"",shellscript,selection_keyboard +1857,2133916,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2212,0,"-",shellscript,content +1858,2133917,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2147,0,"-",shellscript,content +1859,2133917,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2148,0,"",shellscript,selection_keyboard +1860,2134574,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2214,0," ",shellscript,content +1861,2134575,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2148,0," ",shellscript,content +1862,2134575,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2149,0,"",shellscript,selection_keyboard +1863,2135117,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2215,1,"",shellscript,content +1864,2135117,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2148,1,"",shellscript,content +1865,2135459,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2147,0,"",shellscript,selection_command +1866,2135553,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2213,0,"",shellscript,selection_command +1867,2136338,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",2213,1," 
",shellscript,content +1868,2138000,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +1869,2138828,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2187,0,"",shellscript,selection_mouse +1870,2139346,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2119,0,"",shellscript,selection_mouse +1871,2139879,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2120,0,"",shellscript,selection_mouse +1872,2140506,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2119,0,"",shellscript,selection_command +1873,2141450,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2069,66," --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \",shellscript,selection_command +1874,2141641,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2069,119," --name=coinrun-dynamics-maskgit-grain-ablation-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation \",shellscript,selection_command +1875,2142612,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2186,0,"",shellscript,selection_command +1876,2142850,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2187,0,"",shellscript,selection_command +1877,2143198,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2120,0,"",shellscript,selection_command +1878,2144017,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2120,1,"$",shellscript,selection_command +1879,2144220,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2120,1,"$",shellscript,selection_command +1880,2144483,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2120,0,"",shellscript,selection_command +1881,2145755,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2187,0,"b",shellscript,content +1882,2145756,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2120,0,"b",shellscript,content +1883,2145757,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2121,0,"",shellscript,selection_keyboard +1884,2145834,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2189,0,"s",shellscript,content +1885,2145835,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2121,0,"s",shellscript,content +1886,2145836,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2122,0,"",shellscript,selection_keyboard +1887,2146209,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2191,0,"1",shellscript,content +1888,2146210,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2122,0,"1",shellscript,content +1889,2146211,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2123,0,"",shellscript,selection_keyboard +1890,2146335,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2193,0,"1",shellscript,content +1891,2146336,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2123,0,"1",shellscript,content 
+1892,2146336,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2124,0,"",shellscript,selection_keyboard +1893,2146369,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2195,0,"0",shellscript,content +1894,2146369,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2124,0,"0",shellscript,content +1895,2146370,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2125,0,"",shellscript,selection_keyboard +1896,2147427,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2197,0,"-",shellscript,content +1897,2147427,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2125,0,"-",shellscript,content +1898,2147428,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2126,0,"",shellscript,selection_keyboard +1899,2148112,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2125,0,"",shellscript,selection_command +1900,2148419,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2198,0,"",shellscript,selection_command +1901,2149174,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",2198,1," ",shellscript,content +1902,2155628,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",473,0,"",shellscript,selection_mouse +1903,2155628,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",472,0,"",shellscript,selection_command +1904,2155629,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",473,0,"",shellscript,selection_command +1905,2155629,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",473,0,"_",shellscript,content +1906,2155630,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",474,0,"",shellscript,selection_keyboard +1907,2155714,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",474,0,"b",shellscript,content +1908,2155715,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",475,0,"",shellscript,selection_keyboard +1909,2155841,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",475,0,"s",shellscript,content +1910,2155842,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",476,0,"",shellscript,selection_keyboard +1911,2156096,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",476,0,"1",shellscript,content +1912,2156097,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",477,0,"",shellscript,selection_keyboard +1913,2156222,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",477,0,"1",shellscript,content +1914,2156223,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",478,0,"",shellscript,selection_keyboard +1915,2156312,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",478,0,"0",shellscript,content +1916,2156313,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",479,0,"",shellscript,selection_keyboard +1917,2158509,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab 
+1918,2160071,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",466,0,"",shellscript,selection_mouse +1919,2160101,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",465,0,"",shellscript,selection_command +1920,2160533,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",466,0,"",shellscript,selection_command +1921,2160937,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",466,0,"_",shellscript,content +1922,2160939,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",467,0,"",shellscript,selection_keyboard +1923,2161312,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",467,0,"b",shellscript,content +1924,2161313,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",468,0,"",shellscript,selection_keyboard +1925,2161766,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",468,0,"s",shellscript,content +1926,2161767,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",469,0,"",shellscript,selection_keyboard +1927,2162005,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",469,0,"1",shellscript,content +1928,2162006,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",470,0,"",shellscript,selection_keyboard +1929,2162206,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",470,0,"1",shellscript,content +1930,2162207,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",471,0,"",shellscript,selection_keyboard +1931,2162238,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",471,0,"0",shellscript,content +1932,2162239,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",472,0,"",shellscript,selection_keyboard +1933,2164093,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +1934,2165687,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",476,0,"",shellscript,selection_mouse +1935,2166326,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",478,0,"",shellscript,selection_mouse +1936,2166326,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",477,0,"",shellscript,selection_command +1937,2166702,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",478,0,"",shellscript,selection_command +1938,2167629,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",478,0,"_",shellscript,content +1939,2167631,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",479,0,"",shellscript,selection_keyboard +1940,2168112,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",479,0,"b",shellscript,content +1941,2168113,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",480,0,"",shellscript,selection_keyboard +1942,2168188,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",480,0,"s",shellscript,content 
+1943,2168189,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",481,0,"",shellscript,selection_keyboard +1944,2168819,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",481,0,"1",shellscript,content +1945,2168820,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",482,0,"",shellscript,selection_keyboard +1946,2169030,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",482,0,"0",shellscript,content +1947,2169031,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",483,0,"",shellscript,selection_keyboard +1948,2169456,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",482,1,"",shellscript,content +1949,2169570,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",482,0,"1",shellscript,content +1950,2169571,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",483,0,"",shellscript,selection_keyboard +1951,2169654,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",483,0,"0",shellscript,content +1952,2169655,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",484,0,"",shellscript,selection_keyboard +1953,2171302,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",0,0,"",shellscript,tab +1954,2172769,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",478,0,"",shellscript,selection_mouse +1955,2173959,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",478,0,"_",shellscript,content +1956,2173960,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",479,0,"",shellscript,selection_keyboard +1957,2174314,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",479,0,"b",shellscript,content +1958,2174315,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",480,0,"",shellscript,selection_keyboard +1959,2174391,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",480,0,"s",shellscript,content +1960,2174392,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",481,0,"",shellscript,selection_keyboard +1961,2174599,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",481,0,"1",shellscript,content +1962,2174600,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",482,0,"",shellscript,selection_keyboard +1963,2174733,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",482,0,"1",shellscript,content +1964,2174734,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",483,0,"",shellscript,selection_keyboard +1965,2174942,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",483,0,"0",shellscript,content 
+1966,2174943,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",484,0,"",shellscript,selection_keyboard +1967,2176661,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +1968,2177944,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",473,0,"",shellscript,selection_mouse +1969,2179146,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",473,0,"_",shellscript,content +1970,2179147,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",474,0,"",shellscript,selection_keyboard +1971,2179862,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",474,0,"b",shellscript,content +1972,2179863,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",475,0,"",shellscript,selection_keyboard +1973,2179980,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",475,0,"s",shellscript,content +1974,2179981,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",476,0,"",shellscript,selection_keyboard +1975,2181023,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",476,0,"3",shellscript,content +1976,2181024,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",477,0,"",shellscript,selection_keyboard +1977,2181088,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",477,0,"6",shellscript,content +1978,2181089,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",478,0,"",shellscript,selection_keyboard +1979,2183162,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"",shellscript,tab +1980,2184399,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",484,0,"",shellscript,selection_mouse +1981,2184679,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",351,0,"",shellscript,selection_mouse +1982,2185018,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",466,0,"",shellscript,selection_mouse +1983,2185967,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",466,0,"_",shellscript,content +1984,2185968,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",467,0,"",shellscript,selection_keyboard +1985,2187139,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",467,0,"b",shellscript,content +1986,2187140,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",468,0,"",shellscript,selection_keyboard +1987,2187251,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",468,0,"s",shellscript,content +1988,2187252,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",469,0,"",shellscript,selection_keyboard +1989,2188290,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",469,0,"3",shellscript,content +1990,2188291,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",470,0,"",shellscript,selection_keyboard +1991,2188352,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",470,0,"6",shellscript,content +1992,2188353,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",471,0,"",shellscript,selection_keyboard 
+1993,2190135,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +1994,2192081,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",478,0,"",shellscript,selection_mouse +1995,2193037,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",478,0,"_",shellscript,content +1996,2193039,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",479,0,"",shellscript,selection_keyboard +1997,2193758,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",479,0,"b",shellscript,content +1998,2193759,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",480,0,"",shellscript,selection_keyboard +1999,2193991,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",480,0,"s",shellscript,content +2000,2193992,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",481,0,"",shellscript,selection_keyboard +2001,2194526,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",481,0,"3",shellscript,content +2002,2194527,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",482,0,"",shellscript,selection_keyboard +2003,2194603,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",482,0,"6",shellscript,content +2004,2194604,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",483,0,"",shellscript,selection_keyboard +2005,2196176,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",0,0,"",shellscript,tab +2006,2198028,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",483,0,"",shellscript,selection_mouse +2007,2198064,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",482,0,"",shellscript,selection_command +2008,2199367,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +2009,2201952,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",0,0,"",shellscript,tab +2010,2210441,"TERMINAL",0,0,"sync-runner",,terminal_command +2011,2210506,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +2012,2214770,"TERMINAL",0,0,"^C",,terminal_output +2013,2215140,"TERMINAL",0,0,"rsync error: received SIGINT, SIGTERM, or SIGHUP (code 20) at io.c(510) [generator=3.2.3]\r\n",,terminal_output +2014,2215158,"TERMINAL",0,0,"rsync error: received SIGINT, SIGTERM, or SIGHUP (code 20) at rsync.c(703) [sender=3.2.3]\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2015,2217977,"TERMINAL",0,0,"git branch",,terminal_command +2016,2218043,"TERMINAL",0,0,"]633;C[?1h=\r ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n 
coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n* main\r\n maskgit-different-maskprob-per-sample\r\n:",,terminal_output +2017,2221132,"TERMINAL",0,0,"\r/pprr",,terminal_output +2018,2221296,"TERMINAL",0,0,"ee",,terminal_output +2019,2221347,"TERMINAL",0,0,"pp",,terminal_output +2020,2221580,"TERMINAL",0,0,"ee",,terminal_output +2021,2221664,"TERMINAL",0,0,"nn",,terminal_output +2022,2221815,"TERMINAL",0,0,"dd",,terminal_output +2023,2221930,"TERMINAL",0,0,"\r ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n* main\r\n maskgit-different-maskprob-per-sample\r\n ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n* main\r\n maskgit-different-maskprob-per-sample\r\n...skipping...\r\n 
prepend-action-maskgit\r\n preprocess_video\r\n refactor-full-frame-val-loss\r\n refactor-tmp\r\n remove-restore-branching\r\n revised-dataloader\r\n runner\r\n runner-grain\r\n sample-ali-branch\r\n sample-from-different-topologies\r\n sampling-script-add-metrics\r\n sampling-startframe-indexing-fix\r\n seeding-data-generation\r\n speedup-tfrecord-preprocessing\r\n train_lam_coinrun_ablation_wsd_3e-6_28747\r\n val-loss\r\n vizdoom-dataset\r\n z-loss\r\n zloss-runs\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n~\r\n(END)",,terminal_output +2024,2224792,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2025,2227902,"TERMINAL",0,0,"git checkout prepend-action-maskgit",,terminal_command +2026,2227931,"TERMINAL",0,0,"]633;C",,terminal_output +2027,2228035,"TERMINAL",0,0,"Switched to branch 'prepend-action-maskgit'\r\nYour branch is up to date with 'origin/prepend-action-maskgit'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2028,2231447,"",0,0,"Switched from branch 'main' to 'prepend-action-maskgit'",,git_branch_checkout +2029,2231551,"TERMINAL",0,0,"sync-runner",,terminal_command +2030,2231659,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +2031,2236566,"TERMINAL",0,0,"ali-old-branch.diff\r\nmessage.md\r\npyproject.toml\r\n",,terminal_output +2032,2238527,"TERMINAL",0,0,"jasmine/\r\njasmine/genie.py\r\njasmine/sample.py\r\njasmine/train_dynamics.py\r\njasmine/train_lam.py\r\njasmine/train_tokenizer.py\r\njasmine/models/\r\njasmine/models/dynamics.py\r\njasmine/utils/\r\n",,terminal_output +2033,2238718,"TERMINAL",0,0,"slurm/dev/\r\n",,terminal_output +2034,2239091,"TERMINAL",0,0,"slurm/dev/alfred/berlin/\r\nslurm/dev/alfred/berlin/dataset_investigation/\r\nslurm/dev/alfred/berlin/dataset_investigation/gen_1.sbatch\r\nslurm/dev/alfred/berlin/dataset_investigation/gen_2.sbatch\r\n",,terminal_output 
+2035,2239565,"TERMINAL",0,0,"slurm/dev/alfred/berlin/test_exposure_bias/\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_causal.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_causal_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_full_prec_from_80k_nan_invest_requeue_every2h.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_full_prec_from_80k_nan_invest_requeue_every_10h.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise_from_main.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise_from_main_w_full_frame.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_180k.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_causal.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_causal_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_maskgit.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_maskgit_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias_climber/\r\nslurm/dev/alfred/berlin/test_exposure_bias_climber/sample.sbatch\r\nslurm/dev/franz/\r\nslurm/dev/franz/berlin/\r\nslurm/dev/franz/berlin/coinrun/\r\nslurm/dev/franz/berlin/coinrun/sample/\r\nslurm/dev/franz/berlin/coinrun/sample/causal/\r\nslurm/dev/franz/berlin/coinrun/sample/causal/sample_causal.sh\r\nslurm/dev/franz/berlin/coinrun/sample/causal/sample_noised_causal.sh\r\nslurm/dev/franz/berlin/coinrun/sample/maskgit/\r\nslurm/dev/franz/berlin/coinrun/sample/maskgit/sample_dynamics_from_140k_tokenizer.sh\r\nslurm/dev/franz/berlin/coinrun/sample/maskgit/sample_dynamics_from_fully_trained_tokenizer.sh\r\nslurm/jobs/alfred/berlin/\r\nslurm/jobs/alfred/berlin/coinrun/1m_steps/\r\nslurm/jobs/alfred/berlin/coinrun/1m_steps/sample.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/sample.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/sample.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset_climber/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset_climber/sample.sbatch\r\nslurm/jobs/alfred/berlin/workshop/\r\nslurm/jobs/alfred/berlin/workshop/jafar_default/\r\nslurm/jobs/alfred/berlin/workshop/jafar_default/dynamics.sbatch\r\nslurm/jobs/alfred/berlin/workshop/jasmine_default/\r\nslurm/jobs/alfred/berlin/workshop/jasmine_default/dynamics.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_full_precision_nan_invest.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest_jafar_cos_init.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest_jafar_cos_init_w_flash_attention.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest_w_flash_attention.sbatch\r\nslurm/jobs/franz/berlin/\r\nslurm/jobs/franz/berlin/atari/\r\nslurm/jobs/franz/berlin/atari/data_generation/\r\nslurm/jobs/franz/berlin/atari/data_generation/alien.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/amidar.sh\r\nslurm/jobs/franz/berlin/atari/
data_generation/assault.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/asterix.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/bank_heist.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/battle_zone.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/boxing.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/breakout.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/chopper_command.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/crazy_climber.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/demon_attack.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/pong.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/spawner.sh\r\nslurm/jobs/franz/berlin/atari/legacy_cleanrl_data_generation/\r\nslurm/jobs/franz/berlin/atari/legacy_cleanrl_data_generation/atari_breakout_data_gen.sh\r\nslurm/jobs/franz/berlin/atari/legacy_cleanrl_data_generation/atari_breakout_v4_data_gen.sh\r\nslurm/jobs/franz/berlin/atari/tokenizer/\r\nslurm/jobs/franz/berlin/atari/tokenizer/atari_breakout_tokenizer.sbatch\r\nslurm/jobs/franz/berlin/coinrun/\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_lam_base.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_lam_no_flash_attention.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_tokenizer_base.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_main.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_noise-branch.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_tokenizer_patch_size_4.sh\r\nslurm/jobs/mihir/horeka/\r\nslurm/jobs/mihir/horeka/coinrun/\r\nslurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\nslurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/\r\nslurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise-main.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-sqrt-ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsi
ze/\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\n",,terminal_output +2036,2240013,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/preprocessing/\r\nslurm/jobs/mihir/horeka/preprocessing/coinrun_chunked.sh\r\nslurm/jobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh\r\n",,terminal_output +2037,2240150,"TERMINAL",0,0,"\r\nsent 368,425 bytes received 2,058 bytes 43,586.24 bytes/sec\r\ntotal size is 28,401,685 speedup is 76.66\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2038,2243106,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default_bs36\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-default-bs36$slurm_job_id \\n --tags coinrun dynamics maskgit default bs36 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2039,2249339,"TERMINAL",0,0,"runner",,terminal_command 
+2040,2263776,"TERMINAL",0,0,"sync-runner",,terminal_command +2041,2263844,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +2042,2263947,"TERMINAL",0,0,"\r\nsent 40,691 bytes received 254 bytes 81,890.00 bytes/sec\r\ntotal size is 28,401,685 speedup is 693.65\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +2043,2268626,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",,terminal_command +2044,2268649,"TERMINAL",0,0,"]633;CSubmitted batch job 3538479\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +2045,2278329,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default_bs110\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-default-bs110-$slurm_job_id \\n --tags coinrun dynamics maskgit default bs110 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2046,2292796,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",,terminal_command +2047,2292827,"TERMINAL",0,0,"]633;CSubmitted batch job 3538481\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +2048,2306267,"TERMINAL",0,0,"dev",,terminal_command +2049,2306314,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output 
+2050,2312738,"TERMINAL",0,0,"git branch",,terminal_command +2051,2312804,"TERMINAL",0,0,"]633;C[?1h=\r ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n:",,terminal_output +2052,2321030,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2053,2324897,"TERMINAL",0,0,"git checkout ablation/full-precision-training",,terminal_command +2054,2324976,"TERMINAL",0,0,"]633;CSwitched to branch 'ablation/full-precision-training'\r\nYour branch is up to date with 'origin/ablation/full-precision-training'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2055,2326448,"",0,0,"Switched from branch 'prepend-action-maskgit' to 'ablation/full-precision-training'",,git_branch_checkout +2056,2334418,"TERMINAL",0,0,"sync-runner-2",,terminal_command +2057,2334534,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +2058,2334623,"TERMINAL",0,0,"./\r\nali-old-branch.diff\r\nmessage.md\r\npyproject.toml\r\n",,terminal_output 
+2059,2336366,"TERMINAL",0,0,"jasmine/\r\njasmine/genie.py\r\njasmine/sample.py\r\njasmine/train_dynamics.py\r\njasmine/train_lam.py\r\njasmine/train_tokenizer.py\r\njasmine/models/\r\njasmine/models/dynamics.py\r\njasmine/utils/\r\nslurm/dev/\r\nslurm/dev/alfred/berlin/\r\nslurm/dev/alfred/berlin/dataset_investigation/\r\nslurm/dev/alfred/berlin/dataset_investigation/gen_1.sbatch\r\nslurm/dev/alfred/berlin/dataset_investigation/gen_2.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_causal.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_causal_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_full_prec_from_80k_nan_invest_requeue_every2h.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_full_prec_from_80k_nan_invest_requeue_every_10h.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise_from_main.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise_from_main_w_full_frame.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_180k.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_causal.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_causal_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_maskgit.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_maskgit_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias_climber/\r\nslurm/dev/alfred/berlin/test_exposure_bias_climber/sample.sbatch\r\nslurm/dev/franz/\r\nslurm/dev/franz/berlin/\r\nslurm/dev/franz/berlin/coinrun/\r\nslurm/dev/franz/berlin/coinrun/sample/\r\nslurm/dev/franz/berlin/coinrun/sample/causal/\r\nslurm/dev/franz/berlin/coinrun/sample/causal/sample_causal.sh\r\nslurm/dev/franz/berlin/coinrun/sample/causal/sample_noised_causal.sh\r\nslurm/dev/franz/berlin/coinrun/sample/maskgit/\r\nslurm/dev/franz/berlin/coinrun/sample/maskgit/sample_dynamics_from_140k_tokenizer.sh\r\nslurm/dev/franz/berlin/coinrun/sample/maskgit/sample_dynamics_from_fully_trained_tokenizer.sh\r\nslurm/jobs/alfred/berlin/\r\nslurm/jobs/alfred/berlin/coinrun/1m_steps/\r\nslurm/jobs/alfred/berlin/coinrun/1m_steps/sample.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/sample.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/sample.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset_climber/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset_climber/sample.sbatch\r\nslurm/jobs/alfred/berlin/workshop/\r\nslurm/jobs/alfred/berlin/workshop/jafar_default/\r\nslurm/jobs/alfred/berlin/workshop/jafar_default/dynamics.sbatch\r\nslurm/jobs/alfred/berlin/workshop/jasmine_default/\r\nslurm/jobs/alfred/berlin/workshop/jasmine_default/dynamics.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_full_precision_nan_invest.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest_jafar_cos_init.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest_
jafar_cos_init_w_flash_attention.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest_w_flash_attention.sbatch\r\nslurm/jobs/franz/berlin/\r\nslurm/jobs/franz/berlin/atari/\r\nslurm/jobs/franz/berlin/atari/data_generation/\r\nslurm/jobs/franz/berlin/atari/data_generation/alien.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/amidar.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/assault.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/asterix.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/bank_heist.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/battle_zone.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/boxing.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/breakout.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/chopper_command.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/crazy_climber.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/demon_attack.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/pong.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/spawner.sh\r\nslurm/jobs/franz/berlin/atari/legacy_cleanrl_data_generation/\r\nslurm/jobs/franz/berlin/atari/legacy_cleanrl_data_generation/atari_breakout_data_gen.sh\r\nslurm/jobs/franz/berlin/atari/legacy_cleanrl_data_generation/atari_breakout_v4_data_gen.sh\r\nslurm/jobs/franz/berlin/atari/tokenizer/\r\nslurm/jobs/franz/berlin/atari/tokenizer/atari_breakout_tokenizer.sbatch\r\nslurm/jobs/franz/berlin/coinrun/\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_lam_base.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_lam_no_flash_attention.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_tokenizer_base.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_main.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_noise-branch.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_tokenizer_patch_size_4.sh\r\nslurm/jobs/mihir/horeka/\r\n",,terminal_output 
+2060,2336608,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/\r\nslurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\nslurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/\r\nslurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise-main.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-no-noise.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/train_dyn_default-sqrt-ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/\r\nslurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh\r\n",,terminal_output +2061,2336872,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/preprocessing/\r\nslurm/jobs/mihir/horeka/preprocessing/coinrun_chunked.sh\r\nslurm/jobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh\r\n",,terminal_output +2062,2337279,"TERMINAL",0,0,"\r\nsent 368,423 bytes received 2,057 bytes 148,192.00 bytes/sec\r\ntotal size is 28,401,684 speedup is 76.66\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2063,2339110,"TERMINAL",0,0,"runner-2",,terminal_command +2064,2341466,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_mixed_prec_ablation_bs110\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource 
.venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-mixed-precision-ablation-bs110-$slurm_job_id \\n --tags coinrun dynamics maskgit mixed-precision-ablation bs110 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2065,2346681,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",,terminal_command +2066,2346712,"TERMINAL",0,0,"]633;CSubmitted batch job 3538482\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +2067,2350859,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_flash_attn_ablation_bs110\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n 
--batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --no-use-flash-attention \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-flash-attn-ablation-bs110-$slurm_job_id \\n --tags coinrun dynamics maskgit flash-attn-ablation bs110 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2068,2352929,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",85,0,"",shellscript,selection_mouse +2069,2353941,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",84,0,"",shellscript,selection_mouse +2070,2353983,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",56,28,"per-node=1\n#SBATCH --time=48",shellscript,selection_mouse +2071,2354663,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",84,0,"",shellscript,selection_mouse +2072,2356722,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",83,1,"",shellscript,content +2073,2356829,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",82,1,"",shellscript,content +2074,2358594,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",82,0,"0",shellscript,content +2075,2358595,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",83,0,"",shellscript,selection_keyboard +2076,2358636,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",83,0,"2",shellscript,content +2077,2358637,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",84,0,"",shellscript,selection_keyboard +2078,2360597,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +2079,2361231,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",84,0,"",shellscript,selection_mouse +2080,2362188,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",83,1,"",shellscript,content +2081,2362494,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",82,1,"",shellscript,content +2082,2362731,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",82,0,"2",shellscript,content +2083,2362732,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",83,0,"",shellscript,selection_keyboard +2084,2363012,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",82,1,"",shellscript,content +2085,2363137,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",82,0,"0",shellscript,content +2086,2363138,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",83,0,"",shellscript,selection_keyboard +2087,2363274,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",83,0,"2",shellscript,content 
+2088,2363274,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",84,0,"",shellscript,selection_keyboard +2089,2364756,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default_bs110\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-default-bs110-$slurm_job_id \\n --tags coinrun dynamics maskgit default bs110 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2090,2370064,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",84,0,"",shellscript,selection_mouse +2091,2371056,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",83,1,"",shellscript,content +2092,2371275,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",82,1,"",shellscript,content +2093,2371293,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",82,0,"0",shellscript,content +2094,2371294,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",83,0,"",shellscript,selection_keyboard +2095,2371405,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",83,0,"2",shellscript,content +2096,2371406,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",84,0,"",shellscript,selection_keyboard 
+2097,2375618,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_grain_ablation_bs110\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-bs110-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation bs110 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2098,2376474,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",84,0,"",shellscript,selection_mouse +2099,2377408,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",83,1,"",shellscript,content +2100,2377521,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",82,1,"",shellscript,content +2101,2377607,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",82,0,"0",shellscript,content +2102,2377608,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",83,0,"",shellscript,selection_keyboard +2103,2377722,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",83,0,"2",shellscript,content +2104,2377723,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",84,0,"",shellscript,selection_keyboard +2105,2380989,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",0,0,"#!/usr/bin/env 
bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_grain_ablation_bs36\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/npy_test\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-grain-ablation-bs36-$slurm_job_id \\n --tags coinrun dynamics maskgit grain-ablation bs36 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2106,2382054,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",84,0,"",shellscript,selection_mouse +2107,2383103,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",83,1,"",shellscript,content +2108,2383216,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",82,1,"",shellscript,content +2109,2383452,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",82,0,"0",shellscript,content +2110,2383453,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",83,0,"",shellscript,selection_keyboard +2111,2383481,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",83,0,"2",shellscript,content +2112,2383481,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",84,0,"",shellscript,selection_keyboard +2113,2385085,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH 
--cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default_bs36\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-default-bs36$slurm_job_id \\n --tags coinrun dynamics maskgit default bs36 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2114,2385958,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",84,0,"",shellscript,selection_mouse +2115,2386901,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",83,0,"",shellscript,selection_command +2116,2387051,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",82,0,"",shellscript,selection_command +2117,2387189,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",67,0,"",shellscript,selection_command +2118,2387830,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",68,0,"",shellscript,selection_command +2119,2388352,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",69,0,"",shellscript,selection_command +2120,2388370,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",70,0,"",shellscript,selection_command +2121,2388385,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",71,0,"",shellscript,selection_command +2122,2388425,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",72,0,"",shellscript,selection_command 
+2123,2388461,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",73,0,"",shellscript,selection_command +2124,2388493,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",74,0,"",shellscript,selection_command +2125,2388528,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",75,0,"",shellscript,selection_command +2126,2388561,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",76,0,"",shellscript,selection_command +2127,2388591,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",77,0,"",shellscript,selection_command +2128,2388623,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",78,0,"",shellscript,selection_command +2129,2388653,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",79,0,"",shellscript,selection_command +2130,2388684,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",80,0,"",shellscript,selection_command +2131,2388747,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",81,0,"",shellscript,selection_command +2132,2388748,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",82,0,"",shellscript,selection_command +2133,2388782,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",83,0,"",shellscript,selection_command +2134,2388813,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",84,0,"",shellscript,selection_command +2135,2389564,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",83,0,"",shellscript,selection_command +2136,2389696,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",82,0,"",shellscript,selection_command +2137,2390599,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",83,0,"",shellscript,selection_command +2138,2391146,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",84,0,"",shellscript,selection_command +2139,2392026,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",83,1,"",shellscript,content +2140,2392147,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",82,1,"",shellscript,content +2141,2392251,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",82,0,"0",shellscript,content +2142,2392252,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",83,0,"",shellscript,selection_keyboard +2143,2392371,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",83,0,"2",shellscript,content +2144,2392372,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",84,0,"",shellscript,selection_keyboard +2145,2393923,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_mixed_prec_ablation_bs36\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap 
to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-mixed-precision-ablation-bs36-$slurm_job_id \\n --tags coinrun dynamics maskgit mixed-precision-ablation bs36 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2146,2395243,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",108,0,"",shellscript,selection_mouse +2147,2396441,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",84,0,"",shellscript,selection_mouse +2148,2397495,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",83,1,"",shellscript,content +2149,2397629,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",82,1,"",shellscript,content +2150,2397857,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",82,0,"0",shellscript,content +2151,2397858,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",83,0,"",shellscript,selection_keyboard +2152,2397961,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",83,0,"2",shellscript,content +2153,2397962,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",84,0,"",shellscript,selection_keyboard +2154,2399508,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH 
--error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_flash_attn_ablation_bs36\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --no-use-flash-attention \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-flash-attn-ablation-bs36-$slurm_job_id \\n --tags coinrun dynamics maskgit flash-attn-ablation bs36 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2155,2400840,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",84,0,"",shellscript,selection_mouse +2156,2401926,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",83,1,"",shellscript,content +2157,2402057,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",82,1,"",shellscript,content +2158,2402261,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",82,0,"0",shellscript,content +2159,2402262,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",83,0,"",shellscript,selection_keyboard +2160,2402396,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",83,0,"2",shellscript,content +2161,2402397,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",84,0,"",shellscript,selection_keyboard +2162,2403734,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +2163,2404183,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"",shellscript,tab 
+2164,2404644,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +2165,2405520,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",0,0,"",shellscript,tab +2166,2406127,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +2167,2406515,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +2168,2407106,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +2169,2407792,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",0,0,"",shellscript,tab +2170,2408090,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +2171,2408481,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",0,0,"",shellscript,tab +2172,2409037,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",0,0,"",shellscript,tab +2173,2409372,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"",shellscript,tab +2174,2409694,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +2175,2410078,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",0,0,"",shellscript,tab +2176,2422084,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +2177,2423057,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"",shellscript,tab +2178,2429643,"TERMINAL",0,0,"queue",,terminal_command +2179,2429752,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Oct 2 18:29:52 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3538482 accelerat train_dy tum_cte0 CG\t1:19\t 1 hkn06333538479 accelerat train_dy tum_cte0 R\t1:21\t 1 hkn08163538481 accelerat train_dy tum_cte0 R\t1:21\t 1 hkn0633",,terminal_output +2180,2430848,"TERMINAL",0,0,"322",,terminal_output +2181,2431822,"TERMINAL",0,0,"433",,terminal_output +2182,2432991,"TERMINAL",0,0,"544",,terminal_output +2183,2433538,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +2184,2435921,"TERMINAL",0,0,"scancel --me",,terminal_command +2185,2435941,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +2186,2437971,"TERMINAL",0,0,"dev",,terminal_command +2187,2443016,"TERMINAL",0,0,"git branch",,terminal_command +2188,2443092,"TERMINAL",0,0,"]633;C[?1h=\r* ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n 
fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n:",,terminal_output +2189,2445595,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2190,2449625,"TERMINAL",0,0,"sync-runner-2",,terminal_command +2191,2449701,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +2192,2449876,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh\r\n\r\nsent 60,343 bytes received 408 bytes 121,502.00 bytes/sec\r\ntotal size is 28,401,684 speedup is 467.51\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2193,2476010,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh^C",,terminal_command +2194,2476045,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2195,2478492,"TERMINAL",0,0,"sync-runner-2",,terminal_command +2196,2478556,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +2197,2478699,"TERMINAL",0,0,"\r\nsent 40,688 bytes received 254 bytes 81,884.00 bytes/sec\r\ntotal size is 28,401,684 speedup is 693.71\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2198,2480433,"TERMINAL",0,0,"runner-2",,terminal_command +2199,2480472,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +2200,2487828,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",,terminal_command +2201,2487862,"TERMINAL",0,0,"]633;CSubmitted batch job 3538486\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +2202,2491347,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",,terminal_command +2203,2491378,"TERMINAL",0,0,"]633;CSubmitted batch job 3538487\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +2204,2495189,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",,terminal_command +2205,2495233,"TERMINAL",0,0,"]633;CSubmitted batch job 3538488\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +2206,2498992,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",,terminal_command +2207,2499059,"TERMINAL",0,0,"]633;C",,terminal_output +2208,2499134,"TERMINAL",0,0,"Submitted batch job 
3538489\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +2209,2500511,"TERMINAL",0,0,"dev",,terminal_command +2210,2509558,"TERMINAL",0,0,"git branch",,terminal_command +2211,2509614,"TERMINAL",0,0,"]633;C[?1h=\r* ablation/full-precision-training\r\n ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n:",,terminal_output +2212,2512204,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2213,2515290,"TERMINAL",0,0,"git checkout ablation/use-pytorch-dataloader",,terminal_command +2214,2515362,"TERMINAL",0,0,"]633;C",,terminal_output +2215,2515440,"TERMINAL",0,0,"Switched to branch 'ablation/use-pytorch-dataloader'\r\nYour branch is up to date with 'origin/ablation/use-pytorch-dataloader'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2216,2516456,"",0,0,"Switched from branch 'ablation/full-precision-training' to 'ablation/use-pytorch-dataloader'",,git_branch_checkout +2217,2518660,"TERMINAL",0,0,"sync-runner-3",,terminal_command +2218,2518672,"TERMINAL",0,0,"]633;C",,terminal_output +2219,2518780,"TERMINAL",0,0,"sending incremental file list\r\n./\r\nali-old-branch.diff\r\nmessage.md\r\npyproject.toml\r\n",,terminal_output 
+2220,2520183,"TERMINAL",0,0,"jasmine/\r\njasmine/genie.py\r\njasmine/sample.py\r\njasmine/train_dynamics.py\r\njasmine/train_lam.py\r\njasmine/train_tokenizer.py\r\njasmine/models/\r\njasmine/models/dynamics.py\r\njasmine/utils/\r\njasmine/utils/dataloader_torch.py\r\nslurm/dev/\r\nslurm/dev/alfred/berlin/\r\nslurm/dev/alfred/berlin/dataset_investigation/\r\nslurm/dev/alfred/berlin/dataset_investigation/gen_1.sbatch\r\nslurm/dev/alfred/berlin/dataset_investigation/gen_2.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_causal.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_causal_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_full_prec_from_80k_nan_invest_requeue_every2h.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_full_prec_from_80k_nan_invest_requeue_every_10h.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise_from_main.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_no_noise_from_main_w_full_frame.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_180k.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_causal.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_causal_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_maskgit.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias/sample_maskgit_no_noise.sbatch\r\nslurm/dev/alfred/berlin/test_exposure_bias_climber/\r\nslurm/dev/alfred/berlin/test_exposure_bias_climber/sample.sbatch\r\nslurm/dev/franz/\r\nslurm/dev/franz/berlin/\r\nslurm/dev/franz/berlin/coinrun/\r\nslurm/dev/franz/berlin/coinrun/sample/\r\nslurm/dev/franz/berlin/coinrun/sample/causal/\r\nslurm/dev/franz/berlin/coinrun/sample/causal/sample_causal.sh\r\nslurm/dev/franz/berlin/coinrun/sample/causal/sample_noised_causal.sh\r\nslurm/dev/franz/berlin/coinrun/sample/maskgit/\r\nslurm/dev/franz/berlin/coinrun/sample/maskgit/sample_dynamics_from_140k_tokenizer.sh\r\nslurm/dev/franz/berlin/coinrun/sample/maskgit/sample_dynamics_from_fully_trained_tokenizer.sh\r\nslurm/jobs/alfred/berlin/\r\nslurm/jobs/alfred/berlin/coinrun/1m_steps/\r\nslurm/jobs/alfred/berlin/coinrun/1m_steps/sample.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500k_dataset/sample.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/sample.sbatch\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset_climber/\r\nslurm/jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset_climber/sample.sbatch\r\nslurm/jobs/alfred/berlin/workshop/\r\nslurm/jobs/alfred/berlin/workshop/jafar_default/\r\nslurm/jobs/alfred/berlin/workshop/jafar_default/dynamics.sbatch\r\nslurm/jobs/alfred/berlin/workshop/jasmine_default/\r\nslurm/jobs/alfred/berlin/workshop/jasmine_default/dynamics.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_full_precision_nan_invest.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest_jafar_cos_init.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precisio
n_nan/lam_mixed_precision_nan_invest_jafar_cos_init_w_flash_attention.sbatch\r\nslurm/jobs/alfred/berlin/workshop/lam_mixed_precision_nan/lam_mixed_precision_nan_invest_w_flash_attention.sbatch\r\nslurm/jobs/franz/berlin/\r\nslurm/jobs/franz/berlin/atari/\r\nslurm/jobs/franz/berlin/atari/data_generation/\r\nslurm/jobs/franz/berlin/atari/data_generation/alien.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/amidar.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/assault.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/asterix.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/bank_heist.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/battle_zone.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/boxing.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/breakout.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/chopper_command.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/crazy_climber.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/demon_attack.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/pong.sh\r\nslurm/jobs/franz/berlin/atari/data_generation/spawner.sh\r\nslurm/jobs/franz/berlin/atari/legacy_cleanrl_data_generation/\r\nslurm/jobs/franz/berlin/atari/legacy_cleanrl_data_generation/atari_breakout_data_gen.sh\r\nslurm/jobs/franz/berlin/atari/legacy_cleanrl_data_generation/atari_breakout_v4_data_gen.sh\r\nslurm/jobs/franz/berlin/atari/tokenizer/\r\nslurm/jobs/franz/berlin/atari/tokenizer/atari_breakout_tokenizer.sbatch\r\nslurm/jobs/franz/berlin/coinrun/\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_lam_base.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_lam_no_flash_attention.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_tokenizer_base.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh\r\nslurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_main.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_noise-branch.sh\r\nslurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_tokenizer_patch_size_4.sh\r\nslurm/jobs/mihir/horeka/\r\n",,terminal_output 
+2221,2520378,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\nslurm/jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh\r\n",,terminal_output +2222,2520840,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/preprocessing/\r\nslurm/jobs/mihir/horeka/preprocessing/coinrun_chunked.sh\r\nslurm/jobs/mihir/horeka/preprocessing/coinrun_chunked_500m.sh\r\n",,terminal_output +2223,2520907,"TERMINAL",0,0,"\r\nsent 354,389 bytes received 1,990 bytes 142,551.60 bytes/sec\r\ntotal size is 28,398,050 speedup is 79.68\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2224,2529139,"TERMINAL",0,0,"runner-3",,terminal_command +2225,2532719,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh",,terminal_command +2226,2537297,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh",,terminal_command +2227,2537320,"TERMINAL",0,0,"]633;CSubmitted batch job 3538492\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_3",,terminal_output +2228,2540263,"TERMINAL",0,0,"dev",,terminal_command +2229,2545920,"TERMINAL",0,0,"git branch",,terminal_command +2230,2545983,"TERMINAL",0,0,"]633;C[?1h=\r ablation/full-precision-training\r\n* ablation/use-pytorch-dataloader\r\n action-mapper\r\n add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n change-default-parameters\r\n change-default-to-wsd\r\n coinrun-gt-actions\r\n convert-to-jax-array-in-iter\r\n correct-batched-sampling\r\n dev\r\n dont-let-tf-see-gpu\r\n dynamics_coinrun_500m_dataset_29519\r\n feat/darkness-filter\r\n feat/explicit-image-dims\r\n fix-action-padding-lam-future-information-access\r\n fix-sampling\r\n fix-transformer-forwardpass\r\n fix/dyn-restore-after-nnx-upgrade\r\n fix/spatiotemporal-pe-once-in-STTransformer\r\n generate-minatar-breakout-dataset\r\n grad-norm-log-and-clip\r\n grain-dataloader\r\n gt-actions\r\n hotfix/eval-full-frame-fix\r\n hotfix/fix-val-loss-maskgit-masking\r\n hotfix/full-frame-eval-only-calculate-last-frame-metrics\r\n hotfix/sampling-shapes-error\r\n input_pipeline/add-npy2array_record\r\n logging-variants\r\n lr-schedules\r\n main\r\n maskgit-different-maskprob-per-sample\r\n:",,terminal_output +2231,2550829,"TERMINAL",0,0,"\r maskgit-sampling-iterative-unmasking-fix\r\n:",,terminal_output +2232,2551426,"TERMINAL",0,0,"\r metrics-logging-for-dynamics-model\r\n:\r monkey-patch\r\n:\r new-arch-sampling\r\n:\r 
prepend-action-maskgit\r\n:\r preprocess_video\r\n:",,terminal_output +2233,2551745,"TERMINAL",0,0,"\r refactor-full-frame-val-loss\r\n:\r refactor-tmp\r\n:\r remove-restore-branching\r\n:\r revised-dataloader\r\n:\r runner\r\n:\r runner-grain\r\n:\r sample-ali-branch\r\n:\r sample-from-different-topologies\r\n:\r sampling-script-add-metrics\r\n:\r sampling-startframe-indexing-fix\r\n:",,terminal_output +2234,2551822,"TERMINAL",0,0,"\r seeding-data-generation\r\n:\r speedup-tfrecord-preprocessing\r\n:",,terminal_output +2235,2551903,"TERMINAL",0,0,"\r train_lam_coinrun_ablation_wsd_3e-6_28747\r\n:\r val-loss\r\n:\r vizdoom-dataset\r\n:",,terminal_output +2236,2554624,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2237,2557943,"TERMINAL",0,0,"git checkout prepend-action-maskgit",,terminal_command +2238,2558046,"TERMINAL",0,0,"]633;C",,terminal_output +2239,2558068,"TERMINAL",0,0,"Switched to branch 'prepend-action-maskgit'\r\nYour branch is up to date with 'origin/prepend-action-maskgit'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2240,2561463,"",0,0,"Switched from branch 'ablation/use-pytorch-dataloader' to 'prepend-action-maskgit'",,git_branch_checkout +2241,2565681,"TERMINAL",0,0,"sync-runner",,terminal_command +2242,2565749,"TERMINAL",0,0,"]633;Csending incremental file list\r\n",,terminal_output +2243,2565798,"TERMINAL",0,0,"./\r\npyproject.toml\r\n",,terminal_output +2244,2565847,"TERMINAL",0,0,"jasmine/\r\njasmine/train_dynamics.py\r\njasmine/utils/\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh\r\n",,terminal_output +2245,2565931,"TERMINAL",0,0,"\r\nsent 92,559 bytes received 467 bytes 186,052.00 bytes/sec\r\ntotal size is 28,401,685 speedup is 305.31\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +2246,2567875,"TERMINAL",0,0,"runner",,terminal_command +2247,2573552,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh",,terminal_command +2248,2573584,"TERMINAL",0,0,"]633;CSubmitted batch job 3538493\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +2249,2577207,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",,terminal_command +2250,2577252,"TERMINAL",0,0,"]633;CSubmitted batch job 3538494\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +2251,2578397,"TERMINAL",0,0,"queue",,terminal_command +2252,2578441,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Oct 2 18:32:21 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3538494 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3538493 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3538492 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3538490 accelerat train_dy tum_cte0 PD\t0:00\t 1 
(Priority)3538489 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3538488 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3538487 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3538486 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)",,terminal_output +2253,2579491,"TERMINAL",0,0,"2",,terminal_output +2254,2580531,"TERMINAL",0,0,"3",,terminal_output +2255,2581639,"TERMINAL",0,0,"4",,terminal_output +2256,2582945,"TERMINAL",0,0,"5",,terminal_output +2257,2583689,"TERMINAL",0,0,"6",,terminal_output +2258,2584011,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +2259,2585467,"TERMINAL",0,0,"fqueue",,terminal_command +2260,2585533,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue -o ""%.10i %.16P %.30j %.8u %.8T %.10M %.9l %.6D %R""hkn1991.localdomain: Thu Oct 2 18:32:28 2025JOBIDPARTITIONNAME USER STATE\t TIME TIME_LIMI NODES NODELIST(REASON)3538494\taccelerated train_dynamics_maskgit_default tum_cte0 PENDING\t 0:00 2:00:001 (Priority)3538493\taccelerated train_dynamics_maskgit_default tum_cte0 PENDING\t 0:00 2:00:001 (Priority)3538492\taccelerated train_dynamics_maskgit_grain_a tum_cte0 PENDING\t 0:00 2:00:001 (Priority)3538490\taccelerated train_dynamics_maskgit_grain_a tum_cte0 PENDING\t 0:00 2:00:001 (Priority)3538489\taccelerated train_dynamics_maskgit_mixed_p tum_cte0 PENDING\t 0:00 2:00:001 (Priority)3538488\taccelerated train_dynamics_maskgit_flash_a tum_cte0 PENDING\t 0:00 2:00:001 (Priority)3538487\taccelerated train_dynamics_maskgit_flash_a tum_cte0 PENDING\t 0:00 2:00:001 (Priority)3538486\taccelerated train_dynamics_maskgit_mixed_p tum_cte0 PENDING\t 0:00 2:00:001 (Priority)",,terminal_output +2261,2586567,"TERMINAL",0,0,"9",,terminal_output +2262,2587681,"TERMINAL",0,0,"30",,terminal_output +2263,2588633,"TERMINAL",0,0,"1",,terminal_output +2264,2589701,"TERMINAL",0,0,"286mixed_pRUNNhkn081687flash_aRUNNhkn063388flashRUNNhkn063389mixed_pRUNNhkn063490grain_aRUNNhkn072092grainRUNNhkn072093defaultRUNNhkn072194defaultRUNNhkn0724",,terminal_output +2265,2590735,"TERMINAL",0,0,"311111111",,terminal_output +2266,2591773,"TERMINAL",0,0,"422222222",,terminal_output +2267,2592762,"TERMINAL",0,0,"533333333",,terminal_output +2268,2593827,"TERMINAL",0,0,"644444444",,terminal_output +2269,2594955,"TERMINAL",0,0,"755555555",,terminal_output +2270,2595891,"TERMINAL",0,0,"866666666",,terminal_output +2271,2596920,"TERMINAL",0,0,"977777777",,terminal_output +2272,2597940,"TERMINAL",0,0,"4088888888",,terminal_output +2273,2599241,"TERMINAL",0,0,"199999999",,terminal_output +2274,2600081,"TERMINAL",0,0,"21010101010101010",,terminal_output +2275,2601092,"TERMINAL",0,0,"311111111",,terminal_output +2276,2602146,"TERMINAL",0,0,"422222222",,terminal_output +2277,2603145,"TERMINAL",0,0,"533333333",,terminal_output +2278,2604137,"TERMINAL",0,0,"644444444",,terminal_output +2279,2605166,"TERMINAL",0,0,"755555555",,terminal_output +2280,2606227,"TERMINAL",0,0,"866666666",,terminal_output +2281,2607549,"TERMINAL",0,0,"977777777",,terminal_output +2282,2608318,"TERMINAL",0,0,"5088888888",,terminal_output +2283,2609318,"TERMINAL",0,0,"199999999",,terminal_output +2284,2610370,"TERMINAL",0,0,"22020202020202020",,terminal_output +2285,2611449,"TERMINAL",0,0,"322222222",,terminal_output +2286,2612427,"TERMINAL",0,0,"533333333",,terminal_output +2287,2613487,"TERMINAL",0,0,"644444444",,terminal_output +2288,2614622,"TERMINAL",0,0,"755555555",,terminal_output +2289,2615506,"TERMINAL",0,0,"866666666",,terminal_output 
+2290,2616539,"TERMINAL",0,0,"977777777",,terminal_output +2291,2617574,"TERMINAL",0,0,"3:0088888888",,terminal_output +2292,2618594,"TERMINAL",0,0,"199999999",,terminal_output +2293,2619708,"TERMINAL",0,0,"23030303030303030",,terminal_output +2294,2620836,"TERMINAL",0,0,"311111111",,terminal_output +2295,2621781,"TERMINAL",0,0,"422222222",,terminal_output +2296,2622752,"TERMINAL",0,0,"533333333",,terminal_output +2297,2623848,"TERMINAL",0,0,"644444444",,terminal_output +2298,2624815,"TERMINAL",0,0,"755555555",,terminal_output +2299,2625832,"TERMINAL",0,0,"866666666",,terminal_output +2300,2626927,"TERMINAL",0,0,"977777777",,terminal_output +2301,2627949,"TERMINAL",0,0,"1088888888",,terminal_output +2302,2628963,"TERMINAL",0,0,"199999999",,terminal_output +2303,2630084,"TERMINAL",0,0,"24040404040404040",,terminal_output +2304,2630999,"TERMINAL",0,0,"311111111",,terminal_output +2305,2632026,"TERMINAL",0,0,"422222222",,terminal_output +2306,2633071,"TERMINAL",0,0,"533333333",,terminal_output +2307,2634099,"TERMINAL",0,0,"644444444",,terminal_output +2308,2635130,"TERMINAL",0,0,"755555555",,terminal_output +2309,2636208,"TERMINAL",0,0,"866666666",,terminal_output +2310,2637240,"TERMINAL",0,0,"977777777",,terminal_output +2311,2638265,"TERMINAL",0,0,"2088888888",,terminal_output +2312,2639288,"TERMINAL",0,0,"199999999",,terminal_output +2313,2640530,"TERMINAL",0,0,"25050505050505050",,terminal_output +2314,2641394,"TERMINAL",0,0,"322222222",,terminal_output +2315,2642478,"TERMINAL",0,0,"533333333",,terminal_output +2316,2643484,"TERMINAL",0,0,"644444444",,terminal_output +2317,2644456,"TERMINAL",0,0,"755555555",,terminal_output +2318,2645494,"TERMINAL",0,0,"866666666",,terminal_output +2319,2646551,"TERMINAL",0,0,"977777777",,terminal_output +2320,2647557,"TERMINAL",0,0,"3088888888",,terminal_output +2321,2648592,"TERMINAL",0,0,"199999999",,terminal_output +2322,2649623,"TERMINAL",0,0,"21:001:001:001:001:001:001:001:00",,terminal_output +2323,2651205,"TERMINAL",0,0,"311111111",,terminal_output +2324,2651865,"TERMINAL",0,0,"422222222",,terminal_output +2325,2652736,"TERMINAL",0,0,"533333333",,terminal_output +2326,2653836,"TERMINAL",0,0,"6COMPLETI4444444",,terminal_output +2327,2655071,"TERMINAL",0,0,"75555555",,terminal_output +2328,2655819,"TERMINAL",0,0,"86666666",,terminal_output +2329,2656909,"TERMINAL",0,0,"97777777",,terminal_output +2330,2657923,"TERMINAL",0,0,"408888888",,terminal_output +2331,2658945,"TERMINAL",0,0,"19999999",,terminal_output +2332,2660035,"TERMINAL",0,0,"210101010101010",,terminal_output +2333,2661111,"TERMINAL",0,0,"31111111",,terminal_output +2334,2662148,"TERMINAL",0,0,"42222222",,terminal_output +2335,2663150,"TERMINAL",0,0,"59mixed_pCOMPLETI4738flash_a333333",,terminal_output +2336,2664297,"TERMINAL",0,0,"\r6444444",,terminal_output +2337,2665133,"TERMINAL",0,0,"7555555",,terminal_output +2338,2666168,"TERMINAL",0,0,"8666666",,terminal_output +2339,2667215,"TERMINAL",0,0,"9777777",,terminal_output +2340,2668241,"TERMINAL",0,0,"50888888",,terminal_output +2341,2669358,"TERMINAL",0,0,"1999999",,terminal_output +2342,2670316,"TERMINAL",0,0,"2202020202020",,terminal_output +2343,2671447,"TERMINAL",0,0,"3111111",,terminal_output +2344,2672467,"TERMINAL",0,0,"5333333",,terminal_output +2345,2673492,"TERMINAL",0,0,"\r67flash_a RUNNING2438490grain4720243default41444",,terminal_output +2346,2674434,"TERMINAL",0,0,"7555555",,terminal_output +2347,2675466,"TERMINAL",0,0,"8666666",,terminal_output +2348,2676505,"TERMINAL",0,0,"9777777",,terminal_output 
+2349,2677533,"TERMINAL",0,0,"4:00888888",,terminal_output +2350,2678587,"TERMINAL",0,0,"1999999",,terminal_output +2351,2679598,"TERMINAL",0,0,"2303030303030",,terminal_output +2352,2680633,"TERMINAL",0,0,"3111111",,terminal_output +2353,2681667,"TERMINAL",0,0,"4222222",,terminal_output +2354,2682705,"TERMINAL",0,0,"5333333",,terminal_output +2355,2683729,"TERMINAL",0,0,"6444444",,terminal_output +2356,2684855,"TERMINAL",0,0,"7555555",,terminal_output +2357,2685800,"TERMINAL",0,0,"8666666",,terminal_output +2358,2686835,"TERMINAL",0,0,"9777777",,terminal_output +2359,2687865,"TERMINAL",0,0,"10888888",,terminal_output +2360,2688916,"TERMINAL",0,0,"1999999",,terminal_output +2361,2689942,"TERMINAL",0,0,"2404040404040",,terminal_output +2362,2691015,"TERMINAL",0,0,"3111111",,terminal_output +2363,2692061,"TERMINAL",0,0,"4222222",,terminal_output +2364,2693150,"TERMINAL",0,0,"5333333",,terminal_output +2365,2694154,"TERMINAL",0,0,"6444444",,terminal_output +2366,2695133,"TERMINAL",0,0,"7555555",,terminal_output +2367,2696167,"TERMINAL",0,0,"8666666",,terminal_output +2368,2697203,"TERMINAL",0,0,"9777777",,terminal_output +2369,2698222,"TERMINAL",0,0,"20888888",,terminal_output +2370,2699313,"TERMINAL",0,0,"1999999",,terminal_output +2371,2700287,"TERMINAL",0,0,"2505050505050",,terminal_output +2372,2701307,"TERMINAL",0,0,"3111111",,terminal_output +2373,2702387,"TERMINAL",0,0,"4333333",,terminal_output +2374,2703389,"TERMINAL",0,0,"6444444",,terminal_output +2375,2704541,"TERMINAL",0,0,"7555555",,terminal_output +2376,2705457,"TERMINAL",0,0,"8666666",,terminal_output +2377,2706478,"TERMINAL",0,0,"9777777",,terminal_output +2378,2707523,"TERMINAL",0,0,"30888888",,terminal_output +2379,2708541,"TERMINAL",0,0,"1999999",,terminal_output +2380,2709570,"TERMINAL",0,0,"22:002:002:002:002:002:00",,terminal_output +2381,2710613,"TERMINAL",0,0,"3111111",,terminal_output +2382,2711689,"TERMINAL",0,0,"4222222",,terminal_output +2383,2712710,"TERMINAL",0,0,"5333333",,terminal_output +2384,2713734,"TERMINAL",0,0,"6444444",,terminal_output +2385,2714734,"TERMINAL",0,0,"7555555",,terminal_output +2386,2715767,"TERMINAL",0,0,"8666666",,terminal_output +2387,2716801,"TERMINAL",0,0,"9777777",,terminal_output +2388,2717856,"TERMINAL",0,0,"40888888",,terminal_output +2389,2718871,"TERMINAL",0,0,"1999999",,terminal_output +2390,2719902,"TERMINAL",0,0,"2101010101010",,terminal_output +2391,2720937,"TERMINAL",0,0,"3111111",,terminal_output +2392,2721972,"TERMINAL",0,0,"4222222",,terminal_output +2393,2723019,"TERMINAL",0,0,"5333333",,terminal_output +2394,2724037,"TERMINAL",0,0,"6444444",,terminal_output +2395,2725075,"TERMINAL",0,0,"7555555",,terminal_output +2396,2726504,"TERMINAL",0,0,"8666666",,terminal_output +2397,2727320,"TERMINAL",0,0,"9777777",,terminal_output +2398,2728550,"TERMINAL",0,0,"50888888",,terminal_output +2399,2729232,"TERMINAL",0,0,"1999999",,terminal_output +2400,2730327,"TERMINAL",0,0,"2202020202020",,terminal_output +2401,2731447,"TERMINAL",0,0,"3111111",,terminal_output +2402,2732434,"TERMINAL",0,0,"4222222",,terminal_output +2403,2733405,"TERMINAL",0,0,"5444444",,terminal_output +2404,2734476,"TERMINAL",0,0,"7555555",,terminal_output +2405,2737259,"TERMINAL",0,0,"86666669777777",,terminal_output +2406,2737694,"TERMINAL",0,0,"5:00888888",,terminal_output +2407,2738656,"TERMINAL",0,0,"1999999",,terminal_output +2408,2739679,"TERMINAL",0,0,"2303030303030",,terminal_output +2409,2740677,"TERMINAL",0,0,"3111111",,terminal_output +2410,2741667,"TERMINAL",0,0,"4222222",,terminal_output 
+2411,2742673,"TERMINAL",0,0,"5333333",,terminal_output +2412,2743705,"TERMINAL",0,0,"6444444",,terminal_output +2413,2744788,"TERMINAL",0,0,"7555555",,terminal_output +2414,2745770,"TERMINAL",0,0,"8666666",,terminal_output +2415,2746799,"TERMINAL",0,0,"9777777",,terminal_output +2416,2747837,"TERMINAL",0,0,"10888888",,terminal_output +2417,2748961,"TERMINAL",0,0,"1999999",,terminal_output +2418,2749982,"TERMINAL",0,0,"2404040404040",,terminal_output +2419,2750954,"TERMINAL",0,0,"3111111",,terminal_output +2420,2751977,"TERMINAL",0,0,"4222222",,terminal_output +2421,2753010,"TERMINAL",0,0,"5333333",,terminal_output +2422,2754054,"TERMINAL",0,0,"6444444",,terminal_output +2423,2755090,"TERMINAL",0,0,"7555555",,terminal_output +2424,2756126,"TERMINAL",0,0,"8666666",,terminal_output +2425,2757161,"TERMINAL",0,0,"9777777",,terminal_output +2426,2758194,"TERMINAL",0,0,"20888888",,terminal_output +2427,2759232,"TERMINAL",0,0,"1999999",,terminal_output +2428,2760265,"TERMINAL",0,0,"2505050505050",,terminal_output +2429,2761299,"TERMINAL",0,0,"3111111",,terminal_output +2430,2762336,"TERMINAL",0,0,"4222222",,terminal_output +2431,2763377,"TERMINAL",0,0,"6444444",,terminal_output +2432,2764413,"TERMINAL",0,0,"7555555",,terminal_output +2433,2765451,"TERMINAL",0,0,"8666666",,terminal_output +2434,2766479,"TERMINAL",0,0,"9777777",,terminal_output +2435,2767515,"TERMINAL",0,0,"30888888",,terminal_output +2436,2768552,"TERMINAL",0,0,"1999999",,terminal_output +2437,2769584,"TERMINAL",0,0,"23:003:003:003:003:003:00",,terminal_output +2438,2770618,"TERMINAL",0,0,"3111111",,terminal_output +2439,2771653,"TERMINAL",0,0,"4222222",,terminal_output +2440,2772716,"TERMINAL",0,0,"5333333",,terminal_output +2441,2773719,"TERMINAL",0,0,"6444444",,terminal_output +2442,2774755,"TERMINAL",0,0,"7555555",,terminal_output +2443,2775793,"TERMINAL",0,0,"8666666",,terminal_output +2444,2776832,"TERMINAL",0,0,"9777777",,terminal_output +2445,2777865,"TERMINAL",0,0,"40888888",,terminal_output +2446,2778898,"TERMINAL",0,0,"1999999",,terminal_output +2447,2779934,"TERMINAL",0,0,"2101010101010",,terminal_output +2448,2781014,"TERMINAL",0,0,"3111111",,terminal_output +2449,2782001,"TERMINAL",0,0,"4222222",,terminal_output +2450,2783039,"TERMINAL",0,0,"5333333",,terminal_output +2451,2784082,"TERMINAL",0,0,"6444444",,terminal_output +2452,2785209,"TERMINAL",0,0,"7555555",,terminal_output +2453,2786232,"TERMINAL",0,0,"8666666",,terminal_output +2454,2787181,"TERMINAL",0,0,"9777777",,terminal_output +2455,2788216,"TERMINAL",0,0,"50888888",,terminal_output +2456,2789247,"TERMINAL",0,0,"1999999",,terminal_output +2457,2790319,"TERMINAL",0,0,"2202020202020",,terminal_output +2458,2791337,"TERMINAL",0,0,"3111111",,terminal_output +2459,2792377,"TERMINAL",0,0,"4333333",,terminal_output +2460,2793388,"TERMINAL",0,0,"6444444",,terminal_output +2461,2794417,"TERMINAL",0,0,"7555555",,terminal_output +2462,2794805,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=02:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_mixed_prec_ablation_bs36\n#SBATCH --requeue\n#SBATCH 
--signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-mixed-precision-ablation-bs36-$slurm_job_id \\n --tags coinrun dynamics maskgit mixed-precision-ablation bs36 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2463,2795536,"TERMINAL",0,0,"8666666",,terminal_output +2464,2796516,"TERMINAL",0,0,"9777777",,terminal_output +2465,2797606,"TERMINAL",0,0,"6:00888888",,terminal_output +2466,2798568,"TERMINAL",0,0,"1999999",,terminal_output +2467,2799586,"TERMINAL",0,0,"2303030303030",,terminal_output +2468,2800681,"TERMINAL",0,0,"3111111",,terminal_output +2469,2800889,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=02:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_flash_attn_ablation_bs36\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n 
restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --no-use-flash-attention \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-flash-attn-ablation-bs36-$slurm_job_id \\n --tags coinrun dynamics maskgit flash-attn-ablation bs36 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2470,2801673,"TERMINAL",0,0,"4222222",,terminal_output +2471,2802742,"TERMINAL",0,0,"5333333",,terminal_output +2472,2803752,"TERMINAL",0,0,"6444444",,terminal_output +2473,2804761,"TERMINAL",0,0,"7555555",,terminal_output +2474,2805796,"TERMINAL",0,0,"8666666",,terminal_output +2475,2806130,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +2476,2806847,"TERMINAL",0,0,"9777777",,terminal_output +2477,2807789,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",0,0,"",shellscript,tab +2478,2808264,"TERMINAL",0,0,"10888888",,terminal_output +2479,2808939,"TERMINAL",0,0,"1999999",,terminal_output +2480,2809533,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_flash_attn_ablation.sh",2091,0,"",shellscript,selection_mouse +2481,2809954,"TERMINAL",0,0,"2404040404040",,terminal_output +2482,2811318,"TERMINAL",0,0,"3111111",,terminal_output +2483,2812036,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +2484,2812400,"TERMINAL",0,0,"4222222",,terminal_output +2485,2812866,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2091,0,"",shellscript,selection_mouse +2486,2813081,"TERMINAL",0,0,"5333333",,terminal_output +2487,2813681,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2113,0,"\n --no-use-flash-attention \",shellscript,content +2488,2813683,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",2118,0,"",shellscript,selection_command +2489,2814106,"TERMINAL",0,0,"6444444",,terminal_output +2490,2815159,"TERMINAL",0,0,"7555555",,terminal_output +2491,2816202,"TERMINAL",0,0,"8666666",,terminal_output +2492,2817281,"TERMINAL",0,0,"9777777",,terminal_output +2493,2818548,"TERMINAL",0,0,"20888888",,terminal_output +2494,2819371,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH 
--ntasks-per-node=1\n#SBATCH --time=02:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_mixed_prec_ablation_bs110\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-mixed-precision-ablation-bs110-$slurm_job_id \\n --tags coinrun dynamics maskgit mixed-precision-ablation bs110 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +2495,2819714,"TERMINAL",0,0,"1999999",,terminal_output +2496,2820407,"TERMINAL",0,0,"2505050505050",,terminal_output +2497,2821372,"TERMINAL",0,0,"3111111",,terminal_output +2498,2821713,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2052,0,"",shellscript,selection_mouse +2499,2822273,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2077,0,"",shellscript,selection_mouse +2500,2822276,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2076,0,"",shellscript,selection_command +2501,2822310,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2076,1,"\",shellscript,selection_mouse +2502,2822327,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2077,0,"",shellscript,selection_command +2503,2822404,"TERMINAL",0,0,"5333333",,terminal_output 
+2504,2823349,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2076,0,"",shellscript,selection_command +2505,2823413,"TERMINAL",0,0,"6444444",,terminal_output +2506,2823811,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2077,0,"\n --no-use-flash-attention \",shellscript,content +2507,2823833,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2082,0,"",shellscript,selection_command +2508,2824496,"TERMINAL",0,0,"7555555",,terminal_output +2509,2826113,"TERMINAL",0,0,"8666666",,terminal_output +2510,2826587,"TERMINAL",0,0,"9777777",,terminal_output +2511,2827597,"TERMINAL",0,0,"30888888",,terminal_output +2512,2828630,"TERMINAL",0,0,"1999999",,terminal_output +2513,2829719,"TERMINAL",0,0,"24:004:004:004:004:004:00",,terminal_output +2514,2830671,"TERMINAL",0,0,"3111111",,terminal_output +2515,2831777,"TERMINAL",0,0,"4222222",,terminal_output +2516,2832746,"TERMINAL",0,0,"5333333",,terminal_output +2517,2833797,"TERMINAL",0,0,"6444444",,terminal_output +2518,2834812,"TERMINAL",0,0,"7555555",,terminal_output +2519,2835844,"TERMINAL",0,0,"8666666",,terminal_output +2520,2836900,"TERMINAL",0,0,"9777777",,terminal_output +2521,2837942,"TERMINAL",0,0,"40888888",,terminal_output +2522,2838993,"TERMINAL",0,0,"1999999",,terminal_output +2523,2840101,"TERMINAL",0,0,"2101010101010",,terminal_output +2524,2841050,"TERMINAL",0,0,"3111111",,terminal_output +2525,2842074,"TERMINAL",0,0,"4222222",,terminal_output +2526,2843098,"TERMINAL",0,0,"5333333",,terminal_output +2527,2844160,"TERMINAL",0,0,"6444444",,terminal_output +2528,2845170,"TERMINAL",0,0,"7555555",,terminal_output +2529,2846228,"TERMINAL",0,0,"8666666",,terminal_output +2530,2847239,"TERMINAL",0,0,"9777777",,terminal_output +2531,2848281,"TERMINAL",0,0,"50888888",,terminal_output +2532,2849311,"TERMINAL",0,0,"1999999",,terminal_output +2533,2850347,"TERMINAL",0,0,"2212121212121",,terminal_output +2534,2851386,"TERMINAL",0,0,"4222222",,terminal_output +2535,2852485,"TERMINAL",0,0,"5333333",,terminal_output +2536,2853458,"TERMINAL",0,0,"6444444",,terminal_output +2537,2854520,"TERMINAL",0,0,"7555555",,terminal_output +2538,2855521,"TERMINAL",0,0,"8666666",,terminal_output +2539,2856562,"TERMINAL",0,0,"9777777",,terminal_output +2540,2857601,"TERMINAL",0,0,"7:00888888",,terminal_output +2541,2858629,"TERMINAL",0,0,"1999999",,terminal_output +2542,2859664,"TERMINAL",0,0,"294defaultCOMPLETI72473088flash306330302grain_a3003301",,terminal_output +2543,2860709,"TERMINAL",0,0,"311111",,terminal_output +2544,2861778,"TERMINAL",0,0,"422222",,terminal_output +2545,2862773,"TERMINAL",0,0,"533333",,terminal_output +2546,2863810,"TERMINAL",0,0,"644444",,terminal_output +2547,2864879,"TERMINAL",0,0,"755555",,terminal_output +2548,2865880,"TERMINAL",0,0,"866666",,terminal_output +2549,2866919,"TERMINAL",0,0,"977777",,terminal_output +2550,2867953,"TERMINAL",0,0,"1088888",,terminal_output +2551,2868992,"TERMINAL",0,0,"199999",,terminal_output +2552,2870047,"TERMINAL",0,0,"\r287flash_a RUNNING4063384090grain407202403default401",,terminal_output +2553,2871054,"TERMINAL",0,0,"311111",,terminal_output +2554,2872114,"TERMINAL",0,0,"422222",,terminal_output +2555,2873125,"TERMINAL",0,0,"533333",,terminal_output +2556,2874162,"TERMINAL",0,0,"644444",,terminal_output +2557,2875219,"TERMINAL",0,0,"755555",,terminal_output +2558,2876245,"TERMINAL",0,0,"866666",,terminal_output 
+2559,2877370,"TERMINAL",0,0,"977777",,terminal_output +2560,2878299,"TERMINAL",0,0,"2088888",,terminal_output +2561,2879415,"TERMINAL",0,0,"199999",,terminal_output +2562,2880373,"TERMINAL",0,0,"25151515151",,terminal_output +2563,2881395,"TERMINAL",0,0,"422222",,terminal_output +2564,2882433,"TERMINAL",0,0,"533333",,terminal_output +2565,2883482,"TERMINAL",0,0,"644444",,terminal_output +2566,2884535,"TERMINAL",0,0,"755555",,terminal_output +2567,2885562,"TERMINAL",0,0,"866666",,terminal_output +2568,2886585,"TERMINAL",0,0,"977777",,terminal_output +2569,2887606,"TERMINAL",0,0,"3088888",,terminal_output +2570,2888636,"TERMINAL",0,0,"199999",,terminal_output +2571,2889671,"TERMINAL",0,0,"25:005:005:005:005:00",,terminal_output +2572,2890704,"TERMINAL",0,0,"311111",,terminal_output +2573,2891773,"TERMINAL",0,0,"422222",,terminal_output +2574,2892775,"TERMINAL",0,0,"533333",,terminal_output +2575,2893807,"TERMINAL",0,0,"644444",,terminal_output +2576,2894891,"TERMINAL",0,0,"755555",,terminal_output +2577,2895914,"TERMINAL",0,0,"866666",,terminal_output +2578,2897164,"TERMINAL",0,0,"977777",,terminal_output +2579,2897970,"TERMINAL",0,0,"4088888",,terminal_output +2580,2899010,"TERMINAL",0,0,"199999",,terminal_output +2581,2900037,"TERMINAL",0,0,"21010101010",,terminal_output +2582,2901101,"TERMINAL",0,0,"311111",,terminal_output +2583,2902175,"TERMINAL",0,0,"422222",,terminal_output +2584,2903174,"TERMINAL",0,0,"533333",,terminal_output +2585,2904200,"TERMINAL",0,0,"644444",,terminal_output +2586,2905224,"TERMINAL",0,0,"755555",,terminal_output +2587,2906348,"TERMINAL",0,0,"866666",,terminal_output +2588,2907286,"TERMINAL",0,0,"977777",,terminal_output +2589,2908316,"TERMINAL",0,0,"5088888",,terminal_output +2590,2909352,"TERMINAL",0,0,"12020202020",,terminal_output +2591,2910389,"TERMINAL",0,0,"311111",,terminal_output +2592,2911416,"TERMINAL",0,0,"422222",,terminal_output +2593,2912450,"TERMINAL",0,0,"533333",,terminal_output +2594,2913497,"TERMINAL",0,0,"644444",,terminal_output +2595,2914551,"TERMINAL",0,0,"755555",,terminal_output +2596,2915560,"TERMINAL",0,0,"866666",,terminal_output +2597,2916596,"TERMINAL",0,0,"977777",,terminal_output +2598,2917637,"TERMINAL",0,0,"8:0088888",,terminal_output +2599,2918707,"TERMINAL",0,0,"199999",,terminal_output +2600,2919707,"TERMINAL",0,0,"23030303030",,terminal_output +2601,2920736,"TERMINAL",0,0,"311111",,terminal_output +2602,2921780,"TERMINAL",0,0,"422222",,terminal_output +2603,2922806,"TERMINAL",0,0,"533333",,terminal_output +2604,2923837,"TERMINAL",0,0,"644444",,terminal_output +2605,2924896,"TERMINAL",0,0,"755555",,terminal_output +2606,2925914,"TERMINAL",0,0,"866666",,terminal_output +2607,2926937,"TERMINAL",0,0,"977777",,terminal_output +2608,2927997,"TERMINAL",0,0,"1088888",,terminal_output +2609,2933710,"TERMINAL",0,0,"19999924040404040311111422222533333",,terminal_output +2610,2934180,"TERMINAL",0,0,"644444",,terminal_output +2611,2935236,"TERMINAL",0,0,"755555",,terminal_output +2612,2936333,"TERMINAL",0,0,"866666",,terminal_output +2613,2937282,"TERMINAL",0,0,"977777",,terminal_output +2614,2938402,"TERMINAL",0,0,"2088888",,terminal_output +2615,2939350,"TERMINAL",0,0,"15050505050",,terminal_output +2616,2940383,"TERMINAL",0,0,"311111",,terminal_output +2617,2941481,"TERMINAL",0,0,"422222",,terminal_output +2618,2942499,"TERMINAL",0,0,"533333",,terminal_output +2619,2943488,"TERMINAL",0,0,"644444",,terminal_output +2620,2944516,"TERMINAL",0,0,"755555",,terminal_output +2621,2945576,"TERMINAL",0,0,"866666",,terminal_output 
+2622,2946594,"TERMINAL",0,0,"977777",,terminal_output +2623,2947617,"TERMINAL",0,0,"3088888",,terminal_output +2624,2948649,"TERMINAL",0,0,"199999",,terminal_output +2625,2949681,"TERMINAL",0,0,"26:006:006:006:006:00",,terminal_output +2626,2950717,"TERMINAL",0,0,"311111",,terminal_output +2627,2951793,"TERMINAL",0,0,"422222",,terminal_output +2628,2952840,"TERMINAL",0,0,"533333",,terminal_output +2629,2953814,"TERMINAL",0,0,"644444",,terminal_output +2630,2954845,"TERMINAL",0,0,"755555",,terminal_output +2631,2955903,"TERMINAL",0,0,"866666",,terminal_output +2632,2956931,"TERMINAL",0,0,"977777",,terminal_output +2633,2957970,"TERMINAL",0,0,"4088888",,terminal_output +2634,2958995,"TERMINAL",0,0,"199999",,terminal_output +2635,2960015,"TERMINAL",0,0,"21010101010",,terminal_output +2636,2961244,"TERMINAL",0,0,"311111",,terminal_output +2637,2962077,"TERMINAL",0,0,"422222",,terminal_output +2638,2963114,"TERMINAL",0,0,"533333",,terminal_output +2639,2964167,"TERMINAL",0,0,"693defaultCOMPLETI47217488flash4633042grain_a40",,terminal_output +2640,2965178,"TERMINAL",0,0,"75555",,terminal_output +2641,2966229,"TERMINAL",0,0,"86666",,terminal_output +2642,2967246,"TERMINAL",0,0,"97777",,terminal_output +2643,2967739,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +2644,2970417,"TERMINAL",0,0,"runner2",,terminal_command +2645,2970484,"TERMINAL",0,0,"]633;Cbash: runner2: command not found...\r\n",,terminal_output +2646,2971397,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs",,terminal_output +2647,2973010,"TERMINAL",0,0,"runner-2",,terminal_command +2648,2985392,"TERMINAL",0,0,"cat jasmine/train_dynamics.py | grep dtype",,terminal_command +2649,3029852,"TERMINAL",0,0,"sync-runner-2",,terminal_command +2650,3029932,"TERMINAL",0,0,"]633;Csending incremental file list\r\n./\r\npyproject.toml\r\n",,terminal_output +2651,3030080,"TERMINAL",0,0,"jasmine/\r\njasmine/train_dynamics.py\r\njasmine/utils/\r\nslurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh\r\nslurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh\r\n\r\nsent 77,925 bytes received 353 bytes 156,556.00 bytes/sec\r\ntotal size is 28,401,747 speedup is 362.83\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +2652,3036605,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh",,terminal_command +2653,3041796,"TERMINAL",0,0,"sbatch slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",,terminal_command +2654,3041858,"TERMINAL",0,0,"]633;CSubmitted batch job 3538500\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine_jobs_2",,terminal_output +2655,3046483,"TERMINAL",0,0,"dev",,terminal_command +2656,3047268,"TERMINAL",0,0,"queue",,terminal_command +2657,3047365,"TERMINAL",0,0,"]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Thu Oct 2 18:40:09 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3538500 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3538499 accelerat train_dy tum_cte0 PD\t0:00\t 1 (Priority)3538487 accelerat train_dy tum_cte0 R\t7:37\t 1 hkn06333538488 accelerat train_dy tum_cte0 R\t7:37\t 1 hkn0633",,terminal_output +2658,3048361,"TERMINAL",0,0,"1099",,terminal_output +2659,3049504,"TERMINAL",0,0,"24040",,terminal_output +2660,3050438,"TERMINAL",0,0,"311",,terminal_output 
+2661,3051478,"TERMINAL",0,0,"422",,terminal_output +2662,3052526,"TERMINAL",0,0,"533",,terminal_output +2663,3053598,"TERMINAL",0,0,"644",,terminal_output +2664,3054669,"TERMINAL",0,0,"755",,terminal_output +2665,3055683,"TERMINAL",0,0,"866",,terminal_output +2666,3056685,"TERMINAL",0,0,"9487CG\t7:46hkn063350099PD\t0:00(Priority)7",,terminal_output +2667,3057717,"TERMINAL",0,0,"208",,terminal_output +2668,3058756,"TERMINAL",0,0,"1488CG\t7:48hkn063350099PD\t0:00(Priority)",,terminal_output +2669,3059798,"TERMINAL",0,0,"2",,terminal_output +2670,3060839,"TERMINAL",0,0,"3",,terminal_output +2671,3061905,"TERMINAL",0,0,"4",,terminal_output +2672,3062922,"TERMINAL",0,0,"5",,terminal_output +2673,3063965,"TERMINAL",0,0,"6",,terminal_output +2674,3065031,"TERMINAL",0,0,"7",,terminal_output +2675,3066057,"TERMINAL",0,0,"8",,terminal_output +2676,3067118,"TERMINAL",0,0,"\r9",,terminal_output +2677,3068224,"TERMINAL",0,0,"\r30",,terminal_output +2678,3069198,"TERMINAL",0,0,"1",,terminal_output +2679,3070231,"TERMINAL",0,0,"2499 Rhkn0816500 Rhkn0633",,terminal_output +2680,3071275,"TERMINAL",0,0,"311",,terminal_output +2681,3072441,"TERMINAL",0,0,"422",,terminal_output +2682,3073362,"TERMINAL",0,0,"544",,terminal_output +2683,3074413,"TERMINAL",0,0,"755",,terminal_output +2684,3075516,"TERMINAL",0,0,"866",,terminal_output +2685,3076549,"TERMINAL",0,0,"977",,terminal_output +2686,3077543,"TERMINAL",0,0,"4088",,terminal_output +2687,3078590,"TERMINAL",0,0,"199",,terminal_output +2688,3079643,"TERMINAL",0,0,"21010",,terminal_output +2689,3080688,"TERMINAL",0,0,"311",,terminal_output +2690,3081748,"TERMINAL",0,0,"422",,terminal_output +2691,3082776,"TERMINAL",0,0,"533",,terminal_output +2692,3083817,"TERMINAL",0,0,"644",,terminal_output +2693,3084858,"TERMINAL",0,0,"755",,terminal_output +2694,3085903,"TERMINAL",0,0,"866",,terminal_output +2695,3086937,"TERMINAL",0,0,"977",,terminal_output +2696,3088012,"TERMINAL",0,0,"5088",,terminal_output +2697,3089137,"TERMINAL",0,0,"199",,terminal_output +2698,3090069,"TERMINAL",0,0,"22020",,terminal_output +2699,3091128,"TERMINAL",0,0,"311",,terminal_output +2700,3092155,"TERMINAL",0,0,"422",,terminal_output +2701,3093232,"TERMINAL",0,0,"533",,terminal_output +2702,3094234,"TERMINAL",0,0,"644",,terminal_output +2703,3095322,"TERMINAL",0,0,"755",,terminal_output +2704,3096381,"TERMINAL",0,0,"877",,terminal_output +2705,3097456,"TERMINAL",0,0,"1:0088",,terminal_output +2706,3098453,"TERMINAL",0,0,"199",,terminal_output +2707,3099613,"TERMINAL",0,0,"23030",,terminal_output +2708,3100516,"TERMINAL",0,0,"311",,terminal_output +2709,3101632,"TERMINAL",0,0,"422",,terminal_output +2710,3102651,"TERMINAL",0,0,"533",,terminal_output +2711,3103677,"TERMINAL",0,0,"644",,terminal_output +2712,3104672,"TERMINAL",0,0,"755",,terminal_output +2713,3105705,"TERMINAL",0,0,"866",,terminal_output +2714,3106782,"TERMINAL",0,0,"977",,terminal_output +2715,3107785,"TERMINAL",0,0,"1088",,terminal_output +2716,3108826,"TERMINAL",0,0,"199",,terminal_output +2717,3109866,"TERMINAL",0,0,"24040",,terminal_output +2718,3110950,"TERMINAL",0,0,"311",,terminal_output +2719,3111949,"TERMINAL",0,0,"422",,terminal_output +2720,3113098,"TERMINAL",0,0,"533",,terminal_output +2721,3114131,"TERMINAL",0,0,"644",,terminal_output +2722,3115068,"TERMINAL",0,0,"755",,terminal_output +2723,3116106,"TERMINAL",0,0,"866",,terminal_output +2724,3117147,"TERMINAL",0,0,"977",,terminal_output +2725,3118188,"TERMINAL",0,0,"2088",,terminal_output +2726,3119229,"TERMINAL",0,0,"199",,terminal_output 
+2727,3120271,"TERMINAL",0,0,"25050",,terminal_output +2728,3121336,"TERMINAL",0,0,"311",,terminal_output +2729,3122415,"TERMINAL",0,0,"433",,terminal_output +2730,3123476,"TERMINAL",0,0,"644",,terminal_output +2731,3124424,"TERMINAL",0,0,"755",,terminal_output +2732,3125469,"TERMINAL",0,0,"866",,terminal_output +2733,3126514,"TERMINAL",0,0,"977",,terminal_output +2734,3127632,"TERMINAL",0,0,"3088",,terminal_output +2735,3128660,"TERMINAL",0,0,"199",,terminal_output +2736,3129690,"TERMINAL",0,0,"21:001:00",,terminal_output +2737,3130683,"TERMINAL",0,0,"311",,terminal_output +2738,3131724,"TERMINAL",0,0,"422",,terminal_output +2739,3132766,"TERMINAL",0,0,"533",,terminal_output +2740,3133828,"TERMINAL",0,0,"644",,terminal_output +2741,3134845,"TERMINAL",0,0,"755",,terminal_output +2742,3135888,"TERMINAL",0,0,"866",,terminal_output +2743,3136924,"TERMINAL",0,0,"977",,terminal_output +2744,3137981,"TERMINAL",0,0,"4088",,terminal_output +2745,3139013,"TERMINAL",0,0,"199",,terminal_output +2746,3140057,"TERMINAL",0,0,"21010",,terminal_output +2747,3141110,"TERMINAL",0,0,"311",,terminal_output +2748,3142176,"TERMINAL",0,0,"422",,terminal_output +2749,3143200,"TERMINAL",0,0,"533",,terminal_output +2750,3144327,"TERMINAL",0,0,"644",,terminal_output +2751,3145352,"TERMINAL",0,0,"755",,terminal_output +2752,3146298,"TERMINAL",0,0,"866",,terminal_output +2753,3147336,"TERMINAL",0,0,"988",,terminal_output +2754,3148378,"TERMINAL",0,0,"5199",,terminal_output +2755,3149440,"TERMINAL",0,0,"22020",,terminal_output +2756,3150492,"TERMINAL",0,0,"311",,terminal_output +2757,3151635,"TERMINAL",0,0,"422",,terminal_output +2758,3152656,"TERMINAL",0,0,"533",,terminal_output +2759,3153649,"TERMINAL",0,0,"644",,terminal_output +2760,3154674,"TERMINAL",0,0,"755",,terminal_output +2761,3155657,"TERMINAL",0,0,"866",,terminal_output +2762,3156693,"TERMINAL",0,0,"977",,terminal_output +2763,3157735,"TERMINAL",0,0,"2:0088",,terminal_output +2764,3158796,"TERMINAL",0,0,"199",,terminal_output +2765,3159868,"TERMINAL",0,0,"23030",,terminal_output +2766,3160877,"TERMINAL",0,0,"311",,terminal_output +2767,3161921,"TERMINAL",0,0,"422",,terminal_output +2768,3162956,"TERMINAL",0,0,"533",,terminal_output +2769,3164095,"TERMINAL",0,0,"644",,terminal_output +2770,3165115,"TERMINAL",0,0,"755",,terminal_output +2771,3166138,"TERMINAL",0,0,"866",,terminal_output +2772,3167114,"TERMINAL",0,0,"977",,terminal_output +2773,3168157,"TERMINAL",0,0,"1088",,terminal_output +2774,3169193,"TERMINAL",0,0,"199",,terminal_output +2775,3170231,"TERMINAL",0,0,"24040",,terminal_output +2776,3171271,"TERMINAL",0,0,"311",,terminal_output +2777,3172314,"TERMINAL",0,0,"422",,terminal_output +2778,3173353,"TERMINAL",0,0,"544",,terminal_output +2779,3174398,"TERMINAL",0,0,"755",,terminal_output +2780,3175459,"TERMINAL",0,0,"866",,terminal_output +2781,3176556,"TERMINAL",0,0,"977",,terminal_output +2782,3177624,"TERMINAL",0,0,"2088",,terminal_output +2783,3178633,"TERMINAL",0,0,"199",,terminal_output +2784,3179640,"TERMINAL",0,0,"25050",,terminal_output +2785,3180667,"TERMINAL",0,0,"311",,terminal_output +2786,3181709,"TERMINAL",0,0,"422",,terminal_output +2787,3182743,"TERMINAL",0,0,"533",,terminal_output +2788,3183785,"TERMINAL",0,0,"644",,terminal_output +2789,3184880,"TERMINAL",0,0,"755",,terminal_output +2790,3185873,"TERMINAL",0,0,"866",,terminal_output +2791,3186896,"TERMINAL",0,0,"977",,terminal_output +2792,3187952,"TERMINAL",0,0,"3088",,terminal_output +2793,3188976,"TERMINAL",0,0,"199",,terminal_output 
+2794,3190033,"TERMINAL",0,0,"22:002:00",,terminal_output +2795,3192170,"TERMINAL",0,0,"322",,terminal_output +2796,3193177,"TERMINAL",0,0,"533",,terminal_output +2797,3194200,"TERMINAL",0,0,"644",,terminal_output +2798,3195323,"TERMINAL",0,0,"755",,terminal_output +2799,3196352,"TERMINAL",0,0,"866",,terminal_output +2800,3197373,"TERMINAL",0,0,"977",,terminal_output +2801,3198394,"TERMINAL",0,0,"4099",,terminal_output +2802,3199420,"TERMINAL",0,0,"21010",,terminal_output +2803,3200421,"TERMINAL",0,0,"311",,terminal_output +2804,3201461,"TERMINAL",0,0,"422",,terminal_output +2805,3202508,"TERMINAL",0,0,"533",,terminal_output +2806,3203563,"TERMINAL",0,0,"644",,terminal_output +2807,3204642,"TERMINAL",0,0,"755",,terminal_output +2808,3205665,"TERMINAL",0,0,"866",,terminal_output +2809,3206693,"TERMINAL",0,0,"977",,terminal_output +2810,3207718,"TERMINAL",0,0,"5088",,terminal_output +2811,3208757,"TERMINAL",0,0,"199",,terminal_output +2812,3209849,"TERMINAL",0,0,"22020",,terminal_output +2813,3210833,"TERMINAL",0,0,"311",,terminal_output +2814,3211911,"TERMINAL",0,0,"422",,terminal_output +2815,3212925,"TERMINAL",0,0,"533",,terminal_output +2816,3213971,"TERMINAL",0,0,"644",,terminal_output +2817,3215030,"TERMINAL",0,0,"755",,terminal_output +2818,3216053,"TERMINAL",0,0,"866",,terminal_output +2819,3217112,"TERMINAL",0,0,"977",,terminal_output +2820,3218164,"TERMINAL",0,0,"3:0088",,terminal_output +2821,3219261,"TERMINAL",0,0,"199",,terminal_output +2822,3220336,"TERMINAL",0,0,"23030",,terminal_output +2823,3221332,"TERMINAL",0,0,"311",,terminal_output +2824,3222357,"TERMINAL",0,0,"422",,terminal_output +2825,3223347,"TERMINAL",0,0,"544",,terminal_output +2826,3224409,"TERMINAL",0,0,"755",,terminal_output +2827,3225531,"TERMINAL",0,0,"866",,terminal_output +2828,3226555,"TERMINAL",0,0,"977",,terminal_output +2829,3227595,"TERMINAL",0,0,"1088",,terminal_output +2830,3228705,"TERMINAL",0,0,"199",,terminal_output +2831,3229733,"TERMINAL",0,0,"24040",,terminal_output +2832,3230685,"TERMINAL",0,0,"311",,terminal_output +2833,3231775,"TERMINAL",0,0,"422",,terminal_output +2834,3232776,"TERMINAL",0,0,"533",,terminal_output +2835,3233820,"TERMINAL",0,0,"644",,terminal_output +2836,3234875,"TERMINAL",0,0,"755",,terminal_output +2837,3235977,"TERMINAL",0,0,"866",,terminal_output +2838,3236944,"TERMINAL",0,0,"977",,terminal_output +2839,3238026,"TERMINAL",0,0,"2088",,terminal_output +2840,3239050,"TERMINAL",0,0,"199",,terminal_output +2841,3240098,"TERMINAL",0,0,"25050",,terminal_output +2842,3241202,"TERMINAL",0,0,"311",,terminal_output +2843,3242224,"TERMINAL",0,0,"422",,terminal_output +2844,3243218,"TERMINAL",0,0,"533",,terminal_output +2845,3244256,"TERMINAL",0,0,"644",,terminal_output +2846,3245292,"TERMINAL",0,0,"755",,terminal_output +2847,3246334,"TERMINAL",0,0,"866",,terminal_output +2848,3247371,"TERMINAL",0,0,"988",,terminal_output +2849,3248438,"TERMINAL",0,0,"3199",,terminal_output +2850,3249493,"TERMINAL",0,0,"23:003:00",,terminal_output +2851,3250521,"TERMINAL",0,0,"311",,terminal_output +2852,3251542,"TERMINAL",0,0,"422",,terminal_output +2853,3252702,"TERMINAL",0,0,"533",,terminal_output +2854,3253692,"TERMINAL",0,0,"644",,terminal_output +2855,3254715,"TERMINAL",0,0,"755",,terminal_output +2856,3255724,"TERMINAL",0,0,"866",,terminal_output +2857,3256777,"TERMINAL",0,0,"977",,terminal_output +2858,3257795,"TERMINAL",0,0,"4088",,terminal_output +2859,3258852,"TERMINAL",0,0,"199",,terminal_output +2860,3259922,"TERMINAL",0,0,"21010",,terminal_output 
+2861,3260930,"TERMINAL",0,0,"311",,terminal_output +2862,3261981,"TERMINAL",0,0,"422",,terminal_output +2863,3263078,"TERMINAL",0,0,"533",,terminal_output +2864,3264065,"TERMINAL",0,0,"644",,terminal_output +2865,3265164,"TERMINAL",0,0,"755",,terminal_output +2866,3266135,"TERMINAL",0,0,"866",,terminal_output +2867,3267181,"TERMINAL",0,0,"977",,terminal_output +2868,3268335,"TERMINAL",0,0,"5088",,terminal_output +2869,3269275,"TERMINAL",0,0,"199",,terminal_output +2870,3270355,"TERMINAL",0,0,"22020",,terminal_output +2871,3271396,"TERMINAL",0,0,"322",,terminal_output +2872,3272537,"TERMINAL",0,0,"533",,terminal_output +2873,3273478,"TERMINAL",0,0,"644",,terminal_output +2874,3274516,"TERMINAL",0,0,"755",,terminal_output +2875,3275554,"TERMINAL",0,0,"866",,terminal_output +2876,3276613,"TERMINAL",0,0,"977",,terminal_output +2877,3277674,"TERMINAL",0,0,"4:0088",,terminal_output +2878,3278680,"TERMINAL",0,0,"199",,terminal_output +2879,3279710,"TERMINAL",0,0,"23030",,terminal_output +2880,3280748,"TERMINAL",0,0,"311",,terminal_output +2881,3281787,"TERMINAL",0,0,"422",,terminal_output +2882,3282830,"TERMINAL",0,0,"533",,terminal_output +2883,3283902,"TERMINAL",0,0,"644",,terminal_output +2884,3284923,"TERMINAL",0,0,"755",,terminal_output +2885,3286054,"TERMINAL",0,0,"866",,terminal_output +2886,3287077,"TERMINAL",0,0,"977",,terminal_output +2887,3288059,"TERMINAL",0,0,"1088",,terminal_output +2888,3289124,"TERMINAL",0,0,"199",,terminal_output +2889,3290135,"TERMINAL",0,0,"24040",,terminal_output +2890,3291176,"TERMINAL",0,0,"311",,terminal_output +2891,3292287,"TERMINAL",0,0,"422",,terminal_output +2892,3293338,"TERMINAL",0,0,"533",,terminal_output +2893,3294309,"TERMINAL",0,0,"644",,terminal_output +2894,3295372,"TERMINAL",0,0,"766",,terminal_output +2895,3296505,"TERMINAL",0,0,"977",,terminal_output +2896,3297519,"TERMINAL",0,0,"2088",,terminal_output +2897,3298549,"TERMINAL",0,0,"199",,terminal_output +2898,3299566,"TERMINAL",0,0,"25050",,terminal_output +2899,3300682,"TERMINAL",0,0,"311",,terminal_output +2900,3301719,"TERMINAL",0,0,"422",,terminal_output +2901,3302768,"TERMINAL",0,0,"533",,terminal_output +2902,3303740,"TERMINAL",0,0,"644",,terminal_output +2903,3304786,"TERMINAL",0,0,"755",,terminal_output +2904,3305860,"TERMINAL",0,0,"866",,terminal_output +2905,3306912,"TERMINAL",0,0,"977",,terminal_output +2906,3307941,"TERMINAL",0,0,"3088",,terminal_output +2907,3308977,"TERMINAL",0,0,"199",,terminal_output +2908,3310029,"TERMINAL",0,0,"24:004:00",,terminal_output +2909,3311141,"TERMINAL",0,0,"311",,terminal_output +2910,3312266,"TERMINAL",0,0,"422",,terminal_output +2911,3313292,"TERMINAL",0,0,"533",,terminal_output +2912,3314312,"TERMINAL",0,0,"644",,terminal_output +2913,3315442,"TERMINAL",0,0,"766",,terminal_output +2914,3316462,"TERMINAL",0,0,"977",,terminal_output +2915,3317489,"TERMINAL",0,0,"4088",,terminal_output +2916,3318488,"TERMINAL",0,0,"199",,terminal_output +2917,3319542,"TERMINAL",0,0,"21010",,terminal_output +2918,3320579,"TERMINAL",0,0,"311",,terminal_output +2919,3321623,"TERMINAL",0,0,"422",,terminal_output +2920,3322678,"TERMINAL",0,0,"533",,terminal_output +2921,3323714,"TERMINAL",0,0,"644",,terminal_output +2922,3324775,"TERMINAL",0,0,"755",,terminal_output +2923,3325824,"TERMINAL",0,0,"866",,terminal_output +2924,3326849,"TERMINAL",0,0,"977",,terminal_output +2925,3327889,"TERMINAL",0,0,"5088",,terminal_output +2926,3328951,"TERMINAL",0,0,"199",,terminal_output +2927,3329991,"TERMINAL",0,0,"22020",,terminal_output 
+2928,3331074,"TERMINAL",0,0,"311",,terminal_output +2929,3332080,"TERMINAL",0,0,"422",,terminal_output +2930,3333163,"TERMINAL",0,0,"533",,terminal_output +2931,3334192,"TERMINAL",0,0,"644",,terminal_output +2932,3335308,"TERMINAL",0,0,"755",,terminal_output +2933,3336341,"TERMINAL",0,0,"866",,terminal_output +2934,3337371,"TERMINAL",0,0,"977",,terminal_output +2935,3338355,"TERMINAL",0,0,"5:0099",,terminal_output +2936,3339407,"TERMINAL",0,0,"23030",,terminal_output +2937,3340433,"TERMINAL",0,0,"311",,terminal_output +2938,3341507,"TERMINAL",0,0,"422",,terminal_output +2939,3342550,"TERMINAL",0,0,"533",,terminal_output +2940,3343621,"TERMINAL",0,0,"6CG4",,terminal_output +2941,3344626,"TERMINAL",0,0,"75",,terminal_output +2942,3345656,"TERMINAL",0,0,"86",,terminal_output +2943,3346734,"TERMINAL",0,0,"97",,terminal_output +2944,3347802,"TERMINAL",0,0,"108",,terminal_output +2945,3348825,"TERMINAL",0,0,"19",,terminal_output +2946,3349840,"TERMINAL",0,0,"240",,terminal_output +2947,3350887,"TERMINAL",0,0,"31",,terminal_output +2948,3351932,"TERMINAL",0,0,"42",,terminal_output +2949,3353020,"TERMINAL",0,0,"53",,terminal_output +2950,3354014,"TERMINAL",0,0,"\r6500 R44633",,terminal_output +2951,3355063,"TERMINAL",0,0,"75",,terminal_output +2952,3356188,"TERMINAL",0,0,"86",,terminal_output +2953,3357333,"TERMINAL",0,0,"97",,terminal_output +2954,3358300,"TERMINAL",0,0,"208",,terminal_output +2955,3359268,"TERMINAL",0,0,"19",,terminal_output +2956,3360291,"TERMINAL",0,0,"250",,terminal_output +2957,3361338,"TERMINAL",0,0,"32",,terminal_output +2958,3362388,"TERMINAL",0,0,"53",,terminal_output +2959,3363427,"TERMINAL",0,0,"64",,terminal_output +2960,3364475,"TERMINAL",0,0,"75",,terminal_output +2961,3365543,"TERMINAL",0,0,"86",,terminal_output +2962,3366594,"TERMINAL",0,0,"97",,terminal_output +2963,3367671,"TERMINAL",0,0,"308",,terminal_output +2964,3368705,"TERMINAL",0,0,"19",,terminal_output +2965,3369813,"TERMINAL",0,0,"25:00",,terminal_output +2966,3370759,"TERMINAL",0,0,"31",,terminal_output +2967,3371806,"TERMINAL",0,0,"42",,terminal_output +2968,3372852,"TERMINAL",0,0,"53",,terminal_output +2969,3373905,"TERMINAL",0,0,"64",,terminal_output +2970,3375045,"TERMINAL",0,0,"75",,terminal_output +2971,3375998,"TERMINAL",0,0,"86",,terminal_output +2972,3377045,"TERMINAL",0,0,"97",,terminal_output +2973,3378087,"TERMINAL",0,0,"408",,terminal_output +2974,3379132,"TERMINAL",0,0,"19",,terminal_output +2975,3380180,"TERMINAL",0,0,"210",,terminal_output +2976,3381257,"TERMINAL",0,0,"31",,terminal_output +2977,3382268,"TERMINAL",0,0,"42",,terminal_output +2978,3383350,"TERMINAL",0,0,"53",,terminal_output +2979,3384448,"TERMINAL",0,0,"65",,terminal_output +2980,3385436,"TERMINAL",0,0,"86",,terminal_output +2981,3386511,"TERMINAL",0,0,"97",,terminal_output +2982,3387518,"TERMINAL",0,0,"508",,terminal_output +2983,3388563,"TERMINAL",0,0,"19",,terminal_output +2984,3389689,"TERMINAL",0,0,"220",,terminal_output +2985,3390671,"TERMINAL",0,0,"31",,terminal_output +2986,3391729,"TERMINAL",0,0,"42",,terminal_output +2987,3392756,"TERMINAL",0,0,"53",,terminal_output +2988,3393802,"TERMINAL",0,0,"64",,terminal_output +2989,3394868,"TERMINAL",0,0,"75",,terminal_output +2990,3395863,"TERMINAL",0,0,"86",,terminal_output +2991,3396932,"TERMINAL",0,0,"97",,terminal_output +2992,3397947,"TERMINAL",0,0,"6:008",,terminal_output +2993,3398989,"TERMINAL",0,0,"19",,terminal_output +2994,3400032,"TERMINAL",0,0,"230",,terminal_output +2995,3401154,"TERMINAL",0,0,"31",,terminal_output 
+2996,3402174,"TERMINAL",0,0,"42",,terminal_output +2997,3403197,"TERMINAL",0,0,"53",,terminal_output +2998,3404219,"TERMINAL",0,0,"64",,terminal_output +2999,3405234,"TERMINAL",0,0,"75",,terminal_output +3000,3406274,"TERMINAL",0,0,"86",,terminal_output +3001,3407313,"TERMINAL",0,0,"97",,terminal_output +3002,3408377,"TERMINAL",0,0,"109",,terminal_output +3003,3409395,"TERMINAL",0,0,"240",,terminal_output +3004,3410465,"TERMINAL",0,0,"31",,terminal_output +3005,3411475,"TERMINAL",0,0,"4CG2",,terminal_output +3006,3412516,"TERMINAL",0,0,"5",,terminal_output +3007,3413645,"TERMINAL",0,0,"6",,terminal_output +3008,3414594,"TERMINAL",0,0,"7",,terminal_output +3009,3415663,"TERMINAL",0,0,"8",,terminal_output +3010,3416750,"TERMINAL",0,0,"9",,terminal_output +3011,3417768,"TERMINAL",0,0,"20",,terminal_output +3012,3418761,"TERMINAL",0,0,"1",,terminal_output +3013,3419798,"TERMINAL",0,0,"2",,terminal_output +3014,3420837,"TERMINAL",0,0,"3",,terminal_output +3015,3421917,"TERMINAL",0,0,"4",,terminal_output +3016,3422929,"TERMINAL",0,0,"\r5",,terminal_output +3017,3423963,"TERMINAL",0,0,"6",,terminal_output +3018,3425010,"TERMINAL",0,0,"7",,terminal_output +3019,3426060,"TERMINAL",0,0,"8",,terminal_output +3020,3427162,"TERMINAL",0,0,"9",,terminal_output +3021,3428191,"TERMINAL",0,0,"30",,terminal_output +3022,3429207,"TERMINAL",0,0,"1",,terminal_output +3023,3430231,"TERMINAL",0,0,"2",,terminal_output +3024,3431265,"TERMINAL",0,0,"3",,terminal_output +3025,3432388,"TERMINAL",0,0,"4",,terminal_output +3026,3433441,"TERMINAL",0,0,"6",,terminal_output +3027,3434535,"TERMINAL",0,0,"7",,terminal_output +3028,3435555,"TERMINAL",0,0,"8",,terminal_output +3029,3436598,"TERMINAL",0,0,"9",,terminal_output +3030,3437790,"TERMINAL",0,0,"40",,terminal_output +3031,3438629,"TERMINAL",0,0,"1",,terminal_output +3032,3439642,"TERMINAL",0,0,"2",,terminal_output +3033,3440699,"TERMINAL",0,0,"3",,terminal_output +3034,3441758,"TERMINAL",0,0,"4",,terminal_output +3035,3442395,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +3036,3443120,"TERMINAL",0,0,"5",,terminal_output +3037,3443812,"TERMINAL",0,0,"6",,terminal_output +3038,3444895,"TERMINAL",0,0,"7",,terminal_output +3039,3445898,"TERMINAL",0,0,"8",,terminal_output +3040,3446679,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=02:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_flash_attn_ablation_bs110\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n 
restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --no-use-flash-attention \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-flash-attn-ablation-bs110-$slurm_job_id \\n --tags coinrun dynamics maskgit flash-attn-ablation bs110 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +3041,3447313,"TERMINAL",0,0,"9",,terminal_output +3042,3447999,"TERMINAL",0,0,"50",,terminal_output +3043,3449280,"TERMINAL",0,0,"1",,terminal_output +3044,3449711,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",0,0,"",shellscript,tab +3045,3450243,"TERMINAL",0,0,"2",,terminal_output +3046,3451163,"TERMINAL",0,0,"3",,terminal_output +3047,3452211,"TERMINAL",0,0,"4",,terminal_output +3048,3453273,"TERMINAL",0,0,"5",,terminal_output +3049,3454260,"TERMINAL",0,0,"6",,terminal_output +3050,3455306,"TERMINAL",0,0,"7",,terminal_output +3051,3456342,"TERMINAL",0,0,"8",,terminal_output +3052,3457381,"TERMINAL",0,0,"7:00",,terminal_output +3053,3458423,"TERMINAL",0,0,"1",,terminal_output +3054,3459475,"TERMINAL",0,0,"2",,terminal_output +3055,3460576,"TERMINAL",0,0,"3",,terminal_output +3056,3461567,"TERMINAL",0,0,"4",,terminal_output +3057,3462627,"TERMINAL",0,0,"5",,terminal_output +3058,3463719,"TERMINAL",0,0,"6",,terminal_output +3059,3464742,"TERMINAL",0,0,"7",,terminal_output +3060,3465709,"TERMINAL",0,0,"8",,terminal_output +3061,3466787,"TERMINAL",0,0,"9",,terminal_output +3062,3467812,"TERMINAL",0,0,"10",,terminal_output +3063,3468830,"TERMINAL",0,0,"1",,terminal_output +3064,3469867,"TERMINAL",0,0,"2",,terminal_output +3065,3470924,"TERMINAL",0,0,"3",,terminal_output +3066,3471950,"TERMINAL",0,0,"4",,terminal_output +3067,3472991,"TERMINAL",0,0,"5",,terminal_output +3068,3474026,"TERMINAL",0,0,"6",,terminal_output +3069,3475077,"TERMINAL",0,0,"7",,terminal_output +3070,3476213,"TERMINAL",0,0,"8",,terminal_output +3071,3477233,"TERMINAL",0,0,"9",,terminal_output +3072,3478275,"TERMINAL",0,0,"20",,terminal_output +3073,3479279,"TERMINAL",0,0,"1",,terminal_output +3074,3480271,"TERMINAL",0,0,"2",,terminal_output +3075,3481321,"TERMINAL",0,0,"3",,terminal_output +3076,3482357,"TERMINAL",0,0,"4",,terminal_output +3077,3483417,"TERMINAL",0,0,"6",,terminal_output +3078,3484447,"TERMINAL",0,0,"7",,terminal_output +3079,3485505,"TERMINAL",0,0,"8",,terminal_output +3080,3486554,"TERMINAL",0,0,"9",,terminal_output +3081,3487576,"TERMINAL",0,0,"30",,terminal_output +3082,3488715,"TERMINAL",0,0,"1",,terminal_output 
+3083,3489726,"TERMINAL",0,0,"2",,terminal_output +3084,3490688,"TERMINAL",0,0,"3",,terminal_output +3085,3491773,"TERMINAL",0,0,"4",,terminal_output +3086,3492797,"TERMINAL",0,0,"5",,terminal_output +3087,3493813,"TERMINAL",0,0,"6",,terminal_output +3088,3494862,"TERMINAL",0,0,"7",,terminal_output +3089,3495898,"TERMINAL",0,0,"8",,terminal_output +3090,3496922,"TERMINAL",0,0,"9",,terminal_output +3091,3497972,"TERMINAL",0,0,"40",,terminal_output +3092,3499017,"TERMINAL",0,0,"1",,terminal_output +3093,3500056,"TERMINAL",0,0,"2",,terminal_output +3094,3501101,"TERMINAL",0,0,"3",,terminal_output +3095,3502135,"TERMINAL",0,0,"4",,terminal_output +3096,3503193,"TERMINAL",0,0,"5",,terminal_output +3097,3504224,"TERMINAL",0,0,"6",,terminal_output +3098,3505292,"TERMINAL",0,0,"7",,terminal_output +3099,3506315,"TERMINAL",0,0,"8",,terminal_output +3100,3507351,"TERMINAL",0,0,"9",,terminal_output +3101,3508391,"TERMINAL",0,0,"51",,terminal_output +3102,3509429,"TERMINAL",0,0,"2",,terminal_output +3103,3510465,"TERMINAL",0,0,"3",,terminal_output +3104,3511506,"TERMINAL",0,0,"4",,terminal_output +3105,3512546,"TERMINAL",0,0,"5",,terminal_output +3106,3513619,"TERMINAL",0,0,"6",,terminal_output +3107,3514702,"TERMINAL",0,0,"7",,terminal_output +3108,3515697,"TERMINAL",0,0,"8",,terminal_output +3109,3516777,"TERMINAL",0,0,"9",,terminal_output +3110,3517806,"TERMINAL",0,0,"8:00",,terminal_output +3111,3518814,"TERMINAL",0,0,"1",,terminal_output +3112,3519853,"TERMINAL",0,0,"2",,terminal_output +3113,3520890,"TERMINAL",0,0,"3",,terminal_output +3114,3521935,"TERMINAL",0,0,"4",,terminal_output +3115,3522973,"TERMINAL",0,0,"5",,terminal_output +3116,3524010,"TERMINAL",0,0,"6",,terminal_output +3117,3525071,"TERMINAL",0,0,"7",,terminal_output +3118,3526106,"TERMINAL",0,0,"8",,terminal_output +3119,3527138,"TERMINAL",0,0,"9",,terminal_output +3120,3528168,"TERMINAL",0,0,"10",,terminal_output +3121,3529278,"TERMINAL",0,0,"1",,terminal_output +3122,3530260,"TERMINAL",0,0,"2",,terminal_output +3123,3531303,"TERMINAL",0,0,"3",,terminal_output +3124,3532336,"TERMINAL",0,0,"4",,terminal_output +3125,3533386,"TERMINAL",0,0,"6",,terminal_output +3126,3534474,"TERMINAL",0,0,"7",,terminal_output +3127,3535472,"TERMINAL",0,0,"8",,terminal_output +3128,3536526,"TERMINAL",0,0,"9",,terminal_output +3129,3537559,"TERMINAL",0,0,"20",,terminal_output +3130,3538622,"TERMINAL",0,0,"1",,terminal_output +3131,3539636,"TERMINAL",0,0,"2",,terminal_output +3132,3540676,"TERMINAL",0,0,"3",,terminal_output +3133,3541764,"TERMINAL",0,0,"4",,terminal_output +3134,3542787,"TERMINAL",0,0,"5",,terminal_output +3135,3543897,"TERMINAL",0,0,"6",,terminal_output +3136,3544842,"TERMINAL",0,0,"7",,terminal_output +3137,3545886,"TERMINAL",0,0,"8",,terminal_output +3138,3546923,"TERMINAL",0,0,"9",,terminal_output +3139,3547966,"TERMINAL",0,0,"30",,terminal_output +3140,3549004,"TERMINAL",0,0,"1",,terminal_output +3141,3550045,"TERMINAL",0,0,"2",,terminal_output +3142,3551085,"TERMINAL",0,0,"3",,terminal_output +3143,3552158,"TERMINAL",0,0,"4",,terminal_output +3144,3553309,"TERMINAL",0,0,"5",,terminal_output +3145,3554349,"TERMINAL",0,0,"6",,terminal_output +3146,3555411,"TERMINAL",0,0,"8",,terminal_output +3147,3556444,"TERMINAL",0,0,"9",,terminal_output +3148,3557515,"TERMINAL",0,0,"40",,terminal_output +3149,3558577,"TERMINAL",0,0,"1",,terminal_output +3150,3559564,"TERMINAL",0,0,"2",,terminal_output +3151,3560581,"TERMINAL",0,0,"3",,terminal_output +3152,3561712,"TERMINAL",0,0,"4",,terminal_output 
+3153,3562669,"TERMINAL",0,0,"5",,terminal_output +3154,3563759,"TERMINAL",0,0,"6",,terminal_output +3155,3564800,"TERMINAL",0,0,"7",,terminal_output +3156,3565810,"TERMINAL",0,0,"8",,terminal_output +3157,3566850,"TERMINAL",0,0,"9",,terminal_output +3158,3567863,"TERMINAL",0,0,"50",,terminal_output +3159,3568899,"TERMINAL",0,0,"1",,terminal_output +3160,3569989,"TERMINAL",0,0,"2",,terminal_output +3161,3571047,"TERMINAL",0,0,"3",,terminal_output +3162,3572023,"TERMINAL",0,0,"4",,terminal_output +3163,3573080,"TERMINAL",0,0,"5",,terminal_output +3164,3574100,"TERMINAL",0,0,"6",,terminal_output +3165,3575236,"TERMINAL",0,0,"7",,terminal_output +3166,3576172,"TERMINAL",0,0,"8",,terminal_output +3167,3577208,"TERMINAL",0,0,"9",,terminal_output +3168,3578279,"TERMINAL",0,0,"9:00",,terminal_output +3169,3579318,"TERMINAL",0,0,"1",,terminal_output +3170,3580426,"TERMINAL",0,0,"2",,terminal_output +3171,3581502,"TERMINAL",0,0,"4",,terminal_output +3172,3582445,"TERMINAL",0,0,"5",,terminal_output +3173,3583511,"TERMINAL",0,0,"6",,terminal_output +3174,3584553,"TERMINAL",0,0,"7",,terminal_output +3175,3585578,"TERMINAL",0,0,"8",,terminal_output +3176,3586699,"TERMINAL",0,0,"9",,terminal_output +3177,3587725,"TERMINAL",0,0,"10",,terminal_output +3178,3588747,"TERMINAL",0,0,"1",,terminal_output +3179,3589769,"TERMINAL",0,0,"2",,terminal_output +3180,3590814,"TERMINAL",0,0,"3",,terminal_output +3181,3591824,"TERMINAL",0,0,"4",,terminal_output +3182,3592852,"TERMINAL",0,0,"5",,terminal_output +3183,3593893,"TERMINAL",0,0,"6",,terminal_output +3184,3594952,"TERMINAL",0,0,"7",,terminal_output +3185,3595974,"TERMINAL",0,0,"8",,terminal_output +3186,3597018,"TERMINAL",0,0,"9",,terminal_output +3187,3598055,"TERMINAL",0,0,"20",,terminal_output +3188,3599191,"TERMINAL",0,0,"1",,terminal_output +3189,3600134,"TERMINAL",0,0,"2",,terminal_output +3190,3601258,"TERMINAL",0,0,"3",,terminal_output +3191,3602220,"TERMINAL",0,0,"4",,terminal_output +3192,3603264,"TERMINAL",0,0,"5",,terminal_output +3193,3604300,"TERMINAL",0,0,"6",,terminal_output +3194,3605445,"TERMINAL",0,0,"7",,terminal_output +3195,3606378,"TERMINAL",0,0,"9",,terminal_output +3196,3607419,"TERMINAL",0,0,"30",,terminal_output +3197,3608458,"TERMINAL",0,0,"1",,terminal_output +3198,3609514,"TERMINAL",0,0,"2",,terminal_output +3199,3610562,"TERMINAL",0,0,"3",,terminal_output +3200,3611664,"TERMINAL",0,0,"4",,terminal_output +3201,3612733,"TERMINAL",0,0,"5",,terminal_output +3202,3613671,"TERMINAL",0,0,"6",,terminal_output +3203,3614917,"TERMINAL",0,0,"7",,terminal_output +3204,3615756,"TERMINAL",0,0,"8",,terminal_output +3205,3616811,"TERMINAL",0,0,"9",,terminal_output +3206,3618155,"TERMINAL",0,0,"40",,terminal_output +3207,3618895,"TERMINAL",0,0,"1",,terminal_output +3208,3619943,"TERMINAL",0,0,"2",,terminal_output +3209,3620983,"TERMINAL",0,0,"3",,terminal_output +3210,3625899,"TERMINAL",0,0,"4",,terminal_output +3211,3626014,"TERMINAL",0,0,"567",,terminal_output +3212,3626220,"TERMINAL",0,0,"8",,terminal_output +3213,3627263,"TERMINAL",0,0,"9",,terminal_output +3214,3628311,"TERMINAL",0,0,"50",,terminal_output +3215,3629366,"TERMINAL",0,0,"1",,terminal_output +3216,3630437,"TERMINAL",0,0,"3",,terminal_output +3217,3631459,"TERMINAL",0,0,"4",,terminal_output +3218,3632520,"TERMINAL",0,0,"5",,terminal_output +3219,3633550,"TERMINAL",0,0,"6",,terminal_output +3220,3634591,"TERMINAL",0,0,"7",,terminal_output +3221,3635644,"TERMINAL",0,0,"8",,terminal_output +3222,3636687,"TERMINAL",0,0,"9",,terminal_output 
+3223,3637734,"TERMINAL",0,0,"50:00",,terminal_output +3224,3638789,"TERMINAL",0,0,"1",,terminal_output +3225,3639900,"TERMINAL",0,0,"2",,terminal_output +3226,3640874,"TERMINAL",0,0,"3",,terminal_output +3227,3641929,"TERMINAL",0,0,"4",,terminal_output +3228,3642970,"TERMINAL",0,0,"5",,terminal_output +3229,3644021,"TERMINAL",0,0,"6",,terminal_output +3230,3645096,"TERMINAL",0,0,"7",,terminal_output +3231,3646116,"TERMINAL",0,0,"8",,terminal_output +3232,3647162,"TERMINAL",0,0,"9",,terminal_output +3233,3648344,"TERMINAL",0,0,"10",,terminal_output +3234,3649274,"TERMINAL",0,0,"1",,terminal_output +3235,3650397,"TERMINAL",0,0,"2",,terminal_output +3236,3651344,"TERMINAL",0,0,"3",,terminal_output +3237,3652446,"TERMINAL",0,0,"5",,terminal_output +3238,3653469,"TERMINAL",0,0,"6",,terminal_output +3239,3654491,"TERMINAL",0,0,"7",,terminal_output +3240,3655530,"TERMINAL",0,0,"8",,terminal_output +3241,3656576,"TERMINAL",0,0,"9",,terminal_output +3242,3657662,"TERMINAL",0,0,"20",,terminal_output +3243,3658891,"TERMINAL",0,0,"1",,terminal_output +3244,3659849,"TERMINAL",0,0,"2",,terminal_output +3245,3660765,"TERMINAL",0,0,"3",,terminal_output +3246,3661840,"TERMINAL",0,0,"4",,terminal_output +3247,3662855,"TERMINAL",0,0,"5",,terminal_output +3248,3663911,"TERMINAL",0,0,"6",,terminal_output +3249,3664957,"TERMINAL",0,0,"7",,terminal_output +3250,3665997,"TERMINAL",0,0,"8",,terminal_output +3251,3667279,"TERMINAL",0,0,"9",,terminal_output +3252,3668114,"TERMINAL",0,0,"30",,terminal_output +3253,3669242,"TERMINAL",0,0,"1",,terminal_output +3254,3670202,"TERMINAL",0,0,"2",,terminal_output +3255,3671256,"TERMINAL",0,0,"3",,terminal_output +3256,3672417,"TERMINAL",0,0,"4",,terminal_output +3257,3673440,"TERMINAL",0,0,"5",,terminal_output +3258,3674702,"TERMINAL",0,0,"7",,terminal_output +3259,3675504,"TERMINAL",0,0,"8",,terminal_output +3260,3676547,"TERMINAL",0,0,"9",,terminal_output +3261,3677591,"TERMINAL",0,0,"40",,terminal_output +3262,3678640,"TERMINAL",0,0,"1",,terminal_output +3263,3679695,"TERMINAL",0,0,"2",,terminal_output +3264,3680731,"TERMINAL",0,0,"3",,terminal_output +3265,3681776,"TERMINAL",0,0,"4",,terminal_output +3266,3682860,"TERMINAL",0,0,"5",,terminal_output +3267,3683882,"TERMINAL",0,0,"6",,terminal_output +3268,3684914,"TERMINAL",0,0,"7",,terminal_output +3269,3685966,"TERMINAL",0,0,"8",,terminal_output +3270,3687011,"TERMINAL",0,0,"9",,terminal_output +3271,3688068,"TERMINAL",0,0,"50",,terminal_output +3272,3689116,"TERMINAL",0,0,"1",,terminal_output +3273,3690263,"TERMINAL",0,0,"2",,terminal_output +3274,3691331,"TERMINAL",0,0,"3",,terminal_output +3275,3692947,"TERMINAL",0,0,"4",,terminal_output +3276,3693321,"TERMINAL",0,0,"5",,terminal_output +3277,3694375,"TERMINAL",0,0,"6",,terminal_output +3278,3696060,"TERMINAL",0,0,"8",,terminal_output +3279,3696465,"TERMINAL",0,0,"9",,terminal_output +3280,3697512,"TERMINAL",0,0,"1:00",,terminal_output +3281,3698565,"TERMINAL",0,0,"1",,terminal_output +3282,3699597,"TERMINAL",0,0,"2",,terminal_output +3283,3700653,"TERMINAL",0,0,"3",,terminal_output +3284,3701686,"TERMINAL",0,0,"4",,terminal_output +3285,3702761,"TERMINAL",0,0,"5",,terminal_output +3286,3703782,"TERMINAL",0,0,"6",,terminal_output +3287,3704826,"TERMINAL",0,0,"7",,terminal_output +3288,3705870,"TERMINAL",0,0,"8",,terminal_output +3289,3706924,"TERMINAL",0,0,"9",,terminal_output +3290,3707957,"TERMINAL",0,0,"10",,terminal_output +3291,3709004,"TERMINAL",0,0,"1",,terminal_output +3292,3710044,"TERMINAL",0,0,"2",,terminal_output 
+3293,3711228,"TERMINAL",0,0,"3",,terminal_output +3294,3712122,"TERMINAL",0,0,"4",,terminal_output +3295,3713174,"TERMINAL",0,0,"5",,terminal_output +3296,3714221,"TERMINAL",0,0,"6",,terminal_output +3297,3715304,"TERMINAL",0,0,"7",,terminal_output +3298,3716305,"TERMINAL",0,0,"8",,terminal_output +3299,3717369,"TERMINAL",0,0,"9",,terminal_output +3300,3718406,"TERMINAL",0,0,"21",,terminal_output +3301,3719530,"TERMINAL",0,0,"2",,terminal_output +3302,3720542,"TERMINAL",0,0,"3",,terminal_output +3303,3721684,"TERMINAL",0,0,"4",,terminal_output +3304,3722794,"TERMINAL",0,0,"5",,terminal_output +3305,3723820,"TERMINAL",0,0,"6",,terminal_output +3306,3724876,"TERMINAL",0,0,"7",,terminal_output +3307,3725881,"TERMINAL",0,0,"8",,terminal_output +3308,3726922,"TERMINAL",0,0,"9",,terminal_output +3309,3727940,"TERMINAL",0,0,"30",,terminal_output +3310,3729012,"TERMINAL",0,0,"1",,terminal_output +3311,3730239,"TERMINAL",0,0,"2",,terminal_output +3312,3731062,"TERMINAL",0,0,"3",,terminal_output +3313,3732110,"TERMINAL",0,0,"4",,terminal_output +3314,3733247,"TERMINAL",0,0,"5",,terminal_output +3315,3734203,"TERMINAL",0,0,"6",,terminal_output +3316,3735347,"TERMINAL",0,0,"7",,terminal_output +3317,3736386,"TERMINAL",0,0,"8",,terminal_output +3318,3738417,"TERMINAL",0,0,"940",,terminal_output +3319,3739413,"TERMINAL",0,0,"2",,terminal_output +3320,3740471,"TERMINAL",0,0,"3",,terminal_output +3321,3741481,"TERMINAL",0,0,"4",,terminal_output +3322,3743529,"TERMINAL",0,0,"5",,terminal_output +3323,3743581,"TERMINAL",0,0,"6",,terminal_output +3324,3744631,"TERMINAL",0,0,"7",,terminal_output +3325,3745647,"TERMINAL",0,0,"8",,terminal_output +3326,3746681,"TERMINAL",0,0,"9",,terminal_output +3327,3747715,"TERMINAL",0,0,"50",,terminal_output +3328,3748756,"TERMINAL",0,0,"1",,terminal_output +3329,3749793,"TERMINAL",0,0,"2",,terminal_output +3330,3750834,"TERMINAL",0,0,"3",,terminal_output +3331,3751870,"TERMINAL",0,0,"4",,terminal_output +3332,3752910,"TERMINAL",0,0,"5",,terminal_output +3333,3753947,"TERMINAL",0,0,"6",,terminal_output +3334,3755014,"TERMINAL",0,0,"7",,terminal_output +3335,3756088,"TERMINAL",0,0,"8",,terminal_output +3336,3757130,"TERMINAL",0,0,"9",,terminal_output +3337,3758136,"TERMINAL",0,0,"2:00",,terminal_output +3338,3759190,"TERMINAL",0,0,"1",,terminal_output +3339,3760213,"TERMINAL",0,0,"2",,terminal_output +3340,3761298,"TERMINAL",0,0,"3",,terminal_output +3341,3762378,"TERMINAL",0,0,"4",,terminal_output +3342,3763347,"TERMINAL",0,0,"5",,terminal_output +3343,3764374,"TERMINAL",0,0,"6",,terminal_output +3344,3765409,"TERMINAL",0,0,"8",,terminal_output +3345,3766462,"TERMINAL",0,0,"9",,terminal_output +3346,3767544,"TERMINAL",0,0,"10",,terminal_output +3347,3768735,"TERMINAL",0,0,"1",,terminal_output +3348,3769595,"TERMINAL",0,0,"2",,terminal_output +3349,3770616,"TERMINAL",0,0,"3",,terminal_output +3350,3771671,"TERMINAL",0,0,"4",,terminal_output +3351,3772704,"TERMINAL",0,0,"5",,terminal_output +3352,3773745,"TERMINAL",0,0,"6",,terminal_output +3353,3774836,"TERMINAL",0,0,"7",,terminal_output +3354,3775841,"TERMINAL",0,0,"8",,terminal_output +3355,3776867,"TERMINAL",0,0,"9",,terminal_output +3356,3778019,"TERMINAL",0,0,"20",,terminal_output +3357,3779018,"TERMINAL",0,0,"1",,terminal_output +3358,3779981,"TERMINAL",0,0,"2",,terminal_output +3359,3781037,"TERMINAL",0,0,"3",,terminal_output +3360,3782059,"TERMINAL",0,0,"4",,terminal_output +3361,3783095,"TERMINAL",0,0,"5",,terminal_output +3362,3784148,"TERMINAL",0,0,"6",,terminal_output 
+3363,3785257,"TERMINAL",0,0,"7",,terminal_output +3364,3786338,"TERMINAL",0,0,"8",,terminal_output +3365,3787257,"TERMINAL",0,0,"9",,terminal_output +3366,3788342,"TERMINAL",0,0,"30",,terminal_output +3367,3789583,"TERMINAL",0,0,"1",,terminal_output +3368,3790437,"TERMINAL",0,0,"2",,terminal_output +3369,3791415,"TERMINAL",0,0,"4",,terminal_output +3370,3792743,"TERMINAL",0,0,"5",,terminal_output +3371,3793559,"TERMINAL",0,0,"6",,terminal_output +3372,3794621,"TERMINAL",0,0,"7",,terminal_output +3373,3795630,"TERMINAL",0,0,"8",,terminal_output +3374,3796670,"TERMINAL",0,0,"9",,terminal_output +3375,3797734,"TERMINAL",0,0,"40",,terminal_output +3376,3798799,"TERMINAL",0,0,"1",,terminal_output +3377,3799798,"TERMINAL",0,0,"2",,terminal_output +3378,3800829,"TERMINAL",0,0,"3",,terminal_output +3379,3801876,"TERMINAL",0,0,"4",,terminal_output +3380,3802976,"TERMINAL",0,0,"5",,terminal_output +3381,3804006,"TERMINAL",0,0,"6",,terminal_output +3382,3805000,"TERMINAL",0,0,"7",,terminal_output +3383,3806043,"TERMINAL",0,0,"8",,terminal_output +3384,3807275,"TERMINAL",0,0,"9",,terminal_output +3385,3808134,"TERMINAL",0,0,"50",,terminal_output +3386,3809184,"TERMINAL",0,0,"1",,terminal_output +3387,3810221,"TERMINAL",0,0,"2",,terminal_output +3388,3811263,"TERMINAL",0,0,"3",,terminal_output +3389,3812294,"TERMINAL",0,0,"4",,terminal_output +3390,3813403,"TERMINAL",0,0,"5",,terminal_output +3391,3814383,"TERMINAL",0,0,"7",,terminal_output +3392,3815559,"TERMINAL",0,0,"8",,terminal_output +3393,3816458,"TERMINAL",0,0,"9",,terminal_output +3394,3817509,"TERMINAL",0,0,"3:00",,terminal_output +3395,3818642,"TERMINAL",0,0,"1",,terminal_output +3396,3819671,"TERMINAL",0,0,"2",,terminal_output +3397,3820659,"TERMINAL",0,0,"3",,terminal_output +3398,3821712,"TERMINAL",0,0,"4",,terminal_output +3399,3822853,"TERMINAL",0,0,"5",,terminal_output +3400,3823961,"TERMINAL",0,0,"6",,terminal_output +3401,3824822,"TERMINAL",0,0,"7",,terminal_output +3402,3825865,"TERMINAL",0,0,"8",,terminal_output +3403,3826922,"TERMINAL",0,0,"9",,terminal_output +3404,3827963,"TERMINAL",0,0,"10",,terminal_output +3405,3828991,"TERMINAL",0,0,"1",,terminal_output +3406,3830027,"TERMINAL",0,0,"2",,terminal_output +3407,3831063,"TERMINAL",0,0,"3",,terminal_output +3408,3832165,"TERMINAL",0,0,"4",,terminal_output +3409,3833144,"TERMINAL",0,0,"5",,terminal_output +3410,3834210,"TERMINAL",0,0,"6",,terminal_output +3411,3835227,"TERMINAL",0,0,"7",,terminal_output +3412,3836643,"TERMINAL",0,0,"8",,terminal_output +3413,3837297,"TERMINAL",0,0,"9",,terminal_output +3414,3838357,"TERMINAL",0,0,"20",,terminal_output +3415,3839417,"TERMINAL",0,0,"2",,terminal_output +3416,3840429,"TERMINAL",0,0,"3",,terminal_output +3417,3841531,"TERMINAL",0,0,"4",,terminal_output +3418,3842602,"TERMINAL",0,0,"5",,terminal_output +3419,3843668,"TERMINAL",0,0,"6",,terminal_output +3420,3844990,"TERMINAL",0,0,"7",,terminal_output +3421,3845654,"TERMINAL",0,0,"8",,terminal_output +3422,3846669,"TERMINAL",0,0,"9",,terminal_output +3423,3847703,"TERMINAL",0,0,"30",,terminal_output +3424,3848754,"TERMINAL",0,0,"1",,terminal_output +3425,3849824,"TERMINAL",0,0,"2",,terminal_output +3426,3850836,"TERMINAL",0,0,"3",,terminal_output +3427,3851873,"TERMINAL",0,0,"4",,terminal_output +3428,3852916,"TERMINAL",0,0,"5",,terminal_output +3429,3853970,"TERMINAL",0,0,"6",,terminal_output +3430,3855037,"TERMINAL",0,0,"7",,terminal_output +3431,3856091,"TERMINAL",0,0,"8",,terminal_output +3432,3857074,"TERMINAL",0,0,"9",,terminal_output 
+3433,3858111,"TERMINAL",0,0,"40",,terminal_output +3434,3859157,"TERMINAL",0,0,"1",,terminal_output +3435,3860209,"TERMINAL",0,0,"2",,terminal_output +3436,3861248,"TERMINAL",0,0,"3",,terminal_output +3437,3862268,"TERMINAL",0,0,"4",,terminal_output +3438,3863396,"TERMINAL",0,0,"5",,terminal_output +3439,3864351,"TERMINAL",0,0,"6",,terminal_output +3440,3865393,"TERMINAL",0,0,"8",,terminal_output +3441,3866447,"TERMINAL",0,0,"9",,terminal_output +3442,3867466,"TERMINAL",0,0,"50",,terminal_output +3443,3868517,"TERMINAL",0,0,"1",,terminal_output +3444,3869543,"TERMINAL",0,0,"2",,terminal_output +3445,3870651,"TERMINAL",0,0,"3",,terminal_output +3446,3871775,"TERMINAL",0,0,"4",,terminal_output +3447,3872676,"TERMINAL",0,0,"5",,terminal_output +3448,3873737,"TERMINAL",0,0,"6",,terminal_output +3449,3874759,"TERMINAL",0,0,"7",,terminal_output +3450,3875806,"TERMINAL",0,0,"8",,terminal_output +3451,3876840,"TERMINAL",0,0,"9",,terminal_output +3452,3877958,"TERMINAL",0,0,"4:00",,terminal_output +3453,3879287,"TERMINAL",0,0,"1",,terminal_output +3454,3879959,"TERMINAL",0,0,"2",,terminal_output +3455,3881012,"TERMINAL",0,0,"3",,terminal_output +3456,3882037,"TERMINAL",0,0,"4",,terminal_output +3457,3883084,"TERMINAL",0,0,"5",,terminal_output +3458,3884120,"TERMINAL",0,0,"6",,terminal_output +3459,3885213,"TERMINAL",0,0,"7",,terminal_output +3460,3886286,"TERMINAL",0,0,"8",,terminal_output +3461,3887315,"TERMINAL",0,0,"9",,terminal_output +3462,3888383,"TERMINAL",0,0,"10",,terminal_output +3463,3889359,"TERMINAL",0,0,"1",,terminal_output +3464,3890356,"TERMINAL",0,0,"2",,terminal_output +3465,3891396,"TERMINAL",0,0,"4",,terminal_output +3466,3892474,"TERMINAL",0,0,"5",,terminal_output +3467,3893497,"TERMINAL",0,0,"6",,terminal_output +3468,3894529,"TERMINAL",0,0,"7",,terminal_output +3469,3895570,"TERMINAL",0,0,"8",,terminal_output +3470,3896643,"TERMINAL",0,0,"9",,terminal_output +3471,3897646,"TERMINAL",0,0,"20",,terminal_output +3472,3898728,"TERMINAL",0,0,"1",,terminal_output +3473,3899737,"TERMINAL",0,0,"2",,terminal_output +3474,3900766,"TERMINAL",0,0,"3",,terminal_output +3475,3901805,"TERMINAL",0,0,"4",,terminal_output +3476,3902850,"TERMINAL",0,0,"5",,terminal_output +3477,3903946,"TERMINAL",0,0,"6",,terminal_output +3478,3904925,"TERMINAL",0,0,"7",,terminal_output +3479,3905989,"TERMINAL",0,0,"8",,terminal_output +3480,3906997,"TERMINAL",0,0,"9",,terminal_output +3481,3908046,"TERMINAL",0,0,"30",,terminal_output +3482,3909082,"TERMINAL",0,0,"1",,terminal_output +3483,3910139,"TERMINAL",0,0,"2",,terminal_output +3484,3911181,"TERMINAL",0,0,"3",,terminal_output +3485,3912228,"TERMINAL",0,0,"4",,terminal_output +3486,3913259,"TERMINAL",0,0,"5",,terminal_output +3487,3914360,"TERMINAL",0,0,"6",,terminal_output +3488,3915345,"TERMINAL",0,0,"7",,terminal_output +3489,3916387,"TERMINAL",0,0,"9",,terminal_output +3490,3917467,"TERMINAL",0,0,"40",,terminal_output +3491,3918645,"TERMINAL",0,0,"1",,terminal_output +3492,3919502,"TERMINAL",0,0,"2",,terminal_output +3493,3920542,"TERMINAL",0,0,"3",,terminal_output +3494,3921637,"TERMINAL",0,0,"4",,terminal_output +3495,3922717,"TERMINAL",0,0,"5",,terminal_output +3496,3923671,"TERMINAL",0,0,"6",,terminal_output +3497,3924715,"TERMINAL",0,0,"7",,terminal_output +3498,3925760,"TERMINAL",0,0,"8",,terminal_output +3499,3926800,"TERMINAL",0,0,"9",,terminal_output +3500,3927837,"TERMINAL",0,0,"50",,terminal_output +3501,3928887,"TERMINAL",0,0,"1",,terminal_output +3502,3929921,"TERMINAL",0,0,"2",,terminal_output 
+3503,3930994,"TERMINAL",0,0,"3",,terminal_output +3504,3932031,"TERMINAL",0,0,"4",,terminal_output +3505,3933112,"TERMINAL",0,0,"5",,terminal_output +3506,3934116,"TERMINAL",0,0,"6",,terminal_output +3507,3935189,"TERMINAL",0,0,"7",,terminal_output +3508,3936195,"TERMINAL",0,0,"8",,terminal_output +3509,3937239,"TERMINAL",0,0,"9",,terminal_output +3510,3938352,"TERMINAL",0,0,"5:00",,terminal_output +3511,3939523,"TERMINAL",0,0,"1",,terminal_output +3512,3940379,"TERMINAL",0,0,"2",,terminal_output +3513,3941413,"TERMINAL",0,0,"4",,terminal_output +3514,3942437,"TERMINAL",0,0,"5",,terminal_output +3515,3943469,"TERMINAL",0,0,"6",,terminal_output +3516,3944505,"TERMINAL",0,0,"7",,terminal_output +3517,3945543,"TERMINAL",0,0,"8",,terminal_output +3518,3946645,"TERMINAL",0,0,"9",,terminal_output +3519,3947670,"TERMINAL",0,0,"10",,terminal_output +3520,3948694,"TERMINAL",0,0,"1",,terminal_output +3521,3949820,"TERMINAL",0,0,"2",,terminal_output +3522,3950754,"TERMINAL",0,0,"3",,terminal_output +3523,3951798,"TERMINAL",0,0,"4",,terminal_output +3524,3952833,"TERMINAL",0,0,"5",,terminal_output +3525,3953875,"TERMINAL",0,0,"6",,terminal_output +3526,3954909,"TERMINAL",0,0,"7",,terminal_output +3527,3955965,"TERMINAL",0,0,"8",,terminal_output +3528,3956985,"TERMINAL",0,0,"9",,terminal_output +3529,3958040,"TERMINAL",0,0,"20",,terminal_output +3530,3959061,"TERMINAL",0,0,"1",,terminal_output +3531,3960109,"TERMINAL",0,0,"2",,terminal_output +3532,3961159,"TERMINAL",0,0,"3",,terminal_output +3533,3962260,"TERMINAL",0,0,"4",,terminal_output +3534,3963243,"TERMINAL",0,0,"5",,terminal_output +3535,3964291,"TERMINAL",0,0,"6",,terminal_output +3536,3965337,"TERMINAL",0,0,"7",,terminal_output +3537,3966388,"TERMINAL",0,0,"9",,terminal_output +3538,3967420,"TERMINAL",0,0,"30",,terminal_output +3539,3968462,"TERMINAL",0,0,"1",,terminal_output +3540,3969537,"TERMINAL",0,0,"2",,terminal_output +3541,3970614,"TERMINAL",0,0,"3",,terminal_output +3542,3971629,"TERMINAL",0,0,"4",,terminal_output +3543,3972655,"TERMINAL",0,0,"5",,terminal_output +3544,3973681,"TERMINAL",0,0,"6",,terminal_output +3545,3974735,"TERMINAL",0,0,"7",,terminal_output +3546,3975829,"TERMINAL",0,0,"8",,terminal_output +3547,3976806,"TERMINAL",0,0,"9",,terminal_output +3548,3977873,"TERMINAL",0,0,"40",,terminal_output +3549,3978874,"TERMINAL",0,0,"1",,terminal_output +3550,3979916,"TERMINAL",0,0,"2",,terminal_output +3551,3980985,"TERMINAL",0,0,"3",,terminal_output +3552,3982022,"TERMINAL",0,0,"4",,terminal_output +3553,3983033,"TERMINAL",0,0,"5",,terminal_output +3554,3984061,"TERMINAL",0,0,"6",,terminal_output +3555,3985117,"TERMINAL",0,0,"7",,terminal_output +3556,3986149,"TERMINAL",0,0,"8",,terminal_output +3557,3987226,"TERMINAL",0,0,"9",,terminal_output +3558,3988235,"TERMINAL",0,0,"50",,terminal_output +3559,3989358,"TERMINAL",0,0,"1",,terminal_output +3560,3990373,"TERMINAL",0,0,"2",,terminal_output +3561,3991397,"TERMINAL",0,0,"3",,terminal_output +3562,3992420,"TERMINAL",0,0,"5",,terminal_output +3563,3993566,"TERMINAL",0,0,"6",,terminal_output +3564,3994578,"TERMINAL",0,0,"7",,terminal_output +3565,3995592,"TERMINAL",0,0,"8",,terminal_output +3566,3996577,"TERMINAL",0,0,"9",,terminal_output +3567,3997613,"TERMINAL",0,0,"6:00",,terminal_output +3568,3998653,"TERMINAL",0,0,"1",,terminal_output +3569,3999698,"TERMINAL",0,0,"2",,terminal_output +3570,4000744,"TERMINAL",0,0,"3",,terminal_output +3571,4001791,"TERMINAL",0,0,"4",,terminal_output +3572,4002865,"TERMINAL",0,0,"5",,terminal_output 
+3573,4003915,"TERMINAL",0,0,"6",,terminal_output +3574,4005014,"TERMINAL",0,0,"7",,terminal_output +3575,4006037,"TERMINAL",0,0,"8",,terminal_output +3576,4007062,"TERMINAL",0,0,"9",,terminal_output +3577,4008060,"TERMINAL",0,0,"10",,terminal_output +3578,4009103,"TERMINAL",0,0,"1",,terminal_output +3579,4010154,"TERMINAL",0,0,"2",,terminal_output +3580,4011234,"TERMINAL",0,0,"3",,terminal_output +3581,4012246,"TERMINAL",0,0,"4",,terminal_output +3582,4013467,"TERMINAL",0,0,"5",,terminal_output +3583,4014362,"TERMINAL",0,0,"6",,terminal_output +3584,4015374,"TERMINAL",0,0,"7",,terminal_output +3585,4016637,"TERMINAL",0,0,"9",,terminal_output +3586,4017462,"TERMINAL",0,0,"20",,terminal_output +3587,4018526,"TERMINAL",0,0,"1",,terminal_output +3588,4019573,"TERMINAL",0,0,"2",,terminal_output +3589,4020601,"TERMINAL",0,0,"3",,terminal_output +3590,4021646,"TERMINAL",0,0,"4",,terminal_output +3591,4022691,"TERMINAL",0,0,"5",,terminal_output +3592,4023737,"TERMINAL",0,0,"6",,terminal_output +3593,4024827,"TERMINAL",0,0,"7",,terminal_output +3594,4025835,"TERMINAL",0,0,"8",,terminal_output +3595,4026889,"TERMINAL",0,0,"9",,terminal_output +3596,4027954,"TERMINAL",0,0,"30",,terminal_output +3597,4029080,"TERMINAL",0,0,"1",,terminal_output +3598,4030050,"TERMINAL",0,0,"2",,terminal_output +3599,4031073,"TERMINAL",0,0,"3",,terminal_output +3600,4032118,"TERMINAL",0,0,"4",,terminal_output +3601,4033174,"TERMINAL",0,0,"5",,terminal_output +3602,4034213,"TERMINAL",0,0,"6",,terminal_output +3603,4035276,"TERMINAL",0,0,"7",,terminal_output +3604,4036342,"TERMINAL",0,0,"8",,terminal_output +3605,4037377,"TERMINAL",0,0,"9",,terminal_output +3606,4038415,"TERMINAL",0,0,"41",,terminal_output +3607,4039452,"TERMINAL",0,0,"2",,terminal_output +3608,4040500,"TERMINAL",0,0,"3",,terminal_output +3609,4041716,"TERMINAL",0,0,"4",,terminal_output +3610,4042698,"TERMINAL",0,0,"5",,terminal_output +3611,4043723,"TERMINAL",0,0,"6",,terminal_output +3612,4044850,"TERMINAL",0,0,"7",,terminal_output +3613,4046216,"TERMINAL",0,0,"8",,terminal_output +3614,4046791,"TERMINAL",0,0,"9",,terminal_output +3615,4047842,"TERMINAL",0,0,"50",,terminal_output +3616,4048993,"TERMINAL",0,0,"1",,terminal_output +3617,4049937,"TERMINAL",0,0,"2",,terminal_output +3618,4050989,"TERMINAL",0,0,"3",,terminal_output +3619,4052029,"TERMINAL",0,0,"4",,terminal_output +3620,4053144,"TERMINAL",0,0,"5",,terminal_output +3621,4054135,"TERMINAL",0,0,"6",,terminal_output +3622,4055182,"TERMINAL",0,0,"7",,terminal_output +3623,4056318,"TERMINAL",0,0,"8",,terminal_output +3624,4057345,"TERMINAL",0,0,"9",,terminal_output +3625,4058464,"TERMINAL",0,0,"7:00",,terminal_output +3626,4059373,"TERMINAL",0,0,"1",,terminal_output +3627,4060423,"TERMINAL",0,0,"3",,terminal_output +3628,4061467,"TERMINAL",0,0,"4",,terminal_output +3629,4062549,"TERMINAL",0,0,"5",,terminal_output +3630,4063606,"TERMINAL",0,0,"6",,terminal_output +3631,4064705,"TERMINAL",0,0,"7",,terminal_output +3632,4065651,"TERMINAL",0,0,"8",,terminal_output +3633,4066741,"TERMINAL",0,0,"9",,terminal_output +3634,4067741,"TERMINAL",0,0,"10",,terminal_output +3635,4068809,"TERMINAL",0,0,"1",,terminal_output +3636,4069936,"TERMINAL",0,0,"2",,terminal_output +3637,4070891,"TERMINAL",0,0,"3",,terminal_output +3638,4071938,"TERMINAL",0,0,"4",,terminal_output +3639,4072982,"TERMINAL",0,0,"5",,terminal_output +3640,4074073,"TERMINAL",0,0,"6",,terminal_output +3641,4075126,"TERMINAL",0,0,"7",,terminal_output +3642,4076139,"TERMINAL",0,0,"8",,terminal_output 
+3643,4077364,"TERMINAL",0,0,"9",,terminal_output +3644,4078235,"TERMINAL",0,0,"20",,terminal_output +3645,4079369,"TERMINAL",0,0,"1",,terminal_output +3646,4080594,"TERMINAL",0,0,"2",,terminal_output +3647,4081363,"TERMINAL",0,0,"3",,terminal_output +3648,4082404,"TERMINAL",0,0,"5",,terminal_output +3649,4083498,"TERMINAL",0,0,"6",,terminal_output +3650,4084495,"TERMINAL",0,0,"7",,terminal_output +3651,4085619,"TERMINAL",0,0,"8",,terminal_output +3652,4086626,"TERMINAL",0,0,"9",,terminal_output +3653,4087651,"TERMINAL",0,0,"30",,terminal_output +3654,4088778,"TERMINAL",0,0,"1",,terminal_output +3655,4089718,"TERMINAL",0,0,"2",,terminal_output +3656,4090762,"TERMINAL",0,0,"3",,terminal_output +3657,4091810,"TERMINAL",0,0,"4",,terminal_output +3658,4092865,"TERMINAL",0,0,"5",,terminal_output +3659,4093893,"TERMINAL",0,0,"6",,terminal_output +3660,4094929,"TERMINAL",0,0,"7",,terminal_output +3661,4095974,"TERMINAL",0,0,"8",,terminal_output +3662,4097042,"TERMINAL",0,0,"9",,terminal_output +3663,4098066,"TERMINAL",0,0,"40",,terminal_output +3664,4099122,"TERMINAL",0,0,"1",,terminal_output +3665,4100193,"TERMINAL",0,0,"2",,terminal_output +3666,4101187,"TERMINAL",0,0,"3",,terminal_output +3667,4102224,"TERMINAL",0,0,"4",,terminal_output +3668,4103318,"TERMINAL",0,0,"5",,terminal_output +3669,4104377,"TERMINAL",0,0,"6",,terminal_output +3670,4105379,"TERMINAL",0,0,"7",,terminal_output +3671,4106382,"TERMINAL",0,0,"9",,terminal_output +3672,4107428,"TERMINAL",0,0,"50",,terminal_output +3673,4108465,"TERMINAL",0,0,"1",,terminal_output +3674,4109504,"TERMINAL",0,0,"2",,terminal_output +3675,4110565,"TERMINAL",0,0,"3",,terminal_output +3676,4111582,"TERMINAL",0,0,"4",,terminal_output +3677,4112634,"TERMINAL",0,0,"5",,terminal_output +3678,4113794,"TERMINAL",0,0,"6",,terminal_output +3679,4114715,"TERMINAL",0,0,"7",,terminal_output +3680,4115744,"TERMINAL",0,0,"8",,terminal_output +3681,4116789,"TERMINAL",0,0,"9",,terminal_output +3682,4117851,"TERMINAL",0,0,"8:00",,terminal_output +3683,4118881,"TERMINAL",0,0,"1",,terminal_output +3684,4119921,"TERMINAL",0,0,"2",,terminal_output +3685,4120956,"TERMINAL",0,0,"3",,terminal_output +3686,4122020,"TERMINAL",0,0,"4",,terminal_output +3687,4123079,"TERMINAL",0,0,"5",,terminal_output +3688,4124121,"TERMINAL",0,0,"6",,terminal_output +3689,4125167,"TERMINAL",0,0,"7",,terminal_output +3690,4126189,"TERMINAL",0,0,"8",,terminal_output +3691,4127239,"TERMINAL",0,0,"9",,terminal_output +3692,4128321,"TERMINAL",0,0,"10",,terminal_output +3693,4129507,"TERMINAL",0,0,"1",,terminal_output +3694,4130467,"TERMINAL",0,0,"2",,terminal_output +3695,4131424,"TERMINAL",0,0,"4",,terminal_output +3696,4132532,"TERMINAL",0,0,"5",,terminal_output +3697,4133629,"TERMINAL",0,0,"6",,terminal_output +3698,4134657,"TERMINAL",0,0,"7",,terminal_output +3699,4135610,"TERMINAL",0,0,"8",,terminal_output +3700,4136653,"TERMINAL",0,0,"9",,terminal_output +3701,4137701,"TERMINAL",0,0,"20",,terminal_output +3702,4138750,"TERMINAL",0,0,"1",,terminal_output +3703,4139790,"TERMINAL",0,0,"2",,terminal_output +3704,4140857,"TERMINAL",0,0,"3",,terminal_output +3705,4141878,"TERMINAL",0,0,"4",,terminal_output +3706,4142946,"TERMINAL",0,0,"5",,terminal_output +3707,4144151,"TERMINAL",0,0,"6",,terminal_output +3708,4145126,"TERMINAL",0,0,"7",,terminal_output +3709,4146121,"TERMINAL",0,0,"8",,terminal_output +3710,4147169,"TERMINAL",0,0,"9",,terminal_output +3711,4148146,"TERMINAL",0,0,"30",,terminal_output +3712,4149192,"TERMINAL",0,0,"1",,terminal_output 
+3713,4150236,"TERMINAL",0,0,"2",,terminal_output +3714,4151282,"TERMINAL",0,0,"3",,terminal_output +3715,4152338,"TERMINAL",0,0,"4",,terminal_output +3716,4153366,"TERMINAL",0,0,"5",,terminal_output +3717,4154408,"TERMINAL",0,0,"7",,terminal_output +3718,4155500,"TERMINAL",0,0,"8",,terminal_output +3719,4156543,"TERMINAL",0,0,"9",,terminal_output +3720,4157621,"TERMINAL",0,0,"40",,terminal_output +3721,4158607,"TERMINAL",0,0,"1",,terminal_output +3722,4159651,"TERMINAL",0,0,"2",,terminal_output +3723,4160739,"TERMINAL",0,0,"3",,terminal_output +3724,4161799,"TERMINAL",0,0,"4",,terminal_output +3725,4162812,"TERMINAL",0,0,"5",,terminal_output +3726,4163836,"TERMINAL",0,0,"6",,terminal_output +3727,4164877,"TERMINAL",0,0,"7",,terminal_output +3728,4165915,"TERMINAL",0,0,"8",,terminal_output +3729,4166962,"TERMINAL",0,0,"9",,terminal_output +3730,4168011,"TERMINAL",0,0,"50",,terminal_output +3731,4169051,"TERMINAL",0,0,"1",,terminal_output +3732,4170111,"TERMINAL",0,0,"2",,terminal_output +3733,4171215,"TERMINAL",0,0,"3",,terminal_output +3734,4172175,"TERMINAL",0,0,"4",,terminal_output +3735,4173218,"TERMINAL",0,0,"5",,terminal_output +3736,4174260,"TERMINAL",0,0,"6",,terminal_output +3737,4175418,"TERMINAL",0,0,"7",,terminal_output +3738,4176428,"TERMINAL",0,0,"8",,terminal_output +3739,4177460,"TERMINAL",0,0,"9:00",,terminal_output +3740,4178421,"TERMINAL",0,0,"1",,terminal_output +3741,4179470,"TERMINAL",0,0,"2",,terminal_output +3742,4180501,"TERMINAL",0,0,"3",,terminal_output +3743,4181564,"TERMINAL",0,0,"4",,terminal_output +3744,4182577,"TERMINAL",0,0,"5",,terminal_output +3745,4183613,"TERMINAL",0,0,"6",,terminal_output +3746,4184732,"TERMINAL",0,0,"7",,terminal_output +3747,4185694,"TERMINAL",0,0,"8",,terminal_output +3748,4186738,"TERMINAL",0,0,"9",,terminal_output +3749,4187901,"TERMINAL",0,0,"10",,terminal_output +3750,4188926,"TERMINAL",0,0,"1",,terminal_output +3751,4189963,"TERMINAL",0,0,"2",,terminal_output +3752,4190972,"TERMINAL",0,0,"3",,terminal_output +3753,4191945,"TERMINAL",0,0,"4",,terminal_output +3754,4192984,"TERMINAL",0,0,"5",,terminal_output +3755,4194036,"TERMINAL",0,0,"6",,terminal_output +3756,4195078,"TERMINAL",0,0,"7",,terminal_output +3757,4196154,"TERMINAL",0,0,"8",,terminal_output +3758,4197229,"TERMINAL",0,0,"9",,terminal_output +3759,4198206,"TERMINAL",0,0,"20",,terminal_output +3760,4199246,"TERMINAL",0,0,"1",,terminal_output +3761,4200291,"TERMINAL",0,0,"2",,terminal_output +3762,4201536,"TERMINAL",0,0,"3",,terminal_output +3763,4202373,"TERMINAL",0,0,"4",,terminal_output +3764,4203463,"TERMINAL",0,0,"6",,terminal_output +3765,4204456,"TERMINAL",0,0,"7",,terminal_output +3766,4205515,"TERMINAL",0,0,"8",,terminal_output +3767,4206549,"TERMINAL",0,0,"9",,terminal_output +3768,4207592,"TERMINAL",0,0,"30",,terminal_output +3769,4208647,"TERMINAL",0,0,"1",,terminal_output +3770,4209679,"TERMINAL",0,0,"2",,terminal_output +3771,4210721,"TERMINAL",0,0,"3",,terminal_output +3772,4211767,"TERMINAL",0,0,"4",,terminal_output +3773,4212871,"TERMINAL",0,0,"5",,terminal_output +3774,4213910,"TERMINAL",0,0,"6",,terminal_output +3775,4215038,"TERMINAL",0,0,"7",,terminal_output +3776,4215962,"TERMINAL",0,0,"8",,terminal_output +3777,4216979,"TERMINAL",0,0,"9",,terminal_output +3778,4218314,"TERMINAL",0,0,"40",,terminal_output +3779,4219114,"TERMINAL",0,0,"1",,terminal_output +3780,4220156,"TERMINAL",0,0,"2",,terminal_output +3781,4221290,"TERMINAL",0,0,"3",,terminal_output +3782,4222222,"TERMINAL",0,0,"4",,terminal_output 
+3783,4223268,"TERMINAL",0,0,"5",,terminal_output +3784,4224338,"TERMINAL",0,0,"6",,terminal_output +3785,4225364,"TERMINAL",0,0,"7",,terminal_output +3786,4226411,"TERMINAL",0,0,"9",,terminal_output +3787,4227523,"TERMINAL",0,0,"50",,terminal_output +3788,4228613,"TERMINAL",0,0,"1",,terminal_output +3789,4229582,"TERMINAL",0,0,"2",,terminal_output +3790,4230600,"TERMINAL",0,0,"3",,terminal_output +3791,4231628,"TERMINAL",0,0,"4",,terminal_output +3792,4232751,"TERMINAL",0,0,"5",,terminal_output +3793,4233686,"TERMINAL",0,0,"6",,terminal_output +3794,4234726,"TERMINAL",0,0,"7",,terminal_output +3795,4235771,"TERMINAL",0,0,"8",,terminal_output +3796,4236819,"TERMINAL",0,0,"9",,terminal_output +3797,4237872,"TERMINAL",0,0,"9:00:00",,terminal_output +3798,4238930,"TERMINAL",0,0,"1",,terminal_output +3799,4239926,"TERMINAL",0,0,"2",,terminal_output +3800,4240971,"TERMINAL",0,0,"3",,terminal_output +3801,4242073,"TERMINAL",0,0,"4",,terminal_output +3802,4243061,"TERMINAL",0,0,"5",,terminal_output +3803,4244107,"TERMINAL",0,0,"6",,terminal_output +3804,4245174,"TERMINAL",0,0,"7",,terminal_output +3805,4246283,"TERMINAL",0,0,"8",,terminal_output +3806,4247238,"TERMINAL",0,0,"9",,terminal_output +3807,4248267,"TERMINAL",0,0,"10",,terminal_output +3808,4249319,"TERMINAL",0,0,"1",,terminal_output +3809,4250346,"TERMINAL",0,0,"2",,terminal_output +3810,4251426,"TERMINAL",0,0,"4",,terminal_output +3811,4252441,"TERMINAL",0,0,"5",,terminal_output +3812,4253550,"TERMINAL",0,0,"6",,terminal_output +3813,4254523,"TERMINAL",0,0,"7",,terminal_output +3814,4255593,"TERMINAL",0,0,"8",,terminal_output +3815,4256757,"TERMINAL",0,0,"9",,terminal_output +3816,4257736,"TERMINAL",0,0,"20",,terminal_output +3817,4258862,"TERMINAL",0,0,"1",,terminal_output +3818,4259787,"TERMINAL",0,0,"2",,terminal_output +3819,4260772,"TERMINAL",0,0,"3",,terminal_output +3820,4261821,"TERMINAL",0,0,"4",,terminal_output +3821,4262860,"TERMINAL",0,0,"5",,terminal_output +3822,4263939,"TERMINAL",0,0,"6",,terminal_output +3823,4265047,"TERMINAL",0,0,"7",,terminal_output +3824,4266001,"TERMINAL",0,0,"8",,terminal_output +3825,4267037,"TERMINAL",0,0,"9",,terminal_output +3826,4268082,"TERMINAL",0,0,"30",,terminal_output +3827,4269108,"TERMINAL",0,0,"1",,terminal_output +3828,4270252,"TERMINAL",0,0,"2",,terminal_output +3829,4271355,"TERMINAL",0,0,"3",,terminal_output +3830,4272229,"TERMINAL",0,0,"4",,terminal_output +3831,4273412,"TERMINAL",0,0,"5",,terminal_output +3832,4274328,"TERMINAL",0,0,"6",,terminal_output +3833,4275373,"TERMINAL",0,0,"7",,terminal_output +3834,4276470,"TERMINAL",0,0,"9",,terminal_output +3835,4277428,"TERMINAL",0,0,"40",,terminal_output +3836,4278460,"TERMINAL",0,0,"1",,terminal_output +3837,4279553,"TERMINAL",0,0,"2",,terminal_output +3838,4280548,"TERMINAL",0,0,"3",,terminal_output +3839,4281632,"TERMINAL",0,0,"4",,terminal_output +3840,4282825,"TERMINAL",0,0,"5",,terminal_output +3841,4283747,"TERMINAL",0,0,"6",,terminal_output +3842,4284788,"TERMINAL",0,0,"7",,terminal_output +3843,4285769,"TERMINAL",0,0,"8",,terminal_output +3844,4286813,"TERMINAL",0,0,"9",,terminal_output +3845,4287854,"TERMINAL",0,0,"50",,terminal_output +3846,4288893,"TERMINAL",0,0,"1",,terminal_output +3847,4289947,"TERMINAL",0,0,"2",,terminal_output +3848,4290973,"TERMINAL",0,0,"3",,terminal_output +3849,4292025,"TERMINAL",0,0,"4",,terminal_output +3850,4293083,"TERMINAL",0,0,"5",,terminal_output +3851,4294221,"TERMINAL",0,0,"6",,terminal_output +3852,4295250,"TERMINAL",0,0,"7",,terminal_output 
+3853,4296226,"TERMINAL",0,0,"8",,terminal_output +3854,4297269,"TERMINAL",0,0,"9",,terminal_output +3855,4298266,"TERMINAL",0,0,"1:00",,terminal_output +3856,4299302,"TERMINAL",0,0,"1",,terminal_output +3857,4300343,"TERMINAL",0,0,"2",,terminal_output +3858,4301388,"TERMINAL",0,0,"4",,terminal_output +3859,4302426,"TERMINAL",0,0,"5",,terminal_output +3860,4303462,"TERMINAL",0,0,"6",,terminal_output +3861,4304628,"TERMINAL",0,0,"7",,terminal_output +3862,4305578,"TERMINAL",0,0,"8",,terminal_output +3863,4306715,"TERMINAL",0,0,"9",,terminal_output +3864,4307710,"TERMINAL",0,0,"10",,terminal_output +3865,4308665,"TERMINAL",0,0,"1",,terminal_output +3866,4309756,"TERMINAL",0,0,"2",,terminal_output +3867,4310744,"TERMINAL",0,0,"3",,terminal_output +3868,4311806,"TERMINAL",0,0,"4",,terminal_output +3869,4312821,"TERMINAL",0,0,"5",,terminal_output +3870,4313898,"TERMINAL",0,0,"6",,terminal_output +3871,4314906,"TERMINAL",0,0,"7",,terminal_output +3872,4315990,"TERMINAL",0,0,"8",,terminal_output +3873,4317028,"TERMINAL",0,0,"9",,terminal_output +3874,4318030,"TERMINAL",0,0,"20",,terminal_output +3875,4319382,"TERMINAL",0,0,"1",,terminal_output +3876,4320240,"TERMINAL",0,0,"2",,terminal_output +3877,4321168,"TERMINAL",0,0,"3",,terminal_output +3878,4322232,"TERMINAL",0,0,"4",,terminal_output +3879,4323244,"TERMINAL",0,0,"5",,terminal_output +3880,4324339,"TERMINAL",0,0,"6",,terminal_output +3881,4325328,"TERMINAL",0,0,"7",,terminal_output +3882,4326453,"TERMINAL",0,0,"8",,terminal_output +3883,4327392,"TERMINAL",0,0,"30",,terminal_output +3884,4328503,"TERMINAL",0,0,"1",,terminal_output +3885,4329551,"TERMINAL",0,0,"2",,terminal_output +3886,4330529,"TERMINAL",0,0,"3",,terminal_output +3887,4331559,"TERMINAL",0,0,"4",,terminal_output +3888,4332609,"TERMINAL",0,0,"5",,terminal_output +3889,4333635,"TERMINAL",0,0,"6",,terminal_output +3890,4334676,"TERMINAL",0,0,"7",,terminal_output +3891,4335721,"TERMINAL",0,0,"8",,terminal_output +3892,4336747,"TERMINAL",0,0,"9",,terminal_output +3893,4337935,"TERMINAL",0,0,"40",,terminal_output +3894,4338855,"TERMINAL",0,0,"1",,terminal_output +3895,4339880,"TERMINAL",0,0,"2",,terminal_output +3896,4340991,"TERMINAL",0,0,"3",,terminal_output +3897,4341961,"TERMINAL",0,0,"4",,terminal_output +3898,4343038,"TERMINAL",0,0,"5",,terminal_output +3899,4344268,"TERMINAL",0,0,"6",,terminal_output +3900,4345196,"TERMINAL",0,0,"7",,terminal_output +3901,4346126,"TERMINAL",0,0,"8",,terminal_output +3902,4347164,"TERMINAL",0,0,"9",,terminal_output +3903,4348232,"TERMINAL",0,0,"50",,terminal_output +3904,4349250,"TERMINAL",0,0,"1",,terminal_output +3905,4350306,"TERMINAL",0,0,"2",,terminal_output +3906,4351330,"TERMINAL",0,0,"3",,terminal_output +3907,4352388,"TERMINAL",0,0,"4",,terminal_output +3908,4353412,"TERMINAL",0,0,"6",,terminal_output +3909,4354450,"TERMINAL",0,0,"7",,terminal_output +3910,4355527,"TERMINAL",0,0,"8",,terminal_output +3911,4356531,"TERMINAL",0,0,"9",,terminal_output +3912,4357576,"TERMINAL",0,0,"2:00",,terminal_output +3913,4358644,"TERMINAL",0,0,"1",,terminal_output +3914,4359656,"TERMINAL",0,0,"2",,terminal_output +3915,4360736,"TERMINAL",0,0,"3",,terminal_output +3916,4361782,"TERMINAL",0,0,"4",,terminal_output +3917,4362890,"TERMINAL",0,0,"5",,terminal_output +3918,4363959,"TERMINAL",0,0,"6",,terminal_output +3919,4364951,"TERMINAL",0,0,"7",,terminal_output +3920,4366107,"TERMINAL",0,0,"8",,terminal_output +3921,4366996,"TERMINAL",0,0,"9",,terminal_output +3922,4368037,"TERMINAL",0,0,"10",,terminal_output 
+[rows 3923–3932 elided: the same per-second terminal ticker, continuing to roughly 9:02:21]
+3933,4379549,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2451,0,"",shellscript,selection_mouse
+3934,4379550,"slurm/jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh",2450,0,"",shellscript,selection_command
+[rows 3935–3988 elided: the per-second terminal ticker, continuing to roughly 9:03:17]
+3989,4436504,"TERMINAL",0,0,"8",,terminal_output +3990,4437357,"TERMINAL",0,0,"9",,terminal_output +3991,4438390,"TERMINAL",0,0,"21",,terminal_output +3992,4439436,"TERMINAL",0,0,"2",,terminal_output +3993,4440510,"TERMINAL",0,0,"3",,terminal_output +3994,4441660,"TERMINAL",0,0,"4",,terminal_output +3995,4442592,"TERMINAL",0,0,"5",,terminal_output +3996,4444045,"TERMINAL",0,0,"6",,terminal_output +3997,4444746,"TERMINAL",0,0,"7",,terminal_output +3998,4445723,"TERMINAL",0,0,"8",,terminal_output +3999,4446775,"TERMINAL",0,0,"9",,terminal_output +4000,4447902,"TERMINAL",0,0,"30",,terminal_output +4001,4448967,"TERMINAL",0,0,"1",,terminal_output +4002,4450245,"TERMINAL",0,0,"2",,terminal_output +4003,4450970,"TERMINAL",0,0,"3",,terminal_output +4004,4451999,"TERMINAL",0,0,"4",,terminal_output +4005,4453040,"TERMINAL",0,0,"5",,terminal_output +4006,4454153,"TERMINAL",0,0,"6",,terminal_output +4007,4455208,"TERMINAL",0,0,"7",,terminal_output +4008,4456224,"TERMINAL",0,0,"8",,terminal_output +4009,4457355,"TERMINAL",0,0,"9",,terminal_output +4010,4458769,"TERMINAL",0,0,"40",,terminal_output +4011,4459348,"TERMINAL",0,0,"1",,terminal_output +4012,4459806,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_tokenizer_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun tokenizer 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +4013,4460425,"TERMINAL",0,0,"2",,terminal_output +4014,4460847,"slurm/jobs/franz/berlin/coinrun/mila_submission/coinrun_dynamics_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH 
--output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +4015,4461503,"TERMINAL",0,0,"4",,terminal_output +4016,4462512,"TERMINAL",0,0,"5",,terminal_output +4017,4463578,"TERMINAL",0,0,"6",,terminal_output +4018,4465016,"TERMINAL",0,0,"7",,terminal_output +4019,4465588,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset 
mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +4020,4465764,"TERMINAL",0,0,"8",,terminal_output +4021,4466864,"TERMINAL",0,0,"9",,terminal_output +4022,4467832,"TERMINAL",0,0,"50",,terminal_output +4023,4468854,"TERMINAL",0,0,"1",,terminal_output +4024,4469868,"TERMINAL",0,0,"2",,terminal_output +4025,4471136,"TERMINAL",0,0,"3",,terminal_output +4026,4472136,"TERMINAL",0,0,"4",,terminal_output +4027,4473003,"TERMINAL",0,0,"5",,terminal_output +4028,4474067,"TERMINAL",0,0,"6",,terminal_output +4029,4475171,"TERMINAL",0,0,"7",,terminal_output +4030,4476272,"TERMINAL",0,0,"8",,terminal_output +4031,4477207,"TERMINAL",0,0,"9",,terminal_output +4032,4478325,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",1210,0,"",shellscript,selection_mouse +4033,4478361,"TERMINAL",0,0,"4:00",,terminal_output +4034,4479451,"TERMINAL",0,0,"1",,terminal_output +4035,4479910,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",1871,0,"",shellscript,selection_mouse +4036,4479918,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",1870,0,"",shellscript,selection_command +4037,4480353,"TERMINAL",0,0,"2",,terminal_output +4038,4480494,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",1849,0,"",shellscript,selection_mouse +4039,4480526,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",1848,0,"",shellscript,selection_command +4040,4481412,"TERMINAL",0,0,"3",,terminal_output +4041,4482463,"TERMINAL",0,0,"5",,terminal_output +4042,4483029,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",378,0,"",shellscript,selection_mouse +4043,4483073,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",377,0,"",shellscript,selection_command +4044,4483109,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",377,1,"n",shellscript,selection_mouse +4045,4483110,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",378,0,"",shellscript,selection_command +4046,4483480,"TERMINAL",0,0,"6",,terminal_output +4047,4484179,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",377,0,"",shellscript,selection_command +4048,4484512,"TERMINAL",0,0,"7",,terminal_output +4049,4484733,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",378,0,"",shellscript,selection_command +4050,4484985,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",378,0,"_",shellscript,content 
+4051,4484988,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",379,0,"",shellscript,selection_keyboard +4052,4485440,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",379,0,"s",shellscript,content +4053,4485441,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",380,0,"",shellscript,selection_keyboard +4054,4485546,"TERMINAL",0,0,"8",,terminal_output +4055,4485585,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",380,0,"p",shellscript,content +4056,4485586,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",381,0,"",shellscript,selection_keyboard +4057,4485698,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",381,0,"e",shellscript,content +4058,4485699,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",382,0,"",shellscript,selection_keyboard +4059,4485850,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",382,0,"e",shellscript,content +4060,4485851,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",383,0,"",shellscript,selection_keyboard +4061,4485948,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",383,0,"d",shellscript,content +4062,4485949,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",384,0,"",shellscript,selection_keyboard +4063,4486154,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",384,0,"_",shellscript,content +4064,4486155,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",385,0,"",shellscript,selection_keyboard +4065,4486608,"TERMINAL",0,0,"9",,terminal_output +4066,4486894,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",385,0,"e",shellscript,content +4067,4486895,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",386,0,"",shellscript,selection_keyboard +4068,4487359,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",385,1,"",shellscript,content +4069,4487699,"TERMINAL",0,0,"10",,terminal_output +4070,4487734,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",385,0,"t",shellscript,content +4071,4487735,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",386,0,"",shellscript,selection_keyboard +4072,4487784,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",386,0,"e",shellscript,content +4073,4487785,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",387,0,"",shellscript,selection_keyboard +4074,4487918,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",387,0,"s",shellscript,content +4075,4487919,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",388,0,"",shellscript,selection_keyboard +4076,4487935,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",388,0,"t",shellscript,content +4077,4487936,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base.sh",389,0,"",shellscript,selection_keyboard 
+4078,4488680,"TERMINAL",0,0,"1",,terminal_output +4079,4489936,"TERMINAL",0,0,"2",,terminal_output +4080,4490772,"TERMINAL",0,0,"3",,terminal_output +4081,4491816,"TERMINAL",0,0,"4",,terminal_output +4082,4493156,"TERMINAL",0,0,"5",,terminal_output +4083,4493939,"TERMINAL",0,0,"6",,terminal_output +4084,4494975,"TERMINAL",0,0,"7",,terminal_output +4085,4496174,"TERMINAL",0,0,"8",,terminal_output +4086,4497257,"TERMINAL",0,0,"9",,terminal_output +4087,4498200,"TERMINAL",0,0,"20",,terminal_output +4088,4499340,"TERMINAL",0,0,"1",,terminal_output +4089,4500219,"TERMINAL",0,0,"2",,terminal_output +4090,4501063,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_speed_test\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +4091,4501334,"TERMINAL",0,0,"3",,terminal_output +4092,4502353,"TERMINAL",0,0,"4",,terminal_output +4093,4503381,"TERMINAL",0,0,"5",,terminal_output +4094,4504400,"TERMINAL",0,0,"7",,terminal_output +4095,4505802,"TERMINAL",0,0,"8",,terminal_output +4096,4506515,"TERMINAL",0,0,"9",,terminal_output +4097,4507520,"TERMINAL",0,0,"30",,terminal_output +4098,4508569,"TERMINAL",0,0,"1",,terminal_output +4099,4509683,"TERMINAL",0,0,"2",,terminal_output +4100,4510663,"TERMINAL",0,0,"3",,terminal_output +4101,4511719,"TERMINAL",0,0,"4",,terminal_output +4102,4512817,"TERMINAL",0,0,"5",,terminal_output +4103,4513763,"TERMINAL",0,0,"6",,terminal_output +4104,4514868,"TERMINAL",0,0,"7",,terminal_output 
+4105,4515844,"TERMINAL",0,0,"8",,terminal_output +4106,4516891,"TERMINAL",0,0,"9",,terminal_output +4107,4517972,"TERMINAL",0,0,"40",,terminal_output +4108,4518990,"TERMINAL",0,0,"1",,terminal_output +4109,4520048,"TERMINAL",0,0,"2",,terminal_output +4110,4521109,"TERMINAL",0,0,"3",,terminal_output +4111,4522135,"TERMINAL",0,0,"4",,terminal_output +4112,4523178,"TERMINAL",0,0,"5",,terminal_output +4113,4524351,"TERMINAL",0,0,"6",,terminal_output +4114,4525349,"TERMINAL",0,0,"7",,terminal_output +4115,4526312,"TERMINAL",0,0,"8",,terminal_output +4116,4527386,"TERMINAL",0,0,"9",,terminal_output +4117,4528633,"TERMINAL",0,0,"51",,terminal_output +4118,4529462,"TERMINAL",0,0,"2",,terminal_output +4119,4530539,"TERMINAL",0,0,"3",,terminal_output +4120,4531570,"TERMINAL",0,0,"4",,terminal_output +4121,4532766,"TERMINAL",0,0,"5",,terminal_output +4122,4533632,"TERMINAL",0,0,"6",,terminal_output +4123,4534754,"TERMINAL",0,0,"7",,terminal_output +4124,4535715,"TERMINAL",0,0,"8",,terminal_output +4125,4536772,"TERMINAL",0,0,"9",,terminal_output +4126,4537840,"TERMINAL",0,0,"5:00",,terminal_output +4127,4538994,"TERMINAL",0,0,"1",,terminal_output +4128,4539708,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1221,0,"",shellscript,selection_mouse +4129,4539902,"TERMINAL",0,0,"2",,terminal_output +4130,4540770,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1221,0," ",shellscript,content +4131,4540772,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1222,0,"",shellscript,selection_keyboard +4132,4540975,"TERMINAL",0,0,"3",,terminal_output +4133,4541992,"TERMINAL",0,0,"4",,terminal_output +4134,4542814,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1222,0,"s",shellscript,content +4135,4542815,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1223,0,"",shellscript,selection_keyboard +4136,4542931,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1223,0,"p",shellscript,content +4137,4542931,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1224,0,"",shellscript,selection_keyboard +4138,4543078,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1224,0,"e",shellscript,content +4139,4543079,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1225,0,"",shellscript,selection_keyboard +4140,4543187,"TERMINAL",0,0,"5",,terminal_output +4141,4543316,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1225,0,"e",shellscript,content +4142,4543317,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1226,0,"",shellscript,selection_keyboard +4143,4543445,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1226,0,"d",shellscript,content +4144,4543446,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1227,0,"",shellscript,selection_keyboard +4145,4543995,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1227,0,"_",shellscript,content 
+4146,4543996,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1228,0,"",shellscript,selection_keyboard +4147,4544096,"TERMINAL",0,0,"6",,terminal_output +4148,4544879,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1227,1,"",shellscript,content +4149,4545212,"TERMINAL",0,0,"7",,terminal_output +4150,4545313,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1227,0,"_",shellscript,content +4151,4545315,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1228,0,"",shellscript,selection_keyboard +4152,4545769,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1228,0,"t",shellscript,content +4153,4545770,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1229,0,"",shellscript,selection_keyboard +4154,4545881,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1229,0,"e",shellscript,content +4155,4545882,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1230,0,"",shellscript,selection_keyboard +4156,4546046,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1230,0,"s",shellscript,content +4157,4546047,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1231,0,"",shellscript,selection_keyboard +4158,4546135,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1231,0,"t",shellscript,content +4159,4546136,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_base_speed_ablation.sh",1232,0,"",shellscript,selection_keyboard +4160,4546243,"TERMINAL",0,0,"8",,terminal_output +4161,4547297,"TERMINAL",0,0,"9",,terminal_output +4162,4548366,"TERMINAL",0,0,"10",,terminal_output +4163,4549382,"TERMINAL",0,0,"1",,terminal_output +4164,4550368,"TERMINAL",0,0,"2",,terminal_output +4165,4551526,"TERMINAL",0,0,"4",,terminal_output +4166,4552560,"TERMINAL",0,0,"5",,terminal_output +4167,4553502,"TERMINAL",0,0,"6",,terminal_output +4168,4554553,"TERMINAL",0,0,"7",,terminal_output +4169,4555604,"TERMINAL",0,0,"8",,terminal_output +4170,4556644,"TERMINAL",0,0,"9",,terminal_output +4171,4557709,"TERMINAL",0,0,"20",,terminal_output +4172,4558735,"TERMINAL",0,0,"1",,terminal_output +4173,4559797,"TERMINAL",0,0,"2",,terminal_output +4174,4560840,"TERMINAL",0,0,"3",,terminal_output +4175,4561901,"TERMINAL",0,0,"4",,terminal_output +4176,4562979,"TERMINAL",0,0,"5",,terminal_output +4177,4564055,"TERMINAL",0,0,"6",,terminal_output +4178,4565147,"TERMINAL",0,0,"7",,terminal_output +4179,4566072,"TERMINAL",0,0,"8",,terminal_output +4180,4567201,"TERMINAL",0,0,"9",,terminal_output +4181,4568146,"TERMINAL",0,0,"30",,terminal_output +4182,4569199,"TERMINAL",0,0,"1",,terminal_output +4183,4570318,"TERMINAL",0,0,"2",,terminal_output +4184,4571285,"TERMINAL",0,0,"3",,terminal_output +4185,4572337,"TERMINAL",0,0,"4",,terminal_output +4186,4573411,"TERMINAL",0,0,"5",,terminal_output +4187,4574474,"TERMINAL",0,0,"7",,terminal_output +4188,4575492,"TERMINAL",0,0,"8",,terminal_output +4189,4576617,"TERMINAL",0,0,"9",,terminal_output +4190,4577635,"TERMINAL",0,0,"40",,terminal_output 
+4191,4578770,"TERMINAL",0,0,"1",,terminal_output +4192,4580104,"TERMINAL",0,0,"2",,terminal_output +4193,4580795,"TERMINAL",0,0,"3",,terminal_output +4194,4581849,"TERMINAL",0,0,"4",,terminal_output +4195,4582872,"TERMINAL",0,0,"5",,terminal_output +4196,4584096,"TERMINAL",0,0,"6",,terminal_output +4197,4584895,"TERMINAL",0,0,"7",,terminal_output +4198,4585930,"TERMINAL",0,0,"8",,terminal_output +4199,4586973,"TERMINAL",0,0,"9",,terminal_output +4200,4588027,"TERMINAL",0,0,"50",,terminal_output +4201,4589075,"TERMINAL",0,0,"1",,terminal_output +4202,4590142,"TERMINAL",0,0,"2",,terminal_output +4203,4591216,"TERMINAL",0,0,"3",,terminal_output +4204,4592228,"TERMINAL",0,0,"4",,terminal_output +4205,4593309,"TERMINAL",0,0,"5",,terminal_output +4206,4594430,"TERMINAL",0,0,"6",,terminal_output +4207,4595385,"TERMINAL",0,0,"7",,terminal_output +4208,4596395,"TERMINAL",0,0,"9",,terminal_output +4209,4597445,"TERMINAL",0,0,"6:00",,terminal_output +4210,4598494,"TERMINAL",0,0,"1",,terminal_output +4211,4599531,"TERMINAL",0,0,"2",,terminal_output +4212,4600587,"TERMINAL",0,0,"3",,terminal_output +4213,4601626,"TERMINAL",0,0,"4",,terminal_output +4214,4602688,"TERMINAL",0,0,"5",,terminal_output +4215,4603783,"TERMINAL",0,0,"6",,terminal_output +4216,4604875,"TERMINAL",0,0,"7",,terminal_output +4217,4606080,"TERMINAL",0,0,"8",,terminal_output +4218,4606853,"TERMINAL",0,0,"9",,terminal_output +4219,4607949,"TERMINAL",0,0,"10",,terminal_output +4220,4608969,"TERMINAL",0,0,"1",,terminal_output +4221,4610003,"TERMINAL",0,0,"2",,terminal_output +4222,4611046,"TERMINAL",0,0,"3",,terminal_output +4223,4612195,"TERMINAL",0,0,"4",,terminal_output +4224,4613171,"TERMINAL",0,0,"5",,terminal_output +4225,4614227,"TERMINAL",0,0,"6",,terminal_output +4226,4614974,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_speed_test\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission speed_test""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p 
$CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +4227,4615306,"TERMINAL",0,0,"7",,terminal_output +4228,4616574,"TERMINAL",0,0,"8",,terminal_output +4229,4617304,"TERMINAL",0,0,"9",,terminal_output +4230,4618535,"TERMINAL",0,0,"20",,terminal_output +4231,4619405,"TERMINAL",0,0,"1",,terminal_output +4232,4620447,"TERMINAL",0,0,"3",,terminal_output +4233,4620698,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1232,0," ",shellscript,content +4234,4620700,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1233,0,"",shellscript,selection_keyboard +4235,4620856,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1233,0,"b",shellscript,content +4236,4620857,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1234,0,"",shellscript,selection_keyboard +4237,4620956,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1234,0,"a",shellscript,content +4238,4620957,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1235,0,"",shellscript,selection_keyboard +4239,4621060,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1235,0,"t",shellscript,content +4240,4621061,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1236,0,"",shellscript,selection_keyboard +4241,4621178,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1236,0,"c",shellscript,content +4242,4621179,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1237,0,"",shellscript,selection_keyboard +4243,4621259,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1237,0,"h",shellscript,content +4244,4621260,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1238,0,"",shellscript,selection_keyboard +4245,4621441,"TERMINAL",0,0,"4",,terminal_output +4246,4621661,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1238,0,"_",shellscript,content +4247,4621662,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1239,0,"",shellscript,selection_keyboard +4248,4621983,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1239,0,"s",shellscript,content 
+4249,4621984,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1240,0,"",shellscript,selection_keyboard +4250,4622058,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1240,0,"i",shellscript,content +4251,4622058,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1241,0,"",shellscript,selection_keyboard +4252,4622214,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1241,0,"z",shellscript,content +4253,4622215,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1242,0,"",shellscript,selection_keyboard +4254,4622325,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1242,0,"e",shellscript,content +4255,4622326,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1243,0,"",shellscript,selection_keyboard +4256,4622522,"TERMINAL",0,0,"5",,terminal_output +4257,4622697,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1243,0,"_",shellscript,content +4258,4622698,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1244,0,"",shellscript,selection_keyboard +4259,4623564,"TERMINAL",0,0,"6",,terminal_output +4260,4623776,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1244,0,"3",shellscript,content +4261,4623777,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1245,0,"",shellscript,selection_keyboard +4262,4624165,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1245,0,"6",shellscript,content +4263,4624166,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1246,0,"",shellscript,selection_keyboard +4264,4624566,"TERMINAL",0,0,"7",,terminal_output +4265,4625649,"TERMINAL",0,0,"8",,terminal_output +4266,4626341,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",389,0,"",shellscript,selection_mouse +4267,4626724,"TERMINAL",0,0,"9",,terminal_output +4268,4627078,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",389,0,"_",shellscript,content +4269,4627079,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",390,0,"",shellscript,selection_keyboard +4270,4627436,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",390,0,"b",shellscript,content +4271,4627437,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",391,0,"",shellscript,selection_keyboard +4272,4627505,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",391,0,"a",shellscript,content 
+4273,4627506,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",392,0,"",shellscript,selection_keyboard +4274,4627592,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",392,0,"t",shellscript,content +4275,4627593,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",393,0,"",shellscript,selection_keyboard +4276,4627701,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",393,0,"c",shellscript,content +4277,4627702,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",394,0,"",shellscript,selection_keyboard +4278,4627732,"TERMINAL",0,0,"30",,terminal_output +4279,4627809,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",394,0,"h",shellscript,content +4280,4627810,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",395,0,"",shellscript,selection_keyboard +4281,4628580,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",395,0,"_",shellscript,content +4282,4628581,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",396,0,"",shellscript,selection_keyboard +4283,4628753,"TERMINAL",0,0,"1",,terminal_output +4284,4629195,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",396,0,"s",shellscript,content +4285,4629196,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",397,0,"",shellscript,selection_keyboard +4286,4629262,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",397,0,"i",shellscript,content +4287,4629263,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",398,0,"",shellscript,selection_keyboard +4288,4629504,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",398,0,"z",shellscript,content +4289,4629505,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",399,0,"",shellscript,selection_keyboard +4290,4629686,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",399,0,"e",shellscript,content +4291,4629687,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",400,0,"",shellscript,selection_keyboard +4292,4629811,"TERMINAL",0,0,"2",,terminal_output +4293,4630550,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",400,0,"3",shellscript,content +4294,4630551,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",401,0,"",shellscript,selection_keyboard +4295,4630587,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",401,0,"6",shellscript,content 
+4296,4630588,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",402,0,"",shellscript,selection_keyboard +4297,4630884,"TERMINAL",0,0,"3",,terminal_output +4298,4631262,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",401,0,"",shellscript,selection_command +4299,4631432,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",400,0,"",shellscript,selection_command +4300,4631738,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",400,0,"_",shellscript,content +4301,4631739,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",401,0,"",shellscript,selection_keyboard +4302,4631862,"TERMINAL",0,0,"4",,terminal_output +4303,4632912,"TERMINAL",0,0,"5",,terminal_output +4304,4634070,"TERMINAL",0,0,"6",,terminal_output +4305,4635246,"TERMINAL",0,0,"7",,terminal_output +4306,4636213,"TERMINAL",0,0,"8",,terminal_output +4307,4636839,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1899,0,"",shellscript,selection_mouse +4308,4637340,"TERMINAL",0,0,"9",,terminal_output +4309,4637423,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1921,0,"",shellscript,selection_mouse +4310,4638082,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1873,0,"",shellscript,selection_mouse +4311,4638239,"TERMINAL",0,0,"40",,terminal_output +4312,4638775,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1970,0,"",shellscript,selection_mouse +4313,4639291,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1899,0,"",shellscript,selection_mouse +4314,4639325,"TERMINAL",0,0,"1",,terminal_output +4315,4640419,"TERMINAL",0,0,"2",,terminal_output +4316,4641440,"TERMINAL",0,0,"3",,terminal_output +4317,4642407,"TERMINAL",0,0,"5",,terminal_output +4318,4643460,"TERMINAL",0,0,"6",,terminal_output +4319,4644486,"TERMINAL",0,0,"7",,terminal_output +4320,4645522,"TERMINAL",0,0,"8",,terminal_output +4321,4646568,"TERMINAL",0,0,"9",,terminal_output +4322,4647615,"TERMINAL",0,0,"50",,terminal_output +4323,4648804,"TERMINAL",0,0,"1",,terminal_output +4324,4650034,"TERMINAL",0,0,"2",,terminal_output +4325,4650737,"TERMINAL",0,0,"3",,terminal_output +4326,4651781,"TERMINAL",0,0,"4",,terminal_output +4327,4652823,"TERMINAL",0,0,"5",,terminal_output +4328,4653864,"TERMINAL",0,0,"6",,terminal_output +4329,4654901,"TERMINAL",0,0,"7",,terminal_output +4330,4655970,"TERMINAL",0,0,"8",,terminal_output +4331,4657060,"TERMINAL",0,0,"9",,terminal_output +4332,4658142,"TERMINAL",0,0,"7:00",,terminal_output +4333,4659069,"TERMINAL",0,0,"1",,terminal_output +4334,4660309,"TERMINAL",0,0,"2",,terminal_output +4335,4661303,"TERMINAL",0,0,"3",,terminal_output +4336,4662178,"TERMINAL",0,0,"4",,terminal_output +4337,4663212,"TERMINAL",0,0,"5",,terminal_output +4338,4664255,"TERMINAL",0,0,"6",,terminal_output +4339,4665312,"TERMINAL",0,0,"7",,terminal_output +4340,4666349,"TERMINAL",0,0,"8",,terminal_output +4341,4667481,"TERMINAL",0,0,"10",,terminal_output 
+4342,4668424,"TERMINAL",0,0,"1",,terminal_output +4343,4669482,"TERMINAL",0,0,"2",,terminal_output +4344,4670608,"TERMINAL",0,0,"3",,terminal_output +4345,4671551,"TERMINAL",0,0,"4",,terminal_output +4346,4672952,"TERMINAL",0,0,"5",,terminal_output +4347,4673637,"TERMINAL",0,0,"6",,terminal_output +4348,4674686,"TERMINAL",0,0,"7",,terminal_output +4349,4675721,"TERMINAL",0,0,"8",,terminal_output +4350,4676755,"TERMINAL",0,0,"9",,terminal_output +4351,4677944,"TERMINAL",0,0,"20",,terminal_output +4352,4678991,"TERMINAL",0,0,"1",,terminal_output +4353,4680108,"TERMINAL",0,0,"2",,terminal_output +4354,4680942,"TERMINAL",0,0,"3",,terminal_output +4355,4682001,"TERMINAL",0,0,"4",,terminal_output +4356,4683018,"TERMINAL",0,0,"5",,terminal_output +4357,4684144,"TERMINAL",0,0,"6",,terminal_output +4358,4685067,"TERMINAL",0,0,"7",,terminal_output +4359,4686112,"TERMINAL",0,0,"8",,terminal_output +4360,4687149,"TERMINAL",0,0,"9",,terminal_output +4361,4688187,"TERMINAL",0,0,"30",,terminal_output +4362,4689253,"TERMINAL",0,0,"1",,terminal_output +4363,4690348,"TERMINAL",0,0,"2",,terminal_output +4364,4691418,"TERMINAL",0,0,"3",,terminal_output +4365,4692436,"TERMINAL",0,0,"4",,terminal_output +4366,4693443,"TERMINAL",0,0,"6",,terminal_output +4367,4694429,"TERMINAL",0,0,"7",,terminal_output +4368,4695460,"TERMINAL",0,0,"8",,terminal_output +4369,4696505,"TERMINAL",0,0,"9",,terminal_output +4370,4697546,"TERMINAL",0,0,"40",,terminal_output +4371,4698593,"TERMINAL",0,0,"1",,terminal_output +4372,4699647,"TERMINAL",0,0,"2",,terminal_output +4373,4700661,"TERMINAL",0,0,"3",,terminal_output +4374,4701786,"TERMINAL",0,0,"4",,terminal_output +4375,4702796,"TERMINAL",0,0,"5",,terminal_output +4376,4703904,"TERMINAL",0,0,"6",,terminal_output +4377,4704878,"TERMINAL",0,0,"7",,terminal_output +4378,4705952,"TERMINAL",0,0,"8",,terminal_output +4379,4706972,"TERMINAL",0,0,"9",,terminal_output +4380,4708096,"TERMINAL",0,0,"50",,terminal_output +4381,4709221,"TERMINAL",0,0,"1",,terminal_output +4382,4710116,"TERMINAL",0,0,"2",,terminal_output +4383,4711162,"TERMINAL",0,0,"3",,terminal_output +4384,4712200,"TERMINAL",0,0,"4",,terminal_output +4385,4713254,"TERMINAL",0,0,"5",,terminal_output +4386,4714292,"TERMINAL",0,0,"6",,terminal_output +4387,4715376,"TERMINAL",0,0,"7",,terminal_output +4388,4716503,"TERMINAL",0,0,"9",,terminal_output +4389,4717447,"TERMINAL",0,0,"8:00",,terminal_output +4390,4718495,"TERMINAL",0,0,"1",,terminal_output +4391,4719528,"TERMINAL",0,0,"2",,terminal_output +4392,4720588,"TERMINAL",0,0,"3",,terminal_output +4393,4721623,"TERMINAL",0,0,"4",,terminal_output +4394,4722666,"TERMINAL",0,0,"5",,terminal_output +4395,4723843,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1971,0,"",shellscript,selection_mouse +4396,4723871,"TERMINAL",0,0,"6",,terminal_output +4397,4724441,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2022,0,"",shellscript,selection_mouse +4398,4724770,"TERMINAL",0,0,"7",,terminal_output +4399,4725166,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2069,0,"",shellscript,selection_mouse +4400,4725813,"TERMINAL",0,0,"8",,terminal_output +4401,4725927,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1809,0,"",shellscript,selection_mouse +4402,4726938,"TERMINAL",0,0,"9",,terminal_output 
+4403,4727971,"TERMINAL",0,0,"10",,terminal_output +4404,4728968,"TERMINAL",0,0,"1",,terminal_output +4405,4729992,"TERMINAL",0,0,"2",,terminal_output +4406,4731056,"TERMINAL",0,0,"3",,terminal_output +4407,4732090,"TERMINAL",0,0,"4",,terminal_output +4408,4733124,"TERMINAL",0,0,"5",,terminal_output +4409,4734352,"TERMINAL",0,0,"6",,terminal_output +4410,4735045,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jasmine",,terminal_output +4411,4736261,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4412,4744387,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=02:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit_default_bs36\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=36 \\n --patch_size=16 \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --log \\n --log_checkpoint_interval=10000 \\n --name=coinrun-dynamics-maskgit-default-bs36$slurm_job_id \\n --tags coinrun dynamics maskgit default bs36 \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --data_dir $array_records_dir_train &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +4413,4746840,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"",shellscript,tab +4414,4751888,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab 
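Both the coinrun scripts and the train_dyn_default.sh opened at record 4412 share the same requeue-on-timeout mechanism: sbatch delivers SIGUSR1 to the batch shell five minutes before the walltime limit, a trap requeues the job, and the training process is backgrounded so the shell can run the trap while waiting. A minimal self-contained sketch of the pattern, condensed from the recorded scripts and assuming a SLURM cluster where scontrol may requeue the job:

    #!/usr/bin/env bash
    #SBATCH --requeue
    #SBATCH --signal=b:usr1@300   # SIGUSR1 to the batch shell, 5 min before timeout

    requeue_job() {
        echo "[$(date)] caught SIGUSR1 (timeout warning), requeueing job $SLURM_JOB_ID..."
        scontrol requeue "$SLURM_JOB_ID"
        exit 0
    }
    trap requeue_job SIGUSR1

    # Backgrounding + wait is what lets the trap fire promptly: bash only runs
    # traps between foreground commands, while `wait` is interruptible by signals.
    srun python jasmine/train_dynamics.py --num_steps=1000 &
    child_pid=$!
    wait $child_pid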
+4415,4751889,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1734,0,"",shellscript,selection_mouse +4416,4753192,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"",shellscript,tab +4417,4753193,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1960,0,"",shellscript,selection_mouse +4418,4753193,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1938,22,"ize=16 \\n --warmup_",shellscript,selection_mouse +4419,4753613,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1920,0,"",shellscript,selection_mouse +4420,4755537,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1958,0,"",shellscript,selection_mouse +4421,4755973,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1982,0,"",shellscript,selection_mouse +4422,4756386,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2008,0,"",shellscript,selection_mouse +4423,4761942,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2006,0,"",shellscript,selection_mouse +4424,4763504,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1957,0,"",shellscript,selection_mouse +4425,4764728,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1947,22," --warmup_steps=0 \",shellscript,selection_command +4426,4764986,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1947,48," --warmup_steps=0 \\n --wsd_decay_steps=0 \",shellscript,selection_command +4427,4765146,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1947,71," --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \",shellscript,selection_command +4428,4765813,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",1947,0,"",shellscript,selection_command +4429,4767800,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4430,4767800,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1909,0,"",shellscript,selection_mouse +4431,4768713,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1908,0,"",shellscript,selection_command +4432,4769700,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1921,0,"\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \",shellscript,content +4433,4769732,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1926,0,"",shellscript,selection_command +4434,4821792,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1993,0,"",shellscript,selection_mouse +4435,4821809,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1992,0,"",shellscript,selection_command +4436,4824034,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2108,0,"",shellscript,selection_mouse 
+4437,4824738,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2062,0,"",shellscript,selection_mouse +4438,4826364,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2047,48,"",shellscript,content +4439,4826430,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2051,0,"",shellscript,selection_command +4440,4831735,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1998,0,"",shellscript,selection_command +4441,4831909,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1975,0,"",shellscript,selection_command +4442,4832068,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1998,0,"",shellscript,selection_command +4443,4832269,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2051,0,"",shellscript,selection_command +4444,4832420,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1998,0,"",shellscript,selection_command +4445,4832717,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1975,0,"",shellscript,selection_command +4446,4832969,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1998,0,"",shellscript,selection_command +4447,4833192,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1975,0,"",shellscript,selection_command +4448,4836466,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"",shellscript,tab +4449,4836466,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2041,0,"",shellscript,selection_mouse +4450,4838670,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n 
tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.float32\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, 
PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is 
None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n 
masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n 
(loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = 
einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +4451,4838938,"jasmine/train_dynamics.py",2169,24," dtype = jnp.bfloat16\n",python,content +4452,4840429,"jasmine/train_dynamics.py",2193,0,"",python,selection_mouse +4453,4840430,"jasmine/train_dynamics.py",2192,0,"",python,selection_command +4454,4840876,"jasmine/train_dynamics.py",2230,0,"",python,selection_mouse +4455,4840878,"jasmine/train_dynamics.py",2229,0,"",python,selection_command +4456,4844877,"jasmine/train_dynamics.py",2479,0,"",python,selection_mouse +4457,4845616,"jasmine/train_dynamics.py",2472,18,"log_image_interval",python,selection_mouse +4458,4846296,"jasmine/train_dynamics.py",2479,0,"",python,selection_mouse +4459,4846297,"jasmine/train_dynamics.py",2472,18,"log_image_interval",python,selection_mouse +4460,4847573,"jasmine/train_dynamics.py",2540,0,"",python,selection_mouse 
+4461,4847782,"jasmine/train_dynamics.py",2530,23,"log_checkpoint_interval",python,selection_mouse +4462,4849928,"jasmine/train_dynamics.py",2579,0,"",python,selection_mouse +4463,4850034,"jasmine/train_dynamics.py",2570,26,"log_checkpoint_keep_period",python,selection_mouse +4464,4851480,"jasmine/train_dynamics.py",2714,0,"",python,selection_mouse +4465,4852559,"jasmine/train_dynamics.py",2682,0,"",python,selection_mouse +4466,4853702,"jasmine/train_dynamics.py",2452,0,"",python,selection_mouse +4467,4853855,"jasmine/train_dynamics.py",2445,12,"log_interval",python,selection_mouse +4468,4855965,"jasmine/train_dynamics.py",2475,0,"",python,selection_mouse +4469,4856103,"jasmine/train_dynamics.py",2472,18,"log_image_interval",python,selection_mouse +4470,4859545,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4471,4860981,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1993,0,"\n ",shellscript,content +4472,4861475,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1998,0,"log_image_interval",shellscript,content +4473,4864110,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1998,0,"-",shellscript,content +4474,4864112,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1999,0,"",shellscript,selection_keyboard +4475,4864232,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1999,0,"-",shellscript,content +4476,4864233,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2000,0,"",shellscript,selection_keyboard +4477,4865948,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2018,0,"=",shellscript,content +4478,4865950,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2019,0,"",shellscript,selection_keyboard +4479,4870799,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2019,0,"2",shellscript,content +4480,4870801,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2020,0,"",shellscript,selection_keyboard +4481,4870850,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2020,0,"0",shellscript,content +4482,4870850,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2021,0,"",shellscript,selection_keyboard +4483,4871002,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2021,0,"0",shellscript,content +4484,4871004,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2022,0,"",shellscript,selection_keyboard +4485,4871169,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2022,0,"0",shellscript,content 
+4486,4871170,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2023,0,"",shellscript,selection_keyboard +4487,4871938,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2023,0," ",shellscript,content +4488,4871939,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2024,0,"",shellscript,selection_keyboard +4489,4872434,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2024,0,"\",shellscript,content +4490,4872435,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2025,0,"",shellscript,selection_keyboard +4491,4872885,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2024,0,"",shellscript,selection_command +4492,4874119,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2139,0,"",shellscript,selection_mouse +4493,4928309,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2125,0,"",shellscript,selection_mouse +4494,4928767,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2052,0,"",shellscript,selection_mouse +4495,4929158,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2023,0,"",shellscript,selection_mouse +4496,4929540,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2052,0,"",shellscript,selection_mouse +4497,4929867,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2021,0,"",shellscript,selection_mouse +4498,4930099,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2051,0,"",shellscript,selection_mouse +4499,4930376,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2021,0,"",shellscript,selection_mouse +4500,4930662,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2051,0,"",shellscript,selection_mouse +4501,4930913,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2020,0,"",shellscript,selection_mouse +4502,4931210,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2049,0,"",shellscript,selection_mouse +4503,4931459,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2019,0,"",shellscript,selection_mouse +4504,4931742,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2047,0,"",shellscript,selection_mouse +4505,4931999,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2022,0,"",shellscript,selection_mouse 
+4506,4932294,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2052,0,"",shellscript,selection_mouse +4507,4932535,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2022,0,"",shellscript,selection_mouse +4508,4932801,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2052,0,"",shellscript,selection_mouse +4509,4932969,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2032,20,"tokenizer_checkpoint",shellscript,selection_mouse +4510,4933256,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2022,0,"",shellscript,selection_mouse +4511,4933397,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2019,4,"2000",shellscript,selection_mouse +4512,4933820,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2051,0,"",shellscript,selection_mouse +4513,4933950,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2032,20,"tokenizer_checkpoint",shellscript,selection_mouse +4514,4962808,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2020,0,"",shellscript,selection_mouse +4515,4963973,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2019,1,"",shellscript,content +4516,4964072,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2019,0,"1",shellscript,content +4517,4964073,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2020,0,"",shellscript,selection_keyboard +4518,4964167,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2020,0,"0",shellscript,content +4519,4964168,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2021,0,"",shellscript,selection_keyboard +4520,4967498,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",0,0,"",shellscript,tab +4521,4967499,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2044,0,"",shellscript,selection_mouse +4522,4967596,"slurm/jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh",2044,5,"10000",shellscript,selection_mouse +4523,4972380,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4524,4972381,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1921,0,"",shellscript,selection_mouse +4525,4973159,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1899,0,"",shellscript,selection_mouse +4526,4973701,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1921,0,"",shellscript,selection_mouse 
+4527,4974510,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1745,0,"",shellscript,selection_mouse +4528,4975612,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1720,0,"",shellscript,selection_mouse +4529,4977475,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1720,0,"\n ",shellscript,content +4530,4978639,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1725,0,"-",shellscript,content +4531,4978640,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1726,0,"",shellscript,selection_keyboard +4532,4978774,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1726,0,"-",shellscript,content +4533,4978775,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1727,0,"",shellscript,selection_keyboard +4534,4978955,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1727,0,"p",shellscript,content +4535,4978956,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1728,0,"",shellscript,selection_keyboard +4536,4979058,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1728,0,"a",shellscript,content +4537,4979059,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1729,0,"",shellscript,selection_keyboard +4538,4979251,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1729,0,"t",shellscript,content +4539,4979252,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1730,0,"",shellscript,selection_keyboard +4540,4979462,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1730,0,"c",shellscript,content +4541,4979463,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1731,0,"",shellscript,selection_keyboard +4542,4979540,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1731,0,"h",shellscript,content +4543,4979542,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1732,0,"",shellscript,selection_keyboard +4544,4979824,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1732,0,"_",shellscript,content +4545,4979825,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1733,0,"",shellscript,selection_keyboard +4546,4980106,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1733,0,"s",shellscript,content 
+4547,4980107,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1734,0,"",shellscript,selection_keyboard +4548,4980191,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1734,0,"i",shellscript,content +4549,4980192,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1735,0,"",shellscript,selection_keyboard +4550,4980378,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1735,0,"z",shellscript,content +4551,4980379,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1736,0,"",shellscript,selection_keyboard +4552,4980493,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1736,0,"e",shellscript,content +4553,4980494,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,0,"",shellscript,selection_keyboard +4554,4981096,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,0,"_",shellscript,content +4555,4981097,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1738,0,"",shellscript,selection_keyboard +4556,4981441,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1738,0,"1",shellscript,content +4557,4981442,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1739,0,"",shellscript,selection_keyboard +4558,4981521,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1739,0,"6",shellscript,content +4559,4981522,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1740,0,"",shellscript,selection_keyboard +4560,4982361,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1739,0,"",shellscript,selection_command +4561,4986208,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1740,0,"\n ",shellscript,content +4562,4986537,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1745,0,"-",shellscript,content +4563,4986538,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1746,0,"",shellscript,selection_keyboard +4564,4986683,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1746,0,"-",shellscript,content +4565,4986683,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1747,0,"",shellscript,selection_keyboard +4566,4987111,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1747,0,"a",shellscript,content 
+4567,4987112,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1748,0,"",shellscript,selection_keyboard +4568,4987495,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1747,1,"",shellscript,content +4569,4987843,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1747,0,"b",shellscript,content +4570,4987845,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1748,0,"",shellscript,selection_keyboard +4571,4987944,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1748,0,"a",shellscript,content +4572,4987945,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1749,0,"",shellscript,selection_keyboard +4573,4988062,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1749,0,"t",shellscript,content +4574,4988063,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1750,0,"",shellscript,selection_keyboard +4575,4988143,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1750,0,"c",shellscript,content +4576,4988144,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1751,0,"",shellscript,selection_keyboard +4577,4988263,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1751,0,"h",shellscript,content +4578,4988264,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1752,0,"",shellscript,selection_keyboard +4579,4988589,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1752,0,"_",shellscript,content +4580,4988590,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1753,0,"",shellscript,selection_keyboard +4581,4989034,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1747,6,"batch_size",shellscript,content +4582,4991466,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1757,0,"=",shellscript,content +4583,4991467,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1758,0,"",shellscript,selection_keyboard +4584,4993160,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1758,0,"3",shellscript,content +4585,4993162,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1759,0,"",shellscript,selection_keyboard +4586,4993207,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1759,0,"6",shellscript,content 
+4587,4993208,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1760,0,"",shellscript,selection_keyboard +4588,4994153,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1760,0," ",shellscript,content +4589,4994154,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1761,0,"",shellscript,selection_keyboard +4590,4994361,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1761,0,"\",shellscript,content +4591,4994362,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1762,0,"",shellscript,selection_keyboard +4592,4994852,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1761,0,"",shellscript,selection_command +4593,4995001,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1739,0,"",shellscript,selection_command +4594,4995443,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1740,0,"",shellscript,selection_command +4595,4995792,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1740,0," ",shellscript,content +4596,4995793,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1741,0,"",shellscript,selection_keyboard +4597,4996888,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1741,0,"\",shellscript,content +4598,4996889,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1742,0,"",shellscript,selection_keyboard +4599,4997054,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1741,0,"",shellscript,selection_command +4600,4999273,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1763,0,"",shellscript,selection_command +4601,5000436,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1762,0,"",shellscript,selection_command +4602,5000556,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1761,0,"",shellscript,selection_command +4603,5000864,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1760,0,"",shellscript,selection_command +4604,5001152,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1759,0,"",shellscript,selection_command +4605,5002076,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1759,1,"=",shellscript,selection_command +4606,5002291,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,1,"_",shellscript,selection_command 
+4607,5004493,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,0,"",shellscript,selection_command +4608,5005343,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1759,1,"",shellscript,content +4609,5005344,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,1,"",shellscript,content +4610,5005429,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1758,0," ",shellscript,content +4611,5005430,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,0," ",shellscript,content +4612,5005430,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1738,0,"",shellscript,selection_keyboard +4613,5005881,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,0,"",shellscript,selection_command +4614,5007945,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,1," ",shellscript,selection_command +4615,5008158,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,1," ",shellscript,selection_command +4616,5008428,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,0,"",shellscript,selection_command +4617,5009465,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1759,1,"",shellscript,content +4618,5009465,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,1,"",shellscript,content +4619,5009958,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1758,0,"=",shellscript,content +4620,5009959,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,0,"=",shellscript,content +4621,5009960,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1738,0,"",shellscript,selection_keyboard +4622,5010710,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1737,0,"",shellscript,selection_command +4623,5039277,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2014,0,"",shellscript,selection_mouse +4624,5039279,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2013,0,"",shellscript,selection_command +4625,5061188,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1889,0,"",shellscript,selection_mouse +4626,5061843,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1789,0,"",shellscript,selection_mouse 
+4627,5061850,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1788,0,"",shellscript,selection_command +4628,5062502,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1943,0,"",shellscript,selection_mouse +4629,5062508,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1942,0,"",shellscript,selection_command +4630,5062829,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1988,0,"",shellscript,selection_mouse +4631,5062833,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1987,0,"",shellscript,selection_command +4632,5063149,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2070,0,"",shellscript,selection_mouse +4633,5063186,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2069,0,"",shellscript,selection_command +4634,5063475,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2169,0,"",shellscript,selection_mouse +4635,5063476,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2168,0,"",shellscript,selection_command +4636,5063859,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2123,0,"",shellscript,selection_mouse +4637,5063892,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2122,0,"",shellscript,selection_command +4638,5064434,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2037,0,"",shellscript,selection_mouse +4639,5064460,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2036,0,"",shellscript,selection_command +4640,5064926,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1988,0,"",shellscript,selection_mouse +4641,5064963,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1987,0,"",shellscript,selection_command +4642,5065493,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1988,0,"",shellscript,selection_mouse +4643,5065527,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1987,0,"",shellscript,selection_command +4644,5065877,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2014,0,"",shellscript,selection_mouse +4645,5065915,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2013,0,"",shellscript,selection_command +4646,5066254,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2070,0,"",shellscript,selection_mouse 
+4647,5066255,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2069,0,"",shellscript,selection_command +4648,5066762,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2037,0,"",shellscript,selection_mouse +4649,5066763,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2036,0,"",shellscript,selection_command +4650,5067280,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2070,0,"",shellscript,selection_mouse +4651,5067286,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2069,0,"",shellscript,selection_command +4652,5067795,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2170,0,"",shellscript,selection_mouse +4653,5068248,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2169,0,"",shellscript,selection_mouse +4654,5068262,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2168,0,"",shellscript,selection_command +4655,5068647,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2123,0,"",shellscript,selection_mouse +4656,5068647,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2122,0,"",shellscript,selection_command +4657,5069096,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2037,0,"",shellscript,selection_mouse +4658,5069097,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2036,0,"",shellscript,selection_command +4659,5069580,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2070,0,"",shellscript,selection_mouse +4660,5069581,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2069,0,"",shellscript,selection_command +4661,5069881,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2014,0,"",shellscript,selection_mouse +4662,5069897,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2013,0,"",shellscript,selection_command +4663,5070398,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2037,0,"",shellscript,selection_mouse +4664,5070430,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2036,0,"",shellscript,selection_command +4665,5070431,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2036,1,"\",shellscript,selection_mouse +4666,5070432,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2037,0,"",shellscript,selection_command 
+4667,5070723,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1988,0,"",shellscript,selection_mouse +4668,5070749,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1987,0,"",shellscript,selection_command +4669,5071242,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2014,0,"",shellscript,selection_mouse +4670,5071248,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2013,0,"",shellscript,selection_command +4671,5071315,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2013,1,"\",shellscript,selection_mouse +4672,5071348,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2014,0,"",shellscript,selection_command +4673,5072200,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2037,0,"",shellscript,selection_mouse +4674,5072232,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2036,0,"",shellscript,selection_command +4675,5072618,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2014,0,"",shellscript,selection_mouse +4676,5072618,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",2013,0,"",shellscript,selection_command +4677,5073016,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1988,0,"",shellscript,selection_mouse +4678,5073054,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1987,0,"",shellscript,selection_command +4679,5073383,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1943,0,"",shellscript,selection_mouse +4680,5073384,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1942,0,"",shellscript,selection_command +4681,5073984,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1965,0,"",shellscript,selection_mouse +4682,5073985,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1964,0,"",shellscript,selection_command +4683,5074268,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1917,0,"",shellscript,selection_mouse +4684,5074293,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1916,0,"",shellscript,selection_command +4685,5075035,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1896,0,"",shellscript,selection_mouse +4686,5075051,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1895,0,"",shellscript,selection_command 
+4687,5075460,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1820,0,"",shellscript,selection_mouse +4688,5075491,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1819,0,"",shellscript,selection_command +4689,5075910,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1853,0,"",shellscript,selection_mouse +4690,5075919,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1852,0,"",shellscript,selection_command +4691,5075952,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1852,1,"\",shellscript,selection_mouse +4692,5075953,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1853,0,"",shellscript,selection_command +4693,5076637,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1820,0,"",shellscript,selection_mouse +4694,5076647,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1819,0,"",shellscript,selection_command +4695,5077170,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1853,0,"",shellscript,selection_mouse +4696,5077171,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1852,0,"",shellscript,selection_command +4697,5077189,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1852,1,"\",shellscript,selection_mouse +4698,5077221,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1853,0,"",shellscript,selection_command +4699,5078639,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1917,0,"",shellscript,selection_mouse +4700,5078656,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",1916,0,"",shellscript,selection_command +4701,5111184,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4702,5173229,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_speed_test_batch_size_36\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set 
checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission speed_test batch_size_36""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --patch_size=16 \\n --batch_size=36 \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +4703,5175154,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",403,0,"",shellscript,selection_mouse +4704,5175189,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",402,0,"",shellscript,selection_command +4705,5175885,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",403,0,"",shellscript,selection_command +4706,5176553,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",402,1,"",shellscript,content +4707,5176638,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",401,1,"",shellscript,content +4708,5177353,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",401,0,"2",shellscript,content +4709,5177354,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",402,0,"",shellscript,selection_keyboard +4710,5179398,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",402,0,"4",shellscript,content +4711,5179399,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",403,0,"",shellscript,selection_keyboard +4712,5179471,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",403,0,"8",shellscript,content +4713,5179472,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",404,0,"",shellscript,selection_keyboard +4714,5180000,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",403,0,"",shellscript,selection_command 
+4715,5182911,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1762,0,"",shellscript,selection_mouse +4716,5184066,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1763,0,"",shellscript,selection_command +4717,5184469,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1762,1,"",shellscript,content +4718,5184577,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1761,1,"",shellscript,content +4719,5185160,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1761,0,"2",shellscript,content +4720,5185160,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1762,0,"",shellscript,selection_keyboard +4721,5185429,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1762,0,"4",shellscript,content +4722,5185430,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1763,0,"",shellscript,selection_keyboard +4723,5185813,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1763,0,"8",shellscript,content +4724,5185813,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1764,0,"",shellscript,selection_keyboard +4725,5188085,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1261,0,"",shellscript,selection_mouse +4726,5188919,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1260,1,"",shellscript,content +4727,5189029,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1259,1,"",shellscript,content +4728,5189177,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1259,0,"2",shellscript,content +4729,5189178,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1260,0,"",shellscript,selection_keyboard +4730,5189518,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1260,0,"4",shellscript,content +4731,5189518,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1261,0,"",shellscript,selection_keyboard +4732,5189734,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1261,0,"8",shellscript,content +4733,5189735,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1262,0,"",shellscript,selection_keyboard +4734,5190255,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",1261,0,"",shellscript,selection_command 
+4735,5212672,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4736,5230982,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4737,5496660,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4738,5498704,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4739,5501042,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4740,5506160,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation copy.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_speed_test_batch_size_36\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission speed_test batch_size_36""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --patch_size=16 \\n --batch_size=36 \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +4741,5521260,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH 
--output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_speed_test_batch_size_36\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission speed_test batch_size_36""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --patch_size=16 \\n --batch_size=36 \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +4742,5530192,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1259,0,"",shellscript,selection_mouse +4743,5530627,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1261,0,"",shellscript,selection_mouse +4744,5530628,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1260,0,"",shellscript,selection_command +4745,5531623,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1260,0," ",shellscript,content +4746,5531624,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1261,0,"",shellscript,selection_keyboard +4747,5531906,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1261,0,"a",shellscript,content +4748,5531906,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1262,0,"",shellscript,selection_keyboard +4749,5532069,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1262,0,"b",shellscript,content 
+4750,5532070,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1263,0,"",shellscript,selection_keyboard +4751,5532158,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1263,0,"l",shellscript,content +4752,5532159,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1264,0,"",shellscript,selection_keyboard +4753,5532292,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1264,0,"a",shellscript,content +4754,5532293,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1265,0,"",shellscript,selection_keyboard +4755,5532523,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1265,0,"t",shellscript,content +4756,5532523,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1266,0,"",shellscript,selection_keyboard +4757,5532655,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1266,0,"o",shellscript,content +4758,5532656,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1267,0,"",shellscript,selection_keyboard +4759,5532694,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1267,0,"p",shellscript,content +4760,5532695,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1268,0,"",shellscript,selection_keyboard +4761,5533231,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1267,1,"",shellscript,content +4762,5533355,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1266,1,"",shellscript,content +4763,5533519,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1266,0,"i",shellscript,content +4764,5533519,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1267,0,"",shellscript,selection_keyboard +4765,5533599,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1267,0,"o",shellscript,content +4766,5533600,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1268,0,"",shellscript,selection_keyboard +4767,5533835,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1268,0,"n",shellscript,content +4768,5533836,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1269,0,"",shellscript,selection_keyboard +4769,5533935,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1269,0," ",shellscript,content +4770,5533936,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1270,0,"",shellscript,selection_keyboard 
+4771,5534068,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1270,0,"d",shellscript,content +4772,5534069,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1271,0,"",shellscript,selection_keyboard +4773,5534249,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1271,0,"a",shellscript,content +4774,5534249,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1272,0,"",shellscript,selection_keyboard +4775,5534284,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1272,0,"t",shellscript,content +4776,5534285,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1273,0,"",shellscript,selection_keyboard +4777,5534462,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1273,0,"a",shellscript,content +4778,5534463,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1274,0,"",shellscript,selection_keyboard +4779,5534602,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1274,0,"l",shellscript,content +4780,5534602,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1275,0,"",shellscript,selection_keyboard +4781,5534781,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1275,0,"o",shellscript,content +4782,5534783,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1276,0,"",shellscript,selection_keyboard +4783,5534893,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1276,0,"a",shellscript,content +4784,5534893,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1277,0,"",shellscript,selection_keyboard +4785,5534975,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1277,0,"d",shellscript,content +4786,5534976,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1278,0,"",shellscript,selection_keyboard +4787,5535082,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1278,0,"e",shellscript,content +4788,5535083,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1279,0,"",shellscript,selection_keyboard +4789,5535185,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1279,0,"r",shellscript,content +4790,5535186,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",1280,0,"",shellscript,selection_keyboard +4791,5537233,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",313,0,"",shellscript,selection_mouse 
+4792,5537702,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",403,0,"",shellscript,selection_mouse +4793,5540568,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",403,0,"_",shellscript,content +4794,5540570,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",404,0,"",shellscript,selection_keyboard +4795,5541698,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",404,0,"g",shellscript,content +4796,5541699,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",405,0,"",shellscript,selection_keyboard +4797,5541774,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",405,0,"r",shellscript,content +4798,5541774,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",406,0,"",shellscript,selection_keyboard +4799,5541968,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",406,0,"a",shellscript,content +4800,5541969,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",407,0,"",shellscript,selection_keyboard +4801,5542041,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",407,0,"i",shellscript,content +4802,5542042,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",408,0,"",shellscript,selection_keyboard +4803,5542114,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",408,0,"n",shellscript,content +4804,5542115,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",409,0,"",shellscript,selection_keyboard +4805,5542788,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",409,0,"_",shellscript,content +4806,5542789,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",410,0,"",shellscript,selection_keyboard +4807,5543004,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",410,0,"a",shellscript,content +4808,5543005,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",411,0,"",shellscript,selection_keyboard +4809,5543183,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",411,0,"v",shellscript,content +4810,5543184,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",412,0,"",shellscript,selection_keyboard +4811,5543372,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",412,0,"l",shellscript,content +4812,5543373,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",413,0,"",shellscript,selection_keyboard 
+4813,5543471,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",413,0,"a",shellscript,content +4814,5543472,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",414,0,"",shellscript,selection_keyboard +4815,5543667,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",414,0,"t",shellscript,content +4816,5543668,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",415,0,"",shellscript,selection_keyboard +4817,5543806,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",415,0,"i",shellscript,content +4818,5543807,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",416,0,"",shellscript,selection_keyboard +4819,5544035,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",415,1,"",shellscript,content +4820,5544319,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",414,1,"",shellscript,content +4821,5544457,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",413,1,"",shellscript,content +4822,5544803,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",346,67,"",shellscript,content +4823,5545793,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",346,0,"dynamics_coinrun_mila_submission_speed_test_batch_size_36_grain_avl",shellscript,content +4824,5546075,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",412,1,"",shellscript,content +4825,5546571,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",411,1,"",shellscript,content +4826,5546591,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",410,1,"",shellscript,content +4827,5546638,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",409,1,"",shellscript,content +4828,5546677,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",408,1,"",shellscript,content +4829,5546711,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",407,1,"",shellscript,content +4830,5546747,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",406,1,"",shellscript,content +4831,5546779,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",405,1,"",shellscript,content +4832,5546812,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",404,1,"",shellscript,content +4833,5546843,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",403,1,"",shellscript,content 
+4834,5547747,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",403,0,"_",shellscript,content +4835,5547748,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",404,0,"",shellscript,selection_keyboard +4836,5547931,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",404,0,"d",shellscript,content +4837,5547932,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",405,0,"",shellscript,selection_keyboard +4838,5548119,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",405,0,"a",shellscript,content +4839,5548120,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",406,0,"",shellscript,selection_keyboard +4840,5548255,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",406,0,"t",shellscript,content +4841,5548256,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",407,0,"",shellscript,selection_keyboard +4842,5548373,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",407,0,"a",shellscript,content +4843,5548374,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",408,0,"",shellscript,selection_keyboard +4844,5548565,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",408,0,"l",shellscript,content +4845,5548566,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",409,0,"",shellscript,selection_keyboard +4846,5548718,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",409,0,"o",shellscript,content +4847,5548719,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",410,0,"",shellscript,selection_keyboard +4848,5548809,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",410,0,"a",shellscript,content +4849,5548810,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",411,0,"",shellscript,selection_keyboard +4850,5548921,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",411,0,"d",shellscript,content +4851,5548922,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",412,0,"",shellscript,selection_keyboard +4852,5549067,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",412,0,"e",shellscript,content +4853,5549067,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",413,0,"",shellscript,selection_keyboard +4854,5549170,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",413,0,"r",shellscript,content 
+4855,5549171,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",414,0,"",shellscript,selection_keyboard +4856,5549302,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",414,0,"_",shellscript,content +4857,5549303,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",415,0,"",shellscript,selection_keyboard +4858,5549549,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",415,0,"a",shellscript,content +4859,5549549,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",416,0,"",shellscript,selection_keyboard +4860,5549650,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",416,0,"b",shellscript,content +4861,5549651,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",417,0,"",shellscript,selection_keyboard +4862,5549853,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",417,0,"l",shellscript,content +4863,5549853,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",418,0,"",shellscript,selection_keyboard +4864,5549930,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",418,0,"a",shellscript,content +4865,5549931,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",419,0,"",shellscript,selection_keyboard +4866,5550140,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",419,0,"t",shellscript,content +4867,5550141,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",420,0,"",shellscript,selection_keyboard +4868,5550185,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",420,0,"i",shellscript,content +4869,5550186,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",421,0,"",shellscript,selection_keyboard +4870,5550265,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",421,0,"o",shellscript,content +4871,5550266,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",422,0,"",shellscript,selection_keyboard +4872,5550397,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",422,0,"n",shellscript,content +4873,5550398,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",423,0,"",shellscript,selection_keyboard +4874,5663211,"TERMINAL",0,0,"bash",,terminal_focus +4875,5664639,"TERMINAL",0,0,"cd slurm/",,terminal_command +4876,5666064,"TERMINAL",0,0,"git pull",,terminal_command +4877,5666103,"TERMINAL",0,0,"]633;C",,terminal_output +4878,5667911,"TERMINAL",0,0,"remote: Enumerating objects: 197, done.\r\nremote: Counting objects: 0% (1/177)\rremote: Counting objects: 1% (2/177)\rremote: Counting objects: 2% (4/177)\rremote: Counting 
objects: 100% (177/177), done.\r\n",,terminal_output
+4879,5667985,"TERMINAL",0,0,"remote: Compressing objects: 100% (87/87), done.\r\nremote: Total 151 (delta 67), reused 136 (delta 54), pack-reused 0 (from 0)\r\n",,terminal_output
+4880,5668211,"TERMINAL",0,0,"Receiving objects: 100% (151/151), 17.13 KiB | 113.00 KiB/s, done.\r\n",,terminal_output
+4881,5668304,"TERMINAL",0,0,"Resolving deltas: 100% (67/67), completed with 11 local objects.\r\n",,terminal_output
+4882,5668839,"TERMINAL",0,0,"From github.com:p-doom/slurm\r\n c792f14..e7208e3 main -> origin/main\r\nUpdating c792f14..e7208e3\r\n",,terminal_output
+4883,5670478,"TERMINAL",0,0,"Fast-forward\r\n",,terminal_output
+4884,5670601,"TERMINAL",0,0," dev/alfred/berlin/test_exposure_bias/dynamics_full_prec copy.sbatch | 82 +++++++++++++++++++++++++++++++++++++++++++++\r\n dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_full_prec.sbatch | 82 +++++++++++++++++++++++++++++++++++++++++++++\r\n dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_mix_prec.sbatch | 82 +++++++++++++++++++++++++++++++++++++++++++++\r\n dev/alfred/berlin/test_exposure_bias/dynamics_nan_fix.sbatch | 80 
++++++++++++++++++++++++++++++++++++++++++++\r\n dev/franz/berlin/coinrun/dynamics_debug/debug_gt_actions.sh | 28 ++++++++++++++++\r\n dev/franz/berlin/coinrun/sample/maskgit/sample_dynamics_from_fully_trained_tokenizer.sh | 5 +--\r\n jobs/alfred/berlin/coinrun/coinrun_data/generate_data_500mio_no_distinct_seeding.sbatch | 35 ++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/dataset_investigation/generate_data_500m_without_chunks.sbatch | 40 ++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/dataset_investigation/generate_data_test_without_chunks.sbatch | 41 +++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/dataset_investigation/generate_gen_1.sbatch | 36 ++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/dynamics_w_eval_ff.sbatch | 82 +++++++++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/dynamics_w_eval_ff_from_current_main.sbatch | 82 +++++++++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/coinrun/w_val/tokenizer_baseline_debug_default_jasming_old_commit.sbatch | 104 ++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/workshop/case_study_vanilla_genie/debug/lam.sh | 70 +++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/workshop/case_study_vanilla_genie/debug/tokenizer.sh | 72 ++++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/workshop/case_study_vanilla_genie/tokenizer.sh | 72 ++++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/workshop/case_study_vanilla_genie_250M/debug/tokenizer.sh | 73 +++++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/workshop/case_study_vanilla_genie_250M/tokenizer.sh | 72 ++++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/workshop/confirm_mix_prec_speedup/dynamics.sbatch | 84 +++++++++++++++++++++++++++++++++++++++++++++++\r\n jobs/{franz/berlin/coinrun/mila_submission => alfred/berlin/workshop/jafar_default}/coinrun_dynamics_base.sh | 0\r\n jobs/alfred/berlin/workshop/mix_prec_ablation/coinrun_dynamics_full_prec.sh | 71 +++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/workshop/mix_prec_ablation/coinrun_dynamics_mix_precision.sh | 70 +++++++++++++++++++++++++++++++++++++++\r\n jobs/alfred/berlin/workshop/mix_prec_ablation/coinrun_dynamics_mix_precision_no_flash_attention.sh | 71 +++++++++++++++++++++++++++++++++++++++\r\n jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh | 1 +\r\n jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_dynamics_base.sh | 70 +++++++++++++++++++++++++++++++++++++++\r\n jobs/franz/berlin/coinrun/mila_submission/{ => legacy_runs_50M_dataset}/coinrun_lam_base.sh | 0\r\n jobs/franz/berlin/coinrun/mila_submission/{ => legacy_runs_50M_dataset}/coinrun_lam_no_flash_attention.sh | 0\r\n jobs/franz/berlin/coinrun/mila_submission/{ => legacy_runs_50M_dataset}/coinrun_tokenizer_base.sh | 0\r\n .../legacy_runs_50M_dataset}/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh | 1 -\r\n .../legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh | 76 ++++++++++++++++++++++++++++++++++++++++++\r\n .../legacy_runs_50M_dataset}/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh | 0\r\n .../legacy_runs_50M_dataset}/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh | 1 -\r\n .../{ => mila_submission/legacy_runs_50M_dataset}/submission_debug/coinrun_dynamics_base_patch_size_4_main.sh | 4 
+--\r\n .../legacy_runs_50M_dataset}/submission_debug/coinrun_dynamics_base_patch_size_4_noise-branch.sh | 4 +--\r\n .../mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_gt_actions_concat_branch.sh | 77 +++++++++++++++++++++++++++++++++++++++++++\r\n .../coinrun/{ => mila_submission/legacy_runs_50M_dataset}/submission_debug/coinrun_tokenizer_patch_size_4.sh | 0\r\n 36 files changed, 1660 insertions(+), 8 deletions(-)\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/dynamics_full_prec copy.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_full_prec.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/dynamics_maskgit_mix_prec.sbatch\r\n create mode 100644 dev/alfred/berlin/test_exposure_bias/dynamics_nan_fix.sbatch\r\n create mode 100644 dev/franz/berlin/coinrun/dynamics_debug/debug_gt_actions.sh\r\n create mode 100644 jobs/alfred/berlin/coinrun/coinrun_data/generate_data_500mio_no_distinct_seeding.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/dataset_investigation/generate_data_500m_without_chunks.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/dataset_investigation/generate_data_test_without_chunks.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/dataset_investigation/generate_gen_1.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/dynamics_w_eval_ff.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/overfitting_test/500m_dataset/dynamics_w_eval_ff_from_current_main.sbatch\r\n create mode 100644 jobs/alfred/berlin/coinrun/w_val/tokenizer_baseline_debug_default_jasming_old_commit.sbatch\r\n create mode 100644 jobs/alfred/berlin/workshop/case_study_vanilla_genie/debug/lam.sh\r\n create mode 100644 jobs/alfred/berlin/workshop/case_study_vanilla_genie/debug/tokenizer.sh\r\n create mode 100644 jobs/alfred/berlin/workshop/case_study_vanilla_genie/tokenizer.sh\r\n create mode 100644 jobs/alfred/berlin/workshop/case_study_vanilla_genie_250M/debug/tokenizer.sh\r\n create mode 100644 jobs/alfred/berlin/workshop/case_study_vanilla_genie_250M/tokenizer.sh\r\n create mode 100644 jobs/alfred/berlin/workshop/confirm_mix_prec_speedup/dynamics.sbatch\r\n rename jobs/{franz/berlin/coinrun/mila_submission => alfred/berlin/workshop/jafar_default}/coinrun_dynamics_base.sh (100%)\r\n create mode 100644 jobs/alfred/berlin/workshop/mix_prec_ablation/coinrun_dynamics_full_prec.sh\r\n create mode 100644 jobs/alfred/berlin/workshop/mix_prec_ablation/coinrun_dynamics_mix_precision.sh\r\n create mode 100644 jobs/alfred/berlin/workshop/mix_prec_ablation/coinrun_dynamics_mix_precision_no_flash_attention.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_dynamics_base.sh\r\n rename jobs/franz/berlin/coinrun/mila_submission/{ => legacy_runs_50M_dataset}/coinrun_lam_base.sh (100%)\r\n rename jobs/franz/berlin/coinrun/mila_submission/{ => legacy_runs_50M_dataset}/coinrun_lam_no_flash_attention.sh (100%)\r\n rename jobs/franz/berlin/coinrun/mila_submission/{ => legacy_runs_50M_dataset}/coinrun_tokenizer_base.sh (100%)\r\n rename jobs/franz/berlin/coinrun/{ => mila_submission/legacy_runs_50M_dataset}/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh (97%)\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh\r\n rename jobs/franz/berlin/coinrun/{ => 
mila_submission/legacy_runs_50M_dataset}/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh (100%)\r\n rename jobs/franz/berlin/coinrun/{ => mila_submission/legacy_runs_50M_dataset}/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh (97%)\r\n rename jobs/franz/berlin/coinrun/{ => mila_submission/legacy_runs_50M_dataset}/submission_debug/coinrun_dynamics_base_patch_size_4_main.sh (97%)\r\n rename jobs/franz/berlin/coinrun/{ => mila_submission/legacy_runs_50M_dataset}/submission_debug/coinrun_dynamics_base_patch_size_4_noise-branch.sh (96%)\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_gt_actions_concat_branch.sh\r\n rename jobs/franz/berlin/coinrun/{ => mila_submission/legacy_runs_50M_dataset}/submission_debug/coinrun_tokenizer_patch_size_4.sh (100%)\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine/slurm",,terminal_output +4885,5685384,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4886,5685875,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",0,0,"",shellscript,tab +4887,5686577,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4888,5707206,"slurm/jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_gt_actions\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation gt-actions""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --use_gt_actions \\n --num_actions=15 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n 
--val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +4889,5713006,"slurm/jobs/franz/berlin/coinrun/mila_submission/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_gt_actions\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation gt-actions""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --use_gt_actions \\n --num_actions=15 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +4890,5740904,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_dynamics_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n 
restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +4891,5754493,"TERMINAL",0,0,"bash",,terminal_focus +4892,5761960,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"",shellscript,tab +4893,5769460,"TERMINAL",0,0,"diff slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_dynamics_base.sh slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",,terminal_command +4894,5874198,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +4895,5879813,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py 
\\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +4896,5885474,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_tokenizer_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun tokenizer 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +4897,5887205,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +4898,5889214,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job 
$SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun tokenizer 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +4899,5891150,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_lam_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --job-name=lam_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +4900,5894040,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --job-name=lam_coinrun_mila_submission\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 
# 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam 500m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +4901,5925876,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +4902,5933718,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,0,"",shellscript,selection_mouse +4903,5933790,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,1,"/",shellscript,selection_mouse +4904,5933878,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,2,"/f",shellscript,selection_mouse +4905,5933879,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,5,"/fast",shellscript,selection_mouse +4906,5933879,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,7,"/fast/p",shellscript,selection_mouse +4907,5933879,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,10,"/fast/proj",shellscript,selection_mouse +4908,5933880,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,12,"/fast/projec",shellscript,selection_mouse +4909,5933903,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,14,"/fast/project/",shellscript,selection_mouse +4910,5933933,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,16,"/fast/project/HF",shellscript,selection_mouse +4911,5933955,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,19,"/fast/project/HFMI_",shellscript,selection_mouse +4912,5933974,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,22,"/fast/project/HFMI_Syn",shellscript,selection_mouse +4913,5934048,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,28,"/fast/project/HFMI_SynergyUn",shellscript,selection_mouse 
+4914,5934101,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,40,"/fast/project/HFMI_SynergyUnit/jafar_ws/",shellscript,selection_mouse +4915,5934128,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,57,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/arra",shellscript,selection_mouse +4916,5934154,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,71,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m",shellscript,selection_mouse +4917,5934245,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,185,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/token",shellscript,selection_mouse +4918,5934246,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,193,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coi",shellscript,selection_mouse +4919,5934279,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,196,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinru",shellscript,selection_mouse +4920,5934280,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,199,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_m",shellscript,selection_mouse +4921,5934312,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,202,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila",shellscript,selection_mouse +4922,5934313,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,203,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_",shellscript,selection_mouse +4923,5934354,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,204,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_s",shellscript,selection_mouse +4924,5934457,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,203,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_",shellscript,selection_mouse 
+4925,5934502,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,202,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila",shellscript,selection_mouse +4926,5934522,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,201,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mil",shellscript,selection_mouse +4927,5934611,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,89,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""",shellscript,selection_mouse +4928,5934859,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,88,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment",shellscript,selection_mouse +4929,5935745,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,88,"",shellscript,content +4930,5936249,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1232,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/",shellscript,content +4931,5938085,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +4932,5940167,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,0,"",shellscript,selection_mouse +4933,5940255,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,2,"/f",shellscript,selection_mouse +4934,5940309,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,4,"/fas",shellscript,selection_mouse +4935,5940309,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,7,"/fast/p",shellscript,selection_mouse +4936,5940310,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,9,"/fast/pro",shellscript,selection_mouse +4937,5940378,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,12,"/fast/projec",shellscript,selection_mouse +4938,5940379,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,15,"/fast/project/H",shellscript,selection_mouse +4939,5940410,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,17,"/fast/project/HFM",shellscript,selection_mouse +4940,5940411,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,19,"/fast/project/HFMI_",shellscript,selection_mouse +4941,5940443,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,21,"/fast/project/HFMI_Sy",shellscript,selection_mouse +4942,5940444,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,22,"/fast/project/HFMI_Syn",shellscript,selection_mouse +4943,5940480,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,25,"/fast/project/HFMI_Synerg",shellscript,selection_mouse 
+4944,5940480,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,29,"/fast/project/HFMI_SynergyUni",shellscript,selection_mouse +4945,5940514,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,33,"/fast/project/HFMI_SynergyUnit/ja",shellscript,selection_mouse +4946,5940550,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,146,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/",shellscript,selection_mouse +4947,5940551,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,150,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/chec",shellscript,selection_mouse +4948,5940585,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,154,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoi",shellscript,selection_mouse +4949,5940586,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,159,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/c",shellscript,selection_mouse +4950,5940620,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,162,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coin",shellscript,selection_mouse +4951,5940651,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,56,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/arr",shellscript,selection_mouse +4952,5940682,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,60,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_r",shellscript,selection_mouse +4953,5940683,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,63,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_reco",shellscript,selection_mouse +4954,5940710,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,67,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_",shellscript,selection_mouse +4955,5940751,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,70,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500",shellscript,selection_mouse +4956,5940751,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1190,20,"\narray_records_dir=""",shellscript,selection_mouse +4957,5941113,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,87,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_incremen",shellscript,selection_mouse +4958,5941205,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,88,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment",shellscript,selection_mouse +4959,5941968,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,88,"",shellscript,content 
+4960,5942368,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1210,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/",shellscript,content +4961,5944276,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +4962,5946396,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,0,"",shellscript,selection_mouse +4963,5946529,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,2,"/f",shellscript,selection_mouse +4964,5946566,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,3,"/fa",shellscript,selection_mouse +4965,5946567,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,5,"/fast",shellscript,selection_mouse +4966,5946709,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,8,"/fast/pr",shellscript,selection_mouse +4967,5946710,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,11,"/fast/proje",shellscript,selection_mouse +4968,5946710,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,13,"/fast/project",shellscript,selection_mouse +4969,5946710,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,18,"/fast/project/HFMI",shellscript,selection_mouse +4970,5946711,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,22,"/fast/project/HFMI_Syn",shellscript,selection_mouse +4971,5946711,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,23,"/fast/project/HFMI_Syne",shellscript,selection_mouse +4972,5946711,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,27,"/fast/project/HFMI_SynergyU",shellscript,selection_mouse +4973,5946711,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,28,"/fast/project/HFMI_SynergyUn",shellscript,selection_mouse +4974,5946712,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,31,"/fast/project/HFMI_SynergyUnit/",shellscript,selection_mouse +4975,5946793,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1214,20,"\narray_records_dir=""",shellscript,selection_mouse +4976,5946890,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1213,21,"\n\narray_records_dir=""",shellscript,selection_mouse +4977,5947088,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1214,20,"\narray_records_dir=""",shellscript,selection_mouse +4978,5947207,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,82,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_inc",shellscript,selection_mouse +4979,5947253,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,83,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_incr",shellscript,selection_mouse +4980,5947341,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,84,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_incre",shellscript,selection_mouse 
+4981,5947341,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,85,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increm",shellscript,selection_mouse +4982,5947355,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,86,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increme",shellscript,selection_mouse +4983,5947437,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,87,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_incremen",shellscript,selection_mouse +4984,5947502,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,88,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment",shellscript,selection_mouse +4985,5947536,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,89,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""",shellscript,selection_mouse +4986,5948632,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,89,"",shellscript,content +4987,5948685,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1233,0,"",shellscript,selection_command +4988,5949038,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1233,0,"/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/",shellscript,content +4989,5949359,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1330,0,"""",shellscript,content +4990,5949360,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1331,0,"",shellscript,selection_keyboard +4991,5951890,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1331,1,"",shellscript,content +4992,5952230,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1330,1,"",shellscript,content +4993,5954543,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1232,0,"",shellscript,selection_mouse +4994,5954943,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1233,0,"",shellscript,selection_mouse +4995,5956222,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +4996,5957993,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +4997,5958780,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,0,"",shellscript,selection_mouse +4998,5959814,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1233,0,"",shellscript,selection_command +4999,5960461,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1233,0,"""",shellscript,content +5000,5960463,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1234,0,"",shellscript,selection_keyboard +5001,5960982,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1233,0,"",shellscript,selection_command 
+5002,5961431,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1331,0,"",shellscript,selection_command +5003,5961816,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1331,0,"""",shellscript,content +5004,5961817,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1332,0,"",shellscript,selection_keyboard +5005,5962246,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1331,0,"",shellscript,selection_command +5006,5968723,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1187,0,"",shellscript,selection_mouse +5007,5969905,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1186,1,"",shellscript,content +5008,5970002,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1185,1,"",shellscript,content +5009,5970158,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1184,1,"",shellscript,content +5010,5970702,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1184,0,"2",shellscript,content +5011,5970703,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1185,0,"",shellscript,selection_keyboard +5012,5971039,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1185,0,"5",shellscript,content +5013,5971041,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1186,0,"",shellscript,selection_keyboard +5014,5971120,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1186,0,"0",shellscript,content +5015,5971121,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1187,0,"",shellscript,selection_keyboard +5016,5972600,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +5017,5973896,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1163,0,"",shellscript,selection_mouse +5018,5974997,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1160,3,"",shellscript,content +5019,5975644,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1160,0,"2",shellscript,content +5020,5975645,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1161,0,"",shellscript,selection_keyboard +5021,5975964,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1161,0,"5",shellscript,content +5022,5975965,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1162,0,"",shellscript,selection_keyboard +5023,5975990,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1162,0,"0",shellscript,content +5024,5975991,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1163,0,"",shellscript,selection_keyboard +5025,5978568,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +5026,5979596,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1185,0,"",shellscript,selection_mouse +5027,5980493,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1182,3,"",shellscript,content 
+5028,5980759,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1182,0,"2",shellscript,content +5029,5980760,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1183,0,"",shellscript,selection_keyboard +5030,5981042,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1183,0,"5",shellscript,content +5031,5981043,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1184,0,"",shellscript,selection_keyboard +5032,5981147,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1184,0,"0",shellscript,content +5033,5981148,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1185,0,"",shellscript,selection_keyboard +5034,5990886,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +5035,5992095,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +5036,5992893,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +5037,5994855,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +5038,5995594,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +5039,5995952,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +5040,6008556,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1532,0,"",shellscript,selection_mouse +5041,6009478,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1532,0,"\n",shellscript,content +5042,6010517,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1533,0," ",shellscript,content +5043,6010639,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1537,0,"-",shellscript,content +5044,6010640,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1538,0,"",shellscript,selection_keyboard +5045,6010795,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1538,0,"-",shellscript,content +5046,6010796,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1539,0,"",shellscript,selection_keyboard +5047,6014439,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1539,0,"p",shellscript,content +5048,6014441,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1540,0,"",shellscript,selection_keyboard +5049,6014518,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1540,0,"a",shellscript,content +5050,6014519,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1541,0,"",shellscript,selection_keyboard +5051,6014715,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1541,0,"t",shellscript,content +5052,6014716,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1542,0,"",shellscript,selection_keyboard +5053,6014942,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1542,0,"c",shellscript,content 
+5054,6014943,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1543,0,"",shellscript,selection_keyboard +5055,6014997,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1543,0,"h",shellscript,content +5056,6014998,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1544,0,"",shellscript,selection_keyboard +5057,6015319,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1544,0,"_",shellscript,content +5058,6015320,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1545,0,"",shellscript,selection_keyboard +5059,6015532,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1545,0,"s",shellscript,content +5060,6015533,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1546,0,"",shellscript,selection_keyboard +5061,6015726,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1546,0,"i",shellscript,content +5062,6015727,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1547,0,"",shellscript,selection_keyboard +5063,6016070,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1547,0,"z",shellscript,content +5064,6016070,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1548,0,"",shellscript,selection_keyboard +5065,6016122,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1548,0,"e",shellscript,content +5066,6016123,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1549,0,"",shellscript,selection_keyboard +5067,6017273,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1549,0,"=",shellscript,content +5068,6017274,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1550,0,"",shellscript,selection_keyboard +5069,6017496,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1550,0,"1",shellscript,content +5070,6017497,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1551,0,"",shellscript,selection_keyboard +5071,6017766,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1551,0,"6",shellscript,content +5072,6017767,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1552,0,"",shellscript,selection_keyboard +5073,6017952,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1552,0," ",shellscript,content +5074,6017953,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1553,0,"",shellscript,selection_keyboard +5075,6018244,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1553,0,"\",shellscript,content +5076,6018245,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1554,0,"",shellscript,selection_keyboard +5077,6018370,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1553,0,"",shellscript,selection_command +5078,6019945,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab 
+5079,6021826,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1496,0,"",shellscript,selection_mouse +5080,6022609,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1496,0,"\n",shellscript,content +5081,6023111,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1497,0," ",shellscript,content +5082,6023321,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1501,0,"-",shellscript,content +5083,6023321,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1502,0,"",shellscript,selection_keyboard +5084,6023485,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1502,0,"-",shellscript,content +5085,6023486,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1503,0,"",shellscript,selection_keyboard +5086,6024034,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1503,0,"p",shellscript,content +5087,6024035,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1504,0,"",shellscript,selection_keyboard +5088,6024134,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1504,0,"a",shellscript,content +5089,6024135,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1505,0,"",shellscript,selection_keyboard +5090,6024330,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1505,0,"t",shellscript,content +5091,6024330,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1506,0,"",shellscript,selection_keyboard +5092,6024480,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1506,0,"c",shellscript,content +5093,6024481,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1507,0,"",shellscript,selection_keyboard +5094,6024572,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1507,0,"h",shellscript,content +5095,6024573,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1508,0,"",shellscript,selection_keyboard +5096,6024902,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1508,0,"_",shellscript,content +5097,6024903,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1509,0,"",shellscript,selection_keyboard +5098,6025067,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1509,0,"s",shellscript,content +5099,6025068,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1510,0,"",shellscript,selection_keyboard +5100,6025181,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1510,0,"i",shellscript,content +5101,6025182,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1511,0,"",shellscript,selection_keyboard +5102,6025318,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1511,0,"z",shellscript,content +5103,6025319,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1512,0,"",shellscript,selection_keyboard +5104,6025430,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1512,0,"e",shellscript,content 
+5105,6025431,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1513,0,"",shellscript,selection_keyboard +5106,6025918,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1513,0,"=",shellscript,content +5107,6025919,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1514,0,"",shellscript,selection_keyboard +5108,6026149,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1514,0,"1",shellscript,content +5109,6026149,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1515,0,"",shellscript,selection_keyboard +5110,6026337,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1515,0,"6",shellscript,content +5111,6026337,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1516,0,"",shellscript,selection_keyboard +5112,6026646,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1515,0,"",shellscript,selection_command +5113,6027234,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1516,0,"",shellscript,selection_command +5114,6027796,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1516,0," ",shellscript,content +5115,6027797,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1517,0,"",shellscript,selection_keyboard +5116,6028019,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1517,0,"\",shellscript,content +5117,6028020,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1518,0,"",shellscript,selection_keyboard +5118,6028721,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1517,0,"",shellscript,selection_command +5119,6029337,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1162,0,"",shellscript,selection_mouse +5120,6029474,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1160,12,"250m_dataset",shellscript,selection_mouse +5121,6030909,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +5122,6032025,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1661,0,"",shellscript,selection_mouse +5123,6032679,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1661,0,"\n",shellscript,content +5124,6033173,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1662,0," ",shellscript,content +5125,6033374,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1666,0,"-",shellscript,content +5126,6033375,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1667,0,"",shellscript,selection_keyboard +5127,6033552,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1667,0,"-",shellscript,content +5128,6033553,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1668,0,"",shellscript,selection_keyboard +5129,6034049,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1668,0,"p",shellscript,content +5130,6034050,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1669,0,"",shellscript,selection_keyboard 
+5131,6034139,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1669,0,"a",shellscript,content +5132,6034139,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1670,0,"",shellscript,selection_keyboard +5133,6034389,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1670,0,"t",shellscript,content +5134,6034389,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1671,0,"",shellscript,selection_keyboard +5135,6034746,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1671,0,"c",shellscript,content +5136,6034747,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1672,0,"",shellscript,selection_keyboard +5137,6034827,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1672,0,"h",shellscript,content +5138,6034828,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1673,0,"",shellscript,selection_keyboard +5139,6035131,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1673,0,"_",shellscript,content +5140,6035132,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1674,0,"",shellscript,selection_keyboard +5141,6035488,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1674,0,"s",shellscript,content +5142,6035489,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1675,0,"",shellscript,selection_keyboard +5143,6035573,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1675,0,"i",shellscript,content +5144,6035574,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1676,0,"",shellscript,selection_keyboard +5145,6035705,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1676,0,"z",shellscript,content +5146,6035706,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1677,0,"",shellscript,selection_keyboard +5147,6035825,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1677,0,"e",shellscript,content +5148,6035826,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1678,0,"",shellscript,selection_keyboard +5149,6036415,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1678,0,"=",shellscript,content +5150,6036416,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1679,0,"",shellscript,selection_keyboard +5151,6036711,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1679,0,"1",shellscript,content +5152,6036712,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1680,0,"",shellscript,selection_keyboard +5153,6036995,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1680,0,"6",shellscript,content +5154,6036996,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1681,0,"",shellscript,selection_keyboard +5155,6037195,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1681,0," ",shellscript,content 
+5156,6037196,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1682,0,"",shellscript,selection_keyboard +5157,6037480,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1682,0,"\",shellscript,content +5158,6037481,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1683,0,"",shellscript,selection_keyboard +5159,6039295,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1682,0,"",shellscript,selection_command +5160,6048448,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1170,0,"",shellscript,selection_mouse +5161,6048604,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1165,7,"coinrun",shellscript,selection_mouse +5162,6048737,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1165,16,"coinrun dynamics",shellscript,selection_mouse +5163,6048799,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1165,29,"coinrun dynamics 250m_dataset",shellscript,selection_mouse +5164,6048835,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1165,47,"coinrun dynamics 250m_dataset mila_submission""\n",shellscript,selection_mouse +5165,6048906,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1165,97,"coinrun dynamics 250m_dataset mila_submission""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit",shellscript,selection_mouse +5166,6049170,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1165,47,"coinrun dynamics 250m_dataset mila_submission""\n",shellscript,selection_mouse +5167,6049280,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1165,45,"coinrun dynamics 250m_dataset mila_submission",shellscript,selection_mouse +5168,6051092,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1832,0,"",shellscript,selection_mouse +5169,6065721,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +5170,6067527,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +5171,6072740,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +5172,6075279,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",2082,0,"",shellscript,selection_mouse +5173,6080577,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",2036,0,"",shellscript,selection_mouse +5174,6106322,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +5175,6110859,"slurm/jobs/franz/berlin/coinrun/mila_submission/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_ffn_dim_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before 
timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ffn_dim ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --dyna_ffn_dim=512 \\n --dyna_num_blocks=12 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +5176,6116626,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/coinrun_lam_no_flash_attention.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/lam/%x_%j.log\n#SBATCH --job-name=lam_coinrun_mila_submission_no_flash_attention\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun lam 500m_dataset mila_submission no_flash_attention""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_lam.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n 
--project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --no_use_flash_attention \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +5177,6121165,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_patch_size_16_action_prepend_branch_cos_schedule\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission debug patch_size_16 action_prepend_branch cos_schedule""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --lr_schedule=""cos"" \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +5178,6123604,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh",1957,0,"",shellscript,selection_mouse +5179,6123605,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh",1956,0,"",shellscript,selection_command +5180,6123640,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh",1956,1,"i",shellscript,selection_mouse +5181,6123652,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh",1957,0,"",shellscript,selection_command +5182,6123715,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh",1947,10," exit 1\nfi",shellscript,selection_mouse +5183,6123716,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh",1946,11," exit 1\nfi",shellscript,selection_mouse +5184,6123758,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh",1826,131," echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi",shellscript,selection_mouse +5185,6123826,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh",1825,132," echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi",shellscript,selection_mouse +5186,6123925,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh",1766,191,"if [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi",shellscript,selection_mouse +5187,6124002,"slurm/jobs/franz/berlin/coinrun/mila_submission/legacy_runs_50M_dataset/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend_cos.sh",1716,241,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi",shellscript,selection_mouse +5188,6127621,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +5189,6130055,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1620,0,"",shellscript,selection_mouse +5190,6131465,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1620,0,"\n",shellscript,content +5191,6131875,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1621,0,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi",shellscript,content +5192,6135191,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +5193,6137411,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1461,0,"",shellscript,selection_mouse +5194,6138379,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1461,0,"\n",shellscript,content +5195,6138554,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1462,0,"\n",shellscript,content +5196,6139044,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1462,0,"",shellscript,selection_command +5197,6139350,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1462,0,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi",shellscript,content +5198,6141018,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +5199,6141975,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1491,0,"",shellscript,selection_mouse +5200,6142837,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1491,0,"\n",shellscript,content +5201,6143014,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1492,0,"\n",shellscript,content +5202,6143335,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1492,0,"",shellscript,selection_command +5203,6143623,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",1492,0,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi",shellscript,content +5204,6166197,"jasmine/train_dynamics.py",0,0,"",python,tab +5205,6167338,"jasmine/train_dynamics.py",2546,0,"",python,selection_mouse +5206,6167787,"jasmine/train_dynamics.py",2356,0,"",python,selection_mouse +5207,6170197,"jasmine/train_dynamics.py",4772,0,"",python,selection_command +5208,6170586,"jasmine/train_dynamics.py",1350,0,"",python,selection_command +5209,6174125,"jasmine/train_dynamics.py",1325,0,"",python,selection_mouse +5210,6177481,"jasmine/train_dynamics.py",1403,0,"",python,selection_mouse +5211,6177495,"jasmine/train_dynamics.py",1402,0,"",python,selection_command +5212,6231721,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_causal\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation causal""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --dyna_type=causal \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +5213,6240666,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +5214,6241926,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1289,0,"",shellscript,selection_mouse +5215,6242440,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1288,0,"",shellscript,selection_command +5216,6244946,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab 
+5217,6245743,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1336,0,"",shellscript,selection_mouse +5218,6246992,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1344,0,"\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""",shellscript,content +5219,6247022,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1345,0,"",shellscript,selection_command +5220,6247717,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1236,0,"",shellscript,selection_command +5221,6248207,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1236,109,"",shellscript,content +5222,6252692,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_ffn_dim_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ffn_dim ablation""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --dyna_ffn_dim=512 \\n --dyna_num_blocks=12 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +5223,6253429,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1265,0,"",shellscript,selection_mouse 
+5224,6254192,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1355,0,"\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""",shellscript,content +5225,6254213,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1356,0,"",shellscript,selection_command +5226,6254971,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1474,0,"",shellscript,selection_command +5227,6256241,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1356,0,"",shellscript,selection_command +5228,6256360,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1247,0,"",shellscript,selection_command +5229,6256930,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1247,109,"",shellscript,content +5230,6259668,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_gt_actions\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation gt-actions""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --use_gt_actions \\n --num_actions=15 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab 
+5231,6261176,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1269,0,"",shellscript,selection_mouse +5232,6262312,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1352,0,"\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""",shellscript,content +5233,6262379,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1353,0,"",shellscript,selection_command +5234,6262838,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1471,0,"",shellscript,selection_command +5235,6264177,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1353,0,"",shellscript,selection_command +5236,6264364,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1244,0,"",shellscript,selection_command +5237,6264806,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1244,109,"",shellscript,content +5238,6267763,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_no_cotraining\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation no-cotraining""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nlam_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/lam/lam_coinrun_mila_submission_no_flash_attention_29738/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --lam_checkpoint=""${lam_ckpt_dir}"" \\n 
--val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +5239,6268917,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1264,0,"",shellscript,selection_mouse +5240,6269904,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1358,0,"\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_250M_npy_arr_rec/array_record/""",shellscript,content +5241,6269981,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1359,0,"",shellscript,selection_command +5242,6270610,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1477,0,"",shellscript,selection_command +5243,6271046,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1359,0,"",shellscript,selection_command +5244,6271201,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1250,0,"",shellscript,selection_command +5245,6271910,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1250,109,"",shellscript,content +5246,6273919,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_flash_attn.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_no_flash_attn\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation no-flash-attn""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --no-use-flash-attention \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n 
--tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +5247,6285097,"slurm/jobs/franz/berlin/coinrun/mila_submission/speed_ablations/coinrun_dynamics_no_flash_attn.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_no_flash_attn\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation no-flash-attn""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --no-use-flash-attention \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +5248,6294236,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/coinrun_dynamics_no_flash_attn.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_no_flash_attn\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' 
| cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission ablation no-flash-attn""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --no-use-flash-attention \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +5249,6299692,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/coinrun_dynamics_no_flash_attn.sh",2009,0,"",shellscript,selection_mouse +5250,6300299,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/coinrun_dynamics_no_flash_attn.sh",1868,0,"",shellscript,selection_mouse +5251,6303200,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/coinrun_dynamics_no_flash_attn.sh",441,1,"5",shellscript,selection_command +5252,6303232,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/coinrun_dynamics_no_flash_attn.sh",1196,2,"50",shellscript,selection_command +5253,6303351,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/coinrun_dynamics_no_flash_attn.sh",1196,3,"500",shellscript,selection_command +5254,6303799,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/coinrun_dynamics_no_flash_attn.sh",1196,4,"500m",shellscript,selection_command +5255,6309433,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_speed_test_batch_size_36_dataloader_ablation\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource 
.venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission speed_test batch_size_36 ablation dataloader""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --patch_size=16 \\n --batch_size=36 \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +5256,6310939,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +5257,6315995,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1189,4,"500m",shellscript,selection_command +5258,6320524,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1189,4,"250m",shellscript,content +5259,6322984,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +5260,6324807,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1199,4,"500m",shellscript,selection_command +5261,6326851,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1199,4,"250m",shellscript,content +5262,6328887,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +5263,6331628,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1193,4,"500m",shellscript,selection_command +5264,6333090,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1193,4,"250m",shellscript,content +5265,6334616,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab +5266,6336630,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1196,4,"500m",shellscript,selection_command +5267,6337174,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1196,4,"250m",shellscript,content +5268,6341306,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +5269,6343986,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +5270,6346198,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab 
+5271,6355945,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +5272,6356969,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1913,0,"",shellscript,selection_mouse +5273,6359282,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +5274,6360148,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1666,0,"",shellscript,selection_mouse +5275,6360910,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1684,0,"\n --patch_size=16 \",shellscript,content +5276,6360941,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1689,0,"",shellscript,selection_command +5277,6362552,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh",0,0,"",shellscript,tab +5278,6363881,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",0,0,"",shellscript,tab +5279,6365165,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1754,0,"",shellscript,selection_mouse +5280,6366582,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh",1753,0,"",shellscript,selection_command +5281,6367955,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +5282,6370457,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +5283,6372403,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1671,0,"",shellscript,selection_mouse +5284,6373206,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1695,0,"\n --patch_size=16 \",shellscript,content +5285,6373240,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1700,0,"",shellscript,selection_command +5286,6374695,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +5287,6376250,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1668,0,"",shellscript,selection_mouse +5288,6376931,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1692,0,"\n --patch_size=16 \",shellscript,content +5289,6376952,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1697,0,"",shellscript,selection_command +5290,6378277,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab +5291,6380088,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1846,0,"",shellscript,selection_mouse +5292,6380600,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1808,0,"",shellscript,selection_mouse +5293,6381373,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1831,0,"\n --patch_size=16 \",shellscript,content 
+5294,6381400,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1836,0,"",shellscript,selection_command +5295,6395326,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +5296,6396233,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +5297,6397441,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +5298,6398618,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab +5299,6410911,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +5300,6473490,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",0,0,"",shellscript,tab +5301,6474914,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1863,0,"",shellscript,selection_mouse +5302,6474915,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1862,1,"\n",shellscript,selection_mouse +5303,6474984,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1863,0,"",shellscript,selection_command +5304,6474984,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1862,1,"\n",shellscript,selection_mouse +5305,6475016,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1857,6," 1\nfi\n",shellscript,selection_mouse +5306,6475052,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1856,7,"t 1\nfi\n",shellscript,selection_mouse +5307,6475083,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1855,8,"it 1\nfi\n",shellscript,selection_mouse +5308,6475124,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1736,127,"ho ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n",shellscript,selection_mouse +5309,6475125,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1735,128,"cho ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n",shellscript,selection_mouse +5310,6475159,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1734,129,"echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n",shellscript,selection_mouse +5311,6475191,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1733,130," echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n",shellscript,selection_mouse +5312,6475229,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1674,189,"[ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n",shellscript,selection_mouse +5313,6475259,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1673,190," [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n",shellscript,selection_mouse +5314,6475290,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1672,191,"f [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n",shellscript,selection_mouse +5315,6475349,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1671,192,"if [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n",shellscript,selection_mouse +5316,6475414,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1621,242,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n",shellscript,selection_mouse +5317,6476241,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh",1621,0,"",shellscript,selection_command +5318,6477316,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",0,0,"",shellscript,tab +5319,6478393,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1641,0,"",shellscript,selection_mouse +5320,6478759,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1643,0,"",shellscript,selection_mouse +5321,6479578,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1643,0,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\n",shellscript,content +5322,6479666,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1643,0,"",shellscript,selection_command +5323,6481168,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_causal.sh",1642,0,"\n",shellscript,content +5324,6482747,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +5325,6483253,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",0,0,"",shellscript,tab +5326,6484251,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1654,0,"",shellscript,selection_mouse +5327,6485104,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1654,0,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. 
Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\n",shellscript,content +5328,6485152,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1654,0,"",shellscript,selection_command +5329,6486209,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_ffn_dim_ablation.sh",1653,0,"\n",shellscript,content +5330,6487985,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",0,0,"",shellscript,tab +5331,6488680,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1652,0,"",shellscript,selection_mouse +5332,6489497,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1652,0,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\n",shellscript,content +5333,6489549,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_gt_actions.sh",1652,0,"",shellscript,selection_command +5334,6490955,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",0,0,"",shellscript,tab +5335,6491730,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1059,0,"",shellscript,selection_mouse +5336,6493706,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1791,0,"",shellscript,selection_mouse +5337,6494491,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1791,0,"current_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\n",shellscript,content +5338,6494659,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/arch_ablations/coinrun_dynamics_no_cotraining.sh",1791,0,"",shellscript,selection_command +5339,6630622,"slurm/jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_speed_test_batch_size_36\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission speed_test batch_size_36""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --patch_size=16 \\n --batch_size=36 \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --warmup_steps=0 \\n --wsd_decay_steps=0 \\n --num_steps=1000 \\n --log_image_interval=10000 \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +5340,6653007,"TERMINAL",0,0,"bash",,terminal_focus +5341,6654984,"TERMINAL",0,0,"git status",,terminal_command +5342,6655015,"TERMINAL",0,0,"]633;C",,terminal_output +5343,6656091,"TERMINAL",0,0,"On branch main\r\nYour branch is up to date with 'origin/main'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add/rm ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tdeleted: jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_causal.sh\r\n\tdeleted: jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_ffn_dim_ablation.sh\r\n\tdeleted: jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_gt_actions.sh\r\n\tdeleted: jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_cotraining.sh\r\n\tdeleted: jobs/franz/berlin/coinrun/mila_submission/ablations/coinrun_dynamics_no_flash_attn.sh\r\n\tdeleted: 
jobs/mihir/horeka/coinrun/ablations/train_dyn_default-grain-ablation.sh\r\n\tmodified: jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\n\tmodified: jobs/mihir/horeka/coinrun/train_tokenizer_single_gpu.sh\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tjobs/franz/berlin/coinrun/mila_submission/250M_dataset/\r\n\tjobs/mihir/horeka/coinrun/ablations/batch_size_36/\r\n\tjobs/mihir/horeka/coinrun/ablations/max_batchsize/\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine/slurm",,terminal_output +5344,6660782,"TERMINAL",0,0,"git add jobs/",,terminal_command +5345,6660844,"TERMINAL",0,0,"]633;C",,terminal_output +5346,6661409,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jasmine/slurm",,terminal_output +5347,6684571,"TERMINAL",0,0,"git commit -am ""added submission sbatch scripts for 250m dataset base runs + arch ablations""",,terminal_command +5348,6684639,"TERMINAL",0,0,"]633;C",,terminal_output +5349,6685134,"TERMINAL",0,0,"[main 19d9601] added submission sbatch scripts for 250m dataset base runs + arch ablations\r\n 21 files changed, 1058 insertions(+), 36 deletions(-)\r\n rename jobs/franz/berlin/coinrun/mila_submission/{ablations => 250M_dataset/arch_ablations}/coinrun_dynamics_causal.sh (83%)\r\n rename jobs/franz/berlin/coinrun/mila_submission/{ablations => 250M_dataset/arch_ablations}/coinrun_dynamics_ffn_dim_ablation.sh (84%)\r\n rename jobs/franz/berlin/coinrun/mila_submission/{ablations => 250M_dataset/arch_ablations}/coinrun_dynamics_gt_actions.sh (84%)\r\n rename jobs/franz/berlin/coinrun/mila_submission/{ablations => 250M_dataset/arch_ablations}/coinrun_dynamics_no_cotraining.sh (84%)\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_dynamics_base.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_lam_base.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/250M_dataset/coinrun_tokenizer_base.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/batch_size_248/coinrun_dynamics_base_speed_ablation.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/batch_size_36/coinrun_dynamics_base_speed_ablation.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/mila_submission/250M_dataset/speed_ablations/batch_size_36/coinrun_dynamics_grain_ablation.sh\r\n rename jobs/franz/berlin/coinrun/mila_submission/{ablations => 250M_dataset/speed_ablations}/coinrun_dynamics_no_flash_attn.sh (100%)\r\n create mode 100644 jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default.sh\r\n rename jobs/mihir/horeka/coinrun/ablations/{train_dyn_default-grain-ablation.sh => batch_size_36/train_dyn_default_flash_attn_ablation.sh} (76%)\r\n create mode 100644 jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_default_mixed_precision_ablation.sh\r\n create mode 100644 jobs/mihir/horeka/coinrun/ablations/batch_size_36/train_dyn_grain_ablation.sh\r\n create mode 100644 jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default.sh\r\n create mode 100644 jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_flash_attn_ablation.sh\r\n create mode 100644 jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_default_mixed_precision_ablation.sh\r\n create mode 100644 
jobs/mihir/horeka/coinrun/ablations/max_batchsize/train_dyn_grain_ablation.sh\r\n]0;tum_cte0515@hkn1991:~/Projects/jasmine/slurm",,terminal_output +5350,6687546,"TERMINAL",0,0,"git push",,terminal_command +5351,6687637,"TERMINAL",0,0,"]633;C",,terminal_output +5352,6688909,"TERMINAL",0,0,"Enumerating objects: 50, done.\r\nCounting objects: 2% (1/50)\rCounting objects: 4% (2/50)\rCounting objects: 6% (3/50)\rCounting objects: 8% (4/50)\rCounting objects: 10% (5/50)\rCounting objects: 12% (6/50)\rCounting objects: 14% (7/50)\rCounting objects: 16% (8/50)\rCounting objects: 18% (9/50)\rCounting objects: 20% (10/50)\rCounting objects: 22% (11/50)\rCounting objects: 24% (12/50)\rCounting objects: 26% (13/50)\rCounting objects: 28% (14/50)\rCounting objects: 30% (15/50)\rCounting objects: 32% (16/50)\rCounting objects: 34% (17/50)\rCounting objects: 36% (18/50)\rCounting objects: 38% (19/50)\rCounting objects: 40% (20/50)\rCounting objects: 42% (21/50)\rCounting objects: 44% (22/50)\rCounting objects: 46% (23/50)\rCounting objects: 48% (24/50)\rCounting objects: 50% (25/50)\rCounting objects: 52% (26/50)\rCounting objects: 54% (27/50)\rCounting objects: 56% (28/50)\rCounting objects: 58% (29/50)\rCounting objects: 60% (30/50)\rCounting objects: 62% (31/50)\rCounting objects: 64% (32/50)\rCounting objects: 66% (33/50)\rCounting objects: 68% (34/50)\rCounting objects: 70% (35/50)\rCounting objects: 72% (36/50)\rCounting objects: 74% (37/50)\rCounting objects: 76% (38/50)\rCounting objects: 78% (39/50)\rCounting objects: 80% (40/50)\rCounting objects: 82% (41/50)\rCounting objects: 84% (42/50)\rCounting objects: 86% (43/50)\rCounting objects: 88% (44/50)\rCounting objects: 90% (45/50)\rCounting objects: 92% (46/50)\rCounting objects: 94% (47/50)\rCounting objects: 96% (48/50)\rCounting objects: 98% (49/50)\rCounting objects: 100% (50/50)\rCounting objects: 100% (50/50), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 2% (1/39)\rCompressing objects: 5% (2/39)\rCompressing objects: 7% (3/39)\rCompressing objects: 10% (4/39)\rCompressing objects: 12% (5/39)\rCompressing objects: 15% (6/39)\rCompressing objects: 17% (7/39)\rCompressing objects: 20% (8/39)\rCompressing objects: 23% (9/39)\rCompressing objects: 25% (10/39)\rCompressing objects: 28% (11/39)\rCompressing objects: 30% (12/39)\rCompressing objects: 33% (13/39)\rCompressing objects: 35% (14/39)\rCompressing objects: 38% (15/39)\rCompressing objects: 41% (16/39)\rCompressing objects: 43% (17/39)\rCompressing objects: 46% (18/39)\rCompressing objects: 48% (19/39)\rCompressing objects: 51% (20/39)\rCompressing objects: 53% (21/39)\rCompressing objects: 56% (22/39)\rCompressing objects: 58% (23/39)\rCompressing objects: 61% (24/39)\rCompressing objects: 64% (25/39)\rCompressing objects: 66% (26/39)\rCompressing objects: 69% (27/39)\rCompressing objects: 71% (28/39)\rCompressing objects: 74% (29/39)\rCompressing objects: 76% (30/39)\rCompressing objects: 79% (31/39)\rCompressing objects: 82% (32/39)\rCompressing objects: 84% (33/39)\rCompressing objects: 87% (34/39)\rCompressing objects: 89% (35/39)\rCompressing objects: 92% (36/39)\rCompressing objects: 94% (37/39)\rCompressing objects: 97% (38/39)\rCompressing objects: 100% (39/39)\rCompressing objects: 100% (39/39), done.\r\nWriting objects: 2% (1/39)\rWriting objects: 5% (2/39)\rWriting objects: 7% (3/39)\rWriting objects: 10% (4/39)\rWriting objects: 12% (5/39)\rWriting objects: 15% (6/39)\rWriting objects: 17% (7/39)\rWriting objects: 20% (8/39)\rWriting objects: 
23% (9/39)\rWriting objects: 28% (11/39)\rWriting objects: 30% (12/39)\rWriting objects: 33% (13/39)\rWriting objects: 35% (14/39)\rWriting objects: 41% (16/39)\rWriting objects: 43% (17/39)\rWriting objects: 46% (18/39)\rWriting objects: 51% (20/39)\rWriting objects: 53% (21/39)\rWriting objects: 56% (22/39)\rWriting objects: 58% (23/39)\rWriting objects: 61% (24/39)\rWriting objects: 64% (25/39)\rWriting objects: 66% (26/39)\rWriting objects: 69% (27/39)\rWriting objects: 74% (29/39)\rWriting objects: 79% (31/39)\rWriting objects: 84% (33/39)\rWriting objects: 89% (35/39)\rWriting objects: 94% (37/39)\rWriting objects: 97% (38/39)\rWriting objects: 100% (39/39)\rWriting objects: 100% (39/39), 5.78 KiB | 739.00 KiB/s, done.\r\nTotal 39 (delta 25), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output
+5353,6689059,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/25)\rremote: Resolving deltas: 4% (1/25)\rremote: Resolving deltas: 8% (2/25)\rremote: Resolving deltas: 12% (3/25)\rremote: Resolving deltas: 16% (4/25)\rremote: Resolving deltas: 20% (5/25)\rremote: Resolving deltas: 24% (6/25)\rremote: Resolving deltas: 28% (7/25)\rremote: Resolving deltas: 32% (8/25)\rremote: Resolving deltas: 36% (9/25)\rremote: Resolving deltas: 40% (10/25)\rremote: Resolving deltas: 44% (11/25)\rremote: Resolving deltas: 48% (12/25)\rremote: Resolving deltas: 52% (13/25)\rremote: Resolving deltas: 56% (14/25)\rremote: Resolving deltas: 60% (15/25)\rremote: Resolving deltas: 64% (16/25)\rremote: Resolving deltas: 68% (17/25)\rremote: Resolving deltas: 72% (18/25)\rremote: Resolving deltas: 76% (19/25)\rremote: Resolving deltas: 80% (20/25)\rremote: Resolving deltas: 84% (21/25)\rremote: Resolving deltas: 88% (22/25)\rremote: Resolving deltas: 92% (23/25)\rremote: Resolving deltas: 96% (24/25)\rremote: Resolving deltas: 100% (25/25)\rremote: Resolving deltas: 100% (25/25), completed with 6 local objects.\r\n",,terminal_output
+5354,6689160,"TERMINAL",0,0,"To github.com:p-doom/slurm.git\r\n   e7208e3..19d9601  main -> main\r\n",,terminal_output
+5355,6689217,"TERMINAL",0,0,"]0;tum_cte0515@hkn1991:~/Projects/jasmine/slurm",,terminal_output
diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-c52c974d-6c8a-40d7-8b6d-40ee3b3624c21759325522612-2025_10_01-15.32.35.344/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-c52c974d-6c8a-40d7-8b6d-40ee3b3624c21759325522612-2025_10_01-15.32.35.344/source.csv
new file mode 100644
index 0000000000000000000000000000000000000000..255a2ff4205aa97ea956e092e862354737dffd79
--- /dev/null
+++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-c52c974d-6c8a-40d7-8b6d-40ee3b3624c21759325522612-2025_10_01-15.32.35.344/source.csv
@@ -0,0 +1,819 @@
+Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type
+1,4,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_tokenizer_patch_size_4.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --gres=gpu:1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/tokenizer/%x_%j.log\n#SBATCH --job-name=tokenizer_coinrun_mila_submission_patch_size_4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n    echo ""[$(date)] caught sigusr1 (timeout
warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun tokenizer 500m_dataset mila_submission patch_size_4""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/${job_name}_${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\nsrun python jasmine/train_tokenizer.py \\n --patch_size=4 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags $tags \\n --entity instant-uv \\n --project jafar \\n --data_dir=""${array_records_dir}/train"" \\n --val_data_dir=""${array_records_dir}/val"" &\n\nchild_pid=$!\n\nwait $child_pid\n",shellscript,tab +2,369,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"3:32:35 PM [info] Activating crowd-code\n3:32:35 PM [info] Recording started\n3:32:35 PM [info] Initializing git provider using file system watchers...\n",Log,tab +3,372,"extension-output-pdoom-org.crowd-code-#1-crowd-code",150,0,"3:32:35 PM [info] Git repository found\n3:32:35 PM [info] Git provider initialized successfully\n3:32:35 PM [info] Initial git state: [object Object]\n",Log,content +4,1556,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_tokenizer_patch_size_4.sh",0,0,"",shellscript,tab +5,3838,"TERMINAL",0,0,"git branch",,terminal_command +6,3882,"TERMINAL",0,0,"]633;C[?1h=\r ablation/use-pytorch-dataloader\r\n action-mapper\r\n* add-noise-to-combat-exposure-bias\r\n add-wandb-name-and-tags\r\n before-nnx\r\n causal-mem-reduce\r\n causal-spatiotemporal-kv-cache\r\n causal-st-transformer\r\n causal-transformer-dynamics-model\r\n causal-transformer-nnx-no-kv-cache\r\n:",,terminal_output +7,5572,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +8,8259,"TERMINAL",0,0,"git checkout main",,terminal_command +9,8329,"TERMINAL",0,0,"]633;C",,terminal_output +10,8469,"TERMINAL",0,0,"Switched to branch 'main'\r\nYour branch is up to date with 'origin/main'.\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +11,10157,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_tokenizer_patch_size_4.sh",0,0,"Switched from branch 'add-noise-to-combat-exposure-bias' to 'main'",shellscript,git_branch_checkout +12,10279,"TERMINAL",0,0,"git pull",,terminal_command +13,10326,"TERMINAL",0,0,"]633;C",,terminal_output +14,12250,"TERMINAL",0,0,"Already up to date.\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +15,15806,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH 
--error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_patch_size_16_noise_branch\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission debug patch_size_16 noise-branch""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n# Check if the current branch is the main branch\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""add-noise-to-combat-exposure-bias"" ]; then\n echo ""This script must be run from the noise-exposure branch. Current branch is $current_branch. Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --max_noise_level=0 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +16,16122,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n 
warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n 
mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n 
grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n 
metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> 
tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = 
einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +17,16458,"TERMINAL",0,0,"bash",,terminal_focus +18,36580,"TERMINAL",0,0,"git checkout -b ""prepend-action-maskgit""",,terminal_command +19,36657,"TERMINAL",0,0,"]633;CSwitched to a new branch 'prepend-action-maskgit'\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +20,40156,"",0,0,"Switched from branch 'main' to 'prepend-action-maskgit'",,git_branch_checkout +21,43497,"jasmine/models/dynamics.py",0,0,"from typing import Dict\n\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\n\nfrom utils.nn import STTransformer, Transformer\n\n\nclass DynamicsMaskGIT(nnx.Module):\n """"""\n MaskGIT dynamics model\n\n Dimension keys:\n B: batch size\n T: sequence length\n N: number of patches per frame\n L: latent dimension\n V: vocabulary size (number of latents)\n """"""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: 
int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n mask_limit: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n\n self.transformer = STTransformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.mask_token = nnx.Param(\n nnx.initializers.lecun_uniform()(rngs.params(), (1, 1, 1, self.model_dim))\n )\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n # --- Mask videos ---\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n\n batch_size = vid_embed_BTNM.shape[0]\n _rng_prob, *_rngs_mask = jax.random.split(batch[""mask_rng""], batch_size + 1)\n mask_prob = jax.random.uniform(\n _rng_prob, shape=(batch_size,), minval=self.mask_limit\n )\n per_sample_shape = vid_embed_BTNM.shape[1:-1]\n mask = jax.vmap(\n lambda rng, prob: jax.random.bernoulli(rng, prob, per_sample_shape),\n in_axes=(0, 0),\n )(jnp.asarray(_rngs_mask), mask_prob)\n mask = mask.at[:, 0].set(False)\n vid_embed_BTNM = jnp.where(\n jnp.expand_dims(mask, -1), self.mask_token.value, vid_embed_BTNM\n )\n\n # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask\n\n\nclass DynamicsCausal(nnx.Module):\n """"""Causal dynamics model""""""\n\n def __init__(\n self,\n model_dim: int,\n ffn_dim: int,\n num_latents: int,\n latent_action_dim: int,\n num_blocks: int,\n num_heads: int,\n dropout: float,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n ):\n self.model_dim = model_dim\n self.ffn_dim = ffn_dim\n self.num_latents = num_latents\n self.latent_action_dim = latent_action_dim\n self.num_blocks = num_blocks\n self.num_heads = num_heads\n self.dropout = dropout\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.decode = decode\n\n self.transformer = Transformer(\n self.model_dim,\n self.model_dim,\n self.ffn_dim,\n self.num_latents,\n self.num_blocks,\n self.num_heads,\n self.dropout,\n self.param_dtype,\n self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=rngs,\n )\n self.patch_embed = nnx.Embed(self.num_latents, self.model_dim, rngs=rngs)\n self.action_up = nnx.Linear(\n self.latent_action_dim,\n self.model_dim,\n param_dtype=self.param_dtype,\n 
dtype=self.dtype,\n rngs=rngs,\n )\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> tuple[jax.Array, jax.Array]:\n video_tokens_BTN = batch[""video_tokens""]\n latent_actions_BTm11L = batch[""latent_actions""]\n vid_embed_BTNM = self.patch_embed(video_tokens_BTN)\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp1M = jnp.concatenate(\n [padded_act_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp1V = self.transformer(vid_embed_BTNp1M)\n logits_BTNV = logits_BTNp1V[:, :, :-1]\n return logits_BTNV, jnp.ones_like(video_tokens_BTN)\n",python,tab +22,50383,"jasmine/models/dynamics.py",3229,0,"",python,selection_mouse +23,51586,"jasmine/models/dynamics.py",3231,0,"",python,selection_mouse +24,52275,"jasmine/models/dynamics.py",3288,0,"",python,selection_mouse +25,100538,"jasmine/models/dynamics.py",3395,0,"",python,selection_mouse +26,100541,"jasmine/models/dynamics.py",3394,0,"",python,selection_command +27,100676,"jasmine/models/dynamics.py",3394,1,"k",python,selection_mouse +28,100691,"jasmine/models/dynamics.py",3395,0,"",python,selection_command +29,100746,"jasmine/models/dynamics.py",3342,53,"rmer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +30,100762,"jasmine/models/dynamics.py",3336,59,"ransformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +31,100784,"jasmine/models/dynamics.py",3333,62,"f.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +32,100798,"jasmine/models/dynamics.py",3331,64,"elf.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +33,100811,"jasmine/models/dynamics.py",3281,114,"M += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +34,100837,"jasmine/models/dynamics.py",3277,118,"_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +35,100851,"jasmine/models/dynamics.py",3259,136,"\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +36,100922,"jasmine/models/dynamics.py",3258,137,")\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +37,100923,"jasmine/models/dynamics.py",3201,194," padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +38,100927,"jasmine/models/dynamics.py",3200,195," padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +39,100938,"jasmine/models/dynamics.py",3198,197," padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +40,100959,"jasmine/models/dynamics.py",3147,248," padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +41,100985,"jasmine/models/dynamics.py",3145,250," padded_act_embed_BTNM = jnp.broadcast_to(\n 
padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +42,100997,"jasmine/models/dynamics.py",3144,251," padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +43,101058,"jasmine/models/dynamics.py",3134,261," )\n padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +44,101059,"jasmine/models/dynamics.py",3071,324," act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +45,101092,"jasmine/models/dynamics.py",3030,365," padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +46,101179,"jasmine/models/dynamics.py",2965,430," act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +47,101390,"jasmine/models/dynamics.py",2928,467," # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n padded_act_embed_BTNM = jnp.broadcast_to(\n padded_act_embed_BT1M, vid_embed_BTNM.shape\n )\n vid_embed_BTNM += padded_act_embed_BTNM\n logits_BTNV = self.transformer(vid_embed_BTNM)\n return logits_BTNV, mask",python,selection_mouse +48,102089,"jasmine/models/dynamics.py",2928,467,"",python,content +49,102516,"jasmine/models/dynamics.py",2928,0," # --- Predict transition ---\n act_embed_BTm11M = self.action_up(latent_actions_BTm11L)\n padded_act_embed_BT1M = jnp.pad(\n act_embed_BTm11M, ((0, 0), (1, 0), (0, 0), (0, 0))\n )\n vid_embed_BTNp2M = jnp.concatenate(\n [padded_act_embed_BT1M, noise_level_embed_BT1M, vid_embed_BTNM], axis=2\n )\n logits_BTNp2V = self.transformer(vid_embed_BTNp2M)\n logits_BTNV = logits_BTNp2V[:, :, 2:]\n return logits_BTNV, mask",python,content +50,103712,"jasmine/models/dynamics.py",3166,0,"",python,selection_mouse +51,103922,"jasmine/models/dynamics.py",3165,0,"",python,selection_command +52,104562,"jasmine/models/dynamics.py",3209,0,"",python,selection_command +53,104777,"jasmine/models/dynamics.py",3210,0,"",python,selection_command +54,105285,"jasmine/models/dynamics.py",3211,0,"",python,selection_command +55,105297,"jasmine/models/dynamics.py",3212,0,"",python,selection_command +56,105355,"jasmine/models/dynamics.py",3213,0,"",python,selection_command +57,105357,"jasmine/models/dynamics.py",3214,0,"",python,selection_command 
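Annotation: the DynamicsMaskGIT.__call__ captured earlier in this recording draws one masking probability per sample (uniform on [mask_limit, 1)) and then an independent Bernoulli mask over every token position, always leaving the first frame unmasked as conditioning. A minimal runnable JAX sketch of that scheme, reconstructed from the recorded code (the function name and example shapes are illustrative, not part of the recording):

    import jax
    import jax.numpy as jnp

    def sample_maskgit_mask(rng, batch_size, per_sample_shape, mask_limit=0.5):
        # One mask probability per sample, drawn uniformly from [mask_limit, 1).
        rng_prob, *rngs_mask = jax.random.split(rng, batch_size + 1)
        mask_prob = jax.random.uniform(rng_prob, shape=(batch_size,), minval=mask_limit)
        # Independent Bernoulli mask per sample over all (T, N) token positions.
        mask = jax.vmap(
            lambda r, p: jax.random.bernoulli(r, p, per_sample_shape),
            in_axes=(0, 0),
        )(jnp.asarray(rngs_mask), mask_prob)
        # The first frame is never masked: it conditions the prediction.
        return mask.at[:, 0].set(False)

    # Example: batch of 4 clips, 16 frames, 25 patches per frame.
    mask = sample_maskgit_mask(jax.random.key(0), 4, (16, 25))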
+58,105415,"jasmine/models/dynamics.py",3215,0,"",python,selection_command +59,105417,"jasmine/models/dynamics.py",3216,0,"",python,selection_command +60,105482,"jasmine/models/dynamics.py",3217,0,"",python,selection_command +61,105483,"jasmine/models/dynamics.py",3218,0,"",python,selection_command +62,105539,"jasmine/models/dynamics.py",3219,0,"",python,selection_command +63,105539,"jasmine/models/dynamics.py",3220,0,"",python,selection_command +64,105600,"jasmine/models/dynamics.py",3221,0,"",python,selection_command +65,105736,"jasmine/models/dynamics.py",3222,0,"",python,selection_command +66,105926,"jasmine/models/dynamics.py",3223,0,"",python,selection_command +67,106833,"jasmine/models/dynamics.py",3224,24,"",python,content +68,107690,"jasmine/models/dynamics.py",3224,0,"",python,selection_command +69,108180,"jasmine/models/dynamics.py",3225,0,"",python,selection_command +70,108235,"jasmine/models/dynamics.py",3226,0,"",python,selection_command +71,108236,"jasmine/models/dynamics.py",3227,0,"",python,selection_command +72,108295,"jasmine/models/dynamics.py",3228,0,"",python,selection_command +73,108299,"jasmine/models/dynamics.py",3229,0,"",python,selection_command +74,108356,"jasmine/models/dynamics.py",3230,0,"",python,selection_command +75,108359,"jasmine/models/dynamics.py",3231,0,"",python,selection_command +76,108421,"jasmine/models/dynamics.py",3232,0,"",python,selection_command +77,108422,"jasmine/models/dynamics.py",3233,0,"",python,selection_command +78,108481,"jasmine/models/dynamics.py",3234,0,"",python,selection_command +79,108482,"jasmine/models/dynamics.py",3235,0,"",python,selection_command +80,108541,"jasmine/models/dynamics.py",3236,0,"",python,selection_command +81,108542,"jasmine/models/dynamics.py",3237,0,"",python,selection_command +82,108566,"jasmine/models/dynamics.py",3238,0,"",python,selection_command +83,108628,"jasmine/models/dynamics.py",3239,0,"",python,selection_command +84,108629,"jasmine/models/dynamics.py",3240,0,"",python,selection_command +85,108652,"jasmine/models/dynamics.py",3241,0,"",python,selection_command +86,108715,"jasmine/models/dynamics.py",3242,0,"",python,selection_command +87,108718,"jasmine/models/dynamics.py",3243,0,"",python,selection_command +88,108760,"jasmine/models/dynamics.py",3244,0,"",python,selection_command +89,108780,"jasmine/models/dynamics.py",3245,0,"",python,selection_command +90,109038,"jasmine/models/dynamics.py",3247,0,"",python,selection_command +91,109855,"jasmine/models/dynamics.py",3246,0,"",python,selection_command +92,110004,"jasmine/models/dynamics.py",3186,0,"",python,selection_command +93,110233,"jasmine/models/dynamics.py",3142,0,"",python,selection_command +94,110427,"jasmine/models/dynamics.py",3129,0,"",python,selection_command +95,110664,"jasmine/models/dynamics.py",3142,0,"",python,selection_command +96,110824,"jasmine/models/dynamics.py",3186,0,"",python,selection_command +97,111370,"jasmine/models/dynamics.py",3185,0,"",python,selection_command +98,111862,"jasmine/models/dynamics.py",3184,0,"",python,selection_command +99,111920,"jasmine/models/dynamics.py",3183,0,"",python,selection_command +100,111921,"jasmine/models/dynamics.py",3182,0,"",python,selection_command +101,111981,"jasmine/models/dynamics.py",3181,0,"",python,selection_command +102,111982,"jasmine/models/dynamics.py",3180,0,"",python,selection_command +103,112037,"jasmine/models/dynamics.py",3179,0,"",python,selection_command +104,112040,"jasmine/models/dynamics.py",3178,0,"",python,selection_command 
+105,112099,"jasmine/models/dynamics.py",3177,0,"",python,selection_command +106,112100,"jasmine/models/dynamics.py",3176,0,"",python,selection_command +107,112127,"jasmine/models/dynamics.py",3175,0,"",python,selection_command +108,112444,"jasmine/models/dynamics.py",3174,0,"",python,selection_command +109,112605,"jasmine/models/dynamics.py",3173,0,"",python,selection_command +110,112753,"jasmine/models/dynamics.py",3172,0,"",python,selection_command +111,112916,"jasmine/models/dynamics.py",3171,0,"",python,selection_command +112,113108,"jasmine/models/dynamics.py",3170,0,"",python,selection_command +113,113240,"jasmine/models/dynamics.py",3169,0,"",python,selection_command +114,113405,"jasmine/models/dynamics.py",3168,0,"",python,selection_command +115,113742,"jasmine/models/dynamics.py",3167,0,"",python,selection_command +116,116389,"jasmine/models/dynamics.py",3166,1,"",python,content +117,116497,"jasmine/models/dynamics.py",3166,0,"1",python,content +118,116498,"jasmine/models/dynamics.py",3167,0,"",python,selection_keyboard +119,118170,"jasmine/models/dynamics.py",3166,0,"",python,selection_command +120,119078,"jasmine/models/dynamics.py",3210,0,"",python,selection_command +121,120226,"jasmine/models/dynamics.py",3314,0,"1",python,content +122,120227,"jasmine/models/dynamics.py",3313,1,"",python,content +123,122054,"jasmine/models/dynamics.py",3360,0,"1",python,content +124,122054,"jasmine/models/dynamics.py",3359,1,"",python,content +125,122054,"jasmine/models/dynamics.py",3351,0,"1",python,content +126,122054,"jasmine/models/dynamics.py",3350,1,"",python,content +127,123018,"jasmine/models/dynamics.py",3256,0,"",python,selection_command +128,125445,"jasmine/models/dynamics.py",3278,0,"1",python,content +129,125446,"jasmine/models/dynamics.py",3277,1,"",python,content +130,131260,"jasmine/models/dynamics.py",3362,0,"",python,selection_mouse +131,131264,"jasmine/models/dynamics.py",3361,0,"",python,selection_command +132,132211,"jasmine/models/dynamics.py",3395,0,"",python,selection_mouse +133,132212,"jasmine/models/dynamics.py",3394,0,"",python,selection_command +134,140307,"TERMINAL",0,0,"salloc --time=01:00:00 --partition=dev_accelerated --nodes=1 --gres=gpu:1 --cpus-per-task=8",,terminal_command +135,140362,"TERMINAL",0,0,"]633;Csalloc: Granted job allocation 3535390\r\n",,terminal_output +136,140464,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +137,167565,"TERMINAL",0,0,"salloc: Nodes hkn0401 are ready for job\r\n",,terminal_output +138,168369,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h[tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +139,168882,"TERMINAL",0,0,"s",,terminal_output +140,169035,"TERMINAL",0,0,"u",,terminal_output +141,169146,"TERMINAL",0,0,"r",,terminal_output +142,169428,"TERMINAL",0,0,"",,terminal_output +143,169722,"TERMINAL",0,0,"o",,terminal_output +144,170076,"TERMINAL",0,0,"",,terminal_output +145,170145,"TERMINAL",0,0,"",,terminal_output +146,170332,"TERMINAL",0,0,"o",,terminal_output +147,170399,"TERMINAL",0,0,"u",,terminal_output +148,170463,"TERMINAL",0,0,"r",,terminal_output +149,170721,"TERMINAL",0,0,"c",,terminal_output +150,170830,"TERMINAL",0,0,"e",,terminal_output +151,170893,"TERMINAL",0,0," ",,terminal_output +152,171044,"TERMINAL",0,0,".",,terminal_output +153,171108,"TERMINAL",0,0,"v",,terminal_output +154,171275,"TERMINAL",0,0,"env/",,terminal_output +155,171486,"TERMINAL",0,0,"b",,terminal_output +156,171675,"TERMINAL",0,0,"in/",,terminal_output 
+157,171865,"TERMINAL",0,0,"ac",,terminal_output +158,172109,"TERMINAL",0,0,"tivate",,terminal_output +159,172236,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +160,191850,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=4\n#SBATCH --time=00:20:00\n#SBATCH --partition=dev_accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\n#SBATCH --job-name=train_lam_1e-4\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/train\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3498707\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3498707\n\nenv | grep SLURM\n\nexport PYTHONUNBUFFERED=1\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n --image_height=64 \\n --image_width=64 \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --init_lr=0 \\n --max_lr=1e-4 \\n --log_image_interval=50 \\n --log_checkpoint_interval=2 \\n --dyna_type=maskgit \\n --log \\n --name=coinrun-dyn-dev-$slurm_job_id \\n --tags dyn coinrun dev \\n --entity instant-uv \\n --project jafar \\n --warmup_steps 0 \\n --wsd_decay_steps 0 \\n --num_steps 10 \\n --data_dir $array_records_dir_train \\n --tokenizer_checkpoint $tokenizer_checkpoint \\n --val_data_dir $array_records_dir_val \\n --val_interval 2 \\n --val_steps 5\n",shellscript,tab +161,197334,"TERMINAL",0,0,"s",,terminal_output +162,197400,"TERMINAL",0,0,"h",,terminal_output +163,197490,"TERMINAL",0,0," ",,terminal_output +164,197757,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",,terminal_output +165,198993,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/lam/%x_%j.log\r\n#SBATCH --job-name=train_lam_1e-4\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload 
mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/train\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\nlam_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/lam/interactive/3498707\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/interactive/3498707\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python jasmine/train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=50 \\r\n --log_checkpoint_interval=2 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 10 \\r\n --data_dir $array_records_dir_train \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 2 \\r\n --val_steps 5\r\n",,terminal_output +166,199192,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1175644\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759325695\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759329295\r\nSLURM_PMI2_SRUN_PORT=40605\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3535390\r\nSLURM_PTY_PORT=35743\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=32\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=179\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33917\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3535390\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33917\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output 
+167,199301,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +168,204786,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +169,208596,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2040,0,"",shellscript,selection_mouse +170,208620,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2039,0,"",shellscript,selection_command +171,212389,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +172,217718,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +173,218496,"TERMINAL",0,0,"Counting all components: ['dynamics', 'lam', 'tokenizer']\r\n",,terminal_output +174,218861,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +175,220112,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.22.0\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20251001_153614-qxx0fwad\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-dyn-dev-3535390\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/qxx0fwad\r\n",,terminal_output +176,220626,"TERMINAL",0,0,"Parameter counts:\r\n{'dynamics': 26555904, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78686016}\r\n",,terminal_output +177,226740,"TERMINAL",0,0,"Traceback (most recent call last):\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 821, in \r\n main(args)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 473, in main\r\n restore_or_initialize_components(\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 325, in restore_or_initialize_components\r\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/genie.py"", line 614, in restore_genie_components\r\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/checkpoint_manager.py"", line 1673, in restore\r\n restored = self._checkpointer.restore(restore_directory, args=args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/checkpointers/async_checkpointer.py"", line 571, in restore\r\n return super().restore(directory, *args, **kwargs)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 306, in restore\r\n restored = self._restore(directory, args=ckpt_args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/checkpointers/checkpointer.py"", line 328, in _restore\r\n return self._handler.restore(directory, args=args)\r\n 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/handlers/composite_checkpoint_handler.py"", line 857, in restore\r\n restored[item_name] = handler.restore(\r\n ^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/handlers/pytree_checkpoint_handler.py"", line 835, in restore\r\n return self._handler_impl.restore(directory, args=args)\r\n ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/handlers/base_pytree_checkpoint_handler.py"", line 949, in restore\r\n raise ValueError(\r\nValueError: User-provided restore item and on-disk value metadata tree structures do not match:\r\nmodel.vq.drop.rngs.count:\r\n - Source: {'value': ShapeDtypeStruct(shape=(), dtype=uint32, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}\r\n - Target: MISSING\r\n\r\nmodel.vq.drop.rngs.default:\r\n - Source: MISSING\r\n - Target: {'count': {'value': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=())}, 'key': {'value': ValueMetadataEntry(value_type='jax.Array', skip_deserialize=False, write_shape=(2,))}}\r\n\r\nmodel.vq.drop.rngs.key:\r\n - Source: {'value': ShapeDtypeStruct(shape=(), dtype=key, sharding=NamedSharding(mesh=Mesh('data': 1, axis_types=(Auto,)), spec=PartitionSpec(), memory_kind=device))}\r\n - Target: MISSING\r\n",,terminal_output +178,227923,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-dyn-dev-3535390 at: https://wandb.ai/instant-uv/jafar/runs/qxx0fwad\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20251001_153614-qxx0fwad/logs\r\n",,terminal_output +179,228057,"TERMINAL",0,0,"W1001 15:36:23.261615 1175916 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugstr job_name: ""jax_worker"": CANCELLED: CANCELLED\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:1, grpc_message:""CANCELLED""} [type.googleapis.com/tensorflow.DerivedStatus='']\r\n",,terminal_output +180,229007,"TERMINAL",0,0,"srun: error: hkn0401: task 0: Exited with exit code 1\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +181,235282,"TERMINAL",0,0,"bash",,terminal_focus +182,236705,"TERMINAL",0,0,"srun",,terminal_focus +183,247839,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=48:00:00\n#SBATCH --partition=accelerated\n#SBATCH --cpus-per-task=5\n#SBATCH --gres=gpu:1\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/maskgit/%x_%j.log\n#SBATCH --job-name=train_dynamics_maskgit\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job 
$SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nmodule unload mpi/openmpi/5.0\nmodule unload devel/cuda/12.4\nsource .venv/bin/activate\n\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/train\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_gt_actions_split/val\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/coinrun/maskgit/$job_name/$slurm_job_id\nmkdir -p $CHECKPOINT_DIR\n\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\n\nenv | grep SLURM\n\nsrun python jasmine/train_dynamics.py \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --batch_size=110 \\n --image_height=64 \\n --image_width=64 \\n --patch_size=16 \\n --max_lr=3e-5 \\n --log_image_interval=1000 \\n --log \\n --log_checkpoint_interval=1000 \\n --name=coinrun-dynamics-maskgit-default-$slurm_job_id \\n --tags coinrun dynamics maskgit default \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=$tokenizer_ckpt_dir \\n --eval_full_frame \\n --data_dir $array_records_dir_train \\n --val_data_dir $array_records_dir_val &\n\nchild_pid=$!\n\nwait $child_pid",shellscript,tab +184,249689,"slurm/jobs/mihir/horeka/coinrun/default_runs/train_dyn_default.sh",1748,0,"",shellscript,selection_mouse +185,256654,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +186,259049,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1437,0,"",shellscript,selection_mouse +187,259833,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1550,0,"\ntokenizer_ckpt_dir=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955",shellscript,content +188,259851,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1551,0,"",shellscript,selection_command +189,260249,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1423,0,"",shellscript,selection_command +190,261507,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1551,0,"",shellscript,selection_command +191,261743,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1552,0,"",shellscript,selection_command +192,262274,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1553,0,"",shellscript,selection_command +193,262275,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1554,0,"",shellscript,selection_command +194,262334,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1555,0,"",shellscript,selection_command +195,262335,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1556,0,"",shellscript,selection_command +196,262394,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1557,0,"",shellscript,selection_command +197,262395,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1558,0,"",shellscript,selection_command 
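Annotation: the ValueError in the run above is orbax's tree-structure check at restore time: the on-disk tokenizer checkpoint stores the VQ dropout RNG state as model.vq.drop.rngs.default.{count,key}, while the abstract target built from the current model exposes model.vq.drop.rngs.{count,key}. One plausible reading is that the checkpoint was written under a different flax.nnx version or Rngs construction, so the restore target no longer matches the saved tree. The restore pattern itself, as recorded in restore_or_initialize_components, is sketched below (assumes optimizer and checkpoint_manager are already built as in the recording):

    import flax.nnx as nnx
    import orbax.checkpoint as ocp

    # Build an abstract (shape-only) copy of the optimizer; its pytree structure
    # must match the on-disk metadata exactly, or orbax raises the mismatch
    # ValueError seen above.
    abstract_optimizer = nnx.eval_shape(lambda: optimizer)
    abstract_state = nnx.state(abstract_optimizer)
    restored = checkpoint_manager.restore(
        checkpoint_manager.latest_step(),
        args=ocp.args.Composite(model_state=ocp.args.PyTreeRestore(abstract_state)),
    )
    nnx.update(optimizer, restored["model_state"])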
+198,262454,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1559,0,"",shellscript,selection_command +199,262454,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1560,0,"",shellscript,selection_command +200,262514,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1561,0,"",shellscript,selection_command +201,263015,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1561,8,"",shellscript,content +202,265039,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1561,0,"c",shellscript,content +203,265041,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1562,0,"",shellscript,selection_keyboard +204,265150,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1562,0,"h",shellscript,content +205,265151,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1563,0,"",shellscript,selection_keyboard +206,265210,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1563,0,"e",shellscript,content +207,265211,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1564,0,"",shellscript,selection_keyboard +208,265350,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1564,0,"c",shellscript,content +209,265351,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1565,0,"",shellscript,selection_keyboard +210,265450,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1565,0,"k",shellscript,content +211,265451,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1566,0,"",shellscript,selection_keyboard +212,265666,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1566,0,"o",shellscript,content +213,265667,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1567,0,"",shellscript,selection_keyboard +214,266508,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1566,1,"",shellscript,content +215,266709,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1566,0,"p",shellscript,content +216,266710,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1567,0,"",shellscript,selection_keyboard +217,267061,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1567,0,"o",shellscript,content +218,267062,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1568,0,"",shellscript,selection_keyboard +219,267324,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1568,0,"i",shellscript,content +220,267325,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1569,0,"",shellscript,selection_keyboard +221,267649,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1569,0,"n",shellscript,content +222,267652,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1570,0,"",shellscript,selection_keyboard +223,267927,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1570,0,"t",shellscript,content +224,267928,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1571,0,"",shellscript,selection_keyboard +225,269368,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1571,0,"=",shellscript,content +226,269370,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1572,0,"",shellscript,selection_keyboard +227,270328,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1571,1,"",shellscript,content +228,270590,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1570,0,"",shellscript,selection_command +229,270850,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1442,0,"",shellscript,selection_command +230,271461,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",1307,244,"",shellscript,content 
+231,273389,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",271,0,"",shellscript,selection_command +232,275404,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",271,3,"",shellscript,content +233,276882,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",271,0,"dynamics",shellscript,content +234,277894,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",389,0,"ics",shellscript,content +235,277895,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",387,0,"dyn",shellscript,content +236,277895,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",386,1,"",shellscript,content +237,279582,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",438,0,"_gpu",shellscript,content +238,279582,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",436,2,"",shellscript,content +239,279582,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",435,0,"singl",shellscript,content +240,279582,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",434,1,"",shellscript,content +241,279582,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",433,0,"dyn",shellscript,content +242,279585,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",430,3,"",shellscript,content +243,281487,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",278,0,"",shellscript,selection_command +244,291681,"TERMINAL",0,0,"s",,terminal_output +245,291750,"TERMINAL",0,0,"h",,terminal_output +246,291889,"TERMINAL",0,0," ",,terminal_output +247,292133,"TERMINAL",0,0,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",,terminal_output +248,292355,"TERMINAL",0,0,"\rslurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/%x_%j.log\r\n#SBATCH --job-name=train_dyn_single_gpu\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/train\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python jasmine/train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=50 \\r\n 
--log_checkpoint_interval=2 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 10 \\r\n --data_dir $array_records_dir_train \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 2 \\r\n --val_steps 5\r\n",,terminal_output +249,292468,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1175644\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759325695\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759329295\r\nSLURM_PMI2_SRUN_PORT=40605\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3535390\r\nSLURM_PTY_PORT=35743\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=32\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=179\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33917\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3535390\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33917\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +250,292577,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +251,295245,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +252,301184,"TERMINAL",0,0,"Counting all components: ['dynamics', 'lam', 'tokenizer']\r\n",,terminal_output +253,301530,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +254,302288,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.22.0\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20251001_153736-bmlob0lg\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-dyn-dev-3535390\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/bmlob0lg\r\n",,terminal_output +255,302471,"TERMINAL",0,0,"Parameter counts:\r\n{'dynamics': 26555904, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78686016}\r\n",,terminal_output +256,306720,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. 
Please note restoration time will be slightly increased due to reading from file. Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +257,378341,"TERMINAL",0,0,"Total memory size: 3.2 GB, Output size: 0.9 GB, Temp size: 2.2 GB, Argument size: 0.9 GB, Host temp size: 0.0 GB.\r\nFLOPs: 1.057e+11, Bytes: 7.723e+10 (71.9 GB), Intensity: 1.4 FLOPs/byte\r\nStarting training from step 0...\r\n",,terminal_output +258,378763,"TERMINAL",0,0,"\r\nMemstats: After params initialized:\r\n\tUsing (GB) 1.13 / 38.7 (2.919897%) on cuda:0\r\n",,terminal_output +259,415784,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +260,422251,"TERMINAL",0,0,"bash",,terminal_focus +261,442949,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_patch_size_16_noise_branch\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission debug patch_size_16 noise-branch""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n# Check if the current branch is the main branch\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""add-noise-to-combat-exposure-bias"" ]; then\n echo ""This script must be run from the noise-exposure branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --max_noise_level=0 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +262,448560,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1737,0,"",shellscript,selection_mouse +263,448762,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1735,3,"=$(",shellscript,selection_mouse +264,450114,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1742,0,"",shellscript,selection_mouse +265,450775,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1739,0,"",shellscript,selection_mouse +266,450924,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,3,"git",shellscript,selection_mouse +267,451133,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,8,"git rev-",shellscript,selection_mouse +268,451150,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,13,"git rev-parse",shellscript,selection_mouse +269,451207,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,15,"git rev-parse -",shellscript,selection_mouse +270,451208,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,16,"git rev-parse --",shellscript,selection_mouse +271,451218,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,69,"git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""add-noise",shellscript,selection_mouse +272,451234,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,70,"git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""add-noise-",shellscript,selection_mouse +273,451265,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,72,"git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""add-noise-to",shellscript,selection_mouse +274,451324,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,73,"git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""add-noise-to-",shellscript,selection_mouse +275,451333,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,79,"git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""add-noise-to-combat",shellscript,selection_mouse +276,451390,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,26,"git rev-parse --abbrev-ref",shellscript,selection_mouse +277,451466,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,27,"git rev-parse --abbrev-ref ",shellscript,selection_mouse 
+278,451531,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1738,31,"git rev-parse --abbrev-ref HEAD",shellscript,selection_mouse +279,455339,"TERMINAL",0,0,"branch",,terminal_command +280,455389,"TERMINAL",0,0,"]633;Cbash: branch: command not found...\r\n",,terminal_output +281,456460,"TERMINAL",0,0,"^C\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +282,461143,"TERMINAL",0,0,"vim ~/.bashrc",,terminal_command +283,461295,"TERMINAL",0,0,"]633;C[?1049h[>4;2m[?1h=[?2004h[?1004h[?12h[?12l[?25l""~/.bashrc"" 51L, 2690B▽ Pzz\[0%m [>c]10;?]11;?alias fsacct_week='sacct --format=""JobID%15,JobName%30,Partition%16,AllocCPUS%3,State%12,Elapsed%10,Timelimit%10"" --starttime $(date -d ""last week"" +%Y-%m-%d) | grep -vE ""*.batch||*.extern|*.inter|bash|python|CANCELLED|echo""'alias logs=""cd /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir""alias sbatch_dir=""sh /home/hk-project-p0023960/tum_cte0515/sbatch_dir.sh $@""\r\n\r\n\r\noverlapper() {\r\n if [ -z ""$1"" ]; thenecho ""Usage: overlap ""36,7476%[?25hP+q436f\P+q6b75\P+q6b64\P+q6b72\P+q6b6c\P+q2332\P+q2334\P+q2569\P+q2a37\P+q6b31\[?12$p[?25l/3333/3333 [?25h[?25l/f6f6/e3e3 [?25h",,terminal_output +284,462095,"TERMINAL",0,0,"[?25lj 7,0-1[?25h",,terminal_output +285,462602,"TERMINAL",0,0,"[?25lk 6,74 [?25h",,terminal_output +286,462934,"TERMINAL",0,0,"[?25lo -- INSERT --37,174%37,174%[?25h",,terminal_output +287,463392,"TERMINAL",0,0,"[?25la2[?25h",,terminal_output +288,463456,"TERMINAL",0,0,"[?25ll3[?25h",,terminal_output +289,463636,"TERMINAL",0,0,"[?25li4[?25h",,terminal_output +290,463701,"TERMINAL",0,0,"[?25la5[?25h",,terminal_output +291,463767,"TERMINAL",0,0,"[?25l\ralias6[?25h",,terminal_output +292,463963,"TERMINAL",0,0,"[?25l7[?25h",,terminal_output +293,464136,"TERMINAL",0,0,"[?25lb8[?25h",,terminal_output +294,464203,"TERMINAL",0,0,"[?25l br9[?25h",,terminal_output +295,464428,"TERMINAL",0,0,"[?25la10[?25h[?25ln1[?25h",,terminal_output +296,464576,"TERMINAL",0,0,"[?25lc2[?25h",,terminal_output +297,464640,"TERMINAL",0,0,"[?25lh3[?25h",,terminal_output +298,465311,"TERMINAL",0,0,"[?25l=4[?25h",,terminal_output +299,465851,"TERMINAL",0,0,"[?25l""\r\n\r\n\r\noverlapper() {\r\n if [ -z  ]; then5[?25h",,terminal_output +300,466505,"TERMINAL",0,0,"[?25lgit rev-parse --abbrev-ref HEAD[?25h[?25l46[?25h",,terminal_output +301,467108,"TERMINAL",0,0,"[?25l""\r\n\r\n\r\noverlapper() {\r\n if [ -z  ]; then7[?25h",,terminal_output +302,468779,"TERMINAL",0,0,"[?25l^[",,terminal_output +303,468895,"TERMINAL",0,0," 37,4674%[?25h",,terminal_output +304,468959,"TERMINAL",0,0,"[?25l::[?25h",,terminal_output +305,469163,"TERMINAL",0,0,"w",,terminal_output +306,469310,"TERMINAL",0,0,"q",,terminal_output +307,469448,"TERMINAL",0,0,"\r[?25l[?2004l[>4;m""~/.bashrc"" 52L, 2737B written",,terminal_output +308,469458,"TERMINAL",0,0,"\r\r\r\n[?1004l[?2004l[?1l>[?25h[>4;m[?1049l]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +309,472482,"TERMINAL",0,0,"source ~/.bashrc",,terminal_command +310,473443,"TERMINAL",0,0,"branch",,terminal_command +311,475914,"TERMINAL",0,0,"srun",,terminal_focus +312,476793,"TERMINAL",0,0,"Step 2, validation loss: 12.786288261413574\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=1] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=1] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +313,477386,"TERMINAL",0,0,"Saved checkpoint at step 2\r\n",,terminal_output +314,502192,"TERMINAL",0,0,"bash",,terminal_focus +315,504443,"TERMINAL",0,0,"git status",,terminal_command +316,504495,"TERMINAL",0,0,"]633;COn branch prepend-action-maskgit\r\n",,terminal_output +317,504524,"TERMINAL",0,0,"Last commands done (2 commands done):\r\n pick ba37453 feat: generate coinrun dataset with val split\r\n pick faadd10 feat: implemented validation loss for all three models\r\nNext commands to do (26 remaining commands):\r\n pick 9a17dbb fix: pass val data path to dataloader\r\n pick 6e69cdb fix typo in image logging\r\n (use ""git rebase --edit-todo"" to view and edit)\r\nYou are currently editing a commit while rebasing branch 'gt-actions' on 'c7522f2'.\r\n (use ""git commit --amend"" to amend the current commit)\r\n (use ""git rebase --continue"" once you are satisfied with your changes)\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: jasmine/models/dynamics.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdata/_vizdoom.ini\r\n\tdata/data/\r\n\tdata/jasmine_data/vizdoom/\r\n\tdata/uv.lock\r\n\tdiff.diff\r\n\tdiff2.diff\r\n\tinput_pipeline/\r\n\tkiller.sh\r\n\tkiller_partition.sh\r\n\tlog.log\r\n\tmessage.md\r\n\toverfit_dir.zip\r\n\trequirements-franz.txt\r\n\tsamples/\r\n\tscripts_cremers/\r\n\tslurm/\r\n\ttest.py\r\n\tutils/\r\n\tuv.lock\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +318,516589,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +319,520725,"TERMINAL",0,0,"Step 4, validation loss: 14.277682304382324\r\n",,terminal_output +320,520798,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=2] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=2] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +321,521545,"TERMINAL",0,0,"Saved checkpoint at step 4\r\n",,terminal_output +322,522147,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +323,526238,"TERMINAL",0,0,"Step 6, validation loss: 12.4479341506958\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=3] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=3] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. 
This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +324,526832,"TERMINAL",0,0,"Saved checkpoint at step 6\r\n",,terminal_output +325,528026,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +326,532175,"TERMINAL",0,0,"Step 8, validation loss: 12.086461067199707\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=4] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=4] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +327,532875,"TERMINAL",0,0,"Saved checkpoint at step 8\r\n",,terminal_output +328,533565,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +329,536030,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000008 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000008) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000002 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000002) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000006 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000006) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000004 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000004) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +330,537598,"TERMINAL",0,0,"Step 10, validation loss: 8.801262855529785\r\n",,terminal_output +331,537652,"TERMINAL",0,0,"WARNING:absl:[process=0][thread=MainThread][operation_id=5] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\nWARNING:absl:[process=0][thread=MainThread][operation_id=5] _SignalingThread.join() waiting for signals ([, ]) blocking the main thread will slow down blocking save times. This is likely due to main thread calling result() on a CommitFuture.\r\n",,terminal_output +332,538119,"TERMINAL",0,0,"Saved checkpoint at step 10\r\n",,terminal_output +333,540103,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000008 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000008) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000006 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000006) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000010 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000010) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000004 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000004) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +334,542402,"TERMINAL",0,0,"git commit -am ""concatenate (prepend) action embedding to video embeddings for transformer forward pass""",,terminal_command +335,542464,"TERMINAL",0,0,"]633;C",,terminal_output +336,543153,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-dyn-dev-3535390 at: https://wandb.ai/instant-uv/jafar/runs/bmlob0lg\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20251001_153736-bmlob0lg/logs\r\n",,terminal_output +337,543587,"TERMINAL",0,0,"black....................................................................",,terminal_output +338,544324,"TERMINAL",0,0,"W1001 15:41:39.487608 1176545 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.33:61982: Failed to connect to remote host: Connection refused\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_status:14, grpc_message:""failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.33:61982: Failed to connect to remote host: Connection refused""}\r\n",,terminal_output +339,545120,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +340,545183,"TERMINAL",0,0,"Passed\r\n",,terminal_output +341,545249,"TERMINAL",0,0,"[prepend-action-maskgit dd426e3] concatenate (prepend) action embedding to video embeddings for transformer forward pass\r\n 1 file changed, 4 insertions(+), 4 deletions(-)\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +342,547141,"TERMINAL",0,0,"git push",,terminal_command +343,547179,"TERMINAL",0,0,"]633;Cfatal: The current branch prepend-action-maskgit has no upstream branch.\r\nTo push the current branch and set the remote as upstream, use\r\n\r\n git push --set-upstream origin prepend-action-maskgit\r\n\r\nTo have this happen automatically for branches without a tracking\r\nupstream, see 'push.autoSetupRemote' in 'git help 
config'.\r\n\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +344,552311,"TERMINAL",0,0,"git push --set-upstream origin prepend-action-maskgit",,terminal_command +345,552366,"TERMINAL",0,0,"]633;C",,terminal_output +346,553755,"TERMINAL",0,0,"Enumerating objects: 9, done.\r\nCounting objects: 11% (1/9)\rCounting objects: 22% (2/9)\rCounting objects: 33% (3/9)\rCounting objects: 44% (4/9)\rCounting objects: 55% (5/9)\rCounting objects: 66% (6/9)\rCounting objects: 77% (7/9)\rCounting objects: 88% (8/9)\rCounting objects: 100% (9/9)\rCounting objects: 100% (9/9), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 20% (1/5)\rCompressing objects: 40% (2/5)\rCompressing objects: 60% (3/5)\rCompressing objects: 80% (4/5)\rCompressing objects: 100% (5/5)\rCompressing objects: 100% (5/5), done.\r\nWriting objects: 20% (1/5)\rWriting objects: 40% (2/5)\rWriting objects: 60% (3/5)\rWriting objects: 80% (4/5)\rWriting objects: 100% (5/5)\rWriting objects: 100% (5/5), 529 bytes | 264.00 KiB/s, done.\r\nTotal 5 (delta 3), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +347,553794,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/3)\rremote: Resolving deltas: 33% (1/3)\rremote: Resolving deltas: 66% (2/3)\rremote: Resolving deltas: 100% (3/3)\rremote: Resolving deltas: 100% (3/3), completed with 3 local objects.\r\n",,terminal_output +348,554127,"TERMINAL",0,0,"remote: \r\nremote: Create a pull request for 'prepend-action-maskgit' on GitHub by visiting:\r\nremote: https://github.com/p-doom/jasmine/pull/new/prepend-action-maskgit\r\nremote: \r\nTo github.com:p-doom/jasmine.git\r\n * [new branch] prepend-action-maskgit -> prepend-action-maskgit\r\nbranch 'prepend-action-maskgit' set up to track 'origin/prepend-action-maskgit'.\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +349,598578,"TERMINAL",0,0,"git push",,terminal_command +350,598628,"TERMINAL",0,0,"]633;C",,terminal_output +351,600072,"TERMINAL",0,0,"Everything up-to-date\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine",,terminal_output +352,739191,"TERMINAL",0,0,"srun",,terminal_focus +353,742044,"jasmine/genie.py",0,0,"from typing import Dict\n\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.nnx as nnx\nimport orbax.checkpoint as ocp\n\nfrom models.dynamics import DynamicsMaskGIT, DynamicsCausal\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nnx.Module):\n """"""Genie model""""""\n\n def __init__(\n self,\n in_dim: int,\n tokenizer_dim: int,\n tokenizer_ffn_dim: int,\n latent_patch_dim: int,\n num_patch_latents: int,\n patch_size: int,\n tokenizer_num_blocks: int,\n tokenizer_num_heads: int,\n lam_dim: int,\n lam_ffn_dim: int,\n latent_action_dim: int,\n num_actions: int,\n lam_patch_size: int,\n lam_num_blocks: int,\n lam_num_heads: int,\n lam_co_train: bool,\n use_gt_actions: bool,\n dyna_type: str,\n dyna_dim: int,\n dyna_ffn_dim: int,\n dyna_num_blocks: int,\n dyna_num_heads: int,\n param_dtype: jnp.dtype,\n dtype: jnp.dtype,\n use_flash_attention: bool,\n decode: bool,\n rngs: nnx.Rngs,\n dropout: float = 0.0,\n mask_limit: float = 0.0,\n ):\n # --- Tokenizer ---\n self.in_dim = in_dim\n self.tokenizer_dim = tokenizer_dim\n self.tokenizer_ffn_dim = tokenizer_ffn_dim\n self.latent_patch_dim = latent_patch_dim\n self.num_patch_latents = num_patch_latents\n self.patch_size = patch_size\n self.tokenizer_num_blocks = tokenizer_num_blocks\n self.tokenizer_num_heads = tokenizer_num_heads\n # --- LAM ---\n self.lam_dim 
= lam_dim\n self.lam_ffn_dim = lam_ffn_dim\n self.latent_action_dim = latent_action_dim\n self.num_actions = num_actions\n self.lam_patch_size = lam_patch_size\n self.lam_num_blocks = lam_num_blocks\n self.lam_num_heads = lam_num_heads\n self.lam_co_train = lam_co_train\n self.use_gt_actions = use_gt_actions\n # --- Dynamics ---\n self.dyna_type = dyna_type\n self.dyna_dim = dyna_dim\n self.dyna_ffn_dim = dyna_ffn_dim\n self.dyna_num_blocks = dyna_num_blocks\n self.dyna_num_heads = dyna_num_heads\n self.param_dtype = param_dtype\n self.dtype = dtype\n self.use_flash_attention = use_flash_attention\n self.dropout = dropout\n self.mask_limit = mask_limit\n self.decode = decode\n\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n ffn_dim=self.tokenizer_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n if self.use_gt_actions:\n self.action_embed = nnx.Embed(\n self.num_actions, self.latent_action_dim, rngs=rngs\n )\n self.lam = None\n else:\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n ffn_dim=self.lam_ffn_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n self.action_embed = None\n if self.dyna_type == ""maskgit"":\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=rngs,\n )\n elif self.dyna_type == ""causal"":\n self.dynamics = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=decode,\n rngs=rngs,\n )\n else:\n raise ValueError(f""Invalid dynamics type: {self.dyna_type}"")\n\n def __call__(\n self,\n batch: Dict[str, jax.Array],\n ) -> Dict[str, jax.Array]:\n videos_BTHWC = batch[""videos""]\n tokenizer_outputs = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_indices_BTN = tokenizer_outputs[""indices""]\n latent_actions_BTm11L = None\n action_embeddings_BTm11L = None\n if self.use_gt_actions:\n assert self.action_embed is not None\n action_indices_E = None\n action_embeddings_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n action_embeddings_BTm11L = action_embeddings_BT1L[:, :-1]\n else:\n assert self.lam is not None\n lam_outputs = self.lam.vq_encode(videos_BTHWC, training=False)\n z_q_BTm11L = lam_outputs[""z_q""]\n action_indices_E = lam_outputs[""indices""]\n latent_actions_BTm11L = jax.lax.cond(\n 
self.lam_co_train,\n lambda: z_q_BTm11L,\n lambda: jax.lax.stop_gradient(z_q_BTm11L),\n )\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(token_indices_BTN),\n latent_actions=(\n action_embeddings_BTm11L\n if self.use_gt_actions\n else latent_actions_BTm11L\n ),\n )\n outputs[""mask_rng""] = batch[""rng""]\n dyna_logits_BTNV, dyna_mask = self.dynamics(outputs)\n outputs[""token_logits""] = dyna_logits_BTNV\n outputs[""mask""] = dyna_mask\n mle_indices_BTN = jnp.argmax(outputs[""token_logits""], axis=-1)\n H, W = batch[""videos""].shape[2:4]\n outputs[""recon""] = self.tokenizer.decode(mle_indices_BTN, (H, W))\n if action_indices_E is not None:\n outputs[""lam_indices""] = action_indices_E\n return outputs\n\n def sample(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n maskgit_steps: int = 25,\n ) -> tuple[jax.Array, jax.Array]:\n if self.dyna_type == ""maskgit"":\n return self.sample_maskgit(\n batch, seq_len, maskgit_steps, temperature, sample_argmax\n )\n elif self.dyna_type == ""causal"":\n return self.sample_causal(batch, seq_len, temperature, sample_argmax)\n else:\n raise ValueError(f""Dynamics model type unknown: {self.dyna_type}"")\n\n def sample_maskgit(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n P: S * N\n """"""\n assert isinstance(self.dynamics, DynamicsMaskGIT)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n init_logits_BSNV = jnp.zeros(\n shape=(*token_idxs_BSN.shape, self.num_patch_latents)\n )\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n # --- Extract submodule state ---\n dynamics_state = nnx.state(self.dynamics)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def maskgit_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step: jax.Array,\n ) -> 
tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, mask_BSN, action_tokens_EL = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the submodule inside scan body to prevent trace context mismatches\n dynamics_maskgit = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_maskgit, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_maskgit.patch_embed(token_idxs_BSN)\n mask_token_111M = dynamics_maskgit.mask_token.value\n mask_expanded_BSN1 = mask_BSN[..., None]\n vid_embed_BSNM = jnp.where(\n mask_expanded_BSN1, mask_token_111M, vid_embed_BSNM\n )\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_maskgit.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_BSN = jnp.argmax(final_logits_BSNV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_BSN = jax.random.categorical(_rng, final_logits_BSNV)\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs_BSN = gather_fn(\n jax.nn.softmax(final_logits_BSNV), sampled_token_idxs_BSN\n )\n final_token_probs_BSN += ~mask_BSN\n # Update masked tokens and logits only\n token_idxs_BSN = jnp.where(mask_BSN, sampled_token_idxs_BSN, token_idxs_BSN)\n logits_BSNV = jnp.where(\n jnp.expand_dims(mask_BSN, -1), final_logits_BSNV, logits_BSNV\n )\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n final_token_probs_flat_BP = einops.rearrange(\n final_token_probs_BSN, ""b s n -> b (s n)""\n )\n idx_mask_P = (\n jnp.arange(final_token_probs_flat_BP.shape[-1])\n <= N - num_unmasked_tokens\n )\n sorted_idxs_BP = jnp.argsort(final_token_probs_flat_BP, axis=-1)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask_P))\n mask_flat_BP = einops.rearrange(mask_BSN, ""b s n -> b (s n)"")\n new_mask_flat_BP = mask_update_fn(mask_flat_BP, sorted_idxs_BP)\n new_mask_BSN = einops.rearrange(new_mask_flat_BP, ""b (s n) -> b s n"", n=N)\n\n new_carry = (\n rng,\n token_idxs_BSN,\n logits_BSNV,\n new_mask_BSN,\n action_tokens_EL,\n )\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # Mask current frame (i.e., t == step_t)\n mask_S = jnp.arange(seq_len) == step_t\n mask_BSN = jnp.broadcast_to(mask_S[None, :, None], (B, seq_len, N)).astype(\n bool\n )\n masked_token_idxs_BSN = 
current_token_idxs_BSN * ~mask_BSN\n masked_logits_BSNV = current_logits_BSNV * jnp.expand_dims(~mask_BSN, -1)\n\n # --- Initialize and run MaskGIT loop ---\n init_carry_maskgit = (\n step_rng,\n masked_token_idxs_BSN,\n masked_logits_BSNV,\n mask_BSN,\n action_tokens_EL,\n )\n final_carry_maskgit = maskgit_step_fn(init_carry_maskgit, jnp.arange(steps))\n updated_token_idxs_BSN = final_carry_maskgit[1]\n updated_logits_BSNV = final_carry_maskgit[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, init_logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def sample_causal(\n self,\n batch: Dict[str, jax.Array],\n seq_len: int,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> tuple[jax.Array, jax.Array]:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by\n a) reapplying the mask before each decoding step.\n b) a temporal causal mask is applied within each ST-transformer block.\n\n Dimension keys:\n B: batch size\n T: number of input (conditioning) frames\n N: number of patches per frame\n M: model dimension\n S: sequence length\n H: height\n W: width\n E: B * (S - 1)\n """"""\n assert isinstance(self.dynamics, DynamicsCausal)\n # --- Encode videos and actions ---\n videos_BTHWC = batch[""videos""]\n tokenizer_out = self.tokenizer.vq_encode(videos_BTHWC, training=False)\n token_idxs_BTN = tokenizer_out[""indices""]\n B, T, N = token_idxs_BTN.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs_BTN.dtype)\n token_idxs_BSN = jnp.concatenate([token_idxs_BTN, pad], axis=1)\n logits_BSNV = jnp.zeros((*token_idxs_BSN.shape, self.num_patch_latents))\n dynamics_state = nnx.state(self.dynamics)\n\n if self.use_gt_actions:\n assert self.action_embed is not None\n latent_actions_BT1L = self.action_embed(batch[""actions""]).reshape(\n *batch[""actions""].shape[:2], 1, self.latent_action_dim\n )\n latent_actions_BTm11L = latent_actions_BT1L[:, :-1]\n action_tokens_EL = latent_actions_BTm11L.reshape(-1, self.latent_action_dim)\n else:\n assert self.lam is not None\n latent_actions_E = batch[""latent_actions""]\n action_tokens_EL = self.lam.vq.get_codes(latent_actions_E)\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def causal_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array],\n step_n: jax.Array,\n ) -> tuple[jax.Array, jax.Array, jax.Array, jax.Array, jax.Array]:\n rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t = carry\n S, N = token_idxs_BSN.shape[1:]\n L = action_tokens_EL.shape[-1]\n\n # We need to reconstruct the 
submodule inside scan body to prevent trace context mismatches\n dynamics_causal = DynamicsCausal(\n model_dim=self.dyna_dim,\n ffn_dim=self.dyna_ffn_dim,\n num_latents=self.num_patch_latents,\n latent_action_dim=self.latent_action_dim,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n decode=self.decode,\n rngs=nnx.Rngs(0),\n )\n nnx.update(dynamics_causal, dynamics_state)\n\n # --- Construct + encode video ---\n vid_embed_BSNM = dynamics_causal.patch_embed(token_idxs_BSN)\n\n # --- Predict transition ---\n action_tokens_BSm1L = jnp.reshape(action_tokens_EL, (B, S - 1, L))\n act_embed_BSm1M = dynamics_causal.action_up(action_tokens_BSm1L)\n act_embed_BSM = jnp.pad(act_embed_BSm1M, ((0, 0), (1, 0), (0, 0)))\n act_embed_BS1M = jnp.reshape(\n act_embed_BSM, (B, S, 1, act_embed_BSM.shape[-1])\n )\n vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)\n final_logits_BTNp1V = (\n dynamics_causal.transformer(vid_embed_BSNp1M, (step_t, step_n))\n / temperature\n )\n final_logits_BV = final_logits_BTNp1V[:, step_t, step_n, :]\n\n # --- Sample new tokens for final frame ---\n if sample_argmax:\n sampled_token_idxs_B = jnp.argmax(final_logits_BV, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs_B = jax.random.categorical(_rng, final_logits_BV)\n # Update next tokens only\n token_idxs_BSN = token_idxs_BSN.at[:, step_t, step_n].set(\n sampled_token_idxs_B\n )\n logits_BSNV = logits_BSNV.at[:, step_t, step_n].set(final_logits_BV)\n\n new_carry = (rng, token_idxs_BSN, logits_BSNV, action_tokens_EL, step_t)\n return new_carry\n\n @nnx.scan(in_axes=(nnx.Carry, 0), out_axes=nnx.Carry)\n def generation_step_fn(\n carry: tuple[jax.Array, jax.Array, jax.Array], step_t: jax.Array\n ) -> tuple[jax.Array, jax.Array, jax.Array]:\n rng, current_token_idxs_BSN, current_logits_BSNV = carry\n rng, step_rng = jax.random.split(rng)\n\n # --- Initialize and run causal loop ---\n init_carry_causal = (\n step_rng,\n current_token_idxs_BSN,\n current_logits_BSNV,\n action_tokens_EL,\n step_t,\n )\n final_carry_causal = causal_step_fn(init_carry_causal, jnp.arange(N))\n updated_token_idxs_BSN = final_carry_causal[1]\n updated_logits_BSNV = final_carry_causal[2]\n new_carry = (rng, updated_token_idxs_BSN, updated_logits_BSNV)\n return new_carry\n\n # --- Run the autoregressive generation using jax.lax.scan ---\n initial_carry = (batch[""rng""], token_idxs_BSN, logits_BSNV)\n timesteps_to_scan = jnp.arange(T, seq_len)\n final_carry = generation_step_fn(initial_carry, timesteps_to_scan)\n final_token_idxs_BSN = final_carry[1]\n final_logits_BSNV = final_carry[2]\n\n # --- Decode all tokens at once at the end ---\n H, W = batch[""videos""].shape[2:4]\n final_frames_BSHWC = self.tokenizer.decode(\n final_token_idxs_BSN,\n video_hw=(H, W),\n )\n return final_frames_BSHWC, final_logits_BSNV\n\n def vq_encode(self, batch: Dict[str, jax.Array], training: bool) -> jax.Array:\n # --- Preprocess videos ---\n assert self.lam is not None\n video_BTHWC = batch[""videos""]\n lam_output: Dict[str, jax.Array] = self.lam.vq_encode(\n video_BTHWC, training=training\n )\n lam_indices_E = lam_output[""indices""]\n return lam_indices_E\n\n\n# FIXME (f.srambical): add conversion script for old checkpoints\ndef restore_genie_components(\n optimizer: nnx.ModelAndOptimizer,\n sharding: jax.sharding.NamedSharding,\n rng: jax.Array,\n args,\n) -> 
nnx.ModelAndOptimizer:\n """"""Restore pre-trained Genie components""""""\n rng_tokenizer, rng_lam = jax.random.split(rng)\n rngs_tokenizer = nnx.Rngs(rng_tokenizer)\n rngs_lam = nnx.Rngs(rng_lam)\n\n tx = optimizer.tx\n model = optimizer.model\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n\n checkpoint_options = ocp.CheckpointManagerOptions(\n step_format_fixed_length=6,\n )\n tokenizer_checkpoint_manager = ocp.CheckpointManager(\n directory=args.tokenizer_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n ffn_dim=args.tokenizer_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_tokenizer,\n )\n dummy_tokenizer_optimizer = nnx.ModelAndOptimizer(dummy_tokenizer, tx)\n dummy_tokenizer_optimizer_state = nnx.state(dummy_tokenizer_optimizer)\n abstract_sharded_tokenizer_optimizer_state = _create_abstract_sharded_pytree(\n dummy_tokenizer_optimizer_state, sharding\n )\n restored_tokenizer = tokenizer_checkpoint_manager.restore(\n step=tokenizer_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_tokenizer_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_tokenizer_optimizer.model, restored_tokenizer.model)\n model.tokenizer = dummy_tokenizer_optimizer.model\n tokenizer_checkpoint_manager.close()\n\n if args.lam_checkpoint:\n lam_checkpoint_manager = ocp.CheckpointManager(\n directory=args.lam_checkpoint,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n ffn_dim=args.lam_ffn_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n rngs=rngs_lam,\n )\n dummy_lam_optimizer = nnx.ModelAndOptimizer(dummy_lam, tx)\n dummy_lam_optimizer_state = nnx.state(dummy_lam_optimizer)\n abstract_sharded_lam_optimizer_state = _create_abstract_sharded_pytree(\n dummy_lam_optimizer_state, sharding\n )\n restored_lam_optimizer = lam_checkpoint_manager.restore(\n step=lam_checkpoint_manager.latest_step(),\n args=ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore( # type: ignore\n abstract_sharded_lam_optimizer_state # type: ignore\n ),\n ),\n )[""model_state""]\n nnx.update(dummy_lam_optimizer.model, restored_lam_optimizer.model)\n model.lam = dummy_lam_optimizer.model\n # Remove the LAM decoder to save memory and avoid unnecessary computation.\n del model.lam.decoder\n lam_checkpoint_manager.close()\n\n # Reinitialize the optimizer states\n optimizer = nnx.ModelAndOptimizer(model, tx)\n return optimizer\n\n\ndef _create_abstract_sharded_pytree(\n pytree_template: nnx.GraphState, sharding_spec: jax.sharding.NamedSharding\n) -> jax.Array:\n """"""Replaces 
arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n",python,tab +354,748468,"jasmine/genie.py",12448,0,"",python,selection_mouse +355,749823,"jasmine/genie.py",12292,0,"",python,selection_mouse +356,749827,"jasmine/genie.py",12291,0,"",python,selection_command +357,758809,"jasmine/models/dynamics.py",0,0,"",python,tab +358,760069,"jasmine/models/dynamics.py",3300,0,"",python,selection_mouse +359,760734,"jasmine/models/dynamics.py",3359,0,"",python,selection_mouse +360,764342,"jasmine/genie.py",0,0,"",python,tab +361,766006,"jasmine/genie.py",12278,0,"",python,selection_mouse +362,766008,"jasmine/genie.py",12277,0,"",python,selection_command +363,766759,"jasmine/genie.py",12266,0,"",python,selection_mouse +364,767493,"jasmine/genie.py",12212,0,"",python,selection_mouse +365,767498,"jasmine/genie.py",12211,0,"",python,selection_command +366,768956,"jasmine/genie.py",12292,0,"",python,selection_mouse +367,768959,"jasmine/genie.py",12291,0,"",python,selection_command +368,769558,"jasmine/genie.py",12388,0,"",python,selection_mouse +369,770170,"jasmine/genie.py",12337,0,"",python,selection_mouse +370,770172,"jasmine/genie.py",12336,0,"",python,selection_command +371,834326,"jasmine/genie.py",12560,0,"",python,selection_mouse +372,835154,"jasmine/genie.py",12333,0,"",python,selection_mouse +373,835285,"jasmine/genie.py",12323,14,"act_embed_BS1M",python,selection_mouse +374,837386,"jasmine/genie.py",12199,0,"",python,selection_mouse +375,839024,"jasmine/genie.py",12332,0,"",python,selection_mouse +376,839143,"jasmine/genie.py",12323,14,"act_embed_BS1M",python,selection_mouse +377,839902,"jasmine/genie.py",12292,0,"",python,selection_mouse +378,839904,"jasmine/genie.py",12291,0,"",python,selection_command +379,841257,"jasmine/genie.py",12317,0,"",python,selection_mouse +380,852356,"jasmine/genie.py",12559,0,"",python,selection_mouse +381,852359,"jasmine/genie.py",12558,0,"",python,selection_command +382,852528,"jasmine/genie.py",12558,1,"p",python,selection_mouse +383,852528,"jasmine/genie.py",12559,0,"",python,selection_command +384,852542,"jasmine/genie.py",12557,2,"mp",python,selection_mouse +385,852543,"jasmine/genie.py",12556,3,"emp",python,selection_mouse +386,852601,"jasmine/genie.py",12550,9,"step_temp",python,selection_mouse +387,852602,"jasmine/genie.py",12546,13,") / step_temp",python,selection_mouse +388,852602,"jasmine/genie.py",12534,25,"d_embed_BSNM) / step_temp",python,selection_mouse +389,852612,"jasmine/genie.py",12529,30,"er(vid_embed_BSNM) / step_temp",python,selection_mouse +390,852626,"jasmine/genie.py",12525,34,"former(vid_embed_BSNM) / step_temp",python,selection_mouse +391,852688,"jasmine/genie.py",12521,38,"ransformer(vid_embed_BSNM) / step_temp",python,selection_mouse +392,852689,"jasmine/genie.py",12519,40,".transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +393,852689,"jasmine/genie.py",12559,1,"\n",python,selection_mouse +394,852726,"jasmine/genie.py",12509,50,"cs_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +395,852744,"jasmine/genie.py",12505,54,"namics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +396,852768,"jasmine/genie.py",12503,56,"dynamics_maskgit.transformer(vid_embed_BSNM) / 
step_temp",python,selection_mouse +397,852782,"jasmine/genie.py",12501,58,"= dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +398,852798,"jasmine/genie.py",12498,61,"NV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +399,852814,"jasmine/genie.py",12496,63,"BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +400,852827,"jasmine/genie.py",12433,126," temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +401,852862,"jasmine/genie.py",12432,127,"= temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +402,852863,"jasmine/genie.py",12430,129,"p = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +403,852921,"jasmine/genie.py",12428,131,"emp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +404,852922,"jasmine/genie.py",12427,132,"temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +405,852923,"jasmine/genie.py",12425,134,"p_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +406,852928,"jasmine/genie.py",12424,135,"ep_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +407,852943,"jasmine/genie.py",12423,136,"tep_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +408,852998,"jasmine/genie.py",12349,210," unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +409,853008,"jasmine/genie.py",12348,211," unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +410,853008,"jasmine/genie.py",12347,212," unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +411,853009,"jasmine/genie.py",12345,214," unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +412,853068,"jasmine/genie.py",12344,215," unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +413,853068,"jasmine/genie.py",12343,216," unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +414,853076,"jasmine/genie.py",12342,217," unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / 
(steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +415,853093,"jasmine/genie.py",12341,218," unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +416,853148,"jasmine/genie.py",12340,219," unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +417,853148,"jasmine/genie.py",12339,220," unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +418,853160,"jasmine/genie.py",12338,221," unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +419,853474,"jasmine/genie.py",12293,266," vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",python,selection_mouse +420,854175,"jasmine/genie.py",12293,266,"",python,content +421,854732,"jasmine/genie.py",12293,0," vid_embed_BSNp2M = jnp.concatenate(\n [act_embed_BS1M, noise_level_embed_BS1M, vid_embed_BSNM], axis=2\n )\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNp2V = (\n dynamics_maskgit.transformer(vid_embed_BSNp2M) / step_temp\n )\n final_logits_BSNV = final_logits_BSNp2V[:, :, 2:]",python,content +422,863172,"jasmine/genie.py",12361,24,"",python,content +423,863172,"jasmine/genie.py",12320,0,"1",python,content +424,863173,"jasmine/genie.py",12319,1,"",python,content +425,866963,"jasmine/genie.py",12575,0,"1",python,content +426,866964,"jasmine/genie.py",12574,1,"",python,content +427,871977,"jasmine/genie.py",12635,0,"",python,selection_mouse +428,873512,"jasmine/genie.py",12641,0,"1",python,content +429,873512,"jasmine/genie.py",12640,1,"",python,content +430,874944,"jasmine/genie.py",12729,0,"1",python,content +431,874945,"jasmine/genie.py",12728,1,"",python,content +432,874945,"jasmine/genie.py",12720,0,"1",python,content +433,874945,"jasmine/genie.py",12719,1,"",python,content +434,877813,"jasmine/genie.py",12732,0,"",python,selection_mouse +435,905885,"jasmine/models/dynamics.py",0,0,"",python,tab +436,915188,"jasmine/models/dynamics.py",3046,0,"",python,selection_mouse +437,915333,"jasmine/models/dynamics.py",3038,21,"padded_act_embed_BT1M",python,selection_mouse +438,916143,"jasmine/models/dynamics.py",2983,0,"",python,selection_mouse +439,916306,"jasmine/models/dynamics.py",2973,16,"act_embed_BTm11M",python,selection_mouse +440,923909,"jasmine/genie.py",0,0,"",python,tab +441,932304,"TERMINAL",0,0,"sh slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",,terminal_output +442,933234,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH 
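The genie.py edit recorded above changes the MaskGIT sampling step: instead of adding the action embedding to every patch token (`vid_embed_BSNM += act_embed_BS1M`), the action embedding is prepended as an extra token along the spatial axis and its logit position sliced off after the transformer. A hedged reconstruction of the resulting step, keeping the cosine temperature anneal; `transformer` is a stand-in for `dynamics_maskgit.transformer`:

```python
import jax.numpy as jnp

def masked_step_logits(transformer, vid_embed_BSNM, act_embed_BS1M,
                       step, steps, temperature):
    # Prepend the action token along the spatial axis: (B, S, N, M) -> (B, S, N+1, M).
    vid_embed_BSNp1M = jnp.concatenate([act_embed_BS1M, vid_embed_BSNM], axis=2)
    # Cosine-annealed sampling temperature over the MaskGIT refinement steps.
    unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))
    step_temp = temperature * (1.0 - unmasked_ratio)
    final_logits_BSNp1V = transformer(vid_embed_BSNp1M) / step_temp
    # Drop the logits at the prepended action position.
    return final_logits_BSNp1V[:, :, 1:]
```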
--output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/%x_%j.log\r\n#SBATCH --job-name=train_dyn_single_gpu\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/train\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p $CHECKPOINT_DIR\r\n\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python jasmine/train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=50 \\r\n --log_checkpoint_interval=2 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 10 \\r\n --data_dir $array_records_dir_train \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 2 \\r\n --val_steps 5\r\n",,terminal_output 
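The run script above configures `jasmine/train_dynamics.py` entirely through CLI flags. A small sketch of how such flags map onto a dataclass with tyro; `DemoArgs` is hypothetical and mirrors only three of the flags:

```python
from dataclasses import dataclass, field

import tyro

@dataclass
class DemoArgs:
    # Hypothetical subset of the training flags, for illustration only.
    save_ckpt: bool = False                         # enabled by a flag like --save_ckpt
    batch_size: int = 36                            # overridden e.g. by --batch_size=110
    tags: list[str] = field(default_factory=list)   # space-separated, e.g. --tags dyn coinrun dev

if __name__ == "__main__":
    args = tyro.cli(DemoArgs)
    print(args)
```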
+443,933383,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1175644\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759325695\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759329295\r\nSLURM_PMI2_SRUN_PORT=40605\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3535390\r\nSLURM_PTY_PORT=35743\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=32\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=179\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33917\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3535390\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33917\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +444,933528,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +445,936089,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +446,942095,"TERMINAL",0,0,"Counting all components: ['dynamics', 'lam', 'tokenizer']\r\n",,terminal_output +447,942445,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. Use `wandb login --relogin` to force relogin\r\n",,terminal_output +448,942859,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",0,0,"",shellscript,tab +449,943251,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.22.0\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20251001_154817-ra4hk6lg\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-dyn-dev-3535390\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/ra4hk6lg\r\n",,terminal_output +450,943397,"TERMINAL",0,0,"Parameter counts:\r\n{'dynamics': 26555904, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78686016}\r\n",,terminal_output +451,943916,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2159,0,"",shellscript,selection_mouse +452,943931,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2158,0,"",shellscript,selection_command +453,944714,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000008 could not be identified as a temporary checkpoint path using . 
Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000008) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000006 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000006) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000010 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000010) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +454,945439,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2159,0," vid_embed_BSNM += act_embed_BS1M\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (steps * 2))\n step_temp = temperature * (1.0 - unmasked_ratio)\n final_logits_BSNV = dynamics_maskgit.transformer(vid_embed_BSNM) / step_temp",shellscript,content +455,945455,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2159,0,"",shellscript,selection_command +456,946663,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2159,266,"",shellscript,content +457,946683,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2158,0,"",shellscript,selection_command +458,947495,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2159,0,"\n ",shellscript,content +459,947577,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
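The `WARNING:absl` lines above appear because the reused checkpoint directory already contains finalized step directories (`000006`, `000008`, `000010`, zero-padded per `step_format_fixed_length=6`), which the manager inspects while scanning for leftover temporary checkpoints. A sketch, assuming the same options and a hypothetical path, for listing what is already there:

```python
import orbax.checkpoint as ocp

# Hypothetical path standing in for the reused run directory above.
manager = ocp.CheckpointManager(
    directory="/tmp/ckpt/3535390",
    options=ocp.CheckpointManagerOptions(step_format_fixed_length=6),
)
print(manager.all_steps())    # e.g. [6, 8, 10] left over from the earlier run
print(manager.latest_step())  # e.g. 10
manager.close()
```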
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +460,947782,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2164,0,"-",shellscript,content +461,947783,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2165,0,"",shellscript,selection_keyboard +462,947924,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2165,0,"-",shellscript,content +463,947925,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2166,0,"",shellscript,selection_keyboard +464,948233,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2166,0,"e",shellscript,content +465,948234,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2167,0,"",shellscript,selection_keyboard +466,948399,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2167,0,"v",shellscript,content +467,948400,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2168,0,"",shellscript,selection_keyboard +468,948589,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2168,0,"a",shellscript,content +469,948590,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2169,0,"",shellscript,selection_keyboard +470,948608,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2169,0,"l",shellscript,content +471,948609,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2170,0,"",shellscript,selection_keyboard +472,949241,"slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",2170,0,"_full_frame \",shellscript,content +473,952853,"TERMINAL",0,0,"^Csrun: interrupt (one more within 1 sec to abort)\r\nsrun: StepId=3535390.2 task 0: running\r\n",,terminal_output +474,953047,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3535390.2\r\nsrun: forcing job termination\r\nTraceback (most recent call last):\r\nTraceback (most recent call last):\r\n File """", line 1, in \r\nTraceback (most recent call last):\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 122, in spawn_main\r\n File """", line 1, in \r\n File """", line 1, in \r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 122, in spawn_main\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 122, in spawn_main\r\nTraceback (most recent call last):\r\n File """", line 1, in \r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 122, in spawn_main\r\nTraceback (most recent call last):\r\n File """", line 1, in \r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 122, in spawn_main\r\n exitcode = _main(fd, parent_sentinel)\r\n ^^^Traceback (most recent call last):\r\n^^^^^^ File """", line 1, in \r\n^^^ File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 122, in spawn_main\r\n^^^^^^ ^^exitcode = _main(fd, parent_sentinel)^\r\n^^^^^\r\n exitcode = _main(fd, parent_sentinel) File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 131, in _main\r\n\r\n ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\r\n^^^^ File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 131, in _main\r\n^^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 131, in _main\r\n exitcode = _main(fd, parent_sentinel) \r\nexitcode = _main(fd, parent_sentinel)\r\nexitcode = _main(fd, parent_sentinel)\r\n prepare(preparation_data) \r\n ^^ File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 246, in prepare\r\n^^^^^^^^^^ ^ ^prepare(preparation_data) ^\r\n ^ ^ ^ ^ ^ ^ ^ ^ ^ ^^^^\r\n^^^^ File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 131, in 
_main\r\n^ File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 246, in prepare\r\n^^^^^^^^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 131, in _main\r\n _fixup_main_from_path(data['init_main_from_path']) \r\n _fixup_main_from_path(data['init_main_from_path']) \r\n ^^^^^^ File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 297, in _fixup_main_from_path\r\n^^^^^^^^^^^^^ File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 297, in _fixup_main_from_path\r\n^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 131, in _main\r\nprepare(preparation_data)\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 246, in prepare\r\n prepare(preparation_data)\r\n main_content = runpy.run_path(main_path,\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 246, in prepare\r\n ^^^^^^ ^_fixup_main_from_path(data['init_main_from_path'])^\r\n^^^^^^^^^^^^^^^ File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 297, in _fixup_main_from_path\r\n^^\r\n File """", line 286, in run_path\r\n File """", line 98, in _run_module_code\r\n File """", line 88, in _run_code\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 14, in \r\n main_content = runpy.run_path(main_path,\r\n ^^^^^^^^^^^^^ ^main_content = runpy.run_path(main_path,^\r\n^^^ ^ ^ ^ ^ ^ ^ ^ \r\n import orbax.checkpoint as ocp File """", line 286, in run_path\r\n\r\n File """", line 98, in _run_module_code\r\n File """", line 88, in _run_code\r\n^^^^ File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 14, in \r\n^^^^ File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/__init__.py"", line 22, in \r\n^^^^^^^^^^^^^^^^^\r\n File """", line 286, in run_path\r\n File """", line 98, in _run_module_code\r\n File """", line 88, in _run_code\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 14, in \r\n prepare(preparation_data)\r\n import orbax.checkpoint as ocp\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 246, in prepare\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/__init__.py"", line 22, in \r\n_fixup_main_from_path(data['init_main_from_path'])\r\n import orbax.checkpoint as ocp\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 297, in _fixup_main_from_path\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/__init__.py"", line 22, in \r\n prepare(preparation_data)\r\n _fixup_main_from_path(data['init_main_from_path'])\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 246, in prepare\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 297, in _fixup_main_from_path\r\n main_content = runpy.run_path(main_path,\r\n main_content = runpy.run_path(main_path, \r\n^^ ^ ^ ^ ^ ^ ^ ^_fixup_main_from_path(data['init_main_from_path']) ^\r\n^ ^ ^ ^ ^ ^ ^^^^^^^^^^^^^^^^^^^^^^\r\n^^ File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 297, in _fixup_main_from_path\r\n^^^^ File """", line 286, in run_path\r\n^^^^^ File """", line 98, in _run_module_code\r\n^\r\n File """", line 286, in run_path\r\n File """", line 88, in _run_code\r\n File """", line 98, in 
_run_module_code\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 14, in \r\n File """", line 88, in _run_code\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 14, in \r\n import orbax.checkpoint as ocp\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/__init__.py"", line 22, in \r\n main_content = runpy.run_path(main_path,\r\n import orbax.checkpoint as ocp \r\n ^^^^^^^^^^^^ File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/__init__.py"", line 22, in \r\n^^^^^^^^^^^^^\r\n File """", line 286, in run_path\r\n File """", line 98, in _run_module_code\r\n File """", line 88, in _run_code\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 14, in \r\n import orbax.checkpoint as ocp\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/__init__.py"", line 22, in \r\n from orbax.checkpoint.experimental import v1\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/__init__.py"", line 24, in \r\n from orbax.checkpoint.experimental.v1 import handlers\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/handlers.py"", line 19, in \r\n import orbax.checkpoint.experimental.v1._src.handlers.global_registration\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/global_registration.py"", line 25, in \r\n from orbax.checkpoint.experimental import v1\r\n from orbax.checkpoint.experimental import v1from orbax.checkpoint.experimental import v1\r\n\r\n from orbax.checkpoint.experimental.v1._src.handlers import json_handler\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/__init__.py"", line 24, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/__init__.py"", line 24, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/__init__.py"", line 24, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/json_handler.py"", line 23, in \r\n from orbax.checkpoint.experimental import v1\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/__init__.py"", line 24, in \r\n from orbax.checkpoint.experimental import v1\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/__init__.py"", line 24, in \r\n from orbax.checkpoint.experimental.v1 import handlers\r\n from orbax.checkpoint.experimental.v1 import handlers\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/handlers.py"", line 19, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/handlers.py"", line 19, in \r\n from orbax.checkpoint.experimental.v1 import handlers\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/handlers.py"", line 19, in \r\n from orbax.checkpoint.experimental.v1 import handlersfrom orbax.checkpoint.experimental.v1._src.context import context as context_lib\r\n\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/handlers.py"", line 19, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/context.py"", line 24, in \r\n from orbax.checkpoint.experimental.v1 import handlers\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/handlers.py"", line 19, in \r\n import orbax.checkpoint.experimental.v1._src.handlers.global_registration\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/global_registration.py"", line 25, in \r\nsrun: Job step aborted: Waiting up to 32 seconds for job step to finish.\r\n import orbax.checkpoint.experimental.v1._src.handlers.global_registration\r\n import orbax.checkpoint.experimental.v1._src.handlers.global_registration\r\n import orbax.checkpoint.experimental.v1._src.handlers.global_registration\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/global_registration.py"", line 25, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/global_registration.py"", line 25, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/global_registration.py"", line 25, in \r\n import orbax.checkpoint.experimental.v1._src.handlers.global_registration\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/global_registration.py"", line 25, in \r\n from orbax.checkpoint.experimental.v1._src.handlers import json_handler\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/json_handler.py"", line 23, in \r\nKeyboardInterrupt\r\n\r\nThe above exception was the direct cause of the following exception:\r\n\r\nKeyboardInterruptTraceback (most recent call last):\r\n\r\n\r\nThe above exception was the direct cause of the following exception:\r\n File """", line 1, in \r\n\r\nTraceback (most recent call last):\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 122, in spawn_main\r\n File """", line 1, in \r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 
122, in spawn_main\r\n from orbax.checkpoint.experimental.v1._src.handlers import json_handler\r\n from orbax.checkpoint.experimental.v1._src.handlers import json_handler\r\n from orbax.checkpoint.experimental.v1._src.handlers import json_handler\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/json_handler.py"", line 23, in \r\n exitcode = _main(fd, parent_sentinel)\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/json_handler.py"", line 23, in \r\n exitcode = _main(fd, parent_sentinel) File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/json_handler.py"", line 23, in \r\n\r\n ^^^^ ^ ^^ ^ ^ ^ ^ ^ ^ ^ ^^ ^ ^^ ^ ^^ ^^^from orbax.checkpoint.experimental.v1._src.handlers import json_handler^^^\r\n^^^^^\r\n^^^^^^^ File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 131, in _main\r\n^^^^^^^^^^^^\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 131, in _main\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/json_handler.py"", line 23, in \r\n prepare(preparation_data)\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 246, in prepare\r\n prepare(preparation_data)\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 246, in prepare\r\n _fixup_main_from_path(data['init_main_from_path'])\r\n _fixup_main_from_path(data['init_main_from_path'])\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 297, in _fixup_main_from_path\r\n File ""/usr/lib64/python3.12/multiprocessing/spawn.py"", line 297, in _fixup_main_from_path\r\n main_content = runpy.run_path(main_path,\r\n ^^^^^^^^^^^^^^^^^^ ^main_content = runpy.run_path(main_path,^\r\n^^^^ ^ \r\n File """", line 286, in run_path\r\n File """", line 98, in _run_module_code\r\n File """", line 88, in _run_code\r\n ^ File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 14, in \r\n^^^^^^^^^^^^^^^^^^^^^^^^\r\n File """", line 286, in run_path\r\n File """", line 98, in _run_module_code\r\n File """", line 88, in _run_code\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/jasmine/train_dynamics.py"", line 14, in \r\n from orbax.checkpoint.experimental.v1._src.context import context as context_lib\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/context.py"", line 24, in \r\n import orbax.checkpoint as ocp\r\n from orbax.checkpoint.experimental.v1._src.context import context as context_lib\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/__init__.py"", line 22, in \r\nimport orbax.checkpoint as ocp\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/context.py"", line 24, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/__init__.py"", line 22, in \r\n from 
orbax.checkpoint.experimental.v1._src.context import context as context_lib\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/context.py"", line 24, in \r\n from orbax.checkpoint.experimental import v1\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/__init__.py"", line 24, in \r\n from orbax.checkpoint.experimental.v1._src.context import context as context_lib\r\n from orbax.checkpoint.experimental.v1._src.context import context as context_lib\r\n from orbax.checkpoint.experimental import v1\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/context.py"", line 24, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/context.py"", line 24, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/__init__.py"", line 24, in \r\n from orbax.checkpoint.experimental.v1._src.context import options as options_lib\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/options.py"", line 26, in \r\n from orbax.checkpoint.experimental.v1 import handlers\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/handlers.py"", line 19, in \r\n from orbax.checkpoint.experimental.v1._src.context import options as options_lib\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/options.py"", line 26, in \r\n from orbax.checkpoint.experimental.v1 import handlers\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/handlers.py"", line 19, in \r\n from orbax.checkpoint.experimental.v1._src.context import options as options_lib\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/options.py"", line 25, in \r\n from orbax.checkpoint.experimental.v1._src.context import options as options_libfrom orbax.checkpoint.experimental.v1._src.context import options as options_lib\r\n\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/options.py"", line 26, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/options.py"", line 26, in \r\n from orbax.checkpoint.experimental.v1._src.context import options as options_lib\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/options.py"", line 25, in \r\n import orbax.checkpoint.experimental.v1._src.handlers.global_registration\r\n import orbax.checkpoint.experimental.v1._src.handlers.global_registration\r\n File 
""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/global_registration.py"", line 25, in \r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/global_registration.py"", line 25, in \r\n from orbax.checkpoint.experimental.v1._src.handlers import json_handler\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/json_handler.py"", line 23, in \r\n from orbax.checkpoint.experimental.v1._src.handlers import json_handler\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/handlers/json_handler.py"", line 23, in \r\nslurmstepd: error: *** STEP 3535390.2 ON hkn0401 CANCELLED AT 2025-10-01T15:48:28 ***\r\n from orbax.checkpoint.experimental.v1._src.context import context as context_lib\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/context.py"", line 24, in \r\n from orbax.checkpoint.experimental.v1._src.context import context as context_lib\r\n File ""/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/experimental/v1/_src/context/context.py"", line 24, in \r\n",,terminal_output +475,953189,"TERMINAL",0,0,"^Csrun: sending Ctrl-C to StepId=3535390.2\r\nsrun: job abort in progress\r\n",,terminal_output +476,953413,"TERMINAL",0,0,"^[[Aslurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh: line 64: ame: command not found\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ sh slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh",,terminal_output +477,954150,"TERMINAL",0,0,"\r\n[?2004l\r#!/usr/bin/env bash\r\n\r\n#SBATCH --nodes=1\r\n#SBATCH --ntasks-per-node=4\r\n#SBATCH --time=00:20:00\r\n#SBATCH --partition=dev_accelerated\r\n#SBATCH --cpus-per-task=5\r\n#SBATCH --gres=gpu:1\r\n#SBATCH --output=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/%x_%j.log\r\n#SBATCH --error=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/coinrun/dynamics/%x_%j.log\r\n#SBATCH --job-name=train_dyn_single_gpu\r\n#SBATCH --requeue\r\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\r\n\r\n# Log the sbatch script\r\ncat $0\r\n\r\nmodule unload mpi/openmpi/5.0\r\nmodule unload devel/cuda/12.4\r\nsource .venv/bin/activate\r\n\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_test\r\narray_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/train\r\n# array_records_dir_train=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_chunked\r\narray_records_dir_val=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/data_coinrun/coinrun_episodes_10m_split/val\r\n\r\njob_name=$SLURM_JOB_NAME\r\nslurm_job_id=$SLURM_JOB_ID\r\n\r\nCHECKPOINT_DIR=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/$job_name/$slurm_job_id\r\nmkdir -p 
$CHECKPOINT_DIR\r\n\r\ntokenizer_checkpoint=/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/tokenizer/train_tokenizer_default/3528955\r\n\r\nenv | grep SLURM\r\n\r\nexport PYTHONUNBUFFERED=1\r\n\r\nsrun python jasmine/train_dynamics.py \\r\n --save_ckpt \\r\n --image_height=64 \\r\n --image_width=64 \\r\n --ckpt_dir $CHECKPOINT_DIR \\r\n --batch_size=110 \\r\n --init_lr=0 \\r\n --max_lr=1e-4 \\r\n --log_image_interval=50 \\r\n --log_checkpoint_interval=2 \\r\n --dyna_type=maskgit \\r\n --log \\r\n --name=coinrun-dyn-dev-$slurm_job_id \\r\n --tags dyn coinrun dev \\r\n --entity instant-uv \\r\n --project jafar \\r\n --warmup_steps 0 \\r\n --wsd_decay_steps 0 \\r\n --num_steps 10 \\r\n --data_dir $array_records_dir_train \\r\n --tokenizer_checkpoint $tokenizer_checkpoint \\r\n --val_data_dir $array_records_dir_val \\r\n --val_interval 2 \\r\n --eval_full_frame \\r\n --val_steps 5\r\n",,terminal_output +478,954270,"TERMINAL",0,0,"SLURM_STEP_NUM_TASKS=1\r\nSLURM_JOB_USER=tum_cte0515\r\nSLURM_TASKS_PER_NODE=1\r\nSLURM_JOB_UID=999226\r\nSLURM_TASK_PID=1175644\r\nSLURM_JOB_GPUS=0\r\nSLURM_LOCALID=0\r\nSLURM_SUBMIT_DIR=/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine\r\nSLURMD_NODENAME=hkn0401\r\nSLURM_JOB_START_TIME=1759325695\r\nSLURM_STEP_NODELIST=hkn0401\r\nSLURM_CLUSTER_NAME=hk\r\nSLURM_JOB_END_TIME=1759329295\r\nSLURM_PMI2_SRUN_PORT=40605\r\nSLURM_CPUS_ON_NODE=8\r\nSLURM_JOB_CPUS_PER_NODE=8\r\nSLURM_GPUS_ON_NODE=1\r\nSLURM_GTIDS=0\r\nSLURM_JOB_PARTITION=dev_accelerated\r\nSLURM_TRES_PER_TASK=cpu=8\r\nSLURM_OOM_KILL_STEP=0\r\nSLURM_JOB_NUM_NODES=1\r\nSLURM_STEPID=4294967290\r\nSLURM_JOBID=3535390\r\nSLURM_PTY_PORT=35743\r\nSLURM_JOB_QOS=normal\r\nSLURM_LAUNCH_NODE_IPADDR=10.0.7.198\r\nSLURM_PTY_WIN_ROW=32\r\nSLURM_PMI2_PROC_MAPPING=(vector,(0,1,1))\r\nSLURMD_DEBUG=2\r\nSLURM_PROCID=0\r\nSLURM_CPUS_PER_TASK=8\r\nSLURM_TOPOLOGY_ADDR=hkibb.hkibbi1.hkibbi1e11.hkn0401\r\nSLURM_TOPOLOGY_ADDR_PATTERN=switch.switch.switch.node\r\nSLURM_SRUN_COMM_HOST=10.0.7.198\r\nSLURM_SCRIPT_CONTEXT=prolog_task\r\nSLURM_PTY_WIN_COL=179\r\nSLURM_NODELIST=hkn0401\r\nSLURM_SRUN_COMM_PORT=33917\r\nSLURM_STEP_ID=4294967290\r\nSLURM_JOB_ACCOUNT=hk-project-p0023960\r\nSLURM_PRIO_PROCESS=0\r\nSLURM_NNODES=1\r\nSLURM_SUBMIT_HOST=hkn1990.localdomain\r\nSLURM_JOB_ID=3535390\r\nSLURM_NODEID=0\r\nSLURM_STEP_NUM_NODES=1\r\nSLURM_STEP_TASKS_PER_NODE=1\r\nSLURM_MPI_TYPE=pmi2\r\nSLURM_PMI2_STEP_NODES=hkn0401\r\nSLURM_CONF=/etc/slurm/slurm.conf\r\nSLURM_JOB_NAME=interactive\r\nSLURM_STEP_LAUNCHER_PORT=33917\r\nSLURM_JOB_GID=502226\r\nSLURM_JOB_NODELIST=hkn0401\r\n",,terminal_output +479,954399,"TERMINAL",0,0,"GpuFreq=control_disabled\r\n",,terminal_output +480,957084,"TERMINAL",0,0,"Running on 1 devices.\r\n",,terminal_output +481,962986,"TERMINAL",0,0,"Counting all components: ['dynamics', 'lam', 'tokenizer']\r\n",,terminal_output +482,963322,"TERMINAL",0,0,"wandb: Currently logged in as: mihir-mahajan2002 (instant-uv) to https://api.wandb.ai. 
Use `wandb login --relogin` to force relogin\r\n",,terminal_output +483,964441,"TERMINAL",0,0,"wandb: Tracking run with wandb version 0.22.0\r\nwandb: Run data is saved locally in /hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20251001_154838-ugtqwvfa\r\nwandb: Run `wandb offline` to turn off syncing.\r\nwandb: Syncing run coinrun-dyn-dev-3535390\r\nwandb: ⭐️ View project at https://wandb.ai/instant-uv/jafar\r\nwandb: 🚀 View run at https://wandb.ai/instant-uv/jafar/runs/ugtqwvfa\r\n",,terminal_output +484,964617,"TERMINAL",0,0,"Parameter counts:\r\n{'dynamics': 26555904, 'lam': 17640416, 'tokenizer': 34489696, 'total': 78686016}\r\n",,terminal_output +485,965927,"TERMINAL",0,0,"WARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000008 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000008) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000006 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000006) to end with "".orbax-checkpoint-tmp"".\r\nWARNING:absl:Path /hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000010 could not be identified as a temporary checkpoint path using . Got error: Expected AtomicRenameTemporaryPath (/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/checkpoints/big-runs/dyn/interactive/3535390/000010) to end with "".orbax-checkpoint-tmp"".\r\n",,terminal_output +486,968961,"TERMINAL",0,0,"/hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/.venv/lib64/python3.12/site-packages/orbax/checkpoint/_src/serialization/type_handlers.py:1269: UserWarning: Sharding info not provided when restoring. Populating sharding info from sharding file. Please note restoration time will be slightly increased due to reading from file. 
Note also that this option is unsafe when restoring on a different topology than the checkpoint was saved with.\r\n warnings.warn(\r\n",,terminal_output +487,1039800,"TERMINAL",0,0,"Total memory size: 3.2 GB, Output size: 0.9 GB, Temp size: 2.2 GB, Argument size: 0.9 GB, Host temp size: 0.0 GB.\r\n",,terminal_output +488,1039873,"TERMINAL",0,0,"FLOPs: 5.546e+11, Bytes: 7.733e+10 (72.0 GB), Intensity: 7.2 FLOPs/byte\r\nStarting training from step 0...\r\n",,terminal_output +489,1040334,"TERMINAL",0,0,"\r\nMemstats: After params initialized:\r\n\tUsing (GB) 1.13 / 38.7 (2.919897%) on cuda:0\r\n",,terminal_output +490,1077238,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +491,1136792,"TERMINAL",0,0,"Step 2, validation loss: 12.786288261413574\r\n",,terminal_output +492,1137108,"TERMINAL",0,0,"Saved checkpoint at step 2\r\n",,terminal_output +493,1174165,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +494,1178565,"TERMINAL",0,0,"Step 4, validation loss: 14.277682304382324\r\n",,terminal_output +495,1178842,"TERMINAL",0,0,"Saved checkpoint at step 4\r\n",,terminal_output +496,1178986,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +497,1183493,"TERMINAL",0,0,"Step 6, validation loss: 12.4479341506958\r\nSaved checkpoint at step 6\r\n",,terminal_output +498,1183594,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +499,1187963,"TERMINAL",0,0,"Step 8, validation loss: 12.086461067199707\r\n",,terminal_output +500,1188029,"TERMINAL",0,0,"Saved checkpoint at step 8\r\n",,terminal_output +501,1188508,"TERMINAL",0,0,"Calculating validation metrics...\r\n",,terminal_output +502,1192895,"TERMINAL",0,0,"Step 10, validation loss: 8.801260948181152\r\n",,terminal_output +503,1192960,"TERMINAL",0,0,"Saved checkpoint at step 10\r\n",,terminal_output +504,1195809,"TERMINAL",0,0,"wandb: \r\nwandb: 🚀 View run coinrun-dyn-dev-3535390 at: https://wandb.ai/instant-uv/jafar/runs/ugtqwvfa\r\nwandb: Find logs at: ../../../../../hkfs/home/project/hk-project-p0023960/tum_cte0515/Projects/jasmine/wandb/run-20251001_154838-ugtqwvfa/logs\r\n",,terminal_output +505,1196998,"TERMINAL",0,0,"W1001 15:52:32.220639 1181077 pjrt_client.cc:1469] WatchJobStateAsync failed for task goo.gle/debugproto job_name: ""jax_worker"": UNAVAILABLE: failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.33:61982: Failed to connect to remote host: Connection refused\r\nAdditional GRPC error information from remote target coordination_service while calling /tensorflow.CoordinationService/WatchJobState:\r\n:UNKNOWN:Error received from peer {grpc_message:""failed to connect to all addresses; last error: UNKNOWN: ipv4:10.0.1.33:61982: Failed to connect to remote host: Connection refused"", grpc_status:14}\r\n",,terminal_output +506,1197677,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +507,1199984,"TERMINAL",0,0,"\r(reverse-i-search)`': ",,terminal_output +508,1200311,"TERMINAL",0,0,"g': sh slurm/jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\ri': git status",,terminal_output +509,1200443,"TERMINAL",0,0,"[1@t': git",,terminal_output +510,1201279,"TERMINAL",0,0," ': git status",,terminal_output +511,1201345,"TERMINAL",0,0,"c': git checkout add-noise-to-combat-exposure-bias\r",,terminal_output +512,1201459,"TERMINAL",0,0,"o': git commit -am ""added patchsize 4 tokenizer""\r",,terminal_output +513,1202169,"TERMINAL",0,0,"\r[13@jasmine) [tum_cte0515@hkn0401 
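The run logs above follow the cadence implied by the flags: validation every 2 steps averaged over 5 batches (`--val_interval 2 --val_steps 5`), plus a checkpoint each `--log_checkpoint_interval=2`. A toy sketch of that loop; `train_step`, `evaluate`, and `save_checkpoint` are hypothetical stand-ins:

```python
import random

def train_step():            # hypothetical stand-in for one optimizer update
    pass

def evaluate():              # hypothetical stand-in returning one batch's loss
    return random.uniform(8.0, 15.0)

def save_checkpoint(step):   # hypothetical stand-in for manager.save(step, ...)
    pass

val_interval, val_steps, ckpt_interval, num_steps = 2, 5, 2, 10
for step in range(1, num_steps + 1):
    train_step()
    if step % val_interval == 0:
        print("Calculating validation metrics...")
        val_loss = sum(evaluate() for _ in range(val_steps)) / val_steps
        print(f"Step {step}, validation loss: {val_loss}")
    if step % ckpt_interval == 0:
        save_checkpoint(step)
        print(f"Saved checkpoint at step {step}")
```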
jasmine]$ git co",,terminal_output +514,1202807,"TERMINAL",0,0,"",,terminal_output +515,1202949,"TERMINAL",0,0,"",,terminal_output +516,1203135,"TERMINAL",0,0,"",,terminal_output +517,1203296,"TERMINAL",0,0,"",,terminal_output +518,1203442,"TERMINAL",0,0,"",,terminal_output +519,1203595,"TERMINAL",0,0,"",,terminal_output +520,1203885,"TERMINAL",0,0,"",,terminal_output +521,1204301,"TERMINAL",0,0,"",,terminal_output +522,1204969,"TERMINAL",0,0,"""",,terminal_output +523,1205329,"TERMINAL",0,0,"""",,terminal_output +524,1205679,"TERMINAL",0,0,"""",,terminal_output +525,1206071,"TERMINAL",0,0,"",,terminal_output +526,1206902,"TERMINAL",0,0,"W""",,terminal_output +527,1207700,"TERMINAL",0,0,"""",,terminal_output +528,1208312,"TERMINAL",0,0,"""",,terminal_output +529,1208584,"TERMINAL",0,0,"",,terminal_output +530,1209072,"TERMINAL",0,0,"",,terminal_output +531,1209592,"TERMINAL",0,0,"a""",,terminal_output +532,1209755,"TERMINAL",0,0,"d""",,terminal_output +533,1209904,"TERMINAL",0,0,"d""",,terminal_output +534,1210074,"TERMINAL",0,0,"e""",,terminal_output +535,1210160,"TERMINAL",0,0,"d"" """,,terminal_output +536,1210601,"TERMINAL",0,0,"a""",,terminal_output +537,1210751,"TERMINAL",0,0,"c""",,terminal_output +538,1210967,"TERMINAL",0,0,"t""",,terminal_output +539,1211157,"TERMINAL",0,0,"p""",,terminal_output +540,1211758,"TERMINAL",0,0,"""",,terminal_output +541,1211935,"TERMINAL",0,0,"i""",,terminal_output +542,1211998,"TERMINAL",0,0,"o""",,terminal_output +543,1212178,"TERMINAL",0,0,"n""",,terminal_output +544,1212290,"TERMINAL",0,0," """,,terminal_output +545,1212397,"TERMINAL",0,0,"e""",,terminal_output +546,1212462,"TERMINAL",0,0,"m""",,terminal_output +547,1212654,"TERMINAL",0,0,"b""",,terminal_output +548,1212714,"TERMINAL",0,0,"e""",,terminal_output +549,1212784,"TERMINAL",0,0,"d""",,terminal_output +550,1212971,"TERMINAL",0,0,"d""",,terminal_output +551,1213134,"TERMINAL",0,0,"i""",,terminal_output +552,1213197,"TERMINAL",0,0,"n""",,terminal_output +553,1213272,"TERMINAL",0,0,"g""",,terminal_output +554,1213381,"TERMINAL",0,0," """,,terminal_output +555,1214587,"TERMINAL",0,0,"p""",,terminal_output +556,1214707,"TERMINAL",0,0,"r""",,terminal_output +557,1214875,"TERMINAL",0,0,"e""",,terminal_output +558,1214939,"TERMINAL",0,0,"m""",,terminal_output +559,1215655,"TERMINAL",0,0,"""",,terminal_output +560,1215904,"TERMINAL",0,0,"p""",,terminal_output +561,1216071,"TERMINAL",0,0,"e""",,terminal_output +562,1216522,"TERMINAL",0,0,"n""",,terminal_output +563,1216868,"TERMINAL",0,0,"d""",,terminal_output +564,1217073,"TERMINAL",0,0,"i""n""",,terminal_output +565,1217225,"TERMINAL",0,0,"g""",,terminal_output +566,1217645,"TERMINAL",0,0,"",,terminal_output +567,1218259,"TERMINAL",0,0,"",,terminal_output +568,1218843,"TERMINAL",0,0," """,,terminal_output +569,1218983,"TERMINAL",0,0,"t""",,terminal_output +570,1219046,"TERMINAL",0,0,"o""",,terminal_output +571,1219110,"TERMINAL",0,0," """,,terminal_output +572,1219329,"TERMINAL",0,0,"s""",,terminal_output +573,1219750,"TERMINAL",0,0,"a""m""",,terminal_output +574,1220001,"TERMINAL",0,0,"p""l""",,terminal_output +575,1220277,"TERMINAL",0,0,"i""n""",,terminal_output +576,1220371,"TERMINAL",0,0,"g""",,terminal_output +577,1220531,"TERMINAL",0,0," """,,terminal_output +578,1220605,"TERMINAL",0,0,"c""",,terminal_output +579,1220712,"TERMINAL",0,0,"o""",,terminal_output +580,1220831,"TERMINAL",0,0,"d""",,terminal_output +581,1221228,"TERMINAL",0,0,"e""",,terminal_output +582,1221345,"TERMINAL",0,0," """,,terminal_output 
+583,1221494,"TERMINAL",0,0,"w""",,terminal_output +584,1221765,"TERMINAL",0,0,"""a""",,terminal_output +585,1221890,"TERMINAL",0,0,"s""",,terminal_output +586,1221954,"TERMINAL",0,0," """,,terminal_output +587,1222076,"TERMINAL",0,0,"w""",,terminal_output +588,1222182,"TERMINAL",0,0,"e""",,terminal_output +589,1222247,"TERMINAL",0,0,"l""",,terminal_output +590,1222366,"TERMINAL",0,0,"l""",,terminal_output +591,1223367,"TERMINAL",0,0,"",,terminal_output +592,1239680,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +593,1240468,"TERMINAL",0,0,"black....................................................................",,terminal_output +594,1241566,"TERMINAL",0,0,"Failed\r\n- hook id: black\r\n- files were modified by this hook\r\n\r\nreformatted jasmine/genie.py\r\n\r\nAll done! ✨ 🍰 ✨\r\n1 file reformatted.\r\n\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +595,1242889,"TERMINAL",0,0,"git commit -am ""added action embedding prepending to sampling code as well""",,terminal_output +596,1243041,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +597,1243488,"TERMINAL",0,0,"black....................................................................",,terminal_output +598,1243541,"TERMINAL",0,0,"Passed\r\n",,terminal_output +599,1243744,"TERMINAL",0,0,"[prepend-action-maskgit 9c917fd] added action embedding prepending to sampling code as well\r\n 1 file changed, 5 insertions(+), 2 deletions(-)\r\n]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ ",,terminal_output +600,1244388,"TERMINAL",0,0,"g",,terminal_output +601,1244439,"TERMINAL",0,0,"i",,terminal_output +602,1244503,"TERMINAL",0,0,"t",,terminal_output +603,1244744,"TERMINAL",0,0,"p",,terminal_output +604,1244944,"TERMINAL",0,0,"u",,terminal_output +605,1245265,"TERMINAL",0,0,"",,terminal_output +606,1245449,"TERMINAL",0,0,"",,terminal_output +607,1245800,"TERMINAL",0,0," ",,terminal_output +608,1245921,"TERMINAL",0,0,"p",,terminal_output +609,1246171,"TERMINAL",0,0,"u",,terminal_output +610,1246244,"TERMINAL",0,0,"s",,terminal_output +611,1246351,"TERMINAL",0,0,"h",,terminal_output +612,1246505,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +613,1248588,"TERMINAL",0,0,"Enumerating objects: 7, done.\r\nCounting objects: 14% (1/7)\rCounting objects: 28% (2/7)\rCounting objects: 42% (3/7)\rCounting objects: 57% (4/7)\rCounting objects: 71% (5/7)\rCounting objects: 85% (6/7)\rCounting objects: 100% (7/7)\rCounting objects: 100% (7/7), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 25% (1/4)\rCompressing objects: 50% (2/4)\rCompressing objects: 75% (3/4)\rCompressing objects: 100% (4/4)\rCompressing objects: 100% (4/4), done.\r\nWriting objects: 25% (1/4)\rWriting objects: 50% (2/4)\rWriting objects: 75% (3/4)\rWriting objects: 100% (4/4)\rWriting objects: 100% (4/4), 471 bytes | 471.00 KiB/s, done.\r\nTotal 4 (delta 3), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +614,1248696,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/3)\rremote: Resolving deltas: 33% (1/3)\rremote: Resolving deltas: 66% (2/3)\rremote: Resolving deltas: 100% (3/3)\rremote: Resolving deltas: 100% (3/3), completed with 3 local objects.\r\n",,terminal_output +615,1249022,"TERMINAL",0,0,"To github.com:p-doom/jasmine.git\r\n dd426e3..9c917fd prepend-action-maskgit -> prepend-action-maskgit\r\n",,terminal_output +616,1249155,"TERMINAL",0,0,"]0;tum_cte0515@hkn0401:~/Projects/jasmine[?2004h(jasmine) [tum_cte0515@hkn0401 jasmine]$ 
",,terminal_output +617,1335206,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",0,0,"",shellscript,tab +618,1389057,"TERMINAL",0,0,"bash",,terminal_focus +619,1398864,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1799,0,"",shellscript,selection_mouse +620,1402298,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"",shellscript,selection_command +621,1402827,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,3,"",shellscript,content +622,1403129,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,1,"",shellscript,content +623,1403387,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,5,"",shellscript,content +624,1403757,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,1,"",shellscript,content +625,1404096,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,2,"",shellscript,content +626,1404436,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,1,"",shellscript,content +627,1404788,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,6,"",shellscript,content +628,1405153,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,1,"",shellscript,content +629,1405730,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,8,"",shellscript,content +630,1406224,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,1,"",shellscript,content +631,1407028,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,4,"",shellscript,content +632,1408035,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"prepend-action-maskgit",shellscript,content +633,1410076,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1887,0,"",shellscript,selection_mouse +634,1410629,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1879,8,"",shellscript,content +635,1410814,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1878,1,"",shellscript,content +636,1411073,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1873,5,"",shellscript,content +637,1411349,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1873,0,"prepend-action-maskgit",shellscript,content +638,1412952,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1962,0,"",shellscript,selection_mouse +639,1418623,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",405,0,"",shellscript,selection_mouse +640,1419211,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",404,1,"",shellscript,content 
+641,1419460,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",403,1,"",shellscript,content +642,1419620,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",402,1,"",shellscript,content +643,1419735,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",401,1,"",shellscript,content +644,1419863,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",400,1,"",shellscript,content +645,1419997,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",399,1,"",shellscript,content +646,1420114,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",398,1,"",shellscript,content +647,1420242,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",397,1,"",shellscript,content +648,1420384,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",396,1,"",shellscript,content +649,1420515,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",395,1,"",shellscript,content +650,1420646,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",394,1,"",shellscript,content +651,1420991,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",393,1,"",shellscript,content +652,1421117,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",393,0,"a",shellscript,content +653,1421118,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",394,0,"",shellscript,selection_keyboard +654,1421246,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",394,0,"c",shellscript,content +655,1421247,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",395,0,"",shellscript,selection_keyboard +656,1421465,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",395,0,"t",shellscript,content +657,1421466,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",396,0,"",shellscript,selection_keyboard +658,1421580,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",396,0,"i",shellscript,content +659,1421581,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",397,0,"",shellscript,selection_keyboard +660,1421668,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",397,0,"o",shellscript,content +661,1421669,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",398,0,"",shellscript,selection_keyboard +662,1421818,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",398,0,"n",shellscript,content +663,1421819,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",399,0,"",shellscript,selection_keyboard +664,1422329,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",399,0,"_",shellscript,content 
+665,1422330,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",400,0,"",shellscript,selection_keyboard +666,1422665,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",400,0,"p",shellscript,content +667,1422666,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",401,0,"",shellscript,selection_keyboard +668,1422793,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",401,0,"r",shellscript,content +669,1422794,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",402,0,"",shellscript,selection_keyboard +670,1422979,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",402,0,"e",shellscript,content +671,1422980,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",403,0,"",shellscript,selection_keyboard +672,1423038,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",403,0,"p",shellscript,content +673,1423039,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",404,0,"",shellscript,selection_keyboard +674,1423183,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",404,0,"e",shellscript,content +675,1423184,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",405,0,"",shellscript,selection_keyboard +676,1423339,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",405,0,"n",shellscript,content +677,1423340,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",406,0,"",shellscript,selection_keyboard +678,1423483,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",406,0,"d",shellscript,content +679,1423484,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",407,0,"",shellscript,selection_keyboard +680,1423886,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",407,0,"_",shellscript,content +681,1423887,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",408,0,"",shellscript,selection_keyboard +682,1424247,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",408,0,"b",shellscript,content +683,1424249,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",409,0,"",shellscript,selection_keyboard +684,1424322,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",409,0,"r",shellscript,content +685,1424323,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",410,0,"",shellscript,selection_keyboard +686,1424497,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",410,0,"a",shellscript,content +687,1424498,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",411,0,"",shellscript,selection_keyboard 
+688,1424579,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",411,0,"n",shellscript,content +689,1424580,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",412,0,"",shellscript,selection_keyboard +690,1424722,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",412,0,"c",shellscript,content +691,1424723,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",413,0,"",shellscript,selection_keyboard +692,1424813,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",413,0,"h",shellscript,content +693,1424814,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",414,0,"",shellscript,selection_keyboard +694,1427912,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1279,0,"",shellscript,selection_mouse +695,1427999,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1279,0,"prepend-action-maskgit",shellscript,content +696,1428001,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1301,0,"",shellscript,selection_keyboard +697,1429662,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1279,22,"",shellscript,content +698,1430535,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1273,6,"",shellscript,content +699,1430932,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1272,1,"",shellscript,content +700,1431309,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1267,5,"",shellscript,content +701,1431918,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1267,0,"a",shellscript,content +702,1431919,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1268,0,"",shellscript,selection_keyboard +703,1432058,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1268,0,"c",shellscript,content +704,1432059,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1269,0,"",shellscript,selection_keyboard +705,1432287,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1269,0,"t",shellscript,content +706,1432288,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1270,0,"",shellscript,selection_keyboard +707,1432348,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1270,0,"i",shellscript,content +708,1432349,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1271,0,"",shellscript,selection_keyboard +709,1432451,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1271,0,"o",shellscript,content +710,1432452,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1272,0,"",shellscript,selection_keyboard 
+711,1432593,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1272,0,"n",shellscript,content +712,1432593,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1273,0,"",shellscript,selection_keyboard +713,1433887,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1273,0,"_prepend_branch",shellscript,content +714,1442446,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2021,0,"\n --patch_size=16 \",shellscript,content +715,1446932,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2047,0,"",shellscript,selection_command +716,1447934,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2044,26,"",shellscript,content +717,1447948,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +718,1449977,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2026,0,"",shellscript,selection_command +719,1450414,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +720,1450575,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2026,0,"",shellscript,selection_command +721,1450702,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +722,1450832,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2026,0,"",shellscript,selection_command +723,1450907,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +724,1451074,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2026,0,"",shellscript,selection_command +725,1451117,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +726,1451277,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2026,0,"",shellscript,selection_command +727,1451329,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +728,1451498,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2026,0,"",shellscript,selection_command +729,1451552,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +730,1451790,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2066,0,"",shellscript,selection_command +731,1452144,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +732,1452326,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2026,0,"",shellscript,selection_command 
+733,1452487,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +734,1452611,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2026,0,"",shellscript,selection_command +735,1452700,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +736,1452834,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2026,0,"",shellscript,selection_command +737,1452903,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +738,1453063,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2026,0,"",shellscript,selection_command +739,1453161,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +740,1453293,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2026,0,"",shellscript,selection_command +741,1453367,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2048,0,"",shellscript,selection_command +742,1458848,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch copy.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_patch_size_16_action_prepend_branch\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission debug patch_size_16 action_prepend_branch""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n# Check if the current branch is the main branch\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +743,1460708,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",0,0,"",shellscript,tab +744,1461326,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2180,0,"",shellscript,selection_mouse +745,1462275,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2044,0," --max_noise_level=0 \\n",shellscript,content +746,1462760,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",2021,22,"",shellscript,content +747,1462793,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1273,15,"",shellscript,content +748,1462830,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1267,6,"",shellscript,content +749,1462853,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1267,0,"noise",shellscript,content +750,1462872,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1272,0,"-",shellscript,content +751,1462938,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1273,0,"branch",shellscript,content +752,1462980,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",393,21,"",shellscript,content +753,1462984,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",393,0,"noise_branch",shellscript,content +754,1463000,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1873,22,"",shellscript,content +755,1463058,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1873,0,"noise",shellscript,content +756,1463071,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1878,0,"-",shellscript,content +757,1463096,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1879,0,"exposure",shellscript,content +758,1463124,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,22,"",shellscript,content +759,1463165,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"bias",shellscript,content +760,1463177,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"-",shellscript,content +761,1463234,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"exposure",shellscript,content +762,1463235,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"-",shellscript,content 
+763,1463301,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"combat",shellscript,content +764,1463302,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"-",shellscript,content +765,1463358,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"to",shellscript,content +766,1463359,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"-",shellscript,content +767,1463390,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"noise",shellscript,content +768,1463432,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"-",shellscript,content +769,1463474,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch.sh",1798,0,"add",shellscript,content +770,1468114,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_noise-branch copy.sh",0,0,"",shellscript,tab +771,1478936,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_patch_size_16_action_prepend_branch\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission debug patch_size_16 action_prepend_branch""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n# Check if the current branch is the main branch\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +772,1494107,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend copy.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_patch_size_16_action_prepend_branch\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission debug patch_size_16 action_prepend_branch""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n# Check if the current branch is the main branch\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +773,1502799,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_patch_size_16_action_prepend_branch\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission debug patch_size_16 action_prepend_branch""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_mila_submission_29736/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n# Check if the current branch is the main branch\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""prepend-action-maskgit"" ]; then\n echo ""This script must be run from the prepend-action-maskgit branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=16 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +774,1503541,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",956,0,"",shellscript,selection_mouse +775,1504908,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",1264,1,"1",shellscript,selection_command +776,1504999,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",1264,2,"16",shellscript,selection_command +777,1512793,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",1264,2,"4",shellscript,content +778,1512797,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",2038,2,"16",shellscript,selection_command +779,1513777,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",2038,2,"4",shellscript,content +780,1513780,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",390,2,"16",shellscript,selection_command +781,1514641,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",390,2,"4",shellscript,content +782,1523826,"jasmine/train_dynamics.py",0,0,"import os\n\n\nos.environ.setdefault(""XLA_PYTHON_CLIENT_MEM_FRACTION"", ""0.98"")\n\nfrom dataclasses import dataclass, field\nimport itertools\nfrom typing import cast, Optional\n\nimport einops\nfrom jax.sharding import Mesh, PartitionSpec, NamedSharding\nfrom jax.experimental.mesh_utils import create_device_mesh\nimport optax\nimport orbax.checkpoint as ocp\nimport numpy as np\nimport dm_pix as pix\nimport jax\nimport jax.numpy as jnp\nimport tyro\nimport wandb\nimport grain\nimport flax.nnx as nnx\n\nfrom genie import Genie, restore_genie_components\nfrom utils.dataloader import get_dataloader\nfrom utils.train_utils import (\n get_lr_schedule,\n count_parameters_by_component,\n print_mem_stats,\n print_compiled_memory_stats,\n print_compiled_cost_analysis,\n)\n\n\n@dataclass\nclass Args:\n # Experiment\n num_steps: int = 200_000\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 64\n image_width: int = 64\n data_dir: str = """"\n save_ckpt: bool = False\n restore_ckpt: bool = False\n # Optimization\n batch_size: int = 36\n init_lr: float = 0.0\n max_lr: float = 3e-5\n decay_end: float = 0.0\n wsd_decay_steps: int = (\n 20_000 # NOTE: wsd_decay_steps will only be used when using a wsd-schedule\n )\n warmup_steps: int = 5000\n lr_schedule: str = ""wsd"" # supported options: wsd, cos\n # Tokenizer\n tokenizer_dim: int = 512\n tokenizer_ffn_dim: int = 2048\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 16\n tokenizer_num_blocks: int = 4\n tokenizer_num_heads: int = 8\n tokenizer_checkpoint: str = """"\n # LAM\n lam_dim: int = 512\n lam_ffn_dim: int = 2048\n latent_action_dim: int = 32\n num_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 4\n lam_num_heads: int = 
8\n lam_checkpoint: str = """"\n # Dynamics\n dyna_type: str = ""maskgit"" # supported options: maskgit, causal\n dyna_dim: int = 512\n dyna_ffn_dim: int = 2048\n dyna_num_blocks: int = 6\n dyna_num_heads: int = 8\n dropout: float = 0.0\n mask_limit: float = 0.5\n z_loss_weight: float = 0.0\n param_dtype = jnp.float32\n dtype = jnp.bfloat16\n use_flash_attention: bool = True\n use_gt_actions: bool = False\n # Logging\n log: bool = True\n entity: str = """"\n project: str = """"\n name: str = ""train_dynamics""\n tags: list[str] = field(default_factory=lambda: [""dynamics""])\n log_interval: int = 50\n log_image_interval: int = 1000\n ckpt_dir: str = """"\n log_checkpoint_interval: int = 5000\n log_checkpoint_keep_period: int = 20_000\n log_gradients: bool = False\n val_data_dir: str = """"\n val_interval: int = 20_000\n val_steps: int = 50\n eval_full_frame: bool = True\n val_maskgit_steps: int = 25\n val_temperature: float = 1\n val_sample_argmax: bool = False\n wandb_id: str = """"\n\n\ndef build_model(args: Args, rng: jax.Array) -> tuple[Genie, jax.Array]:\n rng, _rng = jax.random.split(rng)\n rngs = nnx.Rngs(_rng)\n genie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n tokenizer_ffn_dim=args.tokenizer_ffn_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n lam_ffn_dim=args.lam_ffn_dim,\n latent_action_dim=args.latent_action_dim,\n num_actions=args.num_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n lam_co_train=not args.lam_checkpoint,\n use_gt_actions=args.use_gt_actions,\n # Dynamics\n dyna_type=args.dyna_type,\n dyna_dim=args.dyna_dim,\n dyna_ffn_dim=args.dyna_ffn_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n dropout=args.dropout,\n mask_limit=args.mask_limit,\n param_dtype=args.param_dtype,\n dtype=args.dtype,\n use_flash_attention=args.use_flash_attention,\n decode=False,\n rngs=rngs,\n )\n if args.use_gt_actions:\n assert (\n not args.lam_checkpoint\n ), ""Cannot use LAM when using ground-truth actions.""\n else:\n assert genie.lam is not None\n del genie.lam.decoder\n return genie, rng\n\n\ndef build_optimizer(genie: Genie, args: Args) -> nnx.ModelAndOptimizer:\n lr_schedule = get_lr_schedule(\n args.lr_schedule,\n args.init_lr,\n args.max_lr,\n args.decay_end,\n args.num_steps,\n args.warmup_steps,\n args.wsd_decay_steps,\n )\n tx = optax.adamw(\n learning_rate=lr_schedule,\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n mu_dtype=args.param_dtype, # moments in full precision\n )\n optimizer = nnx.ModelAndOptimizer(genie, tx)\n return optimizer\n\n\ndef build_mesh_and_sharding(\n num_devices: int,\n) -> tuple[Mesh, NamedSharding, NamedSharding, NamedSharding]:\n device_mesh_arr = create_device_mesh((num_devices,))\n mesh = Mesh(devices=device_mesh_arr, axis_names=(""data"",))\n replicated_sharding = NamedSharding(mesh, PartitionSpec())\n videos_sharding = NamedSharding(mesh, PartitionSpec(""data"", None, None, None, None))\n actions_sharding = NamedSharding(mesh, PartitionSpec(""data"", None))\n return mesh, replicated_sharding, videos_sharding, actions_sharding\n\n\ndef shard_optimizer_states(\n optimizer: nnx.ModelAndOptimizer, replicated_sharding: NamedSharding\n) -> None:\n model_state = nnx.state(optimizer.model)\n 
model_sharded_state = jax.lax.with_sharding_constraint(\n model_state, replicated_sharding\n )\n nnx.update(optimizer.model, model_sharded_state)\n optimizer_state = nnx.state(optimizer, nnx.optimizer.OptState)\n optimizer_sharded_state = jax.lax.with_sharding_constraint(\n optimizer_state, replicated_sharding\n )\n nnx.update(optimizer, optimizer_sharded_state)\n\n\ndef build_dataloader(args: Args, data_dir: str) -> grain.DataLoaderIterator:\n image_shape = (args.image_height, args.image_width, args.image_channels)\n array_record_files = [\n os.path.join(data_dir, x)\n for x in os.listdir(data_dir)\n if x.endswith("".array_record"")\n ]\n grain_dataloader = get_dataloader(\n array_record_files,\n args.seq_len,\n # NOTE: We deliberately pass the global batch size\n # The dataloader shards the dataset across all processes\n args.batch_size,\n *image_shape,\n num_workers=8,\n prefetch_buffer_size=1,\n seed=args.seed,\n )\n initial_state = grain_dataloader._create_initial_state()\n grain_iterator = grain.DataLoaderIterator(grain_dataloader, initial_state)\n return grain_iterator\n\n\ndef build_checkpoint_manager(args: Args) -> Optional[ocp.CheckpointManager]:\n if args.restore_ckpt or args.save_ckpt:\n handler_registry = ocp.handlers.DefaultCheckpointHandlerRegistry()\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeSave, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""model_state"", ocp.args.PyTreeRestore, ocp.handlers.PyTreeCheckpointHandler\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n handler_registry.add(\n ""train_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler),\n )\n if args.val_data_dir:\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointSave,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n handler_registry.add(\n ""val_dataloader_state"",\n grain.checkpoint.CheckpointRestore,\n cast(\n ocp.handlers.CheckpointHandler, grain.checkpoint.CheckpointHandler\n ),\n )\n checkpoint_options = ocp.CheckpointManagerOptions(\n save_interval_steps=args.log_checkpoint_interval,\n max_to_keep=3,\n keep_period=args.log_checkpoint_keep_period,\n step_format_fixed_length=6,\n cleanup_tmp_directories=True,\n )\n checkpoint_manager = ocp.CheckpointManager(\n args.ckpt_dir,\n options=checkpoint_options,\n handler_registry=handler_registry,\n )\n return checkpoint_manager\n else:\n return None\n\n\ndef restore_or_initialize_components(\n args: Args,\n checkpoint_manager: Optional[ocp.CheckpointManager],\n optimizer: nnx.ModelAndOptimizer,\n train_iterator: grain.DataLoaderIterator,\n rng: jax.Array,\n replicated_sharding: NamedSharding,\n val_iterator: Optional[grain.DataLoaderIterator],\n restore_step: Optional[int] = None,\n) -> tuple[\n int,\n nnx.ModelAndOptimizer,\n grain.DataLoaderIterator,\n grain.DataLoaderIterator,\n jax.Array,\n]:\n step = 0\n if checkpoint_manager and restore_step is None:\n restore_step = checkpoint_manager.latest_step()\n if args.restore_ckpt:\n assert checkpoint_manager is not None\n abstract_optimizer = nnx.eval_shape(lambda: optimizer)\n abstract_optimizer_state = nnx.state(abstract_optimizer)\n if val_iterator:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n 
train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n val_dataloader_state=grain.checkpoint.CheckpointRestore(val_iterator), # type: ignore\n )\n else:\n restore_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeRestore(abstract_optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointRestore(train_iterator), # type: ignore\n )\n restored = checkpoint_manager.restore(\n checkpoint_manager.latest_step(), args=restore_args\n )\n restored_optimizer_state = restored[""model_state""]\n nnx.update(optimizer, restored_optimizer_state)\n train_iterator = restored[""train_dataloader_state""]\n if val_iterator:\n val_iterator = restored[""val_dataloader_state""]\n step = checkpoint_manager.latest_step() or 0\n print(f""Restored dataloader and model state from step {step}"")\n else:\n # Restore from pre-trained tokenizer (and LAM)\n rng, _rng = jax.random.split(rng)\n optimizer = restore_genie_components(optimizer, replicated_sharding, _rng, args)\n return step, optimizer, train_iterator, val_iterator, rng\n\n\ndef _calculate_top_k_accuracy(\n token_logits_BTNV: jax.Array,\n video_tokens_BTN: jax.Array,\n mask_BTN: jax.Array,\n k: int,\n) -> jax.Array:\n _, topk_indices_BTNK = jax.lax.top_k(token_logits_BTNV, k)\n topk_correct = jnp.any(\n topk_indices_BTNK == video_tokens_BTN[..., jnp.newaxis], axis=-1\n )\n topk_acc = (mask_BTN * topk_correct).sum() / mask_BTN.sum()\n return topk_acc\n\n\ndef _calculate_step_metrics(\n outputs: dict[str, jax.Array],\n gt: jax.Array,\n num_actions: int,\n num_patch_latents: int,\n) -> tuple[jax.Array, dict]:\n mask_BTN = outputs[""mask""]\n outputs[""token_logits""] = outputs[""token_logits""].astype(jnp.float32)\n ce_loss = optax.softmax_cross_entropy_with_integer_labels(\n outputs[""token_logits""], outputs[""video_tokens""]\n )\n ce_loss = (mask_BTN * ce_loss).sum() / mask_BTN.sum()\n z_val = jax.nn.logsumexp(outputs[""token_logits""], axis=-1)\n z_loss_metric = (mask_BTN * (z_val**2)).sum() / mask_BTN.sum()\n\n masked_token_top_1_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 1\n )\n masked_token_top_2_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 2\n )\n masked_token_top_5_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 5\n )\n masked_token_top_16_acc = _calculate_top_k_accuracy(\n outputs[""token_logits""], outputs[""video_tokens""], mask_BTN, 16\n )\n\n select_probs = jax.nn.softmax(outputs[""token_logits""])\n gt_val = gt.clip(0, 1).reshape(-1, *gt.shape[2:])\n recon = outputs[""recon""].clip(0, 1).reshape(-1, *outputs[""recon""].shape[2:])\n psnr = jnp.asarray(pix.psnr(gt_val, recon)).mean()\n ssim = jnp.asarray(pix.ssim(gt_val, recon)).mean()\n _, index_counts_tokenizer = jnp.unique_counts(\n jnp.ravel(outputs[""video_tokens""]),\n size=num_patch_latents,\n fill_value=0,\n )\n codebook_usage_tokenizer = (index_counts_tokenizer != 0).mean()\n metrics = dict(\n cross_entropy_loss=ce_loss,\n masked_token_top1_accuracy=masked_token_top_1_acc,\n masked_token_top2_accuracy=masked_token_top_2_acc,\n masked_token_top5_accuracy=masked_token_top_5_acc,\n masked_token_top16_accuracy=masked_token_top_16_acc,\n select_logit=outputs[""token_logits""].max(-1).mean(),\n select_p=select_probs.max(-1).mean(),\n entropy=jax.scipy.special.entr(select_probs).sum(-1).mean(),\n z_loss=z_loss_metric,\n psnr=psnr,\n ssim=ssim,\n 
codebook_usage_tokenizer=codebook_usage_tokenizer,\n )\n if ""lam_indices"" in outputs.keys():\n _, index_counts_lam = jnp.unique_counts(\n jnp.ravel(outputs[""lam_indices""]),\n size=num_actions,\n fill_value=0,\n )\n codebook_usage_lam = (index_counts_lam != 0).mean()\n metrics[""codebook_usage_lam""] = codebook_usage_lam\n return ce_loss, metrics\n\n\ndef main(args: Args) -> None:\n jax.distributed.initialize()\n num_devices = jax.device_count()\n if num_devices == 0:\n raise ValueError(""No JAX devices found."")\n print(f""Running on {num_devices} devices."")\n\n if args.batch_size % num_devices != 0:\n raise ValueError(\n f""Global batch size {args.batch_size} must be divisible by ""\n f""number of devices {num_devices}.""\n )\n\n rng = jax.random.key(args.seed)\n\n # --- Initialize model ---\n genie, rng = build_model(args, rng)\n _, params, _ = nnx.split(genie, nnx.Param, ...)\n param_counts = count_parameters_by_component(params)\n\n if args.log and jax.process_index() == 0:\n wandb_init_kwargs = {\n ""entity"": args.entity,\n ""project"": args.project,\n ""name"": args.name,\n ""tags"": args.tags,\n ""group"": ""debug"",\n ""config"": args,\n }\n\n if args.wandb_id:\n wandb_init_kwargs.update(\n {\n ""id"": args.wandb_id,\n ""resume"": ""allow"",\n }\n )\n wandb.init(**wandb_init_kwargs)\n\n wandb.config.update({""model_param_count"": param_counts})\n\n print(""Parameter counts:"")\n print(param_counts)\n\n # --- Initialize optimizer ---\n optimizer = build_optimizer(genie, args)\n del genie\n\n # FIXME: switch to create_hybrid_device_mesh for runs spanning multiple nodes\n _, replicated_sharding, videos_sharding, actions_sharding = build_mesh_and_sharding(\n num_devices\n )\n\n shard_optimizer_states(optimizer, replicated_sharding)\n\n # --- Initialize checkpoint manager ---\n checkpoint_manager = build_checkpoint_manager(args)\n\n # --- Create DataLoaderIterator from dataloader ---\n train_iterator = build_dataloader(args, args.data_dir)\n val_iterator = None\n if args.val_data_dir:\n val_iterator = build_dataloader(args, args.val_data_dir)\n\n # --- Restore checkpoint ---\n step, optimizer, train_iterator, val_iterator, rng = (\n restore_or_initialize_components(\n args,\n checkpoint_manager,\n optimizer,\n train_iterator,\n rng,\n replicated_sharding,\n val_iterator,\n )\n )\n\n # --- Define loss and train step (close over args) ---\n def dynamics_loss_fn(\n model: Genie,\n inputs: dict,\n ) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n inputs[""videos""] = gt.astype(args.dtype)\n outputs = model(inputs)\n ce_loss, metrics = _calculate_step_metrics(\n outputs, gt, args.num_actions, args.num_patch_latents\n )\n z_loss = metrics[""z_loss""]\n total_loss = ce_loss + args.z_loss_weight * z_loss\n metrics[""total_loss""] = total_loss\n return total_loss, (outputs[""recon""], metrics)\n\n @nnx.jit(donate_argnums=0)\n def train_step(\n optimizer: nnx.ModelAndOptimizer, inputs: dict\n ) -> tuple[jax.Array, jax.Array, dict]:\n def loss_fn(model: Genie) -> tuple[jax.Array, tuple[jax.Array, dict]]:\n model.train()\n return dynamics_loss_fn(model, inputs)\n\n (loss, (recon, metrics)), grads = nnx.value_and_grad(loss_fn, has_aux=True)(\n optimizer.model\n )\n optimizer.update(grads)\n if args.log_gradients:\n metrics[""gradients_std/""] = jax.tree.map(\n lambda x: x.std(), grads[""params""][""dynamics""]\n )\n return loss, recon, metrics\n\n @nnx.jit\n def val_step(genie: Genie, inputs: dict) -> dict:\n """"""Evaluate model 
and compute metrics""""""\n genie.eval()\n gt = jnp.asarray(inputs[""videos""], dtype=jnp.float32) / 255.0\n (loss, (recon, metrics)) = dynamics_loss_fn(genie, inputs)\n val_output = {""loss"": loss, ""recon"": recon, ""metrics"": metrics}\n\n # --- Evaluate full frame prediction (sampling) ---\n if args.eval_full_frame:\n inputs[""videos""] = gt.astype(args.dtype)\n tokenizer_outputs = genie.tokenizer.vq_encode(\n inputs[""videos""], training=False\n )\n tokens_full_frame = tokenizer_outputs[""indices""]\n lam_indices_E = None\n if not args.use_gt_actions:\n lam_indices_E = genie.vq_encode(inputs, training=False)\n inputs[""latent_actions""] = lam_indices_E\n inputs[""videos""] = inputs[""videos""][\n :, :-1\n ] # remove last frame for generation\n recon_full_frame, logits_full_frame = genie.sample(\n inputs,\n args.seq_len,\n args.val_temperature,\n args.val_sample_argmax,\n args.val_maskgit_steps,\n )\n # Calculate metrics for the last frame only\n step_outputs = {\n ""recon"": recon_full_frame[:, -1],\n ""token_logits"": logits_full_frame[:, -1],\n ""video_tokens"": tokens_full_frame[:, -1],\n ""mask"": jnp.ones_like(tokens_full_frame[:, -1]),\n }\n if lam_indices_E is not None:\n lam_indices_B = lam_indices_E.reshape((-1, args.seq_len - 1))[:, -1]\n step_outputs[""lam_indices""] = lam_indices_B\n\n loss_full_frame, metrics_full_frame = _calculate_step_metrics(\n step_outputs, gt[:, -1], args.num_actions, args.num_patch_latents\n )\n val_output.update(\n {\n ""loss_full_frame"": loss_full_frame,\n ""recon_full_frame"": recon_full_frame,\n ""metrics_full_frame"": metrics_full_frame,\n }\n )\n return val_output\n\n def calculate_validation_metrics(val_dataloader, genie, rng):\n step = 0\n loss_per_step = []\n metrics_per_step = []\n loss_full_frame_per_step = []\n metrics_full_frame_per_step = []\n batch = None\n recon = None\n recon_full_frame = None\n for batch in val_dataloader:\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n val_outputs = val_step(genie, batch)\n loss_per_step.append(val_outputs[""loss""])\n metrics_per_step.append(val_outputs[""metrics""])\n recon = val_outputs[""recon""]\n if args.eval_full_frame:\n loss_full_frame_per_step.append(val_outputs[""loss_full_frame""])\n metrics_full_frame_per_step.append(val_outputs[""metrics_full_frame""])\n recon_full_frame = val_outputs[""recon_full_frame""]\n step += 1\n if step > args.val_steps:\n break\n\n if step < args.val_steps:\n print(\n f""Warning: Your validation dataset is too small to make val_steps many steps. 
Made {step} steps, expected {args.val_steps}""\n )\n\n val_metrics = {\n f""val_{key}"": np.mean([float(m[key]) for m in metrics_per_step])\n for key in metrics_per_step[0].keys()\n }\n val_metrics[""val_loss""] = np.mean(loss_per_step)\n if args.eval_full_frame:\n val_metrics_full_frame = {\n f""val_full_frame_{key}"": np.mean(\n [float(m[key]) for m in metrics_full_frame_per_step]\n )\n for key in metrics_full_frame_per_step[0].keys()\n }\n val_metrics.update(val_metrics_full_frame)\n val_metrics[""val_full_frame_loss""] = np.mean(loss_full_frame_per_step)\n return val_metrics, batch, recon, recon_full_frame\n\n # --- TRAIN LOOP ---\n dataloader_train = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, local_data=elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in train_iterator\n )\n dataloader_val = None\n if val_iterator:\n dataloader_val = (\n {\n ""videos"": jax.make_array_from_process_local_data(\n videos_sharding, elem[""videos""]\n ),\n ""actions"": (\n jax.make_array_from_process_local_data(\n actions_sharding, elem[""actions""]\n )\n if args.use_gt_actions\n else None\n ),\n }\n for elem in val_iterator\n )\n if jax.process_index() == 0:\n first_batch = next(dataloader_train)\n first_batch[""rng""] = rng # type: ignore\n compiled = train_step.lower(optimizer, first_batch).compile()\n print_compiled_memory_stats(compiled.memory_analysis())\n print_compiled_cost_analysis(compiled.cost_analysis())\n # Do not skip the first batch during training\n dataloader_train = itertools.chain([first_batch], dataloader_train)\n print(f""Starting training from step {step}..."")\n first_step = step\n while step < args.num_steps:\n for batch in dataloader_train:\n # --- Train step ---\n rng, _rng_mask = jax.random.split(rng, 2)\n batch[""rng""] = _rng_mask\n loss, recon, metrics = train_step(optimizer, batch)\n if step == first_step:\n print_mem_stats(""After params initialized"")\n step += 1\n\n # --- Validation loss ---\n val_results = {}\n if dataloader_val and step % args.val_interval == 0:\n rng, _rng_mask_val = jax.random.split(rng, 2)\n print(""Calculating validation metrics..."")\n val_metrics, val_gt_batch, val_recon, val_recon_full_frame = (\n calculate_validation_metrics(\n dataloader_val, optimizer.model, _rng_mask_val\n )\n )\n print(f""Step {step}, validation loss: {val_metrics['val_loss']}"")\n val_results = {\n ""metrics"": val_metrics,\n ""gt_batch"": val_gt_batch,\n ""recon"": val_recon,\n ""full_frame"": val_recon_full_frame,\n }\n\n # --- Logging ---\n if args.log:\n if step % args.log_interval == 0 and jax.process_index() == 0:\n log_dict = {""loss"": loss, ""step"": step, **metrics}\n if val_results:\n log_dict.update(val_results[""metrics""])\n wandb.log(log_dict)\n if step % args.log_image_interval == 0:\n gt_seq = batch[""videos""][0].astype(jnp.float32) / 255.0\n recon_seq = recon[0].clip(0, 1)\n comparison_seq = jnp.concatenate((gt_seq, recon_seq), axis=1)\n comparison_seq = einops.rearrange(\n comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if val_results:\n val_results[""gt_seq_val""] = (\n val_results[""gt_batch""][""videos""][0].astype(jnp.float32)\n / 255.0\n )\n val_results[""recon_seq_val""] = val_results[""recon""][0].clip(\n 0, 1\n )\n val_comparison_seq = jnp.concatenate(\n (val_results[""gt_seq_val""], val_results[""recon_seq_val""]),\n axis=1,\n )\n val_results[""val_comparison_seq""] = 
einops.rearrange(\n val_comparison_seq * 255, ""t h w c -> h (t w) c""\n )\n if args.eval_full_frame:\n val_results[""full_frame_seq_val""] = val_results[\n ""full_frame""\n ][0].clip(0, 1)\n val_results[""val_full_frame_comparison_seq""] = (\n jnp.concatenate(\n (\n val_results[""gt_seq_val""],\n val_results[""full_frame_seq_val""],\n ),\n axis=1,\n )\n )\n val_results[""val_full_frame_comparison_seq""] = (\n einops.rearrange(\n val_results[""val_full_frame_comparison_seq""] * 255,\n ""t h w c -> h (t w) c"",\n )\n )\n # NOTE: Process-dependent control flow deliberately happens\n # after indexing operation since it must not contain code\n # sections that lead to cross-accelerator communication.\n if jax.process_index() == 0:\n log_images = dict(\n image=wandb.Image(np.asarray(gt_seq[args.seq_len - 1])),\n recon=wandb.Image(np.asarray(recon_seq[args.seq_len - 1])),\n true_vs_recon=wandb.Image(\n np.asarray(comparison_seq.astype(np.uint8))\n ),\n )\n if val_results:\n log_images.update(\n dict(\n val_image=wandb.Image(\n np.asarray(\n val_results[""gt_seq_val""][args.seq_len - 1]\n )\n ),\n val_recon=wandb.Image(\n np.asarray(\n val_results[""recon_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_recon=wandb.Image(\n np.asarray(\n val_results[""val_comparison_seq""].astype(\n np.uint8\n )\n )\n ),\n )\n )\n if args.eval_full_frame:\n log_images.update(\n dict(\n val_full_frame=wandb.Image(\n np.asarray(\n val_results[""full_frame_seq_val""][\n args.seq_len - 1\n ]\n )\n ),\n val_true_vs_full_frame=wandb.Image(\n np.asarray(\n val_results[\n ""val_full_frame_comparison_seq""\n ].astype(np.uint8)\n )\n ),\n )\n )\n wandb.log(log_images)\n # --- Checkpointing ---\n if args.save_ckpt and step % args.log_checkpoint_interval == 0:\n assert checkpoint_manager is not None\n optimizer_state = nnx.state(optimizer)\n if val_iterator:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n val_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n val_iterator # type: ignore\n ),\n )\n else:\n ckpt_manager_args = ocp.args.Composite(\n model_state=ocp.args.PyTreeSave(optimizer_state), # type: ignore\n train_dataloader_state=grain.checkpoint.CheckpointSave( # type: ignore\n train_iterator # type: ignore\n ),\n )\n checkpoint_manager.save(step, args=ckpt_manager_args)\n print(f""Saved checkpoint at step {step}"")\n if step >= args.num_steps:\n break\n\n if checkpoint_manager:\n checkpoint_manager.close()\n\n\nif __name__ == ""__main__"":\n args = tyro.cli(Args)\n main(args)\n",python,tab +783,1525045,"jasmine/train_dynamics.py",1662,0,"",python,selection_mouse +784,1525046,"jasmine/train_dynamics.py",1661,0,"",python,selection_command +785,1530800,"jasmine/train_dynamics.py",1516,0,"",python,selection_mouse +786,1534081,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh",0,0,"",shellscript,tab +787,1535751,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh",2061,0,"",shellscript,selection_mouse +788,1535764,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh",2060,0,"",shellscript,selection_command +789,1536324,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh",2037,0,"",shellscript,selection_mouse 
+790,1537172,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh",2022,22,"",shellscript,content +791,1537186,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh",2026,0,"",shellscript,selection_command +792,1550641,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_noise-branch.sh",0,0,"#!/usr/bin/env bash\n\n#SBATCH --nodes=1\n#SBATCH --ntasks-per-node=1\n#SBATCH --time=24:00:00\n#SBATCH --cpus-per-task=8\n#SBATCH --gres=gpu:1\n#SBATCH --output=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --error=/fast/project/HFMI_SynergyUnit/jafar_ws/logs/franz/coinrun/dynamics/%x_%j.log\n#SBATCH --job-name=dynamics_coinrun_mila_submission_patch_size_4_noise_branch\n#SBATCH --requeue\n#SBATCH --signal=b:usr1@300 # 5 min before timeout\n\n# --- signal trap to requeue job before timeout ---\nrequeue_job() {\n echo ""[$(date)] caught sigusr1 (timeout warning), requeueing slurm job $SLURM_JOB_ID...""\n # optional: trigger checkpoint saving here\n # e.g., touch $checkpoint_dir/requeue_trigger\n scontrol requeue $SLURM_JOB_ID\n exit 0\n}\n\ntrap requeue_job sigusr1\n\n# set checkpoint flag based on restart count\nrestart_count=$(scontrol show job $SLURM_JOB_ID | grep -o 'Restarts=[0-9]*' | cut -d'=' -f2)\n\nif [ $restart_count -eq 0 ]; then\n restore_ckpt_flag=""--no-restore-ckpt""\nelse\n restore_ckpt_flag=""--restore-ckpt""\nfi\n\n\n\n# Log the sbatch script\ncat $0\n\nsource .venv/bin/activate\n\njob_name=$SLURM_JOB_NAME\nslurm_job_id=$SLURM_JOB_ID\n\ntags=""coinrun dynamics 500m_dataset mila_submission debug patch_size_4 noise-branch""\n\narray_records_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/data/coinrun/array_records_500m_seed_w_increment""\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""\nCHECKPOINT_DIR=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/dynamics/${job_name}/${slurm_job_id}""\nmkdir -p $CHECKPOINT_DIR\n\nenv | grep SLURM\n\n# Check if the current branch is the main branch\ncurrent_branch=$(git rev-parse --abbrev-ref HEAD)\nif [ ""$current_branch"" != ""add-noise-to-combat-exposure-bias"" ]; then\n echo ""This script must be run from the noise-exposure branch. Current branch is $current_branch. 
Exiting.""\n exit 1\nfi\n\nsrun python jasmine/train_dynamics.py \\n --patch_size=4 \\n --max_noise_level=0 \\n --save_ckpt \\n $restore_ckpt_flag \\n --wandb_id $SLURM_JOB_ID \\n --ckpt_dir $CHECKPOINT_DIR \\n --name=""${job_name}_${slurm_job_id}"" \\n --tags ${tags} \\n --entity instant-uv \\n --project jafar \\n --tokenizer_checkpoint=""${tokenizer_ckpt_dir}"" \\n --val_data_dir=""${array_records_dir}/val"" \\n --data_dir=""${array_records_dir}/train"" &\n\nchild_pid=$!\n\nwait $child_pid\n\n",shellscript,tab +793,1551894,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_noise-branch.sh",1461,0,"",shellscript,selection_mouse +794,1552724,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_noise-branch.sh",1457,0,"",shellscript,selection_mouse +795,1556453,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",0,0,"",shellscript,tab +796,1559226,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",1416,0,"",shellscript,selection_mouse +797,1559917,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",1529,0,"\ntokenizer_ckpt_dir=""/fast/project/HFMI_SynergyUnit/jafar_ws/checkpoints/coinrun/tokenizer/tokenizer_coinrun_500m_dataset_29490/""",shellscript,content +798,1559932,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",1530,0,"",shellscript,selection_command +799,1560288,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",1398,0,"",shellscript,selection_command +800,1560853,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",1398,132,"",shellscript,content +801,1607600,"TERMINAL",0,0,"cd slurm/",,terminal_command +802,1614550,"TERMINAL",0,0,"git add jobs/franz/berlin/coinrun/submission_debug",,terminal_command +803,1614562,"TERMINAL",0,0,"]633;C]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +804,1620694,"TERMINAL",0,0,"git status",,terminal_command +805,1620708,"TERMINAL",0,0,"]633;COn branch main\r\nYour branch is up to date with 'origin/main'.\r\n\r\nChanges to be committed:\r\n (use ""git restore --staged ..."" to unstage)\r\n\tnew file: jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh\r\n\tnew file: jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: jobs/mihir/horeka/coinrun/train_dyn_single_gpu.sh\r\n\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +806,1638034,"TERMINAL",0,0,"git commit -m ""added scripts für action_prepend branch""",,terminal_command +807,1638079,"TERMINAL",0,0,"]633;C",,terminal_output +808,1638279,"TERMINAL",0,0,"[main c792f14] added scripts für action_prepend branch\r\n 2 files changed, 153 insertions(+)\r\n create mode 100644 jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh\r\n create mode 100644 jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh\r\n]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +809,1639986,"TERMINAL",0,0,"git push",,terminal_command 
+810,1640036,"TERMINAL",0,0,"]633;C",,terminal_output +811,1641328,"TERMINAL",0,0,"Enumerating objects: 15, done.\r\nCounting objects: 6% (1/15)\rCounting objects: 13% (2/15)\rCounting objects: 20% (3/15)\rCounting objects: 26% (4/15)\rCounting objects: 33% (5/15)\rCounting objects: 40% (6/15)\rCounting objects: 46% (7/15)\rCounting objects: 53% (8/15)\rCounting objects: 60% (9/15)\rCounting objects: 66% (10/15)\rCounting objects: 73% (11/15)\rCounting objects: 80% (12/15)\rCounting objects: 86% (13/15)\rCounting objects: 93% (14/15)\rCounting objects: 100% (15/15)\rCounting objects: 100% (15/15), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 11% (1/9)\rCompressing objects: 22% (2/9)\rCompressing objects: 33% (3/9)\rCompressing objects: 44% (4/9)\rCompressing objects: 55% (5/9)\rCompressing objects: 66% (6/9)\rCompressing objects: 77% (7/9)\rCompressing objects: 88% (8/9)\rCompressing objects: 100% (9/9)\rCompressing objects: 100% (9/9), done.\r\nWriting objects: 11% (1/9)\rWriting objects: 22% (2/9)\rWriting objects: 33% (3/9)\rWriting objects: 44% (4/9)\rWriting objects: 55% (5/9)\rWriting objects: 66% (6/9)\rWriting objects: 77% (7/9)\rWriting objects: 100% (9/9)\rWriting objects: 100% (9/9), 1.83 KiB | 626.00 KiB/s, done.\r\nTotal 9 (delta 4), reused 0 (delta 0), pack-reused 0\r\n",,terminal_output +812,1641441,"TERMINAL",0,0,"remote: Resolving deltas: 0% (0/4)\rremote: Resolving deltas: 25% (1/4)\rremote: Resolving deltas: 50% (2/4)\rremote: Resolving deltas: 75% (3/4)\rremote: Resolving deltas: 100% (4/4)\rremote: Resolving deltas: 100% (4/4), completed with 3 local objects.\r\n",,terminal_output +813,1641553,"TERMINAL",0,0,"To github.com:p-doom/slurm.git\r\n c222ad1..c792f14 main -> main\r\n",,terminal_output +814,1641574,"TERMINAL",0,0,"]0;tum_cte0515@hkn1990:~/Projects/jasmine/slurm",,terminal_output +815,1668531,"TERMINAL",0,0,"cd ..",,terminal_command +816,1669980,"TERMINAL",0,0,"branch",,terminal_command +817,1856874,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_16_action_prepend.sh",0,0,"",shellscript,tab +818,2054051,"slurm/jobs/franz/berlin/coinrun/submission_debug/coinrun_dynamics_base_patch_size_4_action_prepend.sh",0,0,"",shellscript,tab diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-e9c5a28e-55ca-497d-97b5-e0c37d2af2781751878142668-2025_07_07-10.49.22.270/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-e9c5a28e-55ca-497d-97b5-e0c37d2af2781751878142668-2025_07_07-10.49.22.270/source.csv new file mode 100644 index 0000000000000000000000000000000000000000..df72a34679e322599c24c4d3309c44f74a328a8e --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-e9c5a28e-55ca-497d-97b5-e0c37d2af2781751878142668-2025_07_07-10.49.22.270/source.csv @@ -0,0 +1,213 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +2,764,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"10:49:22 AM [info] Activating crowd-code\n10:49:22 AM [info] Recording started\n10:49:22 AM [info] Initializing git provider using file system watchers...\n10:49:22 AM [info] Git repository found\n10:49:22 AM [info] Git provider initialized successfully\n10:49:22 AM [info] Initial git state: [object Object]\n",Log,tab +3,4308,"TERMINAL",0,0,"/bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py 
/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt",,terminal_command +4,4359,"TERMINAL",0,0,"]633;E;2025-07-07 10:49:26 /bin/python3 /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/printEnvVariablesToFile.py /hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash/envVars.txt;bf94d117-cd0c-449b-b408-59a02a05b60e]633;C]0;tum_cte0515@hkn1991:/hkfs/home/project/hk-project-p0023960/tum_cte0515/.cursor-server/extensions/ms-python.python-2024.12.3-linux-x64/python_files/deactivate/bash]633;D;0",,terminal_output +5,7832,"TERMINAL",0,0,"git branch",,terminal_command +6,7884,"TERMINAL",0,0,"]633;E;2025-07-07 10:49:30 git branch;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C[?1h=\r add-wandb-name-and-tags\r\n convert-to-jax-array-in-iter\r\n dont-let-tf-see-gpu\r\n feat/explicit-image-dims\r\n fix-sampling\r\n main\r\n preprocess_video\r\n revised-dataloader\r\n* runner\r\n tmp\r\n",,terminal_output +7,7960,"TERMINAL",0,0,"\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +8,77190,"TERMINAL",0,0,"git status",,terminal_command +9,77237,"TERMINAL",0,0,"]633;E;2025-07-07 10:50:39 git status;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C",,terminal_output +10,77308,"TERMINAL",0,0,"On branch runner\r\nAll conflicts fixed but you are still merging.\r\n (use ""git commit"" to conclude merge)\r\n\r\nChanges to be committed:\r\n\tmodified: train_dynamics.py\r\n\tmodified: train_lam.py\r\n\tmodified: train_tokenizer.py\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: train_lam.py\r\n\tmodified: train_tokenizer.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdata_tfrecord_duplicated/\r\n\tdata_tfrecords/\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3309772.out\r\n\tslurm/\r\n\tutils/visualizer.py\r\n\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +11,85704,"TERMINAL",0,0,"git diff train_lam.py",,terminal_command +12,85768,"TERMINAL",0,0,"]633;E;2025-07-07 10:50:47 git diff train_lam.py;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C[?1h=\rdiff --git a/train_lam.py b/train_lam.py\r\nindex 858990e..540a464 100644\r\n--- a/train_lam.py\r\n+++ b/train_lam.py\r\n@@ -59,7 +59,6 @@ class Args:\r\n log_interval: int = 5\r\n log_image_interval: int = 250\r\n ckpt_dir: str = """"\r\n- tmp_ckpt_dir: str = ""/tmp/checkpoints/""\r\n log_checkpoint_interval: int = 10000\r\n \r\n \r\n\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +13,97852,"TERMINAL",0,0,"git commit -am ""removed tmp""",,terminal_command +14,97890,"TERMINAL",0,0,"]633;E;2025-07-07 10:51:00 git commit -am ""removed tmp"";f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C",,terminal_output +15,98308,"TERMINAL",0,0,"[runner 316eae6] removed tmp\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +16,109638,"TERMINAL",0,0,"git checkout revised-dataloader",,terminal_command +17,109689,"TERMINAL",0,0,"]633;E;2025-07-07 10:51:11 git checkout revised-dataloader;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C",,terminal_output +18,110012,"TERMINAL",0,0,"Switched to branch 
'revised-dataloader'\r\nYour branch is up to date with 'origin/revised-dataloader'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +19,110376,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"Switched from branch 'runner' to 'revised-dataloader'",Log,git_branch_checkout +20,110484,"extension-output-pdoom-org.crowd-code-#1-crowd-code",304,0,"10:51:12 AM [info] Branch checkout detected: runner -> revised-dataloader\n10:51:12 AM [info] Recording git checkout: Switched from branch 'runner' to 'revised-dataloader'\n10:51:12 AM [info] Resetting file cache due to branch checkout\n",Log,content +21,112104,"TERMINAL",0,0,"git status",,terminal_command +22,112207,"TERMINAL",0,0,"]633;E;2025-07-07 10:51:14 git status;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;COn branch revised-dataloader\r\nYour branch is up to date with 'origin/revised-dataloader'.\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdata_tfrecord_duplicated/\r\n\tdata_tfrecords/\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3309772.out\r\n\tslurm/\r\n\tutils/visualizer.py\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +23,128400,"TERMINAL",0,0,"git pull",,terminal_command +24,128448,"TERMINAL",0,0,"]633;E;2025-07-07 10:51:30 git pull;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C",,terminal_output +25,130043,"TERMINAL",0,0,"remote: Enumerating objects: 60, done.\r\nremote: Counting objects: 1% (1/54)\rremote: Counting objects: 3% (2/54)\rremote: Counting objects: 5% (3/54)\rremote: Counting objects: 7% (4/54)\rremote: Counting objects: 9% (5/54)\rremote: Counting objects: 11% (6/54)\rremote: Counting objects: 12% (7/54)\rremote: Counting objects: 14% (8/54)\rremote: Counting objects: 16% (9/54)\rremote: Counting objects: 18% (10/54)\rremote: Counting objects: 20% (11/54)\rremote: Counting objects: 22% (12/54)\rremote: Counting objects: 24% (13/54)\rremote: Counting objects: 25% (14/54)\rremote: Counting objects: 27% (15/54)\rremote: Counting objects: 29% (16/54)\rremote: Counting objects: 31% (17/54)\rremote: Counting objects: 33% (18/54)\rremote: Counting objects: 35% (19/54)\rremote: Counting objects: 37% (20/54)\rremote: Counting objects: 38% (21/54)\rremote: Counting objects: 40% (22/54)\rremote: Counting objects: 42% (23/54)\rremote: Counting objects: 44% (24/54)\rremote: Counting objects: 46% (25/54)\rremote: Counting objects: 48% (26/54)\rremote: Counting objects: 50% (27/54)\rremote: Counting objects: 51% (28/54)\rremote: Counting objects: 53% (29/54)\rremote: Counting objects: 55% (30/54)\rremote: Counting objects: 57% (31/54)\rremote: Counting objects: 59% (32/54)\rremote: Counting objects: 61% (33/54)\rremote: Counting objects: 62% (34/54)\rremote: Counting objects: 64% (35/54)\rremote: Counting objects: 66% (36/54)\rremote: Counting objects: 68% (37/54)\rremote: Counting objects: 70% (38/54)\rremote: Counting objects: 72% (39/54)\rremote: Counting objects: 74% (40/54)\rremote: Counting objects: 75% (41/54)\rremote: Counting objects: 77% (42/54)\rremote: Counting objects: 79% (43/54)\rremote: Counting objects: 81% (44/54)\rremote: Counting objects: 83% (45/54)\rremote: Counting objects: 85% (46/54)\rremote: Counting objects: 87% (47/54)\rremote: Counting objects: 88% (48/54)\rremote: Counting objects: 90% (49/54)\rremote: Counting objects: 92% (50/54)\rremote: Counting objects: 94% 
(51/54)\rremote: Counting objects: 96% (52/54)\rremote: Counting objects: 98% (53/54)\rremote: Counting objects: 100% (54/54)\rremote: Counting objects: 100% (54/54), done.\r\nremote: Compressing objects: 5% (1/20)\rremote: Compressing objects: 10% (2/20)\rremote: Compressing objects: 15% (3/20)\rremote: Compressing objects: 20% (4/20)\rremote: Compressing objects: 25% (5/20)\rremote: Compressing objects: 30% (6/20)\rremote: Compressing objects: 35% (7/20)\rremote: Compressing objects: 40% (8/20)\rremote: Compressing objects: 45% (9/20)\rremote: Compressing objects: 50% (10/20)\rremote: Compressing objects: 55% (11/20)\rremote: Compressing objects: 60% (12/20)\rremote: Compressing objects: 65% (13/20)\rremote: Compressing objects: 70% (14/20)\rremote: Compressing objects: 75% (15/20)\rremote: Compressing objects: 80% (16/20)\rremote: Compressing objects: 85% (17/20)\rremote: Compressing objects: 90% (18/20)\rremote: Compressing objects: 95% (19/20)\rremote: Compressing objects: 100% (20/20)\rremote: Compressing objects: 100% (20/20), done.\r\nremote: Total 36 (delta 26), reused 22 (delta 16), pack-reused 0 (from 0)\r\n",,terminal_output +26,130161,"TERMINAL",0,0,"Unpacking objects: 2% (1/36)\rUnpacking objects: 5% (2/36)\rUnpacking objects: 8% (3/36)\rUnpacking objects: 11% (4/36)\rUnpacking objects: 13% (5/36)\rUnpacking objects: 16% (6/36)\rUnpacking objects: 19% (7/36)\r",,terminal_output +27,130215,"TERMINAL",0,0,"Unpacking objects: 22% (8/36)\rUnpacking objects: 25% (9/36)\r",,terminal_output +28,130280,"TERMINAL",0,0,"Unpacking objects: 27% (10/36)\rUnpacking objects: 30% (11/36)\r",,terminal_output +29,130404,"TERMINAL",0,0,"Unpacking objects: 33% (12/36)\rUnpacking objects: 36% (13/36)\rUnpacking objects: 38% (14/36)\rUnpacking objects: 41% (15/36)\rUnpacking objects: 44% (16/36)\rUnpacking objects: 47% (17/36)\rUnpacking objects: 50% (18/36)\rUnpacking objects: 52% (19/36)\rUnpacking objects: 55% (20/36)\rUnpacking objects: 58% (21/36)\rUnpacking objects: 61% (22/36)\rUnpacking objects: 63% (23/36)\rUnpacking objects: 66% (24/36)\r",,terminal_output +30,130473,"TERMINAL",0,0,"Unpacking objects: 69% (25/36)\rUnpacking objects: 72% (26/36)\rUnpacking objects: 75% (27/36)\rUnpacking objects: 77% (28/36)\rUnpacking objects: 80% (29/36)\rUnpacking objects: 83% (30/36)\r",,terminal_output +31,130587,"TERMINAL",0,0,"Unpacking objects: 86% (31/36)\rUnpacking objects: 88% (32/36)\rUnpacking objects: 91% (33/36)\rUnpacking objects: 94% (34/36)\rUnpacking objects: 97% (35/36)\rUnpacking objects: 100% (36/36)\rUnpacking objects: 100% (36/36), 6.91 KiB | 14.00 KiB/s, done.\r\n",,terminal_output +32,130774,"TERMINAL",0,0,"From github.com:p-doom/jafar\r\n * [new branch] correct-batched-sampling -> origin/correct-batched-sampling\r\n 4ec9ebe..9edd0c1 dynamics-lam-co-training -> origin/dynamics-lam-co-training\r\n 32b3f04..3176718 feat/restore_train_state -> origin/feat/restore_train_state\r\n ae9451f..6e623c6 fix-sampling -> origin/fix-sampling\r\n c8dd7ea..9fb362e main -> origin/main\r\n * [new branch] make-warmup-default -> origin/make-warmup-default\r\n",,terminal_output +33,130837,"TERMINAL",0,0,"Already up to date.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +34,133144,"TERMINAL",0,0,"git status",,terminal_command +35,133186,"TERMINAL",0,0,"]633;E;2025-07-07 10:51:35 git status;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;COn branch revised-dataloader\r\nYour branch is up to date with 'origin/revised-dataloader'.\r\n\r\nUntracked files:\r\n (use ""git add ..."" to 
include in what will be committed)\r\n\tdata_tfrecord_duplicated/\r\n\tdata_tfrecords/\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3309772.out\r\n\tslurm/\r\n\tutils/visualizer.py\r\n\r\nnothing added to commit but untracked files present (use ""git add"" to track)\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +36,148140,"utils/dataloader.py",0,0,"import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntf.config.experimental.set_visible_devices([], ""GPU"")\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef _create_processed_dataset_from_file(file_path, image_h, image_w, image_c, seq_len, num_parallel_calls):\n """"""Creates a fully processed dataset from a single TFRecord file.""""""\n dataset = tf.data.TFRecordDataset([file_path])\n \n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n \n return dataset\n\n\ndef get_dataloader(\n tfrecord_paths: list[str],\n seq_len: int,\n 
global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 1000,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n cycle_length: int = 4,\n block_length: int = 1,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), f""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n def dataset_fn(file_path):\n return _create_processed_dataset_from_file(\n file_path, image_h, image_w, image_c, seq_len, num_parallel_calls\n )\n \n dataset = tf.data.Dataset.from_tensor_slices(tfrecord_paths)\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n \n dataset = dataset.interleave(\n dataset_fn,\n cycle_length=cycle_length,\n block_length=block_length,\n num_parallel_calls=num_parallel_calls,\n deterministic=False\n )\n \n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, seed=seed, reshuffle_each_iteration=True\n )\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n",python,tab +37,153941,"utils/dataloader.py",3046,0,"",python,selection_mouse +38,153957,"utils/dataloader.py",3045,0,"",python,selection_command +39,167620,"TERMINAL",0,0,"git checkout runner",,terminal_command +40,167670,"TERMINAL",0,0,"]633;E;2025-07-07 10:52:09 git checkout runner;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C",,terminal_output +41,167734,"TERMINAL",0,0,"Switched to branch 'runner'\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +42,169889,"utils/dataloader.py",0,0,"",python,tab +43,170107,"utils/dataloader.py",253,3946,"def _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c, seed):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n seed: The seed for the random number generator.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32, seed=seed\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n 
feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef _create_processed_dataset_from_file(file_path, image_h, image_w, image_c, seq_len, num_parallel_calls, seed):\n """"""Creates a fully processed dataset from a single TFRecord file.""""""\n dataset = tf.data.TFRecordDataset([file_path])\n \n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n # Filter out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n seed=seed,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n \n return dataset\n\n\ndef get_dataloader(\n tfrecord_paths: list[str],\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 10,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n cycle_length: int = 4,\n block_length: int = 1,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), f""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n def dataset_fn(file_path):\n return _create_processed_dataset_from_file(\n file_path, image_h, image_w, image_c, seq_len, num_parallel_calls, seed\n",python,content +44,170389,"utils/dataloader.py",0,0,"Switched from branch 'revised-dataloader' to 'runner'",python,git_branch_checkout +45,172501,"utils/dataloader.py",3153,0,"",python,selection_mouse +46,172512,"utils/dataloader.py",3152,0,"",python,selection_command +47,172733,"utils/dataloader.py",3152,1,"n",python,selection_mouse +48,172734,"utils/dataloader.py",3152,6,"n\n ",python,selection_mouse +49,172735,"utils/dataloader.py",3135,17,"sor)[0] >= seq_le",python,selection_mouse +50,172735,"utils/dataloader.py",3079,73,"des(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_le",python,selection_mouse +51,172751,"utils/dataloader.py",3153,0,"",python,selection_command +52,172824,"utils/dataloader.py",3027,126,"isodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len",python,selection_mouse +53,172825,"utils/dataloader.py",3021,132,"out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= 
seq_len",python,selection_mouse +54,172826,"utils/dataloader.py",3018,135,"er out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len",python,selection_mouse +55,172842,"utils/dataloader.py",3014,139,"Filter out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len",python,selection_mouse +56,172874,"utils/dataloader.py",3007,146,"\n # Filter out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len",python,selection_mouse +57,173126,"utils/dataloader.py",2932,221," dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n # Filter out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len",python,selection_mouse +58,177555,"utils/dataloader.py",3210,0,"",python,selection_mouse +59,177618,"utils/dataloader.py",3209,0,"",python,selection_command +60,177705,"utils/dataloader.py",3209,1,")",python,selection_mouse +61,177723,"utils/dataloader.py",3210,0,"",python,selection_command +62,177807,"utils/dataloader.py",3207,3,"es)",python,selection_mouse +63,177808,"utils/dataloader.py",3158,52,"\n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +64,177815,"utils/dataloader.py",3131,79,"_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +65,177818,"utils/dataloader.py",3127,83,"sode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +66,177843,"utils/dataloader.py",3124,86,"episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +67,177843,"utils/dataloader.py",3123,87,"(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +68,177910,"utils/dataloader.py",3121,89,"pe(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +69,177912,"utils/dataloader.py",3119,91,"hape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +70,177920,"utils/dataloader.py",3116,94,"f.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +71,177943,"utils/dataloader.py",3114,96," tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +72,178059,"utils/dataloader.py",3112,98,"rn tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +73,178060,"utils/dataloader.py",3111,99,"urn tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +74,178060,"utils/dataloader.py",3109,101,"eturn tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +75,178062,"utils/dataloader.py",3108,102,"return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +76,178062,"utils/dataloader.py",3060,150," filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +77,178083,"utils/dataloader.py",3059,151,"f filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] 
>= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +78,178145,"utils/dataloader.py",3058,152,"ef filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +79,178216,"utils/dataloader.py",3057,153,"def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +80,178360,"utils/dataloader.py",3056,154," def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +81,178361,"utils/dataloader.py",3055,155," def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +82,178504,"utils/dataloader.py",3010,200," # Filter out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +83,178505,"utils/dataloader.py",3055,155," def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +84,178505,"utils/dataloader.py",3009,201," # Filter out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +85,178827,"utils/dataloader.py",3008,202," # Filter out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,selection_mouse +86,189453,"TERMINAL",0,0,"git checkout revised-dataloader",,terminal_command +87,189506,"TERMINAL",0,0,"]633;E;2025-07-07 10:52:31 git checkout revised-dataloader;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;CSwitched to branch 'revised-dataloader'\r\nYour branch is up to date with 'origin/revised-dataloader'.\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +88,190380,"",0,0,"Switched from branch 'runner' to 'revised-dataloader'",,git_branch_checkout +89,190846,"utils/dataloader.py",0,0,"import functools\nimport jax\n\nimport tensorflow as tf\n\n# reserve GPU memory for JAX only if tensorflow is built with GPU support\ntf.config.experimental.set_visible_devices([], ""GPU"")\n\n\n# --- TensorFlow function for processing: slicing, normalization ---\ndef _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c, seed):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n seed: The seed for the random number generator.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = 
tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32, seed=seed\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef _create_processed_dataset_from_file(file_path, image_h, image_w, image_c, seq_len, num_parallel_calls, seed):\n """"""Creates a fully processed dataset from a single TFRecord file.""""""\n dataset = tf.data.TFRecordDataset([file_path])\n \n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n # Filter out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n seed=seed,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n \n return dataset\n\n\ndef get_dataloader(\n tfrecord_paths: list[str],\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 10,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n cycle_length: int = 4,\n block_length: int = 1,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), f""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n def dataset_fn(file_path):\n return _create_processed_dataset_from_file(\n file_path, image_h, image_w, image_c, seq_len, num_parallel_calls, seed\n )\n \n dataset = tf.data.Dataset.from_tensor_slices(tfrecord_paths)\n dataset = dataset.shard(num_shards=num_processes, index=process_id)\n \n dataset = dataset.interleave(\n dataset_fn,\n cycle_length=cycle_length,\n block_length=block_length,\n num_parallel_calls=num_parallel_calls,\n deterministic=False\n )\n \n if shuffle_buffer_size > 0:\n dataset = dataset.shuffle(\n buffer_size=shuffle_buffer_size, 
seed=seed, reshuffle_each_iteration=True\n )\n\n dataset = dataset.repeat(None)\n dataset = dataset.batch(per_process_batch_size, drop_remainder=True)\n dataset = dataset.prefetch(tf.data.AUTOTUNE)\n\n return dataset.as_numpy_iterator()\n",python,tab +90,191033,"utils/dataloader.py",253,4252,"def _tf_process_episode(episode_tensor, seq_len, image_h, image_w, image_c):\n """"""\n Processes a raw episode tensor in TensorFlow.\n Takes a full episode, extracts a random sequence, and normalizes it.\n Args:\n episode_tensor: A TensorFlow tensor representing a full video episode.\n Expected shape: (dynamic_length, image_h, image_w, image_c)\n Expected dtype: e.g., tf.uint8 (raw pixel values)\n seq_len: The desired length of the sub-sequence to extract.\n image_h: The height of each frame.\n image_w: The width of each frame.\n image_c: The number of channels in each frame.\n Returns:\n A TensorFlow tensor representing the processed video sequence.\n Shape: (seq_len, image_h, image_w, image_c)\n Dtype: tf.float32 (normalized pixel values)\n """"""\n current_episode_len = tf.shape(episode_tensor)[0]\n\n max_start_idx = current_episode_len - seq_len\n\n start_idx = tf.random.uniform(\n shape=(), minval=0, maxval=max_start_idx + 1, dtype=tf.int32\n )\n\n seq = episode_tensor[start_idx : start_idx + seq_len]\n\n seq = tf.cast(seq, tf.float32) / 255.0\n\n # Ensure the final shape is statically known for batching.\n # tf.reshape is robust, but tf.ensure_shape or set_shape can also be used if confident.\n processed_sequence = tf.reshape(seq, [seq_len, image_h, image_w, image_c])\n\n return processed_sequence\n\n\ndef _parse_tfrecord_fn(example_proto, image_h, image_w, image_c):\n feature_description = {\n ""height"": tf.io.FixedLenFeature([], tf.int64),\n ""width"": tf.io.FixedLenFeature([], tf.int64),\n ""channels"": tf.io.FixedLenFeature([], tf.int64),\n ""sequence_length"": tf.io.FixedLenFeature([], tf.int64),\n ""raw_video"": tf.io.FixedLenFeature([], tf.string),\n }\n example = tf.io.parse_single_example(example_proto, feature_description)\n\n video_shape = (example[""sequence_length""], image_h, image_w, image_c)\n\n episode_tensor = tf.io.decode_raw(example[""raw_video""], out_type=tf.uint8)\n episode_tensor = tf.reshape(episode_tensor, video_shape)\n\n episode_tensor = tf.ensure_shape(episode_tensor, [None, image_h, image_w, image_c])\n return episode_tensor\n\n\ndef _create_processed_dataset_from_file(file_path, image_h, image_w, image_c, seq_len, num_parallel_calls):\n """"""Creates a fully processed dataset from a single TFRecord file.""""""\n dataset = tf.data.TFRecordDataset([file_path])\n \n parse_fn = functools.partial(\n _parse_tfrecord_fn, image_h=image_h, image_w=image_w, image_c=image_c\n )\n dataset = dataset.map(parse_fn, num_parallel_calls=num_parallel_calls)\n\n tf_process_fn = functools.partial(\n _tf_process_episode,\n seq_len=seq_len,\n image_h=image_h,\n image_w=image_w,\n image_c=image_c,\n )\n dataset = dataset.map(tf_process_fn, num_parallel_calls=num_parallel_calls)\n \n return dataset\n\n\ndef get_dataloader(\n tfrecord_paths: list[str],\n seq_len: int,\n global_batch_size: int,\n image_h: int,\n image_w: int,\n image_c: int,\n shuffle_buffer_size: int = 1000,\n num_parallel_calls: int = tf.data.AUTOTUNE,\n seed: int = 42,\n cycle_length: int = 4,\n block_length: int = 1,\n):\n """"""\n Creates a tf.data.Dataset pipeline from TFRecord files.\n """"""\n if not tfrecord_paths:\n raise ValueError(""tfrecord_paths list cannot be empty."")\n\n process_id = jax.process_index()\n 
num_processes = jax.process_count()\n\n assert (\n global_batch_size % num_processes == 0\n ), f""Global batch size {global_batch_size} \\n must be divisible by the number of JAX processes {num_processes} for proper sharding.""\n per_process_batch_size = global_batch_size // num_processes\n\n def dataset_fn(file_path):\n return _create_processed_dataset_from_file(\n file_path, image_h, image_w, image_c, seq_len, num_parallel_calls\n",python,content +91,192106,"utils/dataloader.py",2928,0,"",python,selection_mouse +92,193410,"utils/dataloader.py",2928,0,"\n",python,content +93,193787,"utils/dataloader.py",2929,0," # Filter out episodes that are too short\n def filter_short_episodes(episode_tensor):\n return tf.shape(episode_tensor)[0] >= seq_len\n \n dataset = dataset.filter(filter_short_episodes)",python,content +94,194713,"utils/dataloader.py",3131,0,"\n ",python,content +95,195064,"utils/dataloader.py",3132,4,"",python,content +96,195607,"utils/dataloader.py",3080,0,"",python,selection_command +97,195836,"utils/dataloader.py",3075,0,"",python,selection_command +98,195995,"utils/dataloader.py",3021,0,"",python,selection_command +99,196145,"utils/dataloader.py",2974,0,"",python,selection_command +100,196311,"utils/dataloader.py",2929,0,"",python,selection_command +101,202514,"TERMINAL",0,0,"git status",,terminal_command +102,202573,"TERMINAL",0,0,"]633;E;2025-07-07 10:52:44 git status;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;COn branch revised-dataloader\r\nYour branch is up to date with 'origin/revised-dataloader'.\r\n\r\nChanges not staged for commit:\r\n (use ""git add ..."" to update what will be committed)\r\n (use ""git restore ..."" to discard changes in working directory)\r\n\tmodified: utils/dataloader.py\r\n\r\nUntracked files:\r\n (use ""git add ..."" to include in what will be committed)\r\n\tdata_tfrecord_duplicated/\r\n\tdata_tfrecords/\r\n\tlogs/\r\n\tread_tf_record.py\r\n\trequirements-franz.txt\r\n\tscripts_cremers/\r\n\tscripts_horeka/\r\n\tslurm-3309772.out\r\n\tslurm/\r\n\tutils/visualizer.py\r\n\r\nno changes added to commit (use ""git add"" and/or ""git commit -a"")\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +103,206815,"TERMINAL",0,0,"git add utils/dataloader.py",,terminal_command +104,206843,"TERMINAL",0,0,"]633;E;2025-07-07 10:52:49 git add utils/dataloader.py ;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +105,217694,"TERMINAL",0,0,"git commit -m ""added filter for too short episodes in dataloader""",,terminal_command +106,217741,"TERMINAL",0,0,"]633;E;2025-07-07 10:52:59 git commit -m ""added filter for too short episodes in dataloader"";f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C",,terminal_output +107,217921,"TERMINAL",0,0,"[revised-dataloader 1e306ff] added filter for too short episodes in dataloader\r\n 1 file changed, 6 insertions(+)\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +108,219165,"TERMINAL",0,0,"git push",,terminal_command +109,219215,"TERMINAL",0,0,"]633;E;2025-07-07 10:53:01 git push;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C",,terminal_output +110,220680,"TERMINAL",0,0,"Enumerating objects: 7, done.\r\nCounting objects: 14% (1/7)\rCounting objects: 28% (2/7)\rCounting objects: 42% (3/7)\rCounting objects: 57% (4/7)\rCounting objects: 71% (5/7)\rCounting objects: 85% (6/7)\rCounting objects: 100% (7/7)\rCounting objects: 100% (7/7), done.\r\nDelta compression using up to 152 threads\r\nCompressing objects: 25% (1/4)\rCompressing objects: 
50% (2/4)\rCompressing objects: 75% (3/4)\rCompressing objects: 100% (4/4)\rCompressing objects: 100% (4/4), done.\r\nWriting objects: 25% (1/4)\rWriting objects: 50% (2/4)\rWriting objects: 75% (3/4)\rWriting objects: 100% (4/4)\rWriting objects: 100% (4/4), 489 bytes | 244.00 KiB/s, done.\r\nTotal 4 (delta 3), reused 0 (delta 0), pack-reused 0\r\nremote: Resolving deltas: 0% (0/3)\rremote: Resolving deltas: 33% (1/3)\rremote: Resolving deltas: 66% (2/3)\rremote: Resolving deltas: 100% (3/3)\rremote: Resolving deltas: 100% (3/3), completed with 3 local objects.\r\n",,terminal_output +111,220984,"TERMINAL",0,0,"To github.com:p-doom/jafar.git\r\n 1eac634..1e306ff revised-dataloader -> revised-dataloader\r\n]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +112,704052,"TERMINAL",0,0,"queue",,terminal_command +113,704136,"TERMINAL",0,0,"]633;E;2025-07-07 11:01:06 queue;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Jul 7 11:01:06 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3320180 accelerat train_la tum_cte0 CG\t0:00\t 1 hkn0405",,terminal_output +114,705241,"TERMINAL",0,0,"7\t ",,terminal_output +115,706271,"TERMINAL",0,0,"8\t ",,terminal_output +116,707322,"TERMINAL",0,0,"9\t ",,terminal_output +117,708343,"TERMINAL",0,0,"10\t ",,terminal_output +118,708459,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:~/Projects/jafar]633;D;0",,terminal_output +119,722511,"TERMINAL",0,0,"cd $ws_dir",,terminal_command +120,724566,"TERMINAL",0,0,"cd ..",,terminal_command +121,726017,"TERMINAL",0,0,"cd logs/",,terminal_command +122,726381,"TERMINAL",0,0,"ls",,terminal_command +123,726418,"TERMINAL",0,0,"]633;E;2025-07-07 11:01:28 ls;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C3306965 logs_alfred logs_franz logs_mihir\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs]633;D;0",,terminal_output +124,728472,"TERMINAL",0,0,"cd logs_mihir/",,terminal_command +125,728753,"TERMINAL",0,0,"ls",,terminal_command +126,728804,"TERMINAL",0,0,"]633;E;2025-07-07 11:01:30 ls;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C",,terminal_output +127,729035,"TERMINAL",0,0,"train_lam_action_space_scaling_10_3320179.log train_lam_action_space_scaling_6_3318549.log train_lam_model_size_scaling_38M_3317231.log train_tokenizer_batch_size_scaling_8_node_3320176.log train_tokenizer_model_size_scaling_200M_3313563.log train_tokenizer_model_size_scaling_37M_3316022.log train_tokenizer_model_size_scaling_80M_3313564.log\r\ntrain_lam_action_space_scaling_10_3321529.log train_lam_action_space_scaling_6_3320178.log train_tokenizer_batch_size_scaling_16_node_3321526.log train_tokenizer_batch_size_scaling_8_node_3321525.log train_tokenizer_model_size_scaling_200M_3316020.log train_tokenizer_model_size_scaling_37M_3317232.log train_tokenizer_model_size_scaling_80M_3316026.log\r\ntrain_lam_action_space_scaling_12_3318546.log train_lam_action_space_scaling_6_3321528.log train_tokenizer_batch_size_scaling_1_node_3318551.log train_tokenizer_minecraft_overfit_sample_3309656.log train_tokenizer_model_size_scaling_227M_3317234.log train_tokenizer_model_size_scaling_37M_3317239.log\r\ntrain_lam_action_space_scaling_12_3320177.log train_lam_action_space_scaling_8_3318550.log train_tokenizer_batch_size_scaling_2_node_3318552.log train_tokenizer_model_size_scaling_127M_3317233.log train_tokenizer_model_size_scaling_227M_3318555.log train_tokenizer_model_size_scaling_37M_3318556.log\r\ntrain_lam_action_space_scaling_12_3321527.log 
train_lam_minecraft_overfit_sample_3309655.log train_tokenizer_batch_size_scaling_4_node_3318553.log train_tokenizer_model_size_scaling_127M_3318554.log train_tokenizer_model_size_scaling_227M_3320173.log train_tokenizer_model_size_scaling_74M_3318557.log\r\ntrain_lam_action_space_scaling_20_3318547.log train_lam_model_size_scaling_38M_3317098.log train_tokenizer_batch_size_scaling_4_node_3320175.log train_tokenizer_model_size_scaling_140M_3313562.log train_tokenizer_model_size_scaling_227M_3321523.log train_tokenizer_model_size_scaling_74M_3320174.log\r\ntrain_lam_action_space_scaling_50_3320180.log train_lam_model_size_scaling_38M_3317115.log train_tokenizer_batch_size_scaling_4_node_3321524.log train_tokenizer_model_size_scaling_140M_3316019.log train_tokenizer_model_size_scaling_37M_3313565.log train_tokenizer_model_size_scaling_74M_3321522.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir]633;D;0",,terminal_output +128,756754,"TERMINAL",0,0,"echo $(pwd)/train_tokenizer_batch_size_scaling_16_node_3321526.log",,terminal_command +129,756798,"TERMINAL",0,0,"]633;E;2025-07-07 11:01:58 echo $(pwd)/train_tokenizer_batch_size_scaling_16_node_3321526.log;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir/train_tokenizer_batch_size_scaling_16_node_3321526.log\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir]633;D;0",,terminal_output +130,1152416,"utils/dataloader.py",0,0,"",python,tab +131,1177005,"TERMINAL",0,0,"git checkout runner",,terminal_command +132,1177092,"TERMINAL",0,0,"]633;E;2025-07-07 11:08:59 git checkout runner;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;Cfatal: not a git repository (or any parent up to mount point /hkfs)\r\nStopping at filesystem boundary (GIT_DISCOVERY_ACROSS_FILESYSTEM not set).\r\n]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir]633;D;128",,terminal_output +133,1190795,"utils/dataloader.py",0,0,"",python,tab +134,1384534,"utils/dataloader.py",4648,0,"",python,selection_mouse +135,1384551,"utils/dataloader.py",4647,0,"",python,selection_command +136,1388072,"TERMINAL",0,0,"queue",,terminal_command +137,1388130,"TERMINAL",0,0,"]633;E;2025-07-07 11:12:30 queue;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C[?1049h(B[?7hEvery 1.0s: squeue --mehkn1991.localdomain: Mon Jul 7 11:12:30 2025JOBID PARTITION NAME USER ST\tTIME NODES NODELIST(REASON)3320180 accelerat train_la tum_cte0 CG\t0:00\t 1 hkn0405",,terminal_output +138,1389126,"TERMINAL",0,0,"1\t ",,terminal_output +139,1390163,"TERMINAL",0,0,"[?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir]633;D;0",,terminal_output +140,1393075,"TERMINAL",0,0,"idling",,terminal_command +141,1393136,"TERMINAL",0,0,"]633;E;2025-07-07 11:12:35 idling;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C[?1049h(B[?7hEvery 1.0s: sinfo_t_idlehkn1991.localdomain: Mon Jul 7 11:12:35 2025Partition dev_cpuonly: 11 nodes idle\rPartition cpuonly: 325 nodes idle\rPartition dev_accelerated:\t 3 nodes idle\rPartition accelerated: 39 nodes idle\rPartition dev_accelerated-h100 :\t 0 nodes idle\rPartition accelerated-h100:\t 1 nodes idle\rPartition large:\t 7 nodes idle",,terminal_output +142,1394285,"TERMINAL",0,0,"6\t [?1049l\r[?1l>]0;tum_cte0515@hkn1991:/hkfs/work/workspace/scratch/tum_ind3695-jafa_ws_shared/logs/logs_mihir]633;D;0",,terminal_output 
+143,1397165,"TERMINAL",0,0,"cd",,terminal_command +144,1397180,"TERMINAL",0,0,"]633;E;2025-07-07 11:12:39 cd;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;C]0;tum_cte0515@hkn1991:~]633;D;0]633;P;Cwd=/home/hk-project-p0023960/tum_cte0515",,terminal_output +145,1404501,"TERMINAL",0,0,"cd Projects/jafar",,terminal_command +146,1445110,"TERMINAL",0,0,"salloc --time=10:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5",,terminal_command +147,1445175,"TERMINAL",0,0,"]633;E;2025-07-07 11:13:27 salloc --time=10:00:00 --partition=accelerated --nodes=1 --ntasks-per-node=1 --gres=gpu:1 --cpus-per-task=5 ;f27e7af0-1f60-464b-afac-68c0fe98f46d]633;Csalloc: Granted job allocation 3326035\r\n",,terminal_output +148,1445309,"TERMINAL",0,0,"salloc: Waiting for resource configuration\r\n",,terminal_output +149,1472365,"TERMINAL",0,0,"salloc: Nodes hkn0734 are ready for job\r\n",,terminal_output +150,1473217,"TERMINAL",0,0,"]0;tum_cte0515@hkn0734:~/Projects/jafar[?2004h[tum_cte0515@hkn0734 jafar]$ ",,terminal_output +151,1699781,"TERMINAL",0,0,"s",,terminal_output +152,1699852,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +153,1699960,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +154,1700026,"TERMINAL",0,0,"[?25lr[?25h",,terminal_output +155,1700174,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +156,1700311,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +157,1700451,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +158,1700517,"TERMINAL",0,0,"[?25l.[?25h",,terminal_output +159,1700959,"TERMINAL",0,0,"[?25lv[?25h[?25le[?25h",,terminal_output +160,1701278,"TERMINAL",0,0,"nv/",,terminal_output +161,1701883,"TERMINAL",0,0,"[?25lb[?25h",,terminal_output +162,1702072,"TERMINAL",0,0,"in/",,terminal_output +163,1702317,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +164,1702396,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +165,1702648,"TERMINAL",0,0,"tivate",,terminal_output +166,1702996,"TERMINAL",0,0,"\r\n[?2004l\r]0;tum_cte0515@hkn0734:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0734 jafar]$ ",,terminal_output +167,1705224,"TERMINAL",0,0,"[?25lgi[?25h[?25li[?25h",,terminal_output +168,1705388,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +169,1705441,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +170,1705641,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +171,1707051,"TERMINAL",0,0,"[?25lc[?25h",,terminal_output +172,1707116,"TERMINAL",0,0,"[?25lh[?25h",,terminal_output +173,1707261,"TERMINAL",0,0,"[?25le[?25h",,terminal_output +174,1707413,"TERMINAL",0,0,"[?25lc[?25h[?25lk[?25h",,terminal_output +175,1707656,"TERMINAL",0,0,"[?25lo[?25h",,terminal_output +176,1707736,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +177,1707856,"TERMINAL",0,0,"[?25lt[?25h",,terminal_output +178,1707922,"TERMINAL",0,0,"[?25l [?25h",,terminal_output +179,1708772,"TERMINAL",0,0,"[?25lf[?25h",,terminal_output +180,1708895,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +181,1709131,"TERMINAL",0,0,"[?25lx[?25h",,terminal_output +182,1709618,"TERMINAL",0,0,"[?25l-[?25h",,terminal_output +183,1709774,"TERMINAL",0,0,"[?25ls[?25h",,terminal_output +184,1709934,"TERMINAL",0,0,"[?25la[?25h",,terminal_output +185,1710041,"TERMINAL",0,0,"[?25lm[?25h",,terminal_output +186,1710274,"TERMINAL",0,0,"[?25lp[?25h[?25ll[?25h",,terminal_output +187,1710539,"TERMINAL",0,0,"[?25li[?25h[?25ln[?25h",,terminal_output +188,1710608,"TERMINAL",0,0,"[?25lg[?25h",,terminal_output +189,1710830,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +190,1711585,"TERMINAL",0,0,"Switched to branch 'fix-sampling'\r\nYour branch is behind 
'origin/fix-sampling' by 2 commits, and can be fast-forwarded.\r\n (use ""git pull"" to update your local branch)\r\n]0;tum_cte0515@hkn0734:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0734 jafar]$ ",,terminal_output +191,1712699,"TERMINAL",0,0,"g",,terminal_output +192,1712812,"TERMINAL",0,0,"[?25li[?25h",,terminal_output +193,1712904,"TERMINAL",0,0,"[?25lt[?25h[?25l [?25h",,terminal_output +194,1713002,"TERMINAL",0,0,"[?25lp[?25h",,terminal_output +195,1713211,"TERMINAL",0,0,"[?25lu[?25h",,terminal_output +196,1713391,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +197,1713523,"TERMINAL",0,0,"[?25ll[?25h",,terminal_output +198,1713632,"TERMINAL",0,0,"\r\n[?2004l\r",,terminal_output +199,1715291,"TERMINAL",0,0,"Updating ae9451f..6e623c6\r\nFast-forward\r\n genie.py | 16 ++++++++--------\r\n 1 file changed, 8 insertions(+), 8 deletions(-)\r\n]0;tum_cte0515@hkn0734:~/Projects/jafar[?2004h(jafar) [tum_cte0515@hkn0734 jafar]$ ",,terminal_output +200,1715555,"",0,0,"Switched from branch 'revised-dataloader' to 'fix-sampling'",,git_branch_checkout +201,1722688,"genie.py",0,0,"from typing import Dict, Any\n\nimport optax\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nfrom jax import NamedSharding\nfrom flax.training.train_state import TrainState\nfrom flax.training import orbax_utils\nfrom orbax.checkpoint import PyTreeCheckpointer\n\nfrom models.dynamics import DynamicsMaskGIT\nfrom models.lam import LatentActionModel\nfrom models.tokenizer import TokenizerVQVAE\n\n\nclass Genie(nn.Module):\n """"""Genie model""""""\n\n # --- Tokenizer ---\n in_dim: int\n tokenizer_dim: int\n latent_patch_dim: int\n num_patch_latents: int\n patch_size: int\n tokenizer_num_blocks: int\n tokenizer_num_heads: int\n # --- LAM ---\n lam_dim: int\n latent_action_dim: int\n num_latent_actions: int\n lam_patch_size: int\n lam_num_blocks: int\n lam_num_heads: int\n # --- Dynamics ---\n dyna_dim: int\n dyna_num_blocks: int\n dyna_num_heads: int\n dropout: float = 0.0\n mask_limit: float = 0.0\n\n def setup(self):\n self.tokenizer = TokenizerVQVAE(\n in_dim=self.in_dim,\n model_dim=self.tokenizer_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_patch_latents,\n patch_size=self.patch_size,\n num_blocks=self.tokenizer_num_blocks,\n num_heads=self.tokenizer_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.lam = LatentActionModel(\n in_dim=self.in_dim,\n model_dim=self.lam_dim,\n latent_dim=self.latent_patch_dim,\n num_latents=self.num_latent_actions,\n patch_size=self.lam_patch_size,\n num_blocks=self.lam_num_blocks,\n num_heads=self.lam_num_heads,\n dropout=0.0,\n codebook_dropout=0.0,\n )\n self.dynamics = DynamicsMaskGIT(\n model_dim=self.dyna_dim,\n num_latents=self.num_patch_latents,\n num_blocks=self.dyna_num_blocks,\n num_heads=self.dyna_num_heads,\n dropout=self.dropout,\n mask_limit=self.mask_limit,\n )\n\n def __call__(self, batch: Dict[str, Any], training: bool = True) -> Dict[str, Any]:\n tokenizer_outputs = self.tokenizer.vq_encode(batch[""videos""], training=False)\n lam_outputs = self.lam.vq_encode(batch[""videos""], training=False)\n outputs = dict(\n video_tokens=jax.lax.stop_gradient(tokenizer_outputs[""indices""]),\n latent_actions=jax.lax.stop_gradient(lam_outputs[""z_q""]),\n )\n outputs[""mask_rng""] = batch[""mask_rng""]\n dyna_outputs = self.dynamics(outputs, training)\n outputs.update(dyna_outputs)\n mle_indices = jnp.argmax(outputs[""token_logits""], axis=-1)\n outputs[""recon""] = self.tokenizer.decode(\n mle_indices, batch[""videos""].shape[2:4]\n )\n 
return outputs\n\n @nn.compact\n def sample(\n self,\n batch: Dict[str, Any],\n seq_len: int,\n steps: int = 25,\n temperature: float = 1,\n sample_argmax: bool = False,\n ) -> Any:\n """"""\n Autoregressively samples up to `seq_len` future frames, following Figure 8 of the paper.\n\n - Input frames are tokenized once.\n - Future frames are generated autoregressively in token space.\n - All frames are detokenized in a single pass.\n\n Note:\n - For interactive or step-wise sampling, detokenization should occur after each action.\n - To maintain consistent tensor shapes across timesteps, all current and future frames are decoded at every step.\n - Temporal causal structure is preserved by \n a) reapplying the mask before each decoding step.\n b) applying a temporal causal mask within each ST-transformer block.\n\n Dimension keys:\n B: batch size \n T: number of input (conditioning) frames \n N: patches per frame \n S: sequence length \n A: action space \n D: model latent dimension\n """"""\n # --- Encode videos and actions ---\n tokenizer_out = self.tokenizer.vq_encode(batch[""videos""], training=False)\n token_idxs = tokenizer_out[""indices""] # (B, T, N)\n B, T, N = token_idxs.shape\n pad_shape = (B, seq_len - T, N)\n pad = jnp.zeros(pad_shape, dtype=token_idxs.dtype)\n token_idxs = jnp.concatenate([token_idxs, pad], axis=1) # (B, S, N)\n action_tokens = self.lam.vq.get_codes(batch[""latent_actions""])\n\n for step_t in range(T, seq_len):\n print(f""Sampling Frame {step_t}..."")\n # mask current and future frames (i.e., t >= step_t)\n mask = jnp.arange(seq_len) >= step_t # (S,)\n mask = jnp.broadcast_to(mask[None, :, None], (B, seq_len, N)) # (B, S, N)\n mask = mask.astype(bool)\n token_idxs *= ~mask\n\n # --- Initialize MaskGIT ---\n init_carry = (\n batch[""rng""],\n token_idxs,\n mask,\n action_tokens,\n )\n\n MaskGITLoop = nn.scan(\n MaskGITStep,\n variable_broadcast=""params"",\n split_rngs={""params"": False},\n in_axes=0,\n out_axes=0,\n length=steps,\n )\n\n # --- Run MaskGIT loop ---\n loop_fn = MaskGITLoop(\n dynamics=self.dynamics,\n tokenizer=self.tokenizer,\n temperature=temperature,\n sample_argmax=sample_argmax,\n steps=steps,\n )\n final_carry, _ = loop_fn(init_carry, jnp.arange(steps))\n token_idxs = final_carry[1]\n\n final_frames = self.tokenizer.decode(\n token_idxs,\n video_hw=batch[""videos""].shape[2:4],\n )\n return final_frames \n\n def vq_encode(self, batch, training) -> Dict[str, Any]:\n # --- Preprocess videos ---\n lam_output = self.lam.vq_encode(batch[""videos""], training=training)\n return lam_output[""indices""]\n\n\nclass MaskGITStep(nn.Module):\n dynamics: nn.Module\n tokenizer: nn.Module\n temperature: float\n sample_argmax: bool\n steps: int\n\n @nn.compact\n def __call__(self, carry, x):\n rng, token_idxs, mask, action_tokens = carry\n step = x\n N = token_idxs.shape[2]\n\n # --- Construct + encode video ---\n vid_embed = self.dynamics.patch_embed(token_idxs) # (B, S, N, D)\n mask_token = self.dynamics.mask_token # (1, 1, 1, D,)\n mask_expanded = mask[..., None] # (B, S, N, 1) \n vid_embed = jnp.where(mask_expanded, mask_token, vid_embed)\n\n # --- Predict transition ---\n act_embed = self.dynamics.action_up(action_tokens)\n vid_embed += jnp.pad(act_embed, ((0, 0), (1, 0), (0, 0), (0, 0)))\n unmasked_ratio = jnp.cos(jnp.pi * (step + 1) / (self.steps * 2))\n step_temp = self.temperature * (1.0 - unmasked_ratio)\n final_logits = self.dynamics.dynamics(vid_embed) / step_temp\n\n # --- Sample new tokens for final frame ---\n if self.sample_argmax:\n 
sampled_token_idxs = jnp.argmax(final_logits, axis=-1)\n else:\n rng, _rng = jax.random.split(rng)\n sampled_token_idxs = jnp.where(\n step == self.steps - 1,\n jnp.argmax(final_logits, axis=-1),\n jax.random.categorical(_rng, final_logits),\n )\n gather_fn = jax.vmap(jax.vmap(jax.vmap(lambda x, y: x[y])))\n final_token_probs = gather_fn(jax.nn.softmax(final_logits), sampled_token_idxs)\n final_token_probs += ~mask\n # Update masked tokens only\n token_idxs = jnp.where(mask, sampled_token_idxs, token_idxs)\n\n # --- Update mask ---\n num_unmasked_tokens = jnp.round(N * (1.0 - unmasked_ratio)).astype(int)\n idx_mask = jnp.arange(final_token_probs.shape[-1]) > num_unmasked_tokens\n sorted_idxs = jnp.argsort(final_token_probs, axis=-1, descending=True)\n mask_update_fn = jax.vmap(lambda msk, ids: msk.at[ids].set(idx_mask))\n new_mask = mask_update_fn(mask, sorted_idxs)\n\n new_carry = (rng, token_idxs, new_mask, action_tokens)\n return new_carry, None\n\ndef restore_genie_components(\n train_state: TrainState,\n sharding: NamedSharding,\n inputs: Dict[str, jax.Array],\n rng: jax.Array,\n args,\n):\n """"""Restore pre-trained Genie components""""""\n rng, _rng = jax.random.split(rng)\n\n dummy_tokenizer = TokenizerVQVAE(\n in_dim=args.image_channels,\n model_dim=args.tokenizer_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n num_blocks=args.tokenizer_num_blocks,\n num_heads=args.tokenizer_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n dummy_lam = LatentActionModel(\n in_dim=args.image_channels,\n model_dim=args.lam_dim,\n latent_dim=args.latent_patch_dim,\n num_latents=args.num_latent_actions,\n patch_size=args.lam_patch_size,\n num_blocks=args.lam_num_blocks,\n num_heads=args.lam_num_heads,\n dropout=args.dropout,\n codebook_dropout=args.dropout,\n )\n tokenizer_init_params = dummy_tokenizer.init(_rng, inputs)\n lam_init_params = dummy_lam.init(_rng, inputs)\n\n # dummy values since we only use tx to initialize the dummy train states\n dummy_tx = optax.adamw(\n learning_rate=optax.constant_schedule(args.max_lr),\n b1=0.9,\n b2=0.9,\n weight_decay=1e-4,\n )\n\n dummy_tokenizer_train_state = TrainState.create(\n apply_fn=dummy_tokenizer.apply, params=tokenizer_init_params, tx=dummy_tx\n )\n dummy_lam_train_state = TrainState.create(\n apply_fn=dummy_lam.apply, params=lam_init_params, tx=dummy_tx\n )\n\n def create_abstract_sharded_pytree(pytree_template, sharding_spec):\n """"""Replaces arrays in a pytree with ShapeDtypeStructs having the given sharding.""""""\n\n def map_fn(leaf_template):\n if hasattr(leaf_template, ""shape"") and hasattr(leaf_template, ""dtype""):\n return jax.ShapeDtypeStruct(\n leaf_template.shape, leaf_template.dtype, sharding=sharding_spec\n )\n return leaf_template\n\n return jax.tree_util.tree_map(map_fn, pytree_template)\n\n abstract_sharded_tokenizer_state = create_abstract_sharded_pytree(\n dummy_tokenizer_train_state, sharding\n )\n abstract_sharded_lam_state = create_abstract_sharded_pytree(\n dummy_lam_train_state, sharding\n )\n\n tokenizer_restore_target = {""model"": abstract_sharded_tokenizer_state}\n lam_restore_target = {""model"": abstract_sharded_lam_state}\n\n tokenizer_restore_args = orbax_utils.restore_args_from_target(\n tokenizer_restore_target\n )\n lam_restore_args = orbax_utils.restore_args_from_target(lam_restore_target)\n\n restored_tokenizer_params = (\n PyTreeCheckpointer()\n .restore(\n args.tokenizer_checkpoint,\n item=tokenizer_restore_target,\n 
restore_args=tokenizer_restore_args,\n )[""model""]\n .params[""params""]\n )\n restored_lam_params = (\n PyTreeCheckpointer()\n .restore(\n args.lam_checkpoint, item=lam_restore_target, restore_args=lam_restore_args\n )[""model""]\n .params[""params""]\n )\n # Genie does not initialize all LAM modules, thus we omit those extra modules during restoration\n # (f.srambical) FIXME: Currently, this is a small HBM memory crunch since the LAM's decoder is loaded into HBM and immediately discarded.\n # A workaround would be to restore to host memory first, and only move the weights to HBM after pruning the decoder\n restored_lam_params = {\n k: v\n for k, v in restored_lam_params.items()\n if k in train_state.params[""params""][""lam""]\n }\n\n train_state.params[""params""][""tokenizer""].update(restored_tokenizer_params)\n train_state.params[""params""][""lam""].update(restored_lam_params)\n\n return train_state\n",python,tab +202,1731948,"sample.py",0,0,"from dataclasses import dataclass\nimport time\nimport os\n\nimport dm_pix as pix\nimport einops\nimport jax\nimport jax.numpy as jnp\nimport flax.linen as nn\nimport numpy as np\nfrom orbax.checkpoint import PyTreeCheckpointer\nfrom PIL import Image, ImageDraw\nimport tyro\n\nfrom genie import Genie\nfrom utils.dataloader import get_dataloader\n\n\n@dataclass\nclass Args:\n # Experiment\n seed: int = 0\n seq_len: int = 16\n image_channels: int = 3\n image_height: int = 90\n image_width: int = 160\n data_dir: str = ""data/coinrun_episodes""\n checkpoint: str = """"\n # Sampling\n batch_size: int = 1\n maskgit_steps: int = 25\n temperature: float = 1.0\n sample_argmax: bool = True\n start_frame: int = 0\n # Tokenizer checkpoint\n tokenizer_dim: int = 512\n latent_patch_dim: int = 32\n num_patch_latents: int = 1024\n patch_size: int = 4\n tokenizer_num_blocks: int = 8\n tokenizer_num_heads: int = 8\n # LAM checkpoint\n lam_dim: int = 512\n latent_action_dim: int = 32\n num_latent_actions: int = 6\n lam_patch_size: int = 16\n lam_num_blocks: int = 8\n lam_num_heads: int = 8\n # Dynamics checkpoint\n dyna_dim: int = 512\n dyna_num_blocks: int = 12\n dyna_num_heads: int = 8\n\n\nargs = tyro.cli(Args)\nrng = jax.random.PRNGKey(args.seed)\n\n# --- Load Genie checkpoint ---\ngenie = Genie(\n # Tokenizer\n in_dim=args.image_channels,\n tokenizer_dim=args.tokenizer_dim,\n latent_patch_dim=args.latent_patch_dim,\n num_patch_latents=args.num_patch_latents,\n patch_size=args.patch_size,\n tokenizer_num_blocks=args.tokenizer_num_blocks,\n tokenizer_num_heads=args.tokenizer_num_heads,\n # LAM\n lam_dim=args.lam_dim,\n latent_action_dim=args.latent_action_dim,\n num_latent_actions=args.num_latent_actions,\n lam_patch_size=args.lam_patch_size,\n lam_num_blocks=args.lam_num_blocks,\n lam_num_heads=args.lam_num_heads,\n # Dynamics\n dyna_dim=args.dyna_dim,\n dyna_num_blocks=args.dyna_num_blocks,\n dyna_num_heads=args.dyna_num_heads,\n)\nrng, _rng = jax.random.split(rng)\nimage_shape = (args.image_height, args.image_width, args.image_channels)\ndummy_inputs = dict(\n videos=jnp.zeros((args.batch_size, args.seq_len, *image_shape), dtype=jnp.float32),\n mask_rng=_rng,\n)\nrng, _rng = jax.random.split(rng)\nparams = genie.init(_rng, dummy_inputs)\nckpt = PyTreeCheckpointer().restore(args.checkpoint)[""model""][""params""][""params""]\nparams[""params""].update(ckpt)\n\n\ndef _sampling_wrapper(module, batch):\n return module.sample(batch, args.seq_len, args.maskgit_steps, args.temperature, args.sample_argmax)\n\n# --- Define autoregressive sampling loop ---\ndef 
_autoreg_sample(rng, video_batch, action_batch):\n vid = video_batch[:, : args.start_frame + 1]\n sampling_fn = jax.jit(nn.apply(_sampling_wrapper, genie)) \n rng, _rng = jax.random.split(rng)\n batch = dict(videos=vid, latent_actions=action_batch, rng=_rng)\n generated_vid = sampling_fn(\n params,\n batch\n )\n return generated_vid\n\n# --- Get video + latent actions ---\ntfrecord_files = [\n os.path.join(args.data_dir, x)\n for x in os.listdir(args.data_dir)\n if x.endswith("".tfrecord"")\n]\ndataloader = get_dataloader(\n tfrecord_files,\n args.seq_len,\n args.batch_size,\n args.image_height,\n args.image_width,\n args.image_channels,\n seed=args.seed,\n)\nvideo_batch = next(iter(dataloader))\n# Get latent actions from first video only; clip them down to the specified seq_len\nfirst_video = video_batch[:1, :args.seq_len]\nbatch = dict(videos=first_video)\naction_batch = genie.apply(params, batch, False, method=Genie.vq_encode)\naction_batch = action_batch.reshape(1, args.seq_len - 1, 1)\n# Use actions from first video for all videos\naction_batch = jnp.repeat(action_batch, video_batch.shape[0], axis=0)\n\n# --- Sample + evaluate video ---\nvid = _autoreg_sample(rng, video_batch, action_batch)\ngt = video_batch[:, : vid.shape[1]].clip(0, 1).reshape(-1, *video_batch.shape[2:])\nrecon = vid.clip(0, 1).reshape(-1, *vid.shape[2:])\nssim = pix.ssim(gt[:, args.start_frame + 1 :], recon[:, args.start_frame + 1 :]).mean()\nprint(f""SSIM: {ssim}"")\n\n# --- Construct video ---\nfirst_true = (video_batch[0:1] * 255).astype(np.uint8)\nfirst_pred = (vid[0:1] * 255).astype(np.uint8)\nfirst_video_comparison = np.zeros((2, *vid.shape[1:5]), dtype=np.uint8)\nfirst_video_comparison[0] = first_true[:, : vid.shape[1]]\nfirst_video_comparison[1] = first_pred\n# For other videos, only show generated video\nother_preds = (vid[1:] * 255).astype(np.uint8)\nall_frames = np.concatenate([first_video_comparison, other_preds], axis=0)\nflat_vid = einops.rearrange(all_frames, ""n t h w c -> t h (n w) c"")\n\n# --- Save video ---\nimgs = [Image.fromarray(img) for img in flat_vid]\n# Write actions on each frame\nfor img, action in zip(imgs[1:], action_batch[0, :, 0]):\n d = ImageDraw.Draw(img)\n d.text((2, 2), f""{action}"", fill=255)\nimgs[0].save(\n f""generation_{time.time()}.gif"",\n save_all=True,\n append_images=imgs[1:],\n duration=250,\n loop=0,\n)\n",python,tab +203,1733276,"sample.py",3057,0,"",python,selection_mouse +204,1733276,"sample.py",3056,0,"",python,selection_command +205,1734341,"genie.py",0,0,"",python,tab +206,1758226,"genie.py",4621,0,"",python,selection_mouse +207,1761801,"genie.py",4527,0,"",python,selection_mouse +208,1773944,"genie.py",4950,0,"",python,selection_mouse +209,1774091,"genie.py",4946,5,"batch",python,selection_mouse +210,1782472,"genie.py",4954,0,"",python,selection_mouse +211,1782652,"genie.py",4953,3,"rng",python,selection_mouse +212,10441352,"genie.py",0,0,"Switched from branch 'fix-sampling' to 'correct-batched-sampling'",python,git_branch_checkout +213,15726982,"genie.py",0,0,"Switched from branch 'correct-batched-sampling' to 'main'",python,git_branch_checkout diff --git a/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f1a23455-555b-44b7-b7f2-5fb8550d75021753722279799-2025_07_28-19.04.51.762/source.csv b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f1a23455-555b-44b7-b7f2-5fb8550d75021753722279799-2025_07_28-19.04.51.762/source.csv new file mode 100644 index 
0000000000000000000000000000000000000000..481267577d8e2ac25e259c64037029db180a63f0 --- /dev/null +++ b/927a8af5474e5654810c00ce2e09fd2de87d3e5722f33fa1090d867db114e403/crowd-code-f1a23455-555b-44b7-b7f2-5fb8550d75021753722279799-2025_07_28-19.04.51.762/source.csv @@ -0,0 +1,3 @@ +Sequence,Time,File,RangeOffset,RangeLength,Text,Language,Type +1,5,"utils/nn.py",0,0,"import math\nfrom typing import Tuple\n\nfrom flax import linen as nn\nimport jax\nimport jax.numpy as jnp\nimport einops\n\n\nclass PositionalEncoding(nn.Module):\n """"""https://uvadlc-notebooks.readthedocs.io/en/latest/tutorial_notebooks/JAX/tutorial6/Transformers_and_MHAttention.html""""""\n\n d_model: int # Hidden dimensionality of the input.\n max_len: int = 5000 # Maximum length of a sequence to expect.\n\n def setup(self):\n # Create matrix of [SeqLen, HiddenDim] representing the positional encoding for max_len inputs\n self.pe = jnp.zeros((self.max_len, self.d_model))\n position = jnp.arange(0, self.max_len, dtype=jnp.float32)[:, None]\n div_term = jnp.exp(\n jnp.arange(0, self.d_model, 2) * (-math.log(10000.0) / self.d_model)\n )\n self.pe = self.pe.at[:, 0::2].set(jnp.sin(position * div_term))\n self.pe = self.pe.at[:, 1::2].set(jnp.cos(position * div_term))\n\n def __call__(self, x):\n x = x + self.pe[: x.shape[2]]\n return x\n\n\nclass STBlock(nn.Module):\n dim: int\n ffn_dim: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.remat\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n # --- Spatial attention ---\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=False\n ),\n )(z)\n x = x + z\n\n # --- Temporal attention ---\n x = x.swapaxes(1, 2)\n z = PositionalEncoding(self.dim)(x)\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n causal_mask = jnp.tri(z.shape[-2])\n z = nn.MultiHeadAttention(\n num_heads=self.num_heads,\n qkv_features=self.dim,\n dropout_rate=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n attention_fn=_create_flash_attention_fn(\n self.use_flash_attention, is_causal=True\n ),\n # FIXME (f.srambical): check whether we should still pass the mask if we set is_causal=True\n )(z, mask=causal_mask)\n x = x + z\n x = x.swapaxes(1, 2)\n\n # --- Feedforward ---\n z = nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n z = nn.Dense(\n self.ffn_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n z = nn.gelu(z)\n z = nn.Dense(\n self.dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(z)\n x = x + z\n\n return x\n\n\nclass STTransformer(nn.Module):\n model_dim: int\n ffn_dim: int\n out_dim: int\n num_blocks: int\n num_heads: int\n dropout: float\n param_dtype: jnp.dtype\n dtype: jnp.dtype\n use_flash_attention: bool\n\n @nn.compact\n def __call__(self, x: jax.Array) -> jax.Array:\n x = nn.Sequential(\n [\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.Dense(\n self.model_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n nn.LayerNorm(\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n ),\n ]\n )(x)\n for _ in range(self.num_blocks):\n x = STBlock(\n dim=self.model_dim,\n 
ffn_dim=self.ffn_dim,\n num_heads=self.num_heads,\n dropout=self.dropout,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n use_flash_attention=self.use_flash_attention,\n )(x)\n x = nn.Dense(\n self.out_dim,\n param_dtype=self.param_dtype,\n dtype=self.dtype,\n )(x)\n return x # (B, T, E)\n\n\ndef normalize(x):\n return x / (jnp.linalg.norm(x, ord=2, axis=-1, keepdims=True) + 1e-8)\n\n\nclass VectorQuantizer(nn.Module):\n latent_dim: int\n num_latents: int\n dropout: float\n\n def setup(self):\n self.codebook = normalize(\n self.param(\n ""codebook"",\n nn.initializers.lecun_uniform(),\n (self.num_latents, self.latent_dim),\n )\n )\n self.drop = nn.Dropout(self.dropout, deterministic=False)\n\n def __call__(\n self, x: jax.Array, training: bool\n ) -> Tuple[jax.Array, jax.Array, jax.Array, jax.Array]:\n # --- Compute distances ---\n x = normalize(x)\n codebook = normalize(self.codebook)\n distance = -jnp.matmul(x, codebook.T)\n if training:\n dropout_key = self.make_rng(""dropout"")\n distance = self.drop(distance, rng=dropout_key)\n\n # --- Get indices and embeddings ---\n indices = jnp.argmin(distance, axis=-1)\n z = self.codebook[indices]\n\n # --- Straight through estimator ---\n z_q = x + jax.lax.stop_gradient(z - x)\n return z_q, z, x, indices\n\n def get_codes(self, indices: jax.Array):\n return self.codebook[indices]\n\n\ndef _create_flash_attention_fn(use_flash_attention: bool, is_causal: bool):\n """"""\n Create an attention function that uses flash attention if enabled.\n\n Flax MultiHeadAttention provides tensors with shape (batch..., length, num_heads, head_dim)\n jax.nn.dot_product_attention expects (batch, length, num_heads, head_dim).\n\n We need to reshape to ensure compatibility. cuDNN's flash attention additionally\n requires a sequence length that is a multiple of 4. We pad the sequence length to the nearest\n multiple of 4 and mask accordingly.\n """"""\n\n def attention_fn(query, key, value, bias=None, mask=None, **kwargs):\n implementation = ""cudnn"" if use_flash_attention else None\n\n def _rearrange(x):\n return einops.rearrange(x, ""... l h d -> (...) 
l h d"")\n\n def _pad(x):\n return jnp.pad(x, ((0, 0), (0, pad_size), (0, 0), (0, 0)))\n\n def _fuse_masks(mask: jax.Array, attention_mask: jax.Array) -> jax.Array:\n mask_bool = mask.astype(jnp.bool_)\n expanded_mask = jnp.pad(\n mask_bool, ((0, pad_size), (0, pad_size)), constant_values=False\n )\n return jnp.logical_and(attention_mask, expanded_mask)\n\n original_shape = query.shape\n original_seq_len = query.shape[-3]\n\n # Pad to nearest multiple of 4\n target_seq_len = ((original_seq_len + 3) // 4) * 4\n pad_size = target_seq_len - original_seq_len\n\n query_4d = _pad(_rearrange(query))\n key_4d = _pad(_rearrange(key))\n value_4d = _pad(_rearrange(value))\n\n attention_mask = jnp.ones((target_seq_len, target_seq_len), dtype=jnp.bool_)\n attention_mask = attention_mask.at[original_seq_len:, :].set(False)\n attention_mask = attention_mask.at[:, original_seq_len:].set(False)\n\n mask_4d = (\n _fuse_masks(mask, attention_mask) if mask is not None else attention_mask\n )\n mask_4d = mask_4d[jnp.newaxis, jnp.newaxis, :, :] # (1, 1, seq_len, seq_len)\n\n bias_4d = _pad(_rearrange(bias)) if bias is not None else None\n\n output_4d = jax.nn.dot_product_attention(\n query=query_4d,\n key=key_4d,\n value=value_4d,\n bias=bias_4d,\n mask=mask_4d,\n implementation=implementation,\n is_causal=is_causal,\n **kwargs\n )\n return output_4d[..., :original_seq_len, :, :].reshape(original_shape)\n\n return attention_fn\n",python,tab +2,511,"extension-output-pdoom-org.crowd-code-#1-crowd-code",0,0,"7:04:51 PM [info] Activating crowd-code\n7:04:51 PM [info] Recording started\n7:04:51 PM [info] Initializing git provider using file system watchers...\n7:04:52 PM [info] Git repository found\n7:04:52 PM [info] Git provider initialized successfully\n7:04:52 PM [info] Initial git state: [object Object]\n",Log,tab